diff --git a/.claude/settings.json.example b/.claude/settings.json.example new file mode 100644 index 0000000000..1149895340 --- /dev/null +++ b/.claude/settings.json.example @@ -0,0 +1,19 @@ +{ + "permissions": { + "allow": [], + "deny": [] + }, + "env": { + "__comment": "Environment variables for MCP servers. Override in .claude/settings.local.json with actual values.", + "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" + }, + "enabledMcpjsonServers": [ + "context7", + "sequential-thinking", + "github", + "fetch", + "playwright", + "ide" + ], + "enableAllProjectMcpServers": true + } \ No newline at end of file diff --git a/.devcontainer/README.md b/.devcontainer/README.md index 2b18630a21..359e2e5aef 100644 --- a/.devcontainer/README.md +++ b/.devcontainer/README.md @@ -1,23 +1,26 @@ # Development with devcontainer + This project includes a devcontainer configuration that allows you to open the project in a container with a fully configured development environment. Both frontend and backend environments are initialized when the container is started. + ## GitHub Codespaces + [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/langgenius/dify) you can simply click the button above to open this project in GitHub Codespaces. For more info, check out the [GitHub documentation](https://docs.github.com/en/free-pro-team@latest/github/developing-online-with-codespaces/creating-a-codespace#creating-a-codespace). - ## VS Code Dev Containers + [![Open in Dev Containers](https://img.shields.io/static/v1?label=Dev%20Containers&message=Open&color=blue&logo=visualstudiocode)](https://vscode.dev/redirect?url=vscode://ms-vscode-remote.remote-containers/cloneInVolume?url=https://github.com/langgenius/dify) if you have VS Code installed, you can click the button above to open this project in VS Code Dev Containers. You can learn more in the [Dev Containers documentation](https://code.visualstudio.com/docs/devcontainers/containers). - ## Pros of Devcontainer + Unified Development Environment: By using devcontainers, you can ensure that all developers are developing in the same environment, reducing the occurrence of "it works on my machine" type of issues. Quick Start: New developers can set up their development environment in a few simple steps, without spending a lot of time on environment configuration. @@ -25,11 +28,13 @@ Quick Start: New developers can set up their development environment in a few si Isolation: Devcontainers isolate your project from your host operating system, reducing the chance of OS updates or other application installations impacting the development environment. ## Cons of Devcontainer + Learning Curve: For developers unfamiliar with Docker and VS Code, using devcontainers may be somewhat complex. Performance Impact: While usually minimal, programs running inside a devcontainer may be slightly slower than those running directly on the host. 
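The `.claude/settings.json.example` introduced at the start of this diff ships a placeholder GitHub token and, per its own comment, expects real values to live in `.claude/settings.local.json`. A minimal sketch of that override flow, assuming Claude Code merges the local file over the example-derived settings (the token value shown is hypothetical):

```bash
# Start from the tracked example; keep real secrets only in the untracked local file.
cp .claude/settings.json.example .claude/settings.json

# Hypothetical local override; never commit this file.
cat > .claude/settings.local.json <<'EOF'
{
  "env": {
    "GITHUB_PERSONAL_ACCESS_TOKEN": "ghp_your_real_token_here"
  }
}
EOF
```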
## Troubleshooting + if you see such error message when you open this project in codespaces: ![Alt text](troubleshooting.png) diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index c25bde87b0..2fef313f72 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -1,6 +1,6 @@ #!/bin/bash -npm add -g pnpm@10.13.1 +corepack enable cd web && pnpm install pipx install uv diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index 47e2453f41..a59630d112 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -17,27 +17,25 @@ diverse, inclusive, and healthy community. Examples of behavior that contributes to a positive environment for our community include: -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience -* Focusing on what is best not just for us as individuals, but for the +- Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: -* The use of sexualized language or imagery, and sexual attention or +- The use of sexualized language or imagery, and sexual attention or advances of any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a +- Other conduct which could reasonably be considered inappropriate in a professional setting ## Language Policy To facilitate clear and effective communication, all discussions, comments, documentation, and pull requests in this project should be conducted in English. This ensures that all contributors can participate and collaborate effectively. 
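The `post_create_command.sh` change above replaces a hardcoded global install (`npm add -g pnpm@10.13.1`) with `corepack enable`, so the pnpm version is managed by the repo rather than by the setup script. A sketch of the resulting flow, assuming `web/package.json` pins pnpm via its `packageManager` field (which Corepack reads):

```bash
# Enable Corepack shims; pnpm is then fetched at the version pinned in
# web/package.json ("packageManager" field) instead of a hardcoded 10.13.1.
corepack enable

# Install frontend dependencies with the pinned pnpm.
cd web && pnpm install
```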
- - diff --git a/.github/actions/setup-uv/action.yml b/.github/actions/setup-uv/action.yml deleted file mode 100644 index 6990f6becf..0000000000 --- a/.github/actions/setup-uv/action.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Setup UV and Python - -inputs: - python-version: - description: Python version to use and the UV installed with - required: true - default: '3.12' - uv-version: - description: UV version to set up - required: true - default: '0.8.9' - uv-lockfile: - description: Path to the UV lockfile to restore cache from - required: true - default: '' - enable-cache: - required: true - default: true - -runs: - using: composite - steps: - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ inputs.python-version }} - - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: ${{ inputs.uv-version }} - python-version: ${{ inputs.python-version }} - enable-cache: ${{ inputs.enable-cache }} - cache-dependency-glob: ${{ inputs.uv-lockfile }} diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index f4a5f754e0..aa5a50918a 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,8 +1,8 @@ > [!IMPORTANT] > > 1. Make sure you have read our [contribution guidelines](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) -> 2. Ensure there is an associated issue and you have been assigned to it -> 3. Use the correct syntax to link this PR: `Fixes #`. +> 1. Ensure there is an associated issue and you have been assigned to it +> 1. Use the correct syntax to link this PR: `Fixes #`. ## Summary @@ -12,7 +12,7 @@ | Before | After | |--------|-------| -| ... | ... | +| ... | ... | ## Checklist diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 9c3daddbfc..28ef67a133 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -33,10 +33,11 @@ jobs: persist-credentials: false - name: Setup UV and Python - uses: ./.github/actions/setup-uv + uses: astral-sh/setup-uv@v6 with: + enable-cache: true python-version: ${{ matrix.python-version }} - uv-lockfile: api/uv.lock + cache-dependency-glob: api/uv.lock - name: Check UV lockfile run: uv lock --project api --check @@ -47,7 +48,16 @@ jobs: - name: Run Unit tests run: | uv run --project api bash dev/pytest/pytest_unit_tests.sh - + - name: Run ty check + run: | + cd api + uv add --dev ty + uv run ty check || true + - name: Run pyrefly check + run: | + cd api + uv add --dev pyrefly + uv run pyrefly check || true - name: Coverage Summary run: | set -x diff --git a/.github/workflows/autofix.yml b/.github/workflows/autofix.yml index 152ff3b648..2c9cee2140 100644 --- a/.github/workflows/autofix.yml +++ b/.github/workflows/autofix.yml @@ -2,6 +2,7 @@ name: autofix.ci on: workflow_call: pull_request: + branches: [ "main" ] push: branches: [ "main" ] permissions: @@ -15,7 +16,9 @@ jobs: - uses: actions/checkout@v4 # Use uv to ensure we have the same ruff version in CI and locally. - - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f + - uses: astral-sh/setup-uv@v6 + with: + python-version: "3.12" - run: | cd api uv sync --dev @@ -23,6 +26,10 @@ jobs: uv run ruff check --fix-only . # Format code uv run ruff format . - + - name: ast-grep + run: | + uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all + - name: mdformat + run: | + uvx mdformat . 
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27 - diff --git a/.github/workflows/db-migration-test.yml b/.github/workflows/db-migration-test.yml index 5181546b4a..e8ff85e95c 100644 --- a/.github/workflows/db-migration-test.yml +++ b/.github/workflows/db-migration-test.yml @@ -25,9 +25,11 @@ jobs: persist-credentials: false - name: Setup UV and Python - uses: ./.github/actions/setup-uv + uses: astral-sh/setup-uv@v6 with: - uv-lockfile: api/uv.lock + enable-cache: true + python-version: "3.12" + cache-dependency-glob: api/uv.lock - name: Install dependencies run: uv sync --project api diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 9aad9558b0..8d0ec35ca1 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -36,10 +36,11 @@ jobs: - name: Setup UV and Python if: steps.changed-files.outputs.any_changed == 'true' - uses: ./.github/actions/setup-uv + uses: astral-sh/setup-uv@v6 with: - uv-lockfile: api/uv.lock enable-cache: false + python-version: "3.12" + cache-dependency-glob: api/uv.lock - name: Install dependencies if: steps.changed-files.outputs.any_changed == 'true' diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml index 912267094b..f2ca09fba2 100644 --- a/.github/workflows/vdb-tests.yml +++ b/.github/workflows/vdb-tests.yml @@ -39,10 +39,11 @@ jobs: remove_tool_cache: true - name: Setup UV and Python - uses: ./.github/actions/setup-uv + uses: astral-sh/setup-uv@v6 with: + enable-cache: true python-version: ${{ matrix.python-version }} - uv-lockfile: api/uv.lock + cache-dependency-glob: api/uv.lock - name: Check UV lockfile run: uv lock --project api --check diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 0000000000..8eceaf9ead --- /dev/null +++ b/.mcp.json @@ -0,0 +1,34 @@ +{ + "mcpServers": { + "context7": { + "type": "http", + "url": "https://mcp.context7.com/mcp" + }, + "sequential-thinking": { + "type": "stdio", + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-sequential-thinking"], + "env": {} + }, + "github": { + "type": "stdio", + "command": "npx", + "args": ["-y", "@modelcontextprotocol/server-github"], + "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "${GITHUB_PERSONAL_ACCESS_TOKEN}" + } + }, + "fetch": { + "type": "stdio", + "command": "uvx", + "args": ["mcp-server-fetch"], + "env": {} + }, + "playwright": { + "type": "stdio", + "command": "npx", + "args": ["-y", "@playwright/mcp@latest"], + "env": {} + } + } + } \ No newline at end of file diff --git a/.vscode/README.md b/.vscode/README.md index 26516f0540..87b45787c3 100644 --- a/.vscode/README.md +++ b/.vscode/README.md @@ -4,10 +4,10 @@ This `launch.json.template` file provides various debug configurations for the D ## How to Use -1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory. -2. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file. -3. **Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D). -4. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button. +1. **Create `launch.json`**: If you don't have one, create a file named `launch.json` inside the `.vscode` directory. +1. **Copy Content**: Copy the entire content from `launch.json.template` into your newly created `launch.json` file. +1. 
**Select Debug Configuration**: Go to the Run and Debug view in VS Code / Cursor (Ctrl+Shift+D or Cmd+Shift+D). +1. **Start Debugging**: Select the desired configuration from the dropdown menu and click the green play button. ## Tips diff --git a/AGENTS.md b/AGENTS.md new file mode 120000 index 0000000000..681311eb9c --- /dev/null +++ b/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md index 7ce04382c9..1b649ca9a0 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -7,6 +7,7 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management. The codebase consists of: + - **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture - **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19 - **Docker deployment** (`/docker`): Containerized deployment configurations @@ -46,6 +47,7 @@ pnpm test # Run Jest tests ## Testing Guidelines ### Backend Testing + - Use `pytest` for all backend tests - Write tests first (TDD approach) - Test structure: Arrange-Act-Assert @@ -53,11 +55,13 @@ pnpm test # Run Jest tests ## Code Style Requirements ### Python + - Use type hints for all functions and class attributes - No `Any` types unless absolutely necessary - Implement special methods (`__repr__`, `__str__`) appropriately -### TypeScript/JavaScript +### TypeScript/JavaScript + - Strict TypeScript configuration - ESLint with Prettier integration - Avoid `any` type @@ -73,11 +77,13 @@ pnpm test # Run Jest tests ## Common Development Tasks ### Adding a New API Endpoint + 1. Create controller in `/api/controllers/` -2. Add service logic in `/api/services/` -3. Update routes in controller's `__init__.py` -4. Write tests in `/api/tests/` +1. Add service logic in `/api/services/` +1. Update routes in controller's `__init__.py` +1. Write tests in `/api/tests/` ## Project-Specific Conventions - All async tasks use Celery with Redis as broker +- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations. 
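`AGENTS.md` is added above with file mode `120000`, i.e. as a symbolic link whose stored content is its target path, `CLAUDE.md`, so both agent entry points resolve to the same guidance file. A sketch of creating and verifying such a link locally:

```bash
# Create AGENTS.md as a symlink to CLAUDE.md (git stores it as mode 120000).
ln -s CLAUDE.md AGENTS.md

# Verify git indexes it as a symlink rather than a regular file.
git add AGENTS.md
git ls-files -s AGENTS.md   # expected mode: 120000
```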
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5d4ba36485..fdc414b047 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -34,11 +34,11 @@ Don't forget to link an existing issue or open a new issue in the PR's descripti How we prioritize: - | Issue Type | Priority | - | ------------------------------------------------------------ | --------------- | - | Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical | - | Non-critical bugs, performance boosts | Medium Priority | - | Minor fixes (typos, confusing but working UI) | Low Priority | +| Issue Type | Priority | +| ------------------------------------------------------------ | --------------- | +| Bugs in core functions (cloud service, cannot login, applications not working, security loopholes) | Critical | +| Non-critical bugs, performance boosts | Medium Priority | +| Minor fixes (typos, confusing but working UI) | Low Priority | ### Feature requests @@ -52,23 +52,25 @@ How we prioritize: How we prioritize: - | Feature Type | Priority | - | ------------------------------------------------------------ | --------------- | - | High-Priority Features as being labeled by a team member | High Priority | - | Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority | - | Non-core features and minor enhancements | Low Priority | - | Valuable but not immediate | Future-Feature | +| Feature Type | Priority | +| ------------------------------------------------------------ | --------------- | +| High-Priority Features as being labeled by a team member | High Priority | +| Popular feature requests from our [community feedback board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Medium Priority | +| Non-core features and minor enhancements | Low Priority | +| Valuable but not immediate | Future-Feature | + ## Submitting your PR ### Pull Request Process 1. Fork the repository -2. Before you draft a PR, please create an issue to discuss the changes you want to make -3. Create a new branch for your changes -4. Please add tests for your changes accordingly -5. Ensure your code passes the existing tests -6. Please link the issue in the PR description, `fixes #` -7. Get merged! +1. Before you draft a PR, please create an issue to discuss the changes you want to make +1. Create a new branch for your changes +1. Please add tests for your changes accordingly +1. Ensure your code passes the existing tests +1. Please link the issue in the PR description, `fixes #` +1. Get merged! + ### Setup the project #### Frontend @@ -82,12 +84,14 @@ For setting up the backend service, kindly refer to our detailed [instructions]( #### Other things to note We recommend reviewing this document carefully before proceeding with the setup, as it contains essential information about: + - Prerequisites and dependencies - Installation steps - Configuration details - Common troubleshooting tips Feel free to reach out if you encounter any issues during the setup process. + ## Getting Help If you ever get stuck or get a burning question while contributing, simply shoot your queries our way via the related GitHub issue, or hop onto our [Discord](https://discord.gg/8Tpq4AcN9c) for a quick chat. 
diff --git a/CONTRIBUTING_CN.md b/CONTRIBUTING_CN.md index 69ae7071bb..c278c8fd7a 100644 --- a/CONTRIBUTING_CN.md +++ b/CONTRIBUTING_CN.md @@ -34,12 +34,11 @@ 优先级划分: - | 问题类型 | 优先级 | - | -------------------------------------------------- | ---------- | - | 核心功能 bug(云服务、登录失败、应用无法使用、安全漏洞) | 紧急 | - | 非关键 bug、性能优化 | 中等优先级 | - | 小修复(拼写错误、界面混乱但可用) | 低优先级 | - +| 问题类型 | 优先级 | +| -------------------------------------------------- | ---------- | +| 核心功能 bug(云服务、登录失败、应用无法使用、安全漏洞) | 紧急 | +| 非关键 bug、性能优化 | 中等优先级 | +| 小修复(拼写错误、界面混乱但可用) | 低优先级 | ### 功能请求 @@ -53,12 +52,12 @@ 优先级划分: - | 功能类型 | 优先级 | - | -------------------------------------------------- | ---------- | - | 被团队成员标记为高优先级的功能 | 高优先级 | - | 来自[社区反馈板](https://github.com/langgenius/dify/discussions/categories/feedbacks)的热门功能请求 | 中等优先级 | - | 非核心功能和小改进 | 低优先级 | - | 有价值但非紧急的功能 | 未来特性 | +| 功能类型 | 优先级 | +| -------------------------------------------------- | ---------- | +| 被团队成员标记为高优先级的功能 | 高优先级 | +| 来自[社区反馈板](https://github.com/langgenius/dify/discussions/categories/feedbacks)的热门功能请求 | 中等优先级 | +| 非核心功能和小改进 | 低优先级 | +| 有价值但非紧急的功能 | 未来特性 | ## 提交 PR @@ -67,12 +66,12 @@ ### PR 提交流程 1. Fork 本仓库 -2. 在提交 PR 之前,请先创建 issue 讨论你想要做的修改 -3. 为你的修改创建一个新的分支 -4. 请为你的修改添加相应的测试 -5. 确保你的代码能通过现有的测试 -6. 请在 PR 描述中关联相关 issue,格式为 `fixes #` -7. 等待合并! +1. 在提交 PR 之前,请先创建 issue 讨论你想要做的修改 +1. 为你的修改创建一个新的分支 +1. 请为你的修改添加相应的测试 +1. 确保你的代码能通过现有的测试 +1. 请在 PR 描述中关联相关 issue,格式为 `fixes #` +1. 等待合并! #### 前端 @@ -85,6 +84,7 @@ #### 其他注意事项 我们建议在开始设置之前仔细阅读本文档,因为它包含以下重要信息: + - 前置条件和依赖项 - 安装步骤 - 配置细节 diff --git a/CONTRIBUTING_DE.md b/CONTRIBUTING_DE.md index ddbf3abc55..f819e80bbb 100644 --- a/CONTRIBUTING_DE.md +++ b/CONTRIBUTING_DE.md @@ -32,11 +32,11 @@ Vergessen Sie nicht, in der PR-Beschreibung ein bestehendes Issue zu verlinken o Unsere Priorisierung: - | Fehlertyp | Priorität | - | ------------------------------------------------------------ | --------------- | - | Fehler in Kernfunktionen (Cloud-Service, Login nicht möglich, Anwendungen funktionieren nicht, Sicherheitslücken) | Kritisch | - | Nicht-kritische Fehler, Leistungsverbesserungen | Mittlere Priorität | - | Kleinere Korrekturen (Tippfehler, verwirrende aber funktionierende UI) | Niedrige Priorität | +| Fehlertyp | Priorität | +| ------------------------------------------------------------ | --------------- | +| Fehler in Kernfunktionen (Cloud-Service, Login nicht möglich, Anwendungen funktionieren nicht, Sicherheitslücken) | Kritisch | +| Nicht-kritische Fehler, Leistungsverbesserungen | Mittlere Priorität | +| Kleinere Korrekturen (Tippfehler, verwirrende aber funktionierende UI) | Niedrige Priorität | ### Feature-Anfragen @@ -50,24 +50,24 @@ Unsere Priorisierung: Unsere Priorisierung: - | Feature-Typ | Priorität | - | ------------------------------------------------------------ | --------------- | - | Hochprioritäre Features (durch Teammitglied gekennzeichnet) | Hohe Priorität | - | Beliebte Feature-Anfragen aus unserem [Community-Feedback-Board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Mittlere Priorität | - | Nicht-Kernfunktionen und kleinere Verbesserungen | Niedrige Priorität | - | Wertvoll, aber nicht dringend | Zukunfts-Feature | +| Feature-Typ | Priorität | +| ------------------------------------------------------------ | --------------- | +| Hochprioritäre Features (durch Teammitglied gekennzeichnet) | Hohe Priorität | +| Beliebte Feature-Anfragen aus unserem [Community-Feedback-Board](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Mittlere Priorität | +| 
Nicht-Kernfunktionen und kleinere Verbesserungen | Niedrige Priorität | +| Wertvoll, aber nicht dringend | Zukunfts-Feature | ## Einreichen Ihres PRs ### Pull-Request-Prozess 1. Repository forken -2. Vor dem Erstellen eines PRs bitte ein Issue zur Diskussion der Änderungen erstellen -3. Einen neuen Branch für Ihre Änderungen erstellen -4. Tests für Ihre Änderungen hinzufügen -5. Sicherstellen, dass Ihr Code die bestehenden Tests besteht -6. Issue in der PR-Beschreibung verlinken (`fixes #`) -7. Auf den Merge warten! +1. Vor dem Erstellen eines PRs bitte ein Issue zur Diskussion der Änderungen erstellen +1. Einen neuen Branch für Ihre Änderungen erstellen +1. Tests für Ihre Änderungen hinzufügen +1. Sicherstellen, dass Ihr Code die bestehenden Tests besteht +1. Issue in der PR-Beschreibung verlinken (`fixes #`) +1. Auf den Merge warten! ### Projekt einrichten @@ -82,6 +82,7 @@ Für die Einrichtung des Backend-Service folgen Sie bitte unseren detaillierten #### Weitere Hinweise Wir empfehlen, dieses Dokument sorgfältig zu lesen, da es wichtige Informationen enthält über: + - Voraussetzungen und Abhängigkeiten - Installationsschritte - Konfigurationsdetails @@ -92,4 +93,3 @@ Bei Problemen während der Einrichtung können Sie sich gerne an uns wenden. ## Hilfe bekommen Wenn Sie beim Mitwirken Fragen haben oder nicht weiterkommen, stellen Sie Ihre Fragen einfach im entsprechenden GitHub Issue oder besuchen Sie unseren [Discord](https://discord.gg/8Tpq4AcN9c) für einen schnellen Austausch. - diff --git a/CONTRIBUTING_ES.md b/CONTRIBUTING_ES.md index 98cbb5b457..e19d958c65 100644 --- a/CONTRIBUTING_ES.md +++ b/CONTRIBUTING_ES.md @@ -34,11 +34,11 @@ No olvides vincular un issue existente o abrir uno nuevo en la descripción del Cómo priorizamos: - | Tipo de Issue | Prioridad | - | ------------------------------------------------------------ | --------------- | - | Errores en funciones principales (servicio en la nube, no poder iniciar sesión, aplicaciones que no funcionan, fallos de seguridad) | Crítica | - | Errores no críticos, mejoras de rendimiento | Prioridad Media | - | Correcciones menores (errores tipográficos, UI confusa pero funcional) | Prioridad Baja | +| Tipo de Issue | Prioridad | +| ------------------------------------------------------------ | --------------- | +| Errores en funciones principales (servicio en la nube, no poder iniciar sesión, aplicaciones que no funcionan, fallos de seguridad) | Crítica | +| Errores no críticos, mejoras de rendimiento | Prioridad Media | +| Correcciones menores (errores tipográficos, UI confusa pero funcional) | Prioridad Baja | ### Solicitudes de funcionalidades @@ -52,23 +52,25 @@ Cómo priorizamos: Cómo priorizamos: - | Tipo de Funcionalidad | Prioridad | - | ------------------------------------------------------------ | --------------- | - | Funcionalidades de alta prioridad etiquetadas por un miembro del equipo | Prioridad Alta | - | Solicitudes populares de funcionalidades de nuestro [tablero de comentarios de la comunidad](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Prioridad Media | - | Funcionalidades no principales y mejoras menores | Prioridad Baja | - | Valiosas pero no inmediatas | Futura-Funcionalidad | +| Tipo de Funcionalidad | Prioridad | +| ------------------------------------------------------------ | --------------- | +| Funcionalidades de alta prioridad etiquetadas por un miembro del equipo | Prioridad Alta | +| Solicitudes populares de funcionalidades de nuestro [tablero de comentarios de la 
comunidad](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Prioridad Media | +| Funcionalidades no principales y mejoras menores | Prioridad Baja | +| Valiosas pero no inmediatas | Futura-Funcionalidad | + ## Enviando tu PR ### Proceso de Pull Request 1. Haz un fork del repositorio -2. Antes de redactar un PR, por favor crea un issue para discutir los cambios que quieres hacer -3. Crea una nueva rama para tus cambios -4. Por favor añade pruebas para tus cambios en consecuencia -5. Asegúrate de que tu código pasa las pruebas existentes -6. Por favor vincula el issue en la descripción del PR, `fixes #` -7. ¡Fusiona tu código! +1. Antes de redactar un PR, por favor crea un issue para discutir los cambios que quieres hacer +1. Crea una nueva rama para tus cambios +1. Por favor añade pruebas para tus cambios en consecuencia +1. Asegúrate de que tu código pasa las pruebas existentes +1. Por favor vincula el issue en la descripción del PR, `fixes #` +1. ¡Fusiona tu código! + ### Configuración del proyecto #### Frontend @@ -82,12 +84,14 @@ Para configurar el servicio backend, por favor consulta nuestras [instrucciones #### Otras cosas a tener en cuenta Recomendamos revisar este documento cuidadosamente antes de proceder con la configuración, ya que contiene información esencial sobre: + - Requisitos previos y dependencias - Pasos de instalación - Detalles de configuración - Consejos comunes de solución de problemas No dudes en contactarnos si encuentras algún problema durante el proceso de configuración. + ## Obteniendo Ayuda -Si alguna vez te quedas atascado o tienes una pregunta urgente mientras contribuyes, simplemente envíanos tus consultas a través del issue relacionado de GitHub, o únete a nuestro [Discord](https://discord.gg/8Tpq4AcN9c) para una charla rápida. +Si alguna vez te quedas atascado o tienes una pregunta urgente mientras contribuyes, simplemente envíanos tus consultas a través del issue relacionado de GitHub, o únete a nuestro [Discord](https://discord.gg/8Tpq4AcN9c) para una charla rápida. 
diff --git a/CONTRIBUTING_FR.md b/CONTRIBUTING_FR.md index fc8410dfd6..335e943fcd 100644 --- a/CONTRIBUTING_FR.md +++ b/CONTRIBUTING_FR.md @@ -34,11 +34,11 @@ N'oubliez pas de lier un problème existant ou d'ouvrir un nouveau problème dan Comment nous priorisons : - | Type de Problème | Priorité | - | ------------------------------------------------------------ | --------------- | - | Bugs dans les fonctions principales (service cloud, impossibilité de se connecter, applications qui ne fonctionnent pas, failles de sécurité) | Critique | - | Bugs non critiques, améliorations de performance | Priorité Moyenne | - | Corrections mineures (fautes de frappe, UI confuse mais fonctionnelle) | Priorité Basse | +| Type de Problème | Priorité | +| ------------------------------------------------------------ | --------------- | +| Bugs dans les fonctions principales (service cloud, impossibilité de se connecter, applications qui ne fonctionnent pas, failles de sécurité) | Critique | +| Bugs non critiques, améliorations de performance | Priorité Moyenne | +| Corrections mineures (fautes de frappe, UI confuse mais fonctionnelle) | Priorité Basse | ### Demandes de fonctionnalités @@ -52,23 +52,25 @@ Comment nous priorisons : Comment nous priorisons : - | Type de Fonctionnalité | Priorité | - | ------------------------------------------------------------ | --------------- | - | Fonctionnalités hautement prioritaires étiquetées par un membre de l'équipe | Priorité Haute | - | Demandes populaires de fonctionnalités de notre [tableau de feedback communautaire](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Priorité Moyenne | - | Fonctionnalités non essentielles et améliorations mineures | Priorité Basse | - | Précieuses mais non immédiates | Fonctionnalité Future | +| Type de Fonctionnalité | Priorité | +| ------------------------------------------------------------ | --------------- | +| Fonctionnalités hautement prioritaires étiquetées par un membre de l'équipe | Priorité Haute | +| Demandes populaires de fonctionnalités de notre [tableau de feedback communautaire](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Priorité Moyenne | +| Fonctionnalités non essentielles et améliorations mineures | Priorité Basse | +| Précieuses mais non immédiates | Fonctionnalité Future | + ## Soumettre votre PR ### Processus de Pull Request 1. Forkez le dépôt -2. Avant de rédiger une PR, veuillez créer un problème pour discuter des changements que vous souhaitez apporter -3. Créez une nouvelle branche pour vos changements -4. Veuillez ajouter des tests pour vos changements en conséquence -5. Assurez-vous que votre code passe les tests existants -6. Veuillez lier le problème dans la description de la PR, `fixes #` -7. Faites fusionner votre code ! +1. Avant de rédiger une PR, veuillez créer un problème pour discuter des changements que vous souhaitez apporter +1. Créez une nouvelle branche pour vos changements +1. Veuillez ajouter des tests pour vos changements en conséquence +1. Assurez-vous que votre code passe les tests existants +1. Veuillez lier le problème dans la description de la PR, `fixes #` +1. Faites fusionner votre code ! 
+ ### Configuration du projet #### Frontend @@ -82,12 +84,14 @@ Pour configurer le service backend, veuillez consulter nos [instructions détail #### Autres choses à noter Nous recommandons de revoir attentivement ce document avant de procéder à la configuration, car il contient des informations essentielles sur : + - Prérequis et dépendances - Étapes d'installation - Détails de configuration - Conseils courants de dépannage N'hésitez pas à nous contacter si vous rencontrez des problèmes pendant le processus de configuration. + ## Obtenir de l'aide -Si jamais vous êtes bloqué ou avez une question urgente en contribuant, envoyez-nous simplement vos questions via le problème GitHub concerné, ou rejoignez notre [Discord](https://discord.gg/8Tpq4AcN9c) pour une discussion rapide. +Si jamais vous êtes bloqué ou avez une question urgente en contribuant, envoyez-nous simplement vos questions via le problème GitHub concerné, ou rejoignez notre [Discord](https://discord.gg/8Tpq4AcN9c) pour une discussion rapide. diff --git a/CONTRIBUTING_JA.md b/CONTRIBUTING_JA.md index e991d0263e..2d0d79fc16 100644 --- a/CONTRIBUTING_JA.md +++ b/CONTRIBUTING_JA.md @@ -34,11 +34,11 @@ PRの説明には、既存のイシューへのリンクを含めるか、新し 優先順位の付け方: - | 問題の種類 | 優先度 | - | ------------------------------------------------------------ | --------- | - | コア機能のバグ(クラウドサービス、ログイン不可、アプリケーション不具合、セキュリティ脆弱性) | 最重要 | - | 重要度の低いバグ、パフォーマンス改善 | 中程度 | - | 軽微な修正(タイプミス、分かりにくいが動作するUI) | 低 | +| 問題の種類 | 優先度 | +| ------------------------------------------------------------ | --------- | +| コア機能のバグ(クラウドサービス、ログイン不可、アプリケーション不具合、セキュリティ脆弱性) | 最重要 | +| 重要度の低いバグ、パフォーマンス改善 | 中程度 | +| 軽微な修正(タイプミス、分かりにくいが動作するUI) | 低 | ### 機能リクエスト @@ -52,24 +52,24 @@ PRの説明には、既存のイシューへのリンクを含めるか、新し 優先順位の付け方: - | 機能の種類 | 優先度 | - | ------------------------------------------------------------ | --------- | - | チームメンバーによって高優先度とラベル付けされた機能 | 高 | - | [コミュニティフィードボード](https://github.com/langgenius/dify/discussions/categories/feedbacks)での人気の機能リクエスト | 中程度 | - | 非コア機能と軽微な改善 | 低 | - | 価値はあるが緊急性の低いもの | 将来対応 | +| 機能の種類 | 優先度 | +| ------------------------------------------------------------ | --------- | +| チームメンバーによって高優先度とラベル付けされた機能 | 高 | +| [コミュニティフィードボード](https://github.com/langgenius/dify/discussions/categories/feedbacks)での人気の機能リクエスト | 中程度 | +| 非コア機能と軽微な改善 | 低 | +| 価値はあるが緊急性の低いもの | 将来対応 | ## PRの提出 ### プルリクエストのプロセス 1. リポジトリをフォークする -2. PRを作成する前に、変更内容についてイシューで議論する -3. 変更用の新しいブランチを作成する -4. 変更に応じたテストを追加する -5. 既存のテストをパスすることを確認する -6. PRの説明文にイシューをリンクする(`fixes #`) -7. マージ完了! +1. PRを作成する前に、変更内容についてイシューで議論する +1. 変更用の新しいブランチを作成する +1. 変更に応じたテストを追加する +1. 既存のテストをパスすることを確認する +1. PRの説明文にイシューをリンクする(`fixes #`) +1. マージ完了! 
### プロジェクトのセットアップ @@ -84,6 +84,7 @@ PRの説明には、既存のイシューへのリンクを含めるか、新し #### その他の注意点 セットアップを進める前に、以下の重要な情報が含まれているため、このドキュメントを注意深く確認することをお勧めします: + - 前提条件と依存関係 - インストール手順 - 設定の詳細 @@ -94,4 +95,3 @@ PRの説明には、既存のイシューへのリンクを含めるか、新し ## サポートを受ける 貢献中に行き詰まったり、緊急の質問がある場合は、関連するGitHubイシューで質問するか、[Discord](https://discord.gg/8Tpq4AcN9c)で気軽にチャットしてください。 - diff --git a/CONTRIBUTING_KR.md b/CONTRIBUTING_KR.md index 78d3f38c47..14b1c9a9ca 100644 --- a/CONTRIBUTING_KR.md +++ b/CONTRIBUTING_KR.md @@ -34,11 +34,11 @@ PR 설명에 기존 이슈를 연결하거나 새 이슈를 여는 것을 잊지 우선순위 결정 방법: - | 이슈 유형 | 우선순위 | - | ------------------------------------------------------------ | --------------- | - | 핵심 기능의 버그(클라우드 서비스, 로그인 불가, 애플리케이션 작동 불능, 보안 취약점) | 중대 | - | 비중요 버그, 성능 향상 | 중간 우선순위 | - | 사소한 수정(오타, 혼란스럽지만 작동하는 UI) | 낮은 우선순위 | +| 이슈 유형 | 우선순위 | +| ------------------------------------------------------------ | --------------- | +| 핵심 기능의 버그(클라우드 서비스, 로그인 불가, 애플리케이션 작동 불능, 보안 취약점) | 중대 | +| 비중요 버그, 성능 향상 | 중간 우선순위 | +| 사소한 수정(오타, 혼란스럽지만 작동하는 UI) | 낮은 우선순위 | ### 기능 요청 @@ -52,23 +52,25 @@ PR 설명에 기존 이슈를 연결하거나 새 이슈를 여는 것을 잊지 우선순위 결정 방법: - | 기능 유형 | 우선순위 | - | ------------------------------------------------------------ | --------------- | - | 팀 구성원에 의해 레이블이 지정된 고우선순위 기능 | 높은 우선순위 | - | 우리의 [커뮤니티 피드백 보드](https://github.com/langgenius/dify/discussions/categories/feedbacks)에서 인기 있는 기능 요청 | 중간 우선순위 | - | 비핵심 기능 및 사소한 개선 | 낮은 우선순위 | - | 가치 있지만 즉시 필요하지 않은 기능 | 미래 기능 | +| 기능 유형 | 우선순위 | +| ------------------------------------------------------------ | --------------- | +| 팀 구성원에 의해 레이블이 지정된 고우선순위 기능 | 높은 우선순위 | +| 우리의 [커뮤니티 피드백 보드](https://github.com/langgenius/dify/discussions/categories/feedbacks)에서 인기 있는 기능 요청 | 중간 우선순위 | +| 비핵심 기능 및 사소한 개선 | 낮은 우선순위 | +| 가치 있지만 즉시 필요하지 않은 기능 | 미래 기능 | + ## PR 제출하기 ### Pull Request 프로세스 1. 저장소를 포크하세요 -2. PR을 작성하기 전에, 변경하고자 하는 내용에 대해 논의하기 위한 이슈를 생성해 주세요 -3. 변경 사항을 위한 새 브랜치를 만드세요 -4. 변경 사항에 대한 테스트를 적절히 추가해 주세요 -5. 코드가 기존 테스트를 통과하는지 확인하세요 -6. PR 설명에 이슈를 연결해 주세요, `fixes #<이슈_번호>` -7. 병합 완료! +1. PR을 작성하기 전에, 변경하고자 하는 내용에 대해 논의하기 위한 이슈를 생성해 주세요 +1. 변경 사항을 위한 새 브랜치를 만드세요 +1. 변경 사항에 대한 테스트를 적절히 추가해 주세요 +1. 코드가 기존 테스트를 통과하는지 확인하세요 +1. PR 설명에 이슈를 연결해 주세요, `fixes #<이슈_번호>` +1. 병합 완료! + ### 프로젝트 설정하기 #### 프론트엔드 @@ -82,12 +84,14 @@ PR 설명에 기존 이슈를 연결하거나 새 이슈를 여는 것을 잊지 #### 기타 참고 사항 설정을 진행하기 전에 이 문서를 주의 깊게 검토하는 것을 권장합니다. 다음과 같은 필수 정보가 포함되어 있습니다: + - 필수 조건 및 종속성 - 설치 단계 - 구성 세부 정보 - 일반적인 문제 해결 팁 설정 과정에서 문제가 발생하면 언제든지 연락해 주세요. + ## 도움 받기 -기여하는 동안 막히거나 긴급한 질문이 있으면, 관련 GitHub 이슈를 통해 질문을 보내거나, 빠른 대화를 위해 우리의 [Discord](https://discord.gg/8Tpq4AcN9c)에 참여하세요. +기여하는 동안 막히거나 긴급한 질문이 있으면, 관련 GitHub 이슈를 통해 질문을 보내거나, 빠른 대화를 위해 우리의 [Discord](https://discord.gg/8Tpq4AcN9c)에 참여하세요. 
diff --git a/CONTRIBUTING_PT.md b/CONTRIBUTING_PT.md index 7347fd7f9c..aeabcad51f 100644 --- a/CONTRIBUTING_PT.md +++ b/CONTRIBUTING_PT.md @@ -34,11 +34,11 @@ Não se esqueça de vincular um problema existente ou abrir um novo problema na Como priorizamos: - | Tipo de Problema | Prioridade | - | ------------------------------------------------------------ | --------------- | - | Bugs em funções centrais (serviço em nuvem, não conseguir fazer login, aplicações não funcionando, falhas de segurança) | Crítica | - | Bugs não críticos, melhorias de desempenho | Prioridade Média | - | Correções menores (erros de digitação, interface confusa mas funcional) | Prioridade Baixa | +| Tipo de Problema | Prioridade | +| ------------------------------------------------------------ | --------------- | +| Bugs em funções centrais (serviço em nuvem, não conseguir fazer login, aplicações não funcionando, falhas de segurança) | Crítica | +| Bugs não críticos, melhorias de desempenho | Prioridade Média | +| Correções menores (erros de digitação, interface confusa mas funcional) | Prioridade Baixa | ### Solicitações de recursos @@ -52,23 +52,25 @@ Como priorizamos: Como priorizamos: - | Tipo de Recurso | Prioridade | - | ------------------------------------------------------------ | --------------- | - | Recursos de alta prioridade conforme rotulado por um membro da equipe | Prioridade Alta | - | Solicitações populares de recursos do nosso [quadro de feedback da comunidade](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Prioridade Média | - | Recursos não essenciais e melhorias menores | Prioridade Baixa | - | Valiosos mas não imediatos | Recurso Futuro | +| Tipo de Recurso | Prioridade | +| ------------------------------------------------------------ | --------------- | +| Recursos de alta prioridade conforme rotulado por um membro da equipe | Prioridade Alta | +| Solicitações populares de recursos do nosso [quadro de feedback da comunidade](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Prioridade Média | +| Recursos não essenciais e melhorias menores | Prioridade Baixa | +| Valiosos mas não imediatos | Recurso Futuro | + ## Enviando seu PR ### Processo de Pull Request 1. Faça um fork do repositório -2. Antes de elaborar um PR, por favor crie um problema para discutir as mudanças que você quer fazer -3. Crie um novo branch para suas alterações -4. Por favor, adicione testes para suas alterações conforme apropriado -5. Certifique-se de que seu código passa nos testes existentes -6. Por favor, vincule o problema na descrição do PR, `fixes #` -7. Faça o merge do seu código! +1. Antes de elaborar um PR, por favor crie um problema para discutir as mudanças que você quer fazer +1. Crie um novo branch para suas alterações +1. Por favor, adicione testes para suas alterações conforme apropriado +1. Certifique-se de que seu código passa nos testes existentes +1. Por favor, vincule o problema na descrição do PR, `fixes #` +1. Faça o merge do seu código! 
+ ### Configurando o projeto #### Frontend @@ -82,12 +84,14 @@ Para configurar o serviço backend, por favor consulte nossas [instruções deta #### Outras coisas a observar Recomendamos revisar este documento cuidadosamente antes de prosseguir com a configuração, pois ele contém informações essenciais sobre: + - Pré-requisitos e dependências - Etapas de instalação - Detalhes de configuração - Dicas comuns de solução de problemas Sinta-se à vontade para entrar em contato se encontrar quaisquer problemas durante o processo de configuração. + ## Obtendo Ajuda -Se você ficar preso ou tiver uma dúvida urgente enquanto contribui, simplesmente envie suas perguntas através do problema relacionado no GitHub, ou entre no nosso [Discord](https://discord.gg/8Tpq4AcN9c) para uma conversa rápida. +Se você ficar preso ou tiver uma dúvida urgente enquanto contribui, simplesmente envie suas perguntas através do problema relacionado no GitHub, ou entre no nosso [Discord](https://discord.gg/8Tpq4AcN9c) para uma conversa rápida. diff --git a/CONTRIBUTING_TR.md b/CONTRIBUTING_TR.md index 681f05689b..d016802a53 100644 --- a/CONTRIBUTING_TR.md +++ b/CONTRIBUTING_TR.md @@ -34,11 +34,11 @@ PR açıklamasında mevcut bir sorunu bağlamayı veya yeni bir sorun açmayı u Nasıl önceliklendiriyoruz: - | Sorun Türü | Öncelik | - | ------------------------------------------------------------ | --------------- | - | Temel işlevlerdeki hatalar (bulut hizmeti, giriş yapamama, çalışmayan uygulamalar, güvenlik açıkları) | Kritik | - | Kritik olmayan hatalar, performans artışları | Orta Öncelik | - | Küçük düzeltmeler (yazım hataları, kafa karıştırıcı ama çalışan UI) | Düşük Öncelik | +| Sorun Türü | Öncelik | +| ------------------------------------------------------------ | --------------- | +| Temel işlevlerdeki hatalar (bulut hizmeti, giriş yapamama, çalışmayan uygulamalar, güvenlik açıkları) | Kritik | +| Kritik olmayan hatalar, performans artışları | Orta Öncelik | +| Küçük düzeltmeler (yazım hataları, kafa karıştırıcı ama çalışan UI) | Düşük Öncelik | ### Özellik İstekleri @@ -52,23 +52,25 @@ Nasıl önceliklendiriyoruz: Nasıl önceliklendiriyoruz: - | Özellik Türü | Öncelik | - | ------------------------------------------------------------ | --------------- | - | Bir ekip üyesi tarafından etiketlenen Yüksek Öncelikli Özellikler | Yüksek Öncelik | - | [Topluluk geri bildirim panosundan](https://github.com/langgenius/dify/discussions/categories/feedbacks) popüler özellik istekleri | Orta Öncelik | - | Temel olmayan özellikler ve küçük geliştirmeler | Düşük Öncelik | - | Değerli ama acil olmayan | Gelecek-Özellik | +| Özellik Türü | Öncelik | +| ------------------------------------------------------------ | --------------- | +| Bir ekip üyesi tarafından etiketlenen Yüksek Öncelikli Özellikler | Yüksek Öncelik | +| [Topluluk geri bildirim panosundan](https://github.com/langgenius/dify/discussions/categories/feedbacks) popüler özellik istekleri | Orta Öncelik | +| Temel olmayan özellikler ve küçük geliştirmeler | Düşük Öncelik | +| Değerli ama acil olmayan | Gelecek-Özellik | + ## PR'nizi Göndermek ### Pull Request Süreci 1. Depoyu fork edin -2. Bir PR taslağı oluşturmadan önce, yapmak istediğiniz değişiklikleri tartışmak için lütfen bir sorun oluşturun -3. Değişiklikleriniz için yeni bir dal oluşturun -4. Lütfen değişiklikleriniz için uygun testler ekleyin -5. Kodunuzun mevcut testleri geçtiğinden emin olun -6. Lütfen PR açıklamasında sorunu bağlayın, `fixes #` -7. Kodunuzu birleştirin! +1. 
Bir PR taslağı oluşturmadan önce, yapmak istediğiniz değişiklikleri tartışmak için lütfen bir sorun oluşturun +1. Değişiklikleriniz için yeni bir dal oluşturun +1. Lütfen değişiklikleriniz için uygun testler ekleyin +1. Kodunuzun mevcut testleri geçtiğinden emin olun +1. Lütfen PR açıklamasında sorunu bağlayın, `fixes #` +1. Kodunuzu birleştirin! + ### Projeyi Kurma #### Frontend @@ -82,12 +84,14 @@ Backend hizmetini kurmak için, lütfen `api/README.md` dosyasındaki detaylı [ #### Dikkat Edilecek Diğer Şeyler Kuruluma geçmeden önce bu belgeyi dikkatlice incelemenizi öneririz, çünkü şunlar hakkında temel bilgiler içerir: + - Ön koşullar ve bağımlılıklar - Kurulum adımları - Yapılandırma detayları - Yaygın sorun giderme ipuçları Kurulum süreci sırasında herhangi bir sorunla karşılaşırsanız bizimle iletişime geçmekten çekinmeyin. + ## Yardım Almak -Katkıda bulunurken takılırsanız veya yanıcı bir sorunuz olursa, sorularınızı ilgili GitHub sorunu aracılığıyla bize gönderin veya hızlı bir sohbet için [Discord'umuza](https://discord.gg/8Tpq4AcN9c) katılın. +Katkıda bulunurken takılırsanız veya yanıcı bir sorunuz olursa, sorularınızı ilgili GitHub sorunu aracılığıyla bize gönderin veya hızlı bir sohbet için [Discord'umuza](https://discord.gg/8Tpq4AcN9c) katılın. diff --git a/CONTRIBUTING_TW.md b/CONTRIBUTING_TW.md index a61ea918c5..5c4d7022fe 100644 --- a/CONTRIBUTING_TW.md +++ b/CONTRIBUTING_TW.md @@ -22,7 +22,7 @@ ### 錯誤回報 -> [!IMPORTANT] +> [!IMPORTANT]\ > 提交錯誤回報時,請務必包含以下資訊: - 清晰明確的標題 @@ -34,15 +34,15 @@ 優先順序評估: - | 議題類型 | 優先級 | - | -------- | ------ | - | 核心功能錯誤(雲端服務、無法登入、應用程式無法運作、安全漏洞) | 緊急 | - | 非緊急錯誤、效能優化 | 中等 | - | 次要修正(拼字錯誤、介面混淆但可運作) | 低 | +| 議題類型 | 優先級 | +| -------- | ------ | +| 核心功能錯誤(雲端服務、無法登入、應用程式無法運作、安全漏洞) | 緊急 | +| 非緊急錯誤、效能優化 | 中等 | +| 次要修正(拼字錯誤、介面混淆但可運作) | 低 | ### 功能請求 -> [!NOTE] +> [!NOTE]\ > 提交功能請求時,請務必包含以下資訊: - 清晰明確的標題 @@ -52,24 +52,24 @@ 優先順序評估: - | 功能類型 | 優先級 | - | -------- | ------ | - | 團隊成員標記為高優先級的功能 | 高 | - | 來自[社群回饋板](https://github.com/langgenius/dify/discussions/categories/feedbacks)的熱門功能請求 | 中 | - | 非核心功能和小幅改進 | 低 | - | 有價值但非急迫的功能 | 未來功能 | +| 功能類型 | 優先級 | +| -------- | ------ | +| 團隊成員標記為高優先級的功能 | 高 | +| 來自[社群回饋板](https://github.com/langgenius/dify/discussions/categories/feedbacks)的熱門功能請求 | 中 | +| 非核心功能和小幅改進 | 低 | +| 有價值但非急迫的功能 | 未來功能 | ## 提交 PR ### PR 流程 1. Fork 專案 -2. 在開始撰寫 PR 前,請先建立議題討論你想做的更改 -3. 為你的更改建立新分支 -4. 請為你的更改新增相應的測試 -5. 確保你的程式碼通過現有測試 -6. 請在 PR 描述中連結相關議題,使用 `fixes #` -7. 等待合併! +1. 在開始撰寫 PR 前,請先建立議題討論你想做的更改 +1. 為你的更改建立新分支 +1. 請為你的更改新增相應的測試 +1. 確保你的程式碼通過現有測試 +1. 請在 PR 描述中連結相關議題,使用 `fixes #` +1. 等待合併! ### 專案設定 @@ -84,6 +84,7 @@ #### 其他注意事項 我們建議在開始設定前仔細閱讀此文件,因為它包含以下重要資訊: + - 前置需求和相依性 - 安裝步驟 - 設定細節 @@ -94,4 +95,3 @@ ## 尋求協助 如果你在貢獻過程中遇到困難或有急切的問題,可以透過相關的 GitHub 議題詢問,或加入我們的 [Discord](https://discord.gg/8Tpq4AcN9c) 進行即時交流。 - diff --git a/CONTRIBUTING_VI.md b/CONTRIBUTING_VI.md index 807054acce..2ad431296a 100644 --- a/CONTRIBUTING_VI.md +++ b/CONTRIBUTING_VI.md @@ -22,7 +22,7 @@ Hãy tham gia, đóng góp và cùng nhau xây dựng điều tuyệt vời! ### Báo cáo lỗi -> [!QUAN TRỌNG] +> [!QUAN TRỌNG]\ > Vui lòng đảm bảo cung cấp các thông tin sau khi gửi báo cáo lỗi: - Tiêu đề rõ ràng và mô tả @@ -34,11 +34,11 @@ Hãy tham gia, đóng góp và cùng nhau xây dựng điều tuyệt vời! 
Cách chúng tôi ưu tiên: - | Loại vấn đề | Mức độ ưu tiên | - | ----------- | -------------- | - | Lỗi trong các chức năng cốt lõi (dịch vụ đám mây, không thể đăng nhập, ứng dụng không hoạt động, lỗ hổng bảo mật) | Quan trọng | - | Lỗi không nghiêm trọng, cải thiện hiệu suất | Ưu tiên trung bình | - | Sửa lỗi nhỏ (lỗi chính tả, UI gây nhầm lẫn nhưng vẫn hoạt động) | Ưu tiên thấp | +| Loại vấn đề | Mức độ ưu tiên | +| ----------- | -------------- | +| Lỗi trong các chức năng cốt lõi (dịch vụ đám mây, không thể đăng nhập, ứng dụng không hoạt động, lỗ hổng bảo mật) | Quan trọng | +| Lỗi không nghiêm trọng, cải thiện hiệu suất | Ưu tiên trung bình | +| Sửa lỗi nhỏ (lỗi chính tả, UI gây nhầm lẫn nhưng vẫn hoạt động) | Ưu tiên thấp | ### Yêu cầu tính năng @@ -52,24 +52,24 @@ Cách chúng tôi ưu tiên: Cách chúng tôi ưu tiên: - | Loại tính năng | Mức độ ưu tiên | - | -------------- | -------------- | - | Tính năng ưu tiên cao được gắn nhãn bởi thành viên nhóm | Ưu tiên cao | - | Yêu cầu tính năng phổ biến từ [bảng phản hồi cộng đồng](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Ưu tiên trung bình | - | Tính năng không cốt lõi và cải tiến nhỏ | Ưu tiên thấp | - | Có giá trị nhưng không cấp bách | Tính năng tương lai | +| Loại tính năng | Mức độ ưu tiên | +| -------------- | -------------- | +| Tính năng ưu tiên cao được gắn nhãn bởi thành viên nhóm | Ưu tiên cao | +| Yêu cầu tính năng phổ biến từ [bảng phản hồi cộng đồng](https://github.com/langgenius/dify/discussions/categories/feedbacks) | Ưu tiên trung bình | +| Tính năng không cốt lõi và cải tiến nhỏ | Ưu tiên thấp | +| Có giá trị nhưng không cấp bách | Tính năng tương lai | ## Gửi PR của bạn ### Quy trình tạo Pull Request 1. Fork repository -2. Trước khi soạn PR, vui lòng tạo issue để thảo luận về các thay đổi bạn muốn thực hiện -3. Tạo nhánh mới cho các thay đổi của bạn -4. Vui lòng thêm test cho các thay đổi tương ứng -5. Đảm bảo code của bạn vượt qua các test hiện có -6. Vui lòng liên kết issue trong mô tả PR, `fixes #` -7. Được merge! +1. Trước khi soạn PR, vui lòng tạo issue để thảo luận về các thay đổi bạn muốn thực hiện +1. Tạo nhánh mới cho các thay đổi của bạn +1. Vui lòng thêm test cho các thay đổi tương ứng +1. Đảm bảo code của bạn vượt qua các test hiện có +1. Vui lòng liên kết issue trong mô tả PR, `fixes #` +1. Được merge! ### Thiết lập dự án @@ -84,6 +84,7 @@ Cách chúng tôi ưu tiên: #### Các điểm cần lưu ý khác Chúng tôi khuyến nghị xem xét kỹ tài liệu này trước khi tiến hành thiết lập, vì nó chứa thông tin thiết yếu về: + - Điều kiện tiên quyết và dependencies - Các bước cài đặt - Chi tiết cấu hình @@ -94,4 +95,3 @@ Chúng tôi khuyến nghị xem xét kỹ tài liệu này trước khi tiến h ## Nhận trợ giúp Nếu bạn bị mắc kẹt hoặc có câu hỏi cấp bách trong quá trình đóng góp, chỉ cần gửi câu hỏi của bạn thông qua issue GitHub liên quan, hoặc tham gia [Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi để trò chuyện nhanh. - diff --git a/README.md b/README.md index 80e44b0728..90da1d3def 100644 --- a/README.md +++ b/README.md @@ -107,74 +107,6 @@ Monitor and analyze application logs and performance over time. You could contin **7. Backend-as-a-Service**: All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic. -## Feature Comparison - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| Feature | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
-| --- | --- | --- | --- | --- |
-| Programming Approach | API + App-oriented | Python Code | App-oriented | API-oriented |
-| Supported LLMs | Rich Variety | Rich Variety | Rich Variety | OpenAI-only |
-| RAG Engine | … | … | … | … |
-| Agent | … | … | … | … |
-| Workflow | … | … | … | … |
-| Observability | … | … | … | … |
-| Enterprise Feature (SSO/Access control) | … | … | … | … |
-| Local Deployment | … | … | … | … |
- ## Using Dify - **Cloud
** @@ -185,7 +117,8 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions. - **Dify for enterprise / organizations
** - We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
+ We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs.
+ > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding. ## Staying ahead @@ -230,16 +163,15 @@ Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/) #### Using Alibaba Cloud Computing Nest -Quickly deploy Dify to Alibaba cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) +Quickly deploy Dify to Alibaba cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) #### Using Alibaba Cloud Data Management -One-Click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) +One-Click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) #### Deploy to AKS with Azure Devops Pipeline -One-Click deploy Dify to AKS with [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - +One-Click deploy Dify to AKS with [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) ## Contributing diff --git a/README_AR.md b/README_AR.md index 9c8378d087..2451757ab5 100644 --- a/README_AR.md +++ b/README_AR.md @@ -52,7 +52,7 @@ مشروع Dify هو منصة تطوير تطبيقات الذكاء الصناعي مفتوحة المصدر. تجمع واجهته البديهية بين سير العمل الذكي بالذكاء الاصطناعي وخط أنابيب RAG وقدرات الوكيل وإدارة النماذج وميزات الملاحظة وأكثر من ذلك، مما يتيح لك الانتقال بسرعة من المرحلة التجريبية إلى الإنتاج. إليك قائمة بالميزات الأساسية:

-**1. سير العمل**: قم ببناء واختبار سير عمل الذكاء الاصطناعي القوي على قماش بصري، مستفيدًا من جميع الميزات التالية وأكثر. +**1. سير العمل**: قم ببناء واختبار سير عمل الذكاء الاصطناعي القوي على قماش بصري، مستفيدًا من جميع الميزات التالية وأكثر. **2. الدعم الشامل للنماذج**: تكامل سلس مع مئات من LLMs الخاصة / مفتوحة المصدر من عشرات من موفري التحليل والحلول المستضافة ذاتيًا، مما يغطي GPT و Mistral و Llama3 وأي نماذج متوافقة مع واجهة OpenAI API. يمكن العثور على قائمة كاملة بمزودي النموذج المدعومين [هنا](https://docs.dify.ai/getting-started/readme/model-providers). @@ -68,88 +68,20 @@ **7.الواجهة الخلفية (Backend) كخدمة**: تأتي جميع عروض Dify مع APIs مطابقة، حتى يمكنك دمج Dify بسهولة في منطق أعمالك الخاص. -## مقارنة الميزات - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| الميزة | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
-| --- | --- | --- | --- | --- |
-| نهج البرمجة | موجّه لـ تطبيق + واجهة برمجة تطبيق (API) | برمجة Python | موجه لتطبيق | واجهة برمجة تطبيق (API) |
-| LLMs المدعومة | تنوع غني | تنوع غني | تنوع غني | فقط OpenAI |
-| محرك RAG | … | … | … | … |
-| الوكيل | … | … | … | … |
-| سير العمل | … | … | … | … |
-| الملاحظة | … | … | … | … |
-| ميزات الشركات (SSO / مراقبة الوصول) | … | … | … | … |
-| نشر محلي | … | … | … | … |
- ## استخدام Dify - **سحابة
** -نحن نستضيف [خدمة Dify Cloud](https://dify.ai) لأي شخص لتجربتها بدون أي إعدادات. توفر كل قدرات النسخة التي تمت استضافتها ذاتيًا، وتتضمن 200 أمر GPT-4 مجانًا في خطة الصندوق الرملي. + نحن نستضيف [خدمة Dify Cloud](https://dify.ai) لأي شخص لتجربتها بدون أي إعدادات. توفر كل قدرات النسخة التي تمت استضافتها ذاتيًا، وتتضمن 200 أمر GPT-4 مجانًا في خطة الصندوق الرملي. - **استضافة ذاتية لنسخة المجتمع Dify
** -ابدأ سريعًا في تشغيل Dify في بيئتك باستخدام [دليل البدء السريع](#البدء السريع). -استخدم [توثيقنا](https://docs.dify.ai) للمزيد من المراجع والتعليمات الأعمق. + ابدأ سريعًا في تشغيل Dify في بيئتك باستخدام \[دليل البدء السريع\](#البدء السريع). + استخدم [توثيقنا](https://docs.dify.ai) للمزيد من المراجع والتعليمات الأعمق. - **مشروع Dify للشركات / المؤسسات
** -نحن نوفر ميزات إضافية مركزة على الشركات. [جدول اجتماع معنا](https://cal.com/guchenhe/30min) أو [أرسل لنا بريدًا إلكترونيًا](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) لمناقشة احتياجات الشركات.
+ نحن نوفر ميزات إضافية مركزة على الشركات. [جدول اجتماع معنا](https://cal.com/guchenhe/30min) أو [أرسل لنا بريدًا إلكترونيًا](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) لمناقشة احتياجات الشركات.
> بالنسبة للشركات الناشئة والشركات الصغيرة التي تستخدم خدمات AWS، تحقق من [Dify Premium على AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) ونشرها في شبكتك الخاصة على AWS VPC بنقرة واحدة. إنها عرض AMI بأسعار معقولة مع خيار إنشاء تطبيقات بشعار وعلامة تجارية مخصصة. -> + ## البقاء قدمًا قم بإضافة نجمة إلى Dify على GitHub وتلق تنبيهًا فوريًا بالإصدارات الجديدة. @@ -157,11 +89,11 @@ ![نجمنا](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) ## البداية السريعة -> + > قبل تثبيت Dify، تأكد من أن جهازك يلبي الحد الأدنى من متطلبات النظام التالية: > ->- معالج >= 2 نواة ->- ذاكرة وصول عشوائي (RAM) >= 4 جيجابايت +> - معالج >= 2 نواة +> - ذاكرة وصول عشوائي (RAM) >= 4 جيجابايت
@@ -212,8 +144,9 @@ docker compose up -d - [AWS CDK بواسطة @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) #### استخدام Alibaba Cloud للنشر - [بسرعة نشر Dify إلى سحابة علي بابا مع عش الحوسبة السحابية علي بابا](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) - + +[بسرعة نشر Dify إلى سحابة علي بابا مع عش الحوسبة السحابية علي بابا](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + #### استخدام Alibaba Cloud Data Management للنشر انشر ​​Dify على علي بابا كلاود بنقرة واحدة باستخدام [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) @@ -222,7 +155,6 @@ docker compose up -d انشر Dify على AKS بنقرة واحدة باستخدام [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - ## المساهمة لأولئك الذين يرغبون في المساهمة، انظر إلى [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) لدينا. @@ -237,6 +169,7 @@ docker compose up -d ## المجتمع والاتصال + - [مناقشة GitHub](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة. - [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع. diff --git a/README_BN.md b/README_BN.md index a31aafdf56..ef24dea171 100644 --- a/README_BN.md +++ b/README_BN.md @@ -56,133 +56,67 @@ ডিফাই একটি ওপেন-সোর্স LLM অ্যাপ ডেভেলপমেন্ট প্ল্যাটফর্ম। এটি ইন্টুইটিভ ইন্টারফেস, এজেন্টিক AI ওয়ার্কফ্লো, RAG পাইপলাইন, এজেন্ট ক্যাপাবিলিটি, মডেল ম্যানেজমেন্ট, মনিটরিং সুবিধা এবং আরও অনেক কিছু একত্রিত করে, যা দ্রুত প্রোটোটাইপ থেকে প্রোডাকশন পর্যন্ত নিয়ে যেতে সহায়তা করে। ## কুইক স্টার্ট + +> ডিফাই ইনস্টল করার আগে, নিশ্চিত করুন যে আপনার মেশিন নিম্নলিখিত ন্যূনতম কনফিগারেশনের প্রয়োজনীয়তা পূরন করে : > -> ডিফাই ইনস্টল করার আগে, নিশ্চিত করুন যে আপনার মেশিন নিম্নলিখিত ন্যূনতম কনফিগারেশনের প্রয়োজনীয়তা পূরন করে : -> ->- সিপিউ >= 2 কোর ->- র‍্যাম >= 4 জিবি +> - সিপিউ >= 2 কোর +> - র‍্যাম >= 4 জিবি
ডিফাই সার্ভার চালু করার সবচেয়ে সহজ উপায় [docker compose](docker/docker-compose.yaml) মাধ্যমে। নিম্নলিখিত কমান্ডগুলো ব্যবহার করে ডিফাই চালানোর আগে, নিশ্চিত করুন যে আপনার মেশিনে [Docker](https://docs.docker.com/get-docker/) এবং [Docker Compose](https://docs.docker.com/compose/install/) ইনস্টল করা আছে : + ```bash cd dify cd docker cp .env.example .env docker compose up -d ``` + চালানোর পর, আপনি আপনার ব্রাউজারে [http://localhost/install](http://localhost/install)-এ ডিফাই ড্যাশবোর্ডে অ্যাক্সেস করতে পারেন এবং ইনিশিয়ালাইজেশন প্রক্রিয়া শুরু করতে পারেন। #### সাহায্যের খোঁজে -ডিফাই সেট আপ করতে সমস্যা হলে দয়া করে আমাদের [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) দেখুন। যদি তবুও সমস্যা থেকে থাকে, তাহলে [কমিউনিটি এবং আমাদের](#community--contact) সাথে যোগাযোগ করুন। +ডিফাই সেট আপ করতে সমস্যা হলে দয়া করে আমাদের [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) দেখুন। যদি তবুও সমস্যা থেকে থাকে, তাহলে [কমিউনিটি এবং আমাদের](#community--contact) সাথে যোগাযোগ করুন। > যদি আপনি ডিফাইতে অবদান রাখতে বা অতিরিক্ত উন্নয়ন করতে চান, আমাদের [সোর্স কোড থেকে ডিপ্লয়মেন্টের গাইড](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) দেখুন। ## প্রধান ফিচারসমূহ **১. ওয়ার্কফ্লো**: - ভিজ্যুয়াল ক্যানভাসে AI ওয়ার্কফ্লো তৈরি এবং পরীক্ষা করুন, নিম্নলিখিত সব ফিচার এবং তার বাইরেও আরও অনেক কিছু ব্যবহার করে। +ভিজ্যুয়াল ক্যানভাসে AI ওয়ার্কফ্লো তৈরি এবং পরীক্ষা করুন, নিম্নলিখিত সব ফিচার এবং তার বাইরেও আরও অনেক কিছু ব্যবহার করে। -**২. মডেল সাপোর্ট**: - GPT, Mistral, Llama3, এবং যেকোনো OpenAI API-সামঞ্জস্যপূর্ণ মডেলসহ, কয়েক ডজন ইনফারেন্স প্রদানকারী এবং সেল্ফ-হোস্টেড সমাধান থেকে শুরু করে প্রোপ্রাইটরি/ওপেন-সোর্স LLM-এর সাথে সহজে ইন্টিগ্রেশন। সমর্থিত মডেল প্রদানকারীদের একটি সম্পূর্ণ তালিকা পাওয়া যাবে [এখানে](https://docs.dify.ai/getting-started/readme/model-providers)। +**২. মডেল সাপোর্ট**: +GPT, Mistral, Llama3, এবং যেকোনো OpenAI API-সামঞ্জস্যপূর্ণ মডেলসহ, কয়েক ডজন ইনফারেন্স প্রদানকারী এবং সেল্ফ-হোস্টেড সমাধান থেকে শুরু করে প্রোপ্রাইটরি/ওপেন-সোর্স LLM-এর সাথে সহজে ইন্টিগ্রেশন। সমর্থিত মডেল প্রদানকারীদের একটি সম্পূর্ণ তালিকা পাওয়া যাবে [এখানে](https://docs.dify.ai/getting-started/readme/model-providers)। ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) -**3. প্রম্পট IDE**: - প্রম্পট তৈরি, মডেলের পারফরম্যান্স তুলনা এবং চ্যাট-বেজড অ্যাপে টেক্সট-টু-স্পিচের মতো বৈশিষ্ট্য যুক্ত করার জন্য ইন্টুইটিভ ইন্টারফেস। +**3. প্রম্পট IDE**: +প্রম্পট তৈরি, মডেলের পারফরম্যান্স তুলনা এবং চ্যাট-বেজড অ্যাপে টেক্সট-টু-স্পিচের মতো বৈশিষ্ট্য যুক্ত করার জন্য ইন্টুইটিভ ইন্টারফেস। **4. RAG পাইপলাইন**: - ডকুমেন্ট ইনজেশন থেকে শুরু করে রিট্রিভ পর্যন্ত সবকিছুই বিস্তৃত RAG ক্যাপাবিলিটির আওতাভুক্ত। PDF, PPT এবং অন্যান্য সাধারণ ডকুমেন্ট ফর্ম্যাট থেকে টেক্সট এক্সট্রাকশনের জন্য আউট-অফ-বক্স সাপোর্ট। +ডকুমেন্ট ইনজেশন থেকে শুরু করে রিট্রিভ পর্যন্ত সবকিছুই বিস্তৃত RAG ক্যাপাবিলিটির আওতাভুক্ত। PDF, PPT এবং অন্যান্য সাধারণ ডকুমেন্ট ফর্ম্যাট থেকে টেক্সট এক্সট্রাকশনের জন্য আউট-অফ-বক্স সাপোর্ট। -**5. এজেন্ট ক্যাপাবিলিটি**: - LLM ফাংশন কলিং বা ReAct উপর ভিত্তি করে এজেন্ট ডিফাইন করতে পারেন এবং এজেন্টের জন্য পূর্ব-নির্মিত বা কাস্টম টুলস যুক্ত করতে পারেন। Dify AI এজেন্টদের জন্য 50+ বিল্ট-ইন টুলস সরবরাহ করে, যেমন Google Search, DALL·E, Stable Diffusion এবং WolframAlpha। +**5. এজেন্ট ক্যাপাবিলিটি**: +LLM ফাংশন কলিং বা ReAct উপর ভিত্তি করে এজেন্ট ডিফাইন করতে পারেন এবং এজেন্টের জন্য পূর্ব-নির্মিত বা কাস্টম টুলস যুক্ত করতে পারেন। Dify AI এজেন্টদের জন্য 50+ বিল্ট-ইন টুলস সরবরাহ করে, যেমন Google Search, DALL·E, Stable Diffusion এবং WolframAlpha। -**6. 
এলএলএম-অপ্স**: - সময়ের সাথে সাথে অ্যাপ্লিকেশন লগ এবং পারফরম্যান্স মনিটর এবং বিশ্লেষণ করুন। প্রডাকশন ডেটা এবং annotation এর উপর ভিত্তি করে প্রম্পট, ডেটাসেট এবং মডেলগুলিকে ক্রমাগত উন্নত করতে পারেন। +**6. এলএলএম-অপ্স**: +সময়ের সাথে সাথে অ্যাপ্লিকেশন লগ এবং পারফরম্যান্স মনিটর এবং বিশ্লেষণ করুন। প্রডাকশন ডেটা এবং annotation এর উপর ভিত্তি করে প্রম্পট, ডেটাসেট এবং মডেলগুলিকে ক্রমাগত উন্নত করতে পারেন। **7. ব্যাকএন্ড-অ্যাজ-এ-সার্ভিস**: - ডিফাই-এর সমস্ত অফার সংশ্লিষ্ট API-সহ আছে, যাতে আপনি অনায়াসে ডিফাইকে আপনার নিজস্ব বিজনেস লজিকে ইন্টেগ্রেট করতে পারেন। +ডিফাই-এর সমস্ত অফার সংশ্লিষ্ট API-সহ আছে, যাতে আপনি অনায়াসে ডিফাইকে আপনার নিজস্ব বিজনেস লজিকে ইন্টেগ্রেট করতে পারেন। -## বৈশিষ্ট্য তুলনা - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
বৈশিষ্ট্যDify.AILangChainFlowiseOpenAI Assistants API
প্রোগ্রামিং পদ্ধতিAPI + App-orientedPython CodeApp-orientedAPI-oriented
সাপোর্টেড LLMsRich VarietyRich VarietyRich VarietyOpenAI-only
RAG ইঞ্জিন
এজেন্ট
ওয়ার্কফ্লো
অবজার্ভেবল
এন্টারপ্রাইজ ফিচার (SSO/Access control)
লোকাল ডেপ্লয়মেন্ট
- -## ডিফাই-এর ব্যবহার +## ডিফাই-এর ব্যবহার - **ক্লাউড
** -জিরো সেটাপে ব্যবহার করতে আমাদের [Dify Cloud](https://dify.ai) সার্ভিসটি ব্যবহার করতে পারেন। এখানে সেল্ফহোস্টিং-এর সকল ফিচার ও ক্যাপাবিলিটিসহ স্যান্ডবক্সে ২০০ জিপিটি-৪ কল ফ্রি পাবেন। + জিরো সেটাপে ব্যবহার করতে আমাদের [Dify Cloud](https://dify.ai) সার্ভিসটি ব্যবহার করতে পারেন। এখানে সেল্ফহোস্টিং-এর সকল ফিচার ও ক্যাপাবিলিটিসহ স্যান্ডবক্সে ২০০ জিপিটি-৪ কল ফ্রি পাবেন। - **সেল্ফহোস্টিং ডিফাই কমিউনিটি সংস্করণ
** -সেল্ফহোস্ট করতে এই [স্টার্টার গাইড](#quick-start) ব্যবহার করে দ্রুত আপনার এনভায়রনমেন্টে ডিফাই চালান। -আরো ইন-ডেপথ রেফারেন্সের জন্য [ডকুমেন্টেশন](https://docs.dify.ai) দেখেন। + সেল্ফহোস্ট করতে এই [স্টার্টার গাইড](#কুইক-স্টার্ট) ব্যবহার করে দ্রুত আপনার এনভায়রনমেন্টে ডিফাই চালান। + আরো ইন-ডেপথ রেফারেন্সের জন্য [ডকুমেন্টেশন](https://docs.dify.ai) দেখুন। - **এন্টারপ্রাইজ / প্রতিষ্ঠানের জন্য Dify
** -আমরা এন্টারপ্রাইজ/প্রতিষ্ঠান-কেন্দ্রিক সেবা প্রদান করে থাকি । [এই চ্যাটবটের মাধ্যমে আপনার প্রশ্নগুলি আমাদের জন্য লগ করুন।](https://udify.app/chat/22L1zSxg6yW1cWQg) অথবা [আমাদের ইমেল পাঠান](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) আপনার চাহিদা সম্পর্কে আলোচনা করার জন্য।
+ আমরা এন্টারপ্রাইজ/প্রতিষ্ঠান-কেন্দ্রিক সেবা প্রদান করে থাকি। [এই চ্যাটবটের মাধ্যমে আপনার প্রশ্নগুলি আমাদের জন্য লগ করুন।](https://udify.app/chat/22L1zSxg6yW1cWQg) অথবা [আমাদের ইমেল পাঠান](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) আপনার চাহিদা সম্পর্কে আলোচনা করার জন্য।
> AWS ব্যবহারকারী স্টার্টআপ এবং ছোট ব্যবসার জন্য, [AWS মার্কেটপ্লেসে Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) দেখুন এবং এক-ক্লিকের মাধ্যমে এটি আপনার নিজস্ব AWS VPC-তে ডিপ্লয় করুন। এটি একটি সাশ্রয়ী মূল্যের AMI অফার, যাতে কাস্টম লোগো এবং ব্র্যান্ডিং সহ অ্যাপ তৈরির সুবিধা আছে। @@ -194,10 +128,10 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## Advanced Setup -যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। +যদি আপনার কনফিগারেশনটি কাস্টমাইজ করার প্রয়োজন হয়, তাহলে অনুগ্রহ করে আমাদের [.env.example](docker/.env.example) ফাইল দেখুন এবং আপনার `.env` ফাইলে সংশ্লিষ্ট মানগুলি আপডেট করুন। এছাড়াও, আপনার নির্দিষ্ট এনভায়রনমেন্ট এবং প্রয়োজনীয়তার উপর ভিত্তি করে আপনাকে `docker-compose.yaml` ফাইলে সমন্বয় করতে হতে পারে, যেমন ইমেজ ভার্সন পরিবর্তন করা, পোর্ট ম্যাপিং করা, অথবা ভলিউম মাউন্ট করা। যেকোনো পরিবর্তন করার পর, অনুগ্রহ করে `docker-compose up -d` পুনরায় চালান। ভেরিয়েবলের সম্পূর্ণ তালিকা [এখানে](https://docs.dify.ai/getting-started/install-self-hosted/environments) খুঁজে পেতে পারেন। -যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। +যদি আপনি একটি হাইলি এভেইলেবল সেটআপ কনফিগার করতে চান, তাহলে কমিউনিটি [Helm Charts](https://helm.sh/) এবং YAML ফাইল রয়েছে যা Dify কে Kubernetes-এ ডিপ্লয় করার প্রক্রিয়া বর্ণনা করে। - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) - [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm) @@ -206,7 +140,6 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন - [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s) - [🚀 নতুন! 
YAML ফাইলসমূহ (Dify v1.6.0 সমর্থিত) তৈরি করেছেন @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes) - #### টেরাফর্ম ব্যবহার করে ডিপ্লয় [terraform](https://www.terraform.io/) ব্যবহার করে এক ক্লিকেই ক্লাউড প্ল্যাটফর্মে Dify ডিপ্লয় করুন। @@ -230,17 +163,16 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন #### Alibaba Cloud ব্যবহার করে ডিপ্লয় - [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) #### Alibaba Cloud Data Management ব্যবহার করে ডিপ্লয় - [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) +[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) - #### AKS-এ ডিপ্লয় করার জন্য Azure Devops Pipeline ব্যবহার +#### AKS-এ ডিপ্লয় করার জন্য Azure Devops Pipeline ব্যবহার [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) ব্যবহার করে Dify কে AKS-এ এক ক্লিকে ডিপ্লয় করুন - ## Contributing যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। @@ -251,9 +183,9 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন ## কমিউনিটি এবং যোগাযোগ - [GitHub Discussion](https://github.com/langgenius/dify/discussions) ফিডব্যাক এবং প্রতিক্রিয়া জানানোর মাধ্যম। -- [GitHub Issues](https://github.com/langgenius/dify/issues). Dify.AI ব্যবহার করে আপনি যেসব বাগের সম্মুখীন হন এবং ফিচার প্রস্তাবনা। আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। -- [Discord](https://discord.gg/FngNHpbcY7) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। -- [X(Twitter)](https://twitter.com/dify_ai) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। +- [GitHub Issues](https://github.com/langgenius/dify/issues). Dify.AI ব্যবহার করে আপনি যেসব বাগের সম্মুখীন হন এবং ফিচার প্রস্তাবনা। আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। +- [Discord](https://discord.gg/FngNHpbcY7) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। +- [X(Twitter)](https://twitter.com/dify_ai) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। **অবদানকারীদের তালিকা** @@ -265,7 +197,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) -## নিরাপত্তা বিষয়ক +## নিরাপত্তা বিষয়ক আপনার গোপনীয়তা রক্ষা করতে, অনুগ্রহ করে GitHub-এ নিরাপত্তা সংক্রান্ত সমস্যা পোস্ট করা এড়িয়ে চলুন। পরিবর্তে, আপনার প্রশ্নগুলি security@dify.ai ঠিকানায় পাঠান এবং আমরা আপনাকে আরও বিস্তারিত উত্তর প্রদান করব। diff --git a/README_CN.md b/README_CN.md index 0698693429..9aaebf4037 100644 --- a/README_CN.md +++ b/README_CN.md @@ -48,8 +48,7 @@ README in বাংলা - -# +#
langgenius%2Fdify | 趋势转变 @@ -58,109 +57,41 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI 工作流、RAG 管道、Agent、模型管理、可观测性功能等,让您可以快速从原型到生产。以下是其核心功能列表:

-**1. 工作流**: - 在画布上构建和测试功能强大的 AI 工作流程,利用以下所有功能以及更多功能。 +**1. 工作流**: +在画布上构建和测试功能强大的 AI 工作流程,利用以下所有功能以及更多功能。 -**2. 全面的模型支持**: - 与数百种专有/开源 LLMs 以及数十种推理提供商和自托管解决方案无缝集成,涵盖 GPT、Mistral、Llama3 以及任何与 OpenAI API 兼容的模型。完整的支持模型提供商列表可在[此处](https://docs.dify.ai/getting-started/readme/model-providers)找到。 +**2. 全面的模型支持**: +与数百种专有/开源 LLMs 以及数十种推理提供商和自托管解决方案无缝集成,涵盖 GPT、Mistral、Llama3 以及任何与 OpenAI API 兼容的模型。完整的支持模型提供商列表可在[此处](https://docs.dify.ai/getting-started/readme/model-providers)找到。 ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. Prompt IDE**: +用于制作提示、比较模型性能以及向基于聊天的应用程序添加其他功能(如文本转语音)的直观界面。 -**3. Prompt IDE**: - 用于制作提示、比较模型性能以及向基于聊天的应用程序添加其他功能(如文本转语音)的直观界面。 +**4. RAG Pipeline**: +广泛的 RAG 功能,涵盖从文档摄入到检索的所有内容,支持从 PDF、PPT 和其他常见文档格式中提取文本的开箱即用的支持。 -**4. RAG Pipeline**: - 广泛的 RAG 功能,涵盖从文档摄入到检索的所有内容,支持从 PDF、PPT 和其他常见文档格式中提取文本的开箱即用的支持。 +**5. Agent 智能体**: +您可以基于 LLM 函数调用或 ReAct 定义 Agent,并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了 50 多种内置工具,如谷歌搜索、DALL·E、Stable Diffusion 和 WolframAlpha 等。 -**5. Agent 智能体**: - 您可以基于 LLM 函数调用或 ReAct 定义 Agent,并为 Agent 添加预构建或自定义工具。Dify 为 AI Agent 提供了 50 多种内置工具,如谷歌搜索、DALL·E、Stable Diffusion 和 WolframAlpha 等。 +**6. LLMOps**: +随时间监视和分析应用程序日志和性能。您可以根据生产数据和标注持续改进提示、数据集和模型。 -**6. LLMOps**: - 随时间监视和分析应用程序日志和性能。您可以根据生产数据和标注持续改进提示、数据集和模型。 - -**7. 后端即服务**: - 所有 Dify 的功能都带有相应的 API,因此您可以轻松地将 Dify 集成到自己的业务逻辑中。 - - -## 功能比较 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
功能Dify.AILangChainFlowiseOpenAI Assistant API
编程方法API + 应用程序导向Python 代码应用程序导向API 导向
支持的 LLMs丰富多样丰富多样丰富多样仅限 OpenAI
RAG 引擎
Agent
工作流
可观测性
企业功能(SSO/访问控制)
本地部署
+**7. 后端即服务**: +所有 Dify 的功能都带有相应的 API,因此您可以轻松地将 Dify 集成到自己的业务逻辑中。 ## 使用 Dify - **云
** -我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。 + 我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。 - **自托管 Dify 社区版
** -使用这个[入门指南](#快速启动)快速在您的环境中运行 Dify。 -使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。 + 使用这个[入门指南](#%E5%BF%AB%E9%80%9F%E5%90%AF%E5%8A%A8)快速在您的环境中运行 Dify。 + 使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。 - **面向企业/组织的 Dify
** -我们提供额外的面向企业的功能。[给我们发送电子邮件](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)讨论企业需求。
+ 我们提供额外的面向企业的功能。[给我们发送电子邮件](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry)讨论企业需求。
+ > 对于使用 AWS 的初创公司和中小型企业,请查看 [AWS Marketplace 上的 Dify 高级版](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6),并使用一键部署到您自己的 AWS VPC。它是一个价格实惠的 AMI 产品,提供了使用自定义徽标和品牌创建应用程序的选项。 ## 保持领先 @@ -199,30 +130,35 @@ docker compose up -d 使用 [Helm Chart](https://helm.sh/) 版本或者 Kubernetes 资源清单(YAML),可以在 Kubernetes 上部署 Dify。 - [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify) + - [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm) + - [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts) + - [YAML 文件 by @Winson-030](https://github.com/Winson-030/dify-kubernetes) + - [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s) - [🚀 NEW! YAML 文件 (支持 Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes) - - #### 使用 Terraform 部署 使用 [terraform](https://www.terraform.io/) 一键将 Dify 部署到云平台 ##### Azure Global + - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### 使用 AWS CDK 部署 使用 [CDK](https://aws.amazon.com/cdk/) 将 Dify 部署到 AWS -##### AWS +##### AWS + - [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -242,10 +178,9 @@ docker compose up -d [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) - ## Contributing -对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 +对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_CN.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 > 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 @@ -262,10 +197,10 @@ docker compose up -d - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。 - [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。 -- [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 +- [电子邮件支持](mailto:hello@dify.ai?subject=%5BGitHub%5DQuestions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 -- [商业许可](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)。👉:有关商业用途许可 Dify.AI 的商业咨询。 +- [商业许可](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry)。👉:有关商业用途许可 Dify.AI 的商业咨询。 ## 安全问题 diff --git a/README_DE.md b/README_DE.md index 392cc7885e..a08fe63d4f 100644 --- a/README_DE.md +++ b/README_DE.md @@ -56,10 +56,11 @@ Dify ist eine Open-Source-Plattform zur Entwicklung von LLM-Anwendungen. Ihre intuitive Benutzeroberfläche vereint agentenbasierte KI-Workflows, RAG-Pipelines, Agentenfunktionen, Modellverwaltung, Überwachungsfunktionen und mehr, sodass Sie schnell von einem Prototyp in die Produktion übergehen können. ## Schnellstart + > Bevor Sie Dify installieren, stellen Sie sicher, dass Ihr System die folgenden Mindestanforderungen erfüllt: -> ->- CPU >= 2 Core ->- RAM >= 4 GiB +> +> - CPU >= 2 Core +> - RAM >= 4 GiB
@@ -75,115 +76,48 @@ docker compose up -d Nachdem Sie den Server gestartet haben, können Sie über Ihren Browser auf das Dify Dashboard unter [http://localhost/install](http://localhost/install) zugreifen und den Initialisierungsprozess starten. #### Hilfe suchen + Bitte beachten Sie unsere [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs), wenn Sie Probleme bei der Einrichtung von Dify haben. Wenden Sie sich an [die Community und uns](#community--contact), falls weiterhin Schwierigkeiten auftreten. > Wenn Sie zu Dify beitragen oder zusätzliche Entwicklungen durchführen möchten, lesen Sie bitte unseren [Leitfaden zur Bereitstellung aus dem Quellcode](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code). ## Wesentliche Merkmale -**1. Workflow**: - Erstellen und testen Sie leistungsstarke KI-Workflows auf einer visuellen Oberfläche, wobei Sie alle der folgenden Funktionen und darüber hinaus nutzen können. -**2. Umfassende Modellunterstützung**: - Nahtlose Integration mit Hunderten von proprietären und Open-Source-LLMs von Dutzenden Inferenzanbietern und selbstgehosteten Lösungen, die GPT, Mistral, Llama3 und alle mit der OpenAI API kompatiblen Modelle abdecken. Eine vollständige Liste der unterstützten Modellanbieter finden Sie [hier](https://docs.dify.ai/getting-started/readme/model-providers). +**1. Workflow**: +Erstellen und testen Sie leistungsstarke KI-Workflows auf einer visuellen Oberfläche, wobei Sie alle der folgenden Funktionen und darüber hinaus nutzen können. +**2. Umfassende Modellunterstützung**: +Nahtlose Integration mit Hunderten von proprietären und Open-Source-LLMs von Dutzenden Inferenzanbietern und selbstgehosteten Lösungen, die GPT, Mistral, Llama3 und alle mit der OpenAI API kompatiblen Modelle abdecken. Eine vollständige Liste der unterstützten Modellanbieter finden Sie [hier](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. Prompt IDE**: +Intuitive Benutzeroberfläche zum Erstellen von Prompts, zum Vergleichen der Modellleistung und zum Hinzufügen zusätzlicher Funktionen wie Text-to-Speech in einer chatbasierten Anwendung. -**3. Prompt IDE**: - Intuitive Benutzeroberfläche zum Erstellen von Prompts, zum Vergleichen der Modellleistung und zum Hinzufügen zusätzlicher Funktionen wie Text-to-Speech in einer chatbasierten Anwendung. +**4. RAG Pipeline**: +Umfassende RAG-Funktionalitäten, die alles von der Dokumenteneinlesung bis zur -abfrage abdecken, mit sofort einsatzbereiter Unterstützung für die Textextraktion aus PDFs, PPTs und anderen gängigen Dokumentformaten. -**4. RAG Pipeline**: - Umfassende RAG-Funktionalitäten, die alles von der Dokumenteneinlesung bis zur -abfrage abdecken, mit sofort einsatzbereiter Unterstützung für die Textextraktion aus PDFs, PPTs und anderen gängigen Dokumentformaten. +**5. Fähigkeiten des Agenten**: +Sie können Agenten basierend auf LLM Function Calling oder ReAct definieren und vorgefertigte oder benutzerdefinierte Tools für den Agenten hinzufügen. Dify stellt über 50 integrierte Tools für KI-Agenten bereit, wie zum Beispiel Google Search, DALL·E, Stable Diffusion und WolframAlpha. -**5. Fähigkeiten des Agenten**: - Sie können Agenten basierend auf LLM Function Calling oder ReAct definieren und vorgefertigte oder benutzerdefinierte Tools für den Agenten hinzufügen. 
Dify stellt über 50 integrierte Tools für KI-Agenten bereit, wie zum Beispiel Google Search, DALL·E, Stable Diffusion und WolframAlpha. +**6. LLMOps**: +Überwachen und analysieren Sie Anwendungsprotokolle und die Leistung im Laufe der Zeit. Sie können kontinuierlich Prompts, Datensätze und Modelle basierend auf Produktionsdaten und Annotationen verbessern. -**6. LLMOps**: - Überwachen und analysieren Sie Anwendungsprotokolle und die Leistung im Laufe der Zeit. Sie können kontinuierlich Prompts, Datensätze und Modelle basierend auf Produktionsdaten und Annotationen verbessern. - -**7. Backend-as-a-Service**: - Alle Dify-Angebote kommen mit entsprechenden APIs, sodass Sie Dify mühelos in Ihre eigene Geschäftslogik integrieren können. - -## Vergleich der Merkmale - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
FeatureDify.AILangChainFlowiseOpenAI Assistants API
Programming ApproachAPI + App-orientedPython CodeApp-orientedAPI-oriented
Supported LLMsRich VarietyRich VarietyRich VarietyOpenAI-only
RAG Engine
Agent
Workflow
Observability
Enterprise Feature (SSO/Access control)
Local Deployment
+**7. Backend-as-a-Service**: +Alle Dify-Angebote kommen mit entsprechenden APIs, sodass Sie Dify mühelos in Ihre eigene Geschäftslogik integrieren können. ## Dify verwenden - **Cloud
** -Wir hosten einen [Dify Cloud](https://dify.ai)-Service, den jeder ohne Einrichtung ausprobieren kann. Er bietet alle Funktionen der selbstgehosteten Version und beinhaltet 200 kostenlose GPT-4-Aufrufe im Sandbox-Plan. + Wir hosten einen [Dify Cloud](https://dify.ai)-Service, den jeder ohne Einrichtung ausprobieren kann. Er bietet alle Funktionen der selbstgehosteten Version und beinhaltet 200 kostenlose GPT-4-Aufrufe im Sandbox-Plan. - **Selbstgehostete Dify Community Edition
** -Starten Sie Dify schnell in Ihrer Umgebung mit diesem [Schnellstart-Leitfaden](#quick-start). Nutzen Sie unsere [Dokumentation](https://docs.dify.ai) für weiterführende Informationen und detaillierte Anweisungen. + Starten Sie Dify schnell in Ihrer Umgebung mit diesem [Schnellstart-Leitfaden](#quick-start). Nutzen Sie unsere [Dokumentation](https://docs.dify.ai) für weiterführende Informationen und detaillierte Anweisungen. - **Dify für Unternehmen / Organisationen
** -Wir bieten zusätzliche, unternehmensspezifische Funktionen. [Über diesen Chatbot können Sie uns Ihre Fragen mitteilen](https://udify.app/chat/22L1zSxg6yW1cWQg) oder [senden Sie uns eine E-Mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry), um Ihre unternehmerischen Bedürfnisse zu besprechen.
- > Für Startups und kleine Unternehmen, die AWS nutzen, schauen Sie sich [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) an und stellen Sie es mit nur einem Klick in Ihrer eigenen AWS VPC bereit. Es handelt sich um ein erschwingliches AMI-Angebot mit der Option, Apps mit individuellem Logo und Branding zu erstellen. + Wir bieten zusätzliche, unternehmensspezifische Funktionen. [Über diesen Chatbot können Sie uns Ihre Fragen mitteilen](https://udify.app/chat/22L1zSxg6yW1cWQg) oder [senden Sie uns eine E-Mail](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry), um Ihre unternehmerischen Bedürfnisse zu besprechen.
+ > Für Startups und kleine Unternehmen, die AWS nutzen, schauen Sie sich [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) an und stellen Sie es mit nur einem Klick in Ihrer eigenen AWS VPC bereit. Es handelt sich um ein erschwingliches AMI-Angebot mit der Option, Apps mit individuellem Logo und Branding zu erstellen. ## Immer einen Schritt voraus @@ -191,7 +125,6 @@ Star Dify auf GitHub und lassen Sie sich sofort über neue Releases benachrichti ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - ## Erweiterte Einstellungen Falls Sie die Konfiguration anpassen müssen, lesen Sie bitte die Kommentare in unserer [.env.example](docker/.env.example)-Datei und aktualisieren Sie die entsprechenden Werte in Ihrer `.env`-Datei. Zusätzlich müssen Sie eventuell Anpassungen an der `docker-compose.yaml`-Datei vornehmen, wie zum Beispiel das Ändern von Image-Versionen, Portzuordnungen oder Volumen-Mounts, je nach Ihrer spezifischen Einsatzumgebung und Ihren Anforderungen. Nachdem Sie Änderungen vorgenommen haben, starten Sie `docker-compose up -d` erneut. Eine vollständige Liste der verfügbaren Umgebungsvariablen finden Sie [hier](https://docs.dify.ai/getting-started/install-self-hosted/environments). @@ -210,20 +143,23 @@ Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von de Stellen Sie Dify mit nur einem Klick mithilfe von [terraform](https://www.terraform.io/) auf einer Cloud-Plattform bereit. ##### Azure Global + - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### Verwendung von AWS CDK für die Bereitstellung Bereitstellung von Dify auf AWS mit [CDK](https://aws.amazon.com/cdk/) -##### AWS +##### AWS + - [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) -#### Alibaba Cloud +#### Alibaba Cloud [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) @@ -235,20 +171,18 @@ Ein-Klick-Bereitstellung von Dify in der Alibaba Cloud mit [Alibaba Cloud Data M Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) verwenden - ## Contributing -Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. - +Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_DE.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. > Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. 
Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). ## Gemeinschaft & Kontakt -* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. -* [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. -* [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. +- [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: das Teilen Ihrer Anwendungen und den Austausch mit der Community. +- [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: das Teilen Ihrer Anwendungen und den Austausch mit der Community. **Mitwirkende** @@ -260,7 +194,6 @@ Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide]( [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) - ## Offenlegung der Sicherheit Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme auf GitHub zu posten. Schicken Sie Ihre Fragen stattdessen an security@dify.ai und wir werden Ihnen eine ausführlichere Antwort geben. @@ -268,4 +201,3 @@ Um Ihre Privatsphäre zu schützen, vermeiden Sie es bitte, Sicherheitsprobleme ## Lizenz Dieses Repository steht unter der [Dify Open Source License](LICENSE), die im Wesentlichen Apache 2.0 mit einigen zusätzlichen Einschränkungen ist. - diff --git a/README_ES.md b/README_ES.md index 859da5bfd7..d8fdbf54e6 100644 --- a/README_ES.md +++ b/README_ES.md @@ -48,7 +48,7 @@ README in বাংলা

-# +#

langgenius%2Fdify | Trendshift @@ -56,111 +56,42 @@ Dify es una plataforma de desarrollo de aplicaciones de LLM de código abierto. Su interfaz intuitiva combina flujo de trabajo de IA, pipeline RAG, capacidades de agente, gestión de modelos, características de observabilidad y más, lo que le permite pasar rápidamente de un prototipo a producción. Aquí hay una lista de las características principales:

-**1. Flujo de trabajo**: - Construye y prueba potentes flujos de trabajo de IA en un lienzo visual, aprovechando todas las siguientes características y más. +**1. Flujo de trabajo**: +Construye y prueba potentes flujos de trabajo de IA en un lienzo visual, aprovechando todas las siguientes características y más. -**2. Soporte de modelos completo**: - Integración perfecta con cientos de LLMs propietarios / de código abierto de docenas de proveedores de inferencia y soluciones auto-alojadas, que cubren GPT, Mistral, Llama3 y cualquier modelo compatible con la API de OpenAI. Se puede encontrar una lista completa de proveedores de modelos admitidos [aquí](https://docs.dify.ai/getting-started/readme/model-providers). +**2. Soporte de modelos completo**: +Integración perfecta con cientos de LLMs propietarios / de código abierto de docenas de proveedores de inferencia y soluciones auto-alojadas, que cubren GPT, Mistral, Llama3 y cualquier modelo compatible con la API de OpenAI. Se puede encontrar una lista completa de proveedores de modelos admitidos [aquí](https://docs.dify.ai/getting-started/readme/model-providers). ![proveedores-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. IDE de prompt**: +Interfaz intuitiva para crear prompts, comparar el rendimiento del modelo y agregar características adicionales como texto a voz a una aplicación basada en chat. -**3. IDE de prompt**: - Interfaz intuitiva para crear prompts, comparar el rendimiento del modelo y agregar características adicionales como texto a voz a una aplicación basada en chat. +**4. Pipeline RAG**: +Amplias capacidades de RAG que cubren todo, desde la ingestión de documentos hasta la recuperación, con soporte listo para usar para la extracción de texto de PDF, PPT y otros formatos de documento comunes. -**4. Pipeline RAG**: - Amplias capacidades de RAG que cubren todo, desde la ingestión de documentos hasta la recuperación, con soporte listo para usar para la extracción de texto de PDF, PPT y otros formatos de documento comunes. +**5. Capacidades de agente**: +Puedes definir agentes basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DALL·E, Difusión Estable y WolframAlpha. -**5. Capacidades de agente**: - Puedes definir agentes basados en LLM Function Calling o ReAct, y agregar herramientas preconstruidas o personalizadas para el agente. Dify proporciona más de 50 herramientas integradas para agentes de IA, como Búsqueda de Google, DALL·E, Difusión Estable y WolframAlpha. +**6. LLMOps**: +Supervisa y analiza registros de aplicaciones y rendimiento a lo largo del tiempo. Podrías mejorar continuamente prompts, conjuntos de datos y modelos basados en datos de producción y anotaciones. -**6. LLMOps**: - Supervisa y analiza registros de aplicaciones y rendimiento a lo largo del tiempo. Podrías mejorar continuamente prompts, conjuntos de datos y modelos basados en datos de producción y anotaciones. - -**7. Backend como servicio**: - Todas las ofertas de Dify vienen con APIs correspondientes, por lo que podrías integrar Dify sin esfuerzo en tu propia lógica empresarial. - - -## Comparación de características - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
CaracterísticaDify.AILangChainFlowiseAPI de Asistentes de OpenAI
Enfoque de programaciónAPI + orientado a la aplicaciónCódigo PythonOrientado a la aplicaciónOrientado a la API
LLMs admitidosGran variedadGran variedadGran variedadSolo OpenAI
Motor RAG
Agente
Flujo de trabajo
Observabilidad
Característica empresarial (SSO/Control de acceso)
Implementación local
+**7. Backend como servicio**: +Todas las ofertas de Dify vienen con APIs correspondientes, por lo que podrías integrar Dify sin esfuerzo en tu propia lógica empresarial. ## Usando Dify - **Nube
** -Hospedamos un servicio [Dify Cloud](https://dify.ai) para que cualquiera lo pruebe sin configuración. Proporciona todas las capacidades de la versión autoimplementada e incluye 200 llamadas gratuitas a GPT-4 en el plan sandbox. + Hospedamos un servicio [Dify Cloud](https://dify.ai) para que cualquiera lo pruebe sin configuración. Proporciona todas las capacidades de la versión autoimplementada e incluye 200 llamadas gratuitas a GPT-4 en el plan sandbox. - **Auto-alojamiento de Dify Community Edition
** -Pon rápidamente Dify en funcionamiento en tu entorno con esta [guía de inicio rápido](#quick-start). -Usa nuestra [documentación](https://docs.dify.ai) para más referencias e instrucciones más detalladas. + Pon rápidamente Dify en funcionamiento en tu entorno con esta [guía de inicio rápido](#quick-start). + Usa nuestra [documentación](https://docs.dify.ai) para más referencias e instrucciones más detalladas. - **Dify para Empresas / Organizaciones
** -Proporcionamos características adicionales centradas en la empresa. [Envíanos un correo electrónico](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir las necesidades empresariales.
- > Para startups y pequeñas empresas que utilizan AWS, echa un vistazo a [Dify Premium en AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e impleméntalo en tu propio VPC de AWS con un clic. Es una AMI asequible que ofrece la opción de crear aplicaciones con logotipo y marca personalizados. + Proporcionamos características adicionales centradas en la empresa. [Envíanos un correo electrónico](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) para discutir las necesidades empresariales.
+ > Para startups y pequeñas empresas que utilizan AWS, echa un vistazo a [Dify Premium en AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e impleméntalo en tu propio VPC de AWS con un clic. Es una AMI asequible que ofrece la opción de crear aplicaciones con logotipo y marca personalizados. ## Manteniéndote al tanto @@ -168,13 +99,12 @@ Dale estrella a Dify en GitHub y serás notificado instantáneamente de las nuev ![danos estrella](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## Inicio Rápido + > Antes de instalar Dify, asegúrate de que tu máquina cumpla con los siguientes requisitos mínimos del sistema: -> ->- CPU >= 2 núcleos ->- RAM >= 4GB +> +> - CPU >= 2 núcleos +> - RAM >= 4GB
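Once the stack from the quick start is up, a quick health check saves guesswork. A minimal sketch, run from the repository's `docker` directory and assuming the default setup where nginx listens on port 80 and the setup page lives at `/install`:

```bash
# After `docker compose up -d`: list service status and probe the setup page
docker compose ps
curl -fsS -o /dev/null -w "HTTP %{http_code}\n" http://localhost/install
```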
@@ -210,16 +140,19 @@ Si desea configurar una configuración de alta disponibilidad, la comunidad prop Despliega Dify en una plataforma en la nube con un solo clic utilizando [terraform](https://www.terraform.io/) ##### Azure Global + - [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### Usando AWS CDK para el Despliegue Despliegue Dify en AWS usando [CDK](https://aws.amazon.com/cdk/) -##### AWS +##### AWS + - [AWS CDK por @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK por @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -235,13 +168,11 @@ Despliega Dify en Alibaba Cloud con un solo clic con [Alibaba Cloud Data Managem Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - ## Contribuir -Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_ES.md). Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias. - > Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). **Contribuidores** @@ -252,15 +183,22 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en ## Comunidad y Contacto -* [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas. -* [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. -* [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. +- [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas. +- [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. +- [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. 
## Historial de Estrellas [![Gráfico de Historial de Estrellas](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) ## Divulgación de Seguridad @@ -269,10 +207,3 @@ Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada. ## Licencia Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. -## Divulgación de Seguridad - -Para proteger tu privacidad, evita publicar problemas de seguridad en GitHub. En su lugar, envía tus preguntas a security@dify.ai y te proporcionaremos una respuesta más detallada. - -## Licencia - -Este repositorio está disponible bajo la [Licencia de Código Abierto de Dify](LICENSE), que es esencialmente Apache 2.0 con algunas restricciones adicionales. diff --git a/README_FR.md b/README_FR.md index fcadad419b..7474ea50c2 100644 --- a/README_FR.md +++ b/README_FR.md @@ -48,7 +48,7 @@ README in বাংলা

-# +#

langgenius%2Fdify | Trendshift @@ -56,111 +56,42 @@ Dify est une plateforme de développement d'applications LLM open source. Son interface intuitive combine un flux de travail d'IA, un pipeline RAG, des capacités d'agent, une gestion de modèles, des fonctionnalités d'observabilité, et plus encore, vous permettant de passer rapidement du prototype à la production. Voici une liste des fonctionnalités principales:

-**1. Flux de travail** : - Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore. +**1. Flux de travail** : +Construisez et testez des flux de travail d'IA puissants sur un canevas visuel, en utilisant toutes les fonctionnalités suivantes et plus encore. -**2. Prise en charge complète des modèles** : - Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers). +**2. Prise en charge complète des modèles** : +Intégration transparente avec des centaines de LLM propriétaires / open source provenant de dizaines de fournisseurs d'inférence et de solutions auto-hébergées, couvrant GPT, Mistral, Llama3, et tous les modèles compatibles avec l'API OpenAI. Une liste complète des fournisseurs de modèles pris en charge se trouve [ici](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. IDE de prompt** : +Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats. -**3. IDE de prompt** : - Interface intuitive pour créer des prompts, comparer les performances des modèles et ajouter des fonctionnalités supplémentaires telles que la synthèse vocale à une application basée sur des chats. +**4. Pipeline RAG** : +Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants. -**4. Pipeline RAG** : - Des capacités RAG étendues qui couvrent tout, de l'ingestion de documents à la récupération, avec un support prêt à l'emploi pour l'extraction de texte à partir de PDF, PPT et autres formats de document courants. +**5. Capacités d'agent** : +Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha. -**5. Capacités d'agent** : - Vous pouvez définir des agents basés sur l'appel de fonction LLM ou ReAct, et ajouter des outils pré-construits ou personnalisés pour l'agent. Dify fournit plus de 50 outils intégrés pour les agents d'IA, tels que la recherche Google, DALL·E, Stable Diffusion et WolframAlpha. +**6. LLMOps** : +Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations. -**6. LLMOps** : - Surveillez et analysez les journaux d'application et les performances au fil du temps. Vous pouvez continuellement améliorer les prompts, les ensembles de données et les modèles en fonction des données de production et des annotations. - -**7. Backend-as-a-Service** : - Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier. 
- - -## Comparaison des fonctionnalités - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
FonctionnalitéDify.AILangChainFlowiseOpenAI Assistants API
Approche de programmationAPI + ApplicationCode PythonApplicationAPI
LLMs pris en chargeGrande variétéGrande variétéGrande variétéUniquement OpenAI
Moteur RAG
Agent
Flux de travail
Observabilité
Fonctionnalité d'entreprise (SSO/Contrôle d'accès)
Déploiement local
+**7. Backend-as-a-Service** : +Toutes les offres de Dify sont accompagnées d'API correspondantes, vous permettant d'intégrer facilement Dify dans votre propre logique métier. ## Utiliser Dify - **Cloud
** -Nous hébergeons un service [Dify Cloud](https://dify.ai) pour que tout le monde puisse l'essayer sans aucune configuration. Il fournit toutes les capacités de la version auto-hébergée et comprend 200 appels GPT-4 gratuits dans le plan bac à sable. + Nous hébergeons un service [Dify Cloud](https://dify.ai) pour que tout le monde puisse l'essayer sans aucune configuration. Il fournit toutes les capacités de la version auto-hébergée et comprend 200 appels GPT-4 gratuits dans le plan bac à sable. - **Auto-hébergement Dify Community Edition
** -Lancez rapidement Dify dans votre environnement avec ce [guide de démarrage](#quick-start). -Utilisez notre [documentation](https://docs.dify.ai) pour plus de références et des instructions plus détaillées. + Lancez rapidement Dify dans votre environnement avec ce [guide de démarrage](#quick-start). + Utilisez notre [documentation](https://docs.dify.ai) pour plus de références et des instructions plus détaillées. - **Dify pour les entreprises / organisations
** -Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Envoyez-nous un e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) pour discuter des besoins de l'entreprise.
- > Pour les startups et les petites entreprises utilisant AWS, consultez [Dify Premium sur AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) et déployez-le dans votre propre VPC AWS en un clic. C'est une offre AMI abordable avec la possibilité de créer des applications avec un logo et une marque personnalisés. + Nous proposons des fonctionnalités supplémentaires adaptées aux entreprises. [Envoyez-nous un e-mail](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) pour discuter des besoins de l'entreprise.
+ > Pour les startups et les petites entreprises utilisant AWS, consultez [Dify Premium sur AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) et déployez-le dans votre propre VPC AWS en un clic. C'est une offre AMI abordable avec la possibilité de créer des applications avec un logo et une marque personnalisés. ## Rester en avance @@ -168,13 +99,12 @@ Mettez une étoile à Dify sur GitHub et soyez instantanément informé des nouv ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## Démarrage rapide + > Avant d'installer Dify, assurez-vous que votre machine répond aux exigences système minimales suivantes: -> ->- CPU >= 2 cœurs ->- RAM >= 4 Go +> +> - CPU >= 2 cœurs +> - RAM >= 4 Go
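If the dashboard does not come up after the quick-start steps, the container logs are the first place to look. A minimal sketch; the `api` and `web` service names are assumptions based on the stock `docker-compose.yaml`, so adjust them to match `docker compose ps` output:

```bash
# Follow logs from the core services while diagnosing a failed start
docker compose logs -f api web
```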
@@ -208,16 +138,19 @@ Si vous souhaitez configurer une configuration haute disponibilité, la communau Déployez Dify sur une plateforme cloud en un clic en utilisant [terraform](https://www.terraform.io/) ##### Azure Global + - [Azure Terraform par @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform par @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### Utilisation d'AWS CDK pour le déploiement Déployez Dify sur AWS en utilisant [CDK](https://aws.amazon.com/cdk/) -##### AWS +##### AWS + - [AWS CDK par @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK par @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -233,13 +166,11 @@ Déployez Dify en un clic sur Alibaba Cloud avec [Alibaba Cloud Data Management] Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - ## Contribuer -Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_FR.md). Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. - > Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). **Contributeurs** @@ -250,15 +181,22 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le ## Communauté & Contact -* [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. -* [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. -* [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. +- [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. +- [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. +- [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. 
## Historique des étoiles [![Graphique de l'historique des étoiles](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) ## Divulgation de sécurité @@ -267,10 +205,3 @@ Pour protéger votre vie privée, veuillez éviter de publier des problèmes de ## Licence Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. -## Divulgation de sécurité - -Pour protéger votre vie privée, veuillez éviter de publier des problèmes de sécurité sur GitHub. Au lieu de cela, envoyez vos questions à security@dify.ai et nous vous fournirons une réponse plus détaillée. - -## Licence - -Ce référentiel est disponible sous la [Licence open source Dify](LICENSE), qui est essentiellement l'Apache 2.0 avec quelques restrictions supplémentaires. diff --git a/README_JA.md b/README_JA.md index 6ddc30789c..a782849f6e 100644 --- a/README_JA.md +++ b/README_JA.md @@ -48,7 +48,7 @@ README in বাংলা

-# +#

langgenius%2Fdify | Trendshift @@ -58,110 +58,41 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ

**1. ワークフロー**: - 強力なAIワークフローをビジュアルキャンバス上で構築し、テストできます。すべての機能、および以下の機能を使用できます。 +強力なAIワークフローをビジュアルキャンバス上で構築し、テストできます。すべての機能、および以下の機能を使用できます。 **2. 総合的なモデルサポート**: - 数百ものプロプライエタリ/オープンソースのLLMと、数十もの推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama3、OpenAI APIと互換性のあるすべてのモデルを統合されています。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs.dify.ai/getting-started/readme/model-providers)をご覧ください。 +数百ものプロプライエタリ/オープンソースのLLMと、数十もの推論プロバイダーおよびセルフホスティングソリューションとのシームレスな統合を提供します。GPT、Mistral、Llama3、OpenAI APIと互換性のあるすべてのモデルを統合されています。サポートされているモデルプロバイダーの完全なリストは[こちら](https://docs.dify.ai/getting-started/readme/model-providers)をご覧ください。 ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) - **3. プロンプトIDE**: - プロンプトの作成、モデルパフォーマンスの比較が行え、チャットベースのアプリに音声合成などの機能も追加できます。 +プロンプトの作成、モデルパフォーマンスの比較が行え、チャットベースのアプリに音声合成などの機能も追加できます。 **4. RAGパイプライン**: - ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサポートも提供します。 +ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサポートも提供します。 **5. エージェント機能**: - LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。 +LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。 **6. LLMOps**: - アプリケーションのログやパフォーマンスを監視と分析し、生産のデータと注釈に基づいて、プロンプト、データセット、モデルを継続的に改善できます。 +アプリケーションのログやパフォーマンスを監視と分析し、生産のデータと注釈に基づいて、プロンプト、データセット、モデルを継続的に改善できます。 **7. Backend-as-a-Service**: - すべての機能はAPIを提供されており、Difyを自分のビジネスロジックに簡単に統合できます。 - - -## 機能比較 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
機能Dify.AILangChainFlowiseOpenAI Assistants API
プログラミングアプローチAPI + アプリ指向Pythonコードアプリ指向API指向
サポートされているLLMバラエティ豊かバラエティ豊かバラエティ豊かOpenAIのみ
RAGエンジン
エージェント
ワークフロー
観測性
エンタープライズ機能(SSO/アクセス制御)
ローカル展開
+すべての機能はAPIを提供されており、Difyを自分のビジネスロジックに簡単に統合できます。 ## Difyの使用方法 - **クラウド
** -[こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回のGPT-4呼び出しが無料で含まれています。 + [こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回のGPT-4呼び出しが無料で含まれています。 - **Dify Community Editionのセルフホスティング
** -この[スタートガイド](#クイックスタート)を使用して、ローカル環境でDifyを簡単に実行できます。 -詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。 + この[スタートガイド](#%E3%82%AF%E3%82%A4%E3%83%83%E3%82%AF%E3%82%B9%E3%82%BF%E3%83%BC%E3%83%88)を使用して、ローカル環境でDifyを簡単に実行できます。 + 詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。 - **企業/組織向けのDify
** -企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)して企業のニーズについて相談してください。
- > AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングとして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。 + 企業中心の機能を提供しています。[メールを送信](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry)して企業のニーズについて相談してください。
+ > AWSを使用しているスタートアップ企業や中小企業の場合は、[AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)のDify Premiumをチェックして、ワンクリックで自分のAWS VPCにデプロイできます。さらに、手頃な価格のAMIオファリングとして、ロゴやブランディングをカスタマイズしてアプリケーションを作成するオプションがあります。 ## 最新の情報を入手 @@ -169,13 +100,12 @@ GitHub上でDifyにスターを付けることで、Difyに関する新しいニ ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## クイックスタート + > Difyをインストールする前に、お使いのマシンが以下の最小システム要件を満たしていることを確認してください: > ->- CPU >= 2コア ->- RAM >= 4GB +> - CPU >= 2コア +> - RAM >= 4GB
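A common quick-start failure mode is another service already holding port 80, which the bundled reverse proxy binds by default. A minimal sketch, assuming a Linux host with `ss` available; the port itself can usually be changed in `.env` (the exact variable name depends on your Dify version):

```bash
# Check whether anything is already listening on port 80 before starting Dify
sudo ss -ltn | grep -E ':80\b' || echo "port 80 is free"
```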
@@ -209,9 +139,11 @@ docker compose up -d [terraform](https://www.terraform.io/) を使用して、ワンクリックでDifyをクラウドプラットフォームにデプロイします ##### Azure Global + - [@nikawangによるAzure Terraform](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [@sotazumによるGoogle Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform) #### AWS CDK を使用したデプロイ @@ -219,26 +151,27 @@ docker compose up -d [CDK](https://aws.amazon.com/cdk/) を使用して、DifyをAWSにデプロイします ##### AWS + - [@KevinZhaoによるAWS CDK (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [@tmokmssによるAWS CDK (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) #### Alibaba Cloud + [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) #### Alibaba Cloud Data Management + [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) を利用して、DifyをAlibaba Cloudへワンクリックでデプロイできます #### AKSへのデプロイにAzure Devops Pipelineを使用 [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)を使用してDifyをAKSにワンクリックでデプロイ - ## 貢献 -コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)を参照してください。 +コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_JA.md)を参照してください。 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 - > Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 **貢献者** @@ -249,12 +182,10 @@ docker compose up -d ## コミュニティ & お問い合わせ -* [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 -* [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください -* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 -* [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 - - +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 +- [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください +- [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 +- [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 ## ライセンス diff --git a/README_KL.md b/README_KL.md index 7232da8003..93da9a6140 100644 --- a/README_KL.md +++ b/README_KL.md @@ -48,7 +48,7 @@ README in বাংলা

-# +#

[Trendshift badge: langgenius/dify]

-**1. Workflow**: - Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond. +**1. Workflow**: +Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond. -**2. Comprehensive model support**: - Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers). +**2. Comprehensive model support**: +Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. Prompt IDE**: +Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app. -**3. Prompt IDE**: - Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app. +**4. RAG Pipeline**: +Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats. -**4. RAG Pipeline**: - Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats. +**5. Agent capabilities**: +You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha. -**5. Agent capabilities**: - You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha. +**6. LLMOps**: +Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations. -**6. LLMOps**: - Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations. - -**7. Backend-as-a-Service**: - All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic. - - -## Feature Comparison - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Feature | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| Programming Approach | API + App-oriented | Python Code | App-oriented | API-oriented |
| Supported LLMs | Rich Variety | Rich Variety | Rich Variety | OpenAI-only |
| RAG Engine | ✅ | ✅ | ✅ | ✅ |
| Agent | ✅ | ✅ | ❌ | ✅ |
| Workflow | ✅ | ❌ | ✅ | ❌ |
| Observability | ✅ | ✅ | ✅ | ❌ |
| Enterprise Feature (SSO/Access control) | ✅ | ❌ | ❌ | ❌ |
| Local Deployment | ✅ | ✅ | ✅ | ❌ |
+**7. Backend-as-a-Service**: +All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic. ## Using Dify - **Cloud
** -We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan. + We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan. - **Self-hosting Dify Community Edition
** -Quickly get Dify running in your environment with this [starter guide](#quick-start). -Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions. + Quickly get Dify running in your environment with this [starter guide](#quick-start). + Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions. - **Dify for Enterprise / Organizations
** -We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
- > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding. + We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs.
+ > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding. ## Staying ahead @@ -168,13 +99,12 @@ Star Dify on GitHub and be instantly notified of new releases. ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## Quick Start + > Before installing Dify, make sure your machine meets the following minimum system requirements: -> ->- CPU >= 2 Core ->- RAM >= 4GB +> +> - CPU >= 2 Core +> - RAM >= 4GB
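Because the Backend-as-a-Service point above means every Dify app is also exposed over HTTP, wiring a published app into your own code can be a single request. A minimal sketch against Dify's chat-messages API; the bearer token placeholder stands in for an app-level API key:

```bash
# Send one blocking chat request to a Dify app
curl -X POST 'https://api.dify.ai/v1/chat-messages' \
  --header 'Authorization: Bearer YOUR_APP_API_KEY' \
  --header 'Content-Type: application/json' \
  --data-raw '{
    "inputs": {},
    "query": "Hello, what can you do?",
    "response_mode": "blocking",
    "user": "example-user"
  }'
```

On a self-hosted deployment, swap api.dify.ai for your own instance's API host.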
@@ -208,16 +138,19 @@ If you'd like to configure a highly-available setup, there are community-contrib wa'logh nIqHom neH ghun deployment toy'wI' [terraform](https://www.terraform.io/) lo'laH. ##### Azure Global + - [Azure Terraform mung @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform qachlot @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### AWS CDK atorlugh pilersitsineq wa'logh nIqHom neH ghun deployment toy'wI' [CDK](https://aws.amazon.com/cdk/) lo'laH. -##### AWS +##### AWS + - [AWS CDK qachlot @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK qachlot @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -233,13 +166,11 @@ wa'logh nIqHom neH ghun deployment toy'wI' [CDK](https://aws.amazon.com/cdk/) lo [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) lo'laH Dify AKS 'e' wa'DIch click 'e' Deploy - ## Contributing -For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). At the same time, please consider supporting Dify by sharing it on social media and at events and conferences. - > We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c). **Contributors** @@ -250,18 +181,18 @@ At the same time, please consider supporting Dify by sharing it on social media ## Community & Contact -* [GitHub Discussion](https://github.com/langgenius/dify/discussions +- \[GitHub Discussion\](https://github.com/langgenius/dify/discussions ). Best for: sharing feedback and asking questions. -* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. -* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. + +- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. +- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) - ## Security Disclosure To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer. 
diff --git a/README_KR.md b/README_KR.md index 74010d43ed..ec28cc0f61 100644 --- a/README_KR.md +++ b/README_KR.md @@ -48,99 +48,30 @@ README in বাংলা

- - Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:

+Dify는 오픈 소스 LLM 앱 개발 플랫폼입니다. 직관적인 인터페이스를 통해 AI 워크플로우, RAG 파이프라인, 에이전트 기능, 모델 관리, 관찰 기능 등을 결합하여 프로토타입에서 프로덕션까지 빠르게 전환할 수 있습니다. 주요 기능 목록은 다음과 같습니다:

**1. 워크플로우**: - 다음 기능들을 비롯한 다양한 기능을 활용하여 시각적 캔버스에서 강력한 AI 워크플로우를 구축하고 테스트하세요. +다음 기능들을 비롯한 다양한 기능을 활용하여 시각적 캔버스에서 강력한 AI 워크플로우를 구축하고 테스트하세요. -**2. 포괄적인 모델 지원:**: +**2. 포괄적인 모델 지원:**: 수십 개의 추론 제공업체와 자체 호스팅 솔루션에서 제공하는 수백 개의 독점 및 오픈 소스 LLM과 원활하게 통합되며, GPT, Mistral, Llama3 및 모든 OpenAI API 호환 모델을 포함합니다. 지원되는 모델 제공업체의 전체 목록은 [여기](https://docs.dify.ai/getting-started/readme/model-providers)에서 확인할 수 있습니다. ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) - **3. 통합 개발환경**: - 프롬프트를 작성하고, 모델 성능을 비교하며, 텍스트-음성 변환과 같은 추가 기능을 채팅 기반 앱에 추가할 수 있는 직관적인 인터페이스를 제공합니다. +프롬프트를 작성하고, 모델 성능을 비교하며, 텍스트-음성 변환과 같은 추가 기능을 채팅 기반 앱에 추가할 수 있는 직관적인 인터페이스를 제공합니다. -**4. RAG 파이프라인**: - 문서 수집부터 검색까지 모든 것을 다루며, PDF, PPT 및 기타 일반적인 문서 형식에서 텍스트 추출을 위한 기본 지원이 포함되어 있는 광범위한 RAG 기능을 제공합니다. +**4. RAG 파이프라인**: +문서 수집부터 검색까지 모든 것을 다루며, PDF, PPT 및 기타 일반적인 문서 형식에서 텍스트 추출을 위한 기본 지원이 포함되어 있는 광범위한 RAG 기능을 제공합니다. **5. 에이전트 기능**: - LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DALL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다. +LLM 함수 호출 또는 ReAct를 기반으로 에이전트를 정의하고 에이전트에 대해 사전 구축된 도구나 사용자 정의 도구를 추가할 수 있습니다. Dify는 Google Search, DALL·E, Stable Diffusion, WolframAlpha 등 AI 에이전트를 위한 50개 이상의 내장 도구를 제공합니다. **6. LLMOps**: - 시간 경과에 따른 애플리케이션 로그와 성능을 모니터링하고 분석합니다. 생산 데이터와 주석을 기반으로 프롬프트, 데이터세트, 모델을 지속적으로 개선할 수 있습니다. +시간 경과에 따른 애플리케이션 로그와 성능을 모니터링하고 분석합니다. 생산 데이터와 주석을 기반으로 프롬프트, 데이터세트, 모델을 지속적으로 개선할 수 있습니다. **7. Backend-as-a-Service**: - Dify의 모든 제품에는 해당 API가 함께 제공되므로 Dify를 자신의 비즈니스 로직에 쉽게 통합할 수 있습니다. - -## 기능 비교 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| 기능 | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| 프로그래밍 접근 방식 | API + 앱 중심 | Python 코드 | 앱 중심 | API 중심 |
| 지원되는 LLMs | 다양한 종류 | 다양한 종류 | 다양한 종류 | OpenAI 전용 |
| RAG 엔진 | ✅ | ✅ | ✅ | ✅ |
| 에이전트 | ✅ | ✅ | ❌ | ✅ |
| 워크플로우 | ✅ | ❌ | ✅ | ❌ |
| 가시성 | ✅ | ✅ | ✅ | ❌ |
| 기업용 기능 (SSO/접근 제어) | ✅ | ❌ | ❌ | ❌ |
| 로컬 배포 | ✅ | ✅ | ✅ | ❌ |
+Dify의 모든 제품에는 해당 API가 함께 제공되므로 Dify를 자신의 비즈니스 로직에 쉽게 통합할 수 있습니다. ## Dify 사용하기 @@ -148,27 +79,26 @@ 우리는 누구나 설정이 필요 없이 사용해 볼 수 있도록 [Dify 클라우드](https://dify.ai) 서비스를 호스팅합니다. 이는 자체 배포 버전의 모든 기능을 제공하며, 샌드박스 플랜에서 무료로 200회의 GPT-4 호출을 포함합니다. - **셀프-호스팅 Dify 커뮤니티 에디션
** - 환경에서 Dify를 빠르게 실행하려면 이 [스타터 가이드를](#quick-start) 참조하세요. + 환경에서 Dify를 빠르게 실행하려면 이 [스타터 가이드를](#quick-start) 참조하세요. 추가 참조 및 더 심층적인 지침은 [문서](https://docs.dify.ai)를 사용하세요. - **기업 / 조직을 위한 Dify
** - 우리는 추가적인 기업 중심 기능을 제공합니다. 잡거나 [이메일 보내기](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오.
+ 우리는 추가적인 기업 중심 기능을 제공합니다. [이메일 보내기](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry)를 통해 기업 요구 사항을 논의하십시오.
+ > AWS를 사용하는 스타트업 및 중소기업의 경우 [AWS Marketplace에서 Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6)을 확인하고 한 번의 클릭으로 자체 AWS VPC에 배포하십시오. 맞춤형 로고와 브랜딩이 포함된 앱을 생성할 수 있는 옵션이 포함된 저렴한 AMI 제품입니다. - - ## 앞서가기 GitHub에서 Dify에 별표를 찍어 새로운 릴리스를 즉시 알림 받으세요. ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## 빠른 시작 ->Dify를 설치하기 전에 컴퓨터가 다음과 같은 최소 시스템 요구 사항을 충족하는지 확인하세요 : ->- CPU >= 2 Core ->- RAM >= 4GB + +> Dify를 설치하기 전에 컴퓨터가 다음과 같은 최소 시스템 요구 사항을 충족하는지 확인하세요 : +> +> - CPU >= 2 Core +> - RAM >= 4GB
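If the defaults need changing before bringing the stack up, configuration lives in the `.env` file copied from `.env.example`; a sketch (EXPOSE_NGINX_PORT is one example of the variables documented in that template):

```bash
cd dify/docker
cp .env.example .env

# Adjust values as needed, e.g. expose the web UI on port 8080 instead of 80
sed -i 's/^EXPOSE_NGINX_PORT=.*/EXPOSE_NGINX_PORT=8080/' .env

# Recreate the containers so the new values take effect
docker compose down
docker compose up -d
```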
@@ -202,16 +132,19 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 [terraform](https://www.terraform.io/)을 사용하여 단 한 번의 클릭으로 Dify를 클라우드 플랫폼에 배포하십시오 ##### Azure Global + - [nikawang의 Azure Terraform](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [sotazum의 Google Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform) #### AWS CDK를 사용한 배포 [CDK](https://aws.amazon.com/cdk/)를 사용하여 AWS에 Dify 배포 -##### AWS +##### AWS + - [KevinZhao의 AWS CDK (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [tmokmss의 AWS CDK (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -227,14 +160,12 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)을 사용하여 Dify를 AKS에 원클릭으로 배포 - ## 기여 -코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. +코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_KR.md)를 참조하세요. 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. - -> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. +> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. **기여자** @@ -244,17 +175,15 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ## 커뮤니티 & 연락처 -* [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. -* [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. -* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. -* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. - +- [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. +- [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. +- [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. +- [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. ## Star 히스토리 [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) - ## 보안 공개 개인정보 보호를 위해 보안 문제를 GitHub에 게시하지 마십시오. 대신 security@dify.ai로 질문을 보내주시면 더 자세한 답변을 드리겠습니다. diff --git a/README_PT.md b/README_PT.md index f9e3ef7f4b..da8f354a49 100644 --- a/README_PT.md +++ b/README_PT.md @@ -1,4 +1,5 @@ ![cover-v5-optimized](./images/GitHub_README_if.png) +

📌 Introduzindo o Dify Workflow com Upload de Arquivo: Recrie o Podcast Google NotebookLM

@@ -55,111 +56,42 @@ Dify é uma plataforma de desenvolvimento de aplicativos LLM de código aberto. Sua interface intuitiva combina workflow de IA, pipeline RAG, capacidades de agente, gerenciamento de modelos, recursos de observabilidade e muito mais, permitindo que você vá rapidamente do protótipo à produção. Aqui está uma lista das principais funcionalidades:

-**1. Workflow**: - Construa e teste workflows poderosos de IA em uma interface visual, aproveitando todos os recursos a seguir e muito mais. +**1. Workflow**: +Construa e teste workflows poderosos de IA em uma interface visual, aproveitando todos os recursos a seguir e muito mais. -**2. Suporte abrangente a modelos**: - Integração perfeita com centenas de LLMs proprietários e de código aberto de diversas provedoras e soluções auto-hospedadas, abrangendo GPT, Mistral, Llama3 e qualquer modelo compatível com a API da OpenAI. A lista completa de provedores suportados pode ser encontrada [aqui](https://docs.dify.ai/getting-started/readme/model-providers). +**2. Suporte abrangente a modelos**: +Integração perfeita com centenas de LLMs proprietários e de código aberto de diversas provedoras e soluções auto-hospedadas, abrangendo GPT, Mistral, Llama3 e qualquer modelo compatível com a API da OpenAI. A lista completa de provedores suportados pode ser encontrada [aqui](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. IDE de Prompt**: +Interface intuitiva para criação de prompts, comparação de desempenho de modelos e adição de recursos como conversão de texto para fala em um aplicativo baseado em chat. -**3. IDE de Prompt**: - Interface intuitiva para criação de prompts, comparação de desempenho de modelos e adição de recursos como conversão de texto para fala em um aplicativo baseado em chat. +**4. Pipeline RAG**: +Extensas capacidades de RAG que cobrem desde a ingestão de documentos até a recuperação, com suporte nativo para extração de texto de PDFs, PPTs e outros formatos de documentos comuns. -**4. Pipeline RAG**: - Extensas capacidades de RAG que cobrem desde a ingestão de documentos até a recuperação, com suporte nativo para extração de texto de PDFs, PPTs e outros formatos de documentos comuns. +**5. Capacidades de agente**: +Você pode definir agentes com base em LLM Function Calling ou ReAct e adicionar ferramentas pré-construídas ou personalizadas para o agente. O Dify oferece mais de 50 ferramentas integradas para agentes de IA, como Google Search, DALL·E, Stable Diffusion e WolframAlpha. -**5. Capacidades de agente**: - Você pode definir agentes com base em LLM Function Calling ou ReAct e adicionar ferramentas pré-construídas ou personalizadas para o agente. O Dify oferece mais de 50 ferramentas integradas para agentes de IA, como Google Search, DALL·E, Stable Diffusion e WolframAlpha. +**6. LLMOps**: +Monitore e analise os registros e o desempenho do aplicativo ao longo do tempo. É possível melhorar continuamente prompts, conjuntos de dados e modelos com base nos dados de produção e anotações. -**6. LLMOps**: - Monitore e analise os registros e o desempenho do aplicativo ao longo do tempo. É possível melhorar continuamente prompts, conjuntos de dados e modelos com base nos dados de produção e anotações. - -**7. Backend como Serviço**: - Todas os recursos do Dify vêm com APIs correspondentes, permitindo que você integre o Dify sem esforço na lógica de negócios da sua empresa. - - -## Comparação de recursos - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Recurso | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| Abordagem de Programação | Orientada a API + Aplicativo | Código Python | Orientada a Aplicativo | Orientada a API |
| LLMs Suportados | Variedade Rica | Variedade Rica | Variedade Rica | Apenas OpenAI |
| RAG Engine | ✅ | ✅ | ✅ | ✅ |
| Agente | ✅ | ✅ | ❌ | ✅ |
| Workflow | ✅ | ❌ | ✅ | ❌ |
| Observabilidade | ✅ | ✅ | ✅ | ❌ |
| Recursos Empresariais (SSO/Controle de Acesso) | ✅ | ❌ | ❌ | ❌ |
| Implantação Local | ✅ | ✅ | ✅ | ❌ |
+**7. Backend como Serviço**: +Todos os recursos do Dify vêm com APIs correspondentes, permitindo que você integre o Dify sem esforço na lógica de negócios da sua empresa. ## Usando o Dify - **Nuvem
** -Oferecemos o serviço [Dify Cloud](https://dify.ai) para qualquer pessoa experimentar sem nenhuma configuração. Ele fornece todas as funcionalidades da versão auto-hospedada, incluindo 200 chamadas GPT-4 gratuitas no plano sandbox. + Oferecemos o serviço [Dify Cloud](https://dify.ai) para qualquer pessoa experimentar sem nenhuma configuração. Ele fornece todas as funcionalidades da versão auto-hospedada, incluindo 200 chamadas GPT-4 gratuitas no plano sandbox. - **Auto-hospedagem do Dify Community Edition
** -Configure rapidamente o Dify no seu ambiente com este [guia inicial](#quick-start). -Use nossa [documentação](https://docs.dify.ai) para referências adicionais e instruções mais detalhadas. + Configure rapidamente o Dify no seu ambiente com este [guia inicial](#quick-start). + Use nossa [documentação](https://docs.dify.ai) para referências adicionais e instruções mais detalhadas. - **Dify para empresas/organizações
** -Oferecemos recursos adicionais voltados para empresas. [Envie suas perguntas através deste chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) ou [envie-nos um e-mail](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) para discutir necessidades empresariais.
- > Para startups e pequenas empresas que utilizam AWS, confira o [Dify Premium no AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e implemente no seu próprio AWS VPC com um clique. É uma oferta AMI acessível com a opção de criar aplicativos com logotipo e marca personalizados. + Oferecemos recursos adicionais voltados para empresas. [Envie suas perguntas através deste chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) ou [envie-nos um e-mail](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) para discutir necessidades empresariais.
+ > Para startups e pequenas empresas que utilizam AWS, confira o [Dify Premium no AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) e implemente no seu próprio AWS VPC com um clique. É uma oferta AMI acessível com a opção de criar aplicativos com logotipo e marca personalizados. ## Mantendo-se atualizado @@ -167,13 +99,12 @@ Dê uma estrela no Dify no GitHub e seja notificado imediatamente sobre novos la ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## Início rápido + > Antes de instalar o Dify, certifique-se de que sua máquina atenda aos seguintes requisitos mínimos de sistema: -> ->- CPU >= 2 Núcleos ->- RAM >= 4 GiB +> +> - CPU >= 2 Núcleos +> - RAM >= 4 GiB
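When a new release ships (see the "Mantendo-se atualizado" note above), updating a Docker Compose deployment follows the usual pull-and-recreate pattern; a sketch mirroring the upgrade steps in the project's documentation:

```bash
cd dify/docker

docker compose down   # stop the running stack
git pull origin main  # fetch the latest compose files
docker compose pull   # pull updated images
docker compose up -d  # recreate the containers
```

Backing up the data volumes and the .env file beforehand is prudent when crossing major versions.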
@@ -207,16 +138,19 @@ Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts] Implante o Dify na Plataforma Cloud com um único clique usando [terraform](https://www.terraform.io/) ##### Azure Global + - [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### Usando AWS CDK para Implantação Implante o Dify na AWS usando [CDK](https://aws.amazon.com/cdk/) -##### AWS +##### AWS + - [AWS CDK por @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK por @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -232,10 +166,9 @@ Implante o Dify na Alibaba Cloud com um clique usando o [Alibaba Cloud Data Mana Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - ## Contribuindo -Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_PT.md). Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. > Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). @@ -248,10 +181,10 @@ Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em ## Comunidade e contato -* [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. -* [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. -* [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade. +- [Discussões no GitHub](https://github.com/langgenius/dify/discussions). Melhor para: compartilhar feedback e fazer perguntas. +- [Problemas no GitHub](https://github.com/langgenius/dify/issues). Melhor para: relatar bugs encontrados no Dify.AI e propor novos recursos. Veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Melhor para: compartilhar suas aplicações e interagir com a comunidade. +- [X(Twitter)](https://twitter.com/dify_ai). Melhor para: compartilhar suas aplicações e interagir com a comunidade. ## Histórico de estrelas diff --git a/README_SI.md b/README_SI.md index ac16df798b..c20dc3484f 100644 --- a/README_SI.md +++ b/README_SI.md @@ -50,14 +50,14 @@ README in বাংলা

- -Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. +Dify je odprtokodna platforma za razvoj aplikacij LLM. Njegov intuitivni vmesnik združuje agentski potek dela z umetno inteligenco, cevovod RAG, zmogljivosti agentov, upravljanje modelov, funkcije opazovanja in več, kar vam omogoča hiter prehod od prototipa do proizvodnje. ## Hitri začetek + > Preden namestite Dify, se prepričajte, da vaša naprava izpolnjuje naslednje minimalne sistemske zahteve: -> ->- CPU >= 2 Core ->- RAM >= 4 GiB +> +> - CPU >= 2 Core +> - RAM >= 4 GiB
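After `docker compose up -d` completes, a quick health check before opening the initialization page mentioned in the next hunk might look like this (the `api` service name follows the compose file's conventions):

```bash
# List the Dify containers and confirm they are running
docker compose ps

# Tail a service's logs if anything looks unhealthy
docker compose logs -f api
```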
@@ -73,116 +73,48 @@ docker compose up -d Po zagonu lahko dostopate do nadzorne plošče Dify v brskalniku na [http://localhost/install](http://localhost/install) in začnete postopek inicializacije. #### Iskanje pomoči + Prosimo, glejte naša pogosta vprašanja [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) če naletite na težave pri nastavitvi Dify. Če imate še vedno težave, se obrnite na [skupnost ali nas](#community--contact). > Če želite prispevati k Difyju ali narediti dodaten razvoj, glejte naš vodnik za [uvajanje iz izvorne kode](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code) ## Ključne značilnosti -**1. Potek dela**: - Zgradite in preizkusite zmogljive poteke dela AI na vizualnem platnu, pri čemer izkoristite vse naslednje funkcije in več. -**2. Celovita podpora za modele**: - Brezhibna integracija s stotinami lastniških/odprtokodnih LLM-jev ducatov ponudnikov sklepanja in samostojnih rešitev, ki pokrivajo GPT, Mistral, Llama3 in vse modele, združljive z API-jem OpenAI. Celoten seznam podprtih ponudnikov modelov najdete [tukaj](https://docs.dify.ai/getting-started/readme/model-providers). +**1. Potek dela**: +Zgradite in preizkusite zmogljive poteke dela AI na vizualnem platnu, pri čemer izkoristite vse naslednje funkcije in več. + +**2. Celovita podpora za modele**: +Brezhibna integracija s stotinami lastniških/odprtokodnih LLM-jev ducatov ponudnikov sklepanja in samostojnih rešitev, ki pokrivajo GPT, Mistral, Llama3 in vse modele, združljive z API-jem OpenAI. Celoten seznam podprtih ponudnikov modelov najdete [tukaj](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. Prompt IDE**: +intuitivni vmesnik za ustvarjanje pozivov, primerjavo zmogljivosti modela in dodajanje dodatnih funkcij, kot je pretvorba besedila v govor, aplikaciji, ki temelji na klepetu. -**3. Prompt IDE**: - intuitivni vmesnik za ustvarjanje pozivov, primerjavo zmogljivosti modela in dodajanje dodatnih funkcij, kot je pretvorba besedila v govor, aplikaciji, ki temelji na klepetu. +**4. RAG Pipeline**: +E Obsežne zmogljivosti RAG, ki pokrivajo vse od vnosa dokumenta do priklica, s podporo za ekstrakcijo besedila iz datotek PDF, PPT in drugih običajnih formatov dokumentov. -**4. RAG Pipeline**: - E Obsežne zmogljivosti RAG, ki pokrivajo vse od vnosa dokumenta do priklica, s podporo za ekstrakcijo besedila iz datotek PDF, PPT in drugih običajnih formatov dokumentov. +**5. Agent capabilities**: +definirate lahko agente, ki temeljijo na klicanju funkcij LLM ali ReAct, in dodate vnaprej izdelana orodja ali orodja po meri za agenta. Dify ponuja več kot 50 vgrajenih orodij za agente AI, kot so Google Search, DALL·E, Stable Diffusion in WolframAlpha. -**5. Agent capabilities**: - definirate lahko agente, ki temeljijo na klicanju funkcij LLM ali ReAct, in dodate vnaprej izdelana orodja ali orodja po meri za agenta. Dify ponuja več kot 50 vgrajenih orodij za agente AI, kot so Google Search, DALL·E, Stable Diffusion in WolframAlpha. +**6. LLMOps**: +Spremljajte in analizirajte dnevnike aplikacij in učinkovitost skozi čas. Pozive, nabore podatkov in modele lahko nenehno izboljšujete na podlagi proizvodnih podatkov in opomb. -**6. LLMOps**: - Spremljajte in analizirajte dnevnike aplikacij in učinkovitost skozi čas. Pozive, nabore podatkov in modele lahko nenehno izboljšujete na podlagi proizvodnih podatkov in opomb. - -**7. 
Backend-as-a-Service**: - AVse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko. -## Primerjava Funkcij
| Funkcija | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| Programski pristop | API + usmerjeno v aplikacije | Python koda | Usmerjeno v aplikacije | Usmerjeno v API |
| Podprti LLM-ji | Bogata izbira | Bogata izbira | Bogata izbira | Samo OpenAI |
| RAG pogon | ✅ | ✅ | ✅ | ✅ |
| Agent | ✅ | ✅ | ❌ | ✅ |
| Potek dela | ✅ | ❌ | ✅ | ❌ |
| Spremljanje | ✅ | ✅ | ✅ | ❌ |
| Funkcija za podjetja (SSO/nadzor dostopa) | ✅ | ❌ | ❌ | ❌ |
| Lokalna namestitev | ✅ | ✅ | ✅ | ❌ |
+**7. Backend-as-a-Service**: +Vse ponudbe Difyja so opremljene z ustreznimi API-ji, tako da lahko Dify brez težav integrirate v svojo poslovno logiko. ## Uporaba Dify - **Cloud
** -Gostimo storitev Dify Cloud za vsakogar, ki jo lahko preizkusite brez nastavitev. Zagotavlja vse zmožnosti različice za samostojno namestitev in vključuje 200 brezplačnih klicev GPT-4 v načrtu peskovnika. + Gostimo storitev Dify Cloud za vsakogar, ki jo lahko preizkusite brez nastavitev. Zagotavlja vse zmožnosti različice za samostojno namestitev in vključuje 200 brezplačnih klicev GPT-4 v načrtu peskovnika. - **Self-hosting Dify Community Edition
** -Hitro zaženite Dify v svojem okolju s tem [začetnim vodnikom](#quick-start) . Za dodatne reference in podrobnejša navodila uporabite našo [dokumentacijo](https://docs.dify.ai) . - + Hitro zaženite Dify v svojem okolju s tem [začetnim vodnikom](#quick-start) . Za dodatne reference in podrobnejša navodila uporabite našo [dokumentacijo](https://docs.dify.ai) . - **Dify za podjetja/organizacije
** -Ponujamo dodatne funkcije, osredotočene na podjetja. Zabeležite svoja vprašanja prek tega klepetalnega robota ali nam pošljite e-pošto, da se pogovorimo o potrebah podjetja.
- > Za novoustanovljena podjetja in mala podjetja, ki uporabljajo AWS, si oglejte Dify Premium na AWS Marketplace in ga z enim klikom uvedite v svoj AWS VPC. To je cenovno ugodna ponudba AMI z možnostjo ustvarjanja aplikacij z logotipom in blagovno znamko po meri. + Ponujamo dodatne funkcije, osredotočene na podjetja. Zabeležite svoja vprašanja prek tega klepetalnega robota ali nam pošljite e-pošto, da se pogovorimo o potrebah podjetja.
+ > Za novoustanovljena podjetja in mala podjetja, ki uporabljajo AWS, si oglejte Dify Premium na AWS Marketplace in ga z enim klikom uvedite v svoj AWS VPC. To je cenovno ugodna ponudba AMI z možnostjo ustvarjanja aplikacij z logotipom in blagovno znamko po meri. ## Staying ahead @@ -190,7 +122,6 @@ Star Dify on GitHub and be instantly notified of new releases. ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - ## Napredne nastavitve Če morate prilagoditi konfiguracijo, si oglejte komentarje v naši datoteki .env.example in posodobite ustrezne vrednosti v svoji .env datoteki. Poleg tega boste morda morali prilagoditi docker-compose.yamlsamo datoteko, na primer spremeniti različice slike, preslikave vrat ali namestitve nosilca, glede na vaše specifično okolje in zahteve za uvajanje. Po kakršnih koli spremembah ponovno zaženite docker-compose up -d. Celoten seznam razpoložljivih spremenljivk okolja najdete tukaj . @@ -208,16 +139,19 @@ Star Dify on GitHub and be instantly notified of new releases. namestite Dify v Cloud Platform z enim klikom z uporabo [terraform](https://www.terraform.io/) ##### Azure Global + - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### Uporaba AWS CDK za uvajanje Uvedite Dify v AWS z uporabo [CDK](https://aws.amazon.com/cdk/) -##### AWS +##### AWS + - [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -233,21 +167,18 @@ Z enim klikom namestite Dify na Alibaba Cloud z [Alibaba Cloud Data Management]( Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) - ## Prispevam -Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. - - +Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. > Iščemo sodelavce za pomoč pri prevajanju Difyja v jezike, ki niso mandarinščina ali angleščina. Če želite pomagati, si oglejte i18n README za več informacij in nam pustite komentar v global-userskanalu našega strežnika skupnosti Discord . ## Skupnost in stik -* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj. -* [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). -* [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. -* [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. +- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj. +- [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. 
Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). +- [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. +- [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. **Contributors** @@ -259,7 +190,6 @@ Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkra [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) - ## Varnostno razkritje Zaradi zaščite vaše zasebnosti se izogibajte objavljanju varnostnih vprašanj na GitHub. Namesto tega pošljite vprašanja na security@dify.ai in zagotovili vam bomo podrobnejši odgovor. diff --git a/README_TR.md b/README_TR.md index 8065ec908c..21df0d1605 100644 --- a/README_TR.md +++ b/README_TR.md @@ -48,11 +48,10 @@ README in বাংলা

- Dify, açık kaynaklı bir LLM uygulama geliştirme platformudur. Sezgisel arayüzü, AI iş akışı, RAG pipeline'ı, ajan yetenekleri, model yönetimi, gözlemlenebilirlik özellikleri ve daha fazlasını birleştirerek, prototipten üretime hızlıca geçmenizi sağlar. İşte temel özelliklerin bir listesi:

-**1. Workflow**: +**1. Workflow**: Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edin, aşağıdaki tüm özellikleri ve daha fazlasını kullanarak. **2. Kapsamlı model desteği**: @@ -60,101 +59,33 @@ Görsel bir arayüz üzerinde güçlü AI iş akışları oluşturun ve test edi ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. Prompt IDE**: +Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz. -**3. Prompt IDE**: - Komut istemlerini oluşturmak, model performansını karşılaştırmak ve sohbet tabanlı uygulamalara metin-konuşma gibi ek özellikler eklemek için kullanıcı dostu bir arayüz. +**4. RAG Pipeline**: +Belge alımından bilgi çekmeye kadar geniş kapsamlı RAG yetenekleri. PDF'ler, PPT'ler ve diğer yaygın belge formatlarından metin çıkarma için hazır destek sunar. -**4. RAG Pipeline**: - Belge alımından bilgi çekmeye kadar geniş kapsamlı RAG yetenekleri. PDF'ler, PPT'ler ve diğer yaygın belge formatlarından metin çıkarma için hazır destek sunar. +**5. Ajan yetenekleri**: +LLM Fonksiyon Çağırma veya ReAct'a dayalı ajanlar tanımlayabilir ve bu ajanlara önceden hazırlanmış veya özel araçlar ekleyebilirsiniz. Dify, AI ajanları için Google Arama, DALL·E, Stable Diffusion ve WolframAlpha gibi 50'den fazla yerleşik araç sağlar. -**5. Ajan yetenekleri**: - LLM Fonksiyon Çağırma veya ReAct'a dayalı ajanlar tanımlayabilir ve bu ajanlara önceden hazırlanmış veya özel araçlar ekleyebilirsiniz. Dify, AI ajanları için Google Arama, DALL·E, Stable Diffusion ve WolframAlpha gibi 50'den fazla yerleşik araç sağlar. +**6. LLMOps**: +Uygulama loglarını ve performans metriklerini zaman içinde izleme ve analiz etme imkanı. Üretim ortamından elde edilen verilere ve kullanıcı geri bildirimlerine dayanarak, prompt'ları, veri setlerini ve modelleri sürekli olarak optimize edebilirsiniz. Bu sayede, AI uygulamanızın performansını ve doğruluğunu sürekli olarak artırabilirsiniz. -**6. LLMOps**: - Uygulama loglarını ve performans metriklerini zaman içinde izleme ve analiz etme imkanı. Üretim ortamından elde edilen verilere ve kullanıcı geri bildirimlerine dayanarak, prompt'ları, veri setlerini ve modelleri sürekli olarak optimize edebilirsiniz. Bu sayede, AI uygulamanızın performansını ve doğruluğunu sürekli olarak artırabilirsiniz. - -**7. Hizmet Olarak Backend**: - Dify'ın tüm özellikleri ilgili API'lerle birlikte gelir, böylece Dify'ı kendi iş mantığınıza kolayca entegre edebilirsiniz. - - -## Özellik karşılaştırması - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Özellik | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| Programlama Yaklaşımı | API + Uygulama odaklı | Python Kodu | Uygulama odaklı | API odaklı |
| Desteklenen LLM'ler | Zengin Çeşitlilik | Zengin Çeşitlilik | Zengin Çeşitlilik | Yalnızca OpenAI |
| RAG Motoru | ✅ | ✅ | ✅ | ✅ |
| Ajan | ✅ | ✅ | ❌ | ✅ |
| İş Akışı | ✅ | ❌ | ✅ | ❌ |
| Gözlemlenebilirlik | ✅ | ✅ | ✅ | ❌ |
| Kurumsal Özellikler (SSO/Erişim kontrolü) | ✅ | ❌ | ❌ | ❌ |
| Yerel Dağıtım | ✅ | ✅ | ✅ | ❌ |
+**7. Hizmet Olarak Backend**: +Dify'ın tüm özellikleri ilgili API'lerle birlikte gelir, böylece Dify'ı kendi iş mantığınıza kolayca entegre edebilirsiniz. ## Dify'ı Kullanma - **Cloud
** -Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir. + Herkesin sıfır kurulumla denemesi için bir [Dify Cloud](https://dify.ai) hizmeti sunuyoruz. Bu hizmet, kendi kendine dağıtılan versiyonun tüm yeteneklerini sağlar ve sandbox planında 200 ücretsiz GPT-4 çağrısı içerir. - **Dify Topluluk Sürümünü Kendi Sunucunuzda Barındırma
** -Bu [başlangıç kılavuzu](#quick-start) ile Dify'ı kendi ortamınızda hızlıca çalıştırın. -Daha fazla referans ve detaylı talimatlar için [dokümantasyonumuzu](https://docs.dify.ai) kullanın. + Bu [başlangıç kılavuzu](#quick-start) ile Dify'ı kendi ortamınızda hızlıca çalıştırın. + Daha fazla referans ve detaylı talimatlar için [dokümantasyonumuzu](https://docs.dify.ai) kullanın. - **Kurumlar / organizasyonlar için Dify
** -Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bize bir e-posta gönderin](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry).
+ Ek kurumsal odaklı özellikler sunuyoruz. Kurumsal ihtiyaçları görüşmek için [bize bir e-posta gönderin](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry).
+ > AWS kullanan startuplar ve küçük işletmeler için, [AWS Marketplace'deki Dify Premium'a](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) göz atın ve tek tıklamayla kendi AWS VPC'nize dağıtın. Bu, özel logo ve marka ile uygulamalar oluşturma seçeneğine sahip uygun fiyatlı bir AMI teklifdir. ## Güncel Kalma @@ -163,13 +94,12 @@ GitHub'da Dify'a yıldız verin ve yeni sürümlerden anında haberdar olun. ![bizi-yıldızlayın](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4) - - ## Hızlı başlangıç + > Dify'ı kurmadan önce, makinenizin aşağıdaki minimum sistem gereksinimlerini karşıladığından emin olun: -> ->- CPU >= 2 Çekirdek ->- RAM >= 4GB +> +> - CPU >= 2 Çekirdek +> - RAM >= 4GB
Dify sunucusunu başlatmanın en kolay yolu, [docker-compose.yml](docker/docker-compose.yaml) dosyamızı çalıştırmaktır. Kurulum komutunu çalıştırmadan önce, makinenizde [Docker](https://docs.docker.com/get-docker/) ve [Docker Compose](https://docs.docker.com/compose/install/)'un kurulu olduğundan emin olun: @@ -201,16 +131,19 @@ Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify' Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.terraform.io/) kullanarak ##### Azure Global + - [Azure Terraform tarafından @nikawang](https://github.com/nikawang/dify-azure-terraform) ##### Google Cloud + - [Google Cloud Terraform tarafından @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) #### AWS CDK ile Dağıtım [CDK](https://aws.amazon.com/cdk/) kullanarak Dify'ı AWS'ye dağıtın -##### AWS +##### AWS + - [AWS CDK tarafından @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) - [AWS CDK tarafından @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws) @@ -226,10 +159,9 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) kullanarak Dify'ı tek tıkla AKS'ye dağıtın - ## Katkıda Bulunma -Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz. +Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TR.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. > Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. @@ -242,10 +174,10 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p ## Topluluk & iletişim -* [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. -* [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. -* [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. -* [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. +- [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. +- [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. +- [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. +- [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. 
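For the community Terraform modules linked in the deployment sections above, usage follows the standard Terraform lifecycle. A generic sketch, using the Azure module as an example; each module defines its own required variables:

```bash
# Clone one of the community modules, e.g. the Azure one linked above
git clone https://github.com/nikawang/dify-azure-terraform.git
cd dify-azure-terraform

terraform init   # download providers and modules
terraform plan   # review the resources that would be created
terraform apply  # provision Dify on the target cloud
```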
## Star history diff --git a/README_TW.md b/README_TW.md index c36027183c..18d0724784 100644 --- a/README_TW.md +++ b/README_TW.md @@ -106,85 +106,18 @@ docker compose up -d **7. 後端即服務**: Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify 整合到您自己的業務邏輯中。 -## 功能比較 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| 功能 | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| 程式設計方法 | API + 應用導向 | Python 代碼 | 應用導向 | API 導向 |
| 支援的 LLM 模型 | 豐富多樣 | 豐富多樣 | 豐富多樣 | 僅限 OpenAI |
| RAG 引擎 | ✅ | ✅ | ✅ | ✅ |
| 代理功能 | ✅ | ✅ | ❌ | ✅ |
| 工作流程 | ✅ | ❌ | ✅ | ❌ |
| 可觀察性 | ✅ | ✅ | ✅ | ❌ |
| 企業級功能 (SSO/存取控制) | ✅ | ❌ | ❌ | ❌ |
| 本地部署 | ✅ | ✅ | ✅ | ❌ |
- ## 使用 Dify - **雲端服務
** 我們提供 [Dify Cloud](https://dify.ai) 服務,任何人都可以零配置嘗試。它提供與自部署版本相同的所有功能,並在沙盒計劃中包含 200 次免費 GPT-4 調用。 - **自託管 Dify 社區版
** - 使用這份[快速指南](#快速開始)在您的環境中快速運行 Dify。 + 使用這份[快速指南](#%E5%BF%AB%E9%80%9F%E9%96%8B%E5%A7%8B)在您的環境中快速運行 Dify。 使用我們的[文檔](https://docs.dify.ai)獲取更多參考和深入指導。 - **企業/組織版 Dify
** - 我們提供額外的企業中心功能。[通過這個聊天機器人記錄您的問題](https://udify.app/chat/22L1zSxg6yW1cWQg)或[發送電子郵件給我們](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)討論企業需求。
+ 我們提供額外的企業中心功能。[通過這個聊天機器人記錄您的問題](https://udify.app/chat/22L1zSxg6yW1cWQg)或[發送電子郵件給我們](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry)討論企業需求。
+ > 對於使用 AWS 的初創企業和小型企業,請查看 [AWS Marketplace 上的 Dify Premium](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6),並一鍵部署到您自己的 AWS VPC。這是一個經濟實惠的 AMI 產品,可選擇使用自定義徽標和品牌創建應用。 ## 保持領先 @@ -238,10 +171,9 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify 使用[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) 將 Dify 一鍵部署到 AKS - ## 貢獻 -對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 +對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_TW.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 > 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 diff --git a/README_VI.md b/README_VI.md index 958a70114a..6d5305fb75 100644 --- a/README_VI.md +++ b/README_VI.md @@ -48,115 +48,45 @@ README in বাংলা

- Dify là một nền tảng phát triển ứng dụng LLM mã nguồn mở. Giao diện trực quan kết hợp quy trình làm việc AI, mô hình RAG, khả năng tác nhân, quản lý mô hình, tính năng quan sát và hơn thế nữa, cho phép bạn nhanh chóng chuyển từ nguyên mẫu sang sản phẩm. Đây là danh sách các tính năng cốt lõi:

-**1. Quy trình làm việc**: - Xây dựng và kiểm tra các quy trình làm việc AI mạnh mẽ trên một canvas trực quan, tận dụng tất cả các tính năng sau đây và hơn thế nữa. +**1. Quy trình làm việc**: +Xây dựng và kiểm tra các quy trình làm việc AI mạnh mẽ trên một canvas trực quan, tận dụng tất cả các tính năng sau đây và hơn thế nữa. -**2. Hỗ trợ mô hình toàn diện**: - Tích hợp liền mạch với hàng trăm mô hình LLM độc quyền / mã nguồn mở từ hàng chục nhà cung cấp suy luận và giải pháp tự lưu trữ, bao gồm GPT, Mistral, Llama3, và bất kỳ mô hình tương thích API OpenAI nào. Danh sách đầy đủ các nhà cung cấp mô hình được hỗ trợ có thể được tìm thấy [tại đây](https://docs.dify.ai/getting-started/readme/model-providers). +**2. Hỗ trợ mô hình toàn diện**: +Tích hợp liền mạch với hàng trăm mô hình LLM độc quyền / mã nguồn mở từ hàng chục nhà cung cấp suy luận và giải pháp tự lưu trữ, bao gồm GPT, Mistral, Llama3, và bất kỳ mô hình tương thích API OpenAI nào. Danh sách đầy đủ các nhà cung cấp mô hình được hỗ trợ có thể được tìm thấy [tại đây](https://docs.dify.ai/getting-started/readme/model-providers). ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3) +**3. IDE Prompt**: +Giao diện trực quan để tạo prompt, so sánh hiệu suất mô hình và thêm các tính năng bổ sung như chuyển văn bản thành giọng nói cho một ứng dụng dựa trên trò chuyện. -**3. IDE Prompt**: - Giao diện trực quan để tạo prompt, so sánh hiệu suất mô hình và thêm các tính năng bổ sung như chuyển văn bản thành giọng nói cho một ứng dụng dựa trên trò chuyện. +**4. Mô hình RAG**: +Khả năng RAG mở rộng bao gồm mọi thứ từ nhập tài liệu đến truy xuất, với hỗ trợ sẵn có cho việc trích xuất văn bản từ PDF, PPT và các định dạng tài liệu phổ biến khác. -**4. Mô hình RAG**: - Khả năng RAG mở rộng bao gồm mọi thứ từ nhập tài liệu đến truy xuất, với hỗ trợ sẵn có cho việc trích xuất văn bản từ PDF, PPT và các định dạng tài liệu phổ biến khác. +**5. Khả năng tác nhân**: +Bạn có thể định nghĩa các tác nhân dựa trên LLM Function Calling hoặc ReAct, và thêm các công cụ được xây dựng sẵn hoặc tùy chỉnh cho tác nhân. Dify cung cấp hơn 50 công cụ tích hợp sẵn cho các tác nhân AI, như Google Search, DALL·E, Stable Diffusion và WolframAlpha. -**5. Khả năng tác nhân**: - Bạn có thể định nghĩa các tác nhân dựa trên LLM Function Calling hoặc ReAct, và thêm các công cụ được xây dựng sẵn hoặc tùy chỉnh cho tác nhân. Dify cung cấp hơn 50 công cụ tích hợp sẵn cho các tác nhân AI, như Google Search, DALL·E, Stable Diffusion và WolframAlpha. +**6. LLMOps**: +Giám sát và phân tích nhật ký và hiệu suất ứng dụng theo thời gian. Bạn có thể liên tục cải thiện prompt, bộ dữ liệu và mô hình dựa trên dữ liệu sản xuất và chú thích. -**6. LLMOps**: - Giám sát và phân tích nhật ký và hiệu suất ứng dụng theo thời gian. Bạn có thể liên tục cải thiện prompt, bộ dữ liệu và mô hình dựa trên dữ liệu sản xuất và chú thích. - -**7. Backend-as-a-Service**: - Tất cả các dịch vụ của Dify đều đi kèm với các API tương ứng, vì vậy bạn có thể dễ dàng tích hợp Dify vào logic kinh doanh của riêng mình. - - -## So sánh tính năng - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
| Tính năng | Dify.AI | LangChain | Flowise | OpenAI Assistants API |
| --- | --- | --- | --- | --- |
| Phương pháp lập trình | Hướng API + Ứng dụng | Mã Python | Hướng ứng dụng | Hướng API |
| LLMs được hỗ trợ | Đa dạng phong phú | Đa dạng phong phú | Đa dạng phong phú | Chỉ OpenAI |
| RAG Engine | ✅ | ✅ | ✅ | ✅ |
| Agent | ✅ | ✅ | ❌ | ✅ |
| Quy trình làm việc | ✅ | ❌ | ✅ | ❌ |
| Khả năng quan sát | ✅ | ✅ | ✅ | ❌ |
| Tính năng doanh nghiệp (SSO/Kiểm soát truy cập) | ✅ | ❌ | ❌ | ❌ |
| Triển khai cục bộ | ✅ | ✅ | ✅ | ❌ |
+**7. Backend-as-a-Service**: +Tất cả các dịch vụ của Dify đều đi kèm với các API tương ứng, vì vậy bạn có thể dễ dàng tích hợp Dify vào logic kinh doanh của riêng mình. ## Sử dụng Dify - **Cloud

## Using Dify

- **Cloud**
-We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
+  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.

- **Self-hosting Dify Community Edition**
-Quickly get Dify running in your environment with this [starter guide](#quick-start).
-Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
+  Quickly get Dify running in your environment with this [starter guide](#quick-start).
+  Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.

- **Dify for enterprise / organizations**
-We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
- > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
+  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs.
+ > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

 ## Staying ahead
@@ -164,13 +94,12 @@ Star Dify on GitHub and be instantly notified of new releases.

 ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)

-
-
 ## Quick start
+
 > Before installing Dify, make sure your machine meets the following minimum system requirements:
->
->- CPU >= 2 Core
->- RAM >= 4GB
+>
+> - CPU >= 2 Core
+> - RAM >= 4GB
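For orientation, the Community Edition quick start referenced above boils down to a few commands. A minimal sketch, assuming Docker and Docker Compose are installed and the default `docker/` layout of the repository:

```bash
# Minimal sketch: clone the repository, prepare the env file, start the stack.
git clone https://github.com/langgenius/dify.git
cd dify/docker
cp .env.example .env
docker compose up -d
```

Once the containers are up, the dashboard should be reachable at http://localhost/install to finish initialization.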
@@ -203,20 +132,22 @@ If you'd like to configure a highly-available setup, there

 Deploy Dify to a cloud platform with one click using [terraform](https://www.terraform.io/)

 ##### Azure Global
+
 - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)

 ##### Google Cloud
+
 - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)

 #### Using AWS CDK for Deployment

 Deploy Dify to AWS using [CDK](https://aws.amazon.com/cdk/)

-##### AWS
+##### AWS
+
 - [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
 - [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)

-
 #### Alibaba Cloud

 [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)

@@ -229,13 +160,11 @@ Deploy Dify to Alibaba Cloud with one click using

 Deploy Dify to AKS with one click using [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)

-
 ## Contributing

-For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
+For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING_VI.md).

 At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.

-
 > We are looking for contributors to help translate Dify into languages other than Chinese or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).

 **Contributors**

@@ -246,10 +175,10 @@

 ## Community & contact

-* [GitHub Discussions](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
-* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
-* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
-* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
+- [GitHub Discussions](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
+- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
+- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
+- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
 ## Star History

diff --git a/api/.env.example b/api/.env.example
index 3c30872422..e947c5584b 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -478,6 +478,13 @@ API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node
 # API workflow run repository implementation
 API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository

+# Workflow log cleanup configuration
+# Enable automatic cleanup of workflow run logs to manage database size
+WORKFLOW_LOG_CLEANUP_ENABLED=true
+# Number of days to retain workflow run logs (default: 30 days)
+WORKFLOW_LOG_RETENTION_DAYS=30
+# Batch size for workflow log cleanup operations (default: 100)
+WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100

 # App configuration
 APP_MAX_EXECUTION_TIME=1200
@@ -557,3 +564,7 @@ QUEUE_MONITOR_THRESHOLD=200
 QUEUE_MONITOR_ALERT_EMAILS=
 # Monitor interval in minutes, default is 30 minutes
 QUEUE_MONITOR_INTERVAL=30
+
+# Swagger UI configuration
+SWAGGER_UI_ENABLED=true
+SWAGGER_UI_PATH=/swagger-ui.html
diff --git a/api/.ruff.toml b/api/.ruff.toml
index db6872b9c8..9668dc9f76 100644
--- a/api/.ruff.toml
+++ b/api/.ruff.toml
@@ -43,6 +43,7 @@ select = [
     "S302", # suspicious-marshal-usage, disallow use of `marshal` module
     "S311", # suspicious-non-cryptographic-random-usage
     "G001", # don't use str format to logging messages
+    "G003", # don't use + in logging messages
     "G004", # don't use f-strings to format logging messages
 ]
diff --git a/api/README.md b/api/README.md
index b5298edf92..d322963ffc 100644
--- a/api/README.md
+++ b/api/README.md
@@ -3,7 +3,7 @@
 ## Usage

 > [!IMPORTANT]
->
+> 
 > In the v1.3.0 release, `poetry` has been replaced with
 > [`uv`](https://docs.astral.sh/uv/) as the package manager
 > for Dify API backend service.
@@ -20,25 +20,29 @@
    cd ../api
    ```

-2. Copy `.env.example` to `.env`
+1. Copy `.env.example` to `.env`

   ```cli
-  cp .env.example .env
+   cp .env.example .env
   ```

-3. Generate a `SECRET_KEY` in the `.env` file.
+
+1. Generate a `SECRET_KEY` in the `.env` file.

   bash for Linux
+
   ```bash for Linux
   sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
   ```
+
   bash for Mac
+
   ```bash for Mac
   secret_key=$(openssl rand -base64 42)
   sed -i '' "/^SECRET_KEY=/c\\
   SECRET_KEY=${secret_key}" .env
   ```

-4. Create environment.
+1. Create environment.

   Dify API service uses [UV](https://docs.astral.sh/uv/) to manage dependencies.
   First, you need to add the uv package manager, if you don't have it already.

@@ -49,13 +53,13 @@
   brew install uv
   ```

-5. Install dependencies
+1. Install dependencies

   ```bash
   uv sync --dev
   ```

-6. Run migrate
+1. Run migrate

   Before the first launch, migrate the database to the latest version.

@@ -63,24 +67,27 @@
   uv run flask db upgrade
   ```

-7. Start backend
+1. Start backend

   ```bash
   uv run flask run --host 0.0.0.0 --port=5001 --debug
   ```

-8. Start Dify [web](../web) service.
-9. Setup your application by visiting `http://localhost:3000`.
-10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.

-   ```bash
-   uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage
-   ```

-   Addition, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
-   ```bash
-   uv run celery -A app.celery beat
-   ```

+1. Start Dify [web](../web) service.
+
+1. Set up your application by visiting `http://localhost:3000`.
+
+1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
+
+```bash
+uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
+```
+
+Additionally, if you want to debug the celery scheduled tasks, you can use the following command in another terminal:
+
+```bash
+uv run celery -A app.celery beat
+```

 ## Testing

@@ -90,9 +97,16 @@
   uv sync --dev
   ```

-2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`
+1. Run the tests locally with mocked system environment variables in the `tool.pytest_env` section of `pyproject.toml`; for more details, see [CLAUDE.md](../CLAUDE.md)

   ```bash
-  uv run -P api bash dev/pytest/pytest_all_tests.sh
-  ```
+  uv run pytest                           # Run all tests
+  uv run pytest tests/unit_tests/         # Unit tests only
+  uv run pytest tests/integration_tests/  # Integration tests
+  # Code quality
+  ../dev/reformat              # Run all formatters and linters
+  uv run ruff check --fix ./   # Fix linting issues
+  uv run ruff format ./        # Format code
+  uv run mypy .                # Type checking
+  ```
diff --git a/api/app_factory.py b/api/app_factory.py
index 032d6b17fc..8a0417dd72 100644
--- a/api/app_factory.py
+++ b/api/app_factory.py
@@ -5,6 +5,8 @@ from configs import dify_config
 from contexts.wrapper import RecyclableContextVar
 from dify_app import DifyApp

+logger = logging.getLogger(__name__)
+

 # ----------------------------
 # Application Factory Function
@@ -32,7 +34,7 @@ def create_app() -> DifyApp:
     initialize_extensions(app)
     end_time = time.perf_counter()
     if dify_config.DEBUG:
-        logging.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
+        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
     return app

@@ -93,14 +95,14 @@ def initialize_extensions(app: DifyApp):
         is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
         if not is_enabled:
             if dify_config.DEBUG:
-                logging.info("Skipped %s", short_name)
+                logger.info("Skipped %s", short_name)
             continue

         start_time = time.perf_counter()
         ext.init_app(app)
         end_time = time.perf_counter()
         if dify_config.DEBUG:
-            logging.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))
+            logger.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))


 def create_migrations_app():
diff --git a/api/child_class.py b/api/child_class.py
new file mode 100644
index 0000000000..b210607b92
--- /dev/null
+++ b/api/child_class.py
@@ -0,0 +1,11 @@
+from tests.integration_tests.utils.parent_class import ParentClass
+
+
+class ChildClass(ParentClass):
+    """Test child class for module import helper tests"""
+
+    def __init__(self, name):
+        super().__init__(name)
+
+    def get_name(self):
+        return f"Child: {self.name}"
diff --git a/api/commands.py b/api/commands.py
index 6b38e34b9b..89fef39d25 100644
--- a/api/commands.py
+++ b/api/commands.py
@@ -38,6 +38,8 @@ from services.plugin.data_migration import PluginDataMigration
 from services.plugin.plugin_migration import PluginMigration
 from tasks.remove_app_and_related_data_task import delete_draft_variables_batch

+logger = logging.getLogger(__name__)
+

 @click.command("reset-password", help="Reset the account password.")
 @click.option("--email", prompt=True, help="Account email to reset password for")
@@ -685,7 +687,7 @@ def upgrade_db():
         click.echo(click.style("Database migration successful!", fg="green"))
     except Exception:
-
logging.exception("Failed to execute database migration") + logger.exception("Failed to execute database migration") finally: lock.release() else: @@ -733,7 +735,7 @@ where sites.id is null limit 1000""" except Exception: failed_app_ids.append(app_id) click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red")) - logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id) + logger.exception("Failed to fix app related site missing issue, app_id: %s", app_id) continue if not processed_count: diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 0b2f99aece..7638cd1899 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -1,4 +1,4 @@ -from typing import Annotated, Literal, Optional +from typing import Literal, Optional from pydantic import ( AliasChoices, @@ -968,6 +968,26 @@ class AccountConfig(BaseSettings): ) +class WorkflowLogConfig(BaseSettings): + WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True, description="Enable workflow run log cleanup") + WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30, description="Retention days for workflow run logs") + WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field( + default=100, description="Batch size for workflow run log cleanup operations" + ) + + +class SwaggerUIConfig(BaseSettings): + SWAGGER_UI_ENABLED: bool = Field( + description="Whether to enable Swagger UI in api module", + default=True, + ) + + SWAGGER_UI_PATH: str = Field( + description="Swagger UI page path in api module", + default="/swagger-ui.html", + ) + + class FeatureConfig( # place the configs in alphabet order AppExecutionConfig, @@ -999,9 +1019,11 @@ class FeatureConfig( WorkspaceConfig, LoginConfig, AccountConfig, + SwaggerUIConfig, # hosted services config HostedServiceConfig, CeleryBeatConfig, CeleryScheduleTasksConfig, + WorkflowLogConfig, ): pass diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index ba8bbc7135..4751b96010 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -215,6 +215,7 @@ class DatabaseConfig(BaseSettings): "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING, "connect_args": connect_args, "pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO, + "pool_reset_on_return": None, } diff --git a/api/controllers/common/fields.py b/api/controllers/common/fields.py index 3466eea1f6..df9de825de 100644 --- a/api/controllers/common/fields.py +++ b/api/controllers/common/fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from libs.helper import AppIconUrlField @@ -10,6 +10,12 @@ parameters__system_parameters = { "workflow_file_upload_limit": fields.Integer, } + +def build_system_parameters_model(api_or_ns: Api | Namespace): + """Build the system parameters model for the API or Namespace.""" + return api_or_ns.model("SystemParameters", parameters__system_parameters) + + parameters_fields = { "opening_statement": fields.String, "suggested_questions": fields.Raw, @@ -25,6 +31,14 @@ parameters_fields = { "system_parameters": fields.Nested(parameters__system_parameters), } + +def build_parameters_model(api_or_ns: Api | Namespace): + """Build the parameters model for the API or Namespace.""" + copied_fields = parameters_fields.copy() + copied_fields["system_parameters"] = fields.Nested(build_system_parameters_model(api_or_ns)) + return api_or_ns.model("Parameters", copied_fields) + + site_fields = { "title": fields.String, "chat_color_theme": 
fields.String, @@ -41,3 +55,8 @@ site_fields = { "show_workflow_steps": fields.Boolean, "use_icon_as_answer_icon": fields.Boolean, } + + +def build_site_model(api_or_ns: Api | Namespace): + """Build the site model for the API or Namespace.""" + return api_or_ns.model("Site", site_fields) diff --git a/api/controllers/common/helpers.py b/api/controllers/common/helpers.py index 008f1f0f7a..6a5197635e 100644 --- a/api/controllers/common/helpers.py +++ b/api/controllers/common/helpers.py @@ -1,3 +1,4 @@ +import contextlib import mimetypes import os import platform @@ -65,10 +66,8 @@ def guess_file_info_from_response(response: httpx.Response): # Use python-magic to guess MIME type if still unknown or generic if mimetype == "application/octet-stream" and magic is not None: - try: + with contextlib.suppress(magic.MagicException): mimetype = magic.from_buffer(response.content[:1024], mime=True) - except magic.MagicException: - pass extension = os.path.splitext(filename)[1] diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index 57dbc8da64..e25f92399c 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -84,7 +84,6 @@ from .datasets import ( external, hit_testing, metadata, - upload_file, website, ) diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 8a55197fb6..7e5c28200a 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -1,7 +1,7 @@ from functools import wraps from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound, Unauthorized diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index d7500c415c..401e88709a 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -1,8 +1,8 @@ -from typing import Any +from typing import Any, Optional -import flask_restful +import flask_restx from flask_login import current_user -from flask_restful import Resource, fields, marshal_with +from flask_restx import Resource, fields, marshal_with from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -40,7 +40,7 @@ def _get_resource(resource_id, tenant_id, resource_model): ).scalar_one_or_none() if resource is None: - flask_restful.abort(404, message=f"{resource_model.__name__} not found.") + flask_restx.abort(404, message=f"{resource_model.__name__} not found.") return resource @@ -49,7 +49,7 @@ class BaseApiKeyListResource(Resource): method_decorators = [account_initialization_required, login_required, setup_required] resource_type: str | None = None - resource_model: Any = None + resource_model: Optional[Any] = None resource_id_field: str | None = None token_prefix: str | None = None max_keys = 10 @@ -81,7 +81,7 @@ class BaseApiKeyListResource(Resource): ) if current_key_count >= self.max_keys: - flask_restful.abort( + flask_restx.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", code="max_keys_exceeded", @@ -102,7 +102,7 @@ class BaseApiKeyResource(Resource): method_decorators = [account_initialization_required, login_required, setup_required] resource_type: str | None = None - resource_model: Any = None + resource_model: Optional[Any] = None resource_id_field: str | None = None def delete(self, resource_id, api_key_id): @@ -126,7 +126,7 @@ 
class BaseApiKeyResource(Resource): ) if key is None: - flask_restful.abort(404, message="API key not found") + flask_restx.abort(404, message="API key not found") db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py index c228743fa5..c6cb6f6e3a 100644 --- a/api/controllers/console/app/advanced_prompt_template.py +++ b/api/controllers/console/app/advanced_prompt_template.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.wraps import account_initialization_required, setup_required diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py index d433415894..a964154207 100644 --- a/api/controllers/console/app/agent.py +++ b/api/controllers/console/app/agent.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.app.wraps import get_app_model diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index 493a9a52e2..37d23ccd9f 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -1,6 +1,8 @@ +from typing import Literal + from flask import request from flask_login import current_user -from flask_restful import Resource, marshal, marshal_with, reqparse +from flask_restx import Resource, marshal, marshal_with, reqparse from werkzeug.exceptions import Forbidden from controllers.common.errors import NoFileUploadedError, TooManyFilesError @@ -24,7 +26,7 @@ class AnnotationReplyActionApi(Resource): @login_required @account_initialization_required @cloud_edition_billing_resource_check("annotation") - def post(self, app_id, action): + def post(self, app_id, action: Literal["enable", "disable"]): if not current_user.is_editor: raise Forbidden() @@ -38,8 +40,6 @@ class AnnotationReplyActionApi(Resource): result = AppAnnotationService.enable_app_annotation(args, app_id) elif action == "disable": result = AppAnnotationService.disable_app_annotation(app_id) - else: - raise ValueError("Unsupported annotation reply action") return result, 200 diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 1cc13d669c..a6eb86122d 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -2,7 +2,7 @@ import uuid from typing import cast from flask_login import current_user -from flask_restful import Resource, inputs, marshal, marshal_with, reqparse +from flask_restx import Resource, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, Forbidden, abort diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 9ffb94e9f9..aee93a8814 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -1,7 +1,7 @@ from typing import cast from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 
665cf1aede..aaf5c3dfaa 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -1,7 +1,7 @@ import logging from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import InternalServerError import services @@ -31,6 +31,8 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +logger = logging.getLogger(__name__) + class ChatMessageAudioApi(Resource): @setup_required @@ -49,7 +51,7 @@ class ChatMessageAudioApi(Resource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -70,7 +72,7 @@ class ChatMessageAudioApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("Failed to handle post request to ChatMessageAudioApi") + logger.exception("Failed to handle post request to ChatMessageAudioApi") raise InternalServerError() @@ -97,7 +99,7 @@ class ChatMessageTextApi(Resource): ) return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -118,7 +120,7 @@ class ChatMessageTextApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("Failed to handle post request to ChatMessageTextApi") + logger.exception("Failed to handle post request to ChatMessageTextApi") raise InternalServerError() @@ -160,7 +162,7 @@ class TextModesApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("Failed to handle get request to TextModesApi") + logger.exception("Failed to handle get request to TextModesApi") raise InternalServerError() diff --git a/api/controllers/console/app/completion.py b/api/controllers/console/app/completion.py index ad94112f05..701ebb0b4a 100644 --- a/api/controllers/console/app/completion.py +++ b/api/controllers/console/app/completion.py @@ -2,7 +2,7 @@ import logging import flask_login from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import InternalServerError, NotFound import services @@ -34,6 +34,8 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +logger = logging.getLogger(__name__) + # define completion message api for user class CompletionMessageApi(Resource): @@ -67,7 +69,7 @@ class CompletionMessageApi(Resource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -80,7 +82,7 @@ class CompletionMessageApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -134,7 +136,7 @@ class ChatMessageApi(Resource): except services.errors.conversation.ConversationCompletedError: 
raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -149,7 +151,7 @@ class ChatMessageApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index 6ddae6fad5..06f0218771 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -2,8 +2,8 @@ from datetime import datetime import pytz # pip install pytz from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Resource, marshal_with, reqparse +from flask_restx.inputs import int_range from sqlalchemy import func, or_ from sqlalchemy.orm import joinedload from werkzeug.exceptions import Forbidden, NotFound @@ -24,6 +24,8 @@ from libs.helper import DatetimeString from libs.login import login_required from models import Conversation, EndUser, Message, MessageAnnotation from models.model import AppMode +from services.conversation_service import ConversationService +from services.errors.conversation import ConversationNotExistsError class CompletionConversationApi(Resource): @@ -46,7 +48,9 @@ class CompletionConversationApi(Resource): parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() - query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == "completion") + query = db.select(Conversation).where( + Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False) + ) if args["keyword"]: query = query.join(Message, Message.conversation_id == Conversation.id).where( @@ -119,18 +123,11 @@ class CompletionConversationDetailApi(Resource): raise Forbidden() conversation_id = str(conversation_id) - conversation = ( - db.session.query(Conversation) - .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) - .first() - ) - - if not conversation: + try: + ConversationService.delete(app_model, conversation_id, current_user) + except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - conversation.is_deleted = True - db.session.commit() - return {"result": "success"}, 204 @@ -171,7 +168,7 @@ class ChatConversationApi(Resource): .subquery() ) - query = db.select(Conversation).where(Conversation.app_id == app_model.id) + query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.is_deleted.is_(False)) if args["keyword"]: keyword_filter = f"%{args['keyword']}%" @@ -284,18 +281,11 @@ class ChatConversationDetailApi(Resource): raise Forbidden() conversation_id = str(conversation_id) - conversation = ( - db.session.query(Conversation) - .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) - .first() - ) - - if not conversation: + try: + ConversationService.delete(app_model, conversation_id, current_user) + except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - conversation.is_deleted = True - db.session.commit() - return {"result": "success"}, 204 diff --git 
a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index d49f433ba1..5ca4c33f87 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index b46292305b..497fd53df7 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -1,7 +1,7 @@ from collections.abc import Sequence from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.app.error import ( @@ -12,6 +12,8 @@ from controllers.console.app.error import ( ) from controllers.console.wraps import account_initialization_required, setup_required from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError +from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider +from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider from core.llm_generator.llm_generator import LLMGenerator from core.model_runtime.errors.invoke import InvokeError from libs.login import login_required @@ -123,13 +125,20 @@ class InstructionGenerateApi(Resource): parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json") parser.add_argument("ideal_output", type=str, required=False, default="", location="json") args = parser.parse_args() - + code_template = ( + Python3CodeProvider.get_default_code() + if args["language"] == "python" + else (JavascriptCodeProvider.get_default_code()) + if args["language"] == "javascript" + else "" + ) try: - if args["current"] == "" and args["node_id"] != "": # Generate from nothing for a workflow node + # Generate from nothing for a workflow node + if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "": from models import App, db from services.workflow_service import WorkflowService - app = db.session.query(App).filter(App.id == args["flow_id"]).first() + app = db.session.query(App).where(App.id == args["flow_id"]).first() if not app: return {"error": f"app {args['flow_id']} not found"}, 400 workflow = WorkflowService().get_draft_workflow(app_model=app) diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index 2344fd5acb..541803e539 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -2,7 +2,7 @@ import json from enum import StrEnum from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import api diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 680ac4a64c..05b668b803 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -1,8 +1,9 @@ import logging from flask_login import current_user -from flask_restful import Resource, fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from 
flask_restx import Resource, fields, marshal_with, reqparse +from flask_restx.inputs import int_range +from sqlalchemy import exists, select from werkzeug.exceptions import Forbidden, InternalServerError, NotFound from controllers.console import api @@ -33,6 +34,8 @@ from services.errors.conversation import ConversationNotExistsError from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError from services.message_service import MessageService +logger = logging.getLogger(__name__) + class ChatMessageListApi(Resource): message_infinite_scroll_pagination_fields = { @@ -92,21 +95,18 @@ class ChatMessageListApi(Resource): .all() ) - has_more = False if len(history_messages) == args["limit"]: current_page_first_message = history_messages[-1] - rest_count = ( - db.session.query(Message) - .where( + + has_more = db.session.scalar( + select( + exists().where( Message.conversation_id == conversation.id, Message.created_at < current_page_first_message.created_at, Message.id != current_page_first_message.id, ) - .count() ) - - if rest_count > 0: - has_more = True + ) history_messages = list(reversed(history_messages)) @@ -215,7 +215,7 @@ class MessageSuggestedQuestionApi(Resource): except SuggestedQuestionsAfterAnswerDisabledError: raise AppSuggestedQuestionsAfterAnswerDisabledError() except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() return {"data": questions} diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index 029138fb6b..52ff9b923d 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -3,7 +3,7 @@ from typing import cast from flask import request from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource from controllers.console import api from controllers.console.app.wraps import get_app_model diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index 978c02412c..74c2867c2f 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import BadRequest from controllers.console import api diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index 03418f1dd2..778ce92da6 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Forbidden, NotFound from constants.languages import supported_language diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index 343b7acd7b..27e405af38 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -5,7 +5,7 @@ import pytz import sqlalchemy as sa from flask import jsonify from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.app.wraps import get_app_model diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index c58301b300..e36f308bd4 100644 
--- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -4,7 +4,7 @@ from collections.abc import Sequence from typing import cast from flask import abort, request -from flask_restful import Resource, inputs, marshal_with, reqparse +from flask_restx import Resource, inputs, marshal_with, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden, InternalServerError, NotFound @@ -72,6 +72,7 @@ class DraftWorkflowApi(Resource): Get draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() @@ -94,6 +95,7 @@ class DraftWorkflowApi(Resource): Sync draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() @@ -171,6 +173,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource): Run draft workflow """ # The role of the current user in the ta table must be admin, owner, or editor + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() @@ -205,7 +208,7 @@ class AdvancedChatDraftWorkflowRunApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -218,13 +221,12 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): """ Run draft workflow iteration node """ + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") args = parser.parse_args() @@ -242,7 +244,7 @@ class AdvancedChatDraftRunIterationNodeApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -256,11 +258,10 @@ class WorkflowDraftRunIterationNodeApi(Resource): Run draft workflow iteration node """ # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() - if not isinstance(current_user, Account): raise Forbidden() + if not current_user.is_editor: + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -279,7 +280,7 @@ class WorkflowDraftRunIterationNodeApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -292,12 +293,12 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): """ Run draft workflow loop node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() if not isinstance(current_user, Account): raise Forbidden() + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor: + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -316,7 +317,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server 
error.") + logger.exception("internal server error.") raise InternalServerError() @@ -329,12 +330,12 @@ class WorkflowDraftRunLoopNodeApi(Resource): """ Run draft workflow loop node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() if not isinstance(current_user, Account): raise Forbidden() + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor: + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, location="json") @@ -353,7 +354,7 @@ class WorkflowDraftRunLoopNodeApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -366,12 +367,12 @@ class DraftWorkflowRunApi(Resource): """ Run draft workflow """ - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() if not isinstance(current_user, Account): raise Forbidden() + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor: + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") @@ -405,6 +406,9 @@ class WorkflowTaskStopApi(Resource): """ Stop workflow task """ + + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() @@ -424,12 +428,12 @@ class DraftWorkflowNodeRunApi(Resource): """ Run draft workflow node """ - # The role of the current user in the ta table must be admin, owner, or editor - if not current_user.is_editor: - raise Forbidden() if not isinstance(current_user, Account): raise Forbidden() + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor: + raise Forbidden() parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") @@ -472,6 +476,9 @@ class PublishedWorkflowApi(Resource): """ Get published workflow """ + + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() @@ -491,13 +498,12 @@ class PublishedWorkflowApi(Resource): """ Publish workflow """ + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("marked_name", type=str, required=False, default="", location="json") parser.add_argument("marked_comment", type=str, required=False, default="", location="json") @@ -541,6 +547,9 @@ class DefaultBlockConfigsApi(Resource): """ Get default block config """ + + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() @@ -559,13 +568,12 @@ class DefaultBlockConfigApi(Resource): """ Get default block config """ + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, 
owner, or editor if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("q", type=str, location="args") args = parser.parse_args() @@ -595,13 +603,12 @@ class ConvertToWorkflowApi(Resource): Convert expert mode of chatbot app to workflow mode Convert Completion App to Workflow App """ + if not isinstance(current_user, Account): + raise Forbidden() # The role of the current user in the ta table must be admin, owner, or editor if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - if request.data: parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=False, nullable=True, location="json") @@ -645,6 +652,9 @@ class PublishedAllWorkflowApi(Resource): """ Get published workflows """ + + if not isinstance(current_user, Account): + raise Forbidden() if not current_user.is_editor: raise Forbidden() @@ -693,13 +703,12 @@ class WorkflowByIdApi(Resource): """ Update workflow attributes """ + if not isinstance(current_user, Account): + raise Forbidden() # Check permission if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - parser = reqparse.RequestParser() parser.add_argument("marked_name", type=str, required=False, location="json") parser.add_argument("marked_comment", type=str, required=False, location="json") @@ -750,13 +759,12 @@ class WorkflowByIdApi(Resource): """ Delete workflow """ + if not isinstance(current_user, Account): + raise Forbidden() # Check permission if not current_user.is_editor: raise Forbidden() - if not isinstance(current_user, Account): - raise Forbidden() - workflow_service = WorkflowService() # Create a session and manage the transaction diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index 310146a5e7..8d8cdc93cf 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -1,6 +1,6 @@ from dateutil.parser import isoparse -from flask_restful import Resource, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Resource, marshal_with, reqparse +from flask_restx.inputs import int_range from sqlalchemy.orm import Session from controllers.console import api diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index 414c07ef50..a0b73f7e07 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -2,7 +2,7 @@ import logging from typing import Any, NoReturn from flask import Response -from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse +from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy.orm import Session from werkzeug.exceptions import Forbidden @@ -21,6 +21,7 @@ from factories.file_factory import build_from_mapping, build_from_mappings from factories.variable_factory import build_segment_with_type from libs.login import current_user, login_required from models import App, AppMode, db +from models.account import Account from models.workflow import WorkflowDraftVariable from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService from services.workflow_service import WorkflowService @@ -135,6 
+136,7 @@ def _api_prerequisite(f): @account_initialization_required @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) def wrapper(*args, **kwargs): + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() return f(*args, **kwargs) diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 9099700213..dccbfd8648 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -1,8 +1,8 @@ from typing import cast from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Resource, marshal_with, reqparse +from flask_restx.inputs import int_range from controllers.console import api from controllers.console.app.wraps import get_app_model diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index 7f80afd83b..7cef175c14 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -5,7 +5,7 @@ import pytz import sqlalchemy as sa from flask import jsonify from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.app.wraps import get_app_model diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 132dc1f96b..c7e300279a 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -6,9 +6,11 @@ from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db from libs.login import current_user from models import App, AppMode +from models.account import Account def _load_app_model(app_id: str) -> Optional[App]: + assert isinstance(current_user, Account) app_model = ( db.session.query(App) .where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index 2562fb5eb8..e82e403ec2 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -1,5 +1,5 @@ from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from constants.languages import supported_language from controllers.console import api diff --git a/api/controllers/console/auth/data_source_bearer_auth.py b/api/controllers/console/auth/data_source_bearer_auth.py index b8c3c8f012..796e6916cc 100644 --- a/api/controllers/console/auth/data_source_bearer_auth.py +++ b/api/controllers/console/auth/data_source_bearer_auth.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 4940b48754..35a91a52ea 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ b/api/controllers/console/auth/data_source_oauth.py @@ -3,7 +3,7 @@ import logging import requests from flask import current_app, redirect, request from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource from werkzeug.exceptions 
import Forbidden from configs import dify_config @@ -13,6 +13,8 @@ from libs.oauth_data_source import NotionOAuth from ..wraps import account_initialization_required, setup_required +logger = logging.getLogger(__name__) + def get_oauth_providers(): with current_app.app_context(): @@ -80,7 +82,7 @@ class OAuthDataSourceBinding(Resource): try: oauth_provider.get_access_token(code) except requests.exceptions.HTTPError as e: - logging.exception( + logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) return {"error": "OAuth data source process failed"}, 400 @@ -103,7 +105,7 @@ class OAuthDataSourceSync(Resource): try: oauth_provider.sync_data_source(binding_id) except requests.exceptions.HTTPError as e: - logging.exception( + logger.exception( "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) return {"error": "OAuth data source process failed"}, 400 diff --git a/api/controllers/console/auth/error.py b/api/controllers/console/auth/error.py index 8c5e23de58..7853bef917 100644 --- a/api/controllers/console/auth/error.py +++ b/api/controllers/console/auth/error.py @@ -55,6 +55,12 @@ class EmailOrPasswordMismatchError(BaseHTTPException): code = 400 +class AuthenticationFailedError(BaseHTTPException): + error_code = "authentication_failed" + description = "Invalid email or password." + code = 401 + + class EmailPasswordLoginLimitError(BaseHTTPException): error_code = "email_code_login_limit" description = "Too many incorrect password attempts. Please try again later." diff --git a/api/controllers/console/auth/forgot_password.py b/api/controllers/console/auth/forgot_password.py index 3bbe3177fc..ede0696854 100644 --- a/api/controllers/console/auth/forgot_password.py +++ b/api/controllers/console/auth/forgot_password.py @@ -2,7 +2,7 @@ import base64 import secrets from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from sqlalchemy import select from sqlalchemy.orm import Session diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 5f2a24322d..6ed49f48ff 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -2,15 +2,15 @@ from typing import cast import flask_login from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse import services from configs import dify_config from constants.languages import languages from controllers.console import api from controllers.console.auth.error import ( + AuthenticationFailedError, EmailCodeError, - EmailOrPasswordMismatchError, EmailPasswordLoginLimitError, InvalidEmailError, InvalidTokenError, @@ -79,7 +79,7 @@ class LoginApi(Resource): raise AccountBannedError() except services.errors.account.AccountPasswordError: AccountService.add_login_error_rate_limit(args["email"]) - raise EmailOrPasswordMismatchError() + raise AuthenticationFailedError() except services.errors.account.AccountNotFoundError: if FeatureService.get_system_features().is_allow_register: token = AccountService.send_reset_password_email(email=args["email"], language=language) @@ -132,6 +132,7 @@ class ResetPasswordSendEmailApi(Resource): account = AccountService.get_user_through_email(args["email"]) except AccountRegisterError as are: raise AccountInFreezeError() + if account is None: if FeatureService.get_system_features().is_allow_register: token = 
AccountService.send_reset_password_email(email=args["email"], language=language) @@ -221,7 +222,7 @@ class EmailCodeLoginApi(Resource): email=user_email, name=user_email, interface_language=languages[0] ) except WorkSpaceNotAllowedCreateError: - return NotAllowedCreateWorkspace() + raise NotAllowedCreateWorkspace() except AccountRegisterError as are: raise AccountInFreezeError() except WorkspacesLimitExceededError: diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index 4a6cb99390..40c62f1f3e 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -3,7 +3,7 @@ from typing import Optional import requests from flask import current_app, redirect, request -from flask_restful import Resource +from flask_restx import Resource from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import Unauthorized @@ -24,6 +24,8 @@ from services.feature_service import FeatureService from .. import api +logger = logging.getLogger(__name__) + def get_oauth_providers(): with current_app.app_context(): @@ -80,7 +82,7 @@ class OAuthCallback(Resource): user_info = oauth_provider.get_user_info(token) except requests.exceptions.RequestException as e: error_text = e.response.text if e.response else str(e) - logging.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) + logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) return {"error": "OAuth process failed"}, 400 if invite_token and RegisterService.is_valid_invite_token(invite_token): diff --git a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index 4b0c82ae6c..8ebb745a60 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py index 9679632ac7..4bc073f679 100644 --- a/api/controllers/console/billing/compliance.py +++ b/api/controllers/console/billing/compliance.py @@ -1,6 +1,6 @@ from flask import request from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from libs.helper import extract_remote_ip from libs.login import login_required diff --git a/api/controllers/console/datasets/data_source.py b/api/controllers/console/datasets/data_source.py index 39f8ab5787..6083a53bec 100644 --- a/api/controllers/console/datasets/data_source.py +++ b/api/controllers/console/datasets/data_source.py @@ -2,7 +2,7 @@ import json from flask import request from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 2befd2a651..a5a18e7f33 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -1,7 +1,7 @@ -import flask_restful +import flask_restx from flask import request from 
flask_login import current_user -from flask_restful import Resource, marshal, marshal_with, reqparse +from flask_restx import Resource, marshal, marshal_with, reqparse from werkzeug.exceptions import Forbidden, NotFound import services @@ -553,7 +553,7 @@ class DatasetIndexingStatusApi(Resource): } documents_status.append(marshal(document_dict, document_status_fields)) data = {"data": documents_status} - return data + return data, 200 class DatasetApiKeyApi(Resource): @@ -589,7 +589,7 @@ class DatasetApiKeyApi(Resource): ) if current_key_count >= self.max_keys: - flask_restful.abort( + flask_restx.abort( 400, message=f"Cannot create more than {self.max_keys} API keys for this resource type.", code="max_keys_exceeded", @@ -629,7 +629,7 @@ class DatasetApiDeleteApi(Resource): ) if key is None: - flask_restful.abort(404, message="API key not found") + flask_restx.abort(404, message="API key not found") db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete() db.session.commit() diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 4e0955bd43..8d50b0d41c 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -1,10 +1,10 @@ import logging from argparse import ArgumentTypeError -from typing import cast +from typing import Literal, cast from flask import request from flask_login import current_user -from flask_restful import Resource, marshal, marshal_with, reqparse +from flask_restx import Resource, marshal, marshal_with, reqparse from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound @@ -54,6 +54,8 @@ from models import Dataset, DatasetProcessRule, Document, DocumentSegment, Uploa from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig +logger = logging.getLogger(__name__) + class DocumentResource(Resource): def get_document(self, dataset_id: str, document_id: str) -> Document: @@ -468,25 +470,11 @@ class DocumentBatchIndexingEstimateApi(DocumentResource): return {"tokens": 0, "total_price": 0, "currency": "USD", "total_segments": 0, "preview": []}, 200 data_process_rule = documents[0].dataset_process_rule data_process_rule_dict = data_process_rule.to_dict() - info_list = [] extract_settings = [] for document in documents: if document.indexing_status in {"completed", "error"}: raise DocumentAlreadyFinishedError() data_source_info = document.data_source_info_dict - # format document files info - if data_source_info and "upload_file_id" in data_source_info: - file_id = data_source_info["upload_file_id"] - info_list.append(file_id) - # format document notion info - elif ( - data_source_info and "notion_workspace_id" in data_source_info and "notion_page_id" in data_source_info - ): - pages = [] - page = {"page_id": data_source_info["notion_page_id"], "type": data_source_info["type"]} - pages.append(page) - notion_info = {"workspace_id": data_source_info["notion_workspace_id"], "pages": pages} - info_list.append(notion_info) if document.data_source_type == "upload_file": file_id = data_source_info["upload_file_id"] @@ -758,7 +746,7 @@ class DocumentProcessingApi(DocumentResource): @login_required @account_initialization_required @cloud_edition_billing_rate_limit_check("knowledge") - def patch(self, dataset_id, document_id, action): + def patch(self, dataset_id, document_id, action: Literal["pause", 
"resume"]): dataset_id = str(dataset_id) document_id = str(document_id) document = self.get_document(dataset_id, document_id) @@ -784,8 +772,6 @@ class DocumentProcessingApi(DocumentResource): document.paused_at = None document.is_paused = False db.session.commit() - else: - raise InvalidActionError() return {"result": "success"}, 200 @@ -840,7 +826,7 @@ class DocumentStatusApi(DocumentResource): @account_initialization_required @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") - def patch(self, dataset_id, action): + def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]): dataset_id = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id) if dataset is None: @@ -968,7 +954,7 @@ class DocumentRetryApi(DocumentResource): raise DocumentAlreadyFinishedError() retry_documents.append(document) except Exception: - logging.exception("Failed to retry document, document id: %s", document_id) + logger.exception("Failed to retry document, document id: %s", document_id) continue # retry document DocumentService.retry_document(dataset_id, retry_documents) diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index 8c429044d7..463fd2d7ec 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -2,7 +2,7 @@ import uuid from flask import request from flask_login import current_user -from flask_restful import Resource, marshal, reqparse +from flask_restx import Resource, marshal, reqparse from sqlalchemy import select from werkzeug.exceptions import Forbidden, NotFound @@ -584,7 +584,12 @@ class ChildChunkUpdateApi(Resource): child_chunk_id = str(child_chunk_id) child_chunk = ( db.session.query(ChildChunk) - .where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id) + .where( + ChildChunk.id == str(child_chunk_id), + ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.segment_id == segment.id, + ChildChunk.document_id == document_id, + ) .first() ) if not child_chunk: @@ -633,7 +638,12 @@ class ChildChunkUpdateApi(Resource): child_chunk_id = str(child_chunk_id) child_chunk = ( db.session.query(ChildChunk) - .where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id) + .where( + ChildChunk.id == str(child_chunk_id), + ChildChunk.tenant_id == current_user.current_tenant_id, + ChildChunk.segment_id == segment.id, + ChildChunk.document_id == document_id, + ) .first() ) if not child_chunk: diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py index cf9081e154..043f39f623 100644 --- a/api/controllers/console/datasets/external.py +++ b/api/controllers/console/datasets/external.py @@ -1,6 +1,6 @@ from flask import request from flask_login import current_user -from flask_restful import Resource, marshal, reqparse +from flask_restx import Resource, marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services diff --git a/api/controllers/console/datasets/hit_testing.py b/api/controllers/console/datasets/hit_testing.py index fba5d4c0f3..2ad192571b 100644 --- a/api/controllers/console/datasets/hit_testing.py +++ b/api/controllers/console/datasets/hit_testing.py @@ -1,4 +1,4 @@ -from flask_restful import Resource +from flask_restx import Resource from controllers.console import api from 
controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase diff --git a/api/controllers/console/datasets/hit_testing_base.py b/api/controllers/console/datasets/hit_testing_base.py index 3b4c076863..cfbfc50873 100644 --- a/api/controllers/console/datasets/hit_testing_base.py +++ b/api/controllers/console/datasets/hit_testing_base.py @@ -1,7 +1,7 @@ import logging from flask_login import current_user -from flask_restful import marshal, reqparse +from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, InternalServerError, NotFound import services.dataset_service @@ -23,6 +23,8 @@ from fields.hit_testing_fields import hit_testing_record_fields from services.dataset_service import DatasetService from services.hit_testing_service import HitTestingService +logger = logging.getLogger(__name__) + class DatasetsHitTestingBase: @staticmethod @@ -81,5 +83,5 @@ class DatasetsHitTestingBase: except ValueError as e: raise ValueError(str(e)) except Exception as e: - logging.exception("Hit testing failed.") + logger.exception("Hit testing failed.") raise InternalServerError(str(e)) diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py index 65f76fb402..6aa309f930 100644 --- a/api/controllers/console/datasets/metadata.py +++ b/api/controllers/console/datasets/metadata.py @@ -1,5 +1,7 @@ +from typing import Literal + from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import NotFound from controllers.console import api @@ -100,7 +102,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource): @login_required @account_initialization_required @enterprise_license_required - def post(self, dataset_id, action): + def post(self, dataset_id, action: Literal["enable", "disable"]): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: diff --git a/api/controllers/console/datasets/upload_file.py b/api/controllers/console/datasets/upload_file.py deleted file mode 100644 index 9b456c771d..0000000000 --- a/api/controllers/console/datasets/upload_file.py +++ /dev/null @@ -1,62 +0,0 @@ -from flask_login import current_user -from flask_restful import Resource -from werkzeug.exceptions import NotFound - -from controllers.console import api -from controllers.console.wraps import ( - account_initialization_required, - setup_required, -) -from core.file import helpers as file_helpers -from extensions.ext_database import db -from models.dataset import Dataset -from models.model import UploadFile -from services.dataset_service import DocumentService - - -class UploadFileApi(Resource): - @setup_required - @account_initialization_required - def get(self, dataset_id, document_id): - """Get upload file.""" - # check dataset - dataset_id = str(dataset_id) - dataset = ( - db.session.query(Dataset) - .filter(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == dataset_id) - .first() - ) - if not dataset: - raise NotFound("Dataset not found.") - # check document - document_id = str(document_id) - document = DocumentService.get_document(dataset.id, document_id) - if not document: - raise NotFound("Document not found.") - # check upload file - if document.data_source_type != "upload_file": - raise ValueError(f"Document data source type ({document.data_source_type}) is not upload_file.") - data_source_info = document.data_source_info_dict - if 
data_source_info and "upload_file_id" in data_source_info: - file_id = data_source_info["upload_file_id"] - upload_file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first() - if not upload_file: - raise NotFound("UploadFile not found.") - else: - raise ValueError("Upload file id not found in document data source info.") - - url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id) - return { - "id": upload_file.id, - "name": upload_file.name, - "size": upload_file.size, - "extension": upload_file.extension, - "url": url, - "download_url": f"{url}&as_attachment=true", - "mime_type": upload_file.mime_type, - "created_by": upload_file.created_by, - "created_at": upload_file.created_at.timestamp(), - }, 200 - - -api.add_resource(UploadFileApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/upload-file") diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py index fcdc91ec67..bdaa268462 100644 --- a/api/controllers/console/datasets/website.py +++ b/api/controllers/console/datasets/website.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console import api from controllers.console.datasets.error import WebsiteCrawlError diff --git a/api/controllers/console/explore/audio.py b/api/controllers/console/explore/audio.py index d564a00a76..dc275fe18a 100644 --- a/api/controllers/console/explore/audio.py +++ b/api/controllers/console/explore/audio.py @@ -26,6 +26,8 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +logger = logging.getLogger(__name__) + class ChatAudioApi(InstalledAppResource): def post(self, installed_app): @@ -38,7 +40,7 @@ class ChatAudioApi(InstalledAppResource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -59,13 +61,13 @@ class ChatAudioApi(InstalledAppResource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() class ChatTextApi(InstalledAppResource): def post(self, installed_app): - from flask_restful import reqparse + from flask_restx import reqparse app_model = installed_app.app try: @@ -83,7 +85,7 @@ class ChatTextApi(InstalledAppResource): response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id) return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -104,5 +106,5 @@ class ChatTextApi(InstalledAppResource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() diff --git a/api/controllers/console/explore/completion.py b/api/controllers/console/explore/completion.py index 4842fefc57..cc46f54ea3 100644 --- a/api/controllers/console/explore/completion.py +++ b/api/controllers/console/explore/completion.py @@ -1,7 +1,7 @@ import logging from flask_login import current_user -from flask_restful import reqparse +from flask_restx import reqparse from werkzeug.exceptions import InternalServerError, 
NotFound import services @@ -32,6 +32,8 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +logger = logging.getLogger(__name__) + # define completion api for user class CompletionApi(InstalledAppResource): @@ -65,7 +67,7 @@ class CompletionApi(InstalledAppResource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -78,7 +80,7 @@ class CompletionApi(InstalledAppResource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -125,7 +127,7 @@ class ChatApi(InstalledAppResource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -140,7 +142,7 @@ class ChatApi(InstalledAppResource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() diff --git a/api/controllers/console/explore/conversation.py b/api/controllers/console/explore/conversation.py index d7c161cc6d..a8d46954b5 100644 --- a/api/controllers/console/explore/conversation.py +++ b/api/controllers/console/explore/conversation.py @@ -1,6 +1,6 @@ from flask_login import current_user -from flask_restful import marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import marshal_with, reqparse +from flask_restx.inputs import int_range from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index ad62bd6e08..3ccedd654b 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -3,7 +3,7 @@ from typing import Any from flask import request from flask_login import current_user -from flask_restful import Resource, inputs, marshal_with, reqparse +from flask_restx import Resource, inputs, marshal_with, reqparse from sqlalchemy import and_ from werkzeug.exceptions import BadRequest, Forbidden, NotFound diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py index de95a9e7b0..608bc6d007 100644 --- a/api/controllers/console/explore/message.py +++ b/api/controllers/console/explore/message.py @@ -1,8 +1,8 @@ import logging from flask_login import current_user -from flask_restful import marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import marshal_with, reqparse +from flask_restx.inputs import int_range from werkzeug.exceptions import InternalServerError, NotFound from controllers.console.app.error import ( @@ -35,6 +35,8 @@ from services.errors.message import ( ) from services.message_service import MessageService +logger = logging.getLogger(__name__) + class 
MessageListApi(InstalledAppResource): @marshal_with(message_infinite_scroll_pagination_fields) @@ -126,7 +128,7 @@ class MessageMoreLikeThisApi(InstalledAppResource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -158,7 +160,7 @@ class MessageSuggestedQuestionApi(InstalledAppResource): except InvokeError as e: raise CompletionRequestError(e.description) except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() return {"data": questions} diff --git a/api/controllers/console/explore/parameter.py b/api/controllers/console/explore/parameter.py index a1280d91d1..c368744759 100644 --- a/api/controllers/console/explore/parameter.py +++ b/api/controllers/console/explore/parameter.py @@ -1,4 +1,4 @@ -from flask_restful import marshal_with +from flask_restx import marshal_with from controllers.common import fields from controllers.console import api diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py index ce85f495aa..62f9350b71 100644 --- a/api/controllers/console/explore/recommended_app.py +++ b/api/controllers/console/explore/recommended_app.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, fields, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from constants.languages import languages from controllers.console import api diff --git a/api/controllers/console/explore/saved_message.py b/api/controllers/console/explore/saved_message.py index 339e7007a0..5353dbcad5 100644 --- a/api/controllers/console/explore/saved_message.py +++ b/api/controllers/console/explore/saved_message.py @@ -1,6 +1,6 @@ from flask_login import current_user -from flask_restful import fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import fields, marshal_with, reqparse +from flask_restx.inputs import int_range from werkzeug.exceptions import NotFound from controllers.console import api diff --git a/api/controllers/console/explore/workflow.py b/api/controllers/console/explore/workflow.py index 3f625e6609..0a5a88d6f5 100644 --- a/api/controllers/console/explore/workflow.py +++ b/api/controllers/console/explore/workflow.py @@ -1,6 +1,8 @@ import logging -from flask_restful import reqparse +from flask_restx import reqparse from werkzeug.exceptions import InternalServerError +logger = logging.getLogger(__name__) + from controllers.console.app.error import ( @@ -43,7 +43,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource): parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") parser.add_argument("files", type=list, required=False, location="json") args = parser.parse_args() - + assert current_user is not None try: response = AppGenerateService.generate( app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True @@ -63,7 +63,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() @@ -76,6 +76,7 @@ class InstalledAppWorkflowTaskStopApi(InstalledAppResource): app_mode = AppMode.value_of(app_model.mode) if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() + assert current_user is not None 
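The hunks above repeatedly add `assert current_user is not None` before `current_user` is dereferenced. A minimal sketch of the type-narrowing idea behind that change, assuming only that the login proxy is typed as Optional the way flask_login's is; `User` and `stop_task` are illustrative stand-ins, not names from this diff:

```python
# Sketch: `assert` narrows an Optional before attribute access, which is what
# the added `assert current_user is not None` lines accomplish for checkers.
from typing import Optional


class User:
    id: str = "user-1"


current_user: Optional[User] = User()


def stop_task(task_id: str) -> None:
    # Without this assert, a static checker flags `current_user.id`, since
    # the login proxy may be anonymous/None outside a request context.
    assert current_user is not None
    print(f"stop flag set for {task_id} by {current_user.id}")


stop_task("task-123")
```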
AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id) diff --git a/api/controllers/console/explore/wraps.py b/api/controllers/console/explore/wraps.py index de97fb149e..e86103184a 100644 --- a/api/controllers/console/explore/wraps.py +++ b/api/controllers/console/explore/wraps.py @@ -1,7 +1,7 @@ from functools import wraps from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource from werkzeug.exceptions import NotFound from controllers.console.explore.error import AppAccessDeniedError diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index 07a241ef86..e157041c35 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from constants import HIDDEN_VALUE from controllers.console import api diff --git a/api/controllers/console/feature.py b/api/controllers/console/feature.py index 70ab4ff865..6236832d39 100644 --- a/api/controllers/console/feature.py +++ b/api/controllers/console/feature.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource from libs.login import login_required from services.feature_service import FeatureService diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py index a87d270e9c..101a49a32e 100644 --- a/api/controllers/console/files.py +++ b/api/controllers/console/files.py @@ -2,7 +2,7 @@ from typing import Literal from flask import request from flask_login import current_user -from flask_restful import Resource, marshal_with +from flask_restx import Resource, marshal_with from werkzeug.exceptions import Forbidden import services diff --git a/api/controllers/console/init_validate.py b/api/controllers/console/init_validate.py index b19e331d2e..2a37b1708a 100644 --- a/api/controllers/console/init_validate.py +++ b/api/controllers/console/init_validate.py @@ -1,7 +1,7 @@ import os from flask import session -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from sqlalchemy import select from sqlalchemy.orm import Session diff --git a/api/controllers/console/ping.py b/api/controllers/console/ping.py index cd28cc946e..1a53a2347e 100644 --- a/api/controllers/console/ping.py +++ b/api/controllers/console/ping.py @@ -1,4 +1,4 @@ -from flask_restful import Resource +from flask_restx import Resource from controllers.console import api diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index c356113c40..73014cfc97 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -3,7 +3,7 @@ from typing import cast import httpx from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse import services from controllers.common import helpers diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index e1f19a87a3..8e230496f0 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -1,5 +1,5 @@ from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from configs import dify_config from libs.helper import StrLen, email, extract_remote_ip diff --git 
a/api/controllers/console/tag/tags.py b/api/controllers/console/tag/tags.py index cb5dedca21..c45e7dbb26 100644 --- a/api/controllers/console/tag/tags.py +++ b/api/controllers/console/tag/tags.py @@ -1,11 +1,11 @@ from flask import request from flask_login import current_user -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api from controllers.console.wraps import account_initialization_required, setup_required -from fields.tag_fields import tag_fields +from fields.tag_fields import dataset_tag_fields from libs.login import login_required from models.model import Tag from services.tag_service import TagService @@ -21,7 +21,7 @@ class TagListApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(tag_fields) + @marshal_with(dataset_tag_fields) def get(self): tag_type = request.args.get("type", type=str, default="") keyword = request.args.get("keyword", default=None, type=str) diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 894785abc8..95515c38f9 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -2,13 +2,15 @@ import json import logging import requests -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from packaging import version from configs import dify_config from . import api +logger = logging.getLogger(__name__) + class VersionApi(Resource): def get(self): @@ -34,7 +36,7 @@ class VersionApi(Resource): try: response = requests.get(check_update_url, {"current_version": args.get("current_version")}, timeout=(3, 10)) except Exception as error: - logging.warning("Check update version error: %s.", str(error)) + logger.warning("Check update version error: %s.", str(error)) result["version"] = args.get("current_version") return result @@ -55,7 +57,7 @@ def _has_new_version(*, latest_version: str, current_version: str) -> bool: # Compare versions return latest > current except version.InvalidVersion: - logging.warning("Invalid version format: latest=%s, current=%s", latest_version, current_version) + logger.warning("Invalid version format: latest=%s, current=%s", latest_version, current_version) return False diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 4d5357cd18..5b2828dbab 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -1,7 +1,9 @@ +from datetime import datetime + import pytz from flask import request from flask_login import current_user -from flask_restful import Resource, fields, marshal_with, reqparse +from flask_restx import Resource, fields, marshal_with, reqparse from sqlalchemy import select from sqlalchemy.orm import Session @@ -327,6 +329,9 @@ class EducationVerifyApi(Resource): class EducationApi(Resource): status_fields = { "result": fields.Boolean, + "is_student": fields.Boolean, + "expire_at": TimestampField, + "allow_refresh": fields.Boolean, } @setup_required @@ -354,7 +359,11 @@ class EducationApi(Resource): def get(self): account = current_user - return BillingService.EducationIdentity.is_active(account.id) + res = BillingService.EducationIdentity.status(account.id) + # convert expire_at to UTC timestamp from isoformat + if res and "expire_at" in res: + res["expire_at"] = datetime.fromisoformat(res["expire_at"]).astimezone(pytz.utc) + return res 
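The `EducationApi.get` change above replaces a bare boolean with the billing service's status payload and normalizes `expire_at` before `status_fields` marshals it. A minimal sketch of that conversion, assuming the service returns an ISO-8601 string with a timezone offset (the payload below is an invented example, not data from this diff):

```python
# Sketch of the expire_at normalization above; the dict is an assumed example
# of what BillingService.EducationIdentity.status might return.
from datetime import datetime

import pytz

res = {"result": True, "is_student": True, "allow_refresh": False, "expire_at": "2025-06-30T00:00:00+08:00"}
if res and "expire_at" in res:
    # Parse the offset-aware ISO string, then normalize to UTC so the
    # TimestampField in status_fields serializes a consistent epoch value.
    res["expire_at"] = datetime.fromisoformat(res["expire_at"]).astimezone(pytz.utc)

print(res["expire_at"])  # 2025-06-29 16:00:00+00:00
```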
class EducationAutoCompleteApi(Resource): diff --git a/api/controllers/console/workspace/agent_providers.py b/api/controllers/console/workspace/agent_providers.py index 88c37767e3..08bab6fcb5 100644 --- a/api/controllers/console/workspace/agent_providers.py +++ b/api/controllers/console/workspace/agent_providers.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource from controllers.console import api from controllers.console.wraps import account_initialization_required, setup_required diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py index eb53dcb16e..96e873d42b 100644 --- a/api/controllers/console/workspace/endpoint.py +++ b/api/controllers/console/workspace/endpoint.py @@ -1,5 +1,5 @@ from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py index b4eb5e246b..7c1bc7c075 100644 --- a/api/controllers/console/workspace/load_balancing_config.py +++ b/api/controllers/console/workspace/load_balancing_config.py @@ -1,4 +1,4 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api @@ -6,7 +6,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from libs.login import current_user, login_required -from models.account import TenantAccountRole +from models.account import Account, TenantAccountRole from services.model_load_balancing_service import ModelLoadBalancingService @@ -15,10 +15,12 @@ class LoadBalancingCredentialsValidateApi(Resource): @login_required @account_initialization_required def post(self, provider: str): + assert isinstance(current_user, Account) if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() tenant_id = current_user.current_tenant_id + assert tenant_id is not None parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") @@ -64,10 +66,12 @@ class LoadBalancingConfigCredentialsValidateApi(Resource): @login_required @account_initialization_required def post(self, provider: str, config_id: str): + assert isinstance(current_user, Account) if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() tenant_id = current_user.current_tenant_id + assert tenant_id is not None parser = reqparse.RequestParser() parser.add_argument("model", type=str, required=True, nullable=False, location="json") diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index f7424923b9..cf2a10f453 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -2,7 +2,7 @@ from urllib import parse from flask import request from flask_login import current_user -from flask_restful import Resource, abort, marshal_with, reqparse +from flask_restx import Resource, abort, marshal_with, reqparse import services from configs import dify_config @@ -54,7 +54,7 @@ class MemberInviteEmailApi(Resource): 
@cloud_edition_billing_resource_check("members") def post(self): parser = reqparse.RequestParser() - parser.add_argument("emails", type=str, required=True, location="json", action="append") + parser.add_argument("emails", type=list, required=True, location="json") parser.add_argument("role", type=str, required=True, default="admin", location="json") parser.add_argument("language", type=str, required=False, location="json") args = parser.parse_args() diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index ff0fcbda6e..3861fb8e99 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -2,7 +2,7 @@ import io from flask import send_file from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api @@ -10,6 +10,7 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder +from libs.helper import StrLen, uuid_value from libs.login import login_required from services.billing_service import BillingService from services.model_provider_service import ModelProviderService @@ -45,12 +46,109 @@ class ModelProviderCredentialApi(Resource): @account_initialization_required def get(self, provider: str): tenant_id = current_user.current_tenant_id + # if credential_id is not provided, return current used credential + parser = reqparse.RequestParser() + parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args") + args = parser.parse_args() model_provider_service = ModelProviderService() - credentials = model_provider_service.get_provider_credentials(tenant_id=tenant_id, provider=provider) + credentials = model_provider_service.get_provider_credential( + tenant_id=tenant_id, provider=provider, credential_id=args.get("credential_id") + ) return {"credentials": credentials} + @setup_required + @login_required + @account_initialization_required + def post(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + args = parser.parse_args() + + model_provider_service = ModelProviderService() + + try: + model_provider_service.create_provider_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + credentials=args["credentials"], + credential_name=args["name"], + ) + except CredentialsValidateFailedError as ex: + raise ValueError(str(ex)) + + return {"result": "success"}, 201 + + @setup_required + @login_required + @account_initialization_required + def put(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") + parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + args = parser.parse_args() + 
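The new credential endpoints above validate their JSON bodies with `reqparse` custom types such as `StrLen(30)` and `uuid_value` from `libs.helper`. A hedged sketch of how a `StrLen`-style type plugs into the parser; this is a stand-in illustration, and Dify's actual `libs.helper.StrLen` may differ in detail:

```python
# Stand-in for a StrLen-style reqparse type: any callable that returns the
# value or raises ValueError works as `type=` and yields a 400 on failure.
from flask import Flask
from flask_restx import reqparse


class StrLen:
    """Rejects strings longer than max_length."""

    def __init__(self, max_length: int):
        self.max_length = max_length

    def __call__(self, value: str) -> str:
        if len(value) > self.max_length:
            raise ValueError(f"must be at most {self.max_length} characters")
        return value


parser = reqparse.RequestParser()
parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json")

app = Flask(__name__)
with app.test_request_context(json={"name": "team-openai-key"}):
    print(parser.parse_args()["name"])  # team-openai-key
```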
+ model_provider_service = ModelProviderService() + + try: + model_provider_service.update_provider_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + credentials=args["credentials"], + credential_id=args["credential_id"], + credential_name=args["name"], + ) + except CredentialsValidateFailedError as ex: + raise ValueError(str(ex)) + + return {"result": "success"} + + @setup_required + @login_required + @account_initialization_required + def delete(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + parser = reqparse.RequestParser() + parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") + args = parser.parse_args() + + model_provider_service = ModelProviderService() + model_provider_service.remove_provider_credential( + tenant_id=current_user.current_tenant_id, provider=provider, credential_id=args["credential_id"] + ) + + return {"result": "success"}, 204 + + +class ModelProviderCredentialSwitchApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + parser = reqparse.RequestParser() + parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + args = parser.parse_args() + + service = ModelProviderService() + service.switch_active_provider_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + credential_id=args["credential_id"], + ) + return {"result": "success"} + class ModelProviderValidateApi(Resource): @setup_required @@ -69,7 +167,7 @@ class ModelProviderValidateApi(Resource): error = "" try: - model_provider_service.provider_credentials_validate( + model_provider_service.validate_provider_credentials( tenant_id=tenant_id, provider=provider, credentials=args["credentials"] ) except CredentialsValidateFailedError as ex: @@ -84,42 +182,6 @@ class ModelProviderValidateApi(Resource): return response -class ModelProviderApi(Resource): - @setup_required - @login_required - @account_initialization_required - def post(self, provider: str): - if not current_user.is_admin_or_owner: - raise Forbidden() - - parser = reqparse.RequestParser() - parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") - args = parser.parse_args() - - model_provider_service = ModelProviderService() - - try: - model_provider_service.save_provider_credentials( - tenant_id=current_user.current_tenant_id, provider=provider, credentials=args["credentials"] - ) - except CredentialsValidateFailedError as ex: - raise ValueError(str(ex)) - - return {"result": "success"}, 201 - - @setup_required - @login_required - @account_initialization_required - def delete(self, provider: str): - if not current_user.is_admin_or_owner: - raise Forbidden() - - model_provider_service = ModelProviderService() - model_provider_service.remove_provider_credentials(tenant_id=current_user.current_tenant_id, provider=provider) - - return {"result": "success"}, 204 - - class ModelProviderIconApi(Resource): """ Get model provider icon @@ -187,8 +249,10 @@ class ModelProviderPaymentCheckoutUrlApi(Resource): api.add_resource(ModelProviderListApi, "/workspaces/current/model-providers") api.add_resource(ModelProviderCredentialApi, "/workspaces/current/model-providers/<path:provider>/credentials") +api.add_resource( + ModelProviderCredentialSwitchApi, "/workspaces/current/model-providers/<path:provider>/credentials/switch" +) 
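Taken together, `ModelProviderCredentialApi` and the new `ModelProviderCredentialSwitchApi` replace the old single-credential `ModelProviderApi` with named, multi-credential management. A hedged usage sketch against the routes registered above; the base URL, auth header, provider name, and credential id are illustrative placeholders, not values from this diff:

```python
# Illustrative client calls for the endpoints registered above. Base URL,
# session token, provider name, and credential_id are all placeholders.
import requests

BASE = "http://localhost:5001/console/api"
HEADERS = {"Authorization": "Bearer <console-session-token>"}

# Create a named provider credential (the new POST handler returns 201).
requests.post(
    f"{BASE}/workspaces/current/model-providers/openai/credentials",
    headers=HEADERS,
    json={"name": "team-key", "credentials": {"openai_api_key": "sk-..."}},
)

# Promote a previously created credential to the active one for the provider.
requests.post(
    f"{BASE}/workspaces/current/model-providers/openai/credentials/switch",
    headers=HEADERS,
    json={"credential_id": "11111111-2222-3333-4444-555555555555"},
)
```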
api.add_resource(ModelProviderValidateApi, "/workspaces/current/model-providers/<path:provider>/credentials/validate") -api.add_resource(ModelProviderApi, "/workspaces/current/model-providers/<path:provider>") api.add_resource( PreferredProviderTypeUpdateApi, "/workspaces/current/model-providers/<path:provider>/preferred-provider-type" diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index 514d1084c4..35fc61e48a 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -1,7 +1,7 @@ import logging from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from controllers.console import api @@ -9,10 +9,13 @@ from controllers.console.wraps import account_initialization_required, setup_req from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.utils.encoders import jsonable_encoder +from libs.helper import StrLen, uuid_value from libs.login import login_required from services.model_load_balancing_service import ModelLoadBalancingService from services.model_provider_service import ModelProviderService +logger = logging.getLogger(__name__) + class DefaultModelApi(Resource): @setup_required @@ -72,7 +75,7 @@ class DefaultModelApi(Resource): model=model_setting["model"], ) except Exception as ex: - logging.exception( + logger.exception( "Failed to update default model, model type: %s, model: %s", model_setting["model_type"], model_setting.get("model"), @@ -98,6 +101,7 @@ class ModelProviderModelApi(Resource): @login_required @account_initialization_required def post(self, provider: str): + # To save the model's load balance configs if not current_user.is_admin_or_owner: raise Forbidden() @@ -113,22 +117,26 @@ class ModelProviderModelApi(Resource): choices=[mt.value for mt in ModelType], location="json", ) - parser.add_argument("credentials", type=dict, required=False, nullable=True, location="json") parser.add_argument("load_balancing", type=dict, required=False, nullable=True, location="json") parser.add_argument("config_from", type=str, required=False, nullable=True, location="json") + parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="json") args = parser.parse_args() + if args.get("config_from", "") == "custom-model": + if not args.get("credential_id"): + raise ValueError("credential_id is required when configuring a custom-model") + service = ModelProviderService() + service.switch_active_custom_model_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + model_type=args["model_type"], + model=args["model"], + credential_id=args["credential_id"], + ) + model_load_balancing_service = ModelLoadBalancingService() - if ( - "load_balancing" in args - and args["load_balancing"] - and "enabled" in args["load_balancing"] - and args["load_balancing"]["enabled"] - ): - if "configs" not in args["load_balancing"]: - raise ValueError("invalid load balancing configs") - + if "load_balancing" in args and args["load_balancing"] and "configs" in args["load_balancing"]: # save load balancing configs model_load_balancing_service.update_load_balancing_configs( tenant_id=tenant_id, @@ -136,37 +144,17 @@ class ModelProviderModelApi(Resource): model=args["model"], model_type=args["model_type"], configs=args["load_balancing"]["configs"], + 
config_from=args.get("config_from", ""), ) - # enable load balancing - model_load_balancing_service.enable_model_load_balancing( - tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] - ) - else: - # disable load balancing - model_load_balancing_service.disable_model_load_balancing( - tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] - ) - - if args.get("config_from", "") != "predefined-model": - model_provider_service = ModelProviderService() - - try: - model_provider_service.save_model_credentials( - tenant_id=tenant_id, - provider=provider, - model=args["model"], - model_type=args["model_type"], - credentials=args["credentials"], - ) - except CredentialsValidateFailedError as ex: - logging.exception( - "Failed to save model credentials, tenant_id: %s, model: %s, model_type: %s", - tenant_id, - args.get("model"), - args.get("model_type"), - ) - raise ValueError(str(ex)) + if args.get("load_balancing", {}).get("enabled"): + model_load_balancing_service.enable_model_load_balancing( + tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] + ) + else: + model_load_balancing_service.disable_model_load_balancing( + tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] + ) return {"result": "success"}, 200 @@ -192,7 +180,7 @@ class ModelProviderModelApi(Resource): args = parser.parse_args() model_provider_service = ModelProviderService() - model_provider_service.remove_model_credentials( + model_provider_service.remove_model( tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] ) @@ -216,11 +204,17 @@ class ModelProviderModelCredentialApi(Resource): choices=[mt.value for mt in ModelType], location="args", ) + parser.add_argument("config_from", type=str, required=False, nullable=True, location="args") + parser.add_argument("credential_id", type=uuid_value, required=False, nullable=True, location="args") args = parser.parse_args() model_provider_service = ModelProviderService() - credentials = model_provider_service.get_model_credentials( - tenant_id=tenant_id, provider=provider, model_type=args["model_type"], model=args["model"] + current_credential = model_provider_service.get_model_credential( + tenant_id=tenant_id, + provider=provider, + model_type=args["model_type"], + model=args["model"], + credential_id=args.get("credential_id"), ) model_load_balancing_service = ModelLoadBalancingService() @@ -228,10 +222,173 @@ class ModelProviderModelCredentialApi(Resource): tenant_id=tenant_id, provider=provider, model=args["model"], model_type=args["model_type"] ) - return { - "credentials": credentials, - "load_balancing": {"enabled": is_load_balancing_enabled, "configs": load_balancing_configs}, - } + if args.get("config_from", "") == "predefined-model": + available_credentials = model_provider_service.provider_manager.get_provider_available_credentials( + tenant_id=tenant_id, provider_name=provider + ) + else: + model_type = ModelType.value_of(args["model_type"]).to_origin_model_type() + available_credentials = model_provider_service.provider_manager.get_provider_model_available_credentials( + tenant_id=tenant_id, provider_name=provider, model_type=model_type, model_name=args["model"] + ) + + return jsonable_encoder( + { + "credentials": current_credential.get("credentials") if current_credential else {}, + "current_credential_id": current_credential.get("current_credential_id") + if current_credential + else 
None, + "current_credential_name": current_credential.get("current_credential_name") + if current_credential + else None, + "load_balancing": {"enabled": is_load_balancing_enabled, "configs": load_balancing_configs}, + "available_credentials": available_credentials, + } + ) + + @setup_required + @login_required + @account_initialization_required + def post(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("model", type=str, required=True, nullable=False, location="json") + parser.add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + args = parser.parse_args() + + tenant_id = current_user.current_tenant_id + model_provider_service = ModelProviderService() + + try: + model_provider_service.create_model_credential( + tenant_id=tenant_id, + provider=provider, + model=args["model"], + model_type=args["model_type"], + credentials=args["credentials"], + credential_name=args["name"], + ) + except CredentialsValidateFailedError as ex: + logger.exception( + "Failed to save model credentials, tenant_id: %s, model: %s, model_type: %s", + tenant_id, + args.get("model"), + args.get("model_type"), + ) + raise ValueError(str(ex)) + + return {"result": "success"}, 201 + + @setup_required + @login_required + @account_initialization_required + def put(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("model", type=str, required=True, nullable=False, location="json") + parser.add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") + parser.add_argument("credentials", type=dict, required=True, nullable=False, location="json") + parser.add_argument("name", type=StrLen(30), required=True, nullable=False, location="json") + args = parser.parse_args() + + model_provider_service = ModelProviderService() + + try: + model_provider_service.update_model_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + model_type=args["model_type"], + model=args["model"], + credentials=args["credentials"], + credential_id=args["credential_id"], + credential_name=args["name"], + ) + except CredentialsValidateFailedError as ex: + raise ValueError(str(ex)) + + return {"result": "success"} + + @setup_required + @login_required + @account_initialization_required + def delete(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + parser = reqparse.RequestParser() + parser.add_argument("model", type=str, required=True, nullable=False, location="json") + parser.add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + parser.add_argument("credential_id", type=uuid_value, required=True, nullable=False, location="json") + args = parser.parse_args() + + model_provider_service = ModelProviderService() + model_provider_service.remove_model_credential( + tenant_id=current_user.current_tenant_id, + provider=provider, + model_type=args["model_type"], + 
model=args["model"], + credential_id=args["credential_id"], + ) + + return {"result": "success"}, 204 + + +class ModelProviderModelCredentialSwitchApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, provider: str): + if not current_user.is_admin_or_owner: + raise Forbidden() + parser = reqparse.RequestParser() + parser.add_argument("model", type=str, required=True, nullable=False, location="json") + parser.add_argument( + "model_type", + type=str, + required=True, + nullable=False, + choices=[mt.value for mt in ModelType], + location="json", + ) + parser.add_argument("credential_id", type=str, required=True, nullable=False, location="json") + args = parser.parse_args() + + service = ModelProviderService() + service.add_model_credential_to_model_list( + tenant_id=current_user.current_tenant_id, + provider=provider, + model_type=args["model_type"], + model=args["model"], + credential_id=args["credential_id"], + ) + return {"result": "success"} class ModelProviderModelEnableApi(Resource): @@ -314,7 +471,7 @@ class ModelProviderModelValidateApi(Resource): error = "" try: - model_provider_service.model_credentials_validate( + model_provider_service.validate_model_credentials( tenant_id=tenant_id, provider=provider, model=args["model"], @@ -379,6 +536,10 @@ api.add_resource( api.add_resource( ModelProviderModelCredentialApi, "/workspaces/current/model-providers/<path:provider>/models/credentials" ) +api.add_resource( + ModelProviderModelCredentialSwitchApi, + "/workspaces/current/model-providers/<path:provider>/models/credentials/switch", +) api.add_resource( ModelProviderModelValidateApi, "/workspaces/current/model-providers/<path:provider>/models/credentials/validate" ) diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index 09846d5c94..fd5421fa64 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -2,7 +2,7 @@ import io from flask import request, send_file from flask_login import current_user -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden from configs import dify_config diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index 8c8b73b45d..d9f2e45ddf 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -3,7 +3,7 @@ from urllib.parse import urlparse from flask import make_response, redirect, request, send_file from flask_login import current_user -from flask_restful import ( +from flask_restx import ( Resource, reqparse, ) @@ -95,7 +95,6 @@ class ToolBuiltinProviderInfoApi(Resource): def get(self, provider): user = current_user - user_id = user.id tenant_id = user.current_tenant_id return jsonable_encoder(BuiltinToolManageService.get_builtin_tool_provider_info(tenant_id, provider)) diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index f4f0078da7..e7a3aca66c 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -2,7 +2,7 @@ import logging from flask import request from flask_login import current_user -from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse +from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse from sqlalchemy import select from werkzeug.exceptions import Unauthorized 
@@ -31,6 +31,9 @@ from services.feature_service import FeatureService from services.file_service import FileService from services.workspace_service import WorkspaceService +logger = logging.getLogger(__name__) + + provider_fields = { "provider_name": fields.String, "provider_type": fields.String, @@ -120,7 +123,7 @@ class TenantApi(Resource): @marshal_with(tenant_fields) def get(self): if request.path == "/info": - logging.warning("Deprecated URL /info was used.") + logger.warning("Deprecated URL /info was used.") tenant = current_user.current_tenant diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index d862dac373..d3fd1d52e5 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -1,3 +1,4 @@ +import contextlib import json import os import time @@ -178,7 +179,7 @@ def cloud_edition_billing_rate_limit_check(resource: str): def cloud_utm_record(view): @wraps(view) def decorated(*args, **kwargs): - try: + with contextlib.suppress(Exception): features = FeatureService.get_features(current_user.current_tenant_id) if features.billing.enabled: @@ -187,8 +188,7 @@ def cloud_utm_record(view): if utm_info: utm_info_dict: dict = json.loads(utm_info) OperationService.record_utm(current_user.current_tenant_id, utm_info_dict) - except Exception as e: - pass + return view(*args, **kwargs) return decorated diff --git a/api/controllers/files/__init__.py b/api/controllers/files/__init__.py index d4c3245708..821ad220a2 100644 --- a/api/controllers/files/__init__.py +++ b/api/controllers/files/__init__.py @@ -1,9 +1,20 @@ from flask import Blueprint +from flask_restx import Namespace from libs.external_api import ExternalApi -bp = Blueprint("files", __name__) -api = ExternalApi(bp) +bp = Blueprint("files", __name__, url_prefix="/files") +api = ExternalApi( + bp, + version="1.0", + title="Files API", + description="API for file operations including upload and preview", + doc="/docs", # Enable Swagger UI at /files/docs +) + +files_ns = Namespace("files", description="File operations", path="/") from . 
import image_preview, tool_files, upload + +api.add_namespace(files_ns) diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index 91f7b27d1d..48baac6556 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -1,16 +1,17 @@ from urllib.parse import quote from flask import Response, request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import NotFound import services from controllers.common.errors import UnsupportedFileTypeError -from controllers.files import api +from controllers.files import files_ns from services.account_service import TenantService from services.file_service import FileService +@files_ns.route("/<uuid:file_id>/image-preview") class ImagePreviewApi(Resource): """ Deprecated @@ -39,6 +40,7 @@ class ImagePreviewApi(Resource): return Response(generator, mimetype=mimetype) +@files_ns.route("/<uuid:file_id>/file-preview") class FilePreviewApi(Resource): def get(self, file_id): file_id = str(file_id) @@ -94,6 +96,7 @@ class FilePreviewApi(Resource): return response +@files_ns.route("/workspaces/<uuid:workspace_id>/webapp-logo") class WorkspaceWebappLogoApi(Resource): def get(self, workspace_id): workspace_id = str(workspace_id) @@ -112,8 +115,3 @@ class WorkspaceWebappLogoApi(Resource): raise UnsupportedFileTypeError() return Response(generator, mimetype=mimetype) - - -api.add_resource(ImagePreviewApi, "/files/<uuid:file_id>/image-preview") -api.add_resource(FilePreviewApi, "/files/<uuid:file_id>/file-preview") -api.add_resource(WorkspaceWebappLogoApi, "/files/workspaces/<uuid:workspace_id>/webapp-logo") diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py index d9c4e50511..faa9b733c2 100644 --- a/api/controllers/files/tool_files.py +++ b/api/controllers/files/tool_files.py @@ -1,17 +1,18 @@ from urllib.parse import quote from flask import Response -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from controllers.common.errors import UnsupportedFileTypeError -from controllers.files import api +from controllers.files import files_ns from core.tools.signature import verify_tool_file_signature from core.tools.tool_file_manager import ToolFileManager from models import db as global_db -class ToolFilePreviewApi(Resource): +@files_ns.route("/tools/<uuid:file_id>.<string:extension>") +class ToolFileApi(Resource): def get(self, file_id, extension): file_id = str(file_id) @@ -52,6 +53,3 @@ response.headers["Content-Disposition"] = f"attachment; filename*=UTF-8''{encoded_filename}" return response - - -api.add_resource(ToolFilePreviewApi, "/files/tools/<uuid:file_id>.<string:extension>") diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index bcc72d131c..7a2b3b0428 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -1,7 +1,9 @@ from mimetypes import guess_extension +from typing import Optional -from flask import request -from flask_restful import Resource, marshal_with +from flask_restx import Resource, reqparse +from flask_restx.api import HTTPStatus +from werkzeug.datastructures import FileStorage from werkzeug.exceptions import Forbidden import services @@ -10,39 +12,76 @@ from controllers.common.errors import ( UnsupportedFileTypeError, ) from controllers.console.wraps import setup_required -from controllers.files import api +from controllers.files import files_ns from controllers.inner_api.plugin.wraps import get_user from core.file.helpers import 
verify_plugin_file_signature from core.tools.tool_file_manager import ToolFileManager -from fields.file_fields import file_fields +from fields.file_fields import build_file_model + +# Define parser for both documentation and validation +upload_parser = reqparse.RequestParser() +upload_parser.add_argument("file", location="files", type=FileStorage, required=True, help="File to upload") +upload_parser.add_argument( + "timestamp", type=str, required=True, location="args", help="Unix timestamp for signature verification" +) +upload_parser.add_argument( + "nonce", type=str, required=True, location="args", help="Random string for signature verification" +) +upload_parser.add_argument( + "sign", type=str, required=True, location="args", help="HMAC signature for request validation" +) +upload_parser.add_argument("tenant_id", type=str, required=True, location="args", help="Tenant identifier") +upload_parser.add_argument("user_id", type=str, required=False, location="args", help="User identifier") +@files_ns.route("/upload/for-plugin") class PluginUploadFileApi(Resource): @setup_required - @marshal_with(file_fields) + @files_ns.expect(upload_parser) + @files_ns.doc("upload_plugin_file") + @files_ns.doc(description="Upload a file for plugin usage with signature verification") + @files_ns.doc( + responses={ + 201: "File uploaded successfully", + 400: "Invalid request parameters", + 403: "Forbidden - Invalid signature or missing parameters", + 413: "File too large", + 415: "Unsupported file type", + } + ) + @files_ns.marshal_with(build_file_model(files_ns), code=HTTPStatus.CREATED) def post(self): - # get file from request - file = request.files["file"] + """Upload a file for plugin usage. - timestamp = request.args.get("timestamp") - nonce = request.args.get("nonce") - sign = request.args.get("sign") - tenant_id = request.args.get("tenant_id") - if not tenant_id: - raise Forbidden("Invalid request.") + Accepts a file upload with signature verification for security. + The file must be accompanied by valid timestamp, nonce, and signature parameters. 
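# A minimal illustrative sketch (parser name here is hypothetical, not part of
# this diff): flask_restx's RequestParser both feeds the Swagger docs via
# @files_ns.expect and validates input, aborting with HTTP 400 when a required
# argument is missing. That is why the manual
# `if not timestamp or not nonce or not sign` checks are dropped further below.
from flask_restx import reqparse

sketch_parser = reqparse.RequestParser()
sketch_parser.add_argument("sign", type=str, required=True, location="args")
# Inside a request handler, sketch_parser.parse_args() returns a dict of the
# validated values, or short-circuits with a 400 error response.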
- user_id = request.args.get("user_id") + Returns: + dict: File metadata including ID, URLs, and properties + int: HTTP status code (201 for success) + + Raises: + Forbidden: Invalid signature or missing required parameters + FileTooLargeError: File exceeds size limit + UnsupportedFileTypeError: File type not supported + """ + # Parse and validate all arguments + args = upload_parser.parse_args() + + file: FileStorage = args["file"] + timestamp: str = args["timestamp"] + nonce: str = args["nonce"] + sign: str = args["sign"] + tenant_id: str = args["tenant_id"] + user_id: Optional[str] = args.get("user_id") user = get_user(tenant_id, user_id) - filename = file.filename - mimetype = file.mimetype + filename: Optional[str] = file.filename + mimetype: Optional[str] = file.mimetype if not filename or not mimetype: raise Forbidden("Invalid request.") - if not timestamp or not nonce or not sign: - raise Forbidden("Invalid request.") - if not verify_plugin_file_signature( filename=filename, mimetype=mimetype, @@ -88,6 +127,3 @@ class PluginUploadFileApi(Resource): raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - - -api.add_resource(PluginUploadFileApi, "/files/upload/for-plugin") diff --git a/api/controllers/inner_api/__init__.py b/api/controllers/inner_api/__init__.py index d51db4322a..d29a7be139 100644 --- a/api/controllers/inner_api/__init__.py +++ b/api/controllers/inner_api/__init__.py @@ -1,10 +1,23 @@ from flask import Blueprint +from flask_restx import Namespace from libs.external_api import ExternalApi bp = Blueprint("inner_api", __name__, url_prefix="/inner/api") -api = ExternalApi(bp) + +api = ExternalApi( + bp, + version="1.0", + title="Inner API", + description="Internal APIs for enterprise features, billing, and plugin communication", + doc="/docs", # Enable Swagger UI at /inner/api/docs +) + +# Create namespace +inner_api_ns = Namespace("inner_api", description="Internal API operations", path="/") from . 
import mail from .plugin import plugin from .workspace import workspace + +api.add_namespace(inner_api_ns) diff --git a/api/controllers/inner_api/mail.py b/api/controllers/inner_api/mail.py index ce3373d65c..0b2be03e43 100644 --- a/api/controllers/inner_api/mail.py +++ b/api/controllers/inner_api/mail.py @@ -1,27 +1,70 @@ -from flask_restful import ( - Resource, # type: ignore - reqparse, -) +from flask_restx import Resource, reqparse from controllers.console.wraps import setup_required -from controllers.inner_api import api -from controllers.inner_api.wraps import enterprise_inner_api_only -from services.enterprise.mail_service import DifyMail, EnterpriseMailService +from controllers.inner_api import inner_api_ns +from controllers.inner_api.wraps import billing_inner_api_only, enterprise_inner_api_only +from tasks.mail_inner_task import send_inner_email_task + +_mail_parser = reqparse.RequestParser() +_mail_parser.add_argument("to", type=str, action="append", required=True) +_mail_parser.add_argument("subject", type=str, required=True) +_mail_parser.add_argument("body", type=str, required=True) +_mail_parser.add_argument("substitutions", type=dict, required=False) -class EnterpriseMail(Resource): - @setup_required - @enterprise_inner_api_only +class BaseMail(Resource): + """Shared logic for sending an inner email.""" + def post(self): - parser = reqparse.RequestParser() - parser.add_argument("to", type=str, action="append", required=True) - parser.add_argument("subject", type=str, required=True) - parser.add_argument("body", type=str, required=True) - parser.add_argument("substitutions", type=dict, required=False) - args = parser.parse_args() - - EnterpriseMailService.send_mail(DifyMail(**args)) + args = _mail_parser.parse_args() + send_inner_email_task.delay( + to=args["to"], + subject=args["subject"], + body=args["body"], + substitutions=args["substitutions"], + ) return {"message": "success"}, 200 -api.add_resource(EnterpriseMail, "/enterprise/mail") +@inner_api_ns.route("/enterprise/mail") +class EnterpriseMail(BaseMail): + method_decorators = [setup_required, enterprise_inner_api_only] + + @inner_api_ns.doc("send_enterprise_mail") + @inner_api_ns.doc(description="Send internal email for enterprise features") + @inner_api_ns.expect(_mail_parser) + @inner_api_ns.doc( + responses={200: "Email sent successfully", 401: "Unauthorized - invalid API key", 404: "Service not available"} + ) + def post(self): + """Send internal email for enterprise features. + + This endpoint allows sending internal emails for enterprise-specific + notifications and communications. + + Returns: + dict: Success message with status code 200 + """ + return super().post() + + +@inner_api_ns.route("/billing/mail") +class BillingMail(BaseMail): + method_decorators = [setup_required, billing_inner_api_only] + + @inner_api_ns.doc("send_billing_mail") + @inner_api_ns.doc(description="Send internal email for billing notifications") + @inner_api_ns.expect(_mail_parser) + @inner_api_ns.doc( + responses={200: "Email sent successfully", 401: "Unauthorized - invalid API key", 404: "Service not available"} + ) + def post(self): + """Send internal email for billing notifications. + + This endpoint allows sending internal emails for billing-related + notifications and alerts. 
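# A hedged usage sketch for the new route above (assumptions: INNER_API is
# enabled, the API listens on localhost:5001, and a valid INNER_API_KEY is
# configured; the X-Inner-Api-Key header matches the billing_inner_api_only
# check defined in wraps.py later in this diff):
import requests

resp = requests.post(
    "http://localhost:5001/inner/api/billing/mail",
    headers={"X-Inner-Api-Key": "<your-inner-api-key>"},  # placeholder value
    json={"to": ["ops@example.com"], "subject": "Invoice ready", "body": "..."},
)
# On acceptance the endpoint enqueues send_inner_email_task and returns
# {"message": "success"} with status 200.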
+ + Returns: + dict: Success message with status code 200 + """ + return super().post() diff --git a/api/controllers/inner_api/plugin/plugin.py b/api/controllers/inner_api/plugin/plugin.py index 5dfe41eb6b..170a794d89 100644 --- a/api/controllers/inner_api/plugin/plugin.py +++ b/api/controllers/inner_api/plugin/plugin.py @@ -1,7 +1,7 @@ -from flask_restful import Resource +from flask_restx import Resource from controllers.console.wraps import setup_required -from controllers.inner_api import api +from controllers.inner_api import inner_api_ns from controllers.inner_api.plugin.wraps import get_user_tenant, plugin_data from controllers.inner_api.wraps import plugin_inner_api_only from core.file.helpers import get_signed_file_url_for_plugin @@ -35,11 +35,21 @@ from models.account import Account, Tenant from models.model import EndUser +@inner_api_ns.route("/invoke/llm") class PluginInvokeLLMApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeLLM) + @inner_api_ns.doc("plugin_invoke_llm") + @inner_api_ns.doc(description="Invoke LLM models through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "LLM invocation successful (streaming response)", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeLLM): def generator(): response = PluginModelBackwardsInvocation.invoke_llm(user_model.id, tenant_model, payload) @@ -48,11 +58,21 @@ class PluginInvokeLLMApi(Resource): return length_prefixed_response(0xF, generator()) +@inner_api_ns.route("/invoke/llm/structured-output") class PluginInvokeLLMWithStructuredOutputApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeLLMWithStructuredOutput) + @inner_api_ns.doc("plugin_invoke_llm_structured") + @inner_api_ns.doc(description="Invoke LLM models with structured output through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "LLM structured output invocation successful (streaming response)", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeLLMWithStructuredOutput): def generator(): response = PluginModelBackwardsInvocation.invoke_llm_with_structured_output( @@ -63,11 +83,21 @@ class PluginInvokeLLMWithStructuredOutputApi(Resource): return length_prefixed_response(0xF, generator()) +@inner_api_ns.route("/invoke/text-embedding") class PluginInvokeTextEmbeddingApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeTextEmbedding) + @inner_api_ns.doc("plugin_invoke_text_embedding") + @inner_api_ns.doc(description="Invoke text embedding models through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Text embedding successful", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeTextEmbedding): try: return jsonable_encoder( @@ -83,11 +113,17 @@ class PluginInvokeTextEmbeddingApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/rerank") class PluginInvokeRerankApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeRerank) + @inner_api_ns.doc("plugin_invoke_rerank") + 
@inner_api_ns.doc(description="Invoke rerank models through plugin interface") + @inner_api_ns.doc( + responses={200: "Rerank successful", 401: "Unauthorized - invalid API key", 404: "Service not available"} + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeRerank): try: return jsonable_encoder( @@ -103,11 +139,21 @@ class PluginInvokeRerankApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/tts") class PluginInvokeTTSApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeTTS) + @inner_api_ns.doc("plugin_invoke_tts") + @inner_api_ns.doc(description="Invoke text-to-speech models through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "TTS invocation successful (streaming response)", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeTTS): def generator(): response = PluginModelBackwardsInvocation.invoke_tts( @@ -120,11 +166,17 @@ class PluginInvokeTTSApi(Resource): return length_prefixed_response(0xF, generator()) +@inner_api_ns.route("/invoke/speech2text") class PluginInvokeSpeech2TextApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeSpeech2Text) + @inner_api_ns.doc("plugin_invoke_speech2text") + @inner_api_ns.doc(description="Invoke speech-to-text models through plugin interface") + @inner_api_ns.doc( + responses={200: "Speech2Text successful", 401: "Unauthorized - invalid API key", 404: "Service not available"} + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeSpeech2Text): try: return jsonable_encoder( @@ -140,11 +192,17 @@ class PluginInvokeSpeech2TextApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/moderation") class PluginInvokeModerationApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeModeration) + @inner_api_ns.doc("plugin_invoke_moderation") + @inner_api_ns.doc(description="Invoke moderation models through plugin interface") + @inner_api_ns.doc( + responses={200: "Moderation successful", 401: "Unauthorized - invalid API key", 404: "Service not available"} + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeModeration): try: return jsonable_encoder( @@ -160,11 +218,21 @@ class PluginInvokeModerationApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/tool") class PluginInvokeToolApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeTool) + @inner_api_ns.doc("plugin_invoke_tool") + @inner_api_ns.doc(description="Invoke tools through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Tool invocation successful (streaming response)", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeTool): def generator(): return PluginToolBackwardsInvocation.convert_to_event_stream( @@ -182,11 +250,21 @@ class PluginInvokeToolApi(Resource): return length_prefixed_response(0xF, generator()) +@inner_api_ns.route("/invoke/parameter-extractor") class PluginInvokeParameterExtractorNodeApi(Resource): 
@setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeParameterExtractorNode) + @inner_api_ns.doc("plugin_invoke_parameter_extractor") + @inner_api_ns.doc(description="Invoke parameter extractor node through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Parameter extraction successful", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeParameterExtractorNode): try: return jsonable_encoder( @@ -205,11 +283,21 @@ class PluginInvokeParameterExtractorNodeApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/question-classifier") class PluginInvokeQuestionClassifierNodeApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeQuestionClassifierNode) + @inner_api_ns.doc("plugin_invoke_question_classifier") + @inner_api_ns.doc(description="Invoke question classifier node through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Question classification successful", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeQuestionClassifierNode): try: return jsonable_encoder( @@ -228,11 +316,21 @@ class PluginInvokeQuestionClassifierNodeApi(Resource): return jsonable_encoder(BaseBackwardsInvocationResponse(error=str(e))) +@inner_api_ns.route("/invoke/app") class PluginInvokeAppApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeApp) + @inner_api_ns.doc("plugin_invoke_app") + @inner_api_ns.doc(description="Invoke application through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "App invocation successful (streaming response)", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeApp): response = PluginAppBackwardsInvocation.invoke_app( app_id=payload.app_id, @@ -248,11 +346,21 @@ class PluginInvokeAppApi(Resource): return length_prefixed_response(0xF, PluginAppBackwardsInvocation.convert_to_event_stream(response)) +@inner_api_ns.route("/invoke/encrypt") class PluginInvokeEncryptApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeEncrypt) + @inner_api_ns.doc("plugin_invoke_encrypt") + @inner_api_ns.doc(description="Encrypt or decrypt data through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Encryption/decryption successful", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeEncrypt): """ encrypt or decrypt data @@ -265,11 +373,21 @@ class PluginInvokeEncryptApi(Resource): return BaseBackwardsInvocationResponse(error=str(e)).model_dump() +@inner_api_ns.route("/invoke/summary") class PluginInvokeSummaryApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestInvokeSummary) + @inner_api_ns.doc("plugin_invoke_summary") + @inner_api_ns.doc(description="Invoke summary functionality through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Summary generation successful", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def 
post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestInvokeSummary): try: return BaseBackwardsInvocationResponse( @@ -285,40 +403,43 @@ class PluginInvokeSummaryApi(Resource): return BaseBackwardsInvocationResponse(error=str(e)).model_dump() +@inner_api_ns.route("/upload/file/request") class PluginUploadFileRequestApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestRequestUploadFile) + @inner_api_ns.doc("plugin_upload_file_request") + @inner_api_ns.doc(description="Request signed URL for file upload through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "Signed URL generated successfully", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestRequestUploadFile): # generate signed url url = get_signed_file_url_for_plugin(payload.filename, payload.mimetype, tenant_model.id, user_model.id) return BaseBackwardsInvocationResponse(data={"url": url}).model_dump() +@inner_api_ns.route("/fetch/app/info") class PluginFetchAppInfoApi(Resource): @setup_required @plugin_inner_api_only @get_user_tenant @plugin_data(payload_type=RequestFetchAppInfo) + @inner_api_ns.doc("plugin_fetch_app_info") + @inner_api_ns.doc(description="Fetch application information through plugin interface") + @inner_api_ns.doc( + responses={ + 200: "App information retrieved successfully", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestFetchAppInfo): return BaseBackwardsInvocationResponse( data=PluginAppBackwardsInvocation.fetch_app_info(payload.app_id, tenant_model.id) ).model_dump() - - -api.add_resource(PluginInvokeLLMApi, "/invoke/llm") -api.add_resource(PluginInvokeLLMWithStructuredOutputApi, "/invoke/llm/structured-output") -api.add_resource(PluginInvokeTextEmbeddingApi, "/invoke/text-embedding") -api.add_resource(PluginInvokeRerankApi, "/invoke/rerank") -api.add_resource(PluginInvokeTTSApi, "/invoke/tts") -api.add_resource(PluginInvokeSpeech2TextApi, "/invoke/speech2text") -api.add_resource(PluginInvokeModerationApi, "/invoke/moderation") -api.add_resource(PluginInvokeToolApi, "/invoke/tool") -api.add_resource(PluginInvokeParameterExtractorNodeApi, "/invoke/parameter-extractor") -api.add_resource(PluginInvokeQuestionClassifierNodeApi, "/invoke/question-classifier") -api.add_resource(PluginInvokeAppApi, "/invoke/app") -api.add_resource(PluginInvokeEncryptApi, "/invoke/encrypt") -api.add_resource(PluginInvokeSummaryApi, "/invoke/summary") -api.add_resource(PluginUploadFileRequestApi, "/upload/file/request") -api.add_resource(PluginFetchAppInfoApi, "/fetch/app/info") diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index b533614d4d..89b4ac7506 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -4,7 +4,7 @@ from typing import Optional from flask import current_app, request from flask_login import user_logged_in -from flask_restful import reqparse +from flask_restx import reqparse from pydantic import BaseModel from sqlalchemy.orm import Session diff --git a/api/controllers/inner_api/workspace/workspace.py b/api/controllers/inner_api/workspace/workspace.py index 77568b75f1..47f0240cd2 100644 --- a/api/controllers/inner_api/workspace/workspace.py +++ b/api/controllers/inner_api/workspace/workspace.py @@ -1,9 
+1,9 @@ import json -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from controllers.console.wraps import setup_required -from controllers.inner_api import api +from controllers.inner_api import inner_api_ns from controllers.inner_api.wraps import enterprise_inner_api_only from events.tenant_event import tenant_was_created from extensions.ext_database import db @@ -11,9 +11,19 @@ from models.account import Account from services.account_service import TenantService +@inner_api_ns.route("/enterprise/workspace") class EnterpriseWorkspace(Resource): @setup_required @enterprise_inner_api_only + @inner_api_ns.doc("create_enterprise_workspace") + @inner_api_ns.doc(description="Create a new enterprise workspace with owner assignment") + @inner_api_ns.doc( + responses={ + 200: "Workspace created successfully", + 401: "Unauthorized - invalid API key", + 404: "Owner account not found or service not available", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") @@ -44,9 +54,19 @@ class EnterpriseWorkspace(Resource): } +@inner_api_ns.route("/enterprise/workspace/ownerless") class EnterpriseWorkspaceNoOwnerEmail(Resource): @setup_required @enterprise_inner_api_only + @inner_api_ns.doc("create_enterprise_workspace_ownerless") + @inner_api_ns.doc(description="Create a new enterprise workspace without initial owner assignment") + @inner_api_ns.doc( + responses={ + 200: "Workspace created successfully", + 401: "Unauthorized - invalid API key", + 404: "Service not available", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, location="json") @@ -71,7 +91,3 @@ class EnterpriseWorkspaceNoOwnerEmail(Resource): "message": "enterprise workspace created.", "tenant": resp, } - - -api.add_resource(EnterpriseWorkspace, "/enterprise/workspace") -api.add_resource(EnterpriseWorkspaceNoOwnerEmail, "/enterprise/workspace/ownerless") diff --git a/api/controllers/inner_api/wraps.py b/api/controllers/inner_api/wraps.py index 9e7b3d4f29..c5aa318f58 100644 --- a/api/controllers/inner_api/wraps.py +++ b/api/controllers/inner_api/wraps.py @@ -10,6 +10,22 @@ from extensions.ext_database import db from models.model import EndUser +def billing_inner_api_only(view): + @wraps(view) + def decorated(*args, **kwargs): + if not dify_config.INNER_API: + abort(404) + + # get header 'X-Inner-Api-Key' + inner_api_key = request.headers.get("X-Inner-Api-Key") + if not inner_api_key or inner_api_key != dify_config.INNER_API_KEY: + abort(401) + + return view(*args, **kwargs) + + return decorated + + def enterprise_inner_api_only(view): @wraps(view) def decorated(*args, **kwargs): diff --git a/api/controllers/mcp/__init__.py b/api/controllers/mcp/__init__.py index 1b3e0a5621..c344ffad08 100644 --- a/api/controllers/mcp/__init__.py +++ b/api/controllers/mcp/__init__.py @@ -1,8 +1,20 @@ from flask import Blueprint +from flask_restx import Namespace from libs.external_api import ExternalApi bp = Blueprint("mcp", __name__, url_prefix="/mcp") -api = ExternalApi(bp) + +api = ExternalApi( + bp, + version="1.0", + title="MCP API", + description="API for Model Context Protocol operations", + doc="/docs", # Enable Swagger UI at /mcp/docs +) + +mcp_ns = Namespace("mcp", description="MCP operations", path="/") from . 
import mcp + +api.add_namespace(mcp_ns) diff --git a/api/controllers/mcp/mcp.py b/api/controllers/mcp/mcp.py index 87d678796f..fc19749011 100644 --- a/api/controllers/mcp/mcp.py +++ b/api/controllers/mcp/mcp.py @@ -1,8 +1,10 @@ -from flask_restful import Resource, reqparse +from typing import Optional, Union + +from flask_restx import Resource, reqparse from pydantic import ValidationError from controllers.console.app.mcp_server import AppMCPServerStatus -from controllers.mcp import api +from controllers.mcp import mcp_ns from core.app.app_config.entities import VariableEntity from core.mcp import types from core.mcp.server.streamable_http import MCPServerStreamableHTTPRequestHandler @@ -13,22 +15,58 @@ from libs import helper from models.model import App, AppMCPServer, AppMode +def int_or_str(value): + """Validate that a value is either an integer or string.""" + if isinstance(value, (int, str)): + return value + else: + return None + + +# Define parser for both documentation and validation +mcp_request_parser = reqparse.RequestParser() +mcp_request_parser.add_argument( + "jsonrpc", type=str, required=True, location="json", help="JSON-RPC version (should be '2.0')" +) +mcp_request_parser.add_argument("method", type=str, required=True, location="json", help="The method to invoke") +mcp_request_parser.add_argument("params", type=dict, required=False, location="json", help="Parameters for the method") +mcp_request_parser.add_argument( + "id", type=int_or_str, required=False, location="json", help="Request ID for tracking responses" +) + + +@mcp_ns.route("/server//mcp") class MCPAppApi(Resource): - def post(self, server_code): - def int_or_str(value): - if isinstance(value, (int, str)): - return value - else: - return None + @mcp_ns.expect(mcp_request_parser) + @mcp_ns.doc("handle_mcp_request") + @mcp_ns.doc(description="Handle Model Context Protocol (MCP) requests for a specific server") + @mcp_ns.doc(params={"server_code": "Unique identifier for the MCP server"}) + @mcp_ns.doc( + responses={ + 200: "MCP response successfully processed", + 400: "Invalid MCP request or parameters", + 404: "Server or app not found", + } + ) + def post(self, server_code: str): + """Handle MCP requests for a specific server. - parser = reqparse.RequestParser() - parser.add_argument("jsonrpc", type=str, required=True, location="json") - parser.add_argument("method", type=str, required=True, location="json") - parser.add_argument("params", type=dict, required=False, location="json") - parser.add_argument("id", type=int_or_str, required=False, location="json") - args = parser.parse_args() + Processes JSON-RPC formatted requests according to the Model Context Protocol specification. + Validates the server status and associated app before processing the request. 
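# A minimal sketch of a JSON-RPC body accepted by the parser above (the
# "initialize" method follows the MCP handshake; the id may be an int or a
# string thanks to the int_or_str validator):
mcp_request = {
    "jsonrpc": "2.0",        # fixed JSON-RPC version
    "method": "initialize",  # MCP method to invoke
    "params": {},            # optional, method-specific parameters
    "id": 1,                 # echoed back so the client can match the response
}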
- request_id = args.get("id") + Args: + server_code: Unique identifier for the MCP server + + Returns: + dict: JSON-RPC response from the MCP handler + + Raises: + ValidationError: Invalid request format or parameters + """ + # Parse and validate all arguments + args = mcp_request_parser.parse_args() + + request_id: Optional[Union[int, str]] = args.get("id") server = db.session.query(AppMCPServer).where(AppMCPServer.server_code == server_code).first() if not server: @@ -99,6 +137,3 @@ class MCPAppApi(Resource): mcp_server_handler = MCPServerStreamableHTTPRequestHandler(app, request, converted_user_input_form) response = mcp_server_handler.handle() return helper.compact_generate_response(response) - - -api.add_resource(MCPAppApi, "/server//mcp") diff --git a/api/controllers/service_api/__init__.py b/api/controllers/service_api/__init__.py index b26f29d98d..763345d723 100644 --- a/api/controllers/service_api/__init__.py +++ b/api/controllers/service_api/__init__.py @@ -1,11 +1,23 @@ from flask import Blueprint +from flask_restx import Namespace from libs.external_api import ExternalApi bp = Blueprint("service_api", __name__, url_prefix="/v1") -api = ExternalApi(bp) + +api = ExternalApi( + bp, + version="1.0", + title="Service API", + description="API for application services", + doc="/docs", # Enable Swagger UI at /v1/docs +) + +service_api_ns = Namespace("service_api", description="Service operations", path="/") from . import index from .app import annotation, app, audio, completion, conversation, file, file_preview, message, site, workflow from .dataset import dataset, document, hit_testing, metadata, segment, upload_file from .workspace import models + +api.add_namespace(service_api_ns) diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index 9b22c535f4..9038bda11a 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -1,38 +1,74 @@ +from typing import Literal + from flask import request -from flask_restful import Resource, marshal, marshal_with, reqparse +from flask_restx import Api, Namespace, Resource, fields, reqparse +from flask_restx.api import HTTPStatus from werkzeug.exceptions import Forbidden -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import validate_app_token from extensions.ext_redis import redis_client -from fields.annotation_fields import ( - annotation_fields, -) +from fields.annotation_fields import annotation_fields, build_annotation_model from libs.login import current_user +from models.account import Account from models.model import App from services.annotation_service import AppAnnotationService +# Define parsers for annotation API +annotation_create_parser = reqparse.RequestParser() +annotation_create_parser.add_argument("question", required=True, type=str, location="json", help="Annotation question") +annotation_create_parser.add_argument("answer", required=True, type=str, location="json", help="Annotation answer") +annotation_reply_action_parser = reqparse.RequestParser() +annotation_reply_action_parser.add_argument( + "score_threshold", required=True, type=float, location="json", help="Score threshold for annotation matching" +) +annotation_reply_action_parser.add_argument( + "embedding_provider_name", required=True, type=str, location="json", help="Embedding provider name" +) +annotation_reply_action_parser.add_argument( + "embedding_model_name", required=True, 
type=str, location="json", help="Embedding model name" +) + + +@service_api_ns.route("/apps/annotation-reply/") class AnnotationReplyActionApi(Resource): + @service_api_ns.expect(annotation_reply_action_parser) + @service_api_ns.doc("annotation_reply_action") + @service_api_ns.doc(description="Enable or disable annotation reply feature") + @service_api_ns.doc(params={"action": "Action to perform: 'enable' or 'disable'"}) + @service_api_ns.doc( + responses={ + 200: "Action completed successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_app_token - def post(self, app_model: App, action): - parser = reqparse.RequestParser() - parser.add_argument("score_threshold", required=True, type=float, location="json") - parser.add_argument("embedding_provider_name", required=True, type=str, location="json") - parser.add_argument("embedding_model_name", required=True, type=str, location="json") - args = parser.parse_args() + def post(self, app_model: App, action: Literal["enable", "disable"]): + """Enable or disable annotation reply feature.""" + args = annotation_reply_action_parser.parse_args() if action == "enable": result = AppAnnotationService.enable_app_annotation(args, app_model.id) elif action == "disable": result = AppAnnotationService.disable_app_annotation(app_model.id) - else: - raise ValueError("Unsupported annotation reply action") return result, 200 +@service_api_ns.route("/apps/annotation-reply//status/") class AnnotationReplyActionStatusApi(Resource): + @service_api_ns.doc("get_annotation_reply_action_status") + @service_api_ns.doc(description="Get the status of an annotation reply action job") + @service_api_ns.doc(params={"action": "Action type", "job_id": "Job ID"}) + @service_api_ns.doc( + responses={ + 200: "Job status retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Job not found", + } + ) @validate_app_token def get(self, app_model: App, job_id, action): + """Get the status of an annotation reply action job.""" job_id = str(job_id) app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) @@ -48,60 +84,114 @@ class AnnotationReplyActionStatusApi(Resource): return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200 +# Define annotation list response model +annotation_list_fields = { + "data": fields.List(fields.Nested(annotation_fields)), + "has_more": fields.Boolean, + "limit": fields.Integer, + "total": fields.Integer, + "page": fields.Integer, +} + + +def build_annotation_list_model(api_or_ns: Api | Namespace): + """Build the annotation list model for the API or Namespace.""" + copied_annotation_list_fields = annotation_list_fields.copy() + copied_annotation_list_fields["data"] = fields.List(fields.Nested(build_annotation_model(api_or_ns))) + return api_or_ns.model("AnnotationList", copied_annotation_list_fields) + + +@service_api_ns.route("/apps/annotations") class AnnotationListApi(Resource): + @service_api_ns.doc("list_annotations") + @service_api_ns.doc(description="List annotations for the application") + @service_api_ns.doc( + responses={ + 200: "Annotations retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_app_token + @service_api_ns.marshal_with(build_annotation_list_model(service_api_ns)) def get(self, app_model: App): + """List annotations for the application.""" page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) keyword = 
request.args.get("keyword", default="", type=str) annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_model.id, page, limit, keyword) - response = { - "data": marshal(annotation_list, annotation_fields), + return { + "data": annotation_list, "has_more": len(annotation_list) == limit, "limit": limit, "total": total, "page": page, } - return response, 200 + @service_api_ns.expect(annotation_create_parser) + @service_api_ns.doc("create_annotation") + @service_api_ns.doc(description="Create a new annotation") + @service_api_ns.doc( + responses={ + 201: "Annotation created successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_app_token - @marshal_with(annotation_fields) + @service_api_ns.marshal_with(build_annotation_model(service_api_ns), code=HTTPStatus.CREATED) def post(self, app_model: App): - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") - args = parser.parse_args() + """Create a new annotation.""" + args = annotation_create_parser.parse_args() annotation = AppAnnotationService.insert_app_annotation_directly(args, app_model.id) - return annotation + return annotation, 201 +@service_api_ns.route("/apps/annotations/") class AnnotationUpdateDeleteApi(Resource): + @service_api_ns.expect(annotation_create_parser) + @service_api_ns.doc("update_annotation") + @service_api_ns.doc(description="Update an existing annotation") + @service_api_ns.doc(params={"annotation_id": "Annotation ID"}) + @service_api_ns.doc( + responses={ + 200: "Annotation updated successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Annotation not found", + } + ) @validate_app_token - @marshal_with(annotation_fields) + @service_api_ns.marshal_with(build_annotation_model(service_api_ns)) def put(self, app_model: App, annotation_id): + """Update an existing annotation.""" + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() annotation_id = str(annotation_id) - parser = reqparse.RequestParser() - parser.add_argument("question", required=True, type=str, location="json") - parser.add_argument("answer", required=True, type=str, location="json") - args = parser.parse_args() + args = annotation_create_parser.parse_args() annotation = AppAnnotationService.update_app_annotation_directly(args, app_model.id, annotation_id) return annotation + @service_api_ns.doc("delete_annotation") + @service_api_ns.doc(description="Delete an annotation") + @service_api_ns.doc(params={"annotation_id": "Annotation ID"}) + @service_api_ns.doc( + responses={ + 204: "Annotation deleted successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Annotation not found", + } + ) @validate_app_token def delete(self, app_model: App, annotation_id): + """Delete an annotation.""" + assert isinstance(current_user, Account) + if not current_user.is_editor: raise Forbidden() annotation_id = str(annotation_id) AppAnnotationService.delete_app_annotation(app_model.id, annotation_id) return {"result": "success"}, 204 - - -api.add_resource(AnnotationReplyActionApi, "/apps/annotation-reply/") -api.add_resource(AnnotationReplyActionStatusApi, "/apps/annotation-reply//status/") -api.add_resource(AnnotationListApi, "/apps/annotations") -api.add_resource(AnnotationUpdateDeleteApi, "/apps/annotations/") diff --git 
a/api/controllers/service_api/app/app.py b/api/controllers/service_api/app/app.py index 89222d5e83..2dbeed1d68 100644 --- a/api/controllers/service_api/app/app.py +++ b/api/controllers/service_api/app/app.py @@ -1,7 +1,7 @@ -from flask_restful import Resource, marshal_with +from flask_restx import Resource -from controllers.common import fields -from controllers.service_api import api +from controllers.common.fields import build_parameters_model +from controllers.service_api import service_api_ns from controllers.service_api.app.error import AppUnavailableError from controllers.service_api.wraps import validate_app_token from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict @@ -9,13 +9,26 @@ from models.model import App, AppMode from services.app_service import AppService +@service_api_ns.route("/parameters") class AppParameterApi(Resource): """Resource for app variables.""" + @service_api_ns.doc("get_app_parameters") + @service_api_ns.doc(description="Retrieve application input parameters and configuration") + @service_api_ns.doc( + responses={ + 200: "Parameters retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Application not found", + } + ) @validate_app_token - @marshal_with(fields.parameters_fields) + @service_api_ns.marshal_with(build_parameters_model(service_api_ns)) def get(self, app_model: App): - """Retrieve app parameters.""" + """Retrieve app parameters. + + Returns the input form parameters and configuration for the application. + """ if app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}: workflow = app_model.workflow if workflow is None: @@ -35,17 +48,43 @@ class AppParameterApi(Resource): return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) +@service_api_ns.route("/meta") class AppMetaApi(Resource): + @service_api_ns.doc("get_app_meta") + @service_api_ns.doc(description="Get application metadata") + @service_api_ns.doc( + responses={ + 200: "Metadata retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Application not found", + } + ) @validate_app_token def get(self, app_model: App): - """Get app meta""" + """Get app metadata. + + Returns metadata about the application including configuration and settings. + """ return AppService().get_app_meta(app_model) +@service_api_ns.route("/info") class AppInfoApi(Resource): + @service_api_ns.doc("get_app_info") + @service_api_ns.doc(description="Get basic application information") + @service_api_ns.doc( + responses={ + 200: "Application info retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Application not found", + } + ) @validate_app_token def get(self, app_model: App): - """Get app information""" + """Get app information. + + Returns basic information about the application including name, description, tags, and mode. 
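# A hedged usage sketch for the three read-only endpoints above (assumptions:
# a local instance on port 5001 and a service API key sent with the Bearer
# scheme that validate_app_token expects):
import requests

headers = {"Authorization": "Bearer <app-service-api-key>"}  # placeholder key
info = requests.get("http://localhost:5001/v1/info", headers=headers).json()
params = requests.get("http://localhost:5001/v1/parameters", headers=headers).json()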
+ """ tags = [tag.name for tag in app_model.tags] return { "name": app_model.name, @@ -54,8 +93,3 @@ class AppInfoApi(Resource): "mode": app_model.mode, "author_name": app_model.author_name, } - - -api.add_resource(AppParameterApi, "/parameters") -api.add_resource(AppMetaApi, "/meta") -api.add_resource(AppInfoApi, "/info") diff --git a/api/controllers/service_api/app/audio.py b/api/controllers/service_api/app/audio.py index 848863cf1b..8148fa8ccc 100644 --- a/api/controllers/service_api/app/audio.py +++ b/api/controllers/service_api/app/audio.py @@ -1,11 +1,11 @@ import logging from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import InternalServerError import services -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ( AppUnavailableError, AudioTooLargeError, @@ -29,10 +29,29 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +logger = logging.getLogger(__name__) + +@service_api_ns.route("/audio-to-text") class AudioApi(Resource): + @service_api_ns.doc("audio_to_text") + @service_api_ns.doc(description="Convert audio to text using speech-to-text") + @service_api_ns.doc( + responses={ + 200: "Audio successfully transcribed", + 400: "Bad request - no audio or invalid audio", + 401: "Unauthorized - invalid API token", + 413: "Audio file too large", + 415: "Unsupported audio type", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) def post(self, app_model: App, end_user: EndUser): + """Convert audio to text using speech-to-text. + + Accepts an audio file upload and returns the transcribed text. + """ file = request.files["file"] try: @@ -40,7 +59,7 @@ class AudioApi(Resource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -61,20 +80,39 @@ class AudioApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() +# Define parser for text-to-audio API +text_to_audio_parser = reqparse.RequestParser() +text_to_audio_parser.add_argument("message_id", type=str, required=False, location="json", help="Message ID") +text_to_audio_parser.add_argument("voice", type=str, location="json", help="Voice to use for TTS") +text_to_audio_parser.add_argument("text", type=str, location="json", help="Text to convert to audio") +text_to_audio_parser.add_argument("streaming", type=bool, location="json", help="Enable streaming response") + + +@service_api_ns.route("/text-to-audio") class TextApi(Resource): + @service_api_ns.expect(text_to_audio_parser) + @service_api_ns.doc("text_to_audio") + @service_api_ns.doc(description="Convert text to audio using text-to-speech") + @service_api_ns.doc( + responses={ + 200: "Text successfully converted to audio", + 400: "Bad request - invalid parameters", + 401: "Unauthorized - invalid API token", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) def post(self, app_model: App, end_user: EndUser): + """Convert text to audio using text-to-speech. 
+ + Converts the provided text to audio using the specified voice. + """ try: - parser = reqparse.RequestParser() - parser.add_argument("message_id", type=str, required=False, location="json") - parser.add_argument("voice", type=str, location="json") - parser.add_argument("text", type=str, location="json") - parser.add_argument("streaming", type=bool, location="json") - args = parser.parse_args() + args = text_to_audio_parser.parse_args() message_id = args.get("message_id", None) text = args.get("text", None) @@ -85,7 +123,7 @@ class TextApi(Resource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -106,9 +144,5 @@ class TextApi(Resource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() - - -api.add_resource(AudioApi, "/audio-to-text") -api.add_resource(TextApi, "/text-to-audio") diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index ea57f04850..22428ee0ab 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -1,11 +1,11 @@ import logging from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from werkzeug.exceptions import BadRequest, InternalServerError, NotFound import services -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ( AppUnavailableError, CompletionRequestError, @@ -33,21 +33,71 @@ from services.app_generate_service import AppGenerateService from services.errors.app import IsDraftWorkflowError, WorkflowIdFormatError, WorkflowNotFoundError from services.errors.llm import InvokeRateLimitError +logger = logging.getLogger(__name__) + +# Define parser for completion API +completion_parser = reqparse.RequestParser() +completion_parser.add_argument( + "inputs", type=dict, required=True, location="json", help="Input parameters for completion" +) +completion_parser.add_argument("query", type=str, location="json", default="", help="The query string") +completion_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") +completion_parser.add_argument( + "response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode" +) +completion_parser.add_argument( + "retriever_from", type=str, required=False, default="dev", location="json", help="Retriever source" +) + +# Define parser for chat API +chat_parser = reqparse.RequestParser() +chat_parser.add_argument("inputs", type=dict, required=True, location="json", help="Input parameters for chat") +chat_parser.add_argument("query", type=str, required=True, location="json", help="The chat query") +chat_parser.add_argument("files", type=list, required=False, location="json", help="List of file attachments") +chat_parser.add_argument( + "response_mode", type=str, choices=["blocking", "streaming"], location="json", help="Response mode" +) +chat_parser.add_argument("conversation_id", type=uuid_value, location="json", help="Existing conversation ID") +chat_parser.add_argument( + "retriever_from", type=str, required=False, default="dev", location="json", 
help="Retriever source" +) +chat_parser.add_argument( + "auto_generate_name", + type=bool, + required=False, + default=True, + location="json", + help="Auto generate conversation name", +) +chat_parser.add_argument("workflow_id", type=str, required=False, location="json", help="Workflow ID for advanced chat") + + +@service_api_ns.route("/completion-messages") class CompletionApi(Resource): + @service_api_ns.expect(completion_parser) + @service_api_ns.doc("create_completion") + @service_api_ns.doc(description="Create a completion for the given prompt") + @service_api_ns.doc( + responses={ + 200: "Completion created successfully", + 400: "Bad request - invalid parameters", + 401: "Unauthorized - invalid API token", + 404: "Conversation not found", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser): + """Create a completion for the given prompt. + + This endpoint generates a completion based on the provided inputs and query. + Supports both blocking and streaming response modes. + """ if app_model.mode != "completion": raise AppUnavailableError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, location="json", default="") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") - - args = parser.parse_args() + args = completion_parser.parse_args() external_trace_id = get_external_trace_id(request) if external_trace_id: args["external_trace_id"] = external_trace_id @@ -71,7 +121,7 @@ class CompletionApi(Resource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -84,13 +134,25 @@ class CompletionApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() +@service_api_ns.route("/completion-messages//stop") class CompletionStopApi(Resource): + @service_api_ns.doc("stop_completion") + @service_api_ns.doc(description="Stop a running completion task") + @service_api_ns.doc(params={"task_id": "The ID of the task to stop"}) + @service_api_ns.doc( + responses={ + 200: "Task stopped successfully", + 401: "Unauthorized - invalid API token", + 404: "Task not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) - def post(self, app_model: App, end_user: EndUser, task_id): + def post(self, app_model: App, end_user: EndUser, task_id: str): + """Stop a running completion task.""" if app_model.mode != "completion": raise AppUnavailableError() @@ -99,23 +161,33 @@ class CompletionStopApi(Resource): return {"result": "success"}, 200 +@service_api_ns.route("/chat-messages") class ChatApi(Resource): + @service_api_ns.expect(chat_parser) + @service_api_ns.doc("create_chat_message") + @service_api_ns.doc(description="Send a message in a chat conversation") + 
@service_api_ns.doc( + responses={ + 200: "Message sent successfully", + 400: "Bad request - invalid parameters or workflow issues", + 401: "Unauthorized - invalid API token", + 404: "Conversation or workflow not found", + 429: "Rate limit exceeded", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser): + """Send a message in a chat conversation. + + This endpoint handles chat messages for chat, agent chat, and advanced chat applications. + Supports conversation management and both blocking and streaming response modes. + """ app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, location="json") - parser.add_argument("query", type=str, required=True, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - parser.add_argument("conversation_id", type=uuid_value, location="json") - parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") - parser.add_argument("auto_generate_name", type=bool, required=False, default=True, location="json") - parser.add_argument("workflow_id", type=str, required=False, location="json") - args = parser.parse_args() + args = chat_parser.parse_args() external_trace_id = get_external_trace_id(request) if external_trace_id: @@ -140,7 +212,7 @@ class ChatApi(Resource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -155,13 +227,25 @@ class ChatApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() +@service_api_ns.route("/chat-messages//stop") class ChatStopApi(Resource): + @service_api_ns.doc("stop_chat_message") + @service_api_ns.doc(description="Stop a running chat message generation") + @service_api_ns.doc(params={"task_id": "The ID of the task to stop"}) + @service_api_ns.doc( + responses={ + 200: "Task stopped successfully", + 401: "Unauthorized - invalid API token", + 404: "Task not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) - def post(self, app_model: App, end_user: EndUser, task_id): + def post(self, app_model: App, end_user: EndUser, task_id: str): + """Stop a running chat message generation.""" app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() @@ -169,9 +253,3 @@ class ChatStopApi(Resource): AppQueueManager.set_stop_flag(task_id, InvokeFrom.SERVICE_API, end_user.id) return {"result": "success"}, 200 - - -api.add_resource(CompletionApi, "/completion-messages") -api.add_resource(CompletionStopApi, "/completion-messages//stop") -api.add_resource(ChatApi, "/chat-messages") -api.add_resource(ChatStopApi, "/chat-messages//stop") diff --git 
a/api/controllers/service_api/app/conversation.py b/api/controllers/service_api/app/conversation.py index 073307ac4a..4860bf3a79 100644 --- a/api/controllers/service_api/app/conversation.py +++ b/api/controllers/service_api/app/conversation.py @@ -1,48 +1,97 @@ -from flask_restful import Resource, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Resource, reqparse +from flask_restx.inputs import int_range from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, NotFound import services -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import NotChatAppError from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from core.app.entities.app_invoke_entities import InvokeFrom from extensions.ext_database import db from fields.conversation_fields import ( - conversation_delete_fields, - conversation_infinite_scroll_pagination_fields, - simple_conversation_fields, + build_conversation_delete_model, + build_conversation_infinite_scroll_pagination_model, + build_simple_conversation_model, ) from fields.conversation_variable_fields import ( - conversation_variable_fields, - conversation_variable_infinite_scroll_pagination_fields, + build_conversation_variable_infinite_scroll_pagination_model, + build_conversation_variable_model, ) from libs.helper import uuid_value from models.model import App, AppMode, EndUser from services.conversation_service import ConversationService +# Define parsers for conversation APIs +conversation_list_parser = reqparse.RequestParser() +conversation_list_parser.add_argument( + "last_id", type=uuid_value, location="args", help="Last conversation ID for pagination" +) +conversation_list_parser.add_argument( + "limit", + type=int_range(1, 100), + required=False, + default=20, + location="args", + help="Number of conversations to return", +) +conversation_list_parser.add_argument( + "sort_by", + type=str, + choices=["created_at", "-created_at", "updated_at", "-updated_at"], + required=False, + default="-updated_at", + location="args", + help="Sort order for conversations", +) +conversation_rename_parser = reqparse.RequestParser() +conversation_rename_parser.add_argument("name", type=str, required=False, location="json", help="New conversation name") +conversation_rename_parser.add_argument( + "auto_generate", type=bool, required=False, default=False, location="json", help="Auto-generate conversation name" +) + +conversation_variables_parser = reqparse.RequestParser() +conversation_variables_parser.add_argument( + "last_id", type=uuid_value, location="args", help="Last variable ID for pagination" +) +conversation_variables_parser.add_argument( + "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of variables to return" +) + +conversation_variable_update_parser = reqparse.RequestParser() +# using lambda is for passing the already-typed value without modification +# if no lambda, it will be converted to string +# the string cannot be converted using json.loads +conversation_variable_update_parser.add_argument( + "value", required=True, location="json", type=lambda x: x, help="New value for the conversation variable" +) + + +@service_api_ns.route("/conversations") class ConversationApi(Resource): + @service_api_ns.expect(conversation_list_parser) + @service_api_ns.doc("list_conversations") + @service_api_ns.doc(description="List all conversations for the 
current user") + @service_api_ns.doc( + responses={ + 200: "Conversations retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Last conversation not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) - @marshal_with(conversation_infinite_scroll_pagination_fields) + @service_api_ns.marshal_with(build_conversation_infinite_scroll_pagination_model(service_api_ns)) def get(self, app_model: App, end_user: EndUser): + """List all conversations for the current user. + + Supports pagination using last_id and limit parameters. + """ app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - parser.add_argument( - "sort_by", - type=str, - choices=["created_at", "-created_at", "updated_at", "-updated_at"], - required=False, - default="-updated_at", - location="args", - ) - args = parser.parse_args() + args = conversation_list_parser.parse_args() try: with Session(db.engine) as session: @@ -59,10 +108,22 @@ class ConversationApi(Resource): raise NotFound("Last Conversation Not Exists.") +@service_api_ns.route("/conversations/") class ConversationDetailApi(Resource): + @service_api_ns.doc("delete_conversation") + @service_api_ns.doc(description="Delete a specific conversation") + @service_api_ns.doc(params={"c_id": "Conversation ID"}) + @service_api_ns.doc( + responses={ + 204: "Conversation deleted successfully", + 401: "Unauthorized - invalid API token", + 404: "Conversation not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) - @marshal_with(conversation_delete_fields) + @service_api_ns.marshal_with(build_conversation_delete_model(service_api_ns), code=204) def delete(self, app_model: App, end_user: EndUser, c_id): + """Delete a specific conversation.""" app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() @@ -76,20 +137,30 @@ class ConversationDetailApi(Resource): return {"result": "success"}, 204 +@service_api_ns.route("/conversations//name") class ConversationRenameApi(Resource): + @service_api_ns.expect(conversation_rename_parser) + @service_api_ns.doc("rename_conversation") + @service_api_ns.doc(description="Rename a conversation or auto-generate a name") + @service_api_ns.doc(params={"c_id": "Conversation ID"}) + @service_api_ns.doc( + responses={ + 200: "Conversation renamed successfully", + 401: "Unauthorized - invalid API token", + 404: "Conversation not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) - @marshal_with(simple_conversation_fields) + @service_api_ns.marshal_with(build_simple_conversation_model(service_api_ns)) def post(self, app_model: App, end_user: EndUser, c_id): + """Rename a conversation or auto-generate a name.""" app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() conversation_id = str(c_id) - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, location="json") - parser.add_argument("auto_generate", type=bool, required=False, default=False, location="json") - args = parser.parse_args() + args = 
conversation_rename_parser.parse_args() try: return ConversationService.rename(app_model, conversation_id, end_user, args["name"], args["auto_generate"]) @@ -97,10 +168,26 @@ class ConversationRenameApi(Resource): raise NotFound("Conversation Not Exists.") +@service_api_ns.route("/conversations/<uuid:c_id>/variables") class ConversationVariablesApi(Resource): + @service_api_ns.expect(conversation_variables_parser) + @service_api_ns.doc("list_conversation_variables") + @service_api_ns.doc(description="List all variables for a conversation") + @service_api_ns.doc(params={"c_id": "Conversation ID"}) + @service_api_ns.doc( + responses={ + 200: "Variables retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Conversation not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) - @marshal_with(conversation_variable_infinite_scroll_pagination_fields) + @service_api_ns.marshal_with(build_conversation_variable_infinite_scroll_pagination_model(service_api_ns)) def get(self, app_model: App, end_user: EndUser, c_id): + """List all variables for a conversation. + + Conversational variables are only available for chat applications. + """ # conversational variable only for chat app app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -108,10 +195,7 @@ class ConversationVariablesApi(Resource): conversation_id = str(c_id) - parser = reqparse.RequestParser() - parser.add_argument("last_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = conversation_variables_parser.parse_args() try: return ConversationService.get_conversational_variable( @@ -121,11 +205,28 @@ class ConversationVariablesApi(Resource): raise NotFound("Conversation Not Exists.") +@service_api_ns.route("/conversations/<uuid:c_id>/variables/<uuid:variable_id>") class ConversationVariableDetailApi(Resource): + @service_api_ns.expect(conversation_variable_update_parser) + @service_api_ns.doc("update_conversation_variable") + @service_api_ns.doc(description="Update a conversation variable's value") + @service_api_ns.doc(params={"c_id": "Conversation ID", "variable_id": "Variable ID"}) + @service_api_ns.doc( + responses={ + 200: "Variable updated successfully", + 400: "Bad request - type mismatch", + 401: "Unauthorized - invalid API token", + 404: "Conversation or variable not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) - @marshal_with(conversation_variable_fields) + @service_api_ns.marshal_with(build_conversation_variable_model(service_api_ns)) def put(self, app_model: App, end_user: EndUser, c_id, variable_id): - """Update a conversation variable's value""" + """Update a conversation variable's value. + + Allows updating the value of a specific conversation variable. + The value must match the variable's expected type. + """
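Reviewer note: the `conversation_variable_update_parser` defined earlier in this file deliberately uses an identity lambda so JSON values reach the service layer with their original types. A minimal client sketch for this endpoint follows; the IDs, base URL, and token are placeholders:

```python
import requests

API_BASE = "https://api.dify.ai/v1"  # placeholder
API_KEY = "app-xxxxxxxx"             # placeholder app API token

def update_conversation_variable(conversation_id: str, variable_id: str, value, user: str):
    """PUT the raw JSON value; the identity-lambda parser keeps its type intact."""
    resp = requests.put(
        f"{API_BASE}/conversations/{conversation_id}/variables/{variable_id}",
        headers={"Authorization": f"Bearer {API_KEY}"},
        # value may be a string, number, list, or object - it is passed through as-is
        json={"value": value, "user": user},
    )
    resp.raise_for_status()
    return resp.json()

# A list value survives the round trip because the parser does not coerce it to str:
update_conversation_variable("conv-id", "var-id", ["a", "b"], user="user-123")
```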
+ """ app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() @@ -133,12 +234,7 @@ class ConversationVariableDetailApi(Resource): conversation_id = str(c_id) variable_id = str(variable_id) - parser = reqparse.RequestParser() - # using lambda is for passing the already-typed value without modification - # if no lambda, it will be converted to string - # the string cannot be converted using json.loads - parser.add_argument("value", required=True, location="json", type=lambda x: x) - args = parser.parse_args() + args = conversation_variable_update_parser.parse_args() try: return ConversationService.update_conversation_variable( @@ -150,15 +246,3 @@ class ConversationVariableDetailApi(Resource): raise NotFound("Conversation Variable Not Exists.") except services.errors.conversation.ConversationVariableTypeMismatchError as e: raise BadRequest(str(e)) - - -api.add_resource(ConversationRenameApi, "/conversations//name", endpoint="conversation_name") -api.add_resource(ConversationApi, "/conversations") -api.add_resource(ConversationDetailApi, "/conversations/", endpoint="conversation_detail") -api.add_resource(ConversationVariablesApi, "/conversations//variables", endpoint="conversation_variables") -api.add_resource( - ConversationVariableDetailApi, - "/conversations//variables/", - endpoint="conversation_variable_detail", - methods=["PUT"], -) diff --git a/api/controllers/service_api/app/file.py b/api/controllers/service_api/app/file.py index 37153ca5db..05f27545b3 100644 --- a/api/controllers/service_api/app/file.py +++ b/api/controllers/service_api/app/file.py @@ -1,5 +1,6 @@ from flask import request -from flask_restful import Resource, marshal_with +from flask_restx import Resource +from flask_restx.api import HTTPStatus import services from controllers.common.errors import ( @@ -9,17 +10,33 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token -from fields.file_fields import file_fields +from fields.file_fields import build_file_model from models.model import App, EndUser from services.file_service import FileService +@service_api_ns.route("/files/upload") class FileApi(Resource): + @service_api_ns.doc("upload_file") + @service_api_ns.doc(description="Upload a file for use in conversations") + @service_api_ns.doc( + responses={ + 201: "File uploaded successfully", + 400: "Bad request - no file or invalid file", + 401: "Unauthorized - invalid API token", + 413: "File too large", + 415: "Unsupported file type", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) - @marshal_with(file_fields) + @service_api_ns.marshal_with(build_file_model(service_api_ns), code=HTTPStatus.CREATED) def post(self, app_model: App, end_user: EndUser): + """Upload a file for use in conversations. + + Accepts a single file upload via multipart/form-data. 
+ """ # check file if "file" not in request.files: raise NoFileUploadedError() @@ -47,6 +64,3 @@ class FileApi(Resource): raise UnsupportedFileTypeError() return upload_file, 201 - - -api.add_resource(FileApi, "/files/upload") diff --git a/api/controllers/service_api/app/file_preview.py b/api/controllers/service_api/app/file_preview.py index 57141033d1..84d80ea101 100644 --- a/api/controllers/service_api/app/file_preview.py +++ b/api/controllers/service_api/app/file_preview.py @@ -2,9 +2,9 @@ import logging from urllib.parse import quote from flask import Response -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ( FileAccessDeniedError, FileNotFoundError, @@ -17,6 +17,14 @@ from models.model import App, EndUser, Message, MessageFile, UploadFile logger = logging.getLogger(__name__) +# Define parser for file preview API +file_preview_parser = reqparse.RequestParser() +file_preview_parser.add_argument( + "as_attachment", type=bool, required=False, default=False, location="args", help="Download as attachment" +) + + +@service_api_ns.route("/files//preview") class FilePreviewApi(Resource): """ Service API File Preview endpoint @@ -25,33 +33,30 @@ class FilePreviewApi(Resource): Files can only be accessed if they belong to messages within the requesting app's context. """ + @service_api_ns.expect(file_preview_parser) + @service_api_ns.doc("preview_file") + @service_api_ns.doc(description="Preview or download a file uploaded via Service API") + @service_api_ns.doc(params={"file_id": "UUID of the file to preview"}) + @service_api_ns.doc( + responses={ + 200: "File retrieved successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - file access denied", + 404: "File not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) def get(self, app_model: App, end_user: EndUser, file_id: str): """ - Preview/Download a file that was uploaded via Service API + Preview/Download a file that was uploaded via Service API. - Args: - app_model: The authenticated app model - end_user: The authenticated end user (optional) - file_id: UUID of the file to preview - - Query Parameters: - user: Optional user identifier - as_attachment: Boolean, whether to download as attachment (default: false) - - Returns: - Stream response with file content - - Raises: - FileNotFoundError: File does not exist - FileAccessDeniedError: File access denied (not owned by app) + Provides secure file preview/download functionality. + Files can only be accessed if they belong to messages within the requesting app's context. 
""" file_id = str(file_id) # Parse query parameters - parser = reqparse.RequestParser() - parser.add_argument("as_attachment", type=bool, required=False, default=False, location="args") - args = parser.parse_args() + args = file_preview_parser.parse_args() # Validate file ownership and get file objects message_file, upload_file = self._validate_file_ownership(file_id, app_model.id) @@ -180,7 +185,3 @@ class FilePreviewApi(Resource): response.headers["Cache-Control"] = "public, max-age=3600" # Cache for 1 hour return response - - -# Register the API endpoint -api.add_resource(FilePreviewApi, "/files//preview") diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index a4f95cb1cb..fc506ef723 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -1,17 +1,17 @@ import json import logging -from flask_restful import Resource, fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Api, Namespace, Resource, fields, reqparse +from flask_restx.inputs import int_range from werkzeug.exceptions import BadRequest, InternalServerError, NotFound import services -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import NotChatAppError from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from core.app.entities.app_invoke_entities import InvokeFrom -from fields.conversation_fields import message_file_fields -from fields.message_fields import agent_thought_fields, feedback_fields +from fields.conversation_fields import build_message_file_model +from fields.message_fields import build_agent_thought_model, build_feedback_model from fields.raws import FilesContainedField from libs.helper import TimestampField, uuid_value from models.model import App, AppMode, EndUser @@ -22,8 +22,40 @@ from services.errors.message import ( ) from services.message_service import MessageService +logger = logging.getLogger(__name__) -class MessageListApi(Resource): + +# Define parsers for message APIs +message_list_parser = reqparse.RequestParser() +message_list_parser.add_argument( + "conversation_id", required=True, type=uuid_value, location="args", help="Conversation ID" +) +message_list_parser.add_argument("first_id", type=uuid_value, location="args", help="First message ID for pagination") +message_list_parser.add_argument( + "limit", type=int_range(1, 100), required=False, default=20, location="args", help="Number of messages to return" +) + +message_feedback_parser = reqparse.RequestParser() +message_feedback_parser.add_argument( + "rating", type=str, choices=["like", "dislike", None], location="json", help="Feedback rating" +) +message_feedback_parser.add_argument("content", type=str, location="json", help="Feedback content") + +feedback_list_parser = reqparse.RequestParser() +feedback_list_parser.add_argument("page", type=int, default=1, location="args", help="Page number") +feedback_list_parser.add_argument( + "limit", type=int_range(1, 101), required=False, default=20, location="args", help="Number of feedbacks per page" +) + + +def build_message_model(api_or_ns: Api | Namespace): + """Build the message model for the API or Namespace.""" + # First build the nested models + feedback_model = build_feedback_model(api_or_ns) + agent_thought_model = build_agent_thought_model(api_or_ns) + message_file_model = build_message_file_model(api_or_ns) + + # Then build the 
message fields with nested models message_fields = { "id": fields.String, "conversation_id": fields.String, @@ -31,37 +63,58 @@ class MessageListApi(Resource): "inputs": FilesContainedField, "query": fields.String, "answer": fields.String(attribute="re_sign_file_url_answer"), - "message_files": fields.List(fields.Nested(message_file_fields)), - "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), + "message_files": fields.List(fields.Nested(message_file_model)), + "feedback": fields.Nested(feedback_model, attribute="user_feedback", allow_null=True), "retriever_resources": fields.Raw( attribute=lambda obj: json.loads(obj.message_metadata).get("retriever_resources", []) if obj.message_metadata else [] ), "created_at": TimestampField, - "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), + "agent_thoughts": fields.List(fields.Nested(agent_thought_model)), "status": fields.String, "error": fields.String, } + return api_or_ns.model("Message", message_fields) + + +def build_message_infinite_scroll_pagination_model(api_or_ns: Api | Namespace): + """Build the message infinite scroll pagination model for the API or Namespace.""" + # Build the nested message model first + message_model = build_message_model(api_or_ns) message_infinite_scroll_pagination_fields = { "limit": fields.Integer, "has_more": fields.Boolean, - "data": fields.List(fields.Nested(message_fields)), + "data": fields.List(fields.Nested(message_model)), } + return api_or_ns.model("MessageInfiniteScrollPagination", message_infinite_scroll_pagination_fields) + +@service_api_ns.route("/messages") +class MessageListApi(Resource): + @service_api_ns.expect(message_list_parser) + @service_api_ns.doc("list_messages") + @service_api_ns.doc(description="List messages in a conversation") + @service_api_ns.doc( + responses={ + 200: "Messages retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Conversation or first message not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) - @marshal_with(message_infinite_scroll_pagination_fields) + @service_api_ns.marshal_with(build_message_infinite_scroll_pagination_model(service_api_ns)) def get(self, app_model: App, end_user: EndUser): + """List messages in a conversation. + + Retrieves messages with pagination support using first_id. 
+ """ app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: raise NotChatAppError() - parser = reqparse.RequestParser() - parser.add_argument("conversation_id", required=True, type=uuid_value, location="args") - parser.add_argument("first_id", type=uuid_value, location="args") - parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args") - args = parser.parse_args() + args = message_list_parser.parse_args() try: return MessageService.pagination_by_first_id( @@ -73,15 +126,28 @@ class MessageListApi(Resource): raise NotFound("First Message Not Exists.") +@service_api_ns.route("/messages//feedbacks") class MessageFeedbackApi(Resource): + @service_api_ns.expect(message_feedback_parser) + @service_api_ns.doc("create_message_feedback") + @service_api_ns.doc(description="Submit feedback for a message") + @service_api_ns.doc(params={"message_id": "Message ID"}) + @service_api_ns.doc( + responses={ + 200: "Feedback submitted successfully", + 401: "Unauthorized - invalid API token", + 404: "Message not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser, message_id): + """Submit feedback for a message. + + Allows users to rate messages as like/dislike and provide optional feedback content. + """ message_id = str(message_id) - parser = reqparse.RequestParser() - parser.add_argument("rating", type=str, choices=["like", "dislike", None], location="json") - parser.add_argument("content", type=str, location="json") - args = parser.parse_args() + args = message_feedback_parser.parse_args() try: MessageService.create_feedback( @@ -97,21 +163,48 @@ class MessageFeedbackApi(Resource): return {"result": "success"} +@service_api_ns.route("/app/feedbacks") class AppGetFeedbacksApi(Resource): + @service_api_ns.expect(feedback_list_parser) + @service_api_ns.doc("get_app_feedbacks") + @service_api_ns.doc(description="Get all feedbacks for the application") + @service_api_ns.doc( + responses={ + 200: "Feedbacks retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_app_token def get(self, app_model: App): - """Get All Feedbacks of an app""" - parser = reqparse.RequestParser() - parser.add_argument("page", type=int, default=1, location="args") - parser.add_argument("limit", type=int_range(1, 101), required=False, default=20, location="args") - args = parser.parse_args() + """Get all feedbacks for the application. + + Returns paginated list of all feedback submitted for messages in this app. 
+ """ + args = feedback_list_parser.parse_args() feedbacks = MessageService.get_all_messages_feedbacks(app_model, page=args["page"], limit=args["limit"]) return {"data": feedbacks} +@service_api_ns.route("/messages//suggested") class MessageSuggestedApi(Resource): + @service_api_ns.doc("get_suggested_questions") + @service_api_ns.doc(description="Get suggested follow-up questions for a message") + @service_api_ns.doc(params={"message_id": "Message ID"}) + @service_api_ns.doc( + responses={ + 200: "Suggested questions retrieved successfully", + 400: "Suggested questions feature is disabled", + 401: "Unauthorized - invalid API token", + 404: "Message not found", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY, required=True)) def get(self, app_model: App, end_user: EndUser, message_id): + """Get suggested follow-up questions for a message. + + Returns AI-generated follow-up questions based on the message content. + """ message_id = str(message_id) app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -126,13 +219,7 @@ class MessageSuggestedApi(Resource): except SuggestedQuestionsAfterAnswerDisabledError: raise BadRequest("Suggested Questions Is Disabled.") except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() return {"result": "success", "data": questions} - - -api.add_resource(MessageListApi, "/messages") -api.add_resource(MessageFeedbackApi, "/messages//feedbacks") -api.add_resource(MessageSuggestedApi, "/messages//suggested") -api.add_resource(AppGetFeedbacksApi, "/app/feedbacks") diff --git a/api/controllers/service_api/app/site.py b/api/controllers/service_api/app/site.py index c157b39f6b..9f8324a84e 100644 --- a/api/controllers/service_api/app/site.py +++ b/api/controllers/service_api/app/site.py @@ -1,30 +1,41 @@ -from flask_restful import Resource, marshal_with +from flask_restx import Resource from werkzeug.exceptions import Forbidden -from controllers.common import fields -from controllers.service_api import api +from controllers.common.fields import build_site_model +from controllers.service_api import service_api_ns from controllers.service_api.wraps import validate_app_token from extensions.ext_database import db from models.account import TenantStatus from models.model import App, Site +@service_api_ns.route("/site") class AppSiteApi(Resource): """Resource for app sites.""" + @service_api_ns.doc("get_app_site") + @service_api_ns.doc(description="Get application site configuration") + @service_api_ns.doc( + responses={ + 200: "Site configuration retrieved successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - site not found or tenant archived", + } + ) @validate_app_token - @marshal_with(fields.site_fields) + @service_api_ns.marshal_with(build_site_model(service_api_ns)) def get(self, app_model: App): - """Retrieve app site info.""" + """Retrieve app site info. + + Returns the site configuration for the application including theme, icons, and text. 
+ """ site = db.session.query(Site).where(Site.app_id == app_model.id).first() if not site: raise Forbidden() + assert app_model.tenant if app_model.tenant.status == TenantStatus.ARCHIVE: raise Forbidden() return site - - -api.add_resource(AppSiteApi, "/site") diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index cd8a5f03ac..f175766e61 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -2,12 +2,12 @@ import logging from dateutil.parser import isoparse from flask import request -from flask_restful import Resource, fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import Api, Namespace, Resource, fields, reqparse +from flask_restx.inputs import int_range from sqlalchemy.orm import Session, sessionmaker from werkzeug.exceptions import BadRequest, InternalServerError, NotFound -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ( CompletionRequestError, NotWorkflowAppError, @@ -28,7 +28,7 @@ from core.helper.trace_id_helper import get_external_trace_id from core.model_runtime.errors.invoke import InvokeError from core.workflow.entities.workflow_execution import WorkflowExecutionStatus from extensions.ext_database import db -from fields.workflow_app_log_fields import workflow_app_log_pagination_fields +from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model from libs import helper from libs.helper import TimestampField from models.model import App, AppMode, EndUser @@ -40,6 +40,34 @@ from services.workflow_app_service import WorkflowAppService logger = logging.getLogger(__name__) +# Define parsers for workflow APIs +workflow_run_parser = reqparse.RequestParser() +workflow_run_parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") +workflow_run_parser.add_argument("files", type=list, required=False, location="json") +workflow_run_parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") + +workflow_log_parser = reqparse.RequestParser() +workflow_log_parser.add_argument("keyword", type=str, location="args") +workflow_log_parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") +workflow_log_parser.add_argument("created_at__before", type=str, location="args") +workflow_log_parser.add_argument("created_at__after", type=str, location="args") +workflow_log_parser.add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, +) +workflow_log_parser.add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, +) +workflow_log_parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") +workflow_log_parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") + workflow_run_fields = { "id": fields.String, "workflow_id": fields.String, @@ -55,12 +83,29 @@ workflow_run_fields = { } +def build_workflow_run_model(api_or_ns: Api | Namespace): + """Build the workflow run model for the API or Namespace.""" + return api_or_ns.model("WorkflowRun", workflow_run_fields) + + +@service_api_ns.route("/workflows/run/") class WorkflowRunDetailApi(Resource): + @service_api_ns.doc("get_workflow_run_detail") + @service_api_ns.doc(description="Get workflow run details") + 
@service_api_ns.doc(params={"workflow_run_id": "Workflow run ID"}) + @service_api_ns.doc( + responses={ + 200: "Workflow run details retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Workflow run not found", + } + ) @validate_app_token - @marshal_with(workflow_run_fields) + @service_api_ns.marshal_with(build_workflow_run_model(service_api_ns)) def get(self, app_model: App, workflow_run_id: str): - """ - Get a workflow task running detail + """Get a workflow task running detail. + + Returns detailed information about a specific workflow run. """ app_mode = AppMode.value_of(app_model.mode) if app_mode not in [AppMode.WORKFLOW, AppMode.ADVANCED_CHAT]: @@ -78,21 +123,33 @@ class WorkflowRunDetailApi(Resource): return workflow_run +@service_api_ns.route("/workflows/run") class WorkflowRunApi(Resource): + @service_api_ns.expect(workflow_run_parser) + @service_api_ns.doc("run_workflow") + @service_api_ns.doc(description="Execute a workflow") + @service_api_ns.doc( + responses={ + 200: "Workflow executed successfully", + 400: "Bad request - invalid parameters or workflow issues", + 401: "Unauthorized - invalid API token", + 404: "Workflow not found", + 429: "Rate limit exceeded", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser): - """ - Run workflow + """Execute a workflow. + + Runs a workflow with the provided inputs and returns the results. + Supports both blocking and streaming response modes. """ app_mode = AppMode.value_of(app_model.mode) if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - args = parser.parse_args() + args = workflow_run_parser.parse_args() external_trace_id = get_external_trace_id(request) if external_trace_id: args["external_trace_id"] = external_trace_id @@ -117,25 +174,37 @@ class WorkflowRunApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() +@service_api_ns.route("/workflows/<string:workflow_id>/run") class WorkflowRunByIdApi(Resource): + @service_api_ns.expect(workflow_run_parser) + @service_api_ns.doc("run_workflow_by_id") + @service_api_ns.doc(description="Execute a specific workflow by ID") + @service_api_ns.doc(params={"workflow_id": "Workflow ID to execute"}) + @service_api_ns.doc( + responses={ + 200: "Workflow executed successfully", + 400: "Bad request - invalid parameters or workflow issues", + 401: "Unauthorized - invalid API token", + 404: "Workflow not found", + 429: "Rate limit exceeded", + 500: "Internal server error", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser, workflow_id: str): - """ - Run specific workflow by ID + """Run specific workflow by ID. + + Executes a specific workflow version identified by its ID. """
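For reference, a minimal blocking invocation of the generic run endpoint; the response shape is an assumption inferred from `workflow_run_fields` above, and the URL and token are placeholders:

```python
import requests

API_BASE = "https://api.dify.ai/v1"  # placeholder
API_KEY = "app-xxxxxxxx"             # placeholder app API token

def run_workflow(inputs: dict, user: str) -> dict:
    """Execute a workflow in blocking mode and return the final result."""
    resp = requests.post(
        f"{API_BASE}/workflows/run",
        headers={"Authorization": f"Bearer {API_KEY}"},
        json={"inputs": inputs, "response_mode": "blocking", "user": user},
    )
    resp.raise_for_status()
    return resp.json()

result = run_workflow({"topic": "release notes"}, user="user-123")
# "data"/"status" keys assumed from workflow_run_fields; verify against the live schema
print(result.get("data", {}).get("status"))
```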
""" app_mode = AppMode.value_of(app_model.mode) if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() - parser = reqparse.RequestParser() - parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") - parser.add_argument("files", type=list, required=False, location="json") - parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json") - args = parser.parse_args() + args = workflow_run_parser.parse_args() # Add workflow_id to args for AppGenerateService args["workflow_id"] = workflow_id @@ -170,16 +239,25 @@ class WorkflowRunByIdApi(Resource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() +@service_api_ns.route("/workflows/tasks//stop") class WorkflowTaskStopApi(Resource): + @service_api_ns.doc("stop_workflow_task") + @service_api_ns.doc(description="Stop a running workflow task") + @service_api_ns.doc(params={"task_id": "Task ID to stop"}) + @service_api_ns.doc( + responses={ + 200: "Task stopped successfully", + 401: "Unauthorized - invalid API token", + 404: "Task not found", + } + ) @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON, required=True)) def post(self, app_model: App, end_user: EndUser, task_id: str): - """ - Stop workflow task - """ + """Stop a running workflow task.""" app_mode = AppMode.value_of(app_model.mode) if app_mode != AppMode.WORKFLOW: raise NotWorkflowAppError() @@ -189,35 +267,25 @@ class WorkflowTaskStopApi(Resource): return {"result": "success"} +@service_api_ns.route("/workflows/logs") class WorkflowAppLogApi(Resource): + @service_api_ns.expect(workflow_log_parser) + @service_api_ns.doc("get_workflow_logs") + @service_api_ns.doc(description="Get workflow execution logs") + @service_api_ns.doc( + responses={ + 200: "Logs retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_app_token - @marshal_with(workflow_app_log_pagination_fields) + @service_api_ns.marshal_with(build_workflow_app_log_pagination_model(service_api_ns)) def get(self, app_model: App): + """Get workflow app logs. + + Returns paginated workflow execution logs with filtering options. 
""" - Get workflow app logs - """ - parser = reqparse.RequestParser() - parser.add_argument("keyword", type=str, location="args") - parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") - parser.add_argument("created_at__before", type=str, location="args") - parser.add_argument("created_at__after", type=str, location="args") - parser.add_argument( - "created_by_end_user_session_id", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument( - "created_by_account", - type=str, - location="args", - required=False, - default=None, - ) - parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") - parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") - args = parser.parse_args() + args = workflow_log_parser.parse_args() args.status = WorkflowExecutionStatus(args.status) if args.status else None if args.created_at__before: @@ -243,10 +311,3 @@ class WorkflowAppLogApi(Resource): ) return workflow_app_log_pagination - - -api.add_resource(WorkflowRunApi, "/workflows/run") -api.add_resource(WorkflowRunDetailApi, "/workflows/run/") -api.add_resource(WorkflowRunByIdApi, "/workflows//run") -api.add_resource(WorkflowTaskStopApi, "/workflows/tasks//stop") -api.add_resource(WorkflowAppLogApi, "/workflows/logs") diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 29eef41253..7b74c961bb 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -1,9 +1,11 @@ +from typing import Literal + from flask import request -from flask_restful import marshal, marshal_with, reqparse +from flask_restx import marshal, reqparse from werkzeug.exceptions import Forbidden, NotFound import services.dataset_service -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError from controllers.service_api.wraps import ( DatasetApiResource, @@ -14,8 +16,9 @@ from core.model_runtime.entities.model_entities import ModelType from core.plugin.entities.plugin import ModelProviderID from core.provider_manager import ProviderManager from fields.dataset_fields import dataset_detail_fields -from fields.tag_fields import tag_fields +from fields.tag_fields import build_dataset_tag_fields from libs.login import current_user +from models.account import Account from models.dataset import Dataset, DatasetPermissionEnum from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import RetrievalModel @@ -34,12 +37,171 @@ def _validate_description_length(description): return description +# Define parsers for dataset operations +dataset_create_parser = reqparse.RequestParser() +dataset_create_parser.add_argument( + "name", + nullable=False, + required=True, + help="type is required. 
Name must be between 1 to 40 characters.", + type=_validate_name, +) +dataset_create_parser.add_argument( + "description", + type=_validate_description_length, + nullable=True, + required=False, + default="", +) +dataset_create_parser.add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + help="Invalid indexing technique.", +) +dataset_create_parser.add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", + required=False, + nullable=False, +) +dataset_create_parser.add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + default="_validate_name", +) +dataset_create_parser.add_argument( + "provider", + type=str, + nullable=True, + required=False, + default="vendor", +) +dataset_create_parser.add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, +) +dataset_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") +dataset_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") +dataset_create_parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") + +dataset_update_parser = reqparse.RequestParser() +dataset_update_parser.add_argument( + "name", + nullable=False, + help="type is required. Name must be between 1 to 40 characters.", + type=_validate_name, +) +dataset_update_parser.add_argument( + "description", location="json", store_missing=False, type=_validate_description_length +) +dataset_update_parser.add_argument( + "indexing_technique", + type=str, + location="json", + choices=Dataset.INDEXING_TECHNIQUE_LIST, + nullable=True, + help="Invalid indexing technique.", +) +dataset_update_parser.add_argument( + "permission", + type=str, + location="json", + choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), + help="Invalid permission.", +) +dataset_update_parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") +dataset_update_parser.add_argument( + "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." 
+) +dataset_update_parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") +dataset_update_parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") +dataset_update_parser.add_argument( + "external_retrieval_model", + type=dict, + required=False, + nullable=True, + location="json", + help="Invalid external retrieval model.", +) +dataset_update_parser.add_argument( + "external_knowledge_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge id.", +) +dataset_update_parser.add_argument( + "external_knowledge_api_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge api id.", +) + +tag_create_parser = reqparse.RequestParser() +tag_create_parser.add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 50 characters.", + type=lambda x: x + if x and 1 <= len(x) <= 50 + else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), +) + +tag_update_parser = reqparse.RequestParser() +tag_update_parser.add_argument( + "name", + nullable=False, + required=True, + help="Name must be between 1 to 50 characters.", + type=lambda x: x + if x and 1 <= len(x) <= 50 + else (_ for _ in ()).throw(ValueError("Name must be between 1 to 50 characters.")), +) +tag_update_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) + +tag_delete_parser = reqparse.RequestParser() +tag_delete_parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) + +tag_binding_parser = reqparse.RequestParser() +tag_binding_parser.add_argument( + "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." +) +tag_binding_parser.add_argument( + "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." 
+) + +tag_unbinding_parser = reqparse.RequestParser() +tag_unbinding_parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") +tag_unbinding_parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") + + +@service_api_ns.route("/datasets") class DatasetListApi(DatasetApiResource): """Resource for datasets.""" + @service_api_ns.doc("list_datasets") + @service_api_ns.doc(description="List all datasets") + @service_api_ns.doc( + responses={ + 200: "Datasets retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) def get(self, tenant_id): """Resource for getting datasets.""" - page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) # provider = request.args.get("provider", default="vendor") @@ -52,7 +214,10 @@ class DatasetListApi(DatasetApiResource): ) # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + assert isinstance(current_user, Account) + cid = current_user.current_tenant_id + assert cid is not None + configurations = provider_manager.get_configurations(tenant_id=cid) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -74,65 +239,20 @@ class DatasetListApi(DatasetApiResource): response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page} return response, 200 + @service_api_ns.expect(dataset_create_parser) + @service_api_ns.doc("create_dataset") + @service_api_ns.doc(description="Create a new dataset") + @service_api_ns.doc( + responses={ + 200: "Dataset created successfully", + 401: "Unauthorized - invalid API token", + 400: "Bad request - invalid parameters", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id): """Resource for creating datasets.""" - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="type is required. 
Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument( - "description", - type=_validate_description_length, - nullable=True, - required=False, - default="", - ) - parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - help="Invalid indexing technique.", - ) - parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", - required=False, - nullable=False, - ) - parser.add_argument( - "external_knowledge_api_id", - type=str, - nullable=True, - required=False, - default="_validate_name", - ) - parser.add_argument( - "provider", - type=str, - nullable=True, - required=False, - default="vendor", - ) - parser.add_argument( - "external_knowledge_id", - type=str, - nullable=True, - required=False, - ) - parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") - parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") - parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") - - args = parser.parse_args() + args = dataset_create_parser.parse_args() if args.get("embedding_model_provider"): DatasetService.check_embedding_model_setting( @@ -150,6 +270,7 @@ class DatasetListApi(DatasetApiResource): ) try: + assert isinstance(current_user, Account) dataset = DatasetService.create_empty_dataset( tenant_id=tenant_id, name=args["name"], @@ -172,9 +293,21 @@ class DatasetListApi(DatasetApiResource): return marshal(dataset, dataset_detail_fields), 200 +@service_api_ns.route("/datasets/") class DatasetApi(DatasetApiResource): """Resource for dataset.""" + @service_api_ns.doc("get_dataset") + @service_api_ns.doc(description="Get a specific dataset by ID") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Dataset retrieved successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Dataset not found", + } + ) def get(self, _, dataset_id): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -191,7 +324,10 @@ class DatasetApi(DatasetApiResource): # check embedding setting provider_manager = ProviderManager() - configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) + assert isinstance(current_user, Account) + cid = current_user.current_tenant_id + assert cid is not None + configurations = provider_manager.get_configurations(tenant_id=cid) embedding_models = configurations.get_models(model_type=ModelType.TEXT_EMBEDDING, only_active=True) @@ -214,6 +350,18 @@ class DatasetApi(DatasetApiResource): return data, 200 + @service_api_ns.expect(dataset_update_parser) + @service_api_ns.doc("update_dataset") + @service_api_ns.doc(description="Update an existing dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Dataset updated successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Dataset not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, _, dataset_id): dataset_id_str = str(dataset_id) @@ -221,63 +369,7 @@ class DatasetApi(DatasetApiResource): if dataset is None: raise NotFound("Dataset not found.") 
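Reviewer note: these endpoints are authorized with a dataset-scoped token rather than an app token. A minimal creation sketch; the `indexing_technique` and `permission` values shown are common choices assumed from `Dataset.INDEXING_TECHNIQUE_LIST` and `DatasetPermissionEnum`, and the URL/token are placeholders:

```python
import requests

API_BASE = "https://api.dify.ai/v1"      # placeholder
DATASET_API_KEY = "dataset-xxxxxxxx"     # placeholder dataset-scoped token

def create_dataset(name: str, description: str = "") -> dict:
    """POST /datasets; the name is validated server-side to 1-40 characters."""
    resp = requests.post(
        f"{API_BASE}/datasets",
        headers={"Authorization": f"Bearer {DATASET_API_KEY}"},
        json={
            "name": name,
            "description": description,
            "indexing_technique": "high_quality",  # assumed member of INDEXING_TECHNIQUE_LIST
            "permission": "only_me",               # assumed value of DatasetPermissionEnum.ONLY_ME
        },
    )
    resp.raise_for_status()
    return resp.json()
```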
- parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - help="type is required. Name must be between 1 to 40 characters.", - type=_validate_name, - ) - parser.add_argument("description", location="json", store_missing=False, type=_validate_description_length) - parser.add_argument( - "indexing_technique", - type=str, - location="json", - choices=Dataset.INDEXING_TECHNIQUE_LIST, - nullable=True, - help="Invalid indexing technique.", - ) - parser.add_argument( - "permission", - type=str, - location="json", - choices=(DatasetPermissionEnum.ONLY_ME, DatasetPermissionEnum.ALL_TEAM, DatasetPermissionEnum.PARTIAL_TEAM), - help="Invalid permission.", - ) - parser.add_argument("embedding_model", type=str, location="json", help="Invalid embedding model.") - parser.add_argument( - "embedding_model_provider", type=str, location="json", help="Invalid embedding model provider." - ) - parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") - parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") - - parser.add_argument( - "external_retrieval_model", - type=dict, - required=False, - nullable=True, - location="json", - help="Invalid external retrieval model.", - ) - - parser.add_argument( - "external_knowledge_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge id.", - ) - - parser.add_argument( - "external_knowledge_api_id", - type=str, - required=False, - nullable=True, - location="json", - help="Invalid external knowledge api id.", - ) - args = parser.parse_args() + args = dataset_update_parser.parse_args() data = request.get_json() # check embedding model setting @@ -307,6 +399,7 @@ class DatasetApi(DatasetApiResource): raise NotFound("Dataset not found.") result_data = marshal(dataset, dataset_detail_fields) + assert isinstance(current_user, Account) tenant_id = current_user.current_tenant_id if data.get("partial_member_list") and data.get("permission") == "partial_members": @@ -325,6 +418,17 @@ class DatasetApi(DatasetApiResource): return result_data, 200 + @service_api_ns.doc("delete_dataset") + @service_api_ns.doc(description="Delete a dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 204: "Dataset deleted successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + 409: "Conflict - dataset is in use", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, _, dataset_id): """ @@ -355,17 +459,35 @@ class DatasetApi(DatasetApiResource): raise DatasetInUseError() +@service_api_ns.route("/datasets/<uuid:dataset_id>/documents/status/<string:action>") class DocumentStatusApi(DatasetApiResource): """Resource for batch document status operations.""" - def patch(self, tenant_id, dataset_id, action): + @service_api_ns.doc("update_document_status") + @service_api_ns.doc(description="Batch update document status") + @service_api_ns.doc( + params={ + "dataset_id": "Dataset ID", + "action": "Action to perform: 'enable', 'disable', 'archive', or 'un_archive'", + } + ) + @service_api_ns.doc( + responses={ + 200: "Document status updated successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Dataset not found", + 400: "Bad request - invalid action", + } + ) + def patch(self, tenant_id, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]): """ Batch update document status.
Args: tenant_id: tenant id dataset_id: dataset id - action: action to perform (enable, disable, archive, un_archive) + action: action to perform (Literal["enable", "disable", "archive", "un_archive"]) Returns: dict: A dictionary with a key 'result' and a value 'success' @@ -405,53 +527,70 @@ class DocumentStatusApi(DatasetApiResource): return {"result": "success"}, 200 +@service_api_ns.route("/datasets/tags") class DatasetTagsApi(DatasetApiResource): + @service_api_ns.doc("list_dataset_tags") + @service_api_ns.doc(description="Get all knowledge type tags") + @service_api_ns.doc( + responses={ + 200: "Tags retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_dataset_token - @marshal_with(tag_fields) + @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) def get(self, _, dataset_id): """Get all knowledge type tags.""" - tags = TagService.get_tags("knowledge", current_user.current_tenant_id) + assert isinstance(current_user, Account) + cid = current_user.current_tenant_id + assert cid is not None + tags = TagService.get_tags("knowledge", cid) return tags, 200 + @service_api_ns.expect(tag_create_parser) + @service_api_ns.doc("create_dataset_tag") + @service_api_ns.doc(description="Add a knowledge type tag") + @service_api_ns.doc( + responses={ + 200: "Tag created successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + } + ) + @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) @validate_dataset_token def post(self, _, dataset_id): """Add a knowledge type tag.""" + assert isinstance(current_user, Account) if not (current_user.is_editor or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 50 characters.", - type=DatasetTagsApi._validate_tag_name, - ) - - args = parser.parse_args() + args = tag_create_parser.parse_args() args["type"] = "knowledge" tag = TagService.save_tags(args) response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0} - return response, 200 + @service_api_ns.expect(tag_update_parser) + @service_api_ns.doc("update_dataset_tag") + @service_api_ns.doc(description="Update a knowledge type tag") + @service_api_ns.doc( + responses={ + 200: "Tag updated successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + } + ) + @service_api_ns.marshal_with(build_dataset_tag_fields(service_api_ns)) @validate_dataset_token def patch(self, _, dataset_id): + assert isinstance(current_user, Account) if not (current_user.is_editor or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( - "name", - nullable=False, - required=True, - help="Name must be between 1 to 50 characters.", - type=DatasetTagsApi._validate_tag_name, - ) - parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - args = parser.parse_args() + args = tag_update_parser.parse_args() args["type"] = "knowledge" tag = TagService.update_tags(args, args.get("tag_id")) @@ -461,80 +600,98 @@ class DatasetTagsApi(DatasetApiResource): return response, 200 + @service_api_ns.expect(tag_delete_parser) + @service_api_ns.doc("delete_dataset_tag") + @service_api_ns.doc(description="Delete a knowledge type tag") + @service_api_ns.doc( + responses={ + 204: "Tag deleted successfully", + 401: "Unauthorized - invalid API token", + 403: 
"Forbidden - insufficient permissions", + } + ) @validate_dataset_token def delete(self, _, dataset_id): """Delete a knowledge type tag.""" + assert isinstance(current_user, Account) if not current_user.is_editor: raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("tag_id", nullable=False, required=True, help="Id of a tag.", type=str) - args = parser.parse_args() + args = tag_delete_parser.parse_args() TagService.delete_tag(args.get("tag_id")) return 204 - @staticmethod - def _validate_tag_name(name): - if not name or len(name) < 1 or len(name) > 50: - raise ValueError("Name must be between 1 to 50 characters.") - return name - +@service_api_ns.route("/datasets/tags/binding") class DatasetTagBindingApi(DatasetApiResource): + @service_api_ns.expect(tag_binding_parser) + @service_api_ns.doc("bind_dataset_tags") + @service_api_ns.doc(description="Bind tags to a dataset") + @service_api_ns.doc( + responses={ + 204: "Tags bound successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + } + ) @validate_dataset_token def post(self, _, dataset_id): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator + assert isinstance(current_user, Account) if not (current_user.is_editor or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument( - "tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required." - ) - parser.add_argument( - "target_id", type=str, nullable=False, required=True, location="json", help="Target Dataset ID is required." - ) - - args = parser.parse_args() + args = tag_binding_parser.parse_args() args["type"] = "knowledge" TagService.save_tag_binding(args) return 204 +@service_api_ns.route("/datasets/tags/unbinding") class DatasetTagUnbindingApi(DatasetApiResource): + @service_api_ns.expect(tag_unbinding_parser) + @service_api_ns.doc("unbind_dataset_tag") + @service_api_ns.doc(description="Unbind a tag from a dataset") + @service_api_ns.doc( + responses={ + 204: "Tag unbound successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + } + ) @validate_dataset_token def post(self, _, dataset_id): # The role of the current user in the ta table must be admin, owner, editor, or dataset_operator + assert isinstance(current_user, Account) if not (current_user.is_editor or current_user.is_dataset_editor): raise Forbidden() - parser = reqparse.RequestParser() - parser.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.") - parser.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.") - - args = parser.parse_args() + args = tag_unbinding_parser.parse_args() args["type"] = "knowledge" TagService.delete_tag_binding(args) return 204 +@service_api_ns.route("/datasets//tags") class DatasetTagsBindingStatusApi(DatasetApiResource): + @service_api_ns.doc("get_dataset_tags_binding_status") + @service_api_ns.doc(description="Get tags bound to a specific dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Tags retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_dataset_token def get(self, _, *args, **kwargs): """Get all knowledge type tags.""" dataset_id = kwargs.get("dataset_id") + assert isinstance(current_user, Account) + assert current_user.current_tenant_id is not None tags = 
TagService.get_tags_by_target_id("knowledge", current_user.current_tenant_id, str(dataset_id)) tags_list = [{"id": tag.id, "name": tag.name} for tag in tags] response = {"data": tags_list, "total": len(tags)} return response, 200 - - -api.add_resource(DatasetListApi, "/datasets") -api.add_resource(DatasetApi, "/datasets/") -api.add_resource(DocumentStatusApi, "/datasets//documents/status/") -api.add_resource(DatasetTagsApi, "/datasets/tags") -api.add_resource(DatasetTagBindingApi, "/datasets/tags/binding") -api.add_resource(DatasetTagUnbindingApi, "/datasets/tags/unbinding") -api.add_resource(DatasetTagsBindingStatusApi, "/datasets//tags") diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index d0354f7851..43232229c8 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -1,7 +1,7 @@ import json from flask import request -from flask_restful import marshal, reqparse +from flask_restx import marshal, reqparse from sqlalchemy import desc, select from werkzeug.exceptions import Forbidden, NotFound @@ -13,7 +13,7 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ProviderNotInitializeError from controllers.service_api.dataset.error import ( ArchivedDocumentImmutableError, @@ -34,32 +34,64 @@ from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig from services.file_service import FileService +# Define parsers for document operations +document_text_create_parser = reqparse.RequestParser() +document_text_create_parser.add_argument("name", type=str, required=True, nullable=False, location="json") +document_text_create_parser.add_argument("text", type=str, required=True, nullable=False, location="json") +document_text_create_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") +document_text_create_parser.add_argument("original_document_id", type=str, required=False, location="json") +document_text_create_parser.add_argument( + "doc_form", type=str, default="text_model", required=False, nullable=False, location="json" +) +document_text_create_parser.add_argument( + "doc_language", type=str, default="English", required=False, nullable=False, location="json" +) +document_text_create_parser.add_argument( + "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" +) +document_text_create_parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") +document_text_create_parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") +document_text_create_parser.add_argument( + "embedding_model_provider", type=str, required=False, nullable=True, location="json" +) +document_text_update_parser = reqparse.RequestParser() +document_text_update_parser.add_argument("name", type=str, required=False, nullable=True, location="json") +document_text_update_parser.add_argument("text", type=str, required=False, nullable=True, location="json") +document_text_update_parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") +document_text_update_parser.add_argument( + "doc_form", type=str, default="text_model", required=False, 
nullable=False, location="json" +) +document_text_update_parser.add_argument( + "doc_language", type=str, default="English", required=False, nullable=False, location="json" +) +document_text_update_parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") + + +@service_api_ns.route( + "/datasets/<uuid:dataset_id>/document/create_by_text", + "/datasets/<uuid:dataset_id>/document/create-by-text", +) class DocumentAddByTextApi(DatasetApiResource): """Resource for documents.""" + @service_api_ns.expect(document_text_create_parser) + @service_api_ns.doc("create_document_by_text") + @service_api_ns.doc(description="Create a new document by providing text content") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Document created successfully", + 401: "Unauthorized - invalid API token", + 400: "Bad request - invalid parameters", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_resource_check("documents", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): """Create document by text.""" - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - parser.add_argument("text", type=str, required=True, nullable=False, location="json") - parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") - parser.add_argument("original_document_id", type=str, required=False, location="json") - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" - ) - parser.add_argument( - "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json" - ) - parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json") - parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json") - parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json") - - args = parser.parse_args() + args = document_text_create_parser.parse_args() dataset_id = str(dataset_id) tenant_id = str(tenant_id) @@ -117,23 +149,29 @@ class DocumentAddByTextApi(DatasetApiResource): return documents_and_batch_fields, 200 +@service_api_ns.route( + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update_by_text", + "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/update-by-text", +) class DocumentUpdateByTextApi(DatasetApiResource): """Resource for update documents.""" + @service_api_ns.expect(document_text_update_parser) + @service_api_ns.doc("update_document_by_text") + @service_api_ns.doc(description="Update an existing document by providing text content") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Document updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Document not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id): """Update document by text.""" - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=False, nullable=True, location="json") - parser.add_argument("text", type=str, required=False, nullable=True, location="json") - 
parser.add_argument("process_rule", type=dict, required=False, nullable=True, location="json") - parser.add_argument("doc_form", type=str, default="text_model", required=False, nullable=False, location="json") - parser.add_argument( - "doc_language", type=str, default="English", required=False, nullable=False, location="json" - ) - parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json") - args = parser.parse_args() + args = document_text_update_parser.parse_args() dataset_id = str(dataset_id) tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() @@ -187,9 +225,23 @@ class DocumentUpdateByTextApi(DatasetApiResource): return documents_and_batch_fields, 200 +@service_api_ns.route( + "/datasets//document/create_by_file", + "/datasets//document/create-by-file", +) class DocumentAddByFileApi(DatasetApiResource): """Resource for documents.""" + @service_api_ns.doc("create_document_by_file") + @service_api_ns.doc(description="Create a new document by uploading a file") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Document created successfully", + 401: "Unauthorized - invalid API token", + 400: "Bad request - invalid file or parameters", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_resource_check("documents", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") @@ -281,9 +333,23 @@ class DocumentAddByFileApi(DatasetApiResource): return documents_and_batch_fields, 200 +@service_api_ns.route( + "/datasets//documents//update_by_file", + "/datasets//documents//update-by-file", +) class DocumentUpdateByFileApi(DatasetApiResource): """Resource for update documents.""" + @service_api_ns.doc("update_document_by_file") + @service_api_ns.doc(description="Update an existing document by uploading a file") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Document updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Document not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id): @@ -358,7 +424,18 @@ class DocumentUpdateByFileApi(DatasetApiResource): return documents_and_batch_fields, 200 +@service_api_ns.route("/datasets//documents") class DocumentListApi(DatasetApiResource): + @service_api_ns.doc("list_documents") + @service_api_ns.doc(description="List all documents in a dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Documents retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + } + ) def get(self, tenant_id, dataset_id): dataset_id = str(dataset_id) tenant_id = str(tenant_id) @@ -391,7 +468,18 @@ class DocumentListApi(DatasetApiResource): return response +@service_api_ns.route("/datasets//documents//indexing-status") class DocumentIndexingStatusApi(DatasetApiResource): + @service_api_ns.doc("get_document_indexing_status") + @service_api_ns.doc(description="Get indexing status for documents in a batch") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "batch": "Batch ID"}) + @service_api_ns.doc( + responses={ + 200: "Indexing status retrieved successfully", + 401: "Unauthorized - invalid API 
token", + 404: "Dataset or documents not found", + } + ) def get(self, tenant_id, dataset_id, batch): dataset_id = str(dataset_id) batch = str(batch) @@ -440,9 +528,21 @@ class DocumentIndexingStatusApi(DatasetApiResource): return data +@service_api_ns.route("/datasets//documents/") class DocumentApi(DatasetApiResource): METADATA_CHOICES = {"all", "only", "without"} + @service_api_ns.doc("get_document") + @service_api_ns.doc(description="Get a specific document by ID") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Document retrieved successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - insufficient permissions", + 404: "Document not found", + } + ) def get(self, tenant_id, dataset_id, document_id): dataset_id = str(dataset_id) document_id = str(document_id) @@ -534,6 +634,17 @@ class DocumentApi(DatasetApiResource): return response + @service_api_ns.doc("delete_document") + @service_api_ns.doc(description="Delete a document") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 204: "Document deleted successfully", + 401: "Unauthorized - invalid API token", + 403: "Forbidden - document is archived", + 404: "Document not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, document_id): """Delete document.""" @@ -564,28 +675,3 @@ class DocumentApi(DatasetApiResource): raise DocumentIndexingError("Cannot delete document during indexing.") return 204 - - -api.add_resource( - DocumentAddByTextApi, - "/datasets//document/create_by_text", - "/datasets//document/create-by-text", -) -api.add_resource( - DocumentAddByFileApi, - "/datasets//document/create_by_file", - "/datasets//document/create-by-file", -) -api.add_resource( - DocumentUpdateByTextApi, - "/datasets//documents//update_by_text", - "/datasets//documents//update-by-text", -) -api.add_resource( - DocumentUpdateByFileApi, - "/datasets//documents//update_by_file", - "/datasets//documents//update-by-file", -) -api.add_resource(DocumentApi, "/datasets//documents/") -api.add_resource(DocumentListApi, "/datasets//documents") -api.add_resource(DocumentIndexingStatusApi, "/datasets//documents//indexing-status") diff --git a/api/controllers/service_api/dataset/hit_testing.py b/api/controllers/service_api/dataset/hit_testing.py index 52e9bca5da..d81287d56f 100644 --- a/api/controllers/service_api/dataset/hit_testing.py +++ b/api/controllers/service_api/dataset/hit_testing.py @@ -1,11 +1,26 @@ from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check +@service_api_ns.route("/datasets//hit-testing", "/datasets//retrieve") class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase): + @service_api_ns.doc("dataset_hit_testing") + @service_api_ns.doc(description="Perform hit testing on a dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Hit testing results", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): + """Perform hit testing on a dataset. 
+ + Tests retrieval performance for the specified dataset. + """ dataset_id_str = str(dataset_id) dataset = self.get_and_validate_dataset(dataset_id_str) @@ -13,6 +28,3 @@ class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase): self.hit_testing_args_check(args) return self.perform_hit_testing(dataset, args) - - -api.add_resource(HitTestingApi, "/datasets//hit-testing", "/datasets//retrieve") diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index 6ba818c5fc..9defe6af03 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -1,8 +1,10 @@ +from typing import Literal + from flask_login import current_user # type: ignore -from flask_restful import marshal, reqparse +from flask_restx import marshal, reqparse from werkzeug.exceptions import NotFound -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check from fields.dataset_fields import dataset_metadata_fields from services.dataset_service import DatasetService @@ -12,14 +14,43 @@ from services.entities.knowledge_entities.knowledge_entities import ( ) from services.metadata_service import MetadataService +# Define parsers for metadata APIs +metadata_create_parser = reqparse.RequestParser() +metadata_create_parser.add_argument( + "type", type=str, required=True, nullable=False, location="json", help="Metadata type" +) +metadata_create_parser.add_argument( + "name", type=str, required=True, nullable=False, location="json", help="Metadata name" +) +metadata_update_parser = reqparse.RequestParser() +metadata_update_parser.add_argument( + "name", type=str, required=True, nullable=False, location="json", help="New metadata name" +) + +document_metadata_parser = reqparse.RequestParser() +document_metadata_parser.add_argument( + "operation_data", type=list, required=True, nullable=False, location="json", help="Metadata operation data" +) + + +@service_api_ns.route("/datasets//metadata") class DatasetMetadataCreateServiceApi(DatasetApiResource): + @service_api_ns.expect(metadata_create_parser) + @service_api_ns.doc("create_dataset_metadata") + @service_api_ns.doc(description="Create metadata for a dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 201: "Metadata created successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): - parser = reqparse.RequestParser() - parser.add_argument("type", type=str, required=True, nullable=False, location="json") - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - args = parser.parse_args() + """Create metadata for a dataset.""" + args = metadata_create_parser.parse_args() metadata_args = MetadataArgs(**args) dataset_id_str = str(dataset_id) @@ -31,7 +62,18 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource): metadata = MetadataService.create_metadata(dataset_id_str, metadata_args) return marshal(metadata, dataset_metadata_fields), 201 + @service_api_ns.doc("get_dataset_metadata") + @service_api_ns.doc(description="Get all metadata for a dataset") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Metadata retrieved successfully", + 401: "Unauthorized - invalid 
API token", + 404: "Dataset not found", + } + ) def get(self, tenant_id, dataset_id): + """Get all metadata for a dataset.""" dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -39,12 +81,23 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource): return MetadataService.get_dataset_metadatas(dataset), 200 +@service_api_ns.route("/datasets//metadata/") class DatasetMetadataServiceApi(DatasetApiResource): + @service_api_ns.expect(metadata_update_parser) + @service_api_ns.doc("update_dataset_metadata") + @service_api_ns.doc(description="Update metadata name") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "metadata_id": "Metadata ID"}) + @service_api_ns.doc( + responses={ + 200: "Metadata updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset or metadata not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id, dataset_id, metadata_id): - parser = reqparse.RequestParser() - parser.add_argument("name", type=str, required=True, nullable=False, location="json") - args = parser.parse_args() + """Update metadata name.""" + args = metadata_update_parser.parse_args() dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) @@ -56,8 +109,19 @@ class DatasetMetadataServiceApi(DatasetApiResource): metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) return marshal(metadata, dataset_metadata_fields), 200 + @service_api_ns.doc("delete_dataset_metadata") + @service_api_ns.doc(description="Delete metadata") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "metadata_id": "Metadata ID"}) + @service_api_ns.doc( + responses={ + 204: "Metadata deleted successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset or metadata not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, metadata_id): + """Delete metadata.""" dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -69,15 +133,37 @@ class DatasetMetadataServiceApi(DatasetApiResource): return 204 +@service_api_ns.route("/datasets/metadata/built-in") class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource): + @service_api_ns.doc("get_built_in_fields") + @service_api_ns.doc(description="Get all built-in metadata fields") + @service_api_ns.doc( + responses={ + 200: "Built-in fields retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) def get(self, tenant_id): + """Get all built-in metadata fields.""" built_in_fields = MetadataService.get_built_in_fields() return {"fields": built_in_fields}, 200 +@service_api_ns.route("/datasets//metadata/built-in/") class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource): + @service_api_ns.doc("toggle_built_in_field") + @service_api_ns.doc(description="Enable or disable built-in metadata field") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "action": "Action to perform: 'enable' or 'disable'"}) + @service_api_ns.doc( + responses={ + 200: "Action completed successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def post(self, tenant_id, dataset_id, action): + def post(self, tenant_id, dataset_id, action: Literal["enable", "disable"]): + """Enable or disable built-in metadata field.""" dataset_id_str = 
str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: @@ -91,29 +177,31 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource): return 200 +@service_api_ns.route("/datasets/<uuid:dataset_id>/documents/metadata") class DocumentMetadataEditServiceApi(DatasetApiResource): + @service_api_ns.expect(document_metadata_parser) + @service_api_ns.doc("update_documents_metadata") + @service_api_ns.doc(description="Update metadata for multiple documents") + @service_api_ns.doc(params={"dataset_id": "Dataset ID"}) + @service_api_ns.doc( + responses={ + 200: "Documents metadata updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): + """Update metadata for multiple documents.""" dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) if dataset is None: raise NotFound("Dataset not found.") DatasetService.check_dataset_permission(dataset, current_user) - parser = reqparse.RequestParser() - parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json") - args = parser.parse_args() + args = document_metadata_parser.parse_args() metadata_args = MetadataOperationData(**args) MetadataService.update_documents_metadata(dataset, metadata_args) return 200 - - -api.add_resource(DatasetMetadataCreateServiceApi, "/datasets/<uuid:dataset_id>/metadata") -api.add_resource(DatasetMetadataServiceApi, "/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>") -api.add_resource(DatasetMetadataBuiltInFieldServiceApi, "/datasets/metadata/built-in") -api.add_resource( - DatasetMetadataBuiltInFieldActionServiceApi, "/datasets/<uuid:dataset_id>/metadata/built-in/<string:action>" -) -api.add_resource(DocumentMetadataEditServiceApi, "/datasets/<uuid:dataset_id>/documents/metadata") diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index 31f862dc8f..f5e2010ca4 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -1,9 +1,9 @@ from flask import request from flask_login import current_user -from flask_restful import marshal, reqparse +from flask_restx import marshal, reqparse from werkzeug.exceptions import NotFound -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.app.error import ProviderNotInitializeError from controllers.service_api.wraps import ( DatasetApiResource, @@ -19,34 +19,59 @@ from fields.segment_fields import child_chunk_fields, segment_fields from models.dataset import Dataset from services.dataset_service import DatasetService, DocumentService, SegmentService from services.entities.knowledge_entities.knowledge_entities import SegmentUpdateArgs -from services.errors.chunk import ( - ChildChunkDeleteIndexError, - ChildChunkIndexingError, -) -from services.errors.chunk import ( - ChildChunkDeleteIndexError as ChildChunkDeleteIndexServiceError, -) -from services.errors.chunk import ( - ChildChunkIndexingError as ChildChunkIndexingServiceError, -) +from services.errors.chunk import ChildChunkDeleteIndexError, ChildChunkIndexingError +from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDeleteIndexServiceError +from services.errors.chunk import ChildChunkIndexingError as ChildChunkIndexingServiceError + +# Define parsers for segment operations +segment_create_parser = reqparse.RequestParser() +segment_create_parser.add_argument("segments", type=list, required=False, 
nullable=True, location="json") + +segment_list_parser = reqparse.RequestParser() +segment_list_parser.add_argument("status", type=str, action="append", default=[], location="args") +segment_list_parser.add_argument("keyword", type=str, default=None, location="args") + +segment_update_parser = reqparse.RequestParser() +segment_update_parser.add_argument("segment", type=dict, required=False, nullable=True, location="json") + +child_chunk_create_parser = reqparse.RequestParser() +child_chunk_create_parser.add_argument("content", type=str, required=True, nullable=False, location="json") + +child_chunk_list_parser = reqparse.RequestParser() +child_chunk_list_parser.add_argument("limit", type=int, default=20, location="args") +child_chunk_list_parser.add_argument("keyword", type=str, default=None, location="args") +child_chunk_list_parser.add_argument("page", type=int, default=1, location="args") + +child_chunk_update_parser = reqparse.RequestParser() +child_chunk_update_parser.add_argument("content", type=str, required=True, nullable=False, location="json") +@service_api_ns.route("/datasets//documents//segments") class SegmentApi(DatasetApiResource): """Resource for segments.""" + @service_api_ns.expect(segment_create_parser) + @service_api_ns.doc("create_segments") + @service_api_ns.doc(description="Create segments in a document") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Segments created successfully", + 400: "Bad request - segments data is missing", + 401: "Unauthorized - invalid API token", + 404: "Dataset or document not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def post(self, tenant_id, dataset_id, document_id): + def post(self, tenant_id: str, dataset_id: str, document_id: str): """Create single segment.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document - document_id = str(document_id) document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") @@ -71,9 +96,7 @@ class SegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) # validate args - parser = reqparse.RequestParser() - parser.add_argument("segments", type=list, required=False, nullable=True, location="json") - args = parser.parse_args() + args = segment_create_parser.parse_args() if args["segments"] is not None: for args_item in args["segments"]: SegmentService.segment_create_args_validate(args_item, document) @@ -82,18 +105,26 @@ class SegmentApi(DatasetApiResource): else: return {"error": "Segments is required"}, 400 - def get(self, tenant_id, dataset_id, document_id): + @service_api_ns.expect(segment_list_parser) + @service_api_ns.doc("list_segments") + @service_api_ns.doc(description="List segments in a document") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Segments retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset or document not found", + } + ) + def get(self, tenant_id: str, dataset_id: str, document_id: str): 
"""Get segments.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document - document_id = str(document_id) document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") @@ -114,10 +145,7 @@ class SegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) - parser = reqparse.RequestParser() - parser.add_argument("status", type=str, action="append", default=[], location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - args = parser.parse_args() + args = segment_list_parser.parse_args() segments, total = SegmentService.get_segments( document_id=document_id, @@ -140,43 +168,62 @@ class SegmentApi(DatasetApiResource): return response, 200 +@service_api_ns.route("/datasets//documents//segments/") class DatasetSegmentApi(DatasetApiResource): + @service_api_ns.doc("delete_segment") + @service_api_ns.doc(description="Delete a specific segment") + @service_api_ns.doc( + params={"dataset_id": "Dataset ID", "document_id": "Document ID", "segment_id": "Segment ID to delete"} + ) + @service_api_ns.doc( + responses={ + 204: "Segment deleted successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or segment not found", + } + ) @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def delete(self, tenant_id, dataset_id, document_id, segment_id): + def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check user's model setting DatasetService.check_dataset_model_setting(dataset) # check document - document_id = str(document_id) document = DocumentService.get_document(dataset_id, document_id) if not document: raise NotFound("Document not found.") # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") SegmentService.delete_segment(segment, document, dataset) return 204 + @service_api_ns.expect(segment_update_parser) + @service_api_ns.doc("update_segment") + @service_api_ns.doc(description="Update a specific segment") + @service_api_ns.doc( + params={"dataset_id": "Dataset ID", "document_id": "Document ID", "segment_id": "Segment ID to update"} + ) + @service_api_ns.doc( + responses={ + 200: "Segment updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or segment not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def post(self, tenant_id, dataset_id, document_id, segment_id): + def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise 
NotFound("Dataset not found.") # check user's model setting DatasetService.check_dataset_model_setting(dataset) # check document - document_id = str(document_id) document = DocumentService.get_document(dataset_id, document_id) if not document: raise NotFound("Document not found.") @@ -197,37 +244,39 @@ class DatasetSegmentApi(DatasetApiResource): except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") # validate args - parser = reqparse.RequestParser() - parser.add_argument("segment", type=dict, required=False, nullable=True, location="json") - args = parser.parse_args() + args = segment_update_parser.parse_args() updated_segment = SegmentService.update_segment( SegmentUpdateArgs(**args["segment"]), segment, document, dataset ) return {"data": marshal(updated_segment, segment_fields), "doc_form": document.doc_form}, 200 - def get(self, tenant_id, dataset_id, document_id, segment_id): + @service_api_ns.doc("get_segment") + @service_api_ns.doc(description="Get a specific segment by ID") + @service_api_ns.doc( + responses={ + 200: "Segment retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or segment not found", + } + ) + def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check user's model setting DatasetService.check_dataset_model_setting(dataset) # check document - document_id = str(document_id) document = DocumentService.get_document(dataset_id, document_id) if not document: raise NotFound("Document not found.") # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -235,29 +284,41 @@ class DatasetSegmentApi(DatasetApiResource): return {"data": marshal(segment, segment_fields), "doc_form": document.doc_form}, 200 +@service_api_ns.route( + "/datasets//documents//segments//child_chunks" +) class ChildChunkApi(DatasetApiResource): """Resource for child chunks.""" + @service_api_ns.expect(child_chunk_create_parser) + @service_api_ns.doc("create_child_chunk") + @service_api_ns.doc(description="Create a new child chunk for a segment") + @service_api_ns.doc( + params={"dataset_id": "Dataset ID", "document_id": "Document ID", "segment_id": "Parent segment ID"} + ) + @service_api_ns.doc( + responses={ + 200: "Child chunk created successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or segment not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def post(self, tenant_id, dataset_id, document_id, segment_id): + def post(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): """Create child chunk.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if 
not dataset: raise NotFound("Dataset not found.") # check document - document_id = str(document_id) document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") @@ -280,43 +341,46 @@ class ChildChunkApi(DatasetApiResource): raise ProviderNotInitializeError(ex.description) # validate args - parser = reqparse.RequestParser() - parser.add_argument("content", type=str, required=True, nullable=False, location="json") - args = parser.parse_args() + args = child_chunk_create_parser.parse_args() try: - child_chunk = SegmentService.create_child_chunk(args.get("content"), segment, document, dataset) + child_chunk = SegmentService.create_child_chunk(args["content"], segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 - def get(self, tenant_id, dataset_id, document_id, segment_id): + @service_api_ns.expect(child_chunk_list_parser) + @service_api_ns.doc("list_child_chunks") + @service_api_ns.doc(description="List child chunks for a segment") + @service_api_ns.doc( + params={"dataset_id": "Dataset ID", "document_id": "Document ID", "segment_id": "Parent segment ID"} + ) + @service_api_ns.doc( + responses={ + 200: "Child chunks retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or segment not found", + } + ) + def get(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str): """Get child chunks.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document - document_id = str(document_id) document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") - parser = reqparse.RequestParser() - parser.add_argument("limit", type=int, default=20, location="args") - parser.add_argument("keyword", type=str, default=None, location="args") - parser.add_argument("page", type=int, default=1, location="args") - args = parser.parse_args() + args = child_chunk_list_parser.parse_args() page = args["page"] limit = min(args["limit"], 100) @@ -333,40 +397,63 @@ class ChildChunkApi(DatasetApiResource): }, 200 +@service_api_ns.route( + "/datasets//documents//segments//child_chunks/" +) class DatasetChildChunkApi(DatasetApiResource): """Resource for updating child chunks.""" + @service_api_ns.doc("delete_child_chunk") + @service_api_ns.doc(description="Delete a specific child chunk") + @service_api_ns.doc( + params={ + "dataset_id": "Dataset ID", + "document_id": "Document ID", + "segment_id": "Parent segment ID", + "child_chunk_id": "Child chunk ID to delete", + } + ) + @service_api_ns.doc( + responses={ + 204: "Child chunk deleted successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, segment, or child chunk not found", + } + ) @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") 
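The delete and patch handlers in this hunk also tighten parent/child ownership: a segment fetched by ID must belong to the document named in the URL, and a child chunk must belong to that segment, otherwise the handler raises NotFound (see the added `segment.document_id != document_id` and `child_chunk.segment_id != segment.id` checks below). A minimal sketch of that pattern, with illustrative names (`repo`, `get_segment`, `get_child_chunk` are stand-ins, not this PR's actual services):

```python
# Sketch of the nested-ownership validation added in this hunk. Every child
# resource looked up by ID is verified against the parent captured in the URL,
# so a valid ID from another document/segment cannot be read or deleted
# through a mismatched path. All names here are hypothetical.
from werkzeug.exceptions import NotFound

def get_owned_child_chunk(repo, document_id: str, segment_id: str, child_chunk_id: str):
    segment = repo.get_segment(segment_id)
    # 404 if the segment is missing OR hangs off a different document
    if segment is None or segment.document_id != document_id:
        raise NotFound("Segment not found.")
    child_chunk = repo.get_child_chunk(child_chunk_id)
    # same rule one level down: the chunk must belong to *this* segment
    if child_chunk is None or child_chunk.segment_id != segment.id:
        raise NotFound("Child chunk not found.")
    return child_chunk
```

Answering 404 for both "missing" and "belongs to another parent" avoids leaking whether an ID exists outside the caller's path.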
@cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def delete(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id): + def delete(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): """Delete child chunk.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") # check document - document_id = str(document_id) document = DocumentService.get_document(dataset.id, document_id) if not document: raise NotFound("Document not found.") # check segment - segment_id = str(segment_id) segment = SegmentService.get_segment_by_id(segment_id=segment_id, tenant_id=current_user.current_tenant_id) if not segment: raise NotFound("Segment not found.") + # validate segment belongs to the specified document + if segment.document_id != document_id: + raise NotFound("Document not found.") + # check child chunk - child_chunk_id = str(child_chunk_id) child_chunk = SegmentService.get_child_chunk_by_id( child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id ) if not child_chunk: raise NotFound("Child chunk not found.") + # validate child chunk belongs to the specified segment + if child_chunk.segment_id != segment.id: + raise NotFound("Child chunk not found.") + try: SegmentService.delete_child_chunk(child_chunk, dataset) except ChildChunkDeleteIndexServiceError as e: @@ -374,14 +461,30 @@ class DatasetChildChunkApi(DatasetApiResource): return 204 + @service_api_ns.expect(child_chunk_update_parser) + @service_api_ns.doc("update_child_chunk") + @service_api_ns.doc(description="Update a specific child chunk") + @service_api_ns.doc( + params={ + "dataset_id": "Dataset ID", + "document_id": "Document ID", + "segment_id": "Parent segment ID", + "child_chunk_id": "Child chunk ID to update", + } + ) + @service_api_ns.doc( + responses={ + 200: "Child chunk updated successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, segment, or child chunk not found", + } + ) @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") @cloud_edition_billing_rate_limit_check("knowledge", "dataset") - def patch(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id): + def patch(self, tenant_id: str, dataset_id: str, document_id: str, segment_id: str, child_chunk_id: str): """Update child chunk.""" # check dataset - dataset_id = str(dataset_id) - tenant_id = str(tenant_id) dataset = db.session.query(Dataset).where(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first() if not dataset: raise NotFound("Dataset not found.") @@ -396,6 +499,10 @@ class DatasetChildChunkApi(DatasetApiResource): if not segment: raise NotFound("Segment not found.") + # validate segment belongs to the specified document + if segment.document_id != document_id: + raise NotFound("Segment not found.") + # get child chunk child_chunk = SegmentService.get_child_chunk_by_id( child_chunk_id=child_chunk_id, tenant_id=current_user.current_tenant_id @@ -403,29 +510,16 @@ class DatasetChildChunkApi(DatasetApiResource): if not child_chunk: raise NotFound("Child chunk not found.") + # validate child chunk belongs to the specified segment + if child_chunk.segment_id != segment.id: + raise NotFound("Child chunk not found.") + # validate args - parser = reqparse.RequestParser() - 
parser.add_argument("content", type=str, required=True, nullable=False, location="json") - args = parser.parse_args() + args = child_chunk_update_parser.parse_args() try: - child_chunk = SegmentService.update_child_chunk( - args.get("content"), child_chunk, segment, document, dataset - ) + child_chunk = SegmentService.update_child_chunk(args["content"], child_chunk, segment, document, dataset) except ChildChunkIndexingServiceError as e: raise ChildChunkIndexingError(str(e)) return {"data": marshal(child_chunk, child_chunk_fields)}, 200 - - -api.add_resource(SegmentApi, "/datasets//documents//segments") -api.add_resource( - DatasetSegmentApi, "/datasets//documents//segments/" -) -api.add_resource( - ChildChunkApi, "/datasets//documents//segments//child_chunks" -) -api.add_resource( - DatasetChildChunkApi, - "/datasets//documents//segments//child_chunks/", -) diff --git a/api/controllers/service_api/dataset/upload_file.py b/api/controllers/service_api/dataset/upload_file.py index 3b4721b5b0..27b36a6402 100644 --- a/api/controllers/service_api/dataset/upload_file.py +++ b/api/controllers/service_api/dataset/upload_file.py @@ -1,6 +1,6 @@ from werkzeug.exceptions import NotFound -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import ( DatasetApiResource, ) @@ -11,9 +11,23 @@ from models.model import UploadFile from services.dataset_service import DocumentService +@service_api_ns.route("/datasets//documents//upload-file") class UploadFileApi(DatasetApiResource): + @service_api_ns.doc("get_upload_file") + @service_api_ns.doc(description="Get upload file information and download URL") + @service_api_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"}) + @service_api_ns.doc( + responses={ + 200: "Upload file information retrieved successfully", + 401: "Unauthorized - invalid API token", + 404: "Dataset, document, or upload file not found", + } + ) def get(self, tenant_id, dataset_id, document_id): - """Get upload file.""" + """Get upload file information and download URL. + + Returns information about an uploaded file including its download URL. 
+ """ # check dataset dataset_id = str(dataset_id) tenant_id = str(tenant_id) @@ -49,6 +63,3 @@ class UploadFileApi(DatasetApiResource): "created_by": upload_file.created_by, "created_at": upload_file.created_at.timestamp(), }, 200 - - -api.add_resource(UploadFileApi, "/datasets//documents//upload-file") diff --git a/api/controllers/service_api/index.py b/api/controllers/service_api/index.py index 9bb5df4c4e..a9d2d6fadc 100644 --- a/api/controllers/service_api/index.py +++ b/api/controllers/service_api/index.py @@ -1,9 +1,10 @@ -from flask_restful import Resource +from flask_restx import Resource from configs import dify_config -from controllers.service_api import api +from controllers.service_api import service_api_ns +@service_api_ns.route("/") class IndexApi(Resource): def get(self): return { @@ -11,6 +12,3 @@ class IndexApi(Resource): "api_version": "v1", "server_version": dify_config.project.version, } - - -api.add_resource(IndexApi, "/") diff --git a/api/controllers/service_api/workspace/models.py b/api/controllers/service_api/workspace/models.py index 3f18474674..536cf81a2f 100644 --- a/api/controllers/service_api/workspace/models.py +++ b/api/controllers/service_api/workspace/models.py @@ -1,21 +1,32 @@ from flask_login import current_user -from flask_restful import Resource +from flask_restx import Resource -from controllers.service_api import api +from controllers.service_api import service_api_ns from controllers.service_api.wraps import validate_dataset_token from core.model_runtime.utils.encoders import jsonable_encoder from services.model_provider_service import ModelProviderService +@service_api_ns.route("/workspaces/current/models/model-types/") class ModelProviderAvailableModelApi(Resource): + @service_api_ns.doc("get_available_models") + @service_api_ns.doc(description="Get available models by model type") + @service_api_ns.doc(params={"model_type": "Type of model to retrieve"}) + @service_api_ns.doc( + responses={ + 200: "Models retrieved successfully", + 401: "Unauthorized - invalid API token", + } + ) @validate_dataset_token def get(self, _, model_type): + """Get available models by model type. + + Returns a list of available models for the specified model type. 
+ """ tenant_id = current_user.current_tenant_id model_provider_service = ModelProviderService() models = model_provider_service.get_models_by_model_type(tenant_id=tenant_id, model_type=model_type) return jsonable_encoder({"data": models}) - - -api.add_resource(ModelProviderAvailableModelApi, "/workspaces/current/models/model-types/") diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index da81cc8bc3..8aac3de4c3 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -7,7 +7,7 @@ from typing import Optional from flask import current_app, request from flask_login import user_logged_in # type: ignore -from flask_restful import Resource +from flask_restx import Resource from pydantic import BaseModel from sqlalchemy import select, update from sqlalchemy.orm import Session diff --git a/api/controllers/web/__init__.py b/api/controllers/web/__init__.py index 56749a0e25..3b0a9e341a 100644 --- a/api/controllers/web/__init__.py +++ b/api/controllers/web/__init__.py @@ -1,19 +1,20 @@ from flask import Blueprint +from flask_restx import Namespace from libs.external_api import ExternalApi -from .files import FileApi -from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi - bp = Blueprint("web", __name__, url_prefix="/api") -api = ExternalApi(bp) -# Files -api.add_resource(FileApi, "/files/upload") +api = ExternalApi( + bp, + version="1.0", + title="Web API", + description="Public APIs for web applications including file uploads, chat interactions, and app management", + doc="/docs", # Enable Swagger UI at /api/docs +) -# Remote files -api.add_resource(RemoteFileInfoApi, "/remote-files/") -api.add_resource(RemoteFileUploadApi, "/remote-files/upload") +# Create namespace +web_ns = Namespace("web", description="Web application API operations", path="/") from . import ( app, @@ -21,11 +22,15 @@ from . 
import ( completion, conversation, feature, + files, forgot_password, login, message, passport, + remote_files, saved_message, site, workflow, ) + +api.add_namespace(web_ns) diff --git a/api/controllers/web/app.py b/api/controllers/web/app.py index 197859e8f3..e0c3e997ce 100644 --- a/api/controllers/web/app.py +++ b/api/controllers/web/app.py @@ -1,9 +1,11 @@ +import logging + from flask import request -from flask_restful import Resource, marshal_with, reqparse +from flask_restx import Resource, marshal_with, reqparse from werkzeug.exceptions import Unauthorized from controllers.common import fields -from controllers.web import api +from controllers.web import web_ns from controllers.web.error import AppUnavailableError from controllers.web.wraps import WebApiResource from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict @@ -14,10 +16,25 @@ from services.enterprise.enterprise_service import EnterpriseService from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService +logger = logging.getLogger(__name__) + +@web_ns.route("/parameters") class AppParameterApi(WebApiResource): """Resource for app variables.""" + @web_ns.doc("Get App Parameters") + @web_ns.doc(description="Retrieve the parameters for a specific app.") + @web_ns.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) @marshal_with(fields.parameters_fields) def get(self, app_model: App, end_user): """Retrieve app parameters.""" @@ -40,13 +57,42 @@ class AppParameterApi(WebApiResource): return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) +@web_ns.route("/meta") class AppMeta(WebApiResource): + @web_ns.doc("Get App Meta") + @web_ns.doc(description="Retrieve the metadata for a specific app.") + @web_ns.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) def get(self, app_model: App, end_user): """Get app meta""" return AppService().get_app_meta(app_model) +@web_ns.route("/webapp/access-mode") class AppAccessMode(Resource): + @web_ns.doc("Get App Access Mode") + @web_ns.doc(description="Retrieve the access mode for a web application (public or restricted).") + @web_ns.doc( + params={ + "appId": {"description": "Application ID", "type": "string", "required": False}, + "appCode": {"description": "Application code", "type": "string", "required": False}, + } + ) + @web_ns.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 500: "Internal Server Error", + } + ) def get(self): parser = reqparse.RequestParser() parser.add_argument("appId", type=str, required=False, location="args") @@ -70,7 +116,19 @@ class AppAccessMode(Resource): return {"accessMode": res.access_mode} +@web_ns.route("/webapp/permission") class AppWebAuthPermission(Resource): + @web_ns.doc("Check App Permission") + @web_ns.doc(description="Check if user has permission to access a web application.") + @web_ns.doc(params={"appId": {"description": "Application ID", "type": "string", "required": True}}) + @web_ns.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 500: "Internal Server Error", + } + ) def get(self): user_id = "visitor" try: @@ -87,8 +145,11 @@ class AppWebAuthPermission(Resource): decoded = PassportService().verify(tk) user_id = decoded.get("user_id", "visitor") - 
except Exception as e: - pass + except Unauthorized: + raise + except Exception: + logger.exception("Unexpected error during auth verification") + raise features = FeatureService.get_system_features() if not features.webapp_auth.enabled: @@ -105,10 +166,3 @@ class AppWebAuthPermission(Resource): if WebAppAuthService.is_app_require_permission_check(app_id=app_id): res = EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(str(user_id), app_code) return {"result": res} - - -api.add_resource(AppParameterApi, "/parameters") -api.add_resource(AppMeta, "/meta") -# webapp auth apis -api.add_resource(AppAccessMode, "/webapp/access-mode") -api.add_resource(AppWebAuthPermission, "/webapp/permission") diff --git a/api/controllers/web/audio.py b/api/controllers/web/audio.py index 2919ca9af4..2c0f6c9759 100644 --- a/api/controllers/web/audio.py +++ b/api/controllers/web/audio.py @@ -1,6 +1,7 @@ import logging from flask import request +from flask_restx import fields, marshal_with, reqparse from werkzeug.exceptions import InternalServerError import services @@ -28,9 +29,30 @@ from services.errors.audio import ( UnsupportedAudioTypeServiceError, ) +logger = logging.getLogger(__name__) + class AudioApi(WebApiResource): + audio_to_text_response_fields = { + "text": fields.String, + } + + @marshal_with(audio_to_text_response_fields) + @api.doc("Audio to Text") + @api.doc(description="Convert audio file to text using speech-to-text service.") + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 413: "Audio file too large", + 415: "Unsupported audio type", + 500: "Internal Server Error", + } + ) def post(self, app_model: App, end_user): + """Convert audio to text""" file = request.files["file"] try: @@ -38,7 +60,7 @@ class AudioApi(WebApiResource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -59,14 +81,30 @@ class AudioApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception("Failed to handle post request to AudioApi") + logger.exception("Failed to handle post request to AudioApi") raise InternalServerError() class TextApi(WebApiResource): - def post(self, app_model: App, end_user): - from flask_restful import reqparse + text_to_audio_response_fields = { + "audio_url": fields.String, + "duration": fields.Float, + } + @marshal_with(text_to_audio_response_fields) + @api.doc("Text to Audio") + @api.doc(description="Convert text to audio using text-to-speech service.") + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 500: "Internal Server Error", + } + ) + def post(self, app_model: App, end_user): + """Convert text to audio""" try: parser = reqparse.RequestParser() parser.add_argument("message_id", type=str, required=False, location="json") @@ -84,7 +122,7 @@ class TextApi(WebApiResource): return response except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except NoAudioUploadedServiceError: raise NoAudioUploadedError() @@ -105,7 +143,7 @@ class TextApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception("Failed to handle post request to TextApi") 
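The audio and completion controllers in this part of the diff also swap bare `logging.exception(...)` module calls for a named module-level logger; a small sketch of the pattern (the logger name is what lets logging config filter or route output per controller module):

```python
# Sketch of the module-level logger pattern adopted in these controllers.
import logging

logger = logging.getLogger(__name__)  # records the defining module's name

def handle_request():
    try:
        raise RuntimeError("boom")
    except Exception:
        # logger.exception logs at ERROR level and attaches the active traceback
        logger.exception("Failed to handle post request")
        raise
```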
+ logger.exception("Failed to handle post request to TextApi") raise InternalServerError() diff --git a/api/controllers/web/completion.py b/api/controllers/web/completion.py index fd3b9aa804..a42bf5fc6e 100644 --- a/api/controllers/web/completion.py +++ b/api/controllers/web/completion.py @@ -1,6 +1,6 @@ import logging -from flask_restful import reqparse +from flask_restx import reqparse from werkzeug.exceptions import InternalServerError, NotFound import services @@ -31,9 +31,37 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.llm import InvokeRateLimitError +logger = logging.getLogger(__name__) + # define completion api for user class CompletionApi(WebApiResource): + @api.doc("Create Completion Message") + @api.doc(description="Create a completion message for text generation applications.") + @api.doc( + params={ + "inputs": {"description": "Input variables for the completion", "type": "object", "required": True}, + "query": {"description": "Query text for completion", "type": "string", "required": False}, + "files": {"description": "Files to be processed", "type": "array", "required": False}, + "response_mode": { + "description": "Response mode: blocking or streaming", + "type": "string", + "enum": ["blocking", "streaming"], + "required": False, + }, + "retriever_from": {"description": "Source of retriever", "type": "string", "required": False}, + } + ) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model, end_user): if app_model.mode != "completion": raise NotCompletionAppError() @@ -61,7 +89,7 @@ class CompletionApi(WebApiResource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -74,11 +102,24 @@ class CompletionApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() class CompletionStopApi(WebApiResource): + @api.doc("Stop Completion Message") + @api.doc(description="Stop a running completion message task.") + @api.doc(params={"task_id": {"description": "Task ID to stop", "type": "string", "required": True}}) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Task Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model, end_user, task_id): if app_model.mode != "completion": raise NotCompletionAppError() @@ -89,6 +130,34 @@ class CompletionStopApi(WebApiResource): class ChatApi(WebApiResource): + @api.doc("Create Chat Message") + @api.doc(description="Create a chat message for conversational applications.") + @api.doc( + params={ + "inputs": {"description": "Input variables for the chat", "type": "object", "required": True}, + "query": {"description": "User query/message", "type": "string", "required": True}, + "files": {"description": "Files to be processed", "type": "array", "required": False}, + "response_mode": { + "description": "Response mode: blocking or streaming", + "type": "string", + "enum": ["blocking", 
"streaming"], + "required": False, + }, + "conversation_id": {"description": "Conversation UUID", "type": "string", "required": False}, + "parent_message_id": {"description": "Parent message UUID", "type": "string", "required": False}, + "retriever_from": {"description": "Source of retriever", "type": "string", "required": False}, + } + ) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model, end_user): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -119,7 +188,7 @@ class ChatApi(WebApiResource): except services.errors.conversation.ConversationCompletedError: raise ConversationCompletedError() except services.errors.app_model_config.AppModelConfigBrokenError: - logging.exception("App model config broken.") + logger.exception("App model config broken.") raise AppUnavailableError() except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -134,11 +203,24 @@ class ChatApi(WebApiResource): except ValueError as e: raise e except Exception as e: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() class ChatStopApi(WebApiResource): + @api.doc("Stop Chat Message") + @api.doc(description="Stop a running chat message task.") + @api.doc(params={"task_id": {"description": "Task ID to stop", "type": "string", "required": True}}) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Task Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model, end_user, task_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: diff --git a/api/controllers/web/conversation.py b/api/controllers/web/conversation.py index 98cea3974f..ea41388268 100644 --- a/api/controllers/web/conversation.py +++ b/api/controllers/web/conversation.py @@ -1,5 +1,5 @@ -from flask_restful import marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import fields, marshal_with, reqparse +from flask_restx.inputs import int_range from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound @@ -58,6 +58,11 @@ class ConversationListApi(WebApiResource): class ConversationApi(WebApiResource): + delete_response_fields = { + "result": fields.String, + } + + @marshal_with(delete_response_fields) def delete(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -94,6 +99,11 @@ class ConversationRenameApi(WebApiResource): class ConversationPinApi(WebApiResource): + pin_response_fields = { + "result": fields.String, + } + + @marshal_with(pin_response_fields) def patch(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -110,6 +120,11 @@ class ConversationPinApi(WebApiResource): class ConversationUnPinApi(WebApiResource): + unpin_response_fields = { + "result": fields.String, + } + + @marshal_with(unpin_response_fields) def patch(self, app_model, end_user, c_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: diff --git a/api/controllers/web/feature.py 
b/api/controllers/web/feature.py index 0563ed2238..cce3dae95d 100644 --- a/api/controllers/web/feature.py +++ b/api/controllers/web/feature.py @@ -1,12 +1,21 @@ -from flask_restful import Resource +from flask_restx import Resource -from controllers.web import api +from controllers.web import web_ns from services.feature_service import FeatureService +@web_ns.route("/system-features") class SystemFeatureApi(Resource): + @web_ns.doc("get_system_features") + @web_ns.doc(description="Get system feature flags and configuration") + @web_ns.doc(responses={200: "System features retrieved successfully", 500: "Internal server error"}) def get(self): + """Get system feature flags and configuration. + + Returns the current system feature flags and configuration + that control various functionalities across the platform. + + Returns: + dict: System feature configuration object + """ return FeatureService.get_system_features().model_dump() - - -api.add_resource(SystemFeatureApi, "/system-features") diff --git a/api/controllers/web/files.py b/api/controllers/web/files.py index 0c30435825..7508874fae 100644 --- a/api/controllers/web/files.py +++ b/api/controllers/web/files.py @@ -1,5 +1,5 @@ from flask import request -from flask_restful import marshal_with +from flask_restx import marshal_with import services from controllers.common.errors import ( @@ -9,14 +9,50 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.web import web_ns from controllers.web.wraps import WebApiResource -from fields.file_fields import file_fields +from fields.file_fields import build_file_model from services.file_service import FileService +@web_ns.route("/files/upload") class FileApi(WebApiResource): - @marshal_with(file_fields) + @web_ns.doc("upload_file") + @web_ns.doc(description="Upload a file for use in web applications") + @web_ns.doc( + responses={ + 201: "File uploaded successfully", + 400: "Bad request - invalid file or parameters", + 413: "File too large", + 415: "Unsupported file type", + } + ) + @marshal_with(build_file_model(web_ns)) def post(self, app_model, end_user): + """Upload a file for use in web applications. + + Accepts file uploads for use within web applications, supporting + multiple file types with automatic validation and storage. 
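As a quick sanity check of the `/files/upload` route registered above, a client-side sketch of the upload call; the base URL, token value, and file name here are illustrative assumptions, not values defined by this PR:

```python
# Hypothetical client for the /files/upload route shown above.
# BASE_URL and TOKEN are placeholders: the web API expects a passport
# token (obtained elsewhere) as a bearer credential.
import requests

BASE_URL = "http://localhost:5001/api"  # assumed local deployment root
TOKEN = "<passport-token>"  # hypothetical token

with open("example.png", "rb") as f:
    resp = requests.post(
        f"{BASE_URL}/files/upload",
        headers={"Authorization": f"Bearer {TOKEN}"},
        files={"file": ("example.png", f, "image/png")},
    )

# Expect 201 with id/url metadata on success, per the response docs above.
print(resp.status_code, resp.json())
```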
+ + Args: + app_model: The associated application model + end_user: The end user uploading the file + + Form Parameters: + file: The file to upload (required) + source: Optional source type (datasets or None) + + Returns: + dict: File information including ID, URL, and metadata + int: HTTP status code 201 for success + + Raises: + NoFileUploadedError: No file provided in request + TooManyFilesError: Multiple files provided (only one allowed) + FilenameNotExistsError: File has no filename + FileTooLargeError: File exceeds size limit + UnsupportedFileTypeError: File type not supported + """ if "file" not in request.files: raise NoFileUploadedError() diff --git a/api/controllers/web/forgot_password.py b/api/controllers/web/forgot_password.py index 0da8d65efc..c743d0f52b 100644 --- a/api/controllers/web/forgot_password.py +++ b/api/controllers/web/forgot_password.py @@ -2,20 +2,21 @@ import base64 import secrets from flask import request -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from sqlalchemy import select from sqlalchemy.orm import Session from controllers.console.auth.error import ( + AuthenticationFailedError, EmailCodeError, EmailPasswordResetLimitError, InvalidEmailError, InvalidTokenError, PasswordMismatchError, ) -from controllers.console.error import AccountNotFound, EmailSendIpLimitError +from controllers.console.error import EmailSendIpLimitError from controllers.console.wraps import email_password_login_enabled, only_edition_enterprise, setup_required -from controllers.web import api +from controllers.web import web_ns from extensions.ext_database import db from libs.helper import email, extract_remote_ip from libs.password import hash_password, valid_password @@ -23,10 +24,21 @@ from models.account import Account from services.account_service import AccountService +@web_ns.route("/forgot-password") class ForgotPasswordSendEmailApi(Resource): @only_edition_enterprise @setup_required @email_password_login_enabled + @web_ns.doc("send_forgot_password_email") + @web_ns.doc(description="Send password reset email") + @web_ns.doc( + responses={ + 200: "Password reset email sent successfully", + 400: "Bad request - invalid email format", + 401: "Authentication failed", + 429: "Too many requests - rate limit exceeded", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("email", type=email, required=True, location="json") @@ -46,17 +58,23 @@ account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none() token = None if account is None: - raise AccountNotFound() + raise AuthenticationFailedError() else: token = AccountService.send_reset_password_email(account=account, email=args["email"], language=language) return {"result": "success", "data": token} +@web_ns.route("/forgot-password/validity") class ForgotPasswordCheckApi(Resource): @only_edition_enterprise @setup_required @email_password_login_enabled + @web_ns.doc("check_forgot_password_token") + @web_ns.doc(description="Verify password reset token validity") + @web_ns.doc( + responses={200: "Token is valid", 400: "Bad request - invalid token format", 401: "Invalid or expired token"} + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("email", type=str, required=True, location="json") @@ -93,10 +111,20 @@ return {"is_valid": True, "email": token_data.get("email"), "token": new_token} +@web_ns.route("/forgot-password/resets") class
ForgotPasswordResetApi(Resource): @only_edition_enterprise @setup_required @email_password_login_enabled + @web_ns.doc("reset_password") + @web_ns.doc(description="Reset user password with verification token") + @web_ns.doc( + responses={ + 200: "Password reset successfully", + 400: "Bad request - invalid parameters or password mismatch", + 401: "Invalid or expired token, or authentication failed", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("token", type=str, required=True, nullable=False, location="json") @@ -131,7 +160,7 @@ if account: self._update_existing_account(account, password_hashed, salt, session) else: - raise AccountNotFound() + raise AuthenticationFailedError() return {"result": "success"} @@ -140,8 +169,3 @@ account.password = base64.b64encode(password_hashed).decode() account.password_salt = base64.b64encode(salt).decode() session.commit() - - -api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password") -api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity") -api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets") diff --git a/api/controllers/web/login.py b/api/controllers/web/login.py index 01c4f4a262..d2b7c72baa 100644 --- a/api/controllers/web/login.py +++ b/api/controllers/web/login.py @@ -1,22 +1,37 @@ -from flask_restful import Resource, reqparse +from flask_restx import Resource, reqparse from jwt import InvalidTokenError # type: ignore import services -from controllers.console.auth.error import EmailCodeError, EmailOrPasswordMismatchError, InvalidEmailError -from controllers.console.error import AccountBannedError, AccountNotFound +from controllers.console.auth.error import ( + AuthenticationFailedError, + EmailCodeError, + InvalidEmailError, +) +from controllers.console.error import AccountBannedError from controllers.console.wraps import only_edition_enterprise, setup_required -from controllers.web import api +from controllers.web import web_ns from libs.helper import email from libs.password import valid_password from services.account_service import AccountService from services.webapp_auth_service import WebAppAuthService +@web_ns.route("/login") class LoginApi(Resource): """Resource for web app email/password login.""" @setup_required @only_edition_enterprise + @web_ns.doc("web_app_login") + @web_ns.doc(description="Authenticate user for web application access") + @web_ns.doc( + responses={ + 200: "Authentication successful", + 400: "Bad request - invalid email or password format", + 401: "Authentication failed - invalid credentials or unknown account", + 403: "Account banned or login disabled", + } + ) def post(self): """Authenticate user and login.""" parser = reqparse.RequestParser() @@ -29,9 +45,9 @@ except services.errors.account.AccountLoginError: raise AccountBannedError() except services.errors.account.AccountPasswordError: - raise EmailOrPasswordMismatchError() + raise AuthenticationFailedError() except services.errors.account.AccountNotFoundError: - raise AccountNotFound() + raise AuthenticationFailedError() token = WebAppAuthService.login(account=account) return {"result": "success", "data": {"access_token": token}} @@ -47,9 +63,19 @@ # return {"result": "success"} +@web_ns.route("/email-code-login") class EmailCodeLoginSendEmailApi(Resource): @setup_required @only_edition_enterprise + @web_ns.doc("send_email_code_login") +
@web_ns.doc(description="Send email verification code for login") + @web_ns.doc( + responses={ + 200: "Email code sent successfully", + 400: "Bad request - invalid email format", + 401: "Authentication failed", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("email", type=email, required=True, location="json") @@ -63,16 +89,26 @@ account = WebAppAuthService.get_user_through_email(args["email"]) if account is None: - raise AccountNotFound() + raise AuthenticationFailedError() else: token = WebAppAuthService.send_email_code_login_email(account=account, language=language) return {"result": "success", "data": token} +@web_ns.route("/email-code-login/validity") class EmailCodeLoginApi(Resource): @setup_required @only_edition_enterprise + @web_ns.doc("verify_email_code_login") + @web_ns.doc(description="Verify email code and complete login") + @web_ns.doc( + responses={ + 200: "Email code verified and login successful", + 400: "Bad request - invalid code or token", + 401: "Invalid token, expired code, or authentication failed", + } + ) def post(self): parser = reqparse.RequestParser() parser.add_argument("email", type=str, required=True, location="json") @@ -95,14 +132,8 @@ WebAppAuthService.revoke_email_code_login_token(args["token"]) account = WebAppAuthService.get_user_through_email(user_email) if not account: - raise AccountNotFound() + raise AuthenticationFailedError() token = WebAppAuthService.login(account=account) AccountService.reset_login_error_rate_limit(args["email"]) return {"result": "success", "data": {"access_token": token}} - - -api.add_resource(LoginApi, "/login") -# api.add_resource(LogoutApi, "/logout") -api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login") -api.add_resource(EmailCodeLoginApi, "/email-code-login/validity") diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index 7bb81cd0d3..17e06e8856 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -1,7 +1,7 @@ import logging -from flask_restful import fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import fields, marshal_with, reqparse +from flask_restx.inputs import int_range from werkzeug.exceptions import InternalServerError, NotFound from controllers.web import api @@ -35,6 +35,8 @@ from services.errors.message import ( ) from services.message_service import MessageService +logger = logging.getLogger(__name__) + class MessageListApi(WebApiResource): message_fields = { @@ -83,6 +85,11 @@ class MessageFeedbackApi(WebApiResource): + feedback_response_fields = { + "result": fields.String, + } + + @marshal_with(feedback_response_fields) def post(self, app_model, end_user, message_id): message_id = str(message_id) @@ -145,11 +152,16 @@ except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() class MessageSuggestedQuestionApi(WebApiResource): + suggested_questions_response_fields = { + "data": fields.List(fields.String), + } + + @marshal_with(suggested_questions_response_fields) def get(self, app_model, end_user, message_id): app_mode = AppMode.value_of(app_model.mode) if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}: @@ -176,7 +188,7 @@ class
MessageSuggestedQuestionApi(WebApiResource): except InvokeError as e: raise CompletionRequestError(e.description) except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() return {"data": questions} diff --git a/api/controllers/web/passport.py b/api/controllers/web/passport.py index acd3a8b539..6f7105a724 100644 --- a/api/controllers/web/passport.py +++ b/api/controllers/web/passport.py @@ -2,12 +2,12 @@ import uuid from datetime import UTC, datetime, timedelta from flask import request -from flask_restful import Resource +from flask_restx import Resource from sqlalchemy import func, select from werkzeug.exceptions import NotFound, Unauthorized from configs import dify_config -from controllers.web import api +from controllers.web import web_ns from controllers.web.error import WebAppAuthRequiredError from extensions.ext_database import db from libs.passport import PassportService @@ -17,9 +17,19 @@ from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService, WebAppAuthType +@web_ns.route("/passport") class PassportResource(Resource): """Base resource for passport.""" + @web_ns.doc("get_passport") + @web_ns.doc(description="Get authentication passport for web application access") + @web_ns.doc( + responses={ + 200: "Passport retrieved successfully", + 401: "Unauthorized - missing app code or invalid authentication", + 404: "Application or user not found", + } + ) def get(self): system_features = FeatureService.get_system_features() app_code = request.headers.get("X-App-Code") @@ -94,9 +104,6 @@ class PassportResource(Resource): } -api.add_resource(PassportResource, "/passport") - - def decode_enterprise_webapp_user_id(jwt_token: str | None): """ Decode the enterprise user session from the Authorization header. diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py index 4e19716c3d..ab20c7667c 100644 --- a/api/controllers/web/remote_files.py +++ b/api/controllers/web/remote_files.py @@ -1,7 +1,7 @@ import urllib.parse import httpx -from flask_restful import marshal_with, reqparse +from flask_restx import marshal_with, reqparse import services from controllers.common import helpers @@ -10,16 +10,44 @@ from controllers.common.errors import ( RemoteFileUploadError, UnsupportedFileTypeError, ) +from controllers.web import web_ns from controllers.web.wraps import WebApiResource from core.file import helpers as file_helpers from core.helper import ssrf_proxy -from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields +from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model from services.file_service import FileService +@web_ns.route("/remote-files/") class RemoteFileInfoApi(WebApiResource): - @marshal_with(remote_file_info_fields) + @web_ns.doc("get_remote_file_info") + @web_ns.doc(description="Get information about a remote file") + @web_ns.doc( + responses={ + 200: "Remote file information retrieved successfully", + 400: "Bad request - invalid URL", + 404: "Remote file not found", + 500: "Failed to fetch remote file", + } + ) + @marshal_with(build_remote_file_info_model(web_ns)) def get(self, app_model, end_user, url): + """Get information about a remote file. + + Retrieves basic information about a file located at a remote URL, + including content type and content length. 
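For context, the remote-file probe described here boils down to a single HEAD request; a standalone sketch using httpx directly (the real handler routes the request through `core.helper.ssrf_proxy` to guard against SSRF, which this simplification deliberately omits):

```python
# Standalone sketch of the remote-file probe this endpoint performs.
# Plain httpx stands in for Dify's ssrf_proxy wrapper; the example URL
# is illustrative only.
import urllib.parse

import httpx


def probe_remote_file(encoded_url: str) -> dict:
    url = urllib.parse.unquote(encoded_url)
    resp = httpx.head(url, follow_redirects=True)
    resp.raise_for_status()
    return {
        "file_type": resp.headers.get("Content-Type", "application/octet-stream"),
        "file_length": int(resp.headers.get("Content-Length", "-1")),
    }


if __name__ == "__main__":
    print(probe_remote_file("https%3A%2F%2Fexample.com%2Flogo.png"))
```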
+ + Args: + app_model: The associated application model + end_user: The end user making the request + url: URL-encoded path to the remote file + + Returns: + dict: Remote file information including type and length + + Raises: + HTTPException: If the remote file cannot be accessed + """ decoded_url = urllib.parse.unquote(url) resp = ssrf_proxy.head(decoded_url) if resp.status_code != httpx.codes.OK: @@ -32,9 +60,42 @@ class RemoteFileInfoApi(WebApiResource): } +@web_ns.route("/remote-files/upload") class RemoteFileUploadApi(WebApiResource): - @marshal_with(file_fields_with_signed_url) - def post(self, app_model, end_user): # Add app_model and end_user parameters + @web_ns.doc("upload_remote_file") + @web_ns.doc(description="Upload a file from a remote URL") + @web_ns.doc( + responses={ + 201: "Remote file uploaded successfully", + 400: "Bad request - invalid URL or parameters", + 413: "File too large", + 415: "Unsupported file type", + 500: "Failed to fetch remote file", + } + ) + @marshal_with(build_file_with_signed_url_model(web_ns)) + def post(self, app_model, end_user): + """Upload a file from a remote URL. + + Downloads a file from the provided remote URL and uploads it + to the platform storage for use in web applications. + + Args: + app_model: The associated application model + end_user: The end user making the request + + JSON Parameters: + url: The remote URL to download the file from (required) + + Returns: + dict: File information including ID, signed URL, and metadata + int: HTTP status code 201 for success + + Raises: + RemoteFileUploadError: Failed to fetch file from remote URL + FileTooLargeError: File exceeds size limit + UnsupportedFileTypeError: File type not supported + """ parser = reqparse.RequestParser() parser.add_argument("url", type=str, required=True, help="URL is required") args = parser.parse_args() diff --git a/api/controllers/web/saved_message.py b/api/controllers/web/saved_message.py index d7188ef0b3..7a9d24114e 100644 --- a/api/controllers/web/saved_message.py +++ b/api/controllers/web/saved_message.py @@ -1,5 +1,5 @@ -from flask_restful import fields, marshal_with, reqparse -from flask_restful.inputs import int_range +from flask_restx import fields, marshal_with, reqparse +from flask_restx.inputs import int_range from werkzeug.exceptions import NotFound from controllers.web import api @@ -30,6 +30,10 @@ class SavedMessageListApi(WebApiResource): "data": fields.List(fields.Nested(message_fields)), } + post_response_fields = { + "result": fields.String, + } + @marshal_with(saved_message_infinite_scroll_pagination_fields) def get(self, app_model, end_user): if app_model.mode != "completion": @@ -42,6 +46,7 @@ class SavedMessageListApi(WebApiResource): return SavedMessageService.pagination_by_last_id(app_model, end_user, args["last_id"], args["limit"]) + @marshal_with(post_response_fields) def post(self, app_model, end_user): if app_model.mode != "completion": raise NotCompletionAppError() @@ -59,6 +64,11 @@ class SavedMessageListApi(WebApiResource): class SavedMessageApi(WebApiResource): + delete_response_fields = { + "result": fields.String, + } + + @marshal_with(delete_response_fields) def delete(self, app_model, end_user, message_id): message_id = str(message_id) diff --git a/api/controllers/web/site.py b/api/controllers/web/site.py index 3c133499b7..91d67bf9d8 100644 --- a/api/controllers/web/site.py +++ b/api/controllers/web/site.py @@ -1,4 +1,4 @@ -from flask_restful import fields, marshal_with +from flask_restx import fields, marshal_with from 
werkzeug.exceptions import Forbidden from configs import dify_config @@ -53,6 +53,18 @@ class AppSiteApi(WebApiResource): "custom_config": fields.Raw(attribute="custom_config"), } + @api.doc("Get App Site Info") + @api.doc(description="Retrieve app site information and configuration.") + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) @marshal_with(app_fields) def get(self, app_model, end_user): """Retrieve app site info.""" diff --git a/api/controllers/web/workflow.py b/api/controllers/web/workflow.py index 590fd3f2c7..3566cfae38 100644 --- a/api/controllers/web/workflow.py +++ b/api/controllers/web/workflow.py @@ -1,6 +1,6 @@ import logging -from flask_restful import reqparse +from flask_restx import reqparse from werkzeug.exceptions import InternalServerError from controllers.web import api @@ -30,6 +30,24 @@ logger = logging.getLogger(__name__) class WorkflowRunApi(WebApiResource): + @api.doc("Run Workflow") + @api.doc(description="Execute a workflow with provided inputs and files.") + @api.doc( + params={ + "inputs": {"description": "Input variables for the workflow", "type": "object", "required": True}, + "files": {"description": "Files to be processed by the workflow", "type": "array", "required": False}, + } + ) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "App Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model: App, end_user: EndUser): """ Run workflow @@ -62,11 +80,28 @@ class WorkflowRunApi(WebApiResource): except ValueError as e: raise e except Exception: - logging.exception("internal server error.") + logger.exception("internal server error.") raise InternalServerError() class WorkflowTaskStopApi(WebApiResource): + @api.doc("Stop Workflow Task") + @api.doc(description="Stop a running workflow task.") + @api.doc( + params={ + "task_id": {"description": "Task ID to stop", "type": "string", "required": True}, + } + ) + @api.doc( + responses={ + 200: "Success", + 400: "Bad Request", + 401: "Unauthorized", + 403: "Forbidden", + 404: "Task Not Found", + 500: "Internal Server Error", + } + ) def post(self, app_model: App, end_user: EndUser, task_id: str): """ Stop workflow task diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index ae6f14a689..94fa5d5626 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -2,7 +2,7 @@ from datetime import UTC, datetime from functools import wraps from flask import request -from flask_restful import Resource +from flask_restx import Resource from sqlalchemy import select from werkzeug.exceptions import BadRequest, NotFound, Unauthorized diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index ad9b625350..f7c83f927f 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -512,7 +512,6 @@ class BaseAgentRunner(AppRunner): if not file_objs: return UserPromptMessage(content=message.query) prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=message.query)) for file in file_objs: prompt_message_contents.append( file_manager.to_prompt_message_content( @@ -520,4 +519,6 @@ class BaseAgentRunner(AppRunner): image_detail_config=image_detail_config, ) ) + prompt_message_contents.append(TextPromptMessageContent(data=message.query)) + return 
UserPromptMessage(content=prompt_message_contents) diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index 565fb42478..6cb1077126 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -197,7 +197,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): final_answer = scratchpad.action.action_input else: final_answer = f"{scratchpad.action.action_input}" - except json.JSONDecodeError: + except TypeError: final_answer = f"{scratchpad.action.action_input}" else: function_call_state = True diff --git a/api/core/agent/cot_chat_agent_runner.py b/api/core/agent/cot_chat_agent_runner.py index 5ff89bdacb..4d1d94eadc 100644 --- a/api/core/agent/cot_chat_agent_runner.py +++ b/api/core/agent/cot_chat_agent_runner.py @@ -39,9 +39,6 @@ class CotChatAgentRunner(CotAgentRunner): Organize user query """ if self.files: - prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=query)) - # get image detail config image_detail_config = ( self.application_generate_entity.file_upload_config.image_config.detail @@ -52,6 +49,8 @@ else None ) image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW + + prompt_message_contents: list[PromptMessageContentUnionTypes] = [] for file in self.files: prompt_message_contents.append( file_manager.to_prompt_message_content( @@ -59,6 +58,7 @@ image_detail_config=image_detail_config, ) ) + prompt_message_contents.append(TextPromptMessageContent(data=query)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 4df71ce9de..9eb853aa74 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -126,8 +126,8 @@ class FunctionCallAgentRunner(BaseAgentRunner): tool_call_inputs = json.dumps( {tool_call[1]: tool_call[2] for tool_call in tool_calls}, ensure_ascii=False ) - except json.JSONDecodeError: - # ensure ascii to avoid encoding error + except TypeError: + # fallback: retry with json's default ASCII escaping tool_call_inputs = json.dumps({tool_call[1]: tool_call[2] for tool_call in tool_calls}) if chunk.delta.message and chunk.delta.message.content: @@ -153,8 +153,8 @@ tool_call_inputs = json.dumps( {tool_call[1]: tool_call[2] for tool_call in tool_calls}, ensure_ascii=False ) - except json.JSONDecodeError: - # ensure ascii to avoid encoding error + except TypeError: + # fallback: retry with json's default ASCII escaping tool_call_inputs = json.dumps({tool_call[1]: tool_call[2] for tool_call in tool_calls}) if result.usage: @@ -395,9 +395,6 @@ Organize user query """ if self.files: - prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=query)) - # get image detail config image_detail_config = ( self.application_generate_entity.file_upload_config.image_config.detail @@ -408,6 +405,8 @@ else None ) image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW + + prompt_message_contents: list[PromptMessageContentUnionTypes] = [] for file in self.files: prompt_message_contents.append( file_manager.to_prompt_message_content( @@ -415,6 +414,7 @@
class FunctionCallAgentRunner(BaseAgentRunner): image_detail_config=image_detail_config, ) ) + prompt_message_contents.append(TextPromptMessageContent(data=query)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: diff --git a/api/core/app/app_config/easy_ui_based_app/variables/manager.py b/api/core/app/app_config/easy_ui_based_app/variables/manager.py index 2f2445a336..6375733448 100644 --- a/api/core/app/app_config/easy_ui_based_app/variables/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/variables/manager.py @@ -3,6 +3,17 @@ import re from core.app.app_config.entities import ExternalDataVariableEntity, VariableEntity, VariableEntityType from core.external_data_tool.factory import ExternalDataToolFactory +_ALLOWED_VARIABLE_ENTITY_TYPE = frozenset( + [ + VariableEntityType.TEXT_INPUT, + VariableEntityType.SELECT, + VariableEntityType.PARAGRAPH, + VariableEntityType.NUMBER, + VariableEntityType.EXTERNAL_DATA_TOOL, + VariableEntityType.CHECKBOX, + ] +) + class BasicVariablesConfigManager: @classmethod @@ -47,6 +58,7 @@ VariableEntityType.PARAGRAPH, VariableEntityType.NUMBER, VariableEntityType.SELECT, + VariableEntityType.CHECKBOX, }: variable = variables[variable_type] variable_entities.append( @@ -96,8 +108,9 @@ variables = [] for item in config["user_input_form"]: key = list(item.keys())[0] - if key not in {"text-input", "select", "paragraph", "number", "external_data_tool"}: - raise ValueError("Keys in user_input_form list can only be 'text-input', 'paragraph' or 'select'") + if key not in _ALLOWED_VARIABLE_ENTITY_TYPE: + allowed_keys = ", ".join(i.value for i in _ALLOWED_VARIABLE_ENTITY_TYPE) + raise ValueError(f"Keys in user_input_form list can only be {allowed_keys}") form_item = item[key] if "label" not in form_item: diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 0df0aa59b2..df2074df2c 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -97,6 +97,7 @@ class VariableEntityType(StrEnum): EXTERNAL_DATA_TOOL = "external_data_tool" FILE = "file" FILE_LIST = "file-list" + CHECKBOX = "checkbox" class VariableEntity(BaseModel): @@ -167,7 +168,7 @@ class ModelConfig(BaseModel): provider: str name: str mode: LLMMode - completion_params: dict[str, Any] = {} + completion_params: dict[str, Any] = Field(default_factory=dict) class Condition(BaseModel): diff --git a/api/core/app/apps/base_app_generate_response_converter.py b/api/core/app/apps/base_app_generate_response_converter.py index 29c1ad598e..af3731bdc7 100644 --- a/api/core/app/apps/base_app_generate_response_converter.py +++ b/api/core/app/apps/base_app_generate_response_converter.py @@ -8,6 +8,8 @@ from core.app.entities.task_entities import AppBlockingResponse, AppStreamRespon from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError from core.model_runtime.errors.invoke import InvokeError +logger = logging.getLogger(__name__) + class AppGenerateResponseConverter(ABC): _blocking_response_type: type[AppBlockingResponse] @@ -120,7 +122,7 @@ if data: data.setdefault("message", getattr(e,
"description", str(e))) else: - logging.error(e) + logger.error(e) data = { "code": "internal_server_error", "message": "Internal Server Error, please contact support.", diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py index beece1d77e..42634fc48b 100644 --- a/api/core/app/apps/base_app_generator.py +++ b/api/core/app/apps/base_app_generator.py @@ -103,18 +103,23 @@ class BaseAppGenerator: f"(type '{variable_entity.type}') {variable_entity.variable} in input form must be a string" ) - if variable_entity.type == VariableEntityType.NUMBER and isinstance(value, str): - # handle empty string case - if not value.strip(): - return None - # may raise ValueError if user_input_value is not a valid number - try: - if "." in value: - return float(value) - else: - return int(value) - except ValueError: - raise ValueError(f"{variable_entity.variable} in input form must be a valid number") + if variable_entity.type == VariableEntityType.NUMBER: + if isinstance(value, (int, float)): + return value + elif isinstance(value, str): + # handle empty string case + if not value.strip(): + return None + # may raise ValueError if user_input_value is not a valid number + try: + if "." in value: + return float(value) + else: + return int(value) + except ValueError: + raise ValueError(f"{variable_entity.variable} in input form must be a valid number") + else: + raise TypeError(f"expected value type int, float or str, got {type(value)}, value: {value}") match variable_entity.type: case VariableEntityType.SELECT: @@ -144,6 +149,11 @@ class BaseAppGenerator: raise ValueError( f"{variable_entity.variable} in input form must be less than {variable_entity.max_length} files" ) + case VariableEntityType.CHECKBOX: + if not isinstance(value, bool): + raise ValueError(f"{variable_entity.variable} in input form must be a valid boolean value") + case _: + raise AssertionError("this statement should be unreachable.") return value diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 34a1da2227..1a89237333 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -50,6 +50,7 @@ from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution from core.workflow.nodes import NodeType from core.workflow.nodes.tool.entities import ToolNodeData from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter +from libs.datetime_utils import naive_utc_now from models import ( Account, CreatorUserRole, @@ -399,7 +400,7 @@ class WorkflowResponseConverter: if event.error is None else WorkflowNodeExecutionStatus.FAILED, error=None, - elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), + elapsed_time=(naive_utc_now() - event.start_at).total_seconds(), total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, execution_metadata=event.metadata, finished_at=int(time.time()), @@ -478,7 +479,7 @@ class WorkflowResponseConverter: if event.error is None else WorkflowNodeExecutionStatus.FAILED, error=None, - elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), + elapsed_time=(naive_utc_now() - event.start_at).total_seconds(), total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, execution_metadata=event.metadata, finished_at=int(time.time()), diff --git a/api/core/app/entities/queue_entities.py 
b/api/core/app/entities/queue_entities.py index 42e6a1519c..d663dbb175 100644 --- a/api/core/app/entities/queue_entities.py +++ b/api/core/app/entities/queue_entities.py @@ -610,7 +610,7 @@ class QueueErrorEvent(AppQueueEvent): """ event: QueueEvent = QueueEvent.ERROR - error: Any = None + error: Optional[Any] = None class QueuePingEvent(AppQueueEvent): diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py index 25c889e922..a1c0368354 100644 --- a/api/core/app/entities/task_entities.py +++ b/api/core/app/entities/task_entities.py @@ -142,7 +142,7 @@ class MessageEndStreamResponse(StreamResponse): event: StreamEvent = StreamEvent.MESSAGE_END id: str - metadata: dict = {} + metadata: dict = Field(default_factory=dict) files: Optional[Sequence[Mapping[str, Any]]] = None @@ -261,7 +261,7 @@ class NodeStartStreamResponse(StreamResponse): predecessor_node_id: Optional[str] = None inputs: Optional[Mapping[str, Any]] = None created_at: int - extras: dict = {} + extras: dict = Field(default_factory=dict) parallel_id: Optional[str] = None parallel_start_node_id: Optional[str] = None parent_parallel_id: Optional[str] = None @@ -503,7 +503,7 @@ class IterationNodeStartStreamResponse(StreamResponse): node_type: str title: str created_at: int - extras: dict = {} + extras: dict = Field(default_factory=dict) metadata: Mapping = {} inputs: Mapping = {} parallel_id: Optional[str] = None @@ -531,7 +531,7 @@ class IterationNodeNextStreamResponse(StreamResponse): index: int created_at: int pre_iteration_output: Optional[Any] = None - extras: dict = {} + extras: dict = Field(default_factory=dict) parallel_id: Optional[str] = None parallel_start_node_id: Optional[str] = None parallel_mode_run_id: Optional[str] = None @@ -590,7 +590,7 @@ class LoopNodeStartStreamResponse(StreamResponse): node_type: str title: str created_at: int - extras: dict = {} + extras: dict = Field(default_factory=dict) metadata: Mapping = {} inputs: Mapping = {} parallel_id: Optional[str] = None @@ -618,7 +618,7 @@ class LoopNodeNextStreamResponse(StreamResponse): index: int created_at: int pre_loop_output: Optional[Any] = None - extras: dict = {} + extras: dict = Field(default_factory=dict) parallel_id: Optional[str] = None parallel_start_node_id: Optional[str] = None parallel_mode_run_id: Optional[str] = None @@ -764,7 +764,7 @@ class ChatbotAppBlockingResponse(AppBlockingResponse): conversation_id: str message_id: str answer: str - metadata: dict = {} + metadata: dict = Field(default_factory=dict) created_at: int data: Data @@ -784,7 +784,7 @@ class CompletionAppBlockingResponse(AppBlockingResponse): mode: str message_id: str answer: str - metadata: dict = {} + metadata: dict = Field(default_factory=dict) created_at: int data: Data diff --git a/api/core/app/task_pipeline/based_generate_task_pipeline.py b/api/core/app/task_pipeline/based_generate_task_pipeline.py index 014c7fd4f5..8c0a442158 100644 --- a/api/core/app/task_pipeline/based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/based_generate_task_pipeline.py @@ -52,7 +52,8 @@ class BasedGenerateTaskPipeline: elif isinstance(e, InvokeError | ValueError): err = e else: - err = Exception(e.description if getattr(e, "description", None) is not None else str(e)) + description = getattr(e, "description", None) + err = Exception(description if description is not None else str(e)) if not message_id or not session: return err diff --git a/api/core/app/task_pipeline/message_cycle_manager.py 
b/api/core/app/task_pipeline/message_cycle_manager.py index f3b9dbf758..50b51f70fe 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -32,6 +32,8 @@ from extensions.ext_database import db from models.model import AppMode, Conversation, MessageAnnotation, MessageFile from services.annotation_service import AppAnnotationService +logger = logging.getLogger(__name__) + class MessageCycleManager: def __init__( @@ -98,7 +100,7 @@ class MessageCycleManager: conversation.name = name except Exception as e: if dify_config.DEBUG: - logging.exception("generate conversation name failed, conversation_id: %s", conversation_id) + logger.exception("generate conversation name failed, conversation_id: %s", conversation_id) pass db.session.merge(conversation) @@ -181,7 +183,7 @@ class MessageCycleManager: :param message_id: message id :return: """ - message_file = db.session.query(MessageFile).filter(MessageFile.id == message_id).first() + message_file = db.session.query(MessageFile).where(MessageFile.id == message_id).first() event_type = StreamEvent.MESSAGE_FILE if message_file else StreamEvent.MESSAGE return MessageStreamResponse( diff --git a/api/core/entities/model_entities.py b/api/core/entities/model_entities.py index e1c021a44a..ac64a8e3a0 100644 --- a/api/core/entities/model_entities.py +++ b/api/core/entities/model_entities.py @@ -19,6 +19,7 @@ class ModelStatus(Enum): QUOTA_EXCEEDED = "quota-exceeded" NO_PERMISSION = "no-permission" DISABLED = "disabled" + CREDENTIAL_REMOVED = "credential-removed" class SimpleModelProviderEntity(BaseModel): @@ -54,6 +55,7 @@ class ProviderModelWithStatusEntity(ProviderModel): status: ModelStatus load_balancing_enabled: bool = False + has_invalid_load_balancing_configs: bool = False def raise_for_status(self) -> None: """ diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 8bfbd82e1f..ca3c36b878 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -1,4 +1,3 @@ -import datetime import json import logging from collections import defaultdict @@ -7,6 +6,8 @@ from json import JSONDecodeError from typing import Optional from pydantic import BaseModel, ConfigDict, Field +from sqlalchemy import func, select +from sqlalchemy.orm import Session from constants import HIDDEN_VALUE from core.entities.model_entities import ModelStatus, ModelWithProviderEntity, SimpleModelProviderEntity @@ -29,10 +30,13 @@ from core.model_runtime.model_providers.__base.ai_model import AIModel from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.plugin.entities.plugin import ModelProviderID from extensions.ext_database import db +from libs.datetime_utils import naive_utc_now from models.provider import ( LoadBalancingModelConfig, Provider, + ProviderCredential, ProviderModel, + ProviderModelCredential, ProviderModelSetting, ProviderType, TenantPreferredModelProvider, @@ -45,7 +49,16 @@ original_provider_configurate_methods: dict[str, list[ConfigurateMethod]] = {} class ProviderConfiguration(BaseModel): """ - Model class for provider configuration. + Provider configuration entity for managing model provider settings. 
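A recurring pattern in the refactor below is replacing legacy `db.session.query(...).first()` lookups with 2.0-style `select(...)` statements resolved via `scalar_one_or_none()`; a self-contained sketch contrasting the two forms (the `Provider` model here is a toy stand-in, not the real Dify model):

```python
# Minimal sketch of the legacy Query API vs. the SQLAlchemy 2.0 select()
# style this PR migrates to. Model and engine are illustrative only.
from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Provider(Base):
    __tablename__ = "providers"
    id: Mapped[int] = mapped_column(primary_key=True)
    tenant_id: Mapped[str] = mapped_column(String(36))
    provider_name: Mapped[str] = mapped_column(String(255))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Provider(tenant_id="t1", provider_name="openai"))
    session.flush()

    # Legacy style: returns the first matching row or None.
    legacy = session.query(Provider).where(Provider.tenant_id == "t1").first()

    # 2.0 style: scalar_one_or_none() additionally raises if more than one
    # row matches, surfacing duplicate-record bugs instead of hiding them.
    stmt = select(Provider).where(Provider.tenant_id == "t1")
    modern = session.execute(stmt).scalar_one_or_none()

    assert legacy is modern  # same identity-mapped instance
```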
+ + This class handles: + - Provider credentials CRUD and switch + - Custom Model credentials CRUD and switch + - System vs custom provider switching + - Load balancing configurations + - Model enablement/disablement + + TODO: much of this logic should be moved out of a BaseModel entity, and the exceptions raised here should be classified into specific types """ tenant_id: str @@ -155,33 +168,17 @@ Check custom configuration available. :return: """ - return self.custom_configuration.provider is not None or len(self.custom_configuration.models) > 0 - - def get_custom_credentials(self, obfuscated: bool = False) -> dict | None: - """ - Get custom credentials. - - :param obfuscated: obfuscated secret data in credentials - :return: - """ - if self.custom_configuration.provider is None: - return None - - credentials = self.custom_configuration.provider.credentials - if not obfuscated: - return credentials - - # Obfuscate credentials - return self.obfuscated_credentials( - credentials=credentials, - credential_form_schemas=self.provider.provider_credential_schema.credential_form_schemas - if self.provider.provider_credential_schema - else [], + has_provider_credentials = ( + self.custom_configuration.provider is not None + and len(self.custom_configuration.provider.available_credentials) > 0 ) - def _get_custom_provider_credentials(self) -> Provider | None: + has_model_configurations = len(self.custom_configuration.models) > 0 + return has_provider_credentials or has_model_configurations + + def _get_provider_record(self, session: Session) -> Provider | None: """ - Get custom provider credentials. + Get custom provider record. """ # get provider model_provider_id = ModelProviderID(self.provider.provider) @@ -189,156 +186,442 @@ if model_provider_id.is_langgenius(): provider_names.append(model_provider_id.provider_name) - provider_record = ( - db.session.query(Provider) - .where( - Provider.tenant_id == self.tenant_id, - Provider.provider_type == ProviderType.CUSTOM.value, - Provider.provider_name.in_(provider_names), - ) - .first() + stmt = select(Provider).where( + Provider.tenant_id == self.tenant_id, + Provider.provider_type == ProviderType.CUSTOM.value, + Provider.provider_name.in_(provider_names), ) - return provider_record + return session.execute(stmt).scalar_one_or_none() - def custom_credentials_validate(self, credentials: dict) -> tuple[Provider | None, dict]: + def _get_specific_provider_credential(self, credential_id: str) -> dict | None: """ - Validate custom credentials. - :param credentials: provider credentials + Get a specific provider credential by ID.
+ :param credential_id: Credential ID :return: """ - provider_record = self._get_custom_provider_credentials() - - # Get provider credential secret variables - provider_credential_secret_variables = self.extract_secret_variables( + # Extract secret variables from provider credential schema + credential_secret_variables = self.extract_secret_variables( self.provider.provider_credential_schema.credential_form_schemas if self.provider.provider_credential_schema else [] ) - if provider_record: - try: - # fix origin data - if provider_record.encrypted_config: - if not provider_record.encrypted_config.startswith("{"): - original_credentials = {"openai_api_key": provider_record.encrypted_config} - else: - original_credentials = json.loads(provider_record.encrypted_config) - else: - original_credentials = {} - except JSONDecodeError: - original_credentials = {} + with Session(db.engine) as session: + # Prefer the actual provider record name if it exists (to handle aliased provider names) + provider_record = self._get_provider_record(session) + provider_name = provider_record.provider_name if provider_record else self.provider.provider - # encrypt credentials - for key, value in credentials.items(): - if key in provider_credential_secret_variables: - # if send [__HIDDEN__] in secret input, it will be same as original value - if value == HIDDEN_VALUE and key in original_credentials: - credentials[key] = encrypter.decrypt_token(self.tenant_id, original_credentials[key]) - - model_provider_factory = ModelProviderFactory(self.tenant_id) - credentials = model_provider_factory.provider_credentials_validate( - provider=self.provider.provider, credentials=credentials - ) - - for key, value in credentials.items(): - if key in provider_credential_secret_variables: - credentials[key] = encrypter.encrypt_token(self.tenant_id, value) - - return provider_record, credentials - - def add_or_update_custom_credentials(self, credentials: dict) -> None: - """ - Add or update custom provider credentials. - :param credentials: - :return: - """ - # validate custom provider config - provider_record, credentials = self.custom_credentials_validate(credentials) - - # save provider - # Note: Do not switch the preferred provider, which allows users to use quotas first - if provider_record: - provider_record.encrypted_config = json.dumps(credentials) - provider_record.is_valid = True - provider_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.commit() - else: - provider_record = Provider() - provider_record.tenant_id = self.tenant_id - provider_record.provider_name = self.provider.provider - provider_record.provider_type = ProviderType.CUSTOM.value - provider_record.encrypted_config = json.dumps(credentials) - provider_record.is_valid = True - - db.session.add(provider_record) - db.session.commit() - - provider_model_credentials_cache = ProviderCredentialsCache( - tenant_id=self.tenant_id, identity_id=provider_record.id, cache_type=ProviderCredentialsCacheType.PROVIDER - ) - - provider_model_credentials_cache.delete() - - self.switch_preferred_provider_type(ProviderType.CUSTOM) - - def delete_custom_credentials(self) -> None: - """ - Delete custom provider credentials.
- :return: - """ - # get provider - provider_record = self._get_custom_provider_credentials() - - # delete provider - if provider_record: - self.switch_preferred_provider_type(ProviderType.SYSTEM) - - db.session.delete(provider_record) - db.session.commit() - - provider_model_credentials_cache = ProviderCredentialsCache( - tenant_id=self.tenant_id, - identity_id=provider_record.id, - cache_type=ProviderCredentialsCacheType.PROVIDER, + stmt = select(ProviderCredential).where( + ProviderCredential.id == credential_id, + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == provider_name, ) - provider_model_credentials_cache.delete() + credential = session.execute(stmt).scalar_one_or_none() - def get_custom_model_credentials( - self, model_type: ModelType, model: str, obfuscated: bool = False - ) -> Optional[dict]: + if not credential or not credential.encrypted_config: + raise ValueError(f"Credential with id {credential_id} not found.") + + try: + credentials = json.loads(credential.encrypted_config) + except JSONDecodeError: + credentials = {} + + # Decrypt secret variables + for key in credential_secret_variables: + if key in credentials and credentials[key] is not None: + try: + credentials[key] = encrypter.decrypt_token(tenant_id=self.tenant_id, token=credentials[key]) + except Exception: + pass + + return self.obfuscated_credentials( + credentials=credentials, + credential_form_schemas=self.provider.provider_credential_schema.credential_form_schemas + if self.provider.provider_credential_schema + else [], + ) + + def _check_provider_credential_name_exists( + self, credential_name: str, session: Session, exclude_id: str | None = None + ) -> bool: """ - Get custom model credentials. + Disallow duplicate credential names when creating or updating a credential. + """ + stmt = select(ProviderCredential.id).where( + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.credential_name == credential_name, + ) + if exclude_id: + stmt = stmt.where(ProviderCredential.id != exclude_id) + return session.execute(stmt).scalar_one_or_none() is not None - :param model_type: model type - :param model: model name - :param obfuscated: obfuscated secret data in credentials + def get_provider_credential(self, credential_id: str | None = None) -> dict | None: + """ + Get provider credentials.
+ + :param credential_id: if provided, return the specified credential :return: """ - if not self.custom_configuration.models: - return None - for model_configuration in self.custom_configuration.models: - if model_configuration.model_type == model_type and model_configuration.model == model: - credentials = model_configuration.credentials - if not obfuscated: - return credentials + if credential_id: + return self._get_specific_provider_credential(credential_id) - # Obfuscate credentials - return self.obfuscated_credentials( - credentials=credentials, - credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas - if self.provider.model_credential_schema - else [], + # Default behavior: return current active provider credentials + credentials = self.custom_configuration.provider.credentials if self.custom_configuration.provider else {} + + return self.obfuscated_credentials( + credentials=credentials, + credential_form_schemas=self.provider.provider_credential_schema.credential_form_schemas + if self.provider.provider_credential_schema + else [], + ) + + def validate_provider_credentials( + self, credentials: dict, credential_id: str = "", session: Session | None = None + ) -> dict: + """ + Validate custom credentials. + :param credentials: provider credentials + :param credential_id: (optional) if provided, hidden secret inputs can be resolved from this existing credential during validation + :param session: optional database session + :return: + """ + + def _validate(s: Session) -> dict: + # Get provider credential secret variables + provider_credential_secret_variables = self.extract_secret_variables( + self.provider.provider_credential_schema.credential_form_schemas + if self.provider.provider_credential_schema + else [] + ) + + if credential_id: + try: + stmt = select(ProviderCredential).where( + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ProviderCredential.id == credential_id, + ) + credential_record = s.execute(stmt).scalar_one_or_none() + # normalize legacy data: a bare string is treated as an openai_api_key + if credential_record and credential_record.encrypted_config: + if not credential_record.encrypted_config.startswith("{"): + original_credentials = {"openai_api_key": credential_record.encrypted_config} + else: + original_credentials = json.loads(credential_record.encrypted_config) + else: + original_credentials = {} + except JSONDecodeError: + original_credentials = {} + + # resolve hidden secret inputs back to their stored values + for key, value in credentials.items(): + if key in provider_credential_secret_variables: + # if the client echoes [__HIDDEN__] for a secret input, reuse the stored original value + if value == HIDDEN_VALUE and key in original_credentials: + credentials[key] = encrypter.decrypt_token( + tenant_id=self.tenant_id, token=original_credentials[key] + ) + + model_provider_factory = ModelProviderFactory(self.tenant_id) + validated_credentials = model_provider_factory.provider_credentials_validate( + provider=self.provider.provider, credentials=credentials + ) + + for key, value in validated_credentials.items(): + if key in provider_credential_secret_variables: + validated_credentials[key] = encrypter.encrypt_token(self.tenant_id, value) + + return validated_credentials + + if session: + return _validate(session) + else: + with Session(db.engine) as new_session: + return _validate(new_session) + + def create_provider_credential(self, credentials: dict, credential_name: str) -> None: + """ + Add custom provider credentials.
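Worth calling out in the validation path above: secret fields that come back from the client as the `[__HIDDEN__]` placeholder are swapped for the stored originals before validation. A minimal standalone sketch of that round-trip, with a toy reversible transform standing in for Dify's `encrypter` and an assumed single-secret schema:

```python
# Minimal sketch of the hidden-secret round-trip used during credential
# validation above. The reversed-string "cipher" and field names are toy
# stand-ins for Dify's encrypter and credential schema.
HIDDEN_VALUE = "[__HIDDEN__]"
SECRET_VARIABLES = {"openai_api_key"}  # assumed secret field


def toy_encrypt(token: str) -> str:
    return token[::-1]  # stand-in for encrypter.encrypt_token


def toy_decrypt(token: str) -> str:
    return token[::-1]  # stand-in for encrypter.decrypt_token


def merge_hidden_secrets(incoming: dict, stored_encrypted: dict) -> dict:
    """Replace [__HIDDEN__] placeholders with the stored plaintext secrets."""
    merged = dict(incoming)
    for key in SECRET_VARIABLES:
        if merged.get(key) == HIDDEN_VALUE and key in stored_encrypted:
            merged[key] = toy_decrypt(stored_encrypted[key])
    return merged


stored = {"openai_api_key": toy_encrypt("sk-real-key")}
payload = {"openai_api_key": HIDDEN_VALUE, "base_url": "https://api.openai.com"}
assert merge_hidden_secrets(payload, stored)["openai_api_key"] == "sk-real-key"
```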
+ :param credentials: provider credentials + :param credential_name: credential name + :return: + """ + with Session(db.engine) as session: + if self._check_provider_credential_name_exists(credential_name=credential_name, session=session): + raise ValueError(f"Credential with name '{credential_name}' already exists.") + + credentials = self.validate_provider_credentials(credentials=credentials, session=session) + provider_record = self._get_provider_record(session) + try: + new_record = ProviderCredential( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + encrypted_config=json.dumps(credentials), + credential_name=credential_name, ) + session.add(new_record) + session.flush() - return None + if not provider_record: + # If provider record does not exist, create it + provider_record = Provider( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + provider_type=ProviderType.CUSTOM.value, + is_valid=True, + credential_id=new_record.id, + ) + session.add(provider_record) - def _get_custom_model_credentials( + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_record.id, + cache_type=ProviderCredentialsCacheType.PROVIDER, + ) + provider_model_credentials_cache.delete() + + self.switch_preferred_provider_type(provider_type=ProviderType.CUSTOM, session=session) + + session.commit() + except Exception: + session.rollback() + raise + + def update_provider_credential( + self, + credentials: dict, + credential_id: str, + credential_name: str, + ) -> None: + """ + update a saved provider credential (by credential_id). + + :param credentials: provider credentials + :param credential_id: credential id + :param credential_name: credential name + :return: + """ + with Session(db.engine) as session: + if self._check_provider_credential_name_exists( + credential_name=credential_name, session=session, exclude_id=credential_id + ): + raise ValueError(f"Credential with name '{credential_name}' already exists.") + + credentials = self.validate_provider_credentials( + credentials=credentials, credential_id=credential_id, session=session + ) + provider_record = self._get_provider_record(session) + stmt = select(ProviderCredential).where( + ProviderCredential.id == credential_id, + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ) + + # Get the credential record to update + credential_record = session.execute(stmt).scalar_one_or_none() + if not credential_record: + raise ValueError("Credential record not found.") + try: + # Update credential + credential_record.encrypted_config = json.dumps(credentials) + credential_record.credential_name = credential_name + credential_record.updated_at = naive_utc_now() + + session.commit() + + if provider_record and provider_record.credential_id == credential_id: + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_record.id, + cache_type=ProviderCredentialsCacheType.PROVIDER, + ) + provider_model_credentials_cache.delete() + + self._update_load_balancing_configs_with_credential( + credential_id=credential_id, + credential_record=credential_record, + credential_source="provider", + session=session, + ) + except Exception: + session.rollback() + raise + + def _update_load_balancing_configs_with_credential( + self, + credential_id: str, + credential_record: ProviderCredential | ProviderModelCredential, + credential_source: str, + session: Session, + ) -> None: + 
""" + Update load balancing configurations that reference the given credential_id. + + :param credential_id: credential id + :param credential_record: the encrypted_config and credential_name + :param credential_source: the credential comes from the provider_credential(`provider`) + or the provider_model_credential(`custom_model`) + :param session: the database session + :return: + """ + # Find all load balancing configs that use this credential_id + stmt = select(LoadBalancingModelConfig).where( + LoadBalancingModelConfig.tenant_id == self.tenant_id, + LoadBalancingModelConfig.provider_name == self.provider.provider, + LoadBalancingModelConfig.credential_id == credential_id, + LoadBalancingModelConfig.credential_source_type == credential_source, + ) + load_balancing_configs = session.execute(stmt).scalars().all() + + if not load_balancing_configs: + return + + # Update each load balancing config with the new credentials + for lb_config in load_balancing_configs: + # Update the encrypted_config with the new credentials + lb_config.encrypted_config = credential_record.encrypted_config + lb_config.name = credential_record.credential_name + lb_config.updated_at = naive_utc_now() + + # Clear cache for this load balancing config + lb_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=lb_config.id, + cache_type=ProviderCredentialsCacheType.LOAD_BALANCING_MODEL, + ) + lb_credentials_cache.delete() + + session.commit() + + def delete_provider_credential(self, credential_id: str) -> None: + """ + Delete a saved provider credential (by credential_id). + + :param credential_id: credential id + :return: + """ + with Session(db.engine) as session: + stmt = select(ProviderCredential).where( + ProviderCredential.id == credential_id, + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ) + + # Get the credential record to update + credential_record = session.execute(stmt).scalar_one_or_none() + if not credential_record: + raise ValueError("Credential record not found.") + + # Check if this credential is used in load balancing configs + lb_stmt = select(LoadBalancingModelConfig).where( + LoadBalancingModelConfig.tenant_id == self.tenant_id, + LoadBalancingModelConfig.provider_name == self.provider.provider, + LoadBalancingModelConfig.credential_id == credential_id, + LoadBalancingModelConfig.credential_source_type == "provider", + ) + lb_configs_using_credential = session.execute(lb_stmt).scalars().all() + try: + for lb_config in lb_configs_using_credential: + lb_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=lb_config.id, + cache_type=ProviderCredentialsCacheType.LOAD_BALANCING_MODEL, + ) + lb_credentials_cache.delete() + + lb_config.credential_id = None + lb_config.encrypted_config = None + lb_config.enabled = False + lb_config.name = "__delete__" + lb_config.updated_at = naive_utc_now() + session.add(lb_config) + + # Check if this is the currently active credential + provider_record = self._get_provider_record(session) + + # Check available credentials count BEFORE deleting + # if this is the last credential, we need to delete the provider record + count_stmt = select(func.count(ProviderCredential.id)).where( + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ) + available_credentials_count = session.execute(count_stmt).scalar() or 0 + session.delete(credential_record) + + if provider_record and 
available_credentials_count <= 1: + # If all credentials are deleted, delete the provider record + session.delete(provider_record) + + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_record.id, + cache_type=ProviderCredentialsCacheType.PROVIDER, + ) + provider_model_credentials_cache.delete() + self.switch_preferred_provider_type(provider_type=ProviderType.SYSTEM, session=session) + elif provider_record and provider_record.credential_id == credential_id: + provider_record.credential_id = None + provider_record.updated_at = naive_utc_now() + + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_record.id, + cache_type=ProviderCredentialsCacheType.PROVIDER, + ) + provider_model_credentials_cache.delete() + self.switch_preferred_provider_type(provider_type=ProviderType.SYSTEM, session=session) + + session.commit() + except Exception: + session.rollback() + raise + + def switch_active_provider_credential(self, credential_id: str) -> None: + """ + Switch active provider credential (copy the selected one into current active snapshot). + + :param credential_id: credential id + :return: + """ + with Session(db.engine) as session: + stmt = select(ProviderCredential).where( + ProviderCredential.id == credential_id, + ProviderCredential.tenant_id == self.tenant_id, + ProviderCredential.provider_name == self.provider.provider, + ) + credential_record = session.execute(stmt).scalar_one_or_none() + if not credential_record: + raise ValueError("Credential record not found.") + + provider_record = self._get_provider_record(session) + if not provider_record: + raise ValueError("Provider record not found.") + + try: + provider_record.credential_id = credential_record.id + provider_record.updated_at = naive_utc_now() + session.commit() + + provider_model_credentials_cache = ProviderCredentialsCache( + tenant_id=self.tenant_id, + identity_id=provider_record.id, + cache_type=ProviderCredentialsCacheType.PROVIDER, + ) + provider_model_credentials_cache.delete() + self.switch_preferred_provider_type(ProviderType.CUSTOM, session=session) + except Exception: + session.rollback() + raise + + def _get_custom_model_record( self, model_type: ModelType, model: str, + session: Session, ) -> ProviderModel | None: """ Get custom model credentials. @@ -349,128 +632,495 @@ class ProviderConfiguration(BaseModel): if model_provider_id.is_langgenius(): provider_names.append(model_provider_id.provider_name) - provider_model_record = ( - db.session.query(ProviderModel) - .where( - ProviderModel.tenant_id == self.tenant_id, - ProviderModel.provider_name.in_(provider_names), - ProviderModel.model_name == model, - ProviderModel.model_type == model_type.to_origin_model_type(), - ) - .first() + stmt = select(ProviderModel).where( + ProviderModel.tenant_id == self.tenant_id, + ProviderModel.provider_name.in_(provider_names), + ProviderModel.model_name == model, + ProviderModel.model_type == model_type.to_origin_model_type(), ) - return provider_model_record + return session.execute(stmt).scalar_one_or_none() - def custom_model_credentials_validate( - self, model_type: ModelType, model: str, credentials: dict - ) -> tuple[ProviderModel | None, dict]: + def _get_specific_custom_model_credential( + self, model_type: ModelType, model: str, credential_id: str + ) -> dict | None: """ - Validate custom model credentials. 
-
-        :param model_type: model type
-        :param model: model name
-        :param credentials: model credentials
+        Get a specific custom model credential by ID.
+        :param credential_id: credential id
         :return:
         """
-        # get provider model
-        provider_model_record = self._get_custom_model_credentials(model_type, model)
-
-        # Get provider credential secret variables
-        provider_credential_secret_variables = self.extract_secret_variables(
+        model_credential_secret_variables = self.extract_secret_variables(
             self.provider.model_credential_schema.credential_form_schemas
             if self.provider.model_credential_schema
             else []
         )

-        if provider_model_record:
-            try:
-                original_credentials = (
-                    json.loads(provider_model_record.encrypted_config) if provider_model_record.encrypted_config else {}
-                )
-            except JSONDecodeError:
-                original_credentials = {}
-
-            # decrypt credentials
-            for key, value in credentials.items():
-                if key in provider_credential_secret_variables:
-                    # if send [__HIDDEN__] in secret input, it will be same as original value
-                    if value == HIDDEN_VALUE and key in original_credentials:
-                        credentials[key] = encrypter.decrypt_token(self.tenant_id, original_credentials[key])
-
-        model_provider_factory = ModelProviderFactory(self.tenant_id)
-        credentials = model_provider_factory.model_credentials_validate(
-            provider=self.provider.provider, model_type=model_type, model=model, credentials=credentials
-        )
-
-        for key, value in credentials.items():
-            if key in provider_credential_secret_variables:
-                credentials[key] = encrypter.encrypt_token(self.tenant_id, value)
-
-        return provider_model_record, credentials
-
-    def add_or_update_custom_model_credentials(self, model_type: ModelType, model: str, credentials: dict) -> None:
-        """
-        Add or update custom model credentials.
-
-        :param model_type: model type
-        :param model: model name
-        :param credentials: model credentials
-        :return:
-        """
-        # validate custom model config
-        provider_model_record, credentials = self.custom_model_credentials_validate(model_type, model, credentials)
-
-        # save provider model
-        # Note: Do not switch the preferred provider, which allows users to use quotas first
-        if provider_model_record:
-            provider_model_record.encrypted_config = json.dumps(credentials)
-            provider_model_record.is_valid = True
-            provider_model_record.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
-            db.session.commit()
-        else:
-            provider_model_record = ProviderModel()
-            provider_model_record.tenant_id = self.tenant_id
-            provider_model_record.provider_name = self.provider.provider
-            provider_model_record.model_name = model
-            provider_model_record.model_type = model_type.to_origin_model_type()
-            provider_model_record.encrypted_config = json.dumps(credentials)
-            provider_model_record.is_valid = True
-            db.session.add(provider_model_record)
-            db.session.commit()
-
-        provider_model_credentials_cache = ProviderCredentialsCache(
-            tenant_id=self.tenant_id,
-            identity_id=provider_model_record.id,
-            cache_type=ProviderCredentialsCacheType.MODEL,
-        )
-
-        provider_model_credentials_cache.delete()
-
-    def delete_custom_model_credentials(self, model_type: ModelType, model: str) -> None:
-        """
-        Delete custom model credentials.
-        :param model_type: model type
-        :param model: model name
-        :return:
-        """
-        # get provider model
-        provider_model_record = self._get_custom_model_credentials(model_type, model)
-
-        # delete provider model
-        if provider_model_record:
-            db.session.delete(provider_model_record)
-            db.session.commit()
-
-            provider_model_credentials_cache = ProviderCredentialsCache(
-                tenant_id=self.tenant_id,
-                identity_id=provider_model_record.id,
-                cache_type=ProviderCredentialsCacheType.MODEL,
+        with Session(db.engine) as session:
+            stmt = select(ProviderModelCredential).where(
+                ProviderModelCredential.id == credential_id,
+                ProviderModelCredential.tenant_id == self.tenant_id,
+                ProviderModelCredential.provider_name == self.provider.provider,
+                ProviderModelCredential.model_name == model,
+                ProviderModelCredential.model_type == model_type.to_origin_model_type(),
             )
-            provider_model_credentials_cache.delete()
+            credential_record = session.execute(stmt).scalar_one_or_none()
-    def _get_provider_model_setting(self, model_type: ModelType, model: str) -> ProviderModelSetting | None:
+            if not credential_record or not credential_record.encrypted_config:
+                raise ValueError(f"Credential with id {credential_id} not found.")
+
+            try:
+                credentials = json.loads(credential_record.encrypted_config)
+            except JSONDecodeError:
+                credentials = {}
+
+            # Decrypt secret variables
+            for key in model_credential_secret_variables:
+                if key in credentials and credentials[key] is not None:
+                    try:
+                        credentials[key] = encrypter.decrypt_token(tenant_id=self.tenant_id, token=credentials[key])
+                    except Exception:
+                        # ignore values that cannot be decrypted
+                        pass
+
+            current_credential_id = credential_record.id
+            current_credential_name = credential_record.credential_name
+            credentials = self.obfuscated_credentials(
+                credentials=credentials,
+                credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas
+                if self.provider.model_credential_schema
+                else [],
+            )
+
+            return {
+                "current_credential_id": current_credential_id,
+                "current_credential_name": current_credential_name,
+                "credentials": credentials,
+            }
+
+    def _check_custom_model_credential_name_exists(
+        self, model_type: ModelType, model: str, credential_name: str, session: Session, exclude_id: str | None = None
+    ) -> bool:
+        """
+        Duplicate names are not allowed when creating or updating a credential.
+        """
+        stmt = select(ProviderModelCredential).where(
+            ProviderModelCredential.tenant_id == self.tenant_id,
+            ProviderModelCredential.provider_name == self.provider.provider,
+            ProviderModelCredential.model_name == model,
+            ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+            ProviderModelCredential.credential_name == credential_name,
+        )
+        if exclude_id:
+            stmt = stmt.where(ProviderModelCredential.id != exclude_id)
+        return session.execute(stmt).scalar_one_or_none() is not None
+
+    def get_custom_model_credential(
+        self, model_type: ModelType, model: str, credential_id: str | None
+    ) -> Optional[dict]:
+        """
+        Get custom model credentials.
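+
+        Illustrative usage (a sketch; ``config`` stands for this ProviderConfiguration
+        instance, and the model name and credential id are hypothetical)::
+
+            info = config.get_custom_model_credential(
+                model_type=ModelType.LLM,
+                model="my-model",
+                credential_id="cred-123",
+            )
+            # info["credentials"] holds the obfuscated credential values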
+
+        :param model_type: model type
+        :param model: model name
+        :param credential_id: credential id
+        :return:
+        """
+        # If credential_id is provided, return the specific credential
+        if credential_id:
+            return self._get_specific_custom_model_credential(
+                model_type=model_type, model=model, credential_id=credential_id
+            )
+
+        for model_configuration in self.custom_configuration.models:
+            if (
+                model_configuration.model_type == model_type
+                and model_configuration.model == model
+                and model_configuration.credentials
+            ):
+                current_credential_id = model_configuration.current_credential_id
+                current_credential_name = model_configuration.current_credential_name
+                credentials = self.obfuscated_credentials(
+                    credentials=model_configuration.credentials,
+                    credential_form_schemas=self.provider.model_credential_schema.credential_form_schemas
+                    if self.provider.model_credential_schema
+                    else [],
+                )
+                return {
+                    "current_credential_id": current_credential_id,
+                    "current_credential_name": current_credential_name,
+                    "credentials": credentials,
+                }
+        return None
+
+    def validate_custom_model_credentials(
+        self,
+        model_type: ModelType,
+        model: str,
+        credentials: dict,
+        credential_id: str = "",
+        session: Session | None = None,
+    ) -> dict:
+        """
+        Validate custom model credentials.
+
+        :param model_type: model type
+        :param model: model name
+        :param credentials: model credentials dict
+        :param credential_id: (optional) if provided, the existing credential's hidden
+            secret values can be reused for validation
+        :return:
+        """
+
+        def _validate(s: Session) -> dict:
+            # Get provider credential secret variables
+            provider_credential_secret_variables = self.extract_secret_variables(
+                self.provider.model_credential_schema.credential_form_schemas
+                if self.provider.model_credential_schema
+                else []
+            )
+
+            if credential_id:
+                try:
+                    stmt = select(ProviderModelCredential).where(
+                        ProviderModelCredential.id == credential_id,
+                        ProviderModelCredential.tenant_id == self.tenant_id,
+                        ProviderModelCredential.provider_name == self.provider.provider,
+                        ProviderModelCredential.model_name == model,
+                        ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+                    )
+                    credential_record = s.execute(stmt).scalar_one_or_none()
+                    original_credentials = (
+                        json.loads(credential_record.encrypted_config)
+                        if credential_record and credential_record.encrypted_config
+                        else {}
+                    )
+                except JSONDecodeError:
+                    original_credentials = {}
+
+                # decrypt credentials
+                for key, value in credentials.items():
+                    if key in provider_credential_secret_variables:
+                        # if [__HIDDEN__] is sent for a secret input, keep the original value
+                        if value == HIDDEN_VALUE and key in original_credentials:
+                            credentials[key] = encrypter.decrypt_token(
+                                tenant_id=self.tenant_id, token=original_credentials[key]
+                            )
+
+            model_provider_factory = ModelProviderFactory(self.tenant_id)
+            validated_credentials = model_provider_factory.model_credentials_validate(
+                provider=self.provider.provider, model_type=model_type, model=model, credentials=credentials
+            )
+
+            for key, value in validated_credentials.items():
+                if key in provider_credential_secret_variables:
+                    validated_credentials[key] = encrypter.encrypt_token(self.tenant_id, value)
+
+            return validated_credentials
+
+        if session:
+            return _validate(session)
+        else:
+            with Session(db.engine) as new_session:
+                return _validate(new_session)
+
+    def create_custom_model_credential(
+        self, model_type: ModelType, model: str, credentials: dict, credential_name: str
+    ) -> None:
+        """
+        Create a custom model credential.
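+
+        Illustrative usage (a sketch; the model name, key, and credential name
+        are hypothetical)::
+
+            config.create_custom_model_credential(
+                model_type=ModelType.LLM,
+                model="my-model",
+                credentials={"api_key": "sk-..."},
+                credential_name="primary-key",
+            )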
+
+        :param model_type: model type
+        :param model: model name
+        :param credentials: model credentials dict
+        :param credential_name: credential name
+        :return:
+        """
+        with Session(db.engine) as session:
+            if self._check_custom_model_credential_name_exists(
+                model=model, model_type=model_type, credential_name=credential_name, session=session
+            ):
+                raise ValueError(f"Model credential with name '{credential_name}' already exists for {model}.")
+            # validate custom model config
+            credentials = self.validate_custom_model_credentials(
+                model_type=model_type, model=model, credentials=credentials, session=session
+            )
+            provider_model_record = self._get_custom_model_record(model_type=model_type, model=model, session=session)
+
+            try:
+                credential = ProviderModelCredential(
+                    tenant_id=self.tenant_id,
+                    provider_name=self.provider.provider,
+                    model_name=model,
+                    model_type=model_type.to_origin_model_type(),
+                    encrypted_config=json.dumps(credentials),
+                    credential_name=credential_name,
+                )
+                session.add(credential)
+                session.flush()
+
+                # save provider model
+                if not provider_model_record:
+                    provider_model_record = ProviderModel(
+                        tenant_id=self.tenant_id,
+                        provider_name=self.provider.provider,
+                        model_name=model,
+                        model_type=model_type.to_origin_model_type(),
+                        credential_id=credential.id,
+                        is_valid=True,
+                    )
+                    session.add(provider_model_record)
+
+                session.commit()
+
+                provider_model_credentials_cache = ProviderCredentialsCache(
+                    tenant_id=self.tenant_id,
+                    identity_id=provider_model_record.id,
+                    cache_type=ProviderCredentialsCacheType.MODEL,
+                )
+                provider_model_credentials_cache.delete()
+            except Exception:
+                session.rollback()
+                raise
+
+    def update_custom_model_credential(
+        self, model_type: ModelType, model: str, credentials: dict, credential_name: str, credential_id: str
+    ) -> None:
+        """
+        Update a custom model credential.
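+
+        Illustrative usage (a sketch; ids and names are hypothetical). Passing
+        HIDDEN_VALUE for a secret field keeps the currently stored value::
+
+            config.update_custom_model_credential(
+                model_type=ModelType.LLM,
+                model="my-model",
+                credentials={"api_key": HIDDEN_VALUE},
+                credential_name="primary-key-renamed",
+                credential_id="cred-123",
+            )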
+
+        :param model_type: model type
+        :param model: model name
+        :param credentials: model credentials dict
+        :param credential_name: credential name
+        :param credential_id: credential id
+        :return:
+        """
+        with Session(db.engine) as session:
+            if self._check_custom_model_credential_name_exists(
+                model=model,
+                model_type=model_type,
+                credential_name=credential_name,
+                session=session,
+                exclude_id=credential_id,
+            ):
+                raise ValueError(f"Model credential with name '{credential_name}' already exists for {model}.")
+            # validate custom model config
+            credentials = self.validate_custom_model_credentials(
+                model_type=model_type,
+                model=model,
+                credentials=credentials,
+                credential_id=credential_id,
+                session=session,
+            )
+            provider_model_record = self._get_custom_model_record(model_type=model_type, model=model, session=session)
+
+            stmt = select(ProviderModelCredential).where(
+                ProviderModelCredential.id == credential_id,
+                ProviderModelCredential.tenant_id == self.tenant_id,
+                ProviderModelCredential.provider_name == self.provider.provider,
+                ProviderModelCredential.model_name == model,
+                ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+            )
+            credential_record = session.execute(stmt).scalar_one_or_none()
+            if not credential_record:
+                raise ValueError("Credential record not found.")
+
+            try:
+                # Update credential
+                credential_record.encrypted_config = json.dumps(credentials)
+                credential_record.credential_name = credential_name
+                credential_record.updated_at = naive_utc_now()
+                session.commit()
+
+                if provider_model_record and provider_model_record.credential_id == credential_id:
+                    provider_model_credentials_cache = ProviderCredentialsCache(
+                        tenant_id=self.tenant_id,
+                        identity_id=provider_model_record.id,
+                        cache_type=ProviderCredentialsCacheType.MODEL,
+                    )
+                    provider_model_credentials_cache.delete()
+
+                self._update_load_balancing_configs_with_credential(
+                    credential_id=credential_id,
+                    credential_record=credential_record,
+                    credential_source="custom_model",
+                    session=session,
+                )
+            except Exception:
+                session.rollback()
+                raise
+
+    def delete_custom_model_credential(self, model_type: ModelType, model: str, credential_id: str) -> None:
+        """
+        Delete a saved custom model credential (by credential_id).
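+
+        Illustrative usage (a sketch; the credential id is hypothetical).
+        Deleting the last credential also removes the custom model record::
+
+            config.delete_custom_model_credential(
+                model_type=ModelType.LLM,
+                model="my-model",
+                credential_id="cred-123",
+            )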
+
+        :param model_type: model type
+        :param model: model name
+        :param credential_id: credential id
+        :return:
+        """
+        with Session(db.engine) as session:
+            stmt = select(ProviderModelCredential).where(
+                ProviderModelCredential.id == credential_id,
+                ProviderModelCredential.tenant_id == self.tenant_id,
+                ProviderModelCredential.provider_name == self.provider.provider,
+                ProviderModelCredential.model_name == model,
+                ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+            )
+            credential_record = session.execute(stmt).scalar_one_or_none()
+            if not credential_record:
+                raise ValueError("Credential record not found.")
+
+            lb_stmt = select(LoadBalancingModelConfig).where(
+                LoadBalancingModelConfig.tenant_id == self.tenant_id,
+                LoadBalancingModelConfig.provider_name == self.provider.provider,
+                LoadBalancingModelConfig.credential_id == credential_id,
+                LoadBalancingModelConfig.credential_source_type == "custom_model",
+            )
+            lb_configs_using_credential = session.execute(lb_stmt).scalars().all()
+
+            try:
+                for lb_config in lb_configs_using_credential:
+                    lb_credentials_cache = ProviderCredentialsCache(
+                        tenant_id=self.tenant_id,
+                        identity_id=lb_config.id,
+                        cache_type=ProviderCredentialsCacheType.LOAD_BALANCING_MODEL,
+                    )
+                    lb_credentials_cache.delete()
+                    lb_config.credential_id = None
+                    lb_config.encrypted_config = None
+                    lb_config.enabled = False
+                    lb_config.name = "__delete__"
+                    lb_config.updated_at = naive_utc_now()
+                    session.add(lb_config)
+
+                # Check if this is the currently active credential
+                provider_model_record = self._get_custom_model_record(model_type, model, session=session)
+
+                # Check the available credentials count BEFORE deleting:
+                # if this is the last credential, the custom model record must be deleted as well
+                count_stmt = select(func.count(ProviderModelCredential.id)).where(
+                    ProviderModelCredential.tenant_id == self.tenant_id,
+                    ProviderModelCredential.provider_name == self.provider.provider,
+                    ProviderModelCredential.model_name == model,
+                    ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+                )
+                available_credentials_count = session.execute(count_stmt).scalar() or 0
+                session.delete(credential_record)
+
+                if provider_model_record and available_credentials_count <= 1:
+                    # If all credentials are deleted, delete the custom model record
+                    session.delete(provider_model_record)
+                elif provider_model_record and provider_model_record.credential_id == credential_id:
+                    provider_model_record.credential_id = None
+                    provider_model_record.updated_at = naive_utc_now()
+                    # Clear the cached credentials for this model record
+                    provider_model_credentials_cache = ProviderCredentialsCache(
+                        tenant_id=self.tenant_id,
+                        identity_id=provider_model_record.id,
+                        cache_type=ProviderCredentialsCacheType.MODEL,
+                    )
+                    provider_model_credentials_cache.delete()
+
+                session.commit()
+
+            except Exception:
+                session.rollback()
+                raise
+
+    def add_model_credential_to_model(self, model_type: ModelType, model: str, credential_id: str) -> None:
+        """
+        If the model list already contains this custom model, switch it to the given credential.
+        If it does not, use the credential to create a new custom model record.
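+
+        Illustrative usage (a sketch; the id is hypothetical and must belong to
+        an existing credential for this model name and type)::
+
+            config.add_model_credential_to_model(
+                model_type=ModelType.LLM,
+                model="my-model",
+                credential_id="cred-123",
+            )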
+
+        :param model_type: model type
+        :param model: model name
+        :param credential_id: credential id
+        :return:
+        """
+        with Session(db.engine) as session:
+            stmt = select(ProviderModelCredential).where(
+                ProviderModelCredential.id == credential_id,
+                ProviderModelCredential.tenant_id == self.tenant_id,
+                ProviderModelCredential.provider_name == self.provider.provider,
+                ProviderModelCredential.model_name == model,
+                ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+            )
+            credential_record = session.execute(stmt).scalar_one_or_none()
+            if not credential_record:
+                raise ValueError("Credential record not found.")
+
+            # get the custom model record if it already exists
+            provider_model_record = self._get_custom_model_record(model_type=model_type, model=model, session=session)
+
+            if not provider_model_record:
+                # create provider model record
+                provider_model_record = ProviderModel(
+                    tenant_id=self.tenant_id,
+                    provider_name=self.provider.provider,
+                    model_name=model,
+                    model_type=model_type.to_origin_model_type(),
+                    credential_id=credential_id,
+                )
+            else:
+                if provider_model_record.credential_id == credential_record.id:
+                    raise ValueError("The credential is already bound to this model.")
+                provider_model_record.credential_id = credential_record.id
+                provider_model_record.updated_at = naive_utc_now()
+            session.add(provider_model_record)
+            session.commit()
+
+    def switch_custom_model_credential(self, model_type: ModelType, model: str, credential_id: str) -> None:
+        """
+        Switch the custom model credential.
+
+        :param model_type: model type
+        :param model: model name
+        :param credential_id: credential id
+        :return:
+        """
+        with Session(db.engine) as session:
+            stmt = select(ProviderModelCredential).where(
+                ProviderModelCredential.id == credential_id,
+                ProviderModelCredential.tenant_id == self.tenant_id,
+                ProviderModelCredential.provider_name == self.provider.provider,
+                ProviderModelCredential.model_name == model,
+                ProviderModelCredential.model_type == model_type.to_origin_model_type(),
+            )
+            credential_record = session.execute(stmt).scalar_one_or_none()
+            if not credential_record:
+                raise ValueError("Credential record not found.")
+
+            provider_model_record = self._get_custom_model_record(model_type=model_type, model=model, session=session)
+            if not provider_model_record:
+                raise ValueError("Custom model record not found.")
+
+            provider_model_record.credential_id = credential_record.id
+            provider_model_record.updated_at = naive_utc_now()
+            session.add(provider_model_record)
+            session.commit()
+
+    def delete_custom_model(self, model_type: ModelType, model: str) -> None:
+        """
+        Delete custom model.
+        :param model_type: model type
+        :param model: model name
+        :return:
+        """
+        with Session(db.engine) as session:
+            # get provider model
+            provider_model_record = self._get_custom_model_record(model_type=model_type, model=model, session=session)
+
+            # delete provider model
+            if provider_model_record:
+                session.delete(provider_model_record)
+                session.commit()
+
+                provider_model_credentials_cache = ProviderCredentialsCache(
+                    tenant_id=self.tenant_id,
+                    identity_id=provider_model_record.id,
+                    cache_type=ProviderCredentialsCacheType.MODEL,
+                )
+
+                provider_model_credentials_cache.delete()
+
+    def _get_provider_model_setting(
+        self, model_type: ModelType, model: str, session: Session
+    ) -> ProviderModelSetting | None:
         """
         Get provider model setting.
""" @@ -479,16 +1129,13 @@ class ProviderConfiguration(BaseModel): if model_provider_id.is_langgenius(): provider_names.append(model_provider_id.provider_name) - return ( - db.session.query(ProviderModelSetting) - .where( - ProviderModelSetting.tenant_id == self.tenant_id, - ProviderModelSetting.provider_name.in_(provider_names), - ProviderModelSetting.model_type == model_type.to_origin_model_type(), - ProviderModelSetting.model_name == model, - ) - .first() + stmt = select(ProviderModelSetting).where( + ProviderModelSetting.tenant_id == self.tenant_id, + ProviderModelSetting.provider_name.in_(provider_names), + ProviderModelSetting.model_type == model_type.to_origin_model_type(), + ProviderModelSetting.model_name == model, ) + return session.execute(stmt).scalars().first() def enable_model(self, model_type: ModelType, model: str) -> ProviderModelSetting: """ @@ -497,21 +1144,23 @@ class ProviderConfiguration(BaseModel): :param model: model name :return: """ - model_setting = self._get_provider_model_setting(model_type, model) + with Session(db.engine) as session: + model_setting = self._get_provider_model_setting(model_type=model_type, model=model, session=session) - if model_setting: - model_setting.enabled = True - model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.commit() - else: - model_setting = ProviderModelSetting() - model_setting.tenant_id = self.tenant_id - model_setting.provider_name = self.provider.provider - model_setting.model_type = model_type.to_origin_model_type() - model_setting.model_name = model - model_setting.enabled = True - db.session.add(model_setting) - db.session.commit() + if model_setting: + model_setting.enabled = True + model_setting.updated_at = naive_utc_now() + + else: + model_setting = ProviderModelSetting( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + model_type=model_type.to_origin_model_type(), + model_name=model, + enabled=True, + ) + session.add(model_setting) + session.commit() return model_setting @@ -522,21 +1171,22 @@ class ProviderConfiguration(BaseModel): :param model: model name :return: """ - model_setting = self._get_provider_model_setting(model_type, model) + with Session(db.engine) as session: + model_setting = self._get_provider_model_setting(model_type=model_type, model=model, session=session) - if model_setting: - model_setting.enabled = False - model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.commit() - else: - model_setting = ProviderModelSetting() - model_setting.tenant_id = self.tenant_id - model_setting.provider_name = self.provider.provider - model_setting.model_type = model_type.to_origin_model_type() - model_setting.model_name = model - model_setting.enabled = False - db.session.add(model_setting) - db.session.commit() + if model_setting: + model_setting.enabled = False + model_setting.updated_at = naive_utc_now() + else: + model_setting = ProviderModelSetting( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + model_type=model_type.to_origin_model_type(), + model_name=model, + enabled=False, + ) + session.add(model_setting) + session.commit() return model_setting @@ -547,27 +1197,8 @@ class ProviderConfiguration(BaseModel): :param model: model name :return: """ - return self._get_provider_model_setting(model_type, model) - - def _get_load_balancing_config(self, model_type: ModelType, model: str) -> Optional[LoadBalancingModelConfig]: - """ - Get load balancing config. 
- """ - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) - - return ( - db.session.query(LoadBalancingModelConfig) - .where( - LoadBalancingModelConfig.tenant_id == self.tenant_id, - LoadBalancingModelConfig.provider_name.in_(provider_names), - LoadBalancingModelConfig.model_type == model_type.to_origin_model_type(), - LoadBalancingModelConfig.model_name == model, - ) - .first() - ) + with Session(db.engine) as session: + return self._get_provider_model_setting(model_type=model_type, model=model, session=session) def enable_model_load_balancing(self, model_type: ModelType, model: str) -> ProviderModelSetting: """ @@ -581,35 +1212,32 @@ class ProviderConfiguration(BaseModel): if model_provider_id.is_langgenius(): provider_names.append(model_provider_id.provider_name) - load_balancing_config_count = ( - db.session.query(LoadBalancingModelConfig) - .where( + with Session(db.engine) as session: + stmt = select(func.count(LoadBalancingModelConfig.id)).where( LoadBalancingModelConfig.tenant_id == self.tenant_id, LoadBalancingModelConfig.provider_name.in_(provider_names), LoadBalancingModelConfig.model_type == model_type.to_origin_model_type(), LoadBalancingModelConfig.model_name == model, ) - .count() - ) + load_balancing_config_count = session.execute(stmt).scalar() or 0 + if load_balancing_config_count <= 1: + raise ValueError("Model load balancing configuration must be more than 1.") - if load_balancing_config_count <= 1: - raise ValueError("Model load balancing configuration must be more than 1.") + model_setting = self._get_provider_model_setting(model_type=model_type, model=model, session=session) - model_setting = self._get_provider_model_setting(model_type, model) - - if model_setting: - model_setting.load_balancing_enabled = True - model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.commit() - else: - model_setting = ProviderModelSetting() - model_setting.tenant_id = self.tenant_id - model_setting.provider_name = self.provider.provider - model_setting.model_type = model_type.to_origin_model_type() - model_setting.model_name = model - model_setting.load_balancing_enabled = True - db.session.add(model_setting) - db.session.commit() + if model_setting: + model_setting.load_balancing_enabled = True + model_setting.updated_at = naive_utc_now() + else: + model_setting = ProviderModelSetting( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + model_type=model_type.to_origin_model_type(), + model_name=model, + load_balancing_enabled=True, + ) + session.add(model_setting) + session.commit() return model_setting @@ -620,35 +1248,23 @@ class ProviderConfiguration(BaseModel): :param model: model name :return: """ - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) - model_setting = ( - db.session.query(ProviderModelSetting) - .where( - ProviderModelSetting.tenant_id == self.tenant_id, - ProviderModelSetting.provider_name.in_(provider_names), - ProviderModelSetting.model_type == model_type.to_origin_model_type(), - ProviderModelSetting.model_name == model, - ) - .first() - ) + with Session(db.engine) as session: + model_setting = self._get_provider_model_setting(model_type=model_type, model=model, session=session) - if model_setting: - 
model_setting.load_balancing_enabled = False - model_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.commit() - else: - model_setting = ProviderModelSetting() - model_setting.tenant_id = self.tenant_id - model_setting.provider_name = self.provider.provider - model_setting.model_type = model_type.to_origin_model_type() - model_setting.model_name = model - model_setting.load_balancing_enabled = False - db.session.add(model_setting) - db.session.commit() + if model_setting: + model_setting.load_balancing_enabled = False + model_setting.updated_at = naive_utc_now() + else: + model_setting = ProviderModelSetting( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + model_type=model_type.to_origin_model_type(), + model_name=model, + load_balancing_enabled=False, + ) + session.add(model_setting) + session.commit() return model_setting @@ -664,7 +1280,7 @@ class ProviderConfiguration(BaseModel): # Get model instance of LLM return model_provider_factory.get_model_type_instance(provider=self.provider.provider, model_type=model_type) - def get_model_schema(self, model_type: ModelType, model: str, credentials: dict) -> AIModelEntity | None: + def get_model_schema(self, model_type: ModelType, model: str, credentials: dict | None) -> AIModelEntity | None: """ Get model schema """ @@ -673,7 +1289,7 @@ class ProviderConfiguration(BaseModel): provider=self.provider.provider, model_type=model_type, model=model, credentials=credentials ) - def switch_preferred_provider_type(self, provider_type: ProviderType) -> None: + def switch_preferred_provider_type(self, provider_type: ProviderType, session: Session | None = None) -> None: """ Switch preferred provider type. :param provider_type: @@ -685,31 +1301,35 @@ class ProviderConfiguration(BaseModel): if provider_type == ProviderType.SYSTEM and not self.system_configuration.enabled: return - # get preferred provider - model_provider_id = ModelProviderID(self.provider.provider) - provider_names = [self.provider.provider] - if model_provider_id.is_langgenius(): - provider_names.append(model_provider_id.provider_name) + def _switch(s: Session) -> None: + # get preferred provider + model_provider_id = ModelProviderID(self.provider.provider) + provider_names = [self.provider.provider] + if model_provider_id.is_langgenius(): + provider_names.append(model_provider_id.provider_name) - preferred_model_provider = ( - db.session.query(TenantPreferredModelProvider) - .where( + stmt = select(TenantPreferredModelProvider).where( TenantPreferredModelProvider.tenant_id == self.tenant_id, TenantPreferredModelProvider.provider_name.in_(provider_names), ) - .first() - ) + preferred_model_provider = s.execute(stmt).scalars().first() - if preferred_model_provider: - preferred_model_provider.preferred_provider_type = provider_type.value + if preferred_model_provider: + preferred_model_provider.preferred_provider_type = provider_type.value + else: + preferred_model_provider = TenantPreferredModelProvider( + tenant_id=self.tenant_id, + provider_name=self.provider.provider, + preferred_provider_type=provider_type.value, + ) + s.add(preferred_model_provider) + s.commit() + + if session: + return _switch(session) else: - preferred_model_provider = TenantPreferredModelProvider() - preferred_model_provider.tenant_id = self.tenant_id - preferred_model_provider.provider_name = self.provider.provider - preferred_model_provider.preferred_provider_type = provider_type.value - db.session.add(preferred_model_provider) - - 
db.session.commit() + with Session(db.engine) as session: + return _switch(session) def extract_secret_variables(self, credential_form_schemas: list[CredentialFormSchema]) -> list[str]: """ @@ -973,14 +1593,24 @@ class ProviderConfiguration(BaseModel): status = ModelStatus.ACTIVE if credentials else ModelStatus.NO_CONFIGURE load_balancing_enabled = False + has_invalid_load_balancing_configs = False if m.model_type in model_setting_map and m.model in model_setting_map[m.model_type]: model_setting = model_setting_map[m.model_type][m.model] if model_setting.enabled is False: status = ModelStatus.DISABLED - if len(model_setting.load_balancing_configs) > 1: + provider_model_lb_configs = [ + config + for config in model_setting.load_balancing_configs + if config.credential_source_type != "custom_model" + ] + + if len(provider_model_lb_configs) > 1: load_balancing_enabled = True + if any(config.name == "__delete__" for config in provider_model_lb_configs): + has_invalid_load_balancing_configs = True + provider_models.append( ModelWithProviderEntity( model=m.model, @@ -993,6 +1623,7 @@ class ProviderConfiguration(BaseModel): provider=SimpleModelProviderEntity(self.provider), status=status, load_balancing_enabled=load_balancing_enabled, + has_invalid_load_balancing_configs=has_invalid_load_balancing_configs, ) ) @@ -1017,6 +1648,7 @@ class ProviderConfiguration(BaseModel): status = ModelStatus.ACTIVE load_balancing_enabled = False + has_invalid_load_balancing_configs = False if ( custom_model_schema.model_type in model_setting_map and custom_model_schema.model in model_setting_map[custom_model_schema.model_type] @@ -1025,9 +1657,21 @@ class ProviderConfiguration(BaseModel): if model_setting.enabled is False: status = ModelStatus.DISABLED - if len(model_setting.load_balancing_configs) > 1: + custom_model_lb_configs = [ + config + for config in model_setting.load_balancing_configs + if config.credential_source_type != "provider" + ] + + if len(custom_model_lb_configs) > 1: load_balancing_enabled = True + if any(config.name == "__delete__" for config in custom_model_lb_configs): + has_invalid_load_balancing_configs = True + + if len(model_configuration.available_model_credentials) > 0 and not model_configuration.credentials: + status = ModelStatus.CREDENTIAL_REMOVED + provider_models.append( ModelWithProviderEntity( model=custom_model_schema.model, @@ -1040,6 +1684,7 @@ class ProviderConfiguration(BaseModel): provider=SimpleModelProviderEntity(self.provider), status=status, load_balancing_enabled=load_balancing_enabled, + has_invalid_load_balancing_configs=has_invalid_load_balancing_configs, ) ) diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index a5a6e62bd7..1b87bffe57 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -69,6 +69,15 @@ class QuotaConfiguration(BaseModel): restrict_models: list[RestrictModel] = [] +class CredentialConfiguration(BaseModel): + """ + Model class for credential configuration. + """ + + credential_id: str + credential_name: str + + class SystemConfiguration(BaseModel): """ Model class for provider system configuration. 
@@ -86,6 +95,9 @@ class CustomProviderConfiguration(BaseModel): """ credentials: dict + current_credential_id: Optional[str] = None + current_credential_name: Optional[str] = None + available_credentials: list[CredentialConfiguration] = [] class CustomModelConfiguration(BaseModel): @@ -95,7 +107,10 @@ class CustomModelConfiguration(BaseModel): model: str model_type: ModelType - credentials: dict + credentials: dict | None + current_credential_id: Optional[str] = None + current_credential_name: Optional[str] = None + available_model_credentials: list[CredentialConfiguration] = [] # pydantic configs model_config = ConfigDict(protected_namespaces=()) @@ -118,6 +133,7 @@ class ModelLoadBalancingConfiguration(BaseModel): id: str name: str credentials: dict + credential_source_type: str | None = None class ModelSettings(BaseModel): diff --git a/api/core/extension/extensible.py b/api/core/extension/extensible.py index 557f7eb1ed..fa32b29f31 100644 --- a/api/core/extension/extensible.py +++ b/api/core/extension/extensible.py @@ -10,6 +10,8 @@ from pydantic import BaseModel from core.helper.position_helper import sort_to_dict_by_position_map +logger = logging.getLogger(__name__) + class ExtensionModule(enum.Enum): MODERATION = "moderation" @@ -17,7 +19,7 @@ class ExtensionModule(enum.Enum): class ModuleExtension(BaseModel): - extension_class: Any = None + extension_class: Optional[Any] = None name: str label: Optional[dict] = None form_schema: Optional[list] = None @@ -66,7 +68,7 @@ class Extensible: # Check for extension module file if (extension_name + ".py") not in file_names: - logging.warning("Missing %s.py file in %s, Skip.", extension_name, subdir_path) + logger.warning("Missing %s.py file in %s, Skip.", extension_name, subdir_path) continue # Check for builtin flag and position @@ -95,7 +97,7 @@ class Extensible: break if not extension_class: - logging.warning("Missing subclass of %s in %s, Skip.", cls.__name__, module_name) + logger.warning("Missing subclass of %s in %s, Skip.", cls.__name__, module_name) continue # Load schema if not builtin @@ -103,7 +105,7 @@ class Extensible: if not builtin: json_path = os.path.join(subdir_path, "schema.json") if not os.path.exists(json_path): - logging.warning("Missing schema.json file in %s, Skip.", subdir_path) + logger.warning("Missing schema.json file in %s, Skip.", subdir_path) continue with open(json_path, encoding="utf-8") as f: @@ -122,7 +124,7 @@ class Extensible: ) except Exception as e: - logging.exception("Error scanning extensions") + logger.exception("Error scanning extensions") raise # Sort extensions by position diff --git a/api/core/extension/extension.py b/api/core/extension/extension.py index 9eb9e0306b..50c3f9b5f4 100644 --- a/api/core/extension/extension.py +++ b/api/core/extension/extension.py @@ -38,6 +38,7 @@ class Extension: def extension_class(self, module: ExtensionModule, extension_name: str) -> type: module_extension = self.module_extension(module, extension_name) + assert module_extension.extension_class is not None t: type = module_extension.extension_class return t diff --git a/api/core/helper/encrypter.py b/api/core/helper/encrypter.py index f761d20374..cac7e8e6e0 100644 --- a/api/core/helper/encrypter.py +++ b/api/core/helper/encrypter.py @@ -17,6 +17,7 @@ def encrypt_token(tenant_id: str, token: str): if not (tenant := db.session.query(Tenant).where(Tenant.id == tenant_id).first()): raise ValueError(f"Tenant with id {tenant_id} not found") + assert tenant.encrypt_public_key is not None encrypted_token = 
rsa.encrypt(token, tenant.encrypt_public_key) return base64.b64encode(encrypted_token).decode() diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index 251309fa2c..159c5d23fa 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -4,6 +4,8 @@ import sys from types import ModuleType from typing import AnyStr +logger = logging.getLogger(__name__) + def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_lazy_loader: bool = False) -> ModuleType: """ @@ -30,7 +32,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz spec.loader.exec_module(module) return module except Exception as e: - logging.exception("Failed to load module %s from script file '%s'", module_name, repr(py_file_path)) + logger.exception("Failed to load module %s from script file '%s'", module_name, repr(py_file_path)) raise e diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 329527633c..efeba9e5ee 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -9,6 +9,8 @@ import httpx from configs import dify_config +logger = logging.getLogger(__name__) + SSRF_DEFAULT_MAX_RETRIES = dify_config.SSRF_DEFAULT_MAX_RETRIES HTTP_REQUEST_NODE_SSL_VERIFY = True # Default value for HTTP_REQUEST_NODE_SSL_VERIFY is True @@ -73,12 +75,12 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): if response.status_code not in STATUS_FORCELIST: return response else: - logging.warning( + logger.warning( "Received status code %s for URL %s which is in the force list", response.status_code, url ) except httpx.RequestError as e: - logging.warning("Request to URL %s failed on attempt %s: %s", url, retries + 1, e) + logger.warning("Request to URL %s failed on attempt %s: %s", url, retries + 1, e) if max_retries == 0: raise diff --git a/api/core/helper/trace_id_helper.py b/api/core/helper/trace_id_helper.py index df42837796..5cd0ea5c66 100644 --- a/api/core/helper/trace_id_helper.py +++ b/api/core/helper/trace_id_helper.py @@ -1,3 +1,4 @@ +import contextlib import re from collections.abc import Mapping from typing import Any, Optional @@ -97,10 +98,8 @@ def parse_traceparent_header(traceparent: str) -> Optional[str]: Reference: W3C Trace Context Specification: https://www.w3.org/TR/trace-context/ """ - try: + with contextlib.suppress(Exception): parts = traceparent.split("-") if len(parts) == 4 and len(parts[1]) == 32: return parts[1] - except Exception: - pass return None diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 2387658bb6..a8e6c261c2 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -1,5 +1,4 @@ import concurrent.futures -import datetime import json import logging import re @@ -9,7 +8,6 @@ import uuid from typing import Any, Optional, cast from flask import current_app -from flask_login import current_user from sqlalchemy.orm.exc import ObjectDeletedError from configs import dify_config @@ -30,16 +28,19 @@ from core.rag.splitter.fixed_text_splitter import ( FixedRecursiveCharacterTextSplitter, ) from core.rag.splitter.text_splitter import TextSplitter -from core.tools.utils.rag_web_reader import get_image_upload_file_ids +from core.tools.utils.web_reader_tool import get_image_upload_file_ids from extensions.ext_database import db from extensions.ext_redis import redis_client from extensions.ext_storage import storage from libs import helper +from libs.datetime_utils 
import naive_utc_now from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegment from models.dataset import Document as DatasetDocument from models.model import UploadFile from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + class IndexingRunner: def __init__(self): @@ -88,15 +89,15 @@ class IndexingRunner: except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = str(e.description) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() except ObjectDeletedError: - logging.warning("Document deleted, document id: %s", dataset_document.id) + logger.warning("Document deleted, document id: %s", dataset_document.id) except Exception as e: - logging.exception("consume document failed") + logger.exception("consume document failed") dataset_document.indexing_status = "error" dataset_document.error = str(e) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() def run_in_splitting_status(self, dataset_document: DatasetDocument): @@ -151,13 +152,13 @@ class IndexingRunner: except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = str(e.description) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() except Exception as e: - logging.exception("consume document failed") + logger.exception("consume document failed") dataset_document.indexing_status = "error" dataset_document.error = str(e) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() def run_in_indexing_status(self, dataset_document: DatasetDocument): @@ -209,13 +210,6 @@ class IndexingRunner: documents.append(document) # build index - # get the process rule - processing_rule = ( - db.session.query(DatasetProcessRule) - .where(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) - .first() - ) - index_type = dataset_document.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() self._load( @@ -226,13 +220,13 @@ class IndexingRunner: except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = str(e.description) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() except Exception as e: - logging.exception("consume document failed") + logger.exception("consume document failed") dataset_document.indexing_status = "error" dataset_document.error = str(e) - dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.stopped_at = naive_utc_now() db.session.commit() def indexing_estimate( @@ -295,7 +289,7 @@ class IndexingRunner: text_docs, embedding_model_instance=embedding_model_instance, process_rule=processing_rule.to_dict(), - tenant_id=current_user.current_tenant_id, + tenant_id=tenant_id, doc_language=doc_language, preview=True, ) @@ -322,7 +316,7 @@ class IndexingRunner: try: storage.delete(image_file.key) except Exception: - logging.exception( + logger.exception( "Delete image_files failed while indexing_estimate, \ image_upload_file_is: %s", 
upload_file_id, @@ -401,7 +395,7 @@ class IndexingRunner: after_indexing_status="splitting", extra_update_params={ DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs), - DatasetDocument.parsing_completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DatasetDocument.parsing_completed_at: naive_utc_now(), }, ) @@ -584,7 +578,7 @@ class IndexingRunner: after_indexing_status="completed", extra_update_params={ DatasetDocument.tokens: tokens, - DatasetDocument.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DatasetDocument.completed_at: naive_utc_now(), DatasetDocument.indexing_latency: indexing_end_at - indexing_start_at, DatasetDocument.error: None, }, @@ -609,7 +603,7 @@ class IndexingRunner: { DocumentSegment.status: "completed", DocumentSegment.enabled: True, - DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.completed_at: naive_utc_now(), } ) @@ -640,7 +634,7 @@ class IndexingRunner: { DocumentSegment.status: "completed", DocumentSegment.enabled: True, - DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.completed_at: naive_utc_now(), } ) @@ -728,7 +722,7 @@ class IndexingRunner: doc_store.add_documents(docs=documents, save_child=dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX) # update document status to indexing - cur_time = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + cur_time = naive_utc_now() self._update_document_index_status( document_id=dataset_document.id, after_indexing_status="indexing", @@ -743,7 +737,7 @@ class IndexingRunner: dataset_document_id=dataset_document.id, update_params={ DocumentSegment.status: "indexing", - DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.indexing_at: naive_utc_now(), }, ) pass diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index 64fc3a3e80..c5c10f096d 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -31,6 +31,8 @@ from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution from core.workflow.graph_engine.entities.event import AgentLogEvent from models import App, Message, WorkflowNodeExecutionModel, db +logger = logging.getLogger(__name__) + class LLMGenerator: @classmethod @@ -68,7 +70,7 @@ class LLMGenerator: result_dict = json.loads(cleaned_answer) answer = result_dict["Your Output"] except json.JSONDecodeError as e: - logging.exception("Failed to generate name after answer, use query instead") + logger.exception("Failed to generate name after answer, use query instead") answer = query name = answer.strip() @@ -125,7 +127,7 @@ class LLMGenerator: except InvokeError: questions = [] except Exception: - logging.exception("Failed to generate suggested questions after answer") + logger.exception("Failed to generate suggested questions after answer") questions = [] return questions @@ -173,7 +175,7 @@ class LLMGenerator: error = str(e) error_step = "generate rule config" except Exception as e: - logging.exception("Failed to generate rule config, model: %s", model_config.get("name")) + logger.exception("Failed to generate rule config, model: %s", model_config.get("name")) rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. 
Error: {error}" if error else "" @@ -270,7 +272,7 @@ class LLMGenerator: error_step = "generate conversation opener" except Exception as e: - logging.exception("Failed to generate rule config, model: %s", model_config.get("name")) + logger.exception("Failed to generate rule config, model: %s", model_config.get("name")) rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -319,7 +321,7 @@ class LLMGenerator: error = str(e) return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"} except Exception as e: - logging.exception( + logger.exception( "Failed to invoke LLM model, model: %s, language: %s", model_config.get("name"), code_language ) return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} @@ -392,16 +394,15 @@ class LLMGenerator: error = str(e) return {"output": "", "error": f"Failed to generate JSON Schema. Error: {error}"} except Exception as e: - logging.exception("Failed to invoke LLM model, model: %s", model_config.get("name")) + logger.exception("Failed to invoke LLM model, model: %s", model_config.get("name")) return {"output": "", "error": f"An unexpected error occurred: {str(e)}"} @staticmethod def instruction_modify_legacy( tenant_id: str, flow_id: str, current: str, instruction: str, model_config: dict, ideal_output: str | None ) -> dict: - app: App | None = db.session.query(App).filter(App.id == flow_id).first() last_run: Message | None = ( - db.session.query(Message).filter(Message.app_id == flow_id).order_by(Message.created_at.desc()).first() + db.session.query(Message).where(Message.app_id == flow_id).order_by(Message.created_at.desc()).first() ) if not last_run: return LLMGenerator.__instruction_modify_common( @@ -442,7 +443,7 @@ class LLMGenerator: ) -> dict: from services.workflow_service import WorkflowService - app: App | None = db.session.query(App).filter(App.id == flow_id).first() + app: App | None = db.session.query(App).where(App.id == flow_id).first() if not app: raise ValueError("App not found.") workflow = WorkflowService().get_draft_workflow(app_model=app) @@ -532,7 +533,7 @@ class LLMGenerator: model=model_config.get("name", ""), ) match node_type: - case "llm", "agent": + case "llm" | "agent": system_prompt = LLM_MODIFY_PROMPT_SYSTEM case "code": system_prompt = LLM_MODIFY_CODE_SYSTEM @@ -570,5 +571,7 @@ class LLMGenerator: error = str(e) return {"error": f"Failed to generate code. Error: {error}"} except Exception as e: - logging.exception("Failed to invoke LLM model, model: " + json.dumps(model_config.get("name")), exc_info=e) + logger.exception( + "Failed to invoke LLM model, model: %s", json.dumps(model_config.get("name")), exc_info=True + ) return {"error": f"An unexpected error occurred: {str(e)}"} diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py index e38828578a..9268347526 100644 --- a/api/core/llm_generator/prompts.py +++ b/api/core/llm_generator/prompts.py @@ -414,7 +414,7 @@ When you are modifying the code, you should remember: - Get inputs from the parameters of the function and have explicit type annotations. - Write proper imports at the top of the code. - Use return statement to return the result. -- You should return a `dict`. +- You should return a `dict`. If you need to return a `result: str`, you should `return {"result": result}`. Your output must strictly follow the schema format, do not output any content outside of the JSON body. 
""" # noqa: E501 diff --git a/api/core/mcp/auth/auth_flow.py b/api/core/mcp/auth/auth_flow.py index bcb31a816f..eb783297c3 100644 --- a/api/core/mcp/auth/auth_flow.py +++ b/api/core/mcp/auth/auth_flow.py @@ -5,9 +5,9 @@ import os import secrets import urllib.parse from typing import Optional -from urllib.parse import urljoin +from urllib.parse import urljoin, urlparse -import requests +import httpx from pydantic import BaseModel, ValidationError from core.mcp.auth.auth_provider import OAuthClientProvider @@ -99,24 +99,52 @@ def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackSta return full_state_data +def check_support_resource_discovery(server_url: str) -> tuple[bool, str]: + """Check if the server supports OAuth 2.0 Resource Discovery.""" + b_scheme, b_netloc, b_path, b_params, b_query, b_fragment = urlparse(server_url, "", True) + url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource{b_path}" + if b_query: + url_for_resource_discovery += f"?{b_query}" + if b_fragment: + url_for_resource_discovery += f"#{b_fragment}" + try: + headers = {"MCP-Protocol-Version": LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"} + response = httpx.get(url_for_resource_discovery, headers=headers) + if 200 <= response.status_code < 300: + body = response.json() + if "authorization_server_url" in body: + return True, body["authorization_server_url"][0] + else: + return False, "" + return False, "" + except httpx.RequestError as e: + # Not support resource discovery, fall back to well-known OAuth metadata + return False, "" + + def discover_oauth_metadata(server_url: str, protocol_version: Optional[str] = None) -> Optional[OAuthMetadata]: """Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata.""" - url = urljoin(server_url, "/.well-known/oauth-authorization-server") + # First check if the server supports OAuth 2.0 Resource Discovery + support_resource_discovery, oauth_discovery_url = check_support_resource_discovery(server_url) + if support_resource_discovery: + url = oauth_discovery_url + else: + url = urljoin(server_url, "/.well-known/oauth-authorization-server") try: headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION} - response = requests.get(url, headers=headers) + response = httpx.get(url, headers=headers) if response.status_code == 404: return None - if not response.ok: + if not response.is_success: raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata") return OAuthMetadata.model_validate(response.json()) - except requests.RequestException as e: - if isinstance(e, requests.ConnectionError): - response = requests.get(url) + except httpx.RequestError as e: + if isinstance(e, httpx.ConnectError): + response = httpx.get(url) if response.status_code == 404: return None - if not response.ok: + if not response.is_success: raise ValueError(f"HTTP {response.status_code} trying to load well-known OAuth metadata") return OAuthMetadata.model_validate(response.json()) raise @@ -206,8 +234,8 @@ def exchange_authorization( if client_information.client_secret: params["client_secret"] = client_information.client_secret - response = requests.post(token_url, data=params) - if not response.ok: + response = httpx.post(token_url, data=params) + if not response.is_success: raise ValueError(f"Token exchange failed: HTTP {response.status_code}") return OAuthTokens.model_validate(response.json()) @@ -237,8 +265,8 @@ def refresh_authorization( if client_information.client_secret: params["client_secret"] 
= client_information.client_secret - response = requests.post(token_url, data=params) - if not response.ok: + response = httpx.post(token_url, data=params) + if not response.is_success: raise ValueError(f"Token refresh failed: HTTP {response.status_code}") return OAuthTokens.model_validate(response.json()) @@ -256,12 +284,12 @@ def register_client( else: registration_url = urljoin(server_url, "/register") - response = requests.post( + response = httpx.post( registration_url, json=client_metadata.model_dump(), headers={"Content-Type": "application/json"}, ) - if not response.ok: + if not response.is_success: response.raise_for_status() return OAuthClientInformationFull.model_validate(response.json()) @@ -283,7 +311,7 @@ def auth( raise ValueError("Existing OAuth client information is required when exchanging an authorization code") try: full_information = register_client(server_url, metadata, provider.client_metadata) - except requests.RequestException as e: + except httpx.RequestError as e: raise ValueError(f"Could not register OAuth client: {e}") provider.save_client_information(full_information) client_information = full_information diff --git a/api/core/mcp/mcp_client.py b/api/core/mcp/mcp_client.py index 7d90d51956..d3f97a87cf 100644 --- a/api/core/mcp/mcp_client.py +++ b/api/core/mcp/mcp_client.py @@ -152,7 +152,7 @@ class MCPClient: # ExitStack will handle proper cleanup of all managed context managers self._exit_stack.close() except Exception as e: - logging.exception("Error during cleanup") + logger.exception("Error during cleanup") raise ValueError(f"Error during cleanup: {e}") finally: self._session = None diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py index 3f98aa94ae..1bd533581d 100644 --- a/api/core/mcp/session/base_session.py +++ b/api/core/mcp/session/base_session.py @@ -4,7 +4,7 @@ from collections.abc import Callable from concurrent.futures import Future, ThreadPoolExecutor, TimeoutError from datetime import timedelta from types import TracebackType -from typing import Any, Generic, Self, TypeVar +from typing import Any, Generic, Optional, Self, TypeVar from httpx import HTTPStatusError from pydantic import BaseModel @@ -31,6 +31,9 @@ from core.mcp.types import ( SessionMessage, ) +logger = logging.getLogger(__name__) + + SendRequestT = TypeVar("SendRequestT", ClientRequest, ServerRequest) SendResultT = TypeVar("SendResultT", ClientResult, ServerResult) SendNotificationT = TypeVar("SendNotificationT", ClientNotification, ServerNotification) @@ -209,7 +212,7 @@ class BaseSession( request: SendRequestT, result_type: type[ReceiveResultT], request_read_timeout_seconds: timedelta | None = None, - metadata: MessageMetadata = None, + metadata: Optional[MessageMetadata] = None, ) -> ReceiveResultT: """ Sends a request and wait for a response. Raises an McpError if the @@ -366,7 +369,7 @@ class BaseSession( self._handle_incoming(notification) except Exception as e: # For other validation errors, log and continue - logging.warning("Failed to validate notification: %s. Message was: %s", e, message.message.root) + logger.warning("Failed to validate notification: %s. 
Message was: %s", e, message.message.root) else: # Response or error response_queue = self._response_streams.get(message.message.root.id) if response_queue is not None: @@ -376,7 +379,7 @@ class BaseSession( except queue.Empty: continue except Exception: - logging.exception("Error in message processing loop") + logger.exception("Error in message processing loop") raise def _received_request(self, responder: RequestResponder[ReceiveRequestT, SendResultT]) -> None: diff --git a/api/core/mcp/types.py b/api/core/mcp/types.py index 99d985a781..49aa8e4498 100644 --- a/api/core/mcp/types.py +++ b/api/core/mcp/types.py @@ -1173,7 +1173,7 @@ class SessionMessage: """A message with specific metadata for transport-specific features.""" message: JSONRPCMessage - metadata: MessageMetadata = None + metadata: Optional[MessageMetadata] = None class OAuthClientMetadata(BaseModel): diff --git a/api/core/model_runtime/README.md b/api/core/model_runtime/README.md index b5de7ad412..3abb3f63ac 100644 --- a/api/core/model_runtime/README.md +++ b/api/core/model_runtime/README.md @@ -30,7 +30,7 @@ This module provides the interface for invoking and authenticating various model In addition, this list also returns configurable parameter information and rules for LLM, as shown below: - ![image-20231210144814617](./docs/en_US/images/index/image-20231210144814617.png) + ![image-20231210144814617](./docs/en_US/images/index/image-20231210144814617.png) These parameters are all defined in the backend, allowing different settings for various parameters supported by different models, as detailed in: [Schema](./docs/en_US/schema.md#ParameterRule). @@ -60,8 +60,6 @@ Model Runtime is divided into three layers: It offers direct invocation of various model types, predefined model configuration information, getting predefined/remote model lists, model credential authentication methods. Different models provide additional special methods, like LLM's pre-computed tokens method, cost information obtaining method, etc., **allowing horizontal expansion** for different models under the same provider (within supported model types). 
- - ## Next Steps - Add new provider configuration: [Link](./docs/en_US/provider_scale_out.md) diff --git a/api/core/model_runtime/README_CN.md b/api/core/model_runtime/README_CN.md index 2fc2a60461..19846481e0 100644 --- a/api/core/model_runtime/README_CN.md +++ b/api/core/model_runtime/README_CN.md @@ -20,19 +20,19 @@ ![image-20231210143654461](./docs/zh_Hans/images/index/image-20231210143654461.png) -​ 展示所有已支持的供应商列表,除了返回供应商名称、图标之外,还提供了支持的模型类型列表,预定义模型列表、配置方式以及配置凭据的表单规则等等,规则设计详见:[Schema](./docs/zh_Hans/schema.md)。 +​ 展示所有已支持的供应商列表,除了返回供应商名称、图标之外,还提供了支持的模型类型列表,预定义模型列表、配置方式以及配置凭据的表单规则等等,规则设计详见:[Schema](./docs/zh_Hans/schema.md)。 - 可选择的模型列表展示 ![image-20231210144229650](./docs/zh_Hans/images/index/image-20231210144229650.png) -​ 配置供应商/模型凭据后,可在此下拉(应用编排界面/默认模型)查看可用的 LLM 列表,其中灰色的为未配置凭据供应商的预定义模型列表,方便用户查看已支持的模型。 +​ 配置供应商/模型凭据后,可在此下拉(应用编排界面/默认模型)查看可用的 LLM 列表,其中灰色的为未配置凭据供应商的预定义模型列表,方便用户查看已支持的模型。 -​ 除此之外,该列表还返回了 LLM 可配置的参数信息和规则,如下图: +​ 除此之外,该列表还返回了 LLM 可配置的参数信息和规则,如下图: -​ ![image-20231210144814617](./docs/zh_Hans/images/index/image-20231210144814617.png) +​ ![image-20231210144814617](./docs/zh_Hans/images/index/image-20231210144814617.png) -​ 这里的参数均为后端定义,相比之前只有 5 种固定参数,这里可为不同模型设置所支持的各种参数,详见:[Schema](./docs/zh_Hans/schema.md#ParameterRule)。 +​ 这里的参数均为后端定义,相比之前只有 5 种固定参数,这里可为不同模型设置所支持的各种参数,详见:[Schema](./docs/zh_Hans/schema.md#ParameterRule)。 - 供应商/模型凭据鉴权 @@ -40,7 +40,7 @@ ![image-20231210151628992](./docs/zh_Hans/images/index/image-20231210151628992.png) -​ 供应商列表返回了凭据表单的配置信息,可通过 Runtime 提供的接口对凭据进行鉴权,上图 1 为供应商凭据 DEMO,上图 2 为模型凭据 DEMO。 +​ 供应商列表返回了凭据表单的配置信息,可通过 Runtime 提供的接口对凭据进行鉴权,上图 1 为供应商凭据 DEMO,上图 2 为模型凭据 DEMO。 ## 结构 @@ -57,9 +57,10 @@ Model Runtime 分三层: 提供获取当前供应商模型列表、获取模型实例、供应商凭据鉴权、供应商配置规则信息,**可横向扩展**以支持不同的供应商。 对于供应商/模型凭据,有两种情况 + - 如 OpenAI 这类中心化供应商,需要定义如**api_key**这类的鉴权凭据 - 如[**Xinference**](https://github.com/xorbitsai/inference)这类本地部署的供应商,需要定义如**server_url**这类的地址凭据,有时候还需要定义**model_uid**之类的模型类型凭据,就像下面这样,当在供应商层定义了这些凭据后,就可以在前端页面上直接展示,无需修改前端逻辑。 - ![Alt text](docs/zh_Hans/images/index/image.png) + ![Alt text](docs/zh_Hans/images/index/image.png) 当配置好凭据后,就可以通过 DifyRuntime 的外部接口直接获取到对应供应商所需要的**Schema**(凭据表单规则),从而在可以在不修改前端逻辑的情况下,提供新的供应商/模型的支持。 @@ -76,14 +77,17 @@ Model Runtime 分三层: ## 下一步 ### [增加新的供应商配置 👈🏻](./docs/zh_Hans/provider_scale_out.md) + 当添加后,这里将会出现一个新的供应商 ![Alt text](docs/zh_Hans/images/index/image-1.png) -### [为已存在的供应商新增模型 👈🏻](./docs/zh_Hans/provider_scale_out.md#增加模型) +### [为已存在的供应商新增模型 👈🏻](./docs/zh_Hans/provider_scale_out.md#%E5%A2%9E%E5%8A%A0%E6%A8%A1%E5%9E%8B) + 当添加后,对应供应商的模型列表中将会出现一个新的预定义模型供用户选择,如 GPT-3.5 GPT-4 ChatGLM3-6b 等,而对于支持自定义模型的供应商,则不需要新增模型。 ![Alt text](docs/zh_Hans/images/index/image-2.png) ### [接口的具体实现 👈🏻](./docs/zh_Hans/interfaces.md) + 你可以在这里找到你想要查看的接口的具体实现,以及接口的参数和返回值的具体含义。 diff --git a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md index d845c4bd09..245aa4699c 100644 --- a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md @@ -56,7 +56,6 @@ provider_credential_schema: credential_form_schemas: ``` - Then, we need to determine what credentials are required to define a model in Xinference. - Since it supports three different types of models, we need to specify the model_type to denote the model type. Here is how we can define it: @@ -191,7 +190,6 @@ def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[Pr """ ``` - Sometimes, you might not want to return 0 directly. 
In such cases, you can use `self._get_num_tokens_by_gpt2(text: str)` to get pre-computed tokens and ensure environment variable `PLUGIN_BASED_TOKEN_COUNTING_ENABLED` is set to `true`, This method is provided by the `AIModel` base class, and it uses GPT2's Tokenizer for calculation. However, it should be noted that this is only a substitute and may not be fully accurate. - Model Credentials Validation diff --git a/api/core/model_runtime/docs/en_US/interfaces.md b/api/core/model_runtime/docs/en_US/interfaces.md index 158d4b306b..9a8c2ec942 100644 --- a/api/core/model_runtime/docs/en_US/interfaces.md +++ b/api/core/model_runtime/docs/en_US/interfaces.md @@ -35,12 +35,11 @@ All models need to uniformly implement the following 2 methods: Similar to provider credential verification, this step involves verification for an individual model. - ```python def validate_credentials(self, model: str, credentials: dict) -> None: """ Validate model credentials - + :param model: model name :param credentials: model credentials :return: @@ -77,12 +76,12 @@ All models need to uniformly implement the following 2 methods: The key is the error type thrown to the caller The value is the error type thrown by the model, which needs to be converted into a unified error type for the caller. - + :return: Invoke error mapping """ ``` -​ You can refer to OpenAI's `_invoke_error_mapping` for an example. +​ You can refer to OpenAI's `_invoke_error_mapping` for an example. ### LLM @@ -92,7 +91,6 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl Implement the core method for LLM invocation, which can support both streaming and synchronous returns. - ```python def _invoke(self, model: str, credentials: dict, prompt_messages: list[PromptMessage], model_parameters: dict, @@ -101,7 +99,7 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl -> Union[LLMResult, Generator]: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param prompt_messages: prompt messages @@ -122,7 +120,7 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl The parameters of credential information are defined by either the `provider_credential_schema` or `model_credential_schema` in the provider's YAML configuration file. Inputs such as `api_key` are included. - - `prompt_messages` (array[[PromptMessage](#PromptMessage)]) List of prompts + - `prompt_messages` (array\[[PromptMessage](#PromptMessage)\]) List of prompts If the model is of the `Completion` type, the list only needs to include one [UserPromptMessage](#UserPromptMessage) element; @@ -132,7 +130,7 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl The model parameters are defined by the `parameter_rules` in the model's YAML configuration. - - `tools` (array[[PromptMessageTool](#PromptMessageTool)]) [optional] List of tools, equivalent to the `function` in `function calling`. + - `tools` (array\[[PromptMessageTool](#PromptMessageTool)\]) [optional] List of tools, equivalent to the `function` in `function calling`. That is, the tool list for tool calling. @@ -142,7 +140,7 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl - `stream` (bool) Whether to output in a streaming manner, default is True - Streaming output returns Generator[[LLMResultChunk](#LLMResultChunk)], non-streaming output returns [LLMResult](#LLMResult). 
+ Streaming output returns Generator\[[LLMResultChunk](#LLMResultChunk)\], non-streaming output returns [LLMResult](#LLMResult). - `user` (string) [optional] Unique identifier of the user @@ -150,7 +148,7 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl - Returns - Streaming output returns Generator[[LLMResultChunk](#LLMResultChunk)], non-streaming output returns [LLMResult](#LLMResult). + Streaming output returns Generator\[[LLMResultChunk](#LLMResultChunk)\], non-streaming output returns [LLMResult](#LLMResult). - Pre-calculating Input Tokens @@ -187,7 +185,6 @@ Inherit the `__base.large_language_model.LargeLanguageModel` base class and impl When the provider supports adding custom LLMs, this method can be implemented to allow custom models to fetch model schema. The default return null. - ### TextEmbedding Inherit the `__base.text_embedding_model.TextEmbeddingModel` base class and implement the following interfaces: @@ -200,7 +197,7 @@ Inherit the `__base.text_embedding_model.TextEmbeddingModel` base class and impl -> TextEmbeddingResult: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param texts: texts to embed @@ -256,7 +253,7 @@ Inherit the `__base.rerank_model.RerankModel` base class and implement the follo -> RerankResult: """ Invoke rerank model - + :param model: model name :param credentials: model credentials :param query: search query @@ -302,7 +299,7 @@ Inherit the `__base.speech2text_model.Speech2TextModel` base class and implement def _invoke(self, model: str, credentials: dict, file: IO[bytes], user: Optional[str] = None) -> str: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param file: audio file @@ -339,7 +336,7 @@ Inherit the `__base.text2speech_model.Text2SpeechModel` base class and implement def _invoke(self, model: str, credentials: dict, content_text: str, streaming: bool, user: Optional[str] = None): """ Invoke large language model - + :param model: model name :param credentials: model credentials :param content_text: text content to be translated @@ -381,7 +378,7 @@ Inherit the `__base.moderation_model.ModerationModel` base class and implement t -> bool: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param text: text to moderate @@ -408,11 +405,9 @@ Inherit the `__base.moderation_model.ModerationModel` base class and implement t False indicates that the input text is safe, True indicates otherwise. 
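To make the moderation contract concrete, here is a minimal provider-side sketch. The class name and keyword check are invented for illustration; only the `_invoke` signature and the True-means-flagged return convention come from the interface described above.

```python
from typing import Optional

from core.model_runtime.model_providers.__base.moderation_model import ModerationModel


class ExampleModerationModel(ModerationModel):  # hypothetical provider class
    def _invoke(self, model: str, credentials: dict, text: str, user: Optional[str] = None) -> bool:
        # A real implementation would call the provider's moderation endpoint
        # using `credentials`; a trivial keyword check stands in for that here.
        flagged_terms = {"forbidden", "blocked"}
        # Return True when the text is flagged, False when it is safe.
        return any(term in text.lower() for term in flagged_terms)
```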
- - ## Entities -### PromptMessageRole +### PromptMessageRole Message role @@ -583,7 +578,7 @@ class PromptMessageTool(BaseModel): parameters: dict ``` ---- +______________________________________________________________________ ### LLMResult @@ -650,7 +645,7 @@ class LLMUsage(ModelUsage): latency: float # Request latency (s) ``` ---- +______________________________________________________________________ ### TextEmbeddingResult @@ -680,7 +675,7 @@ class EmbeddingUsage(ModelUsage): latency: float # Request latency (s) ``` ---- +______________________________________________________________________ ### RerankResult diff --git a/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md b/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md index a770ed157b..97968e9988 100644 --- a/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md +++ b/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md @@ -153,8 +153,11 @@ Runtime Errors: - `InvokeConnectionError` Connection error - `InvokeServerUnavailableError` Service provider unavailable + - `InvokeRateLimitError` Rate limit reached + - `InvokeAuthorizationError` Authorization failed + - `InvokeBadRequestError` Parameter error ```python diff --git a/api/core/model_runtime/docs/en_US/provider_scale_out.md b/api/core/model_runtime/docs/en_US/provider_scale_out.md index 07be5811d3..c38c7c0f0c 100644 --- a/api/core/model_runtime/docs/en_US/provider_scale_out.md +++ b/api/core/model_runtime/docs/en_US/provider_scale_out.md @@ -63,6 +63,7 @@ You can also refer to the YAML configuration information under other provider di ### Implementing Provider Code Providers need to inherit the `__base.model_provider.ModelProvider` base class and implement the `validate_provider_credentials` method for unified provider credential verification. For reference, see [AnthropicProvider](https://github.com/langgenius/dify-runtime/blob/main/lib/model_providers/anthropic/anthropic.py). + > If the provider is the type of `customizable-model`, there is no need to implement the `validate_provider_credentials` method. ```python @@ -80,7 +81,7 @@ def validate_provider_credentials(self, credentials: dict) -> None: Of course, you can also preliminarily reserve the implementation of `validate_provider_credentials` and directly reuse it after the model credential verification method is implemented. ---- +______________________________________________________________________ ### Adding Models @@ -166,7 +167,7 @@ In `llm.py`, create an Anthropic LLM class, which we name `AnthropicLargeLanguag -> Union[LLMResult, Generator]: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param prompt_messages: prompt messages @@ -205,7 +206,7 @@ In `llm.py`, create an Anthropic LLM class, which we name `AnthropicLargeLanguag def validate_credentials(self, model: str, credentials: dict) -> None: """ Validate model credentials - + :param model: model name :param credentials: model credentials :return: @@ -232,7 +233,7 @@ In `llm.py`, create an Anthropic LLM class, which we name `AnthropicLargeLanguag The key is the error type thrown to the caller The value is the error type thrown by the model, which needs to be converted into a unified error type for the caller. 
- + :return: Invoke error mapping """ ``` diff --git a/api/core/model_runtime/docs/en_US/schema.md b/api/core/model_runtime/docs/en_US/schema.md index f819a4dbdc..1cea4127f4 100644 --- a/api/core/model_runtime/docs/en_US/schema.md +++ b/api/core/model_runtime/docs/en_US/schema.md @@ -28,8 +28,8 @@ - `url` (object) help link, i18n - `zh_Hans` (string) [optional] Chinese link - `en_US` (string) English link -- `supported_model_types` (array[[ModelType](#ModelType)]) Supported model types -- `configurate_methods` (array[[ConfigurateMethod](#ConfigurateMethod)]) Configuration methods +- `supported_model_types` (array\[[ModelType](#ModelType)\]) Supported model types +- `configurate_methods` (array\[[ConfigurateMethod](#ConfigurateMethod)\]) Configuration methods - `provider_credential_schema` ([ProviderCredentialSchema](#ProviderCredentialSchema)) Provider credential specification - `model_credential_schema` ([ModelCredentialSchema](#ModelCredentialSchema)) Model credential specification @@ -40,23 +40,23 @@ - `zh_Hans` (string) [optional] Chinese label name - `en_US` (string) English label name - `model_type` ([ModelType](#ModelType)) Model type -- `features` (array[[ModelFeature](#ModelFeature)]) [optional] Supported feature list +- `features` (array\[[ModelFeature](#ModelFeature)\]) [optional] Supported feature list - `model_properties` (object) Model properties - `mode` ([LLMMode](#LLMMode)) Mode (available for model type `llm`) - `context_size` (int) Context size (available for model types `llm`, `text-embedding`) - `max_chunks` (int) Maximum number of chunks (available for model types `text-embedding`, `moderation`) - `file_upload_limit` (int) Maximum file upload limit, in MB (available for model type `speech2text`) - `supported_file_extensions` (string) Supported file extension formats, e.g., mp3, mp4 (available for model type `speech2text`) - - `default_voice` (string) default voice, e.g.:alloy,echo,fable,onyx,nova,shimmer(available for model type `tts`) - - `voices` (list) List of available voice.(available for model type `tts`) - - `mode` (string) voice model.(available for model type `tts`) - - `name` (string) voice model display name.(available for model type `tts`) - - `language` (string) the voice model supports languages.(available for model type `tts`) - - `word_limit` (int) Single conversion word limit, paragraph-wise by default(available for model type `tts`) - - `audio_type` (string) Support audio file extension format, e.g.:mp3,wav(available for model type `tts`) - - `max_workers` (int) Number of concurrent workers supporting text and audio conversion(available for model type`tts`) + - `default_voice` (string) default voice, e.g.:alloy,echo,fable,onyx,nova,shimmer(available for model type `tts`) + - `voices` (list) List of available voice.(available for model type `tts`) + - `mode` (string) voice model.(available for model type `tts`) + - `name` (string) voice model display name.(available for model type `tts`) + - `language` (string) the voice model supports languages.(available for model type `tts`) + - `word_limit` (int) Single conversion word limit, paragraph-wise by default(available for model type `tts`) + - `audio_type` (string) Support audio file extension format, e.g.:mp3,wav(available for model type `tts`) + - `max_workers` (int) Number of concurrent workers supporting text and audio conversion(available for model type`tts`) - `max_characters_per_chunk` (int) Maximum characters per chunk (available for model type `moderation`) -- `parameter_rules` 
(array[[ParameterRule](#ParameterRule)]) [optional] Model invocation parameter rules +- `parameter_rules` (array\[[ParameterRule](#ParameterRule)\]) [optional] Model invocation parameter rules - `pricing` ([PriceConfig](#PriceConfig)) [optional] Pricing information - `deprecated` (bool) Whether deprecated. If deprecated, the model will no longer be displayed in the list, but those already configured can continue to be used. Default False. @@ -74,6 +74,7 @@ - `predefined-model` Predefined model Indicates that users can use the predefined models under the provider by configuring the unified provider credentials. + - `customizable-model` Customizable model Users need to add credential configuration for each model. @@ -103,6 +104,7 @@ ### ParameterRule - `name` (string) Actual model invocation parameter name + - `use_template` (string) [optional] Using template By default, 5 variable content configuration templates are preset: @@ -112,7 +114,7 @@ - `frequency_penalty` - `presence_penalty` - `max_tokens` - + In use_template, you can directly set the template variable name, which will use the default configuration in entities.defaults.PARAMETER_RULE_TEMPLATE No need to set any parameters other than `name` and `use_template`. If additional configuration parameters are set, they will override the default configuration. Refer to `openai/llm/gpt-3.5-turbo.yaml`. @@ -155,7 +157,7 @@ ### ProviderCredentialSchema -- `credential_form_schemas` (array[[CredentialFormSchema](#CredentialFormSchema)]) Credential form standard +- `credential_form_schemas` (array\[[CredentialFormSchema](#CredentialFormSchema)\]) Credential form standard ### ModelCredentialSchema @@ -166,7 +168,7 @@ - `placeholder` (object) Model prompt content - `en_US`(string) English - `zh_Hans`(string) [optional] Chinese -- `credential_form_schemas` (array[[CredentialFormSchema](#CredentialFormSchema)]) Credential form standard +- `credential_form_schemas` (array\[[CredentialFormSchema](#CredentialFormSchema)\]) Credential form standard ### CredentialFormSchema @@ -177,12 +179,12 @@ - `type` ([FormType](#FormType)) Form item type - `required` (bool) Whether required - `default`(string) Default value -- `options` (array[[FormOption](#FormOption)]) Specific property of form items of type `select` or `radio`, defining dropdown content +- `options` (array\[[FormOption](#FormOption)\]) Specific property of form items of type `select` or `radio`, defining dropdown content - `placeholder`(object) Specific property of form items of type `text-input`, placeholder content - `en_US`(string) English - `zh_Hans` (string) [optional] Chinese - `max_length` (int) Specific property of form items of type `text-input`, defining maximum input length, 0 for no limit. -- `show_on` (array[[FormShowOnObject](#FormShowOnObject)]) Displayed when other form item values meet certain conditions, displayed always if empty. +- `show_on` (array\[[FormShowOnObject](#FormShowOnObject)\]) Displayed when other form item values meet certain conditions, displayed always if empty. ### FormType @@ -198,7 +200,7 @@ - `en_US`(string) English - `zh_Hans`(string) [optional] Chinese - `value` (string) Dropdown option value -- `show_on` (array[[FormShowOnObject](#FormShowOnObject)]) Displayed when other form item values meet certain conditions, displayed always if empty. +- `show_on` (array\[[FormShowOnObject](#FormShowOnObject)\]) Displayed when other form item values meet certain conditions, displayed always if empty. 
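The `show_on` fields above reduce to a simple all-match rule over the current form values: an item is displayed only when every listed `{variable, value}` pair matches, and an empty list means it is always displayed. A minimal sketch of that rule follows; the helper itself is illustrative, not part of the runtime.

```python
def is_visible(show_on: list[dict], form_values: dict) -> bool:
    # Empty `show_on` means the form item is always displayed; otherwise
    # every {variable, value} condition must match the current values.
    return all(form_values.get(cond["variable"]) == cond["value"] for cond in show_on)


# Example: display `model_uid` only when `model_type` is set to `llm`.
show_on = [{"variable": "model_type", "value": "llm"}]
assert is_visible(show_on, {"model_type": "llm"})
assert not is_visible(show_on, {"model_type": "rerank"})
assert is_visible([], {"anything": "goes"})  # empty list: always displayed
```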
### FormShowOnObject diff --git a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md index 7d30655469..825f9349d7 100644 --- a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md @@ -10,7 +10,6 @@ ![Alt text](images/index/image-3.png) - 在前文中,我们已经知道了供应商无需实现`validate_provider_credential`,Runtime 会自行根据用户在此选择的模型类型和模型名称调用对应的模型层的`validate_credentials`来进行验证。 ### 编写供应商 yaml @@ -55,6 +54,7 @@ provider_credential_schema: 随后,我们需要思考在 Xinference 中定义一个模型需要哪些凭据 - 它支持三种不同的模型,因此,我们需要有`model_type`来指定这个模型的类型,它有三种类型,所以我们这么编写 + ```yaml provider_credential_schema: credential_form_schemas: @@ -76,7 +76,9 @@ provider_credential_schema: label: en_US: Rerank ``` + - 每一个模型都有自己的名称`model_name`,因此需要在这里定义 + ```yaml - variable: model_name type: text-input @@ -88,7 +90,9 @@ provider_credential_schema: zh_Hans: 填写模型名称 en_US: Input model name ``` + - 填写 Xinference 本地部署的地址 + ```yaml - variable: server_url label: @@ -100,7 +104,9 @@ provider_credential_schema: zh_Hans: 在此输入 Xinference 的服务器地址,如 https://example.com/xxx en_US: Enter the url of your Xinference, for example https://example.com/xxx ``` + - 每个模型都有唯一的 model_uid,因此需要在这里定义 + ```yaml - variable: model_uid label: @@ -112,6 +118,7 @@ provider_credential_schema: zh_Hans: 在此输入您的 Model UID en_US: Enter the model uid ``` + 现在,我们就完成了供应商的基础定义。 ### 编写模型代码 @@ -132,7 +139,7 @@ provider_credential_schema: -> Union[LLMResult, Generator]: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param prompt_messages: prompt messages @@ -189,7 +196,7 @@ provider_credential_schema: def validate_credentials(self, model: str, credentials: dict) -> None: """ Validate model credentials - + :param model: model name :param credentials: model credentials :return: @@ -197,78 +204,78 @@ provider_credential_schema: ``` - 模型参数 Schema - + 与自定义类型不同,由于没有在 yaml 文件中定义一个模型支持哪些参数,因此,我们需要动态时间模型参数的 Schema。 - + 如 Xinference 支持`max_tokens` `temperature` `top_p` 这三个模型参数。 - + 但是有的供应商根据不同的模型支持不同的参数,如供应商`OpenLLM`支持`top_k`,但是并不是这个供应商提供的所有模型都支持`top_k`,我们这里举例 A 模型支持`top_k`,B 模型不支持`top_k`,那么我们需要在这里动态生成模型参数的 Schema,如下所示: - - ```python - def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: - """ - used to define customizable model schema - """ - rules = [ - ParameterRule( - name='temperature', type=ParameterType.FLOAT, - use_template='temperature', - label=I18nObject( - zh_Hans='温度', en_US='Temperature' - ) - ), - ParameterRule( - name='top_p', type=ParameterType.FLOAT, - use_template='top_p', - label=I18nObject( - zh_Hans='Top P', en_US='Top P' - ) - ), - ParameterRule( - name='max_tokens', type=ParameterType.INT, - use_template='max_tokens', - min=1, - default=512, - label=I18nObject( - zh_Hans='最大生成长度', en_US='Max Tokens' - ) - ) - ] - # if model is A, add top_k to rules - if model == 'A': - rules.append( - ParameterRule( - name='top_k', type=ParameterType.INT, - use_template='top_k', - min=1, - default=50, - label=I18nObject( - zh_Hans='Top K', en_US='Top K' - ) - ) - ) + ```python + def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]: + """ + used to define customizable model schema + """ + rules = [ + ParameterRule( + name='temperature', type=ParameterType.FLOAT, + use_template='temperature', + label=I18nObject( + zh_Hans='温度', en_US='Temperature' + ) + ), + ParameterRule( + name='top_p', 
type=ParameterType.FLOAT, + use_template='top_p', + label=I18nObject( + zh_Hans='Top P', en_US='Top P' + ) + ), + ParameterRule( + name='max_tokens', type=ParameterType.INT, + use_template='max_tokens', + min=1, + default=512, + label=I18nObject( + zh_Hans='最大生成长度', en_US='Max Tokens' + ) + ) + ] - """ - some NOT IMPORTANT code here - """ + # if model is A, add top_k to rules + if model == 'A': + rules.append( + ParameterRule( + name='top_k', type=ParameterType.INT, + use_template='top_k', + min=1, + default=50, + label=I18nObject( + zh_Hans='Top K', en_US='Top K' + ) + ) + ) - entity = AIModelEntity( - model=model, - label=I18nObject( - en_US=model - ), - fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, - model_type=model_type, - model_properties={ - ModelPropertyKey.MODE: ModelType.LLM, - }, - parameter_rules=rules - ) + """ + some NOT IMPORTANT code here + """ + + entity = AIModelEntity( + model=model, + label=I18nObject( + en_US=model + ), + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_type=model_type, + model_properties={ + ModelPropertyKey.MODE: ModelType.LLM, + }, + parameter_rules=rules + ) + + return entity + ``` - return entity - ``` - - 调用异常错误映射表 当模型调用异常时需要映射到 Runtime 指定的 `InvokeError` 类型,方便 Dify 针对不同错误做不同后续处理。 @@ -278,7 +285,7 @@ provider_credential_schema: - `InvokeConnectionError` 调用连接错误 - `InvokeServerUnavailableError ` 调用服务方不可用 - `InvokeRateLimitError ` 调用达到限额 - - `InvokeAuthorizationError` 调用鉴权失败 + - `InvokeAuthorizationError` 调用鉴权失败 - `InvokeBadRequestError ` 调用传参有误 ```python @@ -289,7 +296,7 @@ provider_credential_schema: The key is the error type thrown to the caller The value is the error type thrown by the model, which needs to be converted into a unified error type for the caller. - + :return: Invoke error mapping """ ``` diff --git a/api/core/model_runtime/docs/zh_Hans/interfaces.md b/api/core/model_runtime/docs/zh_Hans/interfaces.md index 93a48cafb8..8eeeee9ff9 100644 --- a/api/core/model_runtime/docs/zh_Hans/interfaces.md +++ b/api/core/model_runtime/docs/zh_Hans/interfaces.md @@ -49,7 +49,7 @@ class XinferenceProvider(Provider): def validate_credentials(self, model: str, credentials: dict) -> None: """ Validate model credentials - + :param model: model name :param credentials: model credentials :return: @@ -75,7 +75,7 @@ class XinferenceProvider(Provider): - `InvokeConnectionError` 调用连接错误 - `InvokeServerUnavailableError ` 调用服务方不可用 - `InvokeRateLimitError ` 调用达到限额 - - `InvokeAuthorizationError` 调用鉴权失败 + - `InvokeAuthorizationError` 调用鉴权失败 - `InvokeBadRequestError ` 调用传参有误 ```python @@ -86,36 +86,36 @@ class XinferenceProvider(Provider): The key is the error type thrown to the caller The value is the error type thrown by the model, which needs to be converted into a unified error type for the caller. 
- + :return: Invoke error mapping """ ``` 也可以直接抛出对应 Errors,并做如下定义,这样在之后的调用中可以直接抛出`InvokeConnectionError`等异常。 - - ```python - @property - def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: - return { - InvokeConnectionError: [ - InvokeConnectionError - ], - InvokeServerUnavailableError: [ - InvokeServerUnavailableError - ], - InvokeRateLimitError: [ - InvokeRateLimitError - ], - InvokeAuthorizationError: [ - InvokeAuthorizationError - ], - InvokeBadRequestError: [ - InvokeBadRequestError - ], - } - ``` -​ 可参考 OpenAI `_invoke_error_mapping`。 + ```python + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + return { + InvokeConnectionError: [ + InvokeConnectionError + ], + InvokeServerUnavailableError: [ + InvokeServerUnavailableError + ], + InvokeRateLimitError: [ + InvokeRateLimitError + ], + InvokeAuthorizationError: [ + InvokeAuthorizationError + ], + InvokeBadRequestError: [ + InvokeBadRequestError + ], + } + ``` + +​ 可参考 OpenAI `_invoke_error_mapping`。 ### LLM @@ -133,7 +133,7 @@ class XinferenceProvider(Provider): -> Union[LLMResult, Generator]: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param prompt_messages: prompt messages @@ -151,38 +151,38 @@ class XinferenceProvider(Provider): - `model` (string) 模型名称 - `credentials` (object) 凭据信息 - + 凭据信息的参数由供应商 YAML 配置文件的 `provider_credential_schema` 或 `model_credential_schema` 定义,传入如:`api_key` 等。 - - `prompt_messages` (array[[PromptMessage](#PromptMessage)]) Prompt 列表 - + - `prompt_messages` (array\[[PromptMessage](#PromptMessage)\]) Prompt 列表 + 若模型为 `Completion` 类型,则列表只需要传入一个 [UserPromptMessage](#UserPromptMessage) 元素即可; - + 若模型为 `Chat` 类型,需要根据消息不同传入 [SystemPromptMessage](#SystemPromptMessage), [UserPromptMessage](#UserPromptMessage), [AssistantPromptMessage](#AssistantPromptMessage), [ToolPromptMessage](#ToolPromptMessage) 元素列表 - `model_parameters` (object) 模型参数 - + 模型参数由模型 YAML 配置的 `parameter_rules` 定义。 - - `tools` (array[[PromptMessageTool](#PromptMessageTool)]) [optional] 工具列表,等同于 `function calling` 中的 `function`。 - + - `tools` (array\[[PromptMessageTool](#PromptMessageTool)\]) [optional] 工具列表,等同于 `function calling` 中的 `function`。 + 即传入 tool calling 的工具列表。 - `stop` (array[string]) [optional] 停止序列 - + 模型返回将在停止序列定义的字符串之前停止输出。 - `stream` (bool) 是否流式输出,默认 True - - 流式输出返回 Generator[[LLMResultChunk](#LLMResultChunk)],非流式输出返回 [LLMResult](#LLMResult)。 + + 流式输出返回 Generator\[[LLMResultChunk](#LLMResultChunk)\],非流式输出返回 [LLMResult](#LLMResult)。 - `user` (string) [optional] 用户的唯一标识符 - + 可以帮助供应商监控和检测滥用行为。 - 返回 - 流式输出返回 Generator[[LLMResultChunk](#LLMResultChunk)],非流式输出返回 [LLMResult](#LLMResult)。 + 流式输出返回 Generator\[[LLMResultChunk](#LLMResultChunk)\],非流式输出返回 [LLMResult](#LLMResult)。 - 预计算输入 tokens @@ -236,7 +236,7 @@ class XinferenceProvider(Provider): -> TextEmbeddingResult: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param texts: texts to embed @@ -294,7 +294,7 @@ class XinferenceProvider(Provider): -> RerankResult: """ Invoke rerank model - + :param model: model name :param credentials: model credentials :param query: search query @@ -342,7 +342,7 @@ class XinferenceProvider(Provider): -> str: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param file: audio file @@ -379,7 +379,7 @@ class XinferenceProvider(Provider): def _invoke(self, model: str, credentials: dict, content_text: str, streaming: bool, user: 
Optional[str] = None): """ Invoke large language model - + :param model: model name :param credentials: model credentials :param content_text: text content to be translated @@ -421,7 +421,7 @@ class XinferenceProvider(Provider): -> bool: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param text: text to moderate @@ -448,11 +448,9 @@ class XinferenceProvider(Provider): False 代表传入的文本安全,True 则反之。 - - ## 实体 -### PromptMessageRole +### PromptMessageRole 消息角色 @@ -623,7 +621,7 @@ class PromptMessageTool(BaseModel): parameters: dict # 工具参数 dict ``` ---- +______________________________________________________________________ ### LLMResult @@ -690,7 +688,7 @@ class LLMUsage(ModelUsage): latency: float # 请求耗时 (s) ``` ---- +______________________________________________________________________ ### TextEmbeddingResult @@ -720,7 +718,7 @@ class EmbeddingUsage(ModelUsage): latency: float # 请求耗时 (s) ``` ---- +______________________________________________________________________ ### RerankResult diff --git a/api/core/model_runtime/docs/zh_Hans/predefined_model_scale_out.md b/api/core/model_runtime/docs/zh_Hans/predefined_model_scale_out.md index 80e7982e9f..cd4de51ef7 100644 --- a/api/core/model_runtime/docs/zh_Hans/predefined_model_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/predefined_model_scale_out.md @@ -62,7 +62,7 @@ pricing: # 价格信息 建议将所有模型配置都准备完毕后再开始模型代码的实现。 -同样,也可以参考 `model_providers` 目录下其他供应商对应模型类型目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#aimodelentity)。 +同样,也可以参考 `model_providers` 目录下其他供应商对应模型类型目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#aimodelentity)。 ### 实现模型调用代码 @@ -82,7 +82,7 @@ pricing: # 价格信息 -> Union[LLMResult, Generator]: """ Invoke large language model - + :param model: model name :param credentials: model credentials :param prompt_messages: prompt messages @@ -137,7 +137,7 @@ pricing: # 价格信息 def validate_credentials(self, model: str, credentials: dict) -> None: """ Validate model credentials - + :param model: model name :param credentials: model credentials :return: @@ -153,7 +153,7 @@ pricing: # 价格信息 - `InvokeConnectionError` 调用连接错误 - `InvokeServerUnavailableError ` 调用服务方不可用 - `InvokeRateLimitError ` 调用达到限额 - - `InvokeAuthorizationError` 调用鉴权失败 + - `InvokeAuthorizationError` 调用鉴权失败 - `InvokeBadRequestError ` 调用传参有误 ```python @@ -164,7 +164,7 @@ pricing: # 价格信息 The key is the error type thrown to the caller The value is the error type thrown by the model, which needs to be converted into a unified error type for the caller. 
- + :return: Invoke error mapping """ ``` diff --git a/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md b/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md index 2048b506ac..de48b0d11a 100644 --- a/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md @@ -5,7 +5,7 @@ - `predefined-model ` 预定义模型 表示用户只需要配置统一的供应商凭据即可使用供应商下的预定义模型。 - + - `customizable-model` 自定义模型 用户需要新增每个模型的凭据配置,如 Xinference,它同时支持 LLM 和 Text Embedding,但是每个模型都有唯一的**model_uid**,如果想要将两者同时接入,就需要为每个模型配置一个**model_uid**。 @@ -23,9 +23,11 @@ ### 介绍 #### 名词解释 - - `module`: 一个`module`即为一个 Python Package,或者通俗一点,称为一个文件夹,里面包含了一个`__init__.py`文件,以及其他的`.py`文件。 + +- `module`: 一个`module`即为一个 Python Package,或者通俗一点,称为一个文件夹,里面包含了一个`__init__.py`文件,以及其他的`.py`文件。 #### 步骤 + 新增一个供应商主要分为几步,这里简单列出,帮助大家有一个大概的认识,具体的步骤会在下面详细介绍。 - 创建供应商 yaml 文件,根据[ProviderSchema](./schema.md#provider)编写 @@ -117,7 +119,7 @@ model_credential_schema: en_US: Enter your API Base ``` -也可以参考 `model_providers` 目录下其他供应商目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#provider)。 +也可以参考 `model_providers` 目录下其他供应商目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#provider)。 #### 实现供应商代码 @@ -155,12 +157,14 @@ def validate_provider_credentials(self, credentials: dict) -> None: #### 增加模型 #### [增加预定义模型 👈🏻](./predefined_model_scale_out.md) + 对于预定义模型,我们可以通过简单定义一个 yaml,并通过实现调用代码来接入。 #### [增加自定义模型 👈🏻](./customizable_model_scale_out.md) + 对于自定义模型,我们只需要实现调用代码即可接入,但是它需要处理的参数可能会更加复杂。 ---- +______________________________________________________________________ ### 测试 diff --git a/api/core/model_runtime/docs/zh_Hans/schema.md b/api/core/model_runtime/docs/zh_Hans/schema.md index 681f49c435..e68cb500e1 100644 --- a/api/core/model_runtime/docs/zh_Hans/schema.md +++ b/api/core/model_runtime/docs/zh_Hans/schema.md @@ -16,9 +16,9 @@ - `zh_Hans` (string) [optional] 中文描述 - `en_US` (string) 英文描述 - `icon_small` (string) [optional] 供应商小 ICON,存储在对应供应商实现目录下的 `_assets` 目录,中英文策略同 `label` - - `zh_Hans` (string) [optional] 中文 ICON + - `zh_Hans` (string) [optional] 中文 ICON - `en_US` (string) 英文 ICON -- `icon_large` (string) [optional] 供应商大 ICON,存储在对应供应商实现目录下的 _assets 目录,中英文策略同 label +- `icon_large` (string) [optional] 供应商大 ICON,存储在对应供应商实现目录下的 \_assets 目录,中英文策略同 label - `zh_Hans `(string) [optional] 中文 ICON - `en_US` (string) 英文 ICON - `background` (string) [optional] 背景颜色色值,例:#FFFFFF,为空则展示前端默认色值。 @@ -29,8 +29,8 @@ - `url` (object) 帮助链接,i18n - `zh_Hans` (string) [optional] 中文链接 - `en_US` (string) 英文链接 -- `supported_model_types` (array[[ModelType](#ModelType)]) 支持的模型类型 -- `configurate_methods` (array[[ConfigurateMethod](#ConfigurateMethod)]) 配置方式 +- `supported_model_types` (array\[[ModelType](#ModelType)\]) 支持的模型类型 +- `configurate_methods` (array\[[ConfigurateMethod](#ConfigurateMethod)\]) 配置方式 - `provider_credential_schema` ([ProviderCredentialSchema](#ProviderCredentialSchema)) 供应商凭据规格 - `model_credential_schema` ([ModelCredentialSchema](#ModelCredentialSchema)) 模型凭据规格 @@ -41,23 +41,23 @@ - `zh_Hans `(string) [optional] 中文标签名 - `en_US` (string) 英文标签名 - `model_type` ([ModelType](#ModelType)) 模型类型 -- `features` (array[[ModelFeature](#ModelFeature)]) [optional] 支持功能列表 +- `features` (array\[[ModelFeature](#ModelFeature)\]) [optional] 支持功能列表 - `model_properties` (object) 模型属性 - `mode` ([LLMMode](#LLMMode)) 模式 (模型类型 `llm` 可用) - `context_size` (int) 上下文大小 (模型类型 `llm` `text-embedding` 可用) - `max_chunks` (int) 最大分块数量 (模型类型 `text-embedding ` `moderation` 可用) - `file_upload_limit` (int) 文件最大上传限制,单位:MB。(模型类型 `speech2text` 可用) - - 
`supported_file_extensions` (string) 支持文件扩展格式,如:mp3,mp4(模型类型 `speech2text` 可用) - - `default_voice` (string) 缺省音色,必选:alloy,echo,fable,onyx,nova,shimmer(模型类型 `tts` 可用) - - `voices` (list) 可选音色列表。 - - `mode` (string) 音色模型。(模型类型 `tts` 可用) - - `name` (string) 音色模型显示名称。(模型类型 `tts` 可用) - - `language` (string) 音色模型支持语言。(模型类型 `tts` 可用) - - `word_limit` (int) 单次转换字数限制,默认按段落分段(模型类型 `tts` 可用) - - `audio_type` (string) 支持音频文件扩展格式,如:mp3,wav(模型类型 `tts` 可用) - - `max_workers` (int) 支持文字音频转换并发任务数(模型类型 `tts` 可用) - - `max_characters_per_chunk` (int) 每块最大字符数 (模型类型 `moderation` 可用) -- `parameter_rules` (array[[ParameterRule](#ParameterRule)]) [optional] 模型调用参数规则 + - `supported_file_extensions` (string) 支持文件扩展格式,如:mp3,mp4(模型类型 `speech2text` 可用) + - `default_voice` (string) 缺省音色,必选:alloy,echo,fable,onyx,nova,shimmer(模型类型 `tts` 可用) + - `voices` (list) 可选音色列表。 + - `mode` (string) 音色模型。(模型类型 `tts` 可用) + - `name` (string) 音色模型显示名称。(模型类型 `tts` 可用) + - `language` (string) 音色模型支持语言。(模型类型 `tts` 可用) + - `word_limit` (int) 单次转换字数限制,默认按段落分段(模型类型 `tts` 可用) + - `audio_type` (string) 支持音频文件扩展格式,如:mp3,wav(模型类型 `tts` 可用) + - `max_workers` (int) 支持文字音频转换并发任务数(模型类型 `tts` 可用) + - `max_characters_per_chunk` (int) 每块最大字符数 (模型类型 `moderation` 可用) +- `parameter_rules` (array\[[ParameterRule](#ParameterRule)\]) [optional] 模型调用参数规则 - `pricing` ([PriceConfig](#PriceConfig)) [optional] 价格信息 - `deprecated` (bool) 是否废弃。若废弃,模型列表将不再展示,但已经配置的可以继续使用,默认 False。 @@ -75,6 +75,7 @@ - `predefined-model ` 预定义模型 表示用户只需要配置统一的供应商凭据即可使用供应商下的预定义模型。 + - `customizable-model` 自定义模型 用户需要新增每个模型的凭据配置。 @@ -106,7 +107,7 @@ - `name` (string) 调用模型实际参数名 - `use_template` (string) [optional] 使用模板 - + 默认预置了 5 种变量内容配置模板: - `temperature` @@ -114,7 +115,7 @@ - `frequency_penalty` - `presence_penalty` - `max_tokens` - + 可在 use_template 中直接设置模板变量名,将会使用 entities.defaults.PARAMETER_RULE_TEMPLATE 中的默认配置 不用设置除 `name` 和 `use_template` 之外的所有参数,若设置了额外的配置参数,将覆盖默认配置。 可参考 `openai/llm/gpt-3.5-turbo.yaml`。 @@ -157,7 +158,7 @@ ### ProviderCredentialSchema -- `credential_form_schemas` (array[[CredentialFormSchema](#CredentialFormSchema)]) 凭据表单规范 +- `credential_form_schemas` (array\[[CredentialFormSchema](#CredentialFormSchema)\]) 凭据表单规范 ### ModelCredentialSchema @@ -168,7 +169,7 @@ - `placeholder` (object) 模型提示内容 - `en_US`(string) 英文 - `zh_Hans`(string) [optional] 中文 -- `credential_form_schemas` (array[[CredentialFormSchema](#CredentialFormSchema)]) 凭据表单规范 +- `credential_form_schemas` (array\[[CredentialFormSchema](#CredentialFormSchema)\]) 凭据表单规范 ### CredentialFormSchema @@ -179,12 +180,12 @@ - `type` ([FormType](#FormType)) 表单项类型 - `required` (bool) 是否必填 - `default`(string) 默认值 -- `options` (array[[FormOption](#FormOption)]) 表单项为 `select` 或 `radio` 专有属性,定义下拉内容 +- `options` (array\[[FormOption](#FormOption)\]) 表单项为 `select` 或 `radio` 专有属性,定义下拉内容 - `placeholder`(object) 表单项为 `text-input `专有属性,表单项 PlaceHolder - `en_US`(string) 英文 - `zh_Hans` (string) [optional] 中文 - `max_length` (int) 表单项为`text-input`专有属性,定义输入最大长度,0 为不限制。 -- `show_on` (array[[FormShowOnObject](#FormShowOnObject)]) 当其他表单项值符合条件时显示,为空则始终显示。 +- `show_on` (array\[[FormShowOnObject](#FormShowOnObject)\]) 当其他表单项值符合条件时显示,为空则始终显示。 ### FormType @@ -200,7 +201,7 @@ - `en_US`(string) 英文 - `zh_Hans`(string) [optional] 中文 - `value` (string) 下拉选项值 -- `show_on` (array[[FormShowOnObject](#FormShowOnObject)]) 当其他表单项值符合条件时显示,为空则始终显示。 +- `show_on` (array\[[FormShowOnObject](#FormShowOnObject)\]) 当其他表单项值符合条件时显示,为空则始终显示。 ### FormShowOnObject diff --git a/api/core/model_runtime/entities/llm_entities.py 
b/api/core/model_runtime/entities/llm_entities.py index ace2c1f770..dc6032e405 100644 --- a/api/core/model_runtime/entities/llm_entities.py +++ b/api/core/model_runtime/entities/llm_entities.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from collections.abc import Mapping, Sequence from decimal import Decimal from enum import StrEnum -from typing import Any, Optional +from typing import Any, Optional, TypedDict, Union from pydantic import BaseModel, Field @@ -18,6 +20,26 @@ class LLMMode(StrEnum): CHAT = "chat" +class LLMUsageMetadata(TypedDict, total=False): + """ + TypedDict for LLM usage metadata. + All fields are optional. + """ + + prompt_tokens: int + completion_tokens: int + total_tokens: int + prompt_unit_price: Union[float, str] + completion_unit_price: Union[float, str] + total_price: Union[float, str] + currency: str + prompt_price_unit: Union[float, str] + completion_price_unit: Union[float, str] + prompt_price: Union[float, str] + completion_price: Union[float, str] + latency: float + + class LLMUsage(ModelUsage): """ Model class for llm usage. @@ -54,23 +76,27 @@ class LLMUsage(ModelUsage): ) @classmethod - def from_metadata(cls, metadata: dict) -> "LLMUsage": + def from_metadata(cls, metadata: LLMUsageMetadata) -> LLMUsage: """ Create LLMUsage instance from metadata dictionary with default values. Args: - metadata: Dictionary containing usage metadata + metadata: TypedDict containing usage metadata Returns: LLMUsage instance with values from metadata or defaults """ - total_tokens = metadata.get("total_tokens", 0) + prompt_tokens = metadata.get("prompt_tokens", 0) completion_tokens = metadata.get("completion_tokens", 0) - if total_tokens > 0 and completion_tokens == 0: - completion_tokens = total_tokens + total_tokens = metadata.get("total_tokens", 0) + + # If total_tokens is not provided but prompt and completion tokens are, + # calculate total_tokens + if total_tokens == 0 and (prompt_tokens > 0 or completion_tokens > 0): + total_tokens = prompt_tokens + completion_tokens return cls( - prompt_tokens=metadata.get("prompt_tokens", 0), + prompt_tokens=prompt_tokens, completion_tokens=completion_tokens, total_tokens=total_tokens, prompt_unit_price=Decimal(str(metadata.get("prompt_unit_price", 0))), @@ -84,7 +110,7 @@ class LLMUsage(ModelUsage): latency=metadata.get("latency", 0.0), ) - def plus(self, other: "LLMUsage") -> "LLMUsage": + def plus(self, other: LLMUsage) -> LLMUsage: """ Add two LLMUsage instances together. @@ -109,7 +135,7 @@ class LLMUsage(ModelUsage): latency=self.latency + other.latency, ) - def __add__(self, other: "LLMUsage") -> "LLMUsage": + def __add__(self, other: LLMUsage) -> LLMUsage: """ Overload the + operator to add two LLMUsage instances. 
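The revised `from_metadata` above now backfills `total_tokens` when only per-side counts are supplied, and `plus`/`__add__` keep usage records composable. A short usage sketch with illustrative numbers:

```python
from core.model_runtime.entities.llm_entities import LLMUsage

first = LLMUsage.from_metadata({"prompt_tokens": 120, "completion_tokens": 30})
assert first.total_tokens == 150  # derived, since total_tokens was omitted

second = LLMUsage.from_metadata({"prompt_tokens": 10, "completion_tokens": 5, "latency": 0.4})
combined = first + second  # __add__ delegates to plus()
assert combined.total_tokens == 165
assert combined.latency == 0.4
```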
diff --git a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py index b7db0b78bc..68d30112d9 100644 --- a/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py +++ b/api/core/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py @@ -1,10 +1,10 @@ import logging from threading import Lock -from typing import Any +from typing import Any, Optional logger = logging.getLogger(__name__) -_tokenizer: Any = None +_tokenizer: Optional[Any] = None _lock = Lock() diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index f8590b38f8..24cf69a50b 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -201,7 +201,7 @@ class ModelProviderFactory: return filtered_credentials def get_model_schema( - self, *, provider: str, model_type: ModelType, model: str, credentials: dict + self, *, provider: str, model_type: ModelType, model: str, credentials: dict | None ) -> AIModelEntity | None: """ Get model schema diff --git a/api/core/moderation/api/api.py b/api/core/moderation/api/api.py index 332381555b..af51b72cd5 100644 --- a/api/core/moderation/api/api.py +++ b/api/core/moderation/api/api.py @@ -1,6 +1,6 @@ from typing import Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field from core.extension.api_based_extension_requestor import APIBasedExtensionPoint, APIBasedExtensionRequestor from core.helper.encrypter import decrypt_token @@ -11,7 +11,7 @@ from models.api_based_extension import APIBasedExtension class ModerationInputParams(BaseModel): app_id: str = "" - inputs: dict = {} + inputs: dict = Field(default_factory=dict) query: str = "" diff --git a/api/core/moderation/base.py b/api/core/moderation/base.py index d8c392d097..f079478798 100644 --- a/api/core/moderation/base.py +++ b/api/core/moderation/base.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from enum import Enum from typing import Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field from core.extension.extensible import Extensible, ExtensionModule @@ -16,7 +16,7 @@ class ModerationInputsResult(BaseModel): flagged: bool = False action: ModerationAction preset_response: str = "" - inputs: dict = {} + inputs: dict = Field(default_factory=dict) query: str = "" @@ -100,14 +100,14 @@ class Moderation(Extensible, ABC): if not inputs_config.get("preset_response"): raise ValueError("inputs_config.preset_response is required") - if len(inputs_config.get("preset_response", 0)) > 100: + if len(inputs_config.get("preset_response", "0")) > 100: raise ValueError("inputs_config.preset_response must be less than 100 characters") if outputs_config_enabled: if not outputs_config.get("preset_response"): raise ValueError("outputs_config.preset_response is required") - if len(outputs_config.get("preset_response", 0)) > 100: + if len(outputs_config.get("preset_response", "0")) > 100: raise ValueError("outputs_config.preset_response must be less than 100 characters") diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index 82f54582ed..1ddfc4cc29 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -306,7 +306,7 @@ class AliyunDataTrace(BaseTraceInstance): node_span = 
self.build_workflow_task_span(trace_id, workflow_span_id, trace_info, node_execution) return node_span except Exception as e: - logging.debug("Error occurred in build_workflow_node_span: %s", e, exc_info=True) + logger.debug("Error occurred in build_workflow_node_span: %s", e, exc_info=True) return None def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status: diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 7eb5da7e3a..aa2f17553d 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -37,6 +37,8 @@ from models.model import App, AppModelConfig, Conversation, Message, MessageFile from models.workflow import WorkflowAppLog, WorkflowRun from tasks.ops_trace_task import process_trace_tasks +logger = logging.getLogger(__name__) + class OpsTraceProviderConfigMap(dict[str, dict[str, Any]]): def __getitem__(self, provider: str) -> dict[str, Any]: @@ -287,7 +289,7 @@ class OpsTraceManager: # create new tracing_instance and update the cache if it absent tracing_instance = trace_instance(config_class(**decrypt_trace_config)) cls.ops_trace_instances_cache[decrypt_trace_config_key] = tracing_instance - logging.info("new tracing_instance for app_id: %s", app_id) + logger.info("new tracing_instance for app_id: %s", app_id) return tracing_instance @classmethod @@ -328,7 +330,7 @@ class OpsTraceManager: except KeyError: raise ValueError(f"Invalid tracing provider: {tracing_provider}") else: - if tracing_provider is not None: + if tracing_provider is None: raise ValueError(f"Invalid tracing provider: {tracing_provider}") app_config: Optional[App] = db.session.query(App).where(App.id == app_id).first() @@ -849,7 +851,7 @@ class TraceQueueManager: trace_task.app_id = self.app_id trace_manager_queue.put(trace_task) except Exception as e: - logging.exception("Error adding trace task, trace_type %s", trace_task.trace_type) + logger.exception("Error adding trace task, trace_type %s", trace_task.trace_type) finally: self.start_timer() @@ -868,7 +870,7 @@ class TraceQueueManager: if tasks: self.send_to_celery(tasks) except Exception as e: - logging.exception("Error processing trace tasks") + logger.exception("Error processing trace tasks") def start_timer(self): global trace_manager_timer diff --git a/api/core/plugin/backwards_invocation/app.py b/api/core/plugin/backwards_invocation/app.py index e8c9bed099..cf62dc6ab6 100644 --- a/api/core/plugin/backwards_invocation/app.py +++ b/api/core/plugin/backwards_invocation/app.py @@ -154,7 +154,7 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation): """ workflow = app.workflow if not workflow: - raise ValueError("") + raise ValueError("unexpected app type") return WorkflowAppGenerator().generate( app_model=app, diff --git a/api/core/plugin/impl/agent.py b/api/core/plugin/impl/agent.py index 9575c57ac8..3c994ce70a 100644 --- a/api/core/plugin/impl/agent.py +++ b/api/core/plugin/impl/agent.py @@ -8,6 +8,7 @@ from core.plugin.entities.plugin_daemon import ( ) from core.plugin.entities.request import PluginInvokeContext from core.plugin.impl.base import BasePluginClient +from core.plugin.utils.chunk_merger import merge_blob_chunks class PluginAgentClient(BasePluginClient): @@ -113,4 +114,4 @@ class PluginAgentClient(BasePluginClient): "Content-Type": "application/json", }, ) - return response + return merge_blob_chunks(response) diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py index 6f32498b42..6c65bdb0fd 100644 --- 
a/api/core/plugin/impl/base.py +++ b/api/core/plugin/impl/base.py @@ -141,11 +141,11 @@ class BasePluginClient: response.raise_for_status() except HTTPError as e: msg = f"Failed to request plugin daemon, status: {e.response.status_code}, url: {path}" - logging.exception(msg) + logger.exception(msg) raise e except Exception as e: msg = f"Failed to request plugin daemon, url: {path}" - logging.exception(msg) + logger.exception(msg) raise ValueError(msg) from e try: @@ -158,7 +158,7 @@ class BasePluginClient: f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type.__name__)}]," f" url: {path}" ) - logging.exception(msg) + logger.exception(msg) raise ValueError(msg) if rep.code != 0: diff --git a/api/core/plugin/impl/tool.py b/api/core/plugin/impl/tool.py index 04225f95ee..4c1558efcc 100644 --- a/api/core/plugin/impl/tool.py +++ b/api/core/plugin/impl/tool.py @@ -6,6 +6,7 @@ from pydantic import BaseModel from core.plugin.entities.plugin import GenericProviderID, ToolProviderID from core.plugin.entities.plugin_daemon import PluginBasicBooleanResponse, PluginToolProviderEntity from core.plugin.impl.base import BasePluginClient +from core.plugin.utils.chunk_merger import merge_blob_chunks from core.tools.entities.tool_entities import CredentialType, ToolInvokeMessage, ToolParameter @@ -113,61 +114,7 @@ class PluginToolManager(BasePluginClient): }, ) - class FileChunk: - """ - Only used for internal processing. - """ - - bytes_written: int - total_length: int - data: bytearray - - def __init__(self, total_length: int): - self.bytes_written = 0 - self.total_length = total_length - self.data = bytearray(total_length) - - files: dict[str, FileChunk] = {} - for resp in response: - if resp.type == ToolInvokeMessage.MessageType.BLOB_CHUNK: - assert isinstance(resp.message, ToolInvokeMessage.BlobChunkMessage) - # Get blob chunk information - chunk_id = resp.message.id - total_length = resp.message.total_length - blob_data = resp.message.blob - is_end = resp.message.end - - # Initialize buffer for this file if it doesn't exist - if chunk_id not in files: - files[chunk_id] = FileChunk(total_length) - - # If this is the final chunk, yield a complete blob message - if is_end: - yield ToolInvokeMessage( - type=ToolInvokeMessage.MessageType.BLOB, - message=ToolInvokeMessage.BlobMessage(blob=files[chunk_id].data), - meta=resp.meta, - ) - else: - # Check if file is too large (30MB limit) - if files[chunk_id].bytes_written + len(blob_data) > 30 * 1024 * 1024: - # Delete the file if it's too large - del files[chunk_id] - # Skip yielding this message - raise ValueError("File is too large which reached the limit of 30MB") - - # Check if single chunk is too large (8KB limit) - if len(blob_data) > 8192: - # Skip yielding this message - raise ValueError("File chunk is too large which reached the limit of 8KB") - - # Append the blob data to the buffer - files[chunk_id].data[ - files[chunk_id].bytes_written : files[chunk_id].bytes_written + len(blob_data) - ] = blob_data - files[chunk_id].bytes_written += len(blob_data) - else: - yield resp + return merge_blob_chunks(response) def validate_provider_credentials( self, tenant_id: str, user_id: str, provider: str, credentials: dict[str, Any] diff --git a/api/core/plugin/utils/chunk_merger.py b/api/core/plugin/utils/chunk_merger.py new file mode 100644 index 0000000000..21ca2d8d37 --- /dev/null +++ b/api/core/plugin/utils/chunk_merger.py @@ -0,0 +1,92 @@ +from collections.abc import Generator +from dataclasses import dataclass, field +from 
typing import TypeVar, Union, cast + +from core.agent.entities import AgentInvokeMessage +from core.tools.entities.tool_entities import ToolInvokeMessage + +MessageType = TypeVar("MessageType", bound=Union[ToolInvokeMessage, AgentInvokeMessage]) + + +@dataclass +class FileChunk: + """ + Buffer for accumulating file chunks during streaming. + """ + + total_length: int + bytes_written: int = field(default=0, init=False) + data: bytearray = field(init=False) + + def __post_init__(self) -> None: + self.data = bytearray(self.total_length) + + +def merge_blob_chunks( + response: Generator[MessageType, None, None], + max_file_size: int = 30 * 1024 * 1024, + max_chunk_size: int = 8192, +) -> Generator[MessageType, None, None]: + """ + Merge streaming blob chunks into complete blob messages. + + This function processes a stream of plugin invoke messages, accumulating + BLOB_CHUNK messages by their ID until the final chunk is received, + then yielding a single complete BLOB message. + + Args: + response: Generator yielding messages that may include blob chunks + max_file_size: Maximum allowed file size in bytes (default: 30MB) + max_chunk_size: Maximum allowed chunk size in bytes (default: 8KB) + + Yields: + Messages from the response stream, with blob chunks merged into complete blobs + + Raises: + ValueError: If file size exceeds max_file_size or chunk size exceeds max_chunk_size + """ + files: dict[str, FileChunk] = {} + + for resp in response: + if resp.type == ToolInvokeMessage.MessageType.BLOB_CHUNK: + assert isinstance(resp.message, ToolInvokeMessage.BlobChunkMessage) + # Get blob chunk information + chunk_id = resp.message.id + total_length = resp.message.total_length + blob_data = resp.message.blob + is_end = resp.message.end + + # Initialize buffer for this file if it doesn't exist + if chunk_id not in files: + files[chunk_id] = FileChunk(total_length) + + # Check if file is too large (before appending) + if files[chunk_id].bytes_written + len(blob_data) > max_file_size: + # Delete the file if it's too large + del files[chunk_id] + raise ValueError(f"File is too large which reached the limit of {max_file_size / 1024 / 1024}MB") + + # Check if single chunk is too large + if len(blob_data) > max_chunk_size: + raise ValueError(f"File chunk is too large which reached the limit of {max_chunk_size / 1024}KB") + + # Append the blob data to the buffer + files[chunk_id].data[files[chunk_id].bytes_written : files[chunk_id].bytes_written + len(blob_data)] = ( + blob_data + ) + files[chunk_id].bytes_written += len(blob_data) + + # If this is the final chunk, yield a complete blob message + if is_end: + # Create the appropriate message type based on the response type + message_class = type(resp) + merged_message = message_class( + type=ToolInvokeMessage.MessageType.BLOB, + message=ToolInvokeMessage.BlobMessage(blob=files[chunk_id].data[: files[chunk_id].bytes_written]), + meta=resp.meta, + ) + yield cast(MessageType, merged_message) + # Clean up the buffer + del files[chunk_id] + else: + yield resp diff --git a/api/core/prompt/advanced_prompt_transform.py b/api/core/prompt/advanced_prompt_transform.py index 0f0fe65f27..16c145f936 100644 --- a/api/core/prompt/advanced_prompt_transform.py +++ b/api/core/prompt/advanced_prompt_transform.py @@ -125,11 +125,11 @@ class AdvancedPromptTransform(PromptTransform): if files: prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=prompt)) for file in files: prompt_message_contents.append( 
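To make the `merge_blob_chunks` contract in the new `chunk_merger.py` concrete, here is a hypothetical round-trip; field names are taken from the code above, and `meta` is assumed to be optional on `ToolInvokeMessage`:

```python
from core.plugin.utils.chunk_merger import merge_blob_chunks
from core.tools.entities.tool_entities import ToolInvokeMessage

def fake_stream():
    payload = b"hello world"
    step = 4
    for i in range(0, len(payload), step):
        yield ToolInvokeMessage(
            type=ToolInvokeMessage.MessageType.BLOB_CHUNK,
            message=ToolInvokeMessage.BlobChunkMessage(
                id="file-1",
                total_length=len(payload),
                blob=payload[i : i + step],
                end=i + step >= len(payload),  # mark the last data chunk
            ),
        )

merged = list(merge_blob_chunks(fake_stream()))
assert merged[-1].type == ToolInvokeMessage.MessageType.BLOB
assert bytes(merged[-1].message.blob) == b"hello world"
```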
file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data=prompt)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: @@ -196,16 +196,17 @@ class AdvancedPromptTransform(PromptTransform): query = parser.format(prompt_inputs) + prompt_message_contents: list[PromptMessageContentUnionTypes] = [] if memory and memory_config: prompt_messages = self._append_chat_histories(memory, memory_config, prompt_messages, model_config) if files and query is not None: - prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=query)) for file in files: prompt_message_contents.append( file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data=query)) + prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: prompt_messages.append(UserPromptMessage(content=query)) @@ -215,27 +216,27 @@ class AdvancedPromptTransform(PromptTransform): last_message = prompt_messages[-1] if prompt_messages else None if last_message and last_message.role == PromptMessageRole.USER: # get last user message content and add files - prompt_message_contents = [TextPromptMessageContent(data=cast(str, last_message.content))] for file in files: prompt_message_contents.append( file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data=cast(str, last_message.content))) last_message.content = prompt_message_contents else: - prompt_message_contents = [TextPromptMessageContent(data="")] # not for query for file in files: prompt_message_contents.append( file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data="")) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) else: - prompt_message_contents = [TextPromptMessageContent(data=query)] for file in files: prompt_message_contents.append( file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data=query)) prompt_messages.append(UserPromptMessage(content=prompt_message_contents)) elif query: diff --git a/api/core/prompt/simple_prompt_transform.py b/api/core/prompt/simple_prompt_transform.py index e19c6419ca..13f4163d80 100644 --- a/api/core/prompt/simple_prompt_transform.py +++ b/api/core/prompt/simple_prompt_transform.py @@ -265,11 +265,11 @@ class SimplePromptTransform(PromptTransform): ) -> UserPromptMessage: if files: prompt_message_contents: list[PromptMessageContentUnionTypes] = [] - prompt_message_contents.append(TextPromptMessageContent(data=prompt)) for file in files: prompt_message_contents.append( file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config) ) + prompt_message_contents.append(TextPromptMessageContent(data=prompt)) prompt_message = UserPromptMessage(content=prompt_message_contents) else: diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 9250497d29..28a4ce0778 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -1,3 +1,4 @@ +import contextlib import json from collections import defaultdict from json import JSONDecodeError @@ -11,6 +12,7 @@ from configs import dify_config from 
core.entities.model_entities import DefaultModelEntity, DefaultModelProviderEntity from core.entities.provider_configuration import ProviderConfiguration, ProviderConfigurations, ProviderModelBundle from core.entities.provider_entities import ( + CredentialConfiguration, CustomConfiguration, CustomModelConfiguration, CustomProviderConfiguration, @@ -39,7 +41,9 @@ from extensions.ext_redis import redis_client from models.provider import ( LoadBalancingModelConfig, Provider, + ProviderCredential, ProviderModel, + ProviderModelCredential, ProviderModelSetting, ProviderType, TenantDefaultModel, @@ -487,6 +491,61 @@ class ProviderManager: return provider_name_to_provider_load_balancing_model_configs_dict + @staticmethod + def get_provider_available_credentials(tenant_id: str, provider_name: str) -> list[CredentialConfiguration]: + """ + Get all credentials configured for a provider. + + :param tenant_id: workspace id + :param provider_name: provider name + :return: + """ + with Session(db.engine, expire_on_commit=False) as session: + stmt = ( + select(ProviderCredential) + .where(ProviderCredential.tenant_id == tenant_id, ProviderCredential.provider_name == provider_name) + .order_by(ProviderCredential.created_at.desc()) + ) + + available_credentials = session.scalars(stmt).all() + + return [ + CredentialConfiguration(credential_id=credential.id, credential_name=credential.credential_name) + for credential in available_credentials + ] + + @staticmethod + def get_provider_model_available_credentials( + tenant_id: str, provider_name: str, model_name: str, model_type: str + ) -> list[CredentialConfiguration]: + """ + Get all credentials configured for a provider's custom model. + + :param tenant_id: workspace id + :param provider_name: provider name + :param model_name: model name + :param model_type: model type + :return: + """ + with Session(db.engine, expire_on_commit=False) as session: + stmt = ( + select(ProviderModelCredential) + .where( + ProviderModelCredential.tenant_id == tenant_id, + ProviderModelCredential.provider_name == provider_name, + ProviderModelCredential.model_name == model_name, + ProviderModelCredential.model_type == model_type, + ) + .order_by(ProviderModelCredential.created_at.desc()) + ) + + available_credentials = session.scalars(stmt).all() + + return [ + CredentialConfiguration(credential_id=credential.id, credential_name=credential.credential_name) + for credential in available_credentials + ] + @staticmethod def _init_trial_provider_records( tenant_id: str, provider_name_to_provider_records_dict: dict[str, list[Provider]] @@ -589,9 +648,6 @@ class ProviderManager: if provider_record.provider_type == ProviderType.SYSTEM.value: continue - if not provider_record.encrypted_config: - continue - custom_provider_record = provider_record # Get custom provider credentials @@ -610,8 +666,8 @@ class ProviderManager: try: # fix origin data if custom_provider_record.encrypted_config is None: - raise ValueError("No credentials found") - if not custom_provider_record.encrypted_config.startswith("{"): + provider_credentials = {} + elif not custom_provider_record.encrypted_config.startswith("{"): provider_credentials = {"openai_api_key": custom_provider_record.encrypted_config} else: provider_credentials = json.loads(custom_provider_record.encrypted_config) @@ -624,21 +680,26 @@ class ProviderManager: for variable in provider_credential_secret_variables: if variable in provider_credentials: - try: + with contextlib.suppress(ValueError): provider_credentials[variable] = encrypter.decrypt_token_with_decoding(
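Both new credential lookups above follow the same SQLAlchemy 2.0-style read pattern; a generic sketch of that shape (model and column names illustrative):

```python
from sqlalchemy import select
from sqlalchemy.orm import Session

def list_for_tenant(engine, model, tenant_id: str):
    # expire_on_commit=False keeps the returned ORM objects readable after the session closes
    with Session(engine, expire_on_commit=False) as session:
        stmt = (
            select(model)
            .where(model.tenant_id == tenant_id)
            .order_by(model.created_at.desc())
        )
        return session.scalars(stmt).all()
```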
provider_credentials.get(variable) or "", # type: ignore self.decoding_rsa_key, self.decoding_cipher_rsa, ) - except ValueError: - pass # cache provider credentials provider_credentials_cache.set(credentials=provider_credentials) else: provider_credentials = cached_provider_credentials - custom_provider_configuration = CustomProviderConfiguration(credentials=provider_credentials) + custom_provider_configuration = CustomProviderConfiguration( + credentials=provider_credentials, + current_credential_name=custom_provider_record.credential_name, + current_credential_id=custom_provider_record.credential_id, + available_credentials=self.get_provider_available_credentials( + tenant_id, custom_provider_record.provider_name + ), + ) # Get provider model credential secret variables model_credential_secret_variables = self._extract_secret_variables( @@ -650,8 +711,12 @@ class ProviderManager: # Get custom provider model credentials custom_model_configurations = [] for provider_model_record in provider_model_records: - if not provider_model_record.encrypted_config: - continue + available_model_credentials = self.get_provider_model_available_credentials( + tenant_id, + provider_model_record.provider_name, + provider_model_record.model_name, + provider_model_record.model_type, + ) provider_model_credentials_cache = ProviderCredentialsCache( tenant_id=tenant_id, identity_id=provider_model_record.id, cache_type=ProviderCredentialsCacheType.MODEL @@ -660,7 +725,7 @@ class ProviderManager: # Get cached provider model credentials cached_provider_model_credentials = provider_model_credentials_cache.get() - if not cached_provider_model_credentials: + if not cached_provider_model_credentials and provider_model_record.encrypted_config: try: provider_model_credentials = json.loads(provider_model_record.encrypted_config) except JSONDecodeError: @@ -672,14 +737,12 @@ class ProviderManager: for variable in model_credential_secret_variables: if variable in provider_model_credentials: - try: + with contextlib.suppress(ValueError): provider_model_credentials[variable] = encrypter.decrypt_token_with_decoding( provider_model_credentials.get(variable), self.decoding_rsa_key, self.decoding_cipher_rsa, ) - except ValueError: - pass # cache provider model credentials provider_model_credentials_cache.set(credentials=provider_model_credentials) @@ -691,6 +754,9 @@ class ProviderManager: model=provider_model_record.model_name, model_type=ModelType.value_of(provider_model_record.model_type), credentials=provider_model_credentials, + current_credential_id=provider_model_record.credential_id, + current_credential_name=provider_model_record.credential_name, + available_model_credentials=available_model_credentials, ) ) @@ -902,6 +968,18 @@ class ProviderManager: load_balancing_model_config.model_name == provider_model_setting.model_name and load_balancing_model_config.model_type == provider_model_setting.model_type ): + if load_balancing_model_config.name == "__delete__": + # to calculate current model whether has invalidate lb configs + load_balancing_configs.append( + ModelLoadBalancingConfiguration( + id=load_balancing_model_config.id, + name=load_balancing_model_config.name, + credentials={}, + credential_source_type=load_balancing_model_config.credential_source_type, + ) + ) + continue + if not load_balancing_model_config.enabled: continue @@ -958,6 +1036,7 @@ class ProviderManager: id=load_balancing_model_config.id, name=load_balancing_model_config.name, credentials=provider_model_credentials, + 
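The `try/except: pass` → `contextlib.suppress` rewrites above (and in several files below) are behavior-preserving for the named exception types; the two forms compare as follows:

```python
import contextlib

# Before: an empty handler buried at the end
try:
    value = int("not-a-number")
except ValueError:
    pass

# After: "ignore this specific error" is stated up front, with less nesting
with contextlib.suppress(ValueError):
    value = int("not-a-number")
```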
credential_source_type=load_balancing_model_config.credential_source_type, ) ) diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py index 14481b1f10..bb61b71bb1 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py @@ -98,18 +98,26 @@ class AnalyticdbVectorBySql: try: cur.execute(f"CREATE DATABASE {self.databaseName}") except Exception as e: - if "already exists" in str(e): - return - raise e + if "already exists" not in str(e): + raise e finally: cur.close() conn.close() self.pool = self._create_connection_pool() with self._get_cursor() as cur: + conn = cur.connection + try: + cur.execute("CREATE EXTENSION IF NOT EXISTS zhparser;") + except Exception as e: + conn.rollback() + raise RuntimeError( + "Failed to create zhparser extension. Please ensure it is available in your AnalyticDB." + ) from e try: cur.execute("CREATE TEXT SEARCH CONFIGURATION zh_cn (PARSER = zhparser)") cur.execute("ALTER TEXT SEARCH CONFIGURATION zh_cn ADD MAPPING FOR n,v,a,i,e,l,x WITH simple") except Exception as e: + conn.rollback() if "already exists" not in str(e): raise e cur.execute( diff --git a/api/core/rag/datasource/vdb/clickzetta/README.md b/api/core/rag/datasource/vdb/clickzetta/README.md index 2ee3e657d3..969d4e40a0 100644 --- a/api/core/rag/datasource/vdb/clickzetta/README.md +++ b/api/core/rag/datasource/vdb/clickzetta/README.md @@ -92,17 +92,21 @@ Clickzetta supports advanced full-text search with multiple analyzers: ### Analyzer Types 1. **keyword**: No tokenization, treats the entire string as a single token + - Best for: Exact matching, IDs, codes -2. **english**: Designed for English text +1. **english**: Designed for English text + - Features: Recognizes ASCII letters and numbers, converts to lowercase - Best for: English content -3. **chinese**: Chinese text tokenizer +1. **chinese**: Chinese text tokenizer + - Features: Recognizes Chinese and English characters, removes punctuation - Best for: Chinese or mixed Chinese-English content -4. **unicode**: Multi-language tokenizer based on Unicode +1. **unicode**: Multi-language tokenizer based on Unicode + - Features: Recognizes text boundaries in multiple languages - Best for: Multi-language content @@ -124,21 +128,25 @@ Clickzetta supports advanced full-text search with multiple analyzers: ### Vector Search 1. **Adjust exploration factor** for accuracy vs speed trade-off: + ```sql SET cz.vector.index.search.ef=64; ``` -2. **Use appropriate distance functions**: +1. **Use appropriate distance functions**: + - `cosine_distance`: Best for normalized embeddings (e.g., from language models) - `l2_distance`: Best for raw feature vectors ### Full-Text Search 1. **Choose the right analyzer**: + - Use `keyword` for exact matching - Use language-specific analyzers for better tokenization -2. **Combine with vector search**: +1. **Combine with vector search**: + - Pre-filter with full-text search for better performance - Use hybrid search for improved relevance @@ -147,27 +155,30 @@ Clickzetta supports advanced full-text search with multiple analyzers: ### Connection Issues 1. Verify all 7 required configuration parameters are set -2. Check network connectivity to Clickzetta service -3. Ensure the user has proper permissions on the schema +1. Check network connectivity to Clickzetta service +1. Ensure the user has proper permissions on the schema ### Search Performance 1. 
Verify vector index exists: + ```sql SHOW INDEX FROM <schema>.<table_name>; ``` -2. Check if vector index is being used: +1. Check if vector index is being used: + ```sql EXPLAIN SELECT ... WHERE l2_distance(...) < threshold; ``` + Look for `vector_index_search_type` in the execution plan. ### Full-Text Search Not Working 1. Verify inverted index is created -2. Check analyzer configuration matches your content language -3. Use `TOKENIZE()` function to test tokenization: +1. Check analyzer configuration matches your content language +1. Use `TOKENIZE()` function to test tokenization: ```sql SELECT TOKENIZE('your text', map('analyzer', 'chinese', 'mode', 'smart')); ``` @@ -175,13 +186,13 @@ Clickzetta supports advanced full-text search with multiple analyzers: ## Limitations 1. Vector operations don't support `ORDER BY` or `GROUP BY` directly on vector columns -2. Full-text search relevance scores are not provided by Clickzetta -3. Inverted index creation may fail for very large existing tables (continue without error) -4. Index naming constraints: +1. Full-text search relevance scores are not provided by Clickzetta +1. Inverted index creation may fail for very large existing tables (the code continues without raising an error) +1. Index naming constraints: - Index names must be unique within a schema - Only one vector index can be created per column - The implementation uses timestamps to ensure unique index names -5. A column can only have one vector index at a time +1. A column can only have one vector index at a time ## References diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index 1059b855a2..6e8077ffd9 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -1,3 +1,4 @@ +import contextlib import json import logging import queue @@ -214,10 +215,8 @@ class ClickzettaConnectionPool: return connection else: # Connection expired or invalid, close it - try: + with contextlib.suppress(Exception): connection.close() - except Exception: - pass # No valid connection found, create new one return self._create_connection(config) @@ -228,10 +227,8 @@ class ClickzettaConnectionPool: if config_key not in self._pool_locks: # Pool was cleaned up, just close the connection - try: + with contextlib.suppress(Exception): connection.close() - except Exception: - pass return with self._pool_locks[config_key]: @@ -243,10 +240,8 @@ class ClickzettaConnectionPool: logger.debug("Returned ClickZetta connection to pool") else: # Pool full or connection invalid, close it - try: + with contextlib.suppress(Exception): connection.close() - except Exception: - pass def _cleanup_expired_connections(self) -> None: """Clean up expired connections from all pools.""" @@ -265,10 +260,8 @@ class ClickzettaConnectionPool: if current_time - last_used < self._connection_timeout: valid_connections.append((connection, last_used)) else: - try: + with contextlib.suppress(Exception): connection.close() - except Exception: - pass self._pools[config_key] = valid_connections @@ -299,10 +292,8 @@ class ClickzettaConnectionPool: with self._pool_locks[config_key]: pool = self._pools[config_key] for connection, _ in pool: - try: + with contextlib.suppress(Exception): connection.close() - except Exception: - pass pool.clear() diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index d64f366e0e..8ae616fa77 100644 ---
a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -101,7 +101,7 @@ class MilvusVector(BaseVector): if "Zilliz Cloud" in milvus_version: return True # For standard Milvus installations, check version number - return version.parse(milvus_version).base_version >= version.parse("2.5.0").base_version + return version.parse(milvus_version) >= version.parse("2.5.0") except Exception as e: logger.warning("Failed to check Milvus version: %s. Disabling hybrid search.", str(e)) return False @@ -259,8 +259,16 @@ class MilvusVector(BaseVector): """ Search for documents by full-text search (if hybrid search is enabled). """ - if not self._hybrid_search_enabled or not self.field_exists(Field.SPARSE_VECTOR.value): - logger.warning("Full-text search is not supported in current Milvus version (requires >= 2.5.0)") + if not self._hybrid_search_enabled: + logger.warning( + "Full-text search is disabled: set MILVUS_ENABLE_HYBRID_SEARCH=true (requires Milvus >= 2.5.0)." + ) + return [] + if not self.field_exists(Field.SPARSE_VECTOR.value): + logger.warning( + "Full-text search unavailable: collection missing 'sparse_vector' field; " + "recreate the collection after enabling MILVUS_ENABLE_HYBRID_SEARCH to add BM25 sparse index." + ) return [] document_ids_filter = kwargs.get("document_ids_filter") filter = "" diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py index d5ec4b4436..99f766a88a 100644 --- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py +++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py @@ -15,6 +15,8 @@ from core.rag.embedding.embedding_base import Embeddings from core.rag.models.document import Document from models.dataset import Dataset +logger = logging.getLogger(__name__) + class MyScaleConfig(BaseModel): host: str @@ -53,7 +55,7 @@ class MyScaleVector(BaseVector): return self.add_texts(documents=texts, embeddings=embeddings, **kwargs) def _create_collection(self, dimension: int): - logging.info("create MyScale collection %s with dimension %s", self._collection_name, dimension) + logger.info("create MyScale collection %s with dimension %s", self._collection_name, dimension) self._client.command(f"CREATE DATABASE IF NOT EXISTS {self._config.database}") fts_params = f"('{self._config.fts_params}')" if self._config.fts_params else "" sql = f""" @@ -151,7 +153,7 @@ class MyScaleVector(BaseVector): for r in self._client.query(sql).named_results() ] except Exception as e: - logging.exception("\033[91m\033[1m%s\033[0m \033[95m%s\033[0m", type(e), str(e)) # noqa:TRY401 + logger.exception("\033[91m\033[1m%s\033[0m \033[95m%s\033[0m", type(e), str(e)) # noqa:TRY401 return [] def delete(self) -> None: diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py index d6dfe967d7..556d03940e 100644 --- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -4,8 +4,8 @@ import math from typing import Any from pydantic import BaseModel, model_validator -from pyobvector import VECTOR, ObVecClient # type: ignore -from sqlalchemy import JSON, Column, String, func +from pyobvector import VECTOR, FtsIndexParam, FtsParser, ObVecClient, l2_distance # type: ignore +from sqlalchemy import JSON, Column, String from sqlalchemy.dialects.mysql import LONGTEXT from configs import dify_config @@ -119,14 +119,21 @@ class 
OceanBaseVector(BaseVector): ) try: if self._hybrid_search_enabled: - self._client.perform_raw_text_sql(f"""ALTER TABLE {self._collection_name} - ADD FULLTEXT INDEX fulltext_index_for_col_text (text) WITH PARSER ik""") + self._client.create_fts_idx_with_fts_index_param( + table_name=self._collection_name, + fts_idx_param=FtsIndexParam( + index_name="fulltext_index_for_col_text", + field_names=["text"], + parser_type=FtsParser.IK, + ), + ) except Exception as e: raise Exception( "Failed to add fulltext index to the target table, your OceanBase version must be 4.3.5.1 or above " + "to support fulltext index and vector index in the same table", e, ) + self._client.refresh_metadata([self._collection_name]) redis_client.set(collection_exist_cache_key, 1, ex=3600) def _check_hybrid_search_support(self) -> bool: @@ -145,7 +152,7 @@ class OceanBaseVector(BaseVector): ob_full_version = result.fetchone()[0] ob_version = ob_full_version.split()[1] logger.debug("Current OceanBase version is %s", ob_version) - return version.parse(ob_version).base_version >= version.parse("4.3.5.1").base_version + return version.parse(ob_version) >= version.parse("4.3.5.1") except Exception as e: logger.warning("Failed to check OceanBase version: %s. Disabling hybrid search.", str(e)) return False @@ -252,7 +259,7 @@ class OceanBaseVector(BaseVector): vec_column_name="vector", vec_data=query_vector, topk=topk, - distance_func=func.l2_distance, + distance_func=l2_distance, output_column_names=["text", "metadata"], with_dist=True, where_clause=_where_clause, diff --git a/api/core/rag/datasource/vdb/oracle/oraclevector.py b/api/core/rag/datasource/vdb/oracle/oraclevector.py index 303c3fe31c..0956914070 100644 --- a/api/core/rag/datasource/vdb/oracle/oraclevector.py +++ b/api/core/rag/datasource/vdb/oracle/oraclevector.py @@ -188,14 +188,17 @@ class OracleVector(BaseVector): def text_exists(self, id: str) -> bool: with self._get_connection() as conn: with conn.cursor() as cur: - cur.execute(f"SELECT id FROM {self.table_name} WHERE id = '%s'" % (id,)) + cur.execute(f"SELECT id FROM {self.table_name} WHERE id = :1", (id,)) return cur.fetchone() is not None conn.close() def get_by_ids(self, ids: list[str]) -> list[Document]: + if not ids: + return [] with self._get_connection() as conn: with conn.cursor() as cur: - cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN %s", (tuple(ids),)) + placeholders = ", ".join(f":{i + 1}" for i in range(len(ids))) + cur.execute(f"SELECT meta, text FROM {self.table_name} WHERE id IN ({placeholders})", ids) docs = [] for record in cur: docs.append(Document(page_content=record[1], metadata=record[0])) @@ -208,14 +211,15 @@ class OracleVector(BaseVector): return with self._get_connection() as conn: with conn.cursor() as cur: - cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s" % (tuple(ids),)) + placeholders = ", ".join(f":{i + 1}" for i in range(len(ids))) + cur.execute(f"DELETE FROM {self.table_name} WHERE id IN ({placeholders})", ids) conn.commit() conn.close() def delete_by_metadata_field(self, key: str, value: str) -> None: with self._get_connection() as conn: with conn.cursor() as cur: - cur.execute(f"DELETE FROM {self.table_name} WHERE meta->>%s = %s", (key, value)) + cur.execute(f"DELETE FROM {self.table_name} WHERE JSON_VALUE(meta, '$." + key + "') = :1", (value,)) conn.commit() conn.close() @@ -227,12 +231,20 @@ class OracleVector(BaseVector): :param top_k: The number of nearest neighbors to return, default is 5. 
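The version-check fixes in `milvus_vector.py` and `oceanbase_vector.py` above are genuine correctness fixes: `.base_version` returns a string, so the old `>=` compared lexicographically, while parsed `Version` objects compare numerically. A quick demonstration:

```python
from packaging import version

# Strings compare character by character, so "2.10.0" sorts below "2.5.0":
assert version.parse("2.10.0").base_version < version.parse("2.5.0").base_version

# Version objects compare numerically, which is what the feature gate intends:
assert version.parse("2.10.0") >= version.parse("2.5.0")
```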
:return: List of Documents that are nearest to the query vector. """ + # Validate and sanitize top_k to prevent SQL injection top_k = kwargs.get("top_k", 4) + if not isinstance(top_k, int) or top_k <= 0 or top_k > 10000: + top_k = 4 # Use default if invalid + document_ids_filter = kwargs.get("document_ids_filter") where_clause = "" + params = [numpy.array(query_vector)] + if document_ids_filter: - document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) - where_clause = f"WHERE metadata->>'document_id' in ({document_ids})" + placeholders = ", ".join(f":{i + 2}" for i in range(len(document_ids_filter))) + where_clause = f"WHERE JSON_VALUE(meta, '$.document_id') IN ({placeholders})" + params.extend(document_ids_filter) + with self._get_connection() as conn: conn.inputtypehandler = self.input_type_handler conn.outputtypehandler = self.output_type_handler @@ -241,7 +253,7 @@ class OracleVector(BaseVector): f"""SELECT meta, text, vector_distance(embedding,(select to_vector(:1) from dual),cosine) AS distance FROM {self.table_name} {where_clause} ORDER BY distance fetch first {top_k} rows only""", - [numpy.array(query_vector)], + params, ) docs = [] score_threshold = float(kwargs.get("score_threshold") or 0.0) @@ -259,9 +271,11 @@ class OracleVector(BaseVector): import nltk # type: ignore from nltk.corpus import stopwords # type: ignore + # Validate and sanitize top_k to prevent SQL injection top_k = kwargs.get("top_k", 5) + if not isinstance(top_k, int) or top_k <= 0 or top_k > 10000: + top_k = 5 # Use default if invalid # just not implement fetch by score_threshold now, may be later - score_threshold = float(kwargs.get("score_threshold") or 0.0) if len(query) > 0: # Check which language the query is in zh_pattern = re.compile("[\u4e00-\u9fa5]+") @@ -297,14 +311,21 @@ class OracleVector(BaseVector): with conn.cursor() as cur: document_ids_filter = kwargs.get("document_ids_filter") where_clause = "" + params: dict[str, Any] = {"kk": " ACCUM ".join(entities)} + if document_ids_filter: - document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) - where_clause = f" AND metadata->>'document_id' in ({document_ids}) " + placeholders = [] + for i, doc_id in enumerate(document_ids_filter): + param_name = f"doc_id_{i}" + placeholders.append(f":{param_name}") + params[param_name] = doc_id + where_clause = f" AND JSON_VALUE(meta, '$.document_id') IN ({', '.join(placeholders)}) " + cur.execute( f"""select meta, text, embedding FROM {self.table_name} WHERE CONTAINS(text, :kk, 1) > 0 {where_clause} order by score(1) desc fetch first {top_k} rows only""", - kk=" ACCUM ".join(entities), + params, ) docs = [] for record in cur: diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py index 746773da63..a2985b9d00 100644 --- a/api/core/rag/datasource/vdb/pgvector/pgvector.py +++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py @@ -19,6 +19,8 @@ from core.rag.models.document import Document from extensions.ext_redis import redis_client from models.dataset import Dataset +logger = logging.getLogger(__name__) + class PGVectorConfig(BaseModel): host: str @@ -155,7 +157,7 @@ class PGVector(BaseVector): cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s", (tuple(ids),)) except psycopg2.errors.UndefinedTable: # table not exists - logging.warning("Table %s not found, skipping delete operation.", self.table_name) + logger.warning("Table %s not found, skipping delete operation.", self.table_name) return except Exception as e: raise e diff 
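The `oraclevector.py` rewrites above move user-supplied values out of the SQL text and into numbered bind variables; the placeholder-building idiom, sketched standalone (the table name remains an internal, trusted identifier, as in the original):

```python
def fetch_by_ids(cur, table_name: str, ids: list[str]):
    # One numbered bind (:1, :2, ...) per value; the values travel separately
    # from the SQL string, so they can never be parsed as SQL.
    placeholders = ", ".join(f":{i + 1}" for i in range(len(ids)))
    cur.execute(f"SELECT meta, text FROM {table_name} WHERE id IN ({placeholders})", ids)
    return cur.fetchall()
```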
--git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index 91d667ff2c..e66959045f 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -17,6 +17,8 @@ from core.rag.models.document import Document from extensions.ext_redis import redis_client from models import Dataset +logger = logging.getLogger(__name__) + class TableStoreConfig(BaseModel): access_key_id: Optional[str] = None @@ -145,7 +147,7 @@ class TableStoreVector(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logging.info("Collection %s already exists.", self._collection_name) + logger.info("Collection %s already exists.", self._collection_name) return self._create_table_if_not_exist() @@ -155,7 +157,7 @@ class TableStoreVector(BaseVector): def _create_table_if_not_exist(self) -> None: table_list = self._tablestore_client.list_table() if self._table_name in table_list: - logging.info("Tablestore system table[%s] already exists", self._table_name) + logger.info("Tablestore system table[%s] already exists", self._table_name) return None schema_of_primary_key = [("id", "STRING")] @@ -163,12 +165,12 @@ class TableStoreVector(BaseVector): table_options = tablestore.TableOptions() reserved_throughput = tablestore.ReservedThroughput(tablestore.CapacityUnit(0, 0)) self._tablestore_client.create_table(table_meta, table_options, reserved_throughput) - logging.info("Tablestore create table[%s] successfully.", self._table_name) + logger.info("Tablestore create table[%s] successfully.", self._table_name) def _create_search_index_if_not_exist(self, dimension: int) -> None: search_index_list = self._tablestore_client.list_search_index(table_name=self._table_name) if self._index_name in [t[1] for t in search_index_list]: - logging.info("Tablestore system index[%s] already exists", self._index_name) + logger.info("Tablestore system index[%s] already exists", self._index_name) return None field_schemas = [ @@ -206,20 +208,20 @@ class TableStoreVector(BaseVector): index_meta = tablestore.SearchIndexMeta(field_schemas) self._tablestore_client.create_search_index(self._table_name, self._index_name, index_meta) - logging.info("Tablestore create system index[%s] successfully.", self._index_name) + logger.info("Tablestore create system index[%s] successfully.", self._index_name) def _delete_table_if_exist(self): search_index_list = self._tablestore_client.list_search_index(table_name=self._table_name) for resp_tuple in search_index_list: self._tablestore_client.delete_search_index(resp_tuple[0], resp_tuple[1]) - logging.info("Tablestore delete index[%s] successfully.", self._index_name) + logger.info("Tablestore delete index[%s] successfully.", self._index_name) self._tablestore_client.delete_table(self._table_name) - logging.info("Tablestore delete system table[%s] successfully.", self._index_name) + logger.info("Tablestore delete system table[%s] successfully.", self._table_name) def _delete_search_index(self) -> None: self._tablestore_client.delete_search_index(self._table_name, self._index_name) - logging.info("Tablestore delete index[%s] successfully.", self._index_name) + logger.info("Tablestore delete index[%s] successfully.", self._index_name) def _write_row(self, primary_key: str, attributes: dict[str, Any]) -> None: pk = [("id", primary_key)] diff
--git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index e848b39c4d..a76b5d579c 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -3,7 +3,7 @@ import os import uuid from collections.abc import Generator, Iterable, Sequence from itertools import islice -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union import qdrant_client import requests @@ -398,7 +398,6 @@ class TidbOnQdrantVector(BaseVector): def _reload_if_needed(self): if isinstance(self._client, QdrantLocal): - self._client = cast(QdrantLocal, self._client) self._client._load() @classmethod diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index f8a851a246..e5492cb7f3 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -83,14 +83,14 @@ class TiDBVector(BaseVector): self._dimension = 1536 def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): - logger.info("create collection and add texts, collection_name: " + self._collection_name) + logger.info("create collection and add texts, collection_name: %s", self._collection_name) self._create_collection(len(embeddings[0])) self.add_texts(texts, embeddings) self._dimension = len(embeddings[0]) pass def _create_collection(self, dimension: int): - logger.info("_create_collection, collection_name " + self._collection_name) + logger.info("_create_collection, collection_name %s", self._collection_name) lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 9848a28384..e27c1f0594 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -75,7 +75,7 @@ class CacheEmbedding(Embeddings): except IntegrityError: db.session.rollback() except Exception: - logging.exception("Failed transform embedding") + logger.exception("Failed transform embedding") cache_embeddings = [] try: for i, n_embedding in zip(embedding_queue_indices, embedding_queue_embeddings): @@ -95,7 +95,7 @@ class CacheEmbedding(Embeddings): db.session.rollback() except Exception as ex: db.session.rollback() - logger.exception("Failed to embed documents: %s") + logger.exception("Failed to embed documents") raise ex return text_embeddings @@ -122,7 +122,7 @@ class CacheEmbedding(Embeddings): raise ValueError("Normalized embedding is nan please try again") except Exception as ex: if dify_config.DEBUG: - logging.exception("Failed to embed query text '%s...(%s chars)'", text[:10], len(text)) + logger.exception("Failed to embed query text '%s...(%s chars)'", text[:10], len(text)) raise ex try: @@ -136,7 +136,7 @@ class CacheEmbedding(Embeddings): redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: if dify_config.DEBUG: - logging.exception( + logger.exception( "Failed to add embedding to redis for the text '%s...(%s chars)'", text[:10], len(text) ) raise ex diff --git a/api/core/rag/extractor/excel_extractor.py b/api/core/rag/extractor/excel_extractor.py index a3b35458df..7cc554c74d 100644 --- 
a/api/core/rag/extractor/excel_extractor.py +++ b/api/core/rag/extractor/excel_extractor.py @@ -34,9 +34,8 @@ class ExcelExtractor(BaseExtractor): for sheet_name in wb.sheetnames: sheet = wb[sheet_name] data = sheet.values - try: - cols = next(data) - except StopIteration: + cols = next(data, None) + if cols is None: continue df = pd.DataFrame(data, columns=cols) diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index bc19899ea5..e6b28b1bf4 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -73,8 +73,8 @@ class ExtractProcessor: suffix = "." + match.group(1) else: suffix = "" - # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}"  # type: ignore + # Generate a unique path without creating the file; tempfile.gettempdir() would only return the temp directory, not a unique name. + # https://stackoverflow.com/questions/26541416/generate-temporary-file-names-without-creating-actual-file-in-python#comment90414256_26541521 + file_path = tempfile.mktemp(dir=temp_dir, suffix=suffix) Path(file_path).write_bytes(response.content) extract_setting = ExtractSetting(datasource_type="upload_file", document_model="text_model") if return_text: diff --git a/api/core/rag/extractor/pdf_extractor.py b/api/core/rag/extractor/pdf_extractor.py index 04033dec3f..7dfe2e357c 100644 --- a/api/core/rag/extractor/pdf_extractor.py +++ b/api/core/rag/extractor/pdf_extractor.py @@ -1,5 +1,6 @@ """Abstract interface for document loader implementations.""" +import contextlib from collections.abc import Iterator from typing import Optional, cast @@ -25,12 +26,10 @@ class PdfExtractor(BaseExtractor): def extract(self) -> list[Document]: plaintext_file_exists = False if self._file_cache_key: - try: + with contextlib.suppress(FileNotFoundError): text = cast(bytes, storage.load(self._file_cache_key)).decode("utf-8") plaintext_file_exists = True return [Document(page_content=text)] - except FileNotFoundError: - pass documents = list(self.load()) text_list = [] for document in documents: diff --git a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py index f1fa5dde5c..856a9bce18 100644 --- a/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_eml_extractor.py @@ -1,4 +1,5 @@ import base64 +import contextlib import logging from typing import Optional @@ -33,7 +34,7 @@ class UnstructuredEmailExtractor(BaseExtractor): elements = partition_email(filename=self._file_path) # noinspection PyBroadException - try: + with contextlib.suppress(Exception): for element in elements: element_text = element.text.strip() @@ -43,8 +44,6 @@ class UnstructuredEmailExtractor(BaseExtractor): element_decode = base64.b64decode(element_text) soup = BeautifulSoup(element_decode.decode("utf-8"), "html.parser") element.text = soup.get_text() - except Exception: - pass from unstructured.chunking.title import chunk_by_title diff --git a/api/core/rag/extractor/watercrawl/provider.py b/api/core/rag/extractor/watercrawl/provider.py index 21fbb2100f..da03fc67a6 100644 --- a/api/core/rag/extractor/watercrawl/provider.py +++ b/api/core/rag/extractor/watercrawl/provider.py @@ -1,6 +1,6 @@ from collections.abc import Generator from datetime import datetime -from typing import Any +from typing import Any, Optional from core.rag.extractor.watercrawl.client import WaterCrawlAPIClient @@ -9,7 +9,7 @@ class WaterCrawlProvider: def
__init__(self, api_key, base_url: str | None = None): self.client = WaterCrawlAPIClient(api_key, base_url) - def crawl_url(self, url, options: dict | Any = None) -> dict: + def crawl_url(self, url, options: Optional[dict] = None) -> dict: options = options or {} spider_options = { "max_depth": 1, diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 0eff7c186a..f3b162e3d3 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -1,6 +1,5 @@ """Abstract interface for document loader implementations.""" -import datetime import logging import mimetypes import os @@ -19,6 +18,7 @@ from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document from extensions.ext_database import db from extensions.ext_storage import storage +from libs.datetime_utils import naive_utc_now from models.enums import CreatorUserRole from models.model import UploadFile @@ -117,10 +117,10 @@ class WordExtractor(BaseExtractor): mime_type=mime_type or "", created_by=self.user_id, created_by_role=CreatorUserRole.ACCOUNT, - created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + created_at=naive_utc_now(), used=True, used_by=self.user_id, - used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + used_at=naive_utc_now(), ) db.session.add(upload_file) diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 75f3153697..609a8aafa1 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -23,6 +23,8 @@ from libs import helper from models.dataset import Dataset from services.entities.knowledge_entities.knowledge_entities import Rule +logger = logging.getLogger(__name__) + class QAIndexProcessor(BaseIndexProcessor): def extract(self, extract_setting: ExtractSetting, **kwargs) -> list[Document]: @@ -182,7 +184,7 @@ class QAIndexProcessor(BaseIndexProcessor): qa_documents.append(qa_document) format_documents.extend(qa_documents) except Exception as e: - logging.exception("Failed to format qa document") + logger.exception("Failed to format qa document") all_qa_documents.extend(format_documents) diff --git a/api/core/rag/models/document.py b/api/core/rag/models/document.py index 04a3428ad8..ff63a6780e 100644 --- a/api/core/rag/models/document.py +++ b/api/core/rag/models/document.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from collections.abc import Sequence from typing import Any, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field class ChildDocument(BaseModel): @@ -15,7 +15,7 @@ class ChildDocument(BaseModel): """Arbitrary metadata about the page content (e.g., source, relationships to other documents, etc.). """ - metadata: dict = {} + metadata: dict = Field(default_factory=dict) class Document(BaseModel): @@ -28,7 +28,7 @@ class Document(BaseModel): """Arbitrary metadata about the page content (e.g., source, relationships to other documents, etc.).
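Judging by the expression it replaces in `word_extractor.py` above, `libs.datetime_utils.naive_utc_now` is presumably equivalent to the following helper:

```python
from datetime import UTC, datetime

def naive_utc_now() -> datetime:
    # UTC wall-clock time with tzinfo stripped, matching the naive datetime columns
    return datetime.now(UTC).replace(tzinfo=None)
```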
""" - metadata: dict = {} + metadata: dict = Field(default_factory=dict) provider: Optional[str] = "dify" diff --git a/api/core/rag/rerank/weight_rerank.py b/api/core/rag/rerank/weight_rerank.py index cbc96037bf..80de746e29 100644 --- a/api/core/rag/rerank/weight_rerank.py +++ b/api/core/rag/rerank/weight_rerank.py @@ -39,9 +39,16 @@ class WeightRerankRunner(BaseRerankRunner): unique_documents = [] doc_ids = set() for document in documents: - if document.metadata is not None and document.metadata["doc_id"] not in doc_ids: + if ( + document.provider == "dify" + and document.metadata is not None + and document.metadata["doc_id"] not in doc_ids + ): doc_ids.add(document.metadata["doc_id"]) unique_documents.append(document) + else: + if document not in unique_documents: + unique_documents.append(document) documents = unique_documents diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index a25bc65646..cd4af72832 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -1012,7 +1012,7 @@ class DatasetRetrieval: def _process_metadata_filter_func( self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list ): - if value is None: + if value is None and condition not in ("empty", "not empty"): return key = f"{metadata_name}_{sequence}" diff --git a/api/core/repositories/factory.py b/api/core/repositories/factory.py index 09c775f3a6..854c122331 100644 --- a/api/core/repositories/factory.py +++ b/api/core/repositories/factory.py @@ -5,10 +5,7 @@ This module provides a Django-like settings system for repository implementation allowing users to configure different repository backends through string paths. """ -import importlib -import inspect -import logging -from typing import Protocol, Union +from typing import Union from sqlalchemy.engine import Engine from sqlalchemy.orm import sessionmaker @@ -16,12 +13,11 @@ from sqlalchemy.orm import sessionmaker from configs import dify_config from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from libs.module_loading import import_string from models import Account, EndUser from models.enums import WorkflowRunTriggeredFrom from models.workflow import WorkflowNodeExecutionTriggeredFrom -logger = logging.getLogger(__name__) - class RepositoryImportError(Exception): """Raised when a repository implementation cannot be imported or instantiated.""" @@ -37,96 +33,6 @@ class DifyCoreRepositoryFactory: are specified as module paths (e.g., 'module.submodule.ClassName'). """ - @staticmethod - def _import_class(class_path: str) -> type: - """ - Import a class from a module path string. 
- - Args: - class_path: Full module path to the class (e.g., 'module.submodule.ClassName') - - Returns: - The imported class - - Raises: - RepositoryImportError: If the class cannot be imported - """ - try: - module_path, class_name = class_path.rsplit(".", 1) - module = importlib.import_module(module_path) - repo_class = getattr(module, class_name) - assert isinstance(repo_class, type) - return repo_class - except (ValueError, ImportError, AttributeError) as e: - raise RepositoryImportError(f"Cannot import repository class '{class_path}': {e}") from e - - @staticmethod - def _validate_repository_interface(repository_class: type, expected_interface: type[Protocol]) -> None: # type: ignore - """ - Validate that a class implements the expected repository interface. - - Args: - repository_class: The class to validate - expected_interface: The expected interface/protocol - - Raises: - RepositoryImportError: If the class doesn't implement the interface - """ - # Check if the class has all required methods from the protocol - required_methods = [ - method - for method in dir(expected_interface) - if not method.startswith("_") and callable(getattr(expected_interface, method, None)) - ] - - missing_methods = [] - for method_name in required_methods: - if not hasattr(repository_class, method_name): - missing_methods.append(method_name) - - if missing_methods: - raise RepositoryImportError( - f"Repository class '{repository_class.__name__}' does not implement required methods " - f"{missing_methods} from interface '{expected_interface.__name__}'" - ) - - @staticmethod - def _validate_constructor_signature(repository_class: type, required_params: list[str]) -> None: - """ - Validate that a repository class constructor accepts required parameters. - Args: - repository_class: The class to validate - required_params: List of required parameter names - Raises: - RepositoryImportError: If the constructor doesn't accept required parameters - """ - - try: - # MyPy may flag the line below with the following error: - # - # > Accessing "__init__" on an instance is unsound, since - # > instance.__init__ could be from an incompatible subclass. - # - # Despite this, we need to ensure that the constructor of `repository_class` - # has a compatible signature. - signature = inspect.signature(repository_class.__init__) # type: ignore[misc] - param_names = list(signature.parameters.keys()) - - # Remove 'self' parameter - if "self" in param_names: - param_names.remove("self") - - missing_params = [param for param in required_params if param not in param_names] - if missing_params: - raise RepositoryImportError( - f"Repository class '{repository_class.__name__}' constructor does not accept required parameters: " - f"{missing_params}. 
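For context on this factory simplification: `libs.module_loading.import_string`, used in the rewritten methods below, is presumably the Django-style helper that subsumes the removed `_import_class`; roughly:

```python
from importlib import import_module

def import_string(dotted_path: str):
    """Import a class or attribute from a dotted module path; raise ImportError on failure."""
    try:
        module_path, attr_name = dotted_path.rsplit(".", 1)
    except ValueError as e:
        raise ImportError(f"'{dotted_path}' is not a dotted module path") from e
    module = import_module(module_path)
    try:
        return getattr(module, attr_name)
    except AttributeError as e:
        raise ImportError(f"Module '{module_path}' has no attribute '{attr_name}'") from e
```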
Expected parameters: {required_params}" - ) - except Exception as e: - raise RepositoryImportError( - f"Failed to validate constructor signature for '{repository_class.__name__}': {e}" - ) from e - @classmethod def create_workflow_execution_repository( cls, @@ -151,24 +57,16 @@ class DifyCoreRepositoryFactory: RepositoryImportError: If the configured repository cannot be created """ class_path = dify_config.CORE_WORKFLOW_EXECUTION_REPOSITORY - logger.debug("Creating WorkflowExecutionRepository from: %s", class_path) try: - repository_class = cls._import_class(class_path) - cls._validate_repository_interface(repository_class, WorkflowExecutionRepository) - - # All repository types now use the same constructor parameters + repository_class = import_string(class_path) return repository_class( # type: ignore[no-any-return] session_factory=session_factory, user=user, app_id=app_id, triggered_from=triggered_from, ) - except RepositoryImportError: - # Re-raise our custom errors as-is - raise - except Exception as e: - logger.exception("Failed to create WorkflowExecutionRepository") + except Exception as e: raise RepositoryImportError(f"Failed to create WorkflowExecutionRepository from '{class_path}': {e}") from e @classmethod @@ -195,24 +93,16 @@ class DifyCoreRepositoryFactory: RepositoryImportError: If the configured repository cannot be created """ class_path = dify_config.CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY - logger.debug("Creating WorkflowNodeExecutionRepository from: %s", class_path) try: - repository_class = cls._import_class(class_path) - cls._validate_repository_interface(repository_class, WorkflowNodeExecutionRepository) - - # All repository types now use the same constructor parameters + repository_class = import_string(class_path) return repository_class( # type: ignore[no-any-return] session_factory=session_factory, user=user, app_id=app_id, triggered_from=triggered_from, ) - except RepositoryImportError: - # Re-raise our custom errors as-is - raise - except Exception as e: - logger.exception("Failed to create WorkflowNodeExecutionRepository") + except Exception as e: raise RepositoryImportError( f"Failed to create WorkflowNodeExecutionRepository from '{class_path}': {e}" ) from e diff --git a/api/core/tools/custom_tool/tool.py b/api/core/tools/custom_tool/tool.py index e112de9578..97342640f5 100644 --- a/api/core/tools/custom_tool/tool.py +++ b/api/core/tools/custom_tool/tool.py @@ -81,14 +81,11 @@ class ApiTool(Tool): return ToolProviderType.API def assembling_request(self, parameters: dict[str, Any]) -> dict[str, Any]: + headers = {} if self.runtime is None: raise ToolProviderCredentialValidationError("runtime not initialized") - headers = {} - if self.runtime is None: - raise ValueError("runtime is required") credentials = self.runtime.credentials or {} - if "auth_type" not in credentials: raise ToolProviderCredentialValidationError("Missing auth_type") @@ -278,35 +275,30 @@ class ApiTool(Tool): if files: headers.pop("Content-Type", None) - if method in { - "get", - "head", - "post", - "put", - "delete", - "patch", - "options", - "GET", - "POST", - "PUT", - "PATCH", - "DELETE", - "HEAD", - "OPTIONS", - }: - response: httpx.Response = getattr(ssrf_proxy, method.lower())( - url, - params=params, - headers=headers, - cookies=cookies, - data=body, - files=files, - timeout=API_TOOL_DEFAULT_TIMEOUT, - follow_redirects=True, - ) - return response - else: + _METHOD_MAP = { + "get": ssrf_proxy.get, + "head": ssrf_proxy.head, + "post": ssrf_proxy.post, +
"put": ssrf_proxy.put, + "delete": ssrf_proxy.delete, + "patch": ssrf_proxy.patch, + } + method_lc = method.lower() + if method_lc not in _METHOD_MAP: raise ValueError(f"Invalid http method {method}") + response: httpx.Response = _METHOD_MAP[ + method_lc + ]( # https://discuss.python.org/t/type-inference-for-function-return-types/42926 + url, + params=params, + headers=headers, + cookies=cookies, + data=body, + files=files, + timeout=API_TOOL_DEFAULT_TIMEOUT, + follow_redirects=True, + ) + return response def _convert_body_property_any_of( self, property: dict[str, Any], value: Any, any_of: list[dict[str, Any]], max_recursive=10 diff --git a/api/core/tools/entities/api_entities.py b/api/core/tools/entities/api_entities.py index 27ce96b90e..48015c04ee 100644 --- a/api/core/tools/entities/api_entities.py +++ b/api/core/tools/entities/api_entities.py @@ -62,7 +62,7 @@ class ToolProviderApiEntity(BaseModel): parameter.pop("input_schema", None) # ------------- optional_fields = self.optional_field("server_url", self.server_url) - if self.type == ToolProviderType.MCP.value: + if self.type == ToolProviderType.MCP: optional_fields.update(self.optional_field("updated_at", self.updated_at)) optional_fields.update(self.optional_field("server_identifier", self.server_identifier)) return { diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 5ffba07b44..df599a09a3 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -1,4 +1,5 @@ import base64 +import contextlib import enum from collections.abc import Mapping from enum import Enum @@ -227,10 +228,8 @@ class ToolInvokeMessage(BaseModel): @classmethod def decode_blob_message(cls, v): if isinstance(v, dict) and "blob" in v: - try: + with contextlib.suppress(Exception): v["blob"] = base64.b64decode(v["blob"]) - except Exception: - pass return v @field_serializer("message") diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py index 83444c02d8..c3fdc37303 100644 --- a/api/core/tools/tool_engine.py +++ b/api/core/tools/tool_engine.py @@ -1,3 +1,4 @@ +import contextlib import json from collections.abc import Generator, Iterable from copy import deepcopy @@ -69,10 +70,8 @@ class ToolEngine: if parameters and len(parameters) == 1: tool_parameters = {parameters[0].name: tool_parameters} else: - try: + with contextlib.suppress(Exception): tool_parameters = json.loads(tool_parameters) - except Exception: - pass if not isinstance(tool_parameters, dict): raise ValueError(f"tool_parameters should be a dict, but got a string: {tool_parameters}") @@ -270,20 +269,18 @@ class ToolEngine: if response.meta.get("mime_type"): mimetype = response.meta.get("mime_type") else: - try: + with contextlib.suppress(Exception): url = URL(cast(ToolInvokeMessage.TextMessage, response.message).text) extension = url.suffix guess_type_result, _ = guess_type(f"a{extension}") if guess_type_result: mimetype = guess_type_result - except Exception: - pass if not mimetype: mimetype = "image/jpeg" yield ToolInvokeMessageBinary( - mimetype=response.meta.get("mime_type", "image/jpeg"), + mimetype=response.meta.get("mime_type", mimetype), url=cast(ToolInvokeMessage.TextMessage, response.message).text, ) elif response.type == ToolInvokeMessage.MessageType.BLOB: diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index 7472f4f605..3454ec3489 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -9,6 +9,7 @@ from typing import 
TYPE_CHECKING, Any, Literal, Optional, Union, cast import sqlalchemy as sa from pydantic import TypeAdapter +from sqlalchemy.orm import Session from yarl import URL import contexts @@ -617,8 +618,9 @@ class ToolManager: WHERE tenant_id = :tenant_id ORDER BY tenant_id, provider, is_default DESC, created_at DESC """ - ids = [row.id for row in db.session.execute(sa.text(sql), {"tenant_id": tenant_id}).all()] - return db.session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all() + with Session(db.engine, autoflush=False) as session: + ids = [row.id for row in session.execute(sa.text(sql), {"tenant_id": tenant_id}).all()] + return session.query(BuiltinToolProvider).where(BuiltinToolProvider.id.in_(ids)).all() @classmethod def list_providers_from_api( @@ -959,7 +961,7 @@ class ToolManager: elif provider_type == ToolProviderType.WORKFLOW: return cls.generate_workflow_tool_icon_url(tenant_id, provider_id) elif provider_type == ToolProviderType.PLUGIN: - provider = ToolManager.get_builtin_provider(provider_id, tenant_id) + provider = ToolManager.get_plugin_provider(provider_id, tenant_id) if isinstance(provider, PluginToolProviderController): try: return cls.generate_plugin_tool_icon_url(tenant_id, provider.entity.identity.icon) diff --git a/api/core/tools/utils/configuration.py b/api/core/tools/utils/configuration.py index aceba6e69f..3a9391dbb1 100644 --- a/api/core/tools/utils/configuration.py +++ b/api/core/tools/utils/configuration.py @@ -1,3 +1,4 @@ +import contextlib from copy import deepcopy from typing import Any @@ -137,11 +138,9 @@ class ToolParameterConfigurationManager: and parameter.type == ToolParameter.ToolParameterType.SECRET_INPUT ): if parameter.name in parameters: - try: - has_secret_input = True + has_secret_input = True + with contextlib.suppress(Exception): parameters[parameter.name] = encrypter.decrypt_token(self.tenant_id, parameters[parameter.name]) - except Exception: - pass if has_secret_input: cache.set(parameters) diff --git a/api/core/tools/utils/encryption.py b/api/core/tools/utils/encryption.py index 5fdfd3b9d1..d771293e11 100644 --- a/api/core/tools/utils/encryption.py +++ b/api/core/tools/utils/encryption.py @@ -1,3 +1,4 @@ +import contextlib from copy import deepcopy from typing import Any, Optional, Protocol @@ -111,14 +112,12 @@ class ProviderConfigEncrypter: for field_name, field in fields.items(): if field.type == BasicProviderConfig.Type.SECRET_INPUT: if field_name in data: - try: + with contextlib.suppress(Exception): # if the value is None or empty string, skip decrypt if not data[field_name]: continue data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name]) - except Exception: - pass self.provider_config_cache.set(data) return data diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py index ac12d83ef2..8357dac0d7 100644 --- a/api/core/tools/utils/message_transformer.py +++ b/api/core/tools/utils/message_transformer.py @@ -8,20 +8,21 @@ from uuid import UUID import numpy as np import pytz -from flask_login import current_user from core.file import File, FileTransferMethod, FileType from core.tools.entities.tool_entities import ToolInvokeMessage from core.tools.tool_file_manager import ToolFileManager +from libs.login import current_user +from models.account import Account logger = logging.getLogger(__name__) def safe_json_value(v): if isinstance(v, datetime): - tz_name = getattr(current_user, "timezone", None) if current_user is not None else None - if not tz_name: - 
tz_name = "UTC" + tz_name = "UTC" + if isinstance(current_user, Account) and current_user.timezone is not None: + tz_name = current_user.timezone return v.astimezone(pytz.timezone(tz_name)).isoformat() elif isinstance(v, date): return v.isoformat() @@ -46,7 +47,7 @@ def safe_json_value(v): return v -def safe_json_dict(d): +def safe_json_dict(d: dict): if not isinstance(d, dict): raise TypeError("safe_json_dict() expects a dictionary (dict) as input") return {k: safe_json_value(v) for k, v in d.items()} diff --git a/api/core/tools/utils/rag_web_reader.py b/api/core/tools/utils/rag_web_reader.py deleted file mode 100644 index 22c47fa814..0000000000 --- a/api/core/tools/utils/rag_web_reader.py +++ /dev/null @@ -1,17 +0,0 @@ -import re - - -def get_image_upload_file_ids(content): - pattern = r"!\[image\]\((http?://.*?(file-preview|image-preview))\)" - matches = re.findall(pattern, content) - image_upload_file_ids = [] - for match in matches: - if match[1] == "file-preview": - content_pattern = r"files/([^/]+)/file-preview" - else: - content_pattern = r"files/([^/]+)/image-preview" - content_match = re.search(content_pattern, match[0]) - if content_match: - image_upload_file_id = content_match.group(1) - image_upload_file_ids.append(image_upload_file_id) - return image_upload_file_ids diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py index df052c16db..d8403c2e15 100644 --- a/api/core/tools/utils/web_reader_tool.py +++ b/api/core/tools/utils/web_reader_tool.py @@ -87,7 +87,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str: res = FULL_TEMPLATE.format( title=article.title, - author=article.auther, + author=article.author, text=article.text, ) @@ -97,7 +97,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str: @dataclass class Article: title: str - auther: str + author: str text: Sequence[dict] @@ -105,7 +105,7 @@ def extract_using_readabilipy(html: str): json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True) article = Article( title=json_article.get("title") or "", - auther=json_article.get("byline") or "", + author=json_article.get("byline") or "", text=json_article.get("plain_text") or [], ) @@ -113,7 +113,7 @@ def extract_using_readabilipy(html: str): def get_image_upload_file_ids(content): - pattern = r"!\[image\]\((http?://.*?(file-preview|image-preview))\)" + pattern = r"!\[image\]\((https?://.*?(file-preview|image-preview))\)" matches = re.findall(pattern, content) image_upload_file_ids = [] for match in matches: diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 83f5f558d5..18e6993b38 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -203,9 +203,6 @@ class WorkflowToolProviderController(ToolProviderController): raise ValueError("app not found") app = db_providers.app - if not app: - raise ValueError("can not read app of workflow") - self.tools = [self._get_db_provider_tool(db_providers, app)] return self.tools diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 6824e5e0e8..1387df5973 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -3,8 +3,6 @@ import logging from collections.abc import Generator from typing import Any, Optional, cast -from flask_login import current_user - from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod from 
core.tools.__base.tool import Tool from core.tools.__base.tool_runtime import ToolRuntime @@ -17,8 +15,8 @@ from core.tools.entities.tool_entities import ( from core.tools.errors import ToolInvokeError from extensions.ext_database import db from factories.file_factory import build_from_mapping -from models.account import Account -from models.model import App, EndUser +from libs.login import current_user +from models.model import App from models.workflow import Workflow logger = logging.getLogger(__name__) @@ -81,11 +79,11 @@ class WorkflowTool(Tool): generator = WorkflowAppGenerator() assert self.runtime is not None assert self.runtime.invoke_from is not None - + assert current_user is not None result = generator.generate( app_model=app, workflow=workflow, - user=cast("Account | EndUser", current_user), + user=current_user, args={"inputs": tool_parameters, "files": files}, invoke_from=self.runtime.invoke_from, streaming=False, diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py index a99f5eece3..9e7616874e 100644 --- a/api/core/variables/segments.py +++ b/api/core/variables/segments.py @@ -151,6 +151,11 @@ class FileSegment(Segment): return "" +class BooleanSegment(Segment): + value_type: SegmentType = SegmentType.BOOLEAN + value: bool + + class ArrayAnySegment(ArraySegment): value_type: SegmentType = SegmentType.ARRAY_ANY value: Sequence[Any] @@ -198,6 +203,11 @@ class ArrayFileSegment(ArraySegment): return "" +class ArrayBooleanSegment(ArraySegment): + value_type: SegmentType = SegmentType.ARRAY_BOOLEAN + value: Sequence[bool] + + def get_segment_discriminator(v: Any) -> SegmentType | None: if isinstance(v, Segment): return v.value_type @@ -231,11 +241,13 @@ SegmentUnion: TypeAlias = Annotated[ | Annotated[IntegerSegment, Tag(SegmentType.INTEGER)] | Annotated[ObjectSegment, Tag(SegmentType.OBJECT)] | Annotated[FileSegment, Tag(SegmentType.FILE)] + | Annotated[BooleanSegment, Tag(SegmentType.BOOLEAN)] | Annotated[ArrayAnySegment, Tag(SegmentType.ARRAY_ANY)] | Annotated[ArrayStringSegment, Tag(SegmentType.ARRAY_STRING)] | Annotated[ArrayNumberSegment, Tag(SegmentType.ARRAY_NUMBER)] | Annotated[ArrayObjectSegment, Tag(SegmentType.ARRAY_OBJECT)] | Annotated[ArrayFileSegment, Tag(SegmentType.ARRAY_FILE)] + | Annotated[ArrayBooleanSegment, Tag(SegmentType.ARRAY_BOOLEAN)] ), Discriminator(get_segment_discriminator), ] diff --git a/api/core/variables/types.py b/api/core/variables/types.py index d28fb11401..55f8ae3c72 100644 --- a/api/core/variables/types.py +++ b/api/core/variables/types.py @@ -6,7 +6,12 @@ from core.file.models import File class ArrayValidation(StrEnum): - """Strategy for validating array elements""" + """Strategy for validating array elements. + + Note: + The `NONE` and `FIRST` strategies are primarily for compatibility purposes. + Avoid using them in new code whenever possible. + """ # Skip element validation (only check array container) NONE = "none" @@ -27,12 +32,14 @@ class SegmentType(StrEnum): SECRET = "secret" FILE = "file" + BOOLEAN = "boolean" ARRAY_ANY = "array[any]" ARRAY_STRING = "array[string]" ARRAY_NUMBER = "array[number]" ARRAY_OBJECT = "array[object]" ARRAY_FILE = "array[file]" + ARRAY_BOOLEAN = "array[boolean]" NONE = "none" @@ -76,12 +83,18 @@ class SegmentType(StrEnum): return SegmentType.ARRAY_FILE case SegmentType.NONE: return SegmentType.ARRAY_ANY + case SegmentType.BOOLEAN: + return SegmentType.ARRAY_BOOLEAN case _: # This should be unreachable. 
raise ValueError(f"not supported value {value}") if value is None: return SegmentType.NONE - elif isinstance(value, int) and not isinstance(value, bool): + # Important: The check for `bool` must precede the check for `int`, + # as `bool` is a subclass of `int` in Python's type hierarchy. + elif isinstance(value, bool): + return SegmentType.BOOLEAN + elif isinstance(value, int): return SegmentType.INTEGER elif isinstance(value, float): return SegmentType.FLOAT @@ -111,7 +124,7 @@ class SegmentType(StrEnum): else: return all(element_type.is_valid(i, array_validation=ArrayValidation.NONE) for i in value) - def is_valid(self, value: Any, array_validation: ArrayValidation = ArrayValidation.FIRST) -> bool: + def is_valid(self, value: Any, array_validation: ArrayValidation = ArrayValidation.ALL) -> bool: """ Check if a value matches the segment type. Users of `SegmentType` should call this method, instead of using @@ -126,7 +139,11 @@ class SegmentType(StrEnum): """ if self.is_array_type(): return self._validate_array(value, array_validation) - elif self == SegmentType.NUMBER: + # Important: The check for `bool` must precede the check for `int`, + # as `bool` is a subclass of `int` in Python's type hierarchy. + elif self == SegmentType.BOOLEAN: + return isinstance(value, bool) + elif self in [SegmentType.INTEGER, SegmentType.FLOAT, SegmentType.NUMBER]: return isinstance(value, (int, float)) elif self == SegmentType.STRING: return isinstance(value, str) @@ -141,6 +158,27 @@ class SegmentType(StrEnum): else: raise AssertionError("this statement should be unreachable.") + @staticmethod + def cast_value(value: Any, type_: "SegmentType") -> Any: + # Cast Python's `bool` type to `int` when the runtime type requires + # an integer or number. + # + # This ensures compatibility with existing workflows that may use `bool` as + # `int`, since in Python's type system, `bool` is a subtype of `int`. + # + # This function exists solely to maintain compatibility with existing workflows. + # It should not be used to compromise the integrity of the runtime type system. + # No additional casting rules should be introduced to this function. + + if type_ in ( + SegmentType.INTEGER, + SegmentType.NUMBER, + ) and isinstance(value, bool): + return int(value) + if type_ == SegmentType.ARRAY_NUMBER and all(isinstance(i, bool) for i in value): + return [int(i) for i in value] + return value + def exposed_type(self) -> "SegmentType": """Returns the type exposed to the frontend. @@ -150,6 +188,20 @@ class SegmentType(StrEnum): return SegmentType.NUMBER return self + def element_type(self) -> "SegmentType | None": + """Return the element type of the current segment type, or `None` if the element type is undefined. + + Raises: + ValueError: If the current segment type is not an array type. + + Note: + For certain array types, such as `SegmentType.ARRAY_ANY`, their element types are not defined + by the runtime system. In such cases, this method will return `None`. + """ + if not self.is_array_type(): + raise ValueError(f"element_type is only supported by array type, got {self}") + return _ARRAY_ELEMENT_TYPES_MAPPING.get(self) + _ARRAY_ELEMENT_TYPES_MAPPING: Mapping[SegmentType, SegmentType] = { # ARRAY_ANY does not have corresponding element type. 
@@ -157,6 +209,7 @@ _ARRAY_ELEMENT_TYPES_MAPPING: Mapping[SegmentType, SegmentType] = { SegmentType.ARRAY_NUMBER: SegmentType.NUMBER, SegmentType.ARRAY_OBJECT: SegmentType.OBJECT, SegmentType.ARRAY_FILE: SegmentType.FILE, + SegmentType.ARRAY_BOOLEAN: SegmentType.BOOLEAN, } _ARRAY_TYPES = frozenset( @@ -166,7 +219,6 @@ _ARRAY_TYPES = frozenset( ] ) - _NUMERICAL_TYPES = frozenset( [ SegmentType.NUMBER, diff --git a/api/core/variables/variables.py b/api/core/variables/variables.py index a31ebc848e..16c8116ac1 100644 --- a/api/core/variables/variables.py +++ b/api/core/variables/variables.py @@ -8,11 +8,13 @@ from core.helper import encrypter from .segments import ( ArrayAnySegment, + ArrayBooleanSegment, ArrayFileSegment, ArrayNumberSegment, ArrayObjectSegment, ArraySegment, ArrayStringSegment, + BooleanSegment, FileSegment, FloatSegment, IntegerSegment, @@ -96,10 +98,18 @@ class FileVariable(FileSegment, Variable): pass +class BooleanVariable(BooleanSegment, Variable): + pass + + class ArrayFileVariable(ArrayFileSegment, ArrayVariable): pass +class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable): + pass + + # The `VariableUnion`` type is used to enable serialization and deserialization with Pydantic. # Use `Variable` for type hinting when serialization is not required. # @@ -114,11 +124,13 @@ VariableUnion: TypeAlias = Annotated[ | Annotated[IntegerVariable, Tag(SegmentType.INTEGER)] | Annotated[ObjectVariable, Tag(SegmentType.OBJECT)] | Annotated[FileVariable, Tag(SegmentType.FILE)] + | Annotated[BooleanVariable, Tag(SegmentType.BOOLEAN)] | Annotated[ArrayAnyVariable, Tag(SegmentType.ARRAY_ANY)] | Annotated[ArrayStringVariable, Tag(SegmentType.ARRAY_STRING)] | Annotated[ArrayNumberVariable, Tag(SegmentType.ARRAY_NUMBER)] | Annotated[ArrayObjectVariable, Tag(SegmentType.ARRAY_OBJECT)] | Annotated[ArrayFileVariable, Tag(SegmentType.ARRAY_FILE)] + | Annotated[ArrayBooleanVariable, Tag(SegmentType.ARRAY_BOOLEAN)] | Annotated[SecretVariable, Tag(SegmentType.SECRET)] ), Discriminator(get_segment_discriminator), diff --git a/api/core/workflow/entities/workflow_execution.py b/api/core/workflow/entities/workflow_execution.py index 781be4b3c6..f00dc11aa6 100644 --- a/api/core/workflow/entities/workflow_execution.py +++ b/api/core/workflow/entities/workflow_execution.py @@ -6,12 +6,14 @@ implementation details like tenant_id, app_id, etc. """ from collections.abc import Mapping -from datetime import UTC, datetime +from datetime import datetime from enum import StrEnum from typing import Any, Optional from pydantic import BaseModel, Field +from libs.datetime_utils import naive_utc_now + class WorkflowType(StrEnum): """ @@ -60,7 +62,7 @@ class WorkflowExecution(BaseModel): Calculate elapsed time in seconds. If workflow is not finished, use current time. """ - end_time = self.finished_at or datetime.now(UTC).replace(tzinfo=None) + end_time = self.finished_at or naive_utc_now() return (end_time - self.started_at).total_seconds() @classmethod diff --git a/api/core/workflow/graph_engine/entities/graph_runtime_state.py b/api/core/workflow/graph_engine/entities/graph_runtime_state.py index a62ffe46c9..e2ec7b17f0 100644 --- a/api/core/workflow/graph_engine/entities/graph_runtime_state.py +++ b/api/core/workflow/graph_engine/entities/graph_runtime_state.py @@ -22,7 +22,7 @@ class GraphRuntimeState(BaseModel): # # Note: Since the type of this field is `dict[str, Any]`, its values may not remain consistent # after a serialization and deserialization round trip. 
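# --- Editorial note (illustrative sketch, not part of the diff) --------------
# Context for the change just below: the literal `{}` default on `outputs` is
# replaced with `Field(default_factory=dict)`. Pydantic already copies literal
# defaults per instance, so the switch mainly makes the per-instance intent
# explicit and consistent with the rest of the codebase. A minimal sketch:
from typing import Any

from pydantic import BaseModel, Field


class _State(BaseModel):
    outputs: dict[str, Any] = Field(default_factory=dict)


_a, _b = _State(), _State()
_a.outputs["x"] = 1
assert _b.outputs == {}  # each instance gets its own dict
# ------------------------------------------------------------------------------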
- outputs: dict[str, Any] = {} + outputs: dict[str, Any] = Field(default_factory=dict) node_run_steps: int = 0 """node run steps""" diff --git a/api/core/workflow/graph_engine/entities/runtime_route_state.py b/api/core/workflow/graph_engine/entities/runtime_route_state.py index f2d9c98936..a4ddfafab5 100644 --- a/api/core/workflow/graph_engine/entities/runtime_route_state.py +++ b/api/core/workflow/graph_engine/entities/runtime_route_state.py @@ -1,5 +1,5 @@ import uuid -from datetime import UTC, datetime +from datetime import datetime from enum import Enum from typing import Optional @@ -7,6 +7,7 @@ from pydantic import BaseModel, Field from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus +from libs.datetime_utils import naive_utc_now class RouteNodeState(BaseModel): @@ -71,7 +72,7 @@ class RouteNodeState(BaseModel): raise Exception(f"Invalid route status {run_result.status}") self.node_run_result = run_result - self.finished_at = datetime.now(UTC).replace(tzinfo=None) + self.finished_at = naive_utc_now() class RuntimeRouteState(BaseModel): @@ -89,7 +90,7 @@ class RuntimeRouteState(BaseModel): :param node_id: node id """ - state = RouteNodeState(node_id=node_id, start_at=datetime.now(UTC).replace(tzinfo=None)) + state = RouteNodeState(node_id=node_id, start_at=naive_utc_now()) self.node_state_mapping[state.id] = state return state diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index b9663d32f7..03b920ccbb 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -6,7 +6,6 @@ import uuid from collections.abc import Generator, Mapping from concurrent.futures import ThreadPoolExecutor, wait from copy import copy, deepcopy -from datetime import UTC, datetime from typing import Any, Optional, cast from flask import Flask, current_app @@ -51,6 +50,7 @@ from core.workflow.nodes.base import BaseNode from core.workflow.nodes.end.end_stream_processor import EndStreamProcessor from core.workflow.nodes.enums import ErrorStrategy, FailBranchSourceHandle from core.workflow.nodes.event import RunCompletedEvent, RunRetrieverResourceEvent, RunStreamChunkEvent +from libs.datetime_utils import naive_utc_now from libs.flask_utils import preserve_flask_contexts from models.enums import UserFrom from models.workflow import WorkflowType @@ -640,7 +640,7 @@ class GraphEngine: while should_continue_retry and retries <= max_retries: try: # run node - retry_start_at = datetime.now(UTC).replace(tzinfo=None) + retry_start_at = naive_utc_now() # yield control to other threads time.sleep(0.001) event_stream = node.run() diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 2b6382a8a6..144f036aa4 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -13,8 +13,9 @@ from core.agent.strategy.plugin import PluginAgentStrategy from core.file import File, FileTransferMethod from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance, ModelManager -from core.model_runtime.entities.llm_entities import LLMUsage +from core.model_runtime.entities.llm_entities import LLMUsage, LLMUsageMetadata from core.model_runtime.entities.model_entities import AIModelEntity, ModelType +from core.model_runtime.utils.encoders import jsonable_encoder from 
core.plugin.entities.request import InvokeCredentials from core.plugin.impl.exc import PluginDaemonClientSideError from core.plugin.impl.plugin import PluginInstaller @@ -558,7 +559,7 @@ class AgentNode(BaseNode): assert isinstance(message.message, ToolInvokeMessage.JsonMessage) if node_type == NodeType.AGENT: msg_metadata: dict[str, Any] = message.message.json_object.pop("execution_metadata", {}) - llm_usage = LLMUsage.from_metadata(msg_metadata) + llm_usage = LLMUsage.from_metadata(cast(LLMUsageMetadata, msg_metadata)) agent_execution_metadata = { WorkflowNodeExecutionMetadataKey(key): value for key, value in msg_metadata.items() @@ -692,7 +693,13 @@ class AgentNode(BaseNode): yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json_output, **variables}, + outputs={ + "text": text, + "usage": jsonable_encoder(llm_usage), + "files": ArrayFileSegment(value=files), + "json": json_output, + **variables, + }, metadata={ **agent_execution_metadata, WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info, diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index fdf3932827..17bd841fc9 100644 --- a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -8,6 +8,7 @@ from core.helper.code_executor.code_node_provider import CodeNodeProvider from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider from core.variables.segments import ArrayFileSegment +from core.variables.types import SegmentType from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode @@ -119,6 +120,14 @@ class CodeNode(BaseNode): return value.replace("\x00", "") + def _check_boolean(self, value: bool | None, variable: str) -> bool | None: + if value is None: + return None + if not isinstance(value, bool): + raise OutputValidationError(f"Output variable `{variable}` must be a boolean") + + return value + def _check_number(self, value: int | float | None, variable: str) -> int | float | None: """ Check number @@ -173,6 +182,8 @@ class CodeNode(BaseNode): prefix=f"{prefix}.{output_name}" if prefix else output_name, depth=depth + 1, ) + elif isinstance(output_value, bool): + self._check_boolean(output_value, variable=f"{prefix}.{output_name}" if prefix else output_name) elif isinstance(output_value, int | float): self._check_number( value=output_value, variable=f"{prefix}.{output_name}" if prefix else output_name @@ -232,7 +243,7 @@ class CodeNode(BaseNode): if output_name not in result: raise OutputValidationError(f"Output {prefix}{dot}{output_name} is missing.") - if output_config.type == "object": + if output_config.type == SegmentType.OBJECT: # check if output is object if not isinstance(result.get(output_name), dict): if result[output_name] is None: @@ -249,18 +260,28 @@ class CodeNode(BaseNode): prefix=f"{prefix}.{output_name}", depth=depth + 1, ) - elif output_config.type == "number": + elif output_config.type == SegmentType.NUMBER: # check if number available - transformed_result[output_name] = self._check_number( - value=result[output_name], variable=f"{prefix}{dot}{output_name}" - ) - elif output_config.type == "string": + checked = 
self._check_number(value=result[output_name], variable=f"{prefix}{dot}{output_name}") + # If the output is a boolean and the output schema specifies a NUMBER type, + # convert the boolean value to an integer. + # + # This ensures compatibility with existing workflows that may use + # `True` and `False` as values for NUMBER type outputs. + transformed_result[output_name] = self._convert_boolean_to_int(checked) + + elif output_config.type == SegmentType.STRING: # check if string available transformed_result[output_name] = self._check_string( value=result[output_name], variable=f"{prefix}{dot}{output_name}", ) - elif output_config.type == "array[number]": + elif output_config.type == SegmentType.BOOLEAN: + transformed_result[output_name] = self._check_boolean( + value=result[output_name], + variable=f"{prefix}{dot}{output_name}", + ) + elif output_config.type == SegmentType.ARRAY_NUMBER: # check if array of number available if not isinstance(result[output_name], list): if result[output_name] is None: @@ -278,10 +299,17 @@ class CodeNode(BaseNode): ) transformed_result[output_name] = [ - self._check_number(value=value, variable=f"{prefix}{dot}{output_name}[{i}]") + # If the element is a boolean and the output schema specifies an `array[number]` type, + # convert the boolean value to an integer. + # + # This ensures compatibility with existing workflows that may use + # `True` and `False` as values for NUMBER type outputs. + self._convert_boolean_to_int( + self._check_number(value=value, variable=f"{prefix}{dot}{output_name}[{i}]"), + ) for i, value in enumerate(result[output_name]) ] - elif output_config.type == "array[string]": + elif output_config.type == SegmentType.ARRAY_STRING: # check if array of string available if not isinstance(result[output_name], list): if result[output_name] is None: @@ -302,7 +330,7 @@ class CodeNode(BaseNode): self._check_string(value=value, variable=f"{prefix}{dot}{output_name}[{i}]") for i, value in enumerate(result[output_name]) ] - elif output_config.type == "array[object]": + elif output_config.type == SegmentType.ARRAY_OBJECT: # check if array of object available if not isinstance(result[output_name], list): if result[output_name] is None: @@ -340,6 +368,22 @@ class CodeNode(BaseNode): ) for i, value in enumerate(result[output_name]) ] + elif output_config.type == SegmentType.ARRAY_BOOLEAN: + # check if array of boolean available + if not isinstance(result[output_name], list): + if result[output_name] is None: + transformed_result[output_name] = None + else: + raise OutputValidationError( + f"Output {prefix}{dot}{output_name} is not an array," + f" got {type(result.get(output_name))} instead." + ) + else: + transformed_result[output_name] = [ + self._check_boolean(value=value, variable=f"{prefix}{dot}{output_name}[{i}]") + for i, value in enumerate(result[output_name]) + ] + + else: raise OutputValidationError(f"Output type {output_config.type} is not supported.") @@ -374,3 +418,16 @@ class CodeNode(BaseNode): @property def retry(self) -> bool: return self._node_data.retry_config.retry_enabled + + @staticmethod + def _convert_boolean_to_int(value: bool | int | float | None) -> int | float | None: + """This function converts booleans to integers when the output schema specifies a NUMBER type. + + This ensures compatibility with existing workflows that may use + `True` and `False` as values for NUMBER type outputs.
+ """ + if value is None: + return None + if isinstance(value, bool): + return int(value) + return value diff --git a/api/core/workflow/nodes/code/entities.py b/api/core/workflow/nodes/code/entities.py index a454035888..9d380c6fb6 100644 --- a/api/core/workflow/nodes/code/entities.py +++ b/api/core/workflow/nodes/code/entities.py @@ -1,11 +1,31 @@ -from typing import Literal, Optional +from typing import Annotated, Literal, Optional -from pydantic import BaseModel +from pydantic import AfterValidator, BaseModel from core.helper.code_executor.code_executor import CodeLanguage +from core.variables.types import SegmentType from core.workflow.entities.variable_entities import VariableSelector from core.workflow.nodes.base import BaseNodeData +_ALLOWED_OUTPUT_FROM_CODE = frozenset( + [ + SegmentType.STRING, + SegmentType.NUMBER, + SegmentType.OBJECT, + SegmentType.BOOLEAN, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_BOOLEAN, + ] +) + + +def _validate_type(segment_type: SegmentType) -> SegmentType: + if segment_type not in _ALLOWED_OUTPUT_FROM_CODE: + raise ValueError(f"invalid type for code output, expected {_ALLOWED_OUTPUT_FROM_CODE}, actual {segment_type}") + return segment_type + class CodeNodeData(BaseNodeData): """ @@ -13,7 +33,7 @@ class CodeNodeData(BaseNodeData): """ class Output(BaseModel): - type: Literal["string", "number", "object", "array[string]", "array[number]", "array[object]"] + type: Annotated[SegmentType, AfterValidator(_validate_type)] children: Optional[dict[str, "CodeNodeData.Output"]] = None class Dependency(BaseModel): diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py index c9f7fa1221..b6f9383618 100644 --- a/api/core/workflow/nodes/http_request/executor.py +++ b/api/core/workflow/nodes/http_request/executor.py @@ -12,6 +12,7 @@ from json_repair import repair_json from configs import dify_config from core.file import file_manager +from core.file.enums import FileTransferMethod from core.helper import ssrf_proxy from core.variables.segments import ArrayFileSegment, FileSegment from core.workflow.entities.variable_pool import VariablePool @@ -228,7 +229,9 @@ class Executor: files: dict[str, list[tuple[str | None, bytes, str]]] = {} for key, files_in_segment in files_list: for file in files_in_segment: - if file.related_id is not None: + if file.related_id is not None or ( + file.transfer_method == FileTransferMethod.REMOTE_URL and file.remote_url is not None + ): file_tuple = ( file.filename, file_manager.download(file), @@ -326,22 +329,16 @@ class Executor: """ do http request depending on api bundle """ - if self.method not in { - "get", - "head", - "post", - "put", - "delete", - "patch", - "options", - "GET", - "POST", - "PUT", - "PATCH", - "DELETE", - "HEAD", - "OPTIONS", - }: + _METHOD_MAP = { + "get": ssrf_proxy.get, + "head": ssrf_proxy.head, + "post": ssrf_proxy.post, + "put": ssrf_proxy.put, + "delete": ssrf_proxy.delete, + "patch": ssrf_proxy.patch, + } + method_lc = self.method.lower() + if method_lc not in _METHOD_MAP: raise InvalidHttpMethodError(f"Invalid http method {self.method}") request_args = { @@ -359,11 +356,11 @@ class Executor: } # request_args = {k: v for k, v in request_args.items() if v is not None} try: - response = getattr(ssrf_proxy, self.method.lower())(**request_args) + response: httpx.Response = _METHOD_MAP[method_lc](**request_args) except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e: raise 
HttpRequestNodeError(str(e)) from e # FIXME: fix type ignore, this maybe httpx type issue - return response # type: ignore + return response def invoke(self) -> Response: # assemble headers diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index def1e1cfa3..7f591a3ea9 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -4,7 +4,7 @@ import time import uuid from collections.abc import Generator, Mapping, Sequence from concurrent.futures import Future, wait -from datetime import UTC, datetime +from datetime import datetime from queue import Empty, Queue from typing import TYPE_CHECKING, Any, Optional, cast @@ -41,6 +41,7 @@ from core.workflow.nodes.enums import ErrorStrategy, NodeType from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData from factories.variable_factory import build_segment +from libs.datetime_utils import naive_utc_now from libs.flask_utils import preserve_flask_contexts from .exc import ( @@ -179,7 +180,7 @@ class IterationNode(BaseNode): thread_pool_id=self.thread_pool_id, ) - start_at = datetime.now(UTC).replace(tzinfo=None) + start_at = naive_utc_now() yield IterationRunStartedEvent( iteration_id=self.id, @@ -428,7 +429,7 @@ class IterationNode(BaseNode): """ run single iteration """ - iter_start_at = datetime.now(UTC).replace(tzinfo=None) + iter_start_at = naive_utc_now() try: rst = graph_engine.run() @@ -505,7 +506,7 @@ class IterationNode(BaseNode): variable_pool.add([self.node_id, "index"], next_index) if next_index < len(iterator_list_value): variable_pool.add([self.node_id, "item"], iterator_list_value[next_index]) - duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() + duration = (naive_utc_now() - iter_start_at).total_seconds() iter_run_map[iteration_run_id] = duration yield IterationRunNextEvent( iteration_id=self.id, @@ -526,7 +527,7 @@ class IterationNode(BaseNode): if next_index < len(iterator_list_value): variable_pool.add([self.node_id, "item"], iterator_list_value[next_index]) - duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() + duration = (naive_utc_now() - iter_start_at).total_seconds() iter_run_map[iteration_run_id] = duration yield IterationRunNextEvent( iteration_id=self.id, @@ -602,7 +603,7 @@ class IterationNode(BaseNode): if next_index < len(iterator_list_value): variable_pool.add([self.node_id, "item"], iterator_list_value[next_index]) - duration = (datetime.now(UTC).replace(tzinfo=None) - iter_start_at).total_seconds() + duration = (naive_utc_now() - iter_start_at).total_seconds() iter_run_map[iteration_run_id] = duration yield IterationRunNextEvent( iteration_id=self.id, diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 7303b68501..5e5c9f520e 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any, Optional, cast from sqlalchemy import Float, and_, func, or_, text from sqlalchemy import cast as sqlalchemy_cast -from sqlalchemy.orm import Session +from sqlalchemy.orm import sessionmaker from core.app.app_config.entities import DatasetRetrieveConfigEntity from 
core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity @@ -175,7 +175,7 @@ class KnowledgeRetrievalNode(BaseNode): redis_client.zremrangebyscore(key, 0, current_time - 60000) request_count = redis_client.zcard(key) if request_count > knowledge_rate_limit.limit: - with Session(db.engine) as session: + with sessionmaker(db.engine).begin() as session: # add ratelimit record rate_limit_log = RateLimitLog( tenant_id=self.tenant_id, @@ -183,7 +183,6 @@ class KnowledgeRetrievalNode(BaseNode): operation="knowledge", ) session.add(rate_limit_log) - session.commit() return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, inputs=variables, @@ -389,6 +388,15 @@ class KnowledgeRetrievalNode(BaseNode): "segment_id": segment.id, "retriever_from": "workflow", "score": record.score or 0.0, + "child_chunks": [ + { + "id": str(getattr(chunk, "id", "")), + "content": str(getattr(chunk, "content", "")), + "position": int(getattr(chunk, "position", 0)), + "score": float(getattr(chunk, "score", 0.0)), + } + for chunk in (record.child_chunks or []) + ], "segment_hit_count": segment.hit_count, "segment_word_count": segment.word_count, "segment_position": segment.position, @@ -572,7 +580,7 @@ class KnowledgeRetrievalNode(BaseNode): def _process_metadata_filter_func( self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list ): - if value is None: + if value is None and condition not in ("empty", "not empty"): return key = f"{metadata_name}_{sequence}" diff --git a/api/core/workflow/nodes/list_operator/entities.py b/api/core/workflow/nodes/list_operator/entities.py index 75df784a92..e51a91f07f 100644 --- a/api/core/workflow/nodes/list_operator/entities.py +++ b/api/core/workflow/nodes/list_operator/entities.py @@ -1,36 +1,43 @@ from collections.abc import Sequence -from typing import Literal +from enum import StrEnum from pydantic import BaseModel, Field from core.workflow.nodes.base import BaseNodeData -_Condition = Literal[ + +class FilterOperator(StrEnum): # string conditions - "contains", - "start with", - "end with", - "is", - "in", - "empty", - "not contains", - "is not", - "not in", - "not empty", + CONTAINS = "contains" + START_WITH = "start with" + END_WITH = "end with" + IS = "is" + IN = "in" + EMPTY = "empty" + NOT_CONTAINS = "not contains" + IS_NOT = "is not" + NOT_IN = "not in" + NOT_EMPTY = "not empty" # number conditions - "=", - "≠", - "<", - ">", - "≥", - "≤", -] + EQUAL = "=" + NOT_EQUAL = "≠" + LESS_THAN = "<" + GREATER_THAN = ">" + GREATER_THAN_OR_EQUAL = "≥" + LESS_THAN_OR_EQUAL = "≤" + + +class Order(StrEnum): + ASC = "asc" + DESC = "desc" class FilterCondition(BaseModel): key: str = "" - comparison_operator: _Condition = "contains" - value: str | Sequence[str] = "" + comparison_operator: FilterOperator = FilterOperator.CONTAINS + # the value is bool if the filter operator is comparing with + # a boolean constant. 
+ value: str | Sequence[str] | bool = "" class FilterBy(BaseModel): @@ -38,10 +45,10 @@ class FilterBy(BaseModel): conditions: Sequence[FilterCondition] = Field(default_factory=list) -class OrderBy(BaseModel): +class OrderByConfig(BaseModel): enabled: bool = False key: str = "" - value: Literal["asc", "desc"] = "asc" + value: Order = Order.ASC class Limit(BaseModel): @@ -57,6 +64,6 @@ class ExtractConfig(BaseModel): class ListOperatorNodeData(BaseNodeData): variable: Sequence[str] = Field(default_factory=list) filter_by: FilterBy - order_by: OrderBy + order_by: OrderByConfig limit: Limit extract_by: ExtractConfig = Field(default_factory=ExtractConfig) diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index d2e022dc9d..a727a826c6 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -1,18 +1,40 @@ from collections.abc import Callable, Mapping, Sequence -from typing import Any, Literal, Optional, Union +from typing import Any, Optional, TypeAlias, TypeVar from core.file import File from core.variables import ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment -from core.variables.segments import ArrayAnySegment, ArraySegment +from core.variables.segments import ArrayAnySegment, ArrayBooleanSegment, ArraySegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.enums import ErrorStrategy, NodeType -from .entities import ListOperatorNodeData +from .entities import FilterOperator, ListOperatorNodeData, Order from .exc import InvalidConditionError, InvalidFilterValueError, InvalidKeyError, ListOperatorError +_SUPPORTED_TYPES_TUPLE = ( + ArrayFileSegment, + ArrayNumberSegment, + ArrayStringSegment, + ArrayBooleanSegment, +) +_SUPPORTED_TYPES_ALIAS: TypeAlias = ArrayFileSegment | ArrayNumberSegment | ArrayStringSegment | ArrayBooleanSegment + + +_T = TypeVar("_T") + + +def _negation(filter_: Callable[[_T], bool]) -> Callable[[_T], bool]: + """Returns the negation of a given filter function. If the original filter + returns `True` for a value, the negated filter will return `False`, and vice versa. 
+ """ + + def wrapper(value: _T) -> bool: + return not filter_(value) + + return wrapper + class ListOperatorNode(BaseNode): _node_type = NodeType.LIST_OPERATOR @@ -69,11 +91,8 @@ class ListOperatorNode(BaseNode): process_data=process_data, outputs=outputs, ) - if not isinstance(variable, ArrayFileSegment | ArrayNumberSegment | ArrayStringSegment): - error_message = ( - f"Variable {self._node_data.variable} is not an ArrayFileSegment, ArrayNumberSegment " - "or ArrayStringSegment" - ) + if not isinstance(variable, _SUPPORTED_TYPES_TUPLE): + error_message = f"Variable {self._node_data.variable} is not an array type, actual type: {type(variable)}" return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=error_message, inputs=inputs, outputs=outputs ) @@ -122,9 +141,7 @@ class ListOperatorNode(BaseNode): outputs=outputs, ) - def _apply_filter( - self, variable: Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment] - ) -> Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment]: + def _apply_filter(self, variable: _SUPPORTED_TYPES_ALIAS) -> _SUPPORTED_TYPES_ALIAS: filter_func: Callable[[Any], bool] result: list[Any] = [] for condition in self._node_data.filter_by.conditions: @@ -154,33 +171,35 @@ class ListOperatorNode(BaseNode): ) result = list(filter(filter_func, variable.value)) variable = variable.model_copy(update={"value": result}) + elif isinstance(variable, ArrayBooleanSegment): + if not isinstance(condition.value, bool): + raise InvalidFilterValueError(f"Invalid filter value: {condition.value}") + filter_func = _get_boolean_filter_func(condition=condition.comparison_operator, value=condition.value) + result = list(filter(filter_func, variable.value)) + variable = variable.model_copy(update={"value": result}) + else: + raise AssertionError("this statment should be unreachable.") return variable - def _apply_order( - self, variable: Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment] - ) -> Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment]: - if isinstance(variable, ArrayStringSegment): - result = _order_string(order=self._node_data.order_by.value, array=variable.value) - variable = variable.model_copy(update={"value": result}) - elif isinstance(variable, ArrayNumberSegment): - result = _order_number(order=self._node_data.order_by.value, array=variable.value) + def _apply_order(self, variable: _SUPPORTED_TYPES_ALIAS) -> _SUPPORTED_TYPES_ALIAS: + if isinstance(variable, (ArrayStringSegment, ArrayNumberSegment, ArrayBooleanSegment)): + result = sorted(variable.value, reverse=self._node_data.order_by == Order.DESC) variable = variable.model_copy(update={"value": result}) elif isinstance(variable, ArrayFileSegment): result = _order_file( order=self._node_data.order_by.value, order_by=self._node_data.order_by.key, array=variable.value ) variable = variable.model_copy(update={"value": result}) + else: + raise AssertionError("this statement should be unreachable") + return variable - def _apply_slice( - self, variable: Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment] - ) -> Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment]: + def _apply_slice(self, variable: _SUPPORTED_TYPES_ALIAS) -> _SUPPORTED_TYPES_ALIAS: result = variable.value[: self._node_data.limit.size] return variable.model_copy(update={"value": result}) - def _extract_slice( - self, variable: Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment] - ) -> Union[ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment]: + def 
_extract_slice(self, variable: _SUPPORTED_TYPES_ALIAS) -> _SUPPORTED_TYPES_ALIAS: value = int(self.graph_runtime_state.variable_pool.convert_template(self._node_data.extract_by.serial).text) if value < 1: raise ValueError(f"Invalid serial index: must be >= 1, got {value}") @@ -232,11 +251,11 @@ def _get_string_filter_func(*, condition: str, value: str) -> Callable[[str], bo case "empty": return lambda x: x == "" case "not contains": - return lambda x: not _contains(value)(x) + return _negation(_contains(value)) case "is not": - return lambda x: not _is(value)(x) + return _negation(_is(value)) case "not in": - return lambda x: not _in(value)(x) + return _negation(_in(value)) case "not empty": return lambda x: x != "" case _: @@ -248,7 +267,7 @@ def _get_sequence_filter_func(*, condition: str, value: Sequence[str]) -> Callab case "in": return _in(value) case "not in": - return lambda x: not _in(value)(x) + return _negation(_in(value)) case _: raise InvalidConditionError(f"Invalid condition: {condition}") @@ -271,6 +290,16 @@ def _get_number_filter_func(*, condition: str, value: int | float) -> Callable[[ raise InvalidConditionError(f"Invalid condition: {condition}") +def _get_boolean_filter_func(*, condition: FilterOperator, value: bool) -> Callable[[bool], bool]: + match condition: + case FilterOperator.IS: + return _is(value) + case FilterOperator.IS_NOT: + return _negation(_is(value)) + case _: + raise InvalidConditionError(f"Invalid condition: {condition}") + + def _get_file_filter_func(*, key: str, condition: str, value: str | Sequence[str]) -> Callable[[File], bool]: extract_func: Callable[[File], Any] if key in {"name", "extension", "mime_type", "url"} and isinstance(value, str): @@ -298,7 +327,7 @@ def _endswith(value: str) -> Callable[[str], bool]: return lambda x: x.endswith(value) -def _is(value: str) -> Callable[[str], bool]: +def _is(value: _T) -> Callable[[_T], bool]: return lambda x: x == value @@ -330,21 +359,13 @@ def _ge(value: int | float) -> Callable[[int | float], bool]: return lambda x: x >= value -def _order_number(*, order: Literal["asc", "desc"], array: Sequence[int | float]): - return sorted(array, key=lambda x: x, reverse=order == "desc") - - -def _order_string(*, order: Literal["asc", "desc"], array: Sequence[str]): - return sorted(array, key=lambda x: x, reverse=order == "desc") - - -def _order_file(*, order: Literal["asc", "desc"], order_by: str = "", array: Sequence[File]): +def _order_file(*, order: Order, order_by: str = "", array: Sequence[File]): extract_func: Callable[[File], Any] if order_by in {"name", "type", "extension", "mime_type", "transfer_method", "url"}: extract_func = _get_file_extract_string_func(key=order_by) - return sorted(array, key=lambda x: extract_func(x), reverse=order == "desc") + return sorted(array, key=lambda x: extract_func(x), reverse=order == Order.DESC) elif order_by == "size": extract_func = _get_file_extract_number_func(key=order_by) - return sorted(array, key=lambda x: extract_func(x), reverse=order == "desc") + return sorted(array, key=lambda x: extract_func(x), reverse=order == Order.DESC) else: raise InvalidKeyError(f"Invalid order key: {order_by}") diff --git a/api/core/workflow/nodes/llm/entities.py b/api/core/workflow/nodes/llm/entities.py index 4bb62d35a2..e6f8abeba0 100644 --- a/api/core/workflow/nodes/llm/entities.py +++ b/api/core/workflow/nodes/llm/entities.py @@ -13,7 +13,7 @@ class ModelConfig(BaseModel): provider: str name: str mode: LLMMode - completion_params: dict[str, Any] = {} + completion_params: dict[str, 
Any] = Field(default_factory=dict) class ContextConfig(BaseModel): diff --git a/api/core/workflow/nodes/llm/llm_utils.py b/api/core/workflow/nodes/llm/llm_utils.py index 0966c87a1d..2441e30c87 100644 --- a/api/core/workflow/nodes/llm/llm_utils.py +++ b/api/core/workflow/nodes/llm/llm_utils.py @@ -1,5 +1,4 @@ from collections.abc import Sequence -from datetime import UTC, datetime from typing import Optional, cast from sqlalchemy import select, update @@ -20,6 +19,7 @@ from core.variables.segments import ArrayAnySegment, ArrayFileSegment, FileSegme from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.nodes.llm.entities import ModelConfig +from libs.datetime_utils import naive_utc_now from models import db from models.model import Conversation from models.provider import Provider, ProviderType @@ -149,7 +149,7 @@ def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUs ) .values( quota_used=Provider.quota_used + used_quota, - last_used=datetime.now(tz=UTC).replace(tzinfo=None), + last_used=naive_utc_now(), ) ) session.execute(stmt) diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index dfc2a0000b..10059fdcb1 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -3,7 +3,7 @@ import io import json import logging from collections.abc import Generator, Mapping, Sequence -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any, Optional, Union from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.file import FileType, file_manager @@ -55,7 +55,6 @@ from core.workflow.entities.variable_entities import VariableSelector from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus from core.workflow.enums import SystemVariableKey -from core.workflow.graph_engine.entities.event import InNodeEvent from core.workflow.nodes.base import BaseNode from core.workflow.nodes.base.entities import BaseNodeData, RetryConfig from core.workflow.nodes.enums import ErrorStrategy, NodeType @@ -90,6 +89,7 @@ from .file_saver import FileSaverImpl, LLMFileSaver if TYPE_CHECKING: from core.file.models import File from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState + from core.workflow.graph_engine.entities.event import InNodeEvent logger = logging.getLogger(__name__) @@ -161,7 +161,7 @@ class LLMNode(BaseNode): def version(cls) -> str: return "1" - def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: + def _run(self) -> Generator[Union[NodeEvent, "InNodeEvent"], None, None]: node_inputs: Optional[dict[str, Any]] = None process_data = None result_text = "" @@ -737,7 +737,7 @@ class LLMNode(BaseNode): and isinstance(prompt_messages[-1], UserPromptMessage) and isinstance(prompt_messages[-1].content, list) ): - prompt_messages[-1] = UserPromptMessage(content=prompt_messages[-1].content + file_prompts) + prompt_messages[-1] = UserPromptMessage(content=file_prompts + prompt_messages[-1].content) else: prompt_messages.append(UserPromptMessage(content=file_prompts)) diff --git a/api/core/workflow/nodes/loop/entities.py b/api/core/workflow/nodes/loop/entities.py index d04e0bfae1..3ed4d21ba5 100644 --- a/api/core/workflow/nodes/loop/entities.py +++ b/api/core/workflow/nodes/loop/entities.py @@ -12,9 +12,11 @@ _VALID_VAR_TYPE 
= frozenset( SegmentType.STRING, SegmentType.NUMBER, SegmentType.OBJECT, + SegmentType.BOOLEAN, SegmentType.ARRAY_STRING, SegmentType.ARRAY_NUMBER, SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_BOOLEAN, ] ) diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index 655de9362f..64296dc046 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -2,7 +2,7 @@ import json import logging import time from collections.abc import Generator, Mapping, Sequence -from datetime import UTC, datetime +from datetime import datetime from typing import TYPE_CHECKING, Any, Literal, Optional, cast from configs import dify_config @@ -36,6 +36,7 @@ from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from core.workflow.nodes.loop.entities import LoopNodeData from core.workflow.utils.condition.processor import ConditionProcessor from factories.variable_factory import TypeMismatchError, build_segment_with_type +from libs.datetime_utils import naive_utc_now if TYPE_CHECKING: from core.workflow.entities.variable_pool import VariablePool @@ -143,7 +144,7 @@ class LoopNode(BaseNode): thread_pool_id=self.thread_pool_id, ) - start_at = datetime.now(UTC).replace(tzinfo=None) + start_at = naive_utc_now() condition_processor = ConditionProcessor() # Start Loop event @@ -171,7 +172,7 @@ class LoopNode(BaseNode): try: check_break_result = False for i in range(loop_count): - loop_start_time = datetime.now(UTC).replace(tzinfo=None) + loop_start_time = naive_utc_now() # run single loop loop_result = yield from self._run_single_loop( graph_engine=graph_engine, @@ -185,7 +186,7 @@ class LoopNode(BaseNode): start_at=start_at, inputs=inputs, ) - loop_end_time = datetime.now(UTC).replace(tzinfo=None) + loop_end_time = naive_utc_now() single_loop_variable = {} for key, selector in loop_variable_selectors.items(): @@ -313,30 +314,31 @@ class LoopNode(BaseNode): and event.node_type == NodeType.LOOP_END and not isinstance(event, NodeRunStreamChunkEvent) ): - check_break_result = True + # Check if variables in break conditions exist and process conditions + # Allow loop internal variables to be used in break conditions + available_conditions = [] + for condition in break_conditions: + variable = self.graph_runtime_state.variable_pool.get(condition.variable_selector) + if variable: + available_conditions.append(condition) + + # Process conditions if at least one variable is available + if available_conditions: + input_conditions, group_result, check_break_result = condition_processor.process_conditions( + variable_pool=self.graph_runtime_state.variable_pool, + conditions=available_conditions, + operator=logical_operator, + ) + if check_break_result: + break + else: + check_break_result = True yield self._handle_event_metadata(event=event, iter_run_index=current_index) break if isinstance(event, NodeRunSucceededEvent): yield self._handle_event_metadata(event=event, iter_run_index=current_index) - # Check if all variables in break conditions exist - exists_variable = False - for condition in break_conditions: - if not self.graph_runtime_state.variable_pool.get(condition.variable_selector): - exists_variable = False - break - else: - exists_variable = True - if exists_variable: - input_conditions, group_result, check_break_result = condition_processor.process_conditions( - variable_pool=self.graph_runtime_state.variable_pool, - conditions=break_conditions, - operator=logical_operator, - ) - if check_break_result: - break - elif 
isinstance(event, BaseGraphEvent): if isinstance(event, GraphRunFailedEvent): # Loop run failed @@ -402,11 +404,11 @@ class LoopNode(BaseNode): for node_id in loop_graph.node_ids: variable_pool.remove([node_id]) - _outputs = {} + _outputs: dict[str, Segment | int | None] = {} for loop_variable_key, loop_variable_selector in loop_variable_selectors.items(): _loop_variable_segment = variable_pool.get(loop_variable_selector) if _loop_variable_segment: - _outputs[loop_variable_key] = _loop_variable_segment.value + _outputs[loop_variable_key] = _loop_variable_segment else: _outputs[loop_variable_key] = None @@ -520,21 +522,33 @@ class LoopNode(BaseNode): return variable_mapping @staticmethod - def _get_segment_for_constant(var_type: SegmentType, value: Any) -> Segment: + def _get_segment_for_constant(var_type: SegmentType, original_value: Any) -> Segment: """Get the appropriate segment type for a constant value.""" - if var_type in ["array[string]", "array[number]", "array[object]"]: - if value and isinstance(value, str): - value = json.loads(value) + # TODO: Refactor for maintainability: + # 1. Ensure type handling logic stays synchronized with _VALID_VAR_TYPE (entities.py) + # 2. Consider moving this method to LoopVariableData class for better encapsulation + if not var_type.is_array_type() or var_type == SegmentType.ARRAY_BOOLEAN: + value = original_value + elif var_type in [ + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_STRING, + ]: + if original_value and isinstance(original_value, str): + value = json.loads(original_value) else: + logger.warning("unexpected value for LoopNode, value_type=%s, value=%s", var_type, original_value) value = [] + else: + raise AssertionError("this statement should be unreachable.") try: - return build_segment_with_type(var_type, value) + return build_segment_with_type(var_type, value=value) except TypeMismatchError as type_exc: # Attempt to parse the value as a JSON-encoded string, if applicable. - if not isinstance(value, str): + if not isinstance(original_value, str): raise try: - value = json.loads(value) + value = json.loads(original_value) except ValueError: raise type_exc return build_segment_with_type(var_type, value) diff --git a/api/core/workflow/nodes/parameter_extractor/entities.py b/api/core/workflow/nodes/parameter_extractor/entities.py index 916778d167..12347d21a5 100644 --- a/api/core/workflow/nodes/parameter_extractor/entities.py +++ b/api/core/workflow/nodes/parameter_extractor/entities.py @@ -1,10 +1,46 @@ -from typing import Any, Literal, Optional +from typing import Annotated, Any, Literal, Optional -from pydantic import BaseModel, Field, field_validator +from pydantic import ( + BaseModel, + BeforeValidator, + Field, + field_validator, +) from core.prompt.entities.advanced_prompt_entities import MemoryConfig +from core.variables.types import SegmentType from core.workflow.nodes.base import BaseNodeData -from core.workflow.nodes.llm import ModelConfig, VisionConfig +from core.workflow.nodes.llm.entities import ModelConfig, VisionConfig + +_OLD_BOOL_TYPE_NAME = "bool" +_OLD_SELECT_TYPE_NAME = "select" + +_VALID_PARAMETER_TYPES = frozenset( + [ + SegmentType.STRING, # "string", + SegmentType.NUMBER, # "number", + SegmentType.BOOLEAN, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_BOOLEAN, + _OLD_BOOL_TYPE_NAME, # old boolean type used by Parameter Extractor node + _OLD_SELECT_TYPE_NAME, # string type with enumeration choices.
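+        # The two legacy names above are normalized by `_validate_type` below:
+        #   "bool"   -> SegmentType.BOOLEAN
+        #   "select" -> SegmentType.STRING (its `options` surface as an enum in the JSON schema)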
+ ] +) + + +def _validate_type(parameter_type: str) -> SegmentType: + if not isinstance(parameter_type, str): + raise TypeError(f"type should be str, got {type(parameter_type)}, value={parameter_type}") + if parameter_type not in _VALID_PARAMETER_TYPES: + raise ValueError(f"type {parameter_type} is not allowd to use in Parameter Extractor node.") + + if parameter_type == _OLD_BOOL_TYPE_NAME: + return SegmentType.BOOLEAN + elif parameter_type == _OLD_SELECT_TYPE_NAME: + return SegmentType.STRING + return SegmentType(parameter_type) class _ParameterConfigError(Exception): @@ -17,7 +53,7 @@ class ParameterConfig(BaseModel): """ name: str - type: Literal["string", "number", "bool", "select", "array[string]", "array[number]", "array[object]"] + type: Annotated[SegmentType, BeforeValidator(_validate_type)] options: Optional[list[str]] = None description: str required: bool @@ -32,17 +68,20 @@ class ParameterConfig(BaseModel): return str(value) def is_array_type(self) -> bool: - return self.type in ("array[string]", "array[number]", "array[object]") + return self.type.is_array_type() - def element_type(self) -> Literal["string", "number", "object"]: - if self.type == "array[number]": - return "number" - elif self.type == "array[string]": - return "string" - elif self.type == "array[object]": - return "object" - else: - raise _ParameterConfigError(f"{self.type} is not array type.") + def element_type(self) -> SegmentType: + """Return the element type of the parameter. + + Raises a ValueError if the parameter's type is not an array type. + """ + element_type = self.type.element_type() + # At this point, self.type is guaranteed to be one of `ARRAY_STRING`, + # `ARRAY_NUMBER`, `ARRAY_OBJECT`, or `ARRAY_BOOLEAN`. + # + # See: _VALID_PARAMETER_TYPES for reference. 
+ assert element_type is not None, f"the element type should not be None, {self.type=}" + return element_type class ParameterExtractorNodeData(BaseNodeData): @@ -74,16 +113,18 @@ class ParameterExtractorNodeData(BaseNodeData): for parameter in self.parameters: parameter_schema: dict[str, Any] = {"description": parameter.description} - if parameter.type in {"string", "select"}: + if parameter.type == SegmentType.STRING: parameter_schema["type"] = "string" - elif parameter.type.startswith("array"): + elif parameter.type.is_array_type(): parameter_schema["type"] = "array" - nested_type = parameter.type[6:-1] - parameter_schema["items"] = {"type": nested_type} + element_type = parameter.type.element_type() + if element_type is None: + raise AssertionError("element type should not be None.") + parameter_schema["items"] = {"type": element_type.value} else: parameter_schema["type"] = parameter.type - if parameter.type == "select": + if parameter.options: parameter_schema["enum"] = parameter.options parameters["properties"][parameter.name] = parameter_schema diff --git a/api/core/workflow/nodes/parameter_extractor/exc.py b/api/core/workflow/nodes/parameter_extractor/exc.py index 6511aba185..247518cf20 100644 --- a/api/core/workflow/nodes/parameter_extractor/exc.py +++ b/api/core/workflow/nodes/parameter_extractor/exc.py @@ -1,3 +1,8 @@ +from typing import Any + +from core.variables.types import SegmentType + + class ParameterExtractorNodeError(ValueError): """Base error for ParameterExtractorNode.""" @@ -48,3 +53,23 @@ class InvalidArrayValueError(ParameterExtractorNodeError): class InvalidModelModeError(ParameterExtractorNodeError): """Raised when the model mode is invalid.""" + + +class InvalidValueTypeError(ParameterExtractorNodeError): + def __init__( + self, + /, + parameter_name: str, + expected_type: SegmentType, + actual_type: SegmentType | None, + value: Any, + ) -> None: + message = ( + f"Invalid value for parameter {parameter_name}, expected segment type: {expected_type}, " + f"actual_type: {actual_type}, python_type: {type(value)}, value: {value}" + ) + super().__init__(message) + self.parameter_name = parameter_name + self.expected_type = expected_type + self.actual_type = actual_type + self.value = value diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index 45c5e0a62c..3dcde5ad81 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -1,3 +1,4 @@ +import contextlib import json import logging import uuid @@ -25,7 +26,7 @@ from core.prompt.advanced_prompt_transform import AdvancedPromptTransform from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_message_util import PromptMessageUtil -from core.variables.types import SegmentType +from core.variables.types import ArrayValidation, SegmentType from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus @@ -38,16 +39,13 @@ from factories.variable_factory import build_segment_with_type from .entities import ParameterExtractorNodeData from .exc import ( - InvalidArrayValueError, - 
InvalidBoolValueError, InvalidInvokeResultError, InvalidModelModeError, InvalidModelTypeError, InvalidNumberOfParametersError, - InvalidNumberValueError, InvalidSelectValueError, - InvalidStringValueError, InvalidTextContentTypeError, + InvalidValueTypeError, ModelSchemaNotFoundError, ParameterExtractorNodeError, RequiredParameterMissingError, @@ -548,9 +546,6 @@ class ParameterExtractorNode(BaseNode): return prompt_messages def _validate_result(self, data: ParameterExtractorNodeData, result: dict) -> dict: - """ - Validate result. - """ if len(data.parameters) != len(result): raise InvalidNumberOfParametersError("Invalid number of parameters") @@ -558,101 +553,106 @@ class ParameterExtractorNode(BaseNode): if parameter.required and parameter.name not in result: raise RequiredParameterMissingError(f"Parameter {parameter.name} is required") - if parameter.type == "select" and parameter.options and result.get(parameter.name) not in parameter.options: - raise InvalidSelectValueError(f"Invalid `select` value for parameter {parameter.name}") - - if parameter.type == "number" and not isinstance(result.get(parameter.name), int | float): - raise InvalidNumberValueError(f"Invalid `number` value for parameter {parameter.name}") - - if parameter.type == "bool" and not isinstance(result.get(parameter.name), bool): - raise InvalidBoolValueError(f"Invalid `bool` value for parameter {parameter.name}") - - if parameter.type == "string" and not isinstance(result.get(parameter.name), str): - raise InvalidStringValueError(f"Invalid `string` value for parameter {parameter.name}") - - if parameter.type.startswith("array"): - parameters = result.get(parameter.name) - if not isinstance(parameters, list): - raise InvalidArrayValueError(f"Invalid `array` value for parameter {parameter.name}") - nested_type = parameter.type[6:-1] - for item in parameters: - if nested_type == "number" and not isinstance(item, int | float): - raise InvalidArrayValueError(f"Invalid `array[number]` value for parameter {parameter.name}") - if nested_type == "string" and not isinstance(item, str): - raise InvalidArrayValueError(f"Invalid `array[string]` value for parameter {parameter.name}") - if nested_type == "object" and not isinstance(item, dict): - raise InvalidArrayValueError(f"Invalid `array[object]` value for parameter {parameter.name}") + param_value = result.get(parameter.name) + if not parameter.type.is_valid(param_value, array_validation=ArrayValidation.ALL): + inferred_type = SegmentType.infer_segment_type(param_value) + raise InvalidValueTypeError( + parameter_name=parameter.name, + expected_type=parameter.type, + actual_type=inferred_type, + value=param_value, + ) + if parameter.type == SegmentType.STRING and parameter.options: + if param_value not in parameter.options: + raise InvalidSelectValueError(f"Invalid `select` value for parameter {parameter.name}") return result + @staticmethod + def _transform_number(value: int | float | str | bool) -> int | float | None: + """ + Attempts to transform the input into an integer or float. + + Returns: + int or float: The transformed number if the conversion is successful. + None: If the transformation fails. + + Note: + Boolean values `True` and `False` are converted to integers `1` and `0`, respectively. + This behavior ensures compatibility with existing workflows that may use boolean types as integers. + """ + if isinstance(value, bool): + return int(value) + elif isinstance(value, (int, float)): + return value + elif not isinstance(value, str): + return None + if "." 
in value: + try: + return float(value) + except ValueError: + return None + else: + try: + return int(value) + except ValueError: + return None + def _transform_result(self, data: ParameterExtractorNodeData, result: dict) -> dict: """ Transform result into standard format. """ - transformed_result = {} + transformed_result: dict[str, Any] = {} for parameter in data.parameters: if parameter.name in result: + param_value = result[parameter.name] # transform value - if parameter.type == "number": - if isinstance(result[parameter.name], int | float): - transformed_result[parameter.name] = result[parameter.name] - elif isinstance(result[parameter.name], str): - try: - if "." in result[parameter.name]: - result[parameter.name] = float(result[parameter.name]) - else: - result[parameter.name] = int(result[parameter.name]) - except ValueError: - pass - else: - pass - # TODO: bool is not supported in the current version - # elif parameter.type == 'bool': - # if isinstance(result[parameter.name], bool): - # transformed_result[parameter.name] = bool(result[parameter.name]) - # elif isinstance(result[parameter.name], str): - # if result[parameter.name].lower() in ['true', 'false']: - # transformed_result[parameter.name] = bool(result[parameter.name].lower() == 'true') - # elif isinstance(result[parameter.name], int): - # transformed_result[parameter.name] = bool(result[parameter.name]) - elif parameter.type in {"string", "select"}: - if isinstance(result[parameter.name], str): - transformed_result[parameter.name] = result[parameter.name] + if parameter.type == SegmentType.NUMBER: + transformed = self._transform_number(param_value) + if transformed is not None: + transformed_result[parameter.name] = transformed + elif parameter.type == SegmentType.BOOLEAN: + if isinstance(result[parameter.name], (bool, int)): + transformed_result[parameter.name] = bool(result[parameter.name]) + # elif isinstance(result[parameter.name], str): + # if result[parameter.name].lower() in ["true", "false"]: + # transformed_result[parameter.name] = bool(result[parameter.name].lower() == "true") + elif parameter.type == SegmentType.STRING: + if isinstance(param_value, str): + transformed_result[parameter.name] = param_value elif parameter.is_array_type(): - if isinstance(result[parameter.name], list): + if isinstance(param_value, list): nested_type = parameter.element_type() assert nested_type is not None segment_value = build_segment_with_type(segment_type=SegmentType(parameter.type), value=[]) transformed_result[parameter.name] = segment_value - for item in result[parameter.name]: - if nested_type == "number": - if isinstance(item, int | float): - segment_value.value.append(item) - elif isinstance(item, str): - try: - if "." 
in item: - segment_value.value.append(float(item)) - else: - segment_value.value.append(int(item)) - except ValueError: - pass - elif nested_type == "string": + for item in param_value: + if nested_type == SegmentType.NUMBER: + transformed = self._transform_number(item) + if transformed is not None: + segment_value.value.append(transformed) + elif nested_type == SegmentType.STRING: if isinstance(item, str): segment_value.value.append(item) - elif nested_type == "object": + elif nested_type == SegmentType.OBJECT: if isinstance(item, dict): segment_value.value.append(item) + elif nested_type == SegmentType.BOOLEAN: + if isinstance(item, bool): + segment_value.value.append(item) if parameter.name not in transformed_result: - if parameter.type == "number": - transformed_result[parameter.name] = 0 - elif parameter.type == "bool": - transformed_result[parameter.name] = False - elif parameter.type in {"string", "select"}: - transformed_result[parameter.name] = "" - elif parameter.type.startswith("array"): + if parameter.type.is_array_type(): transformed_result[parameter.name] = build_segment_with_type( segment_type=SegmentType(parameter.type), value=[] ) + elif parameter.type in (SegmentType.STRING, SegmentType.SECRET): + transformed_result[parameter.name] = "" + elif parameter.type == SegmentType.NUMBER: + transformed_result[parameter.name] = 0 + elif parameter.type == SegmentType.BOOLEAN: + transformed_result[parameter.name] = False + else: + raise AssertionError("this statement should be unreachable.") return transformed_result @@ -666,10 +666,8 @@ class ParameterExtractorNode(BaseNode): if result[idx] == "{" or result[idx] == "[": json_str = extract_json(result[idx:]) if json_str: - try: + with contextlib.suppress(Exception): return cast(dict, json.loads(json_str)) - except Exception: - pass logger.info("extra error: %s", result) return None @@ -686,10 +684,9 @@ class ParameterExtractorNode(BaseNode): if result[idx] == "{" or result[idx] == "[": json_str = extract_json(result[idx:]) if json_str: - try: + with contextlib.suppress(Exception): return cast(dict, json.loads(json_str)) - except Exception: - pass + logger.info("extra error: %s", result) return None diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py index 51383fa588..321d280b1f 100644 --- a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -2,6 +2,7 @@ from collections.abc import Callable, Mapping, Sequence from typing import TYPE_CHECKING, Any, Optional, TypeAlias from core.variables import SegmentType, Variable +from core.variables.segments import BooleanSegment from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities.node_entities import NodeRunResult @@ -158,8 +159,8 @@ class VariableAssignerNode(BaseNode): def get_zero_value(t: SegmentType): # TODO(QuantumGhost): this should be a method of `SegmentType`. 
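+    # Illustrative zero values produced by the match below (a sketch, assuming
+    # the variable_factory builders wrap plain Python values in segments):
+    #   get_zero_value(SegmentType.STRING)        -> segment wrapping ""
+    #   get_zero_value(SegmentType.BOOLEAN)       -> BooleanSegment(value=False)
+    #   get_zero_value(SegmentType.ARRAY_BOOLEAN) -> empty array-of-boolean segment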
match t: - case SegmentType.ARRAY_OBJECT | SegmentType.ARRAY_STRING | SegmentType.ARRAY_NUMBER: - return variable_factory.build_segment([]) + case SegmentType.ARRAY_OBJECT | SegmentType.ARRAY_STRING | SegmentType.ARRAY_NUMBER | SegmentType.ARRAY_BOOLEAN: + return variable_factory.build_segment_with_type(t, []) case SegmentType.OBJECT: return variable_factory.build_segment({}) case SegmentType.STRING: @@ -170,5 +171,7 @@ def get_zero_value(t: SegmentType): return variable_factory.build_segment(0.0) case SegmentType.NUMBER: return variable_factory.build_segment(0) + case SegmentType.BOOLEAN: + return BooleanSegment(value=False) case _: raise VariableOperatorNodeError(f"unsupported variable type: {t}") diff --git a/api/core/workflow/nodes/variable_assigner/v2/constants.py b/api/core/workflow/nodes/variable_assigner/v2/constants.py index 7f760e5baa..1a4b81c39c 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/constants.py +++ b/api/core/workflow/nodes/variable_assigner/v2/constants.py @@ -4,9 +4,11 @@ from core.variables import SegmentType EMPTY_VALUE_MAPPING = { SegmentType.STRING: "", SegmentType.NUMBER: 0, + SegmentType.BOOLEAN: False, SegmentType.OBJECT: {}, SegmentType.ARRAY_ANY: [], SegmentType.ARRAY_STRING: [], SegmentType.ARRAY_NUMBER: [], SegmentType.ARRAY_OBJECT: [], + SegmentType.ARRAY_BOOLEAN: [], } diff --git a/api/core/workflow/nodes/variable_assigner/v2/helpers.py b/api/core/workflow/nodes/variable_assigner/v2/helpers.py index 7a20975b15..324f23a900 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/helpers.py +++ b/api/core/workflow/nodes/variable_assigner/v2/helpers.py @@ -16,28 +16,15 @@ def is_operation_supported(*, variable_type: SegmentType, operation: Operation): SegmentType.NUMBER, SegmentType.INTEGER, SegmentType.FLOAT, + SegmentType.BOOLEAN, } case Operation.ADD | Operation.SUBTRACT | Operation.MULTIPLY | Operation.DIVIDE: # Only number variable can be added, subtracted, multiplied or divided return variable_type in {SegmentType.NUMBER, SegmentType.INTEGER, SegmentType.FLOAT} - case Operation.APPEND | Operation.EXTEND: + case Operation.APPEND | Operation.EXTEND | Operation.REMOVE_FIRST | Operation.REMOVE_LAST: # Only array variable can be appended or extended - return variable_type in { - SegmentType.ARRAY_ANY, - SegmentType.ARRAY_OBJECT, - SegmentType.ARRAY_STRING, - SegmentType.ARRAY_NUMBER, - SegmentType.ARRAY_FILE, - } - case Operation.REMOVE_FIRST | Operation.REMOVE_LAST: # Only array variable can have elements removed - return variable_type in { - SegmentType.ARRAY_ANY, - SegmentType.ARRAY_OBJECT, - SegmentType.ARRAY_STRING, - SegmentType.ARRAY_NUMBER, - SegmentType.ARRAY_FILE, - } + return variable_type.is_array_type() case _: return False @@ -50,7 +37,7 @@ def is_variable_input_supported(*, operation: Operation): def is_constant_input_supported(*, variable_type: SegmentType, operation: Operation): match variable_type: - case SegmentType.STRING | SegmentType.OBJECT: + case SegmentType.STRING | SegmentType.OBJECT | SegmentType.BOOLEAN: return operation in {Operation.OVER_WRITE, Operation.SET} case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT: return operation in { @@ -72,6 +59,9 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va case SegmentType.STRING: return isinstance(value, str) + case SegmentType.BOOLEAN: + return isinstance(value, bool) + case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT: if not isinstance(value, int | float): return False @@ -91,6 +81,8 @@ def 
is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va return isinstance(value, int | float) case SegmentType.ARRAY_OBJECT if operation == Operation.APPEND: return isinstance(value, dict) + case SegmentType.ARRAY_BOOLEAN if operation == Operation.APPEND: + return isinstance(value, bool) # Array & Extend / Overwrite case SegmentType.ARRAY_ANY if operation in {Operation.EXTEND, Operation.OVER_WRITE}: @@ -101,6 +93,8 @@ def is_input_value_valid(*, variable_type: SegmentType, operation: Operation, va return isinstance(value, list) and all(isinstance(item, int | float) for item in value) case SegmentType.ARRAY_OBJECT if operation in {Operation.EXTEND, Operation.OVER_WRITE}: return isinstance(value, list) and all(isinstance(item, dict) for item in value) + case SegmentType.ARRAY_BOOLEAN if operation in {Operation.EXTEND, Operation.OVER_WRITE}: + return isinstance(value, list) and all(isinstance(item, bool) for item in value) case _: return False diff --git a/api/core/workflow/utils/condition/entities.py b/api/core/workflow/utils/condition/entities.py index 56871a15d8..77a214571a 100644 --- a/api/core/workflow/utils/condition/entities.py +++ b/api/core/workflow/utils/condition/entities.py @@ -45,5 +45,5 @@ class SubVariableCondition(BaseModel): class Condition(BaseModel): variable_selector: list[str] comparison_operator: SupportedComparisonOperator - value: str | Sequence[str] | None = None + value: str | Sequence[str] | bool | None = None sub_variable_condition: SubVariableCondition | None = None diff --git a/api/core/workflow/utils/condition/processor.py b/api/core/workflow/utils/condition/processor.py index 9795387788..7efd1acbf1 100644 --- a/api/core/workflow/utils/condition/processor.py +++ b/api/core/workflow/utils/condition/processor.py @@ -1,13 +1,27 @@ +import json from collections.abc import Sequence -from typing import Any, Literal +from typing import Any, Literal, Union from core.file import FileAttribute, file_manager from core.variables import ArrayFileSegment +from core.variables.segments import ArrayBooleanSegment, BooleanSegment from core.workflow.entities.variable_pool import VariablePool from .entities import Condition, SubCondition, SupportedComparisonOperator +def _convert_to_bool(value: Any) -> bool: + if isinstance(value, int): + return bool(value) + + if isinstance(value, str): + loaded = json.loads(value) + if isinstance(loaded, (int, bool)): + return bool(loaded) + + raise TypeError(f"unexpected value: type={type(value)}, value={value}") + + class ConditionProcessor: def process_conditions( self, @@ -48,9 +62,16 @@ class ConditionProcessor: ) else: actual_value = variable.value if variable else None - expected_value = condition.value + expected_value: str | Sequence[str] | bool | list[bool] | None = condition.value if isinstance(expected_value, str): expected_value = variable_pool.convert_template(expected_value).text + # Here we need to explicitly convert the input string to a boolean. + if isinstance(variable, (BooleanSegment, ArrayBooleanSegment)) and expected_value is not None: + # The following two lines are for compatibility with existing workflows.
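+                # Illustrative conversions (JSON-style inputs; anything else raises):
+                #   _convert_to_bool("true") -> True
+                #   _convert_to_bool("0")    -> False
+                #   _convert_to_bool(1)      -> True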
+ if isinstance(expected_value, list): + expected_value = [_convert_to_bool(i) for i in expected_value] + else: + expected_value = _convert_to_bool(expected_value) input_conditions.append( { "actual_value": actual_value, @@ -77,7 +98,7 @@ def _evaluate_condition( *, operator: SupportedComparisonOperator, value: Any, - expected: str | Sequence[str] | None, + expected: Union[str, Sequence[str], bool | Sequence[bool], None], ) -> bool: match operator: case "contains": @@ -130,7 +151,7 @@ def _assert_contains(*, value: Any, expected: Any) -> bool: if not value: return False - if not isinstance(value, str | list): + if not isinstance(value, (str, list)): raise ValueError("Invalid actual value type: string or array") if expected not in value: @@ -142,7 +163,7 @@ def _assert_not_contains(*, value: Any, expected: Any) -> bool: if not value: return True - if not isinstance(value, str | list): + if not isinstance(value, (str, list)): raise ValueError("Invalid actual value type: string or array") if expected in value: @@ -178,8 +199,8 @@ def _assert_is(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, str): - raise ValueError("Invalid actual value type: string") + if not isinstance(value, (str, bool)): + raise ValueError("Invalid actual value type: string or boolean") if value != expected: return False @@ -190,8 +211,8 @@ def _assert_is_not(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, str): - raise ValueError("Invalid actual value type: string") + if not isinstance(value, (str, bool)): + raise ValueError("Invalid actual value type: string or boolean") if value == expected: return False @@ -214,10 +235,13 @@ def _assert_equal(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): - raise ValueError("Invalid actual value type: number") + if not isinstance(value, (int, float, bool)): + raise ValueError("Invalid actual value type: number or boolean") - if isinstance(value, int): + # Handle boolean comparison + if isinstance(value, bool): + expected = bool(expected) + elif isinstance(value, int): expected = int(expected) else: expected = float(expected) @@ -231,10 +255,13 @@ def _assert_not_equal(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): - raise ValueError("Invalid actual value type: number") + if not isinstance(value, (int, float, bool)): + raise ValueError("Invalid actual value type: number or boolean") - if isinstance(value, int): + # Handle boolean comparison + if isinstance(value, bool): + expected = bool(expected) + elif isinstance(value, int): expected = int(expected) else: expected = float(expected) @@ -248,7 +275,7 @@ def _assert_greater_than(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): + if not isinstance(value, (int, float)): raise ValueError("Invalid actual value type: number") if isinstance(value, int): @@ -265,7 +292,7 @@ def _assert_less_than(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): + if not isinstance(value, (int, float)): raise ValueError("Invalid actual value type: number") if isinstance(value, int): @@ -282,7 +309,7 @@ def _assert_greater_than_or_equal(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): + if not isinstance(value, (int, float)): raise ValueError("Invalid 
actual value type: number") if isinstance(value, int): @@ -299,7 +326,7 @@ def _assert_less_than_or_equal(*, value: Any, expected: Any) -> bool: if value is None: return False - if not isinstance(value, int | float): + if not isinstance(value, (int, float)): raise ValueError("Invalid actual value type: number") if isinstance(value, int): diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index da147fe895..ddef26faaf 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -31,8 +31,8 @@ if [[ "${MODE}" == "worker" ]]; then fi exec celery -A app.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \ - --max-tasks-per-child ${MAX_TASK_PRE_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \ - -Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion,plugin,workflow_storage} + --max-tasks-per-child ${MAX_TASKS_PER_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \ + -Q ${CELERY_QUEUES:-dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation} elif [[ "${MODE}" == "beat" ]]; then exec celery -A app.celery beat --loglevel ${LOG_LEVEL:-INFO} diff --git a/api/events/event_handlers/clean_when_document_deleted.py b/api/events/event_handlers/clean_when_document_deleted.py index 00a66f50ad..bbc913b7cf 100644 --- a/api/events/event_handlers/clean_when_document_deleted.py +++ b/api/events/event_handlers/clean_when_document_deleted.py @@ -8,4 +8,6 @@ def handle(sender, **kwargs): dataset_id = kwargs.get("dataset_id") doc_form = kwargs.get("doc_form") file_id = kwargs.get("file_id") + assert dataset_id is not None + assert doc_form is not None clean_document_task.delay(document_id, dataset_id, doc_form, file_id) diff --git a/api/events/event_handlers/create_document_index.py b/api/events/event_handlers/create_document_index.py index c607161e2a..8778f5cafe 100644 --- a/api/events/event_handlers/create_document_index.py +++ b/api/events/event_handlers/create_document_index.py @@ -1,3 +1,4 @@ +import contextlib import logging import time @@ -10,6 +11,8 @@ from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from models.dataset import Document +logger = logging.getLogger(__name__) + @document_index_created.connect def handle(sender, **kwargs): @@ -18,7 +21,7 @@ def handle(sender, **kwargs): documents = [] start_at = time.perf_counter() for document_id in document_ids: - logging.info(click.style(f"Start process document: {document_id}", fg="green")) + logger.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document) @@ -38,12 +41,11 @@ def handle(sender, **kwargs): db.session.add(document) db.session.commit() - try: - indexing_runner = IndexingRunner() - indexing_runner.run(documents) - end_at = time.perf_counter() - logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) - except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) - except Exception: - pass + with contextlib.suppress(Exception): + try: + indexing_runner = IndexingRunner() + indexing_runner.run(documents) + end_at = time.perf_counter() + logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) + except DocumentIsPausedError as ex: + logger.info(click.style(str(ex), fg="yellow")) diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index 2ed42c71ea..90eb524c93 100644 --- 
a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -85,6 +85,7 @@ def handle(sender: Message, **kwargs): values=_ProviderUpdateValues(last_used=current_time), description="basic_last_used_update", ) + logger.info("provider used, tenant_id=%s, provider_name=%s", tenant_id, provider_name) updates_to_perform.append(basic_update) # 2. Check if we need to deduct quota (system provider only) @@ -186,9 +187,11 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] if not updates_to_perform: return + updates_to_perform = sorted(updates_to_perform, key=lambda i: (i.filters.tenant_id, i.filters.provider_name)) + # Use SQLAlchemy's context manager for transaction management # This automatically handles commit/rollback - with Session(db.engine) as session: + with Session(db.engine) as session, session.begin(): # Use a single transaction for all updates for update_operation in updates_to_perform: filters = update_operation.filters @@ -212,10 +215,13 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] # Prepare values dict for SQLAlchemy update update_values = {} - if values.last_used is not None: - update_values["last_used"] = values.last_used + # updating `last_used` is removed for performance reasons. + # ref: https://github.com/langgenius/dify/issues/24526 if values.quota_used is not None: update_values["quota_used"] = values.quota_used + # Skip the current update operation if no updates are required. + if not update_values: + continue # Build and execute the update statement stmt = update(Provider).where(*where_conditions).values(**update_values) diff --git a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index a4d013ffc0..1024fd9ce6 100644 --- a/api/extensions/ext_blueprints.py +++ b/api/extensions/ext_blueprints.py @@ -29,7 +29,6 @@ def init_app(app: DifyApp): methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) - app.register_blueprint(web_bp) CORS( @@ -40,10 +39,13 @@ def init_app(app: DifyApp): methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], expose_headers=["X-Version", "X-Env"], ) - app.register_blueprint(console_app_bp) - CORS(files_bp, allow_headers=["Content-Type"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"]) + CORS( + files_bp, + allow_headers=["Content-Type"], + methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], + ) app.register_blueprint(files_bp) app.register_blueprint(inner_api_bp) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 198f60e554..fb5352ca8f 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -3,8 +3,8 @@ from datetime import timedelta from typing import Any, Optional import pytz -from celery import Celery, Task # type: ignore -from celery.schedules import crontab # type: ignore +from celery import Celery, Task +from celery.schedules import crontab from configs import dify_config from dify_app import DifyApp @@ -66,7 +66,6 @@ def init_app(app: DifyApp) -> Celery: task_cls=FlaskTask, broker=dify_config.CELERY_BROKER_URL, backend=dify_config.CELERY_BACKEND, - task_ignore_result=True, ) celery_app.conf.update( @@ -77,6 +76,7 @@ def init_app(app: DifyApp) -> Celery: worker_task_log_format=dify_config.LOG_FORMAT, worker_hijack_root_logger=False, timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"), + task_ignore_result=True, ) # Apply SSL configuration if
enabled @@ -145,13 +145,19 @@ def init_app(app: DifyApp) -> Celery: minutes=dify_config.QUEUE_MONITOR_INTERVAL if dify_config.QUEUE_MONITOR_INTERVAL else 30 ), } - if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: + if dify_config.ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK and dify_config.MARKETPLACE_ENABLED: imports.append("schedule.check_upgradable_plugin_task") beat_schedule["check_upgradable_plugin_task"] = { "task": "schedule.check_upgradable_plugin_task.check_upgradable_plugin_task", "schedule": crontab(minute="*/15"), } - + if dify_config.WORKFLOW_LOG_CLEANUP_ENABLED: + # 2:00 AM every day + imports.append("schedule.clean_workflow_runlogs_precise") + beat_schedule["clean_workflow_runlogs_precise"] = { + "task": "schedule.clean_workflow_runlogs_precise.clean_workflow_runlogs_precise", + "schedule": crontab(minute="0", hour="2"), + } celery_app.conf.update(beat_schedule=beat_schedule, imports=imports) return celery_app diff --git a/api/extensions/ext_database.py b/api/extensions/ext_database.py index 93842a3036..b32616b172 100644 --- a/api/extensions/ext_database.py +++ b/api/extensions/ext_database.py @@ -1,6 +1,55 @@ +import logging + +import gevent +from sqlalchemy import event +from sqlalchemy.pool import Pool + from dify_app import DifyApp from models import db +logger = logging.getLogger(__name__) + +# Global flag to avoid duplicate registration of event listener +_GEVENT_COMPATIBILITY_SETUP: bool = False + + +def _safe_rollback(connection) -> None: + """Safely rollback database connection. + + Args: + connection: Database connection object + """ + try: + connection.rollback() + except Exception: # pylint: disable=broad-exception-caught + logger.exception("Failed to rollback connection") + + +def _setup_gevent_compatibility() -> None: + global _GEVENT_COMPATIBILITY_SETUP # pylint: disable=global-statement + + # Avoid duplicate registration + if _GEVENT_COMPATIBILITY_SETUP: + return + + @event.listens_for(Pool, "reset") + def _safe_reset(dbapi_connection, connection_record, reset_state) -> None: # pylint: disable=unused-argument + if reset_state.terminate_only: + return + + # Safe rollback for connection + try: + hub = gevent.get_hub() + if hasattr(hub, "loop") and getattr(hub.loop, "in_callback", False): + gevent.spawn_later(0, lambda: _safe_rollback(dbapi_connection)) + else: + _safe_rollback(dbapi_connection) + except (AttributeError, ImportError): + _safe_rollback(dbapi_connection) + + _GEVENT_COMPATIBILITY_SETUP = True + def init_app(app: DifyApp): db.init_app(app) + _setup_gevent_compatibility() diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 9b18e25eaa..cd01a31068 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -20,6 +20,10 @@ login_manager = flask_login.LoginManager() @login_manager.request_loader def load_user_from_request(request_from_flask_login): """Load user based on the request.""" + # Skip authentication for documentation endpoints + if dify_config.SWAGGER_UI_ENABLED and request.path.endswith((dify_config.SWAGGER_UI_PATH, "/swagger.json")): + return None + auth_header = request.headers.get("Authorization", "") auth_token: str | None = None if auth_header: diff --git a/api/extensions/ext_mail.py b/api/extensions/ext_mail.py index fe05138196..58ab023559 100644 --- a/api/extensions/ext_mail.py +++ b/api/extensions/ext_mail.py @@ -6,6 +6,8 @@ from flask import Flask from configs import dify_config from dify_app import DifyApp +logger = logging.getLogger(__name__) + class Mail: def __init__(self): @@ -18,7 +20,7 @@ 
class Mail: def init_app(self, app: Flask): mail_type = dify_config.MAIL_TYPE if not mail_type: - logging.warning("MAIL_TYPE is not set") + logger.warning("MAIL_TYPE is not set") return if dify_config.MAIL_DEFAULT_SEND_FROM: diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index a8f025a750..7313d8e3c7 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -1,4 +1,5 @@ import atexit +import contextlib import logging import os import platform @@ -7,7 +8,7 @@ import sys from typing import Union import flask -from celery.signals import worker_init # type: ignore +from celery.signals import worker_init from flask_login import user_loaded_from_request, user_logged_in # type: ignore from configs import dify_config @@ -15,6 +16,8 @@ from dify_app import DifyApp from libs.helper import extract_tenant_id from models import Account, EndUser +logger = logging.getLogger(__name__) + @user_logged_in.connect @user_loaded_from_request.connect @@ -32,7 +35,7 @@ def on_user_loaded(_sender, user: Union["Account", "EndUser"]): current_span.set_attribute("service.tenant.id", tenant_id) current_span.set_attribute("service.user.id", user.id) except Exception: - logging.exception("Error setting tenant and user attributes") + logger.exception("Error setting tenant and user attributes") pass @@ -73,12 +76,12 @@ def init_app(app: DifyApp): attributes[SpanAttributes.HTTP_METHOD] = str(request.method) _http_response_counter.add(1, attributes) except Exception: - logging.exception("Error setting status and attributes") + logger.exception("Error setting status and attributes") pass instrumentor = FlaskInstrumentor() if dify_config.DEBUG: - logging.info("Initializing Flask instrumentor") + logger.info("Initializing Flask instrumentor") instrumentor.instrument_app(app, response_hook=response_hook) def init_sqlalchemy_instrumentor(app: DifyApp): @@ -106,7 +109,7 @@ def init_app(app: DifyApp): """Custom logging handler that creates spans for logging.exception() calls""" def emit(self, record: logging.LogRecord): - try: + with contextlib.suppress(Exception): if record.exc_info: tracer = get_tracer_provider().get_tracer("dify.exception.logging") with tracer.start_as_current_span( @@ -126,9 +129,6 @@ def init_app(app: DifyApp): if record.exc_info[0]: span.set_attribute("exception.type", record.exc_info[0].__name__) - except Exception: - pass - from opentelemetry import trace from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter as GRPCMetricExporter from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCSpanExporter @@ -255,5 +255,5 @@ def init_celery_worker(*args, **kwargs): tracer_provider = get_tracer_provider() metric_provider = get_meter_provider() if dify_config.DEBUG: - logging.info("Initializing OpenTelemetry for Celery worker") + logger.info("Initializing OpenTelemetry for Celery worker") CeleryInstrumentor(tracer_provider=tracer_provider, meter_provider=metric_provider).instrument() diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index f5f544679f..1b22886fc1 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -3,7 +3,7 @@ import logging import ssl from collections.abc import Callable from datetime import timedelta -from typing import TYPE_CHECKING, Any, Union +from typing import TYPE_CHECKING, Any, Optional, Union import redis from redis import RedisError @@ -246,7 +246,7 @@ def init_app(app: DifyApp): app.extensions["redis"] = redis_client -def 
redis_fallback(default_return: Any = None): +def redis_fallback(default_return: Optional[Any] = None): """ decorator to handle Redis operation exceptions and return a default value when Redis is unavailable. diff --git a/api/extensions/ext_request_logging.py b/api/extensions/ext_request_logging.py index 7c69483e0f..f7263e18c4 100644 --- a/api/extensions/ext_request_logging.py +++ b/api/extensions/ext_request_logging.py @@ -8,7 +8,7 @@ from flask.signals import request_finished, request_started from configs import dify_config -_logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) def _is_content_type_json(content_type: str) -> bool: @@ -20,20 +20,20 @@ def _is_content_type_json(content_type: str) -> bool: def _log_request_started(_sender, **_extra): """Log the start of a request.""" - if not _logger.isEnabledFor(logging.DEBUG): + if not logger.isEnabledFor(logging.DEBUG): return request = flask.request if not (_is_content_type_json(request.content_type) and request.data): - _logger.debug("Received Request %s -> %s", request.method, request.path) + logger.debug("Received Request %s -> %s", request.method, request.path) return try: json_data = json.loads(request.data) except (TypeError, ValueError): - _logger.exception("Failed to parse JSON request") + logger.exception("Failed to parse JSON request") return formatted_json = json.dumps(json_data, ensure_ascii=False, indent=2) - _logger.debug( + logger.debug( "Received Request %s -> %s, Request Body:\n%s", request.method, request.path, @@ -43,21 +43,21 @@ def _log_request_started(_sender, **_extra): def _log_request_finished(_sender, response, **_extra): """Log the end of a request.""" - if not _logger.isEnabledFor(logging.DEBUG) or response is None: + if not logger.isEnabledFor(logging.DEBUG) or response is None: return if not _is_content_type_json(response.content_type): - _logger.debug("Response %s %s", response.status, response.content_type) + logger.debug("Response %s %s", response.status, response.content_type) return response_data = response.get_data(as_text=True) try: json_data = json.loads(response_data) except (TypeError, ValueError): - _logger.exception("Failed to parse JSON response") + logger.exception("Failed to parse JSON response") return formatted_json = json.dumps(json_data, ensure_ascii=False, indent=2) - _logger.debug( + logger.debug( "Response %s %s, Response Body:\n%s", response.status, response.content_type, diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py index d13393dd14..2960cde242 100644 --- a/api/extensions/ext_storage.py +++ b/api/extensions/ext_storage.py @@ -65,7 +65,7 @@ class Storage: from extensions.storage.volcengine_tos_storage import VolcengineTosStorage return VolcengineTosStorage - case StorageType.SUPBASE: + case StorageType.SUPABASE: from extensions.storage.supabase_storage import SupabaseStorage return SupabaseStorage diff --git a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py index 09ab37f42e..754c437fd7 100644 --- a/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py +++ b/api/extensions/storage/clickzetta_volume/clickzetta_volume_storage.py @@ -292,7 +292,6 @@ class ClickZettaVolumeStorage(BaseStorage): # Get the actual volume path (may include dify_km prefix) volume_path = self._get_volume_path(filename, dataset_id) - actual_filename = volume_path.split("/")[-1] if "/" in volume_path else volume_path # For User Volume, use the full path with 
dify_km prefix if volume_prefix == "USER VOLUME": diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py index d5d04f121b..2e0724f678 100644 --- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py +++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py @@ -7,7 +7,7 @@ import json import logging from dataclasses import asdict, dataclass -from datetime import datetime, timedelta +from datetime import datetime from enum import Enum from typing import Any, Optional @@ -185,7 +185,6 @@ class FileLifecycleManager: versions.append(current_metadata) # 获取历史版本 - version_pattern = f"{self._version_prefix}{filename}.v*" try: version_files = self._storage.scan(self._dataset_id or "", files=True) for file_path in version_files: @@ -331,7 +330,6 @@ class FileLifecycleManager: """ try: cleaned_count = 0 - cutoff_date = datetime.now() - timedelta(days=max_age_days) # 获取所有版本文件 try: diff --git a/api/extensions/storage/storage_type.py b/api/extensions/storage/storage_type.py index bc2d632159..baffa423b6 100644 --- a/api/extensions/storage/storage_type.py +++ b/api/extensions/storage/storage_type.py @@ -14,4 +14,4 @@ class StorageType(StrEnum): S3 = "s3" TENCENT_COS = "tencent-cos" VOLCENGINE_TOS = "volcengine-tos" - SUPBASE = "supabase" + SUPABASE = "supabase" diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 39ebd009d5..0274b6e89c 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -7,11 +7,13 @@ from core.file import File from core.variables.exc import VariableError from core.variables.segments import ( ArrayAnySegment, + ArrayBooleanSegment, ArrayFileSegment, ArrayNumberSegment, ArrayObjectSegment, ArraySegment, ArrayStringSegment, + BooleanSegment, FileSegment, FloatSegment, IntegerSegment, @@ -23,10 +25,12 @@ from core.variables.segments import ( from core.variables.types import SegmentType from core.variables.variables import ( ArrayAnyVariable, + ArrayBooleanVariable, ArrayFileVariable, ArrayNumberVariable, ArrayObjectVariable, ArrayStringVariable, + BooleanVariable, FileVariable, FloatVariable, IntegerVariable, @@ -49,17 +53,19 @@ class TypeMismatchError(Exception): # Define the constant SEGMENT_TO_VARIABLE_MAP = { - StringSegment: StringVariable, - IntegerSegment: IntegerVariable, - FloatSegment: FloatVariable, - ObjectSegment: ObjectVariable, - FileSegment: FileVariable, - ArrayStringSegment: ArrayStringVariable, + ArrayAnySegment: ArrayAnyVariable, + ArrayBooleanSegment: ArrayBooleanVariable, + ArrayFileSegment: ArrayFileVariable, ArrayNumberSegment: ArrayNumberVariable, ArrayObjectSegment: ArrayObjectVariable, - ArrayFileSegment: ArrayFileVariable, - ArrayAnySegment: ArrayAnyVariable, + ArrayStringSegment: ArrayStringVariable, + BooleanSegment: BooleanVariable, + FileSegment: FileVariable, + FloatSegment: FloatVariable, + IntegerSegment: IntegerVariable, NoneSegment: NoneVariable, + ObjectSegment: ObjectVariable, + StringSegment: StringVariable, } @@ -99,6 +105,8 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen mapping = dict(mapping) mapping["value_type"] = SegmentType.FLOAT result = FloatVariable.model_validate(mapping) + case SegmentType.BOOLEAN: + result = BooleanVariable.model_validate(mapping) case SegmentType.NUMBER if not isinstance(value, float | int): raise VariableError(f"invalid number value {value}") case SegmentType.OBJECT if isinstance(value, dict): @@ -109,6 +117,8 @@ def 
_build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen result = ArrayNumberVariable.model_validate(mapping) case SegmentType.ARRAY_OBJECT if isinstance(value, list): result = ArrayObjectVariable.model_validate(mapping) + case SegmentType.ARRAY_BOOLEAN if isinstance(value, list): + result = ArrayBooleanVariable.model_validate(mapping) case _: raise VariableError(f"not supported value type {value_type}") if result.size > dify_config.MAX_VARIABLE_SIZE: @@ -118,10 +128,6 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen return cast(Variable, result) -def infer_segment_type_from_value(value: Any, /) -> SegmentType: - return build_segment(value).value_type - - def build_segment(value: Any, /) -> Segment: # NOTE: If you have runtime type information available, consider using the `build_segment_with_type` # below @@ -129,6 +135,8 @@ def build_segment(value: Any, /) -> Segment: return NoneSegment() if isinstance(value, str): return StringSegment(value=value) + if isinstance(value, bool): + return BooleanSegment(value=value) if isinstance(value, int): return IntegerSegment(value=value) if isinstance(value, float): @@ -152,6 +160,8 @@ def build_segment(value: Any, /) -> Segment: return ArrayStringSegment(value=value) case SegmentType.NUMBER | SegmentType.INTEGER | SegmentType.FLOAT: return ArrayNumberSegment(value=value) + case SegmentType.BOOLEAN: + return ArrayBooleanSegment(value=value) case SegmentType.OBJECT: return ArrayObjectSegment(value=value) case SegmentType.FILE: @@ -170,6 +180,7 @@ _segment_factory: Mapping[SegmentType, type[Segment]] = { SegmentType.INTEGER: IntegerSegment, SegmentType.FLOAT: FloatSegment, SegmentType.FILE: FileSegment, + SegmentType.BOOLEAN: BooleanSegment, SegmentType.OBJECT: ObjectSegment, # Array types SegmentType.ARRAY_ANY: ArrayAnySegment, @@ -177,6 +188,7 @@ _segment_factory: Mapping[SegmentType, type[Segment]] = { SegmentType.ARRAY_NUMBER: ArrayNumberSegment, SegmentType.ARRAY_OBJECT: ArrayObjectSegment, SegmentType.ARRAY_FILE: ArrayFileSegment, + SegmentType.ARRAY_BOOLEAN: ArrayBooleanSegment, } @@ -225,6 +237,8 @@ def build_segment_with_type(segment_type: SegmentType, value: Any) -> Segment: return ArrayAnySegment(value=value) elif segment_type == SegmentType.ARRAY_STRING: return ArrayStringSegment(value=value) + elif segment_type == SegmentType.ARRAY_BOOLEAN: + return ArrayBooleanSegment(value=value) elif segment_type == SegmentType.ARRAY_NUMBER: return ArrayNumberSegment(value=value) elif segment_type == SegmentType.ARRAY_OBJECT: diff --git a/api/fields/annotation_fields.py b/api/fields/annotation_fields.py index 379dcc6d16..38835d5ac7 100644 --- a/api/fields/annotation_fields.py +++ b/api/fields/annotation_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from libs.helper import TimestampField @@ -11,6 +11,12 @@ annotation_fields = { # 'account': fields.Nested(simple_account_fields, allow_null=True) } + +def build_annotation_model(api_or_ns: Api | Namespace): + """Build the annotation model for the API or Namespace.""" + return api_or_ns.model("Annotation", annotation_fields) + + annotation_list_fields = { "data": fields.List(fields.Nested(annotation_fields)), } diff --git a/api/fields/api_based_extension_fields.py b/api/fields/api_based_extension_fields.py index a85d4a34db..a2dda1dc15 100644 --- a/api/fields/api_based_extension_fields.py +++ b/api/fields/api_based_extension_fields.py @@ -1,10 +1,10 @@ -from flask_restful import fields +from 
flask_restx import fields from libs.helper import TimestampField class HiddenAPIKey(fields.Raw): - def output(self, key, obj): + def output(self, key, obj, **kwargs): api_key = obj.api_key # If the length of the api_key is less than 8 characters, show the first and last characters if len(api_key) <= 8: diff --git a/api/fields/app_fields.py b/api/fields/app_fields.py index 1a5fcabf97..1f14d663b8 100644 --- a/api/fields/app_fields.py +++ b/api/fields/app_fields.py @@ -1,6 +1,6 @@ import json -from flask_restful import fields +from flask_restx import fields from fields.workflow_fields import workflow_partial_fields from libs.helper import AppIconUrlField, TimestampField diff --git a/api/fields/conversation_fields.py b/api/fields/conversation_fields.py index 370e8a5a58..ecc267cf38 100644 --- a/api/fields/conversation_fields.py +++ b/api/fields/conversation_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from fields.member_fields import simple_account_fields from libs.helper import TimestampField @@ -45,6 +45,12 @@ message_file_fields = { "upload_file_id": fields.String(default=None), } + +def build_message_file_model(api_or_ns: Api | Namespace): + """Build the message file fields for the API or Namespace.""" + return api_or_ns.model("MessageFile", message_file_fields) + + agent_thought_fields = { "id": fields.String, "chain_id": fields.String, @@ -209,3 +215,22 @@ conversation_infinite_scroll_pagination_fields = { "has_more": fields.Boolean, "data": fields.List(fields.Nested(simple_conversation_fields)), } + + +def build_conversation_infinite_scroll_pagination_model(api_or_ns: Api | Namespace): + """Build the conversation infinite scroll pagination model for the API or Namespace.""" + simple_conversation_model = build_simple_conversation_model(api_or_ns) + + copied_fields = conversation_infinite_scroll_pagination_fields.copy() + copied_fields["data"] = fields.List(fields.Nested(simple_conversation_model)) + return api_or_ns.model("ConversationInfiniteScrollPagination", copied_fields) + + +def build_conversation_delete_model(api_or_ns: Api | Namespace): + """Build the conversation delete model for the API or Namespace.""" + return api_or_ns.model("ConversationDelete", conversation_delete_fields) + + +def build_simple_conversation_model(api_or_ns: Api | Namespace): + """Build the simple conversation model for the API or Namespace.""" + return api_or_ns.model("SimpleConversation", simple_conversation_fields) diff --git a/api/fields/conversation_variable_fields.py b/api/fields/conversation_variable_fields.py index c5a0c9a49d..7d5e311591 100644 --- a/api/fields/conversation_variable_fields.py +++ b/api/fields/conversation_variable_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from libs.helper import TimestampField @@ -27,3 +27,19 @@ conversation_variable_infinite_scroll_pagination_fields = { "has_more": fields.Boolean, "data": fields.List(fields.Nested(conversation_variable_fields)), } + + +def build_conversation_variable_model(api_or_ns: Api | Namespace): + """Build the conversation variable model for the API or Namespace.""" + return api_or_ns.model("ConversationVariable", conversation_variable_fields) + + +def build_conversation_variable_infinite_scroll_pagination_model(api_or_ns: Api | Namespace): + """Build the conversation variable infinite scroll pagination model for the API or Namespace.""" + # Build the nested variable model first + conversation_variable_model = 
build_conversation_variable_model(api_or_ns) + + copied_fields = conversation_variable_infinite_scroll_pagination_fields.copy() + copied_fields["data"] = fields.List(fields.Nested(conversation_variable_model)) + + return api_or_ns.model("ConversationVariableInfiniteScrollPagination", copied_fields) diff --git a/api/fields/data_source_fields.py b/api/fields/data_source_fields.py index 071071376f..27ab505376 100644 --- a/api/fields/data_source_fields.py +++ b/api/fields/data_source_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from libs.helper import TimestampField @@ -24,8 +24,6 @@ integrate_notion_info_list_fields = { "notion_info": fields.List(fields.Nested(integrate_workspace_fields)), } -integrate_icon_fields = {"type": fields.String, "url": fields.String, "emoji": fields.String} - integrate_page_fields = { "page_name": fields.String, "page_id": fields.String, diff --git a/api/fields/dataset_fields.py b/api/fields/dataset_fields.py index 32a88cc5db..5a3082516e 100644 --- a/api/fields/dataset_fields.py +++ b/api/fields/dataset_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from libs.helper import TimestampField diff --git a/api/fields/document_fields.py b/api/fields/document_fields.py index 7fd43e8dbe..9be59f7454 100644 --- a/api/fields/document_fields.py +++ b/api/fields/document_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from fields.dataset_fields import dataset_fields from libs.helper import TimestampField diff --git a/api/fields/end_user_fields.py b/api/fields/end_user_fields.py index 99e529f9d1..ea43e3b5fd 100644 --- a/api/fields/end_user_fields.py +++ b/api/fields/end_user_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields simple_end_user_fields = { "id": fields.String, @@ -6,3 +6,7 @@ simple_end_user_fields = { "is_anonymous": fields.Boolean, "session_id": fields.String, } + + +def build_simple_end_user_model(api_or_ns: Api | Namespace): + return api_or_ns.model("SimpleEndUser", simple_end_user_fields) diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index 8b4839ef97..dd359e2f5f 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from libs.helper import TimestampField @@ -11,6 +11,19 @@ upload_config_fields = { "workflow_file_upload_limit": fields.Integer, } + +def build_upload_config_model(api_or_ns: Api | Namespace): + """Build the upload config model for the API or Namespace. + + Args: + api_or_ns: Flask-RestX Api or Namespace instance + + Returns: + The registered model + """ + return api_or_ns.model("UploadConfig", upload_config_fields) + + file_fields = { "id": fields.String, "name": fields.String, @@ -22,12 +35,37 @@ file_fields = { "preview_url": fields.String, } + +def build_file_model(api_or_ns: Api | Namespace): + """Build the file model for the API or Namespace. + + Args: + api_or_ns: Flask-RestX Api or Namespace instance + + Returns: + The registered model + """ + return api_or_ns.model("File", file_fields) + + remote_file_info_fields = { "file_type": fields.String(attribute="file_type"), "file_length": fields.Integer(attribute="file_length"), } +def build_remote_file_info_model(api_or_ns: Api | Namespace): + """Build the remote file info model for the API or Namespace. 
+ + Args: + api_or_ns: Flask-RestX Api or Namespace instance + + Returns: + The registered model + """ + return api_or_ns.model("RemoteFileInfo", remote_file_info_fields) + + file_fields_with_signed_url = { "id": fields.String, "name": fields.String, @@ -38,3 +76,15 @@ file_fields_with_signed_url = { "created_by": fields.String, "created_at": TimestampField, } + + +def build_file_with_signed_url_model(api_or_ns: Api | Namespace): + """Build the file with signed URL model for the API or Namespace. + + Args: + api_or_ns: Flask-RestX Api or Namespace instance + + Returns: + The registered model + """ + return api_or_ns.model("FileWithSignedUrl", file_fields_with_signed_url) diff --git a/api/fields/hit_testing_fields.py b/api/fields/hit_testing_fields.py index 9d67999ea4..75bdff1803 100644 --- a/api/fields/hit_testing_fields.py +++ b/api/fields/hit_testing_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from libs.helper import TimestampField diff --git a/api/fields/installed_app_fields.py b/api/fields/installed_app_fields.py index e0b3e340f6..16dd26a10e 100644 --- a/api/fields/installed_app_fields.py +++ b/api/fields/installed_app_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from libs.helper import AppIconUrlField, TimestampField diff --git a/api/fields/member_fields.py b/api/fields/member_fields.py index 8007b7e052..08e38a6931 100644 --- a/api/fields/member_fields.py +++ b/api/fields/member_fields.py @@ -1,8 +1,17 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from libs.helper import AvatarUrlField, TimestampField -simple_account_fields = {"id": fields.String, "name": fields.String, "email": fields.String} +simple_account_fields = { + "id": fields.String, + "name": fields.String, + "email": fields.String, +} + + +def build_simple_account_model(api_or_ns: Api | Namespace): + return api_or_ns.model("SimpleAccount", simple_account_fields) + account_fields = { "id": fields.String, diff --git a/api/fields/message_fields.py b/api/fields/message_fields.py index e6aebd810f..a419da2e18 100644 --- a/api/fields/message_fields.py +++ b/api/fields/message_fields.py @@ -1,11 +1,19 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from fields.conversation_fields import message_file_fields from libs.helper import TimestampField from .raws import FilesContainedField -feedback_fields = {"rating": fields.String} +feedback_fields = { + "rating": fields.String, +} + + +def build_feedback_model(api_or_ns: Api | Namespace): + """Build the feedback model for the API or Namespace.""" + return api_or_ns.model("Feedback", feedback_fields) + agent_thought_fields = { "id": fields.String, @@ -21,6 +29,12 @@ agent_thought_fields = { "files": fields.List(fields.String), } + +def build_agent_thought_model(api_or_ns: Api | Namespace): + """Build the agent thought model for the API or Namespace.""" + return api_or_ns.model("AgentThought", agent_thought_fields) + + retriever_resource_fields = { "id": fields.String, "message_id": fields.String, diff --git a/api/fields/raws.py b/api/fields/raws.py index 15ec16ab13..9bc6a12c78 100644 --- a/api/fields/raws.py +++ b/api/fields/raws.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from core.file import File diff --git a/api/fields/segment_fields.py b/api/fields/segment_fields.py index 4126c24598..2ff917d6bc 100644 --- a/api/fields/segment_fields.py +++ b/api/fields/segment_fields.py @@ 
-1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from libs.helper import TimestampField diff --git a/api/fields/tag_fields.py b/api/fields/tag_fields.py index 9af4fc57dd..d5b7c86a04 100644 --- a/api/fields/tag_fields.py +++ b/api/fields/tag_fields.py @@ -1,3 +1,12 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields -tag_fields = {"id": fields.String, "name": fields.String, "type": fields.String, "binding_count": fields.String} +dataset_tag_fields = { + "id": fields.String, + "name": fields.String, + "type": fields.String, + "binding_count": fields.String, +} + + +def build_dataset_tag_fields(api_or_ns: Api | Namespace): + return api_or_ns.model("DataSetTag", dataset_tag_fields) diff --git a/api/fields/workflow_app_log_fields.py b/api/fields/workflow_app_log_fields.py index 823c99ec6b..243efd817c 100644 --- a/api/fields/workflow_app_log_fields.py +++ b/api/fields/workflow_app_log_fields.py @@ -1,8 +1,8 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields -from fields.end_user_fields import simple_end_user_fields -from fields.member_fields import simple_account_fields -from fields.workflow_run_fields import workflow_run_for_log_fields +from fields.end_user_fields import build_simple_end_user_model, simple_end_user_fields +from fields.member_fields import build_simple_account_model, simple_account_fields +from fields.workflow_run_fields import build_workflow_run_for_log_model, workflow_run_for_log_fields from libs.helper import TimestampField workflow_app_log_partial_fields = { @@ -15,6 +15,24 @@ workflow_app_log_partial_fields = { "created_at": TimestampField, } + +def build_workflow_app_log_partial_model(api_or_ns: Api | Namespace): + """Build the workflow app log partial model for the API or Namespace.""" + workflow_run_model = build_workflow_run_for_log_model(api_or_ns) + simple_account_model = build_simple_account_model(api_or_ns) + simple_end_user_model = build_simple_end_user_model(api_or_ns) + + copied_fields = workflow_app_log_partial_fields.copy() + copied_fields["workflow_run"] = fields.Nested(workflow_run_model, attribute="workflow_run", allow_null=True) + copied_fields["created_by_account"] = fields.Nested( + simple_account_model, attribute="created_by_account", allow_null=True + ) + copied_fields["created_by_end_user"] = fields.Nested( + simple_end_user_model, attribute="created_by_end_user", allow_null=True + ) + return api_or_ns.model("WorkflowAppLogPartial", copied_fields) + + workflow_app_log_pagination_fields = { "page": fields.Integer, "limit": fields.Integer, @@ -22,3 +40,13 @@ workflow_app_log_pagination_fields = { "has_more": fields.Boolean, "data": fields.List(fields.Nested(workflow_app_log_partial_fields)), } + + +def build_workflow_app_log_pagination_model(api_or_ns: Api | Namespace): + """Build the workflow app log pagination model for the API or Namespace.""" + # Build the nested partial model first + workflow_app_log_partial_model = build_workflow_app_log_partial_model(api_or_ns) + + copied_fields = workflow_app_log_pagination_fields.copy() + copied_fields["data"] = fields.List(fields.Nested(workflow_app_log_partial_model)) + return api_or_ns.model("WorkflowAppLogPagination", copied_fields) diff --git a/api/fields/workflow_fields.py b/api/fields/workflow_fields.py index 930e59cc1c..f048d0f3b6 100644 --- a/api/fields/workflow_fields.py +++ b/api/fields/workflow_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import fields from core.helper 
import encrypter from core.variables import SecretVariable, SegmentType, Variable diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index a106728e9c..6462d8ce5a 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -1,4 +1,4 @@ -from flask_restful import fields +from flask_restx import Api, Namespace, fields from fields.end_user_fields import simple_end_user_fields from fields.member_fields import simple_account_fields @@ -17,6 +17,11 @@ workflow_run_for_log_fields = { "exceptions_count": fields.Integer, } + +def build_workflow_run_for_log_model(api_or_ns: Api | Namespace): + return api_or_ns.model("WorkflowRunForLog", workflow_run_for_log_fields) + + workflow_run_for_list_fields = { "id": fields.String, "version": fields.String, diff --git a/api/lazy_load_class.py b/api/lazy_load_class.py new file mode 100644 index 0000000000..dd3c2a16e8 --- /dev/null +++ b/api/lazy_load_class.py @@ -0,0 +1,11 @@ +from tests.integration_tests.utils.parent_class import ParentClass + + +class LazyLoadChildClass(ParentClass): + """Test lazy load child class for module import helper tests""" + + def __init__(self, name): + super().__init__(name) + + def get_name(self): + return self.name diff --git a/api/libs/external_api.py b/api/libs/external_api.py index 2070df3e55..a630a97fd6 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -1,119 +1,128 @@ import re import sys +from collections.abc import Mapping from typing import Any -from flask import current_app, got_request_exception -from flask_restful import Api, http_status_message -from werkzeug.datastructures import Headers +from flask import Blueprint, Flask, current_app, got_request_exception +from flask_restx import Api from werkzeug.exceptions import HTTPException +from werkzeug.http import HTTP_STATUS_CODES +from configs import dify_config from core.errors.error import AppInvokeQuotaExceededError -class ExternalApi(Api): - def handle_error(self, e): - """Error handler for the API transforms a raised exception into a Flask - response, with the appropriate HTTP status code and body. +def http_status_message(code): + return HTTP_STATUS_CODES.get(code, "") - :param e: the raised Exception object - :type e: Exception - """ +def register_external_error_handlers(api: Api) -> None: + @api.errorhandler(HTTPException) + def handle_http_exception(e: HTTPException): got_request_exception.send(current_app, exception=e) - headers = Headers() - if isinstance(e, HTTPException): - if e.response is not None: - resp = e.get_response() - return resp + # If Werkzeug already prepared a Response, just use it. + if getattr(e, "response", None) is not None: + return e.response - status_code = e.code - default_data = { - "code": re.sub(r"(?= 500: - exc_info: Any = sys.exc_info() - if exc_info[1] is None: - exc_info = None - current_app.log_exception(exc_info) - - if status_code == 406 and self.default_mediatype is None: - # if we are handling NotAcceptable (406), make sure that - # make_response uses a representation we support as the - # default mediatype (so that make_response doesn't throw - # another NotAcceptable error). 
- supported_mediatypes = list(self.representations.keys()) # only supported application/json - fallback_mediatype = supported_mediatypes[0] if supported_mediatypes else "text/plain" - data = {"code": "not_acceptable", "message": data.get("message")} - resp = self.make_response(data, status_code, headers, fallback_mediatype=fallback_mediatype) + # Payload per status + if status_code == 406 and api.default_mediatype is None: + data = {"code": "not_acceptable", "message": default_data["message"], "status": status_code} + return data, status_code, headers elif status_code == 400: - if isinstance(data.get("message"), dict): - param_key, param_value = list(data.get("message", {}).items())[0] - data = {"code": "invalid_param", "message": param_value, "params": param_key} + msg = default_data["message"] + if isinstance(msg, Mapping) and msg: + # Convert param errors like {"field": "reason"} into a friendly shape + param_key, param_value = next(iter(msg.items())) + data = { + "code": "invalid_param", + "message": str(param_value), + "params": param_key, + "status": status_code, + } else: - if "code" not in data: - data["code"] = "unknown" - - resp = self.make_response(data, status_code, headers) + data = {**default_data} + data.setdefault("code", "unknown") + return data, status_code, headers else: - if "code" not in data: - data["code"] = "unknown" + data = {**default_data} + data.setdefault("code", "unknown") + # If you need WWW-Authenticate for 401, add it to headers + if status_code == 401: + headers["WWW-Authenticate"] = 'Bearer realm="api"' + return data, status_code, headers - resp = self.make_response(data, status_code, headers) + @api.errorhandler(ValueError) + def handle_value_error(e: ValueError): + got_request_exception.send(current_app, exception=e) + status_code = 400 + data = {"code": "invalid_param", "message": str(e), "status": status_code} + return data, status_code - if status_code == 401: - resp = self.unauthorized(resp) - return resp + @api.errorhandler(AppInvokeQuotaExceededError) + def handle_quota_exceeded(e: AppInvokeQuotaExceededError): + got_request_exception.send(current_app, exception=e) + status_code = 429 + data = {"code": "too_many_requests", "message": str(e), "status": status_code} + return data, status_code + + @api.errorhandler(Exception) + def handle_general_exception(e: Exception): + got_request_exception.send(current_app, exception=e) + + status_code = 500 + data: dict[str, Any] = getattr(e, "data", {"message": http_status_message(status_code)}) + + # 🔒 Normalize non-mapping data (e.g., if someone set e.data = Response) + if not isinstance(data, Mapping): + data = {"message": str(e)} + + data.setdefault("code", "unknown") + data.setdefault("status", status_code) + + # Log stack + exc_info: Any = sys.exc_info() + if exc_info[1] is None: + exc_info = None + current_app.log_exception(exc_info) + + return data, status_code + + +class ExternalApi(Api): + _authorizations = { + "Bearer": { + "type": "apiKey", + "in": "header", + "name": "Authorization", + "description": "Type: Bearer {your-api-key}", + } + } + + def __init__(self, app: Blueprint | Flask, *args, **kwargs): + kwargs.setdefault("authorizations", self._authorizations) + kwargs.setdefault("security", "Bearer") + kwargs["add_specs"] = dify_config.SWAGGER_UI_ENABLED + kwargs["doc"] = dify_config.SWAGGER_UI_PATH if dify_config.SWAGGER_UI_ENABLED else False + + # Construct with app=None first, then call init_app separately, so the settings prepared in kwargs take effect + super().__init__(app=None, *args, **kwargs) # type: ignore
+ self.init_app(app, **kwargs) + register_external_error_handlers(self) diff --git a/api/libs/helper.py b/api/libs/helper.py index b36f972e19..96e8524660 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -14,7 +14,7 @@ from typing import TYPE_CHECKING, Any, Optional, Union, cast from zoneinfo import available_timezones from flask import Response, stream_with_context -from flask_restful import fields +from flask_restx import fields from pydantic import BaseModel from configs import dify_config @@ -27,6 +27,8 @@ if TYPE_CHECKING: from models.account import Account from models.model import EndUser +logger = logging.getLogger(__name__) + def extract_tenant_id(user: Union["Account", "EndUser"]) -> str | None: """ @@ -57,7 +59,7 @@ def run(script): class AppIconUrlField(fields.Raw): - def output(self, key, obj): + def output(self, key, obj, **kwargs): if obj is None: return None @@ -72,7 +74,7 @@ class AppIconUrlField(fields.Raw): class AvatarUrlField(fields.Raw): - def output(self, key, obj): + def output(self, key, obj, **kwargs): if obj is None: return None @@ -299,8 +301,8 @@ class TokenManager: if expiry_minutes is None: raise ValueError(f"Expiry minutes for {token_type} token is not set") token_key = cls._get_token_key(token, token_type) - expiry_time = int(expiry_minutes * 60) - redis_client.setex(token_key, expiry_time, json.dumps(token_data)) + expiry_seconds = int(expiry_minutes * 60) + redis_client.setex(token_key, expiry_seconds, json.dumps(token_data)) if account_id: cls._set_current_token_for_account(account_id, token, token_type, expiry_minutes) @@ -321,7 +323,7 @@ class TokenManager: key = cls._get_token_key(token, token_type) token_data_json = redis_client.get(key) if token_data_json is None: - logging.warning("%s token %s not found with key %s", token_type, token, key) + logger.warning("%s token %s not found with key %s", token_type, token, key) return None token_data: Optional[dict[str, Any]] = json.loads(token_data_json) return token_data @@ -334,11 +336,11 @@ class TokenManager: @classmethod def _set_current_token_for_account( - cls, account_id: str, token: str, token_type: str, expiry_hours: Union[int, float] + cls, account_id: str, token: str, token_type: str, expiry_minutes: Union[int, float] ): key = cls._get_account_token_key(account_id, token_type) - expiry_time = int(expiry_hours * 60 * 60) - redis_client.setex(key, expiry_time, token) + expiry_seconds = int(expiry_minutes * 60) + redis_client.setex(key, expiry_seconds, token) @classmethod def _get_account_token_key(cls, account_id: str, token_type: str) -> str: diff --git a/api/libs/login.py b/api/libs/login.py index e3a7fe2948..711d16e3b9 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -1,5 +1,5 @@ from functools import wraps -from typing import Any +from typing import Union, cast from flask import current_app, g, has_request_context, request from flask_login.config import EXEMPT_METHODS # type: ignore @@ -11,7 +11,7 @@ from models.model import EndUser #: A proxy for the current user. 
If no user is logged in, this will be an #: anonymous user -current_user: Any = LocalProxy(lambda: _get_user()) +current_user = cast(Union[Account, EndUser, None], LocalProxy(lambda: _get_user())) def login_required(func): @@ -52,7 +52,7 @@ def login_required(func): def decorated_view(*args, **kwargs): if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED: pass - elif not current_user.is_authenticated: + elif current_user is not None and not current_user.is_authenticated: return current_app.login_manager.unauthorized() # type: ignore # flask 1.x compatibility diff --git a/api/libs/module_loading.py b/api/libs/module_loading.py new file mode 100644 index 0000000000..616d072a1b --- /dev/null +++ b/api/libs/module_loading.py @@ -0,0 +1,55 @@ +""" +Module loading utilities similar to Django's module_loading. + +Reference implementation from Django: +https://github.com/django/django/blob/main/django/utils/module_loading.py +""" + +import sys +from importlib import import_module +from typing import Any + + +def cached_import(module_path: str, class_name: str) -> Any: + """ + Import a module and return the named attribute/class from it, with caching. + + Args: + module_path: The module path to import from + class_name: The attribute/class name to retrieve + + Returns: + The imported attribute/class + """ + if not ( + (module := sys.modules.get(module_path)) + and (spec := getattr(module, "__spec__", None)) + and getattr(spec, "_initializing", False) is False + ): + module = import_module(module_path) + return getattr(module, class_name) + + +def import_string(dotted_path: str) -> Any: + """ + Import a dotted module path and return the attribute/class designated by + the last name in the path. Raise ImportError if the import failed. + + Args: + dotted_path: Full module path to the class (e.g., 'module.submodule.ClassName') + + Returns: + The imported class or attribute + + Raises: + ImportError: If the module or attribute cannot be imported + """ + try: + module_path, class_name = dotted_path.rsplit(".", 1) + except ValueError as err: + raise ImportError(f"{dotted_path} doesn't look like a module path") from err + + try: + return cached_import(module_path, class_name) + except AttributeError as err: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute/class') from err diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index cfc6c7d794..5f7d31d47d 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -4,6 +4,8 @@ import sendgrid # type: ignore from python_http_client.exceptions import ForbiddenError, UnauthorizedError from sendgrid.helpers.mail import Content, Email, Mail, To # type: ignore +logger = logging.getLogger(__name__) + class SendGridClient: def __init__(self, sendgrid_api_key: str, _from: str): @@ -11,7 +13,7 @@ class SendGridClient: self._from = _from def send(self, mail: dict): - logging.debug("Sending email with SendGrid") + logger.debug("Sending email with SendGrid") try: _to = mail["to"] @@ -27,19 +29,19 @@ class SendGridClient: mail = Mail(from_email, to_email, subject, content) mail_json = mail.get() # type: ignore response = sg.client.mail.send.post(request_body=mail_json) - logging.debug(response.status_code) - logging.debug(response.body) - logging.debug(response.headers) + logger.debug(response.status_code) + logger.debug(response.body) + logger.debug(response.headers) except TimeoutError as e: - logging.exception("SendGridClient Timeout occurred while sending email") + logger.exception("SendGridClient Timeout 
occurred while sending email") raise except (UnauthorizedError, ForbiddenError) as e: - logging.exception( + logger.exception( "SendGridClient Authentication failed. " "Verify that your credentials and the 'from' email address are correct" ) raise except Exception as e: - logging.exception("SendGridClient Unexpected error occurred while sending email to %s", _to) + logger.exception("SendGridClient Unexpected error occurred while sending email to %s", _to) raise diff --git a/api/libs/smtp.py b/api/libs/smtp.py index a01ad6fab8..8203ca8503 100644 --- a/api/libs/smtp.py +++ b/api/libs/smtp.py @@ -3,6 +3,8 @@ import smtplib from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText +logger = logging.getLogger(__name__) + class SMTPClient: def __init__( @@ -44,13 +46,13 @@ class SMTPClient: smtp.sendmail(self._from, mail["to"], msg.as_string()) except smtplib.SMTPException as e: - logging.exception("SMTP error occurred") + logger.exception("SMTP error occurred") raise except TimeoutError as e: - logging.exception("Timeout occurred while sending email") + logger.exception("Timeout occurred while sending email") raise except Exception as e: - logging.exception("Unexpected error occurred while sending email to %s", mail["to"]) + logger.exception("Unexpected error occurred while sending email to %s", mail["to"]) raise finally: if smtp: diff --git a/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py b/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py new file mode 100644 index 0000000000..7bcdc8f498 --- /dev/null +++ b/api/migrations/versions/2025_08_09_1553-e8446f481c1e_add_provider_credential_pool_support.py @@ -0,0 +1,177 @@ +"""Add provider multi credential support + +Revision ID: e8446f481c1e +Revises: fa8b0fa6f407 +Create Date: 2025-08-09 15:53:54.341341 + +""" +from alembic import op +from libs.uuid_utils import uuidv7 +import models as models +import sqlalchemy as sa +from sqlalchemy.sql import table, column + +# revision identifiers, used by Alembic.
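Stepping back from the per-file hunks: every `build_*_model` helper added across `api/fields/` follows the same flask-restx idiom. The plain field dicts stay module-level so existing `marshal_with(...)` call sites keep working, while each helper registers that dict as a named model on an `Api` or `Namespace` so the response schema appears in the generated Swagger document. A minimal sketch of the pattern, using illustrative names (`Pet`, `pet_fields`) that do not appear in the diff:

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource, fields

# Module-level field dict, mirroring the style of api/fields/*.py
pet_fields = {
    "id": fields.String,
    "name": fields.String,
}


def build_pet_model(api_or_ns: Api | Namespace):
    """Register the field dict as a named model for Swagger documentation."""
    return api_or_ns.model("Pet", pet_fields)


app = Flask(__name__)
api = Api(app)
ns = Namespace("pets")
pet_model = build_pet_model(ns)


@ns.route("/<string:pet_id>")
class PetResource(Resource):
    @ns.marshal_with(pet_model)  # serializes the response and documents its schema
    def get(self, pet_id):
        return {"id": pet_id, "name": "Rex"}


api.add_namespace(ns)
```

The copy-and-rewrap step seen in builders such as `build_conversation_infinite_scroll_pagination_model` exists because flask-restx's `fields.Nested` expects a registered model, unlike flask-restful, which accepted bare field dicts.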
+revision = 'e8446f481c1e' +down_revision = 'fa8b0fa6f407' +branch_labels = None +depends_on = None + + +def upgrade(): + # Create provider_credentials table + op.create_table('provider_credentials', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_credential_pkey') + ) + + # Create index for provider_credentials + with op.batch_alter_table('provider_credentials', schema=None) as batch_op: + batch_op.create_index('provider_credential_tenant_provider_idx', ['tenant_id', 'provider_name'], unique=False) + + # Add credential_id to providers table + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True)) + + # Add credential_id to load_balancing_model_configs table + with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True)) + + migrate_existing_providers_data() + + # Remove encrypted_config column from providers table after migration + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.drop_column('encrypted_config') + + +def migrate_existing_providers_data(): + """migrate providers table data to provider_credentials""" + + # Define table structure for data manipulation + providers_table = table('providers', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) + + provider_credential_table = table('provider_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) + + # Get database connection + conn = op.get_bind() + + # Query all existing providers data + existing_providers = conn.execute( + sa.select(providers_table.c.id, providers_table.c.tenant_id, + providers_table.c.provider_name, providers_table.c.encrypted_config, + providers_table.c.created_at, providers_table.c.updated_at) + .where(providers_table.c.encrypted_config.isnot(None)) + ).fetchall() + + # Iterate through each provider and insert into provider_credentials + for provider in existing_providers: + credential_id = str(uuidv7()) + if not provider.encrypted_config or provider.encrypted_config.strip() == '': + continue + + # Insert into provider_credentials table + conn.execute( + provider_credential_table.insert().values( + id=credential_id, + tenant_id=provider.tenant_id, + provider_name=provider.provider_name, + credential_name='API_KEY1', # Use a default name + encrypted_config=provider.encrypted_config, + 
created_at=provider.created_at, + updated_at=provider.updated_at + ) + ) + + # Update original providers table, set credential_id + conn.execute( + providers_table.update() + .where(providers_table.c.id == provider.id) + .values( + credential_id=credential_id, + ) + ) + +def downgrade(): + # Re-add encrypted_config column to providers table + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + + # Migrate data back from provider_credentials to providers + migrate_data_back_to_providers() + + # Remove credential_id columns + with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op: + batch_op.drop_column('credential_id') + + with op.batch_alter_table('providers', schema=None) as batch_op: + batch_op.drop_column('credential_id') + + # Drop provider_credentials table + op.drop_table('provider_credentials') + + +def migrate_data_back_to_providers(): + """Migrate data back from provider_credentials to providers table for downgrade""" + + # Define table structure for data manipulation + providers_table = table('providers', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('encrypted_config', sa.Text()), + column('credential_id', models.types.StringUUID()), + ) + + provider_credential_table = table('provider_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', sa.Text()), + ) + + # Get database connection + conn = op.get_bind() + + # Query providers that have credential_id + providers_with_credentials = conn.execute( + sa.select(providers_table.c.id, providers_table.c.credential_id) + .where(providers_table.c.credential_id.isnot(None)) + ).fetchall() + + # For each provider, get the credential data and update providers table + for provider in providers_with_credentials: + credential = conn.execute( + sa.select(provider_credential_table.c.encrypted_config) + .where(provider_credential_table.c.id == provider.credential_id) + ).fetchone() + + if credential: + # Update providers table with encrypted_config from credential + conn.execute( + providers_table.update() + .where(providers_table.c.id == provider.id) + .values(encrypted_config=credential.encrypted_config) + ) diff --git a/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py new file mode 100644 index 0000000000..aa7331ec60 --- /dev/null +++ b/api/migrations/versions/2025_08_13_1605-0e154742a5fa_add_provider_model_multi_credential.py @@ -0,0 +1,186 @@ +"""Add provider model multi credential support + +Revision ID: 0e154742a5fa +Revises: e8446f481c1e +Create Date: 2025-08-13 16:05:42.657730 + +""" + +from alembic import op +from libs.uuid_utils import uuidv7 +import models as models +import sqlalchemy as sa +from sqlalchemy.sql import table, column + + +# revision identifiers, used by Alembic. 
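Both credential migrations share one backfill idiom: for each row that still carries a non-empty `encrypted_config`, mint a named credential row, then link the source row to it through the new `credential_id` column. A distilled sketch of just that idiom, with table shapes trimmed to the columns involved and `uuid4` standing in for the project's `uuidv7` helper; `conn` is an open SQLAlchemy connection such as the `op.get_bind()` used above:

```python
import uuid

import sqlalchemy as sa
from sqlalchemy.sql import column, table

# Lightweight table handles, as in the migration (columns trimmed for brevity)
providers = table(
    "providers",
    column("id", sa.String),
    column("encrypted_config", sa.Text),
    column("credential_id", sa.String),
)
credentials = table(
    "provider_credentials",
    column("id", sa.String),
    column("credential_name", sa.String),
    column("encrypted_config", sa.Text),
)


def backfill_credentials(conn) -> None:
    rows = conn.execute(
        sa.select(providers.c.id, providers.c.encrypted_config).where(
            providers.c.encrypted_config.isnot(None)
        )
    ).fetchall()
    for row in rows:
        # Mirror the migration's guard against blank configs
        if not row.encrypted_config or row.encrypted_config.strip() == "":
            continue
        credential_id = str(uuid.uuid4())
        conn.execute(
            credentials.insert().values(
                id=credential_id,
                credential_name="API_KEY1",  # the migration's default name
                encrypted_config=row.encrypted_config,
            )
        )
        # Point the source row at its newly minted credential
        conn.execute(
            providers.update()
            .where(providers.c.id == row.id)
            .values(credential_id=credential_id)
        )
```

The row-by-row pass keeps the logic obvious and matches the migration above; for very large tables an `INSERT ... SELECT` would cut round trips, at the cost of generating the credential ids in SQL.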
+revision = '0e154742a5fa' +down_revision = 'e8446f481c1e' +branch_labels = None +depends_on = None + + +def upgrade(): + # Create provider_model_credentials table + op.create_table('provider_model_credentials', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('provider_name', sa.String(length=255), nullable=False), + sa.Column('model_name', sa.String(length=255), nullable=False), + sa.Column('model_type', sa.String(length=40), nullable=False), + sa.Column('credential_name', sa.String(length=255), nullable=False), + sa.Column('encrypted_config', sa.Text(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.PrimaryKeyConstraint('id', name='provider_model_credential_pkey') + ) + + # Create index for provider_model_credentials + with op.batch_alter_table('provider_model_credentials', schema=None) as batch_op: + batch_op.create_index('provider_model_credential_tenant_provider_model_idx', ['tenant_id', 'provider_name', 'model_name', 'model_type'], unique=False) + + # Add credential_id to provider_models table + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_id', models.types.StringUUID(), nullable=True)) + + + # Add credential_source_type to load_balancing_model_configs table + with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op: + batch_op.add_column(sa.Column('credential_source_type', sa.String(length=40), nullable=True)) + + # Migrate existing provider_models data + migrate_existing_provider_models_data() + + # Remove encrypted_config column from provider_models table after migration + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.drop_column('encrypted_config') + + +def migrate_existing_provider_models_data(): + """migrate provider_models table data to provider_model_credentials""" + + # Define table structure for data manipulation + provider_models_table = table('provider_models', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()), + column('credential_id', models.types.StringUUID()), + ) + + provider_model_credentials_table = table('provider_model_credentials', + column('id', models.types.StringUUID()), + column('tenant_id', models.types.StringUUID()), + column('provider_name', sa.String()), + column('model_name', sa.String()), + column('model_type', sa.String()), + column('credential_name', sa.String()), + column('encrypted_config', sa.Text()), + column('created_at', sa.DateTime()), + column('updated_at', sa.DateTime()) + ) + + + # Get database connection + conn = op.get_bind() + + # Query all existing provider_models data with encrypted_config + existing_provider_models = conn.execute( + sa.select(provider_models_table.c.id, provider_models_table.c.tenant_id, + provider_models_table.c.provider_name, provider_models_table.c.model_name, + provider_models_table.c.model_type, provider_models_table.c.encrypted_config, + provider_models_table.c.created_at, 
provider_models_table.c.updated_at) + .where(provider_models_table.c.encrypted_config.isnot(None)) + ).fetchall() + + # Iterate through each provider_model and insert into provider_model_credentials + for provider_model in existing_provider_models: + if not provider_model.encrypted_config or provider_model.encrypted_config.strip() == '': + continue + + credential_id = str(uuidv7()) + + # Insert into provider_model_credentials table + conn.execute( + provider_model_credentials_table.insert().values( + id=credential_id, + tenant_id=provider_model.tenant_id, + provider_name=provider_model.provider_name, + model_name=provider_model.model_name, + model_type=provider_model.model_type, + credential_name='API_KEY1', # Use a default name + encrypted_config=provider_model.encrypted_config, + created_at=provider_model.created_at, + updated_at=provider_model.updated_at + ) + ) + + # Update original provider_models table, set credential_id + conn.execute( + provider_models_table.update() + .where(provider_models_table.c.id == provider_model.id) + .values(credential_id=credential_id) + ) + + +def downgrade(): + # Re-add encrypted_config column to provider_models table + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.add_column(sa.Column('encrypted_config', sa.Text(), nullable=True)) + + # Migrate data back from provider_model_credentials to provider_models + migrate_data_back_to_provider_models() + + with op.batch_alter_table('provider_models', schema=None) as batch_op: + batch_op.drop_column('credential_id') + + # Remove credential_source_type column from load_balancing_model_configs + with op.batch_alter_table('load_balancing_model_configs', schema=None) as batch_op: + batch_op.drop_column('credential_source_type') + + # Drop provider_model_credentials table + op.drop_table('provider_model_credentials') + + +def migrate_data_back_to_provider_models(): + """Migrate data back from provider_model_credentials to provider_models table for downgrade""" + + # Define table structure for data manipulation + provider_models_table = table('provider_models', + column('id', models.types.StringUUID()), + column('encrypted_config', sa.Text()), + column('credential_id', models.types.StringUUID()), + ) + + provider_model_credentials_table = table('provider_model_credentials', + column('id', models.types.StringUUID()), + column('encrypted_config', sa.Text()), + ) + + # Get database connection + conn = op.get_bind() + + # Query provider_models that have credential_id + provider_models_with_credentials = conn.execute( + sa.select(provider_models_table.c.id, provider_models_table.c.credential_id) + .where(provider_models_table.c.credential_id.isnot(None)) + ).fetchall() + + # For each provider_model, get the credential data and update provider_models table + for provider_model in provider_models_with_credentials: + credential = conn.execute( + sa.select(provider_model_credentials_table.c.encrypted_config) + .where(provider_model_credentials_table.c.id == provider_model.credential_id) + ).fetchone() + + if credential: + # Update provider_models table with encrypted_config from credential + conn.execute( + provider_models_table.update() + .where(provider_models_table.c.id == provider_model.id) + .values(encrypted_config=credential.encrypted_config) + ) diff --git a/api/models/account.py b/api/models/account.py index 1a0752440d..7b7d393414 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -200,7 +200,7 @@ class Tenant(Base): id: Mapped[str] = mapped_column(StringUUID, 
server_default=sa.text("uuid_generate_v4()")) name: Mapped[str] = mapped_column(String(255)) - encrypt_public_key = db.Column(sa.Text) + encrypt_public_key: Mapped[Optional[str]] = mapped_column(sa.Text) plan: Mapped[str] = mapped_column(String(255), server_default=sa.text("'basic'::character varying")) status: Mapped[str] = mapped_column(String(255), server_default=sa.text("'normal'::character varying")) custom_config: Mapped[Optional[str]] = mapped_column(sa.Text) @@ -325,5 +325,5 @@ class TenantPluginAutoUpgradeStrategy(Base): upgrade_mode: Mapped[UpgradeMode] = mapped_column(String(16), nullable=False, server_default="exclude") exclude_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) include_plugins: Mapped[list[str]] = mapped_column(sa.ARRAY(String(255)), nullable=False) # plugin_id (author/name) - created_at = db.Column(DateTime, nullable=False, server_default=func.current_timestamp()) - updated_at = db.Column(DateTime, nullable=False, server_default=func.current_timestamp()) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/dataset.py b/api/models/dataset.py index 3b1d289bc4..d4519f62d7 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -29,6 +29,8 @@ from .engine import db from .model import App, Tag, TagBinding, UploadFile from .types import StringUUID +logger = logging.getLogger(__name__) + class DatasetPermissionEnum(enum.StrEnum): ONLY_ME = "only_me" @@ -60,8 +62,8 @@ class Dataset(Base): created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) - embedding_model = db.Column(String(255), nullable=True) # TODO: mapped_column - embedding_model_provider = db.Column(String(255), nullable=True) # TODO: mapped_column + embedding_model = mapped_column(String(255), nullable=True) + embedding_model_provider = mapped_column(String(255), nullable=True) collection_binding_id = mapped_column(StringUUID, nullable=True) retrieval_model = mapped_column(JSONB, nullable=True) built_in_field_enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) @@ -914,7 +916,7 @@ class DatasetKeywordTable(Base): return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder) return None except Exception as e: - logging.exception("Failed to load keyword table from file: %s", file_key) + logger.exception("Failed to load keyword table from file: %s", file_key) return None diff --git a/api/models/model.py b/api/models/model.py index c4303f3cc5..ed1be14a6c 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -17,7 +17,7 @@ if TYPE_CHECKING: import sqlalchemy as sa from flask import request from flask_login import UserMixin -from sqlalchemy import Float, Index, PrimaryKeyConstraint, String, func, text +from sqlalchemy import Float, Index, PrimaryKeyConstraint, String, exists, func, select, text from sqlalchemy.orm import Mapped, Session, mapped_column from configs import dify_config @@ -77,7 +77,7 @@ class App(Base): description: Mapped[str] = mapped_column(sa.Text, server_default=sa.text("''::character varying")) mode: Mapped[str] = mapped_column(String(255)) icon_type: 
Mapped[Optional[str]] = mapped_column(String(255)) # image, emoji - icon = db.Column(String(255)) + icon = mapped_column(String(255)) icon_background: Mapped[Optional[str]] = mapped_column(String(255)) app_model_config_id = mapped_column(StringUUID, nullable=True) workflow_id = mapped_column(StringUUID, nullable=True) @@ -904,7 +904,7 @@ class Message(Base): message_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) message_unit_price = mapped_column(sa.Numeric(10, 4), nullable=False) message_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) - answer: Mapped[str] = db.Column(sa.Text, nullable=False) # TODO make it mapped_column + answer: Mapped[str] = mapped_column(sa.Text, nullable=False) answer_tokens: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) answer_unit_price = mapped_column(sa.Numeric(10, 4), nullable=False) answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) @@ -1321,7 +1321,7 @@ class MessageAnnotation(Base): app_id: Mapped[str] = mapped_column(StringUUID) conversation_id: Mapped[Optional[str]] = mapped_column(StringUUID, sa.ForeignKey("conversations.id")) message_id: Mapped[Optional[str]] = mapped_column(StringUUID) - question = db.Column(sa.Text, nullable=True) + question = mapped_column(sa.Text, nullable=True) content = mapped_column(sa.Text, nullable=False) hit_count: Mapped[int] = mapped_column(sa.Integer, nullable=False, server_default=sa.text("0")) account_id = mapped_column(StringUUID, nullable=False) @@ -1553,7 +1553,7 @@ class ApiToken(Base): def generate_api_key(prefix, n): while True: result = prefix + generate_string(n) - if db.session.query(ApiToken).where(ApiToken.token == result).count() > 0: + if db.session.scalar(select(exists().where(ApiToken.token == result))): continue return result @@ -1677,7 +1677,7 @@ class MessageAgentThought(Base): message_unit_price = mapped_column(sa.Numeric, nullable=True) message_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) message_files = mapped_column(sa.Text, nullable=True) - answer = db.Column(sa.Text, nullable=True) + answer = mapped_column(sa.Text, nullable=True) answer_token: Mapped[Optional[int]] = mapped_column(sa.Integer, nullable=True) answer_unit_price = mapped_column(sa.Numeric, nullable=True) answer_price_unit = mapped_column(sa.Numeric(10, 7), nullable=False, server_default=sa.text("0.001")) diff --git a/api/models/provider.py b/api/models/provider.py index 4ea2c59fdb..18bf0ac5ad 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -1,5 +1,6 @@ from datetime import datetime from enum import Enum +from functools import cached_property from typing import Optional import sqlalchemy as sa @@ -7,6 +8,7 @@ from sqlalchemy import DateTime, String, func, text from sqlalchemy.orm import Mapped, mapped_column from .base import Base +from .engine import db from .types import StringUUID @@ -60,9 +62,9 @@ class Provider(Base): provider_type: Mapped[str] = mapped_column( String(40), nullable=False, server_default=text("'custom'::character varying") ) - encrypted_config: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false")) last_used: Mapped[Optional[datetime]] = mapped_column(DateTime, nullable=True) + credential_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) quota_type: 
Mapped[Optional[str]] = mapped_column( String(40), nullable=True, server_default=text("''::character varying") @@ -79,6 +81,21 @@ class Provider(Base): f" provider_type='{self.provider_type}')>" ) + @cached_property + def credential(self): + if self.credential_id: + return db.session.query(ProviderCredential).where(ProviderCredential.id == self.credential_id).first() + + @property + def credential_name(self): + credential = self.credential + return credential.credential_name if credential else None + + @property + def encrypted_config(self): + credential = self.credential + return credential.encrypted_config if credential else None + @property def token_is_set(self): """ @@ -116,11 +133,30 @@ class ProviderModel(Base): provider_name: Mapped[str] = mapped_column(String(255), nullable=False) model_name: Mapped[str] = mapped_column(String(255), nullable=False) model_type: Mapped[str] = mapped_column(String(40), nullable=False) - encrypted_config: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) + credential_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) is_valid: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("false")) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + @cached_property + def credential(self): + if self.credential_id: + return ( + db.session.query(ProviderModelCredential) + .where(ProviderModelCredential.id == self.credential_id) + .first() + ) + + @property + def credential_name(self): + credential = self.credential + return credential.credential_name if credential else None + + @property + def encrypted_config(self): + credential = self.credential + return credential.encrypted_config if credential else None + class TenantDefaultModel(Base): __tablename__ = "tenant_default_models" @@ -220,6 +256,56 @@ class LoadBalancingModelConfig(Base): model_type: Mapped[str] = mapped_column(String(40), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) encrypted_config: Mapped[Optional[str]] = mapped_column(sa.Text, nullable=True) + credential_id: Mapped[Optional[str]] = mapped_column(StringUUID, nullable=True) + credential_source_type: Mapped[Optional[str]] = mapped_column(String(40), nullable=True) enabled: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=text("true")) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + + +class ProviderCredential(Base): + """ + Provider credential - stores multiple named credentials for each provider + """ + + __tablename__ = "provider_credentials" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="provider_credential_pkey"), + sa.Index("provider_credential_tenant_provider_idx", "tenant_id", "provider_name"), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + provider_name: Mapped[str] = mapped_column(String(255), nullable=False) + credential_name: Mapped[str] = mapped_column(String(255), nullable=False) + encrypted_config: Mapped[str] = mapped_column(sa.Text, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, 
server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + + +class ProviderModelCredential(Base): + """ + Provider model credential - stores multiple named credentials for each provider model + """ + + __tablename__ = "provider_model_credentials" + __table_args__ = ( + sa.PrimaryKeyConstraint("id", name="provider_model_credential_pkey"), + sa.Index( + "provider_model_credential_tenant_provider_model_idx", + "tenant_id", + "provider_name", + "model_name", + "model_type", + ), + ) + + id: Mapped[str] = mapped_column(StringUUID, server_default=text("uuidv7()")) + tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) + provider_name: Mapped[str] = mapped_column(String(255), nullable=False) + model_name: Mapped[str] = mapped_column(String(255), nullable=False) + model_type: Mapped[str] = mapped_column(String(40), nullable=False) + credential_name: Mapped[str] = mapped_column(String(255), nullable=False) + encrypted_config: Mapped[str] = mapped_column(sa.Text, nullable=False) + created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) + updated_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp()) diff --git a/api/models/task.py b/api/models/task.py index ab700c553c..9a52fcfb41 100644 --- a/api/models/task.py +++ b/api/models/task.py @@ -2,7 +2,7 @@ from datetime import datetime from typing import Optional import sqlalchemy as sa -from celery import states # type: ignore +from celery import states from sqlalchemy import DateTime, String from sqlalchemy.orm import Mapped, mapped_column diff --git a/api/models/workflow.py b/api/models/workflow.py index 7ff463e08f..4d0089fa4e 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -7,8 +7,7 @@ from typing import TYPE_CHECKING, Any, Optional, Union from uuid import uuid4 import sqlalchemy as sa -from flask_login import current_user -from sqlalchemy import DateTime, orm +from sqlalchemy import DateTime, exists, orm, select from core.file.constants import maybe_file_object from core.file.models import File @@ -18,7 +17,6 @@ from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIAB from core.workflow.nodes.enums import NodeType from factories.variable_factory import TypeMismatchError, build_segment_with_type from libs.datetime_utils import naive_utc_now -from libs.helper import extract_tenant_id from ._workflow_exc import NodeNotFoundError, WorkflowDataError @@ -40,7 +38,7 @@ from .engine import db from .enums import CreatorUserRole, DraftVariableType from .types import EnumText, StringUUID -_logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) class WorkflowType(Enum): @@ -338,12 +336,13 @@ class Workflow(Base): """ from models.tools import WorkflowToolProvider - return ( - db.session.query(WorkflowToolProvider) - .where(WorkflowToolProvider.tenant_id == self.tenant_id, WorkflowToolProvider.app_id == self.app_id) - .count() - > 0 + stmt = select( + exists().where( + WorkflowToolProvider.tenant_id == self.tenant_id, + WorkflowToolProvider.app_id == self.app_id, + ) ) + return db.session.execute(stmt).scalar_one() @property def environment_variables(self) -> Sequence[StringVariable | IntegerVariable | FloatVariable | SecretVariable]: @@ -351,8 +350,8 @@ class Workflow(Base): if self._environment_variables is None: self._environment_variables = "{}" - # Get tenant_id from current_user 
(Account or EndUser) - tenant_id = extract_tenant_id(current_user) + # Use workflow.tenant_id to avoid relying on request user in background threads + tenant_id = self.tenant_id if not tenant_id: return [] @@ -382,8 +381,8 @@ class Workflow(Base): self._environment_variables = "{}" return - # Get tenant_id from current_user (Account or EndUser) - tenant_id = extract_tenant_id(current_user) + # Use workflow.tenant_id to avoid relying on request user in background threads + tenant_id = self.tenant_id if not tenant_id: self._environment_variables = "{}" @@ -923,7 +922,7 @@ def _naive_utc_datetime(): class WorkflowDraftVariable(Base): """`WorkflowDraftVariable` record variables and outputs generated during - debugging worfklow or chatflow. + debugging workflow or chatflow. IMPORTANT: This model maintains multiple invariant rules that must be preserved. Do not instantiate this class directly with the constructor. @@ -1057,7 +1056,7 @@ class WorkflowDraftVariable(Base): def get_selector(self) -> list[str]: selector = json.loads(self.selector) if not isinstance(selector, list): - _logger.error( + logger.error( "invalid selector loaded from database, type=%s, value=%s", type(selector), self.selector, diff --git a/api/mypy.ini b/api/mypy.ini index 3a6a54afe1..bd771a056f 100644 --- a/api/mypy.ini +++ b/api/mypy.ini @@ -12,8 +12,14 @@ exclude = (?x)( [mypy-flask_login] ignore_missing_imports=True -[mypy-flask_restful] +[mypy-flask_restx] ignore_missing_imports=True -[mypy-flask_restful.inputs] +[mypy-flask_restx.api] +ignore_missing_imports=True + +[mypy-flask_restx.inputs] +ignore_missing_imports=True + +[mypy-google.cloud.storage] ignore_missing_imports=True diff --git a/api/pyproject.toml b/api/pyproject.toml index 61a725a830..3078202498 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.7.2" +version = "1.8.0" requires-python = ">=3.11,<3.13" dependencies = [ @@ -13,13 +13,12 @@ dependencies = [ "cachetools~=5.3.0", "celery~=5.5.2", "chardet~=5.1.0", - "flask~=3.1.0", + "flask~=3.1.2", "flask-compress~=1.17", "flask-cors~=6.0.0", "flask-login~=0.6.3", "flask-migrate~=4.0.7", "flask-orjson~=2.0.0", - "flask-restful~=0.3.10", "flask-sqlalchemy~=3.1.1", "gevent~=24.11.1", "gmpy2~=2.2.1", @@ -68,7 +67,7 @@ dependencies = [ "pydantic~=2.11.4", "pydantic-extra-types~=2.10.3", "pydantic-settings~=2.9.1", - "pyjwt~=2.8.0", + "pyjwt~=2.10.1", "pypdfium2==4.30.0", "python-docx~=1.1.0", "python-dotenv==1.0.1", @@ -78,9 +77,9 @@ dependencies = [ "resend~=2.9.0", "sentry-sdk[flask]~=2.28.0", "sqlalchemy~=2.0.29", - "starlette==0.41.0", + "starlette==0.47.2", "tiktoken~=0.9.0", - "transformers~=4.51.0", + "transformers~=4.53.0", "unstructured[docx,epub,md,ppt,pptx]~=0.16.1", "weave~=0.51.0", "yarl~=1.18.3", @@ -88,6 +87,7 @@ dependencies = [ "sseclient-py>=1.8.0", "httpx-sse>=0.4.0", "sendgrid~=6.12.3", + "flask-restx>=1.3.0", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. 
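Two hunks above, in `api/models/model.py` and `api/models/workflow.py`, replace `query(...).count() > 0` with an `EXISTS` subquery, which lets the database stop at the first matching row instead of counting them all. A minimal sketch of the rewrite against a throwaway SQLite model (the `ApiToken` mapping here is a stand-in, not the project's real one):

```python
import sqlalchemy as sa
from sqlalchemy import exists, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class ApiToken(Base):
    __tablename__ = "api_tokens"
    id: Mapped[int] = mapped_column(primary_key=True)
    token: Mapped[str]


def token_taken(session: Session, token: str) -> bool:
    # Emits: SELECT EXISTS (SELECT * FROM api_tokens WHERE token = :token)
    return bool(session.scalar(select(exists().where(ApiToken.token == token))))


if __name__ == "__main__":
    engine = sa.create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(ApiToken(token="abc"))
        session.commit()
        print(token_taken(session, "abc"))  # True
        print(token_taken(session, "xyz"))  # False
```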
@@ -110,7 +110,7 @@ dev = [ "dotenv-linter~=0.5.0", "faker~=32.1.0", "lxml-stubs~=0.5.1", - "mypy~=1.16.0", + "mypy~=1.17.1", "ruff~=0.12.3", "pytest~=8.3.2", "pytest-benchmark~=4.0.0", @@ -164,6 +164,7 @@ dev = [ "scipy-stubs>=1.15.3.0", "types-python-http-client>=3.3.7.20240910", "types-redis>=4.6.0.20241004", + "celery-types>=0.23.0", ] ############################################################ @@ -178,7 +179,7 @@ storage = [ "google-cloud-storage==2.16.0", "opendal~=0.45.16", "oss2==2.18.5", - "supabase~=2.8.1", + "supabase~=2.18.1", "tos~=2.7.1", ] @@ -205,7 +206,7 @@ vdb = [ "pgvector==0.2.5", "pymilvus~=2.5.0", "pymochow==1.3.1", - "pyobvector~=0.1.6", + "pyobvector~=0.2.15", "qdrant-client==1.9.0", "tablestore==6.2.0", "tcvectordb~=1.6.4", diff --git a/api/repositories/factory.py b/api/repositories/factory.py index 1f0320054c..0be9c8908c 100644 --- a/api/repositories/factory.py +++ b/api/repositories/factory.py @@ -5,17 +5,14 @@ This factory is specifically designed for DifyAPI repositories that handle service-layer operations with dependency injection patterns. """ -import logging - from sqlalchemy.orm import sessionmaker from configs import dify_config from core.repositories import DifyCoreRepositoryFactory, RepositoryImportError +from libs.module_loading import import_string from repositories.api_workflow_node_execution_repository import DifyAPIWorkflowNodeExecutionRepository from repositories.api_workflow_run_repository import APIWorkflowRunRepository -logger = logging.getLogger(__name__) - class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): """ @@ -50,17 +47,9 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): class_path = dify_config.API_WORKFLOW_NODE_EXECUTION_REPOSITORY try: - repository_class = cls._import_class(class_path) - cls._validate_repository_interface(repository_class, DifyAPIWorkflowNodeExecutionRepository) - # Service repository requires session_maker parameter - cls._validate_constructor_signature(repository_class, ["session_maker"]) - + repository_class = import_string(class_path) return repository_class(session_maker=session_maker) # type: ignore[no-any-return] - except RepositoryImportError: - # Re-raise our custom errors as-is - raise - except Exception as e: - logger.exception("Failed to create DifyAPIWorkflowNodeExecutionRepository") + except Exception as e: # ImportError from import_string included raise RepositoryImportError( f"Failed to create DifyAPIWorkflowNodeExecutionRepository from '{class_path}': {e}" ) from e @@ -87,15 +76,7 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): class_path = dify_config.API_WORKFLOW_RUN_REPOSITORY try: - repository_class = cls._import_class(class_path) - cls._validate_repository_interface(repository_class, APIWorkflowRunRepository) - # Service repository requires session_maker parameter - cls._validate_constructor_signature(repository_class, ["session_maker"]) - + repository_class = import_string(class_path) return repository_class(session_maker=session_maker) # type: ignore[no-any-return] - except RepositoryImportError: - # Re-raise our custom errors as-is - raise - except Exception as e: - logger.exception("Failed to create APIWorkflowRunRepository") + except Exception as e: # ImportError from import_string included raise RepositoryImportError(f"Failed to create APIWorkflowRunRepository from '{class_path}': {e}") from e diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py index a896c818a5..65038dce4d 100644 --- a/api/schedule/clean_messages.py +++ b/api/schedule/clean_messages.py @@ -21,7 +21,7 @@ from
models.model import ( from models.web import SavedMessage from services.feature_service import FeatureService -_logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) @app.celery.task(queue="dataset") @@ -47,10 +47,9 @@ def clean_messages(): if not messages: break for message in messages: - plan_sandbox_clean_message_day = message.created_at app = db.session.query(App).filter_by(id=message.app_id).first() if not app: - _logger.warning( + logger.warning( "Expected App record to exist, but none was found, app_id=%s, message_id=%s", message.app_id, message.id, diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py index 1141451011..63e6132b6a 100644 --- a/api/schedule/clean_unused_datasets_task.py +++ b/api/schedule/clean_unused_datasets_task.py @@ -45,6 +45,7 @@ def clean_unused_datasets_task(): plan_filter = config["plan_filter"] add_logs = config["add_logs"] + page = 1 while True: try: # Subquery for counting new documents @@ -86,12 +87,12 @@ def clean_unused_datasets_task(): .order_by(Dataset.created_at.desc()) ) - datasets = db.paginate(stmt, page=1, per_page=50) + datasets = db.paginate(stmt, page=page, per_page=50, error_out=False) except SQLAlchemyError: raise - if datasets.items is None or len(datasets.items) == 0: + if datasets is None or datasets.items is None or len(datasets.items) == 0: break for dataset in datasets: @@ -150,5 +151,7 @@ def clean_unused_datasets_task(): except Exception as e: click.echo(click.style(f"clean dataset index error: {e.__class__.__name__} {str(e)}", fg="red")) + page += 1 + end_at = time.perf_counter() click.echo(click.style(f"Cleaned unused dataset from db success latency: {end_at - start_at}", fg="green")) diff --git a/api/schedule/clean_workflow_runlogs_precise.py b/api/schedule/clean_workflow_runlogs_precise.py new file mode 100644 index 0000000000..75057983f6 --- /dev/null +++ b/api/schedule/clean_workflow_runlogs_precise.py @@ -0,0 +1,155 @@ +import datetime +import logging +import time + +import click + +import app +from configs import dify_config +from extensions.ext_database import db +from models.model import ( + AppAnnotationHitHistory, + Conversation, + Message, + MessageAgentThought, + MessageAnnotation, + MessageChain, + MessageFeedback, + MessageFile, +) +from models.workflow import ConversationVariable, WorkflowAppLog, WorkflowNodeExecutionModel, WorkflowRun + +logger = logging.getLogger(__name__) + + +MAX_RETRIES = 3 +BATCH_SIZE = dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE + + +@app.celery.task(queue="dataset") +def clean_workflow_runlogs_precise(): + """Clean expired workflow run logs with retry mechanism and complete message cascade""" + + click.echo(click.style("Start clean workflow run logs (precise mode with complete cascade).", fg="green")) + start_at = time.perf_counter() + + retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS + cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days) + + try: + total_workflow_runs = db.session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count() + if total_workflow_runs == 0: + logger.info("No expired workflow run logs found") + return + logger.info("Found %s expired workflow run logs to clean", total_workflow_runs) + + total_deleted = 0 + failed_batches = 0 + batch_count = 0 + + while True: + workflow_runs = ( + db.session.query(WorkflowRun.id).where(WorkflowRun.created_at < cutoff_date).limit(BATCH_SIZE).all() + ) + + if not workflow_runs: + break + + workflow_run_ids = [run.id for 
run in workflow_runs] + batch_count += 1 + + success = _delete_batch_with_retry(workflow_run_ids, failed_batches) + + if success: + total_deleted += len(workflow_run_ids) + failed_batches = 0 + else: + failed_batches += 1 + if failed_batches >= MAX_RETRIES: + logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES) + break + else: + # Calculate incremental delay times: 5, 10, 15 minutes + retry_delay_minutes = failed_batches * 5 + logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes) + time.sleep(retry_delay_minutes * 60) + continue + + logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted) + + except Exception as e: + db.session.rollback() + logger.exception("Unexpected error in workflow log cleanup") + raise + + end_at = time.perf_counter() + execution_time = end_at - start_at + click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green")) + + +def _delete_batch_with_retry(workflow_run_ids: list[str], attempt_count: int) -> bool: + """Delete a single batch with a retry mechanism and complete cascading deletion""" + try: + with db.session.begin_nested(): + message_data = ( + db.session.query(Message.id, Message.conversation_id) + .filter(Message.workflow_run_id.in_(workflow_run_ids)) + .all() + ) + message_id_list = [msg.id for msg in message_data] + conversation_id_list = list({msg.conversation_id for msg in message_data if msg.conversation_id}) + if message_id_list: + db.session.query(AppAnnotationHitHistory).where( + AppAnnotationHitHistory.message_id.in_(message_id_list) + ).delete(synchronize_session=False) + + db.session.query(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_id_list)).delete( + synchronize_session=False + ) + + db.session.query(MessageChain).where(MessageChain.message_id.in_(message_id_list)).delete( + synchronize_session=False + ) + + db.session.query(MessageFile).where(MessageFile.message_id.in_(message_id_list)).delete( + synchronize_session=False + ) + + db.session.query(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_id_list)).delete( + synchronize_session=False + ) + + db.session.query(MessageFeedback).where(MessageFeedback.message_id.in_(message_id_list)).delete( + synchronize_session=False + ) + + db.session.query(Message).where(Message.workflow_run_id.in_(workflow_run_ids)).delete( + synchronize_session=False + ) + + db.session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete( + synchronize_session=False + ) + + db.session.query(WorkflowNodeExecutionModel).where( + WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids) + ).delete(synchronize_session=False) + + if conversation_id_list: + db.session.query(ConversationVariable).where( + ConversationVariable.conversation_id.in_(conversation_id_list) + ).delete(synchronize_session=False) + + db.session.query(Conversation).where(Conversation.id.in_(conversation_id_list)).delete( + synchronize_session=False + ) + + db.session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False) + + db.session.commit() + return True + + except Exception as e: + db.session.rollback() + logger.exception("Batch deletion failed (attempt %s)", attempt_count + 1) + return False diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index 03ef9062bd..9e32ecc716 100644 --- 
a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -13,6 +13,8 @@ from models.account import Account, Tenant, TenantAccountJoin from models.dataset import Dataset, DatasetAutoDisableLog from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + @app.celery.task(queue="dataset") def mail_clean_document_notify_task(): @@ -24,7 +26,7 @@ def mail_clean_document_notify_task(): if not mail.is_inited(): return - logging.info(click.style("Start send document clean notify mail", fg="green")) + logger.info(click.style("Start send document clean notify mail", fg="green")) start_at = time.perf_counter() # send document clean notify mail @@ -89,8 +91,6 @@ def mail_clean_document_notify_task(): dataset_auto_disable_log.notified = True db.session.commit() end_at = time.perf_counter() - logging.info( - click.style(f"Send document clean notify mail succeeded: latency: {end_at - start_at}", fg="green") - ) + logger.info(click.style(f"Send document clean notify mail succeeded: latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("Send document clean notify mail failed") + logger.exception("Send document clean notify mail failed") diff --git a/api/schedule/queue_monitor_task.py b/api/schedule/queue_monitor_task.py index 5868450a14..4072d9db82 100644 --- a/api/schedule/queue_monitor_task.py +++ b/api/schedule/queue_monitor_task.py @@ -18,6 +18,8 @@ celery_redis = Redis( db=int(redis_config.get("virtual_host")) if redis_config.get("virtual_host") else 1, ) +logger = logging.getLogger(__name__) + @app.celery.task(queue="monitor") def queue_monitor_task(): @@ -25,27 +27,27 @@ def queue_monitor_task(): threshold = dify_config.QUEUE_MONITOR_THRESHOLD if threshold is None: - logging.warning(click.style("QUEUE_MONITOR_THRESHOLD is not configured, skipping monitoring", fg="yellow")) + logger.warning(click.style("QUEUE_MONITOR_THRESHOLD is not configured, skipping monitoring", fg="yellow")) return try: queue_length = celery_redis.llen(f"{queue_name}") - logging.info(click.style(f"Start monitor {queue_name}", fg="green")) + logger.info(click.style(f"Start monitor {queue_name}", fg="green")) if queue_length is None: - logging.error( + logger.error( click.style(f"Failed to get queue length for {queue_name} - Redis may be unavailable", fg="red") ) return - logging.info(click.style(f"Queue length: {queue_length}", fg="green")) + logger.info(click.style(f"Queue length: {queue_length}", fg="green")) if queue_length >= threshold: - warning_msg = f"Queue {queue_name} task count exceeded the limit.: {queue_length}/{threshold}" - logging.warning(click.style(warning_msg, fg="red")) + warning_msg = f"Queue {queue_name} task count exceeded the limit: {queue_length}/{threshold}" + logger.warning(click.style(warning_msg, fg="red")) - alter_emails = dify_config.QUEUE_MONITOR_ALERT_EMAILS - if alter_emails: - to_list = alter_emails.split(",") + alert_emails = dify_config.QUEUE_MONITOR_ALERT_EMAILS + if alert_emails: + to_list = alert_emails.split(",") email_service = get_email_i18n_service() for to in to_list: try: @@ -62,10 +64,10 @@ def queue_monitor_task(): }, ) except Exception as e: - logging.exception(click.style("Exception occurred during sending email", fg="red")) + logger.exception(click.style("Exception occurred during sending email", fg="red")) except Exception as e: - logging.exception(click.style("Exception occurred during queue monitoring", fg="red")) + logger.exception(click.style("Exception occurred during queue monitoring", fg="red")) finally: if db.session.is_active: db.session.close() diff --git a/api/services/account_service.py
b/api/services/account_service.py index 1cce8e67a4..089e667166 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -67,6 +67,8 @@ from tasks.mail_owner_transfer_task import ( ) from tasks.mail_reset_password_task import send_reset_password_mail_task +logger = logging.getLogger(__name__) + class TokenPair(BaseModel): access_token: str @@ -332,9 +334,9 @@ class AccountService: db.session.add(account_integrate) db.session.commit() - logging.info("Account %s linked %s account %s.", account.id, provider, open_id) + logger.info("Account %s linked %s account %s.", account.id, provider, open_id) except Exception as e: - logging.exception("Failed to link %s account %s to Account %s", provider, open_id, account.id) + logger.exception("Failed to link %s account %s to Account %s", provider, open_id, account.id) raise LinkAccountIntegrateError("Failed to link account.") from e @staticmethod @@ -425,7 +427,7 @@ class AccountService: cls, account: Optional[Account] = None, email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", ): account_email = account.email if account else email if account_email is None: @@ -452,12 +454,14 @@ class AccountService: account: Optional[Account] = None, email: Optional[str] = None, old_email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", phase: Optional[str] = None, ): account_email = account.email if account else email if account_email is None: raise ValueError("Email must be provided.") + if not phase: + raise ValueError("phase must be provided.") if cls.change_email_rate_limiter.is_rate_limited(account_email): from controllers.console.auth.error import EmailChangeRateLimitExceededError @@ -480,7 +484,7 @@ class AccountService: cls, account: Optional[Account] = None, email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", ): account_email = account.email if account else email if account_email is None: @@ -496,7 +500,7 @@ class AccountService: cls, account: Optional[Account] = None, email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", workspace_name: Optional[str] = "", ): account_email = account.email if account else email @@ -509,6 +513,7 @@ class AccountService: raise OwnerTransferRateLimitExceededError() code, token = cls.generate_owner_transfer_token(account_email, account) + workspace_name = workspace_name or "" send_owner_transfer_confirm_task.delay( language=language, @@ -524,13 +529,14 @@ class AccountService: cls, account: Optional[Account] = None, email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", workspace_name: Optional[str] = "", - new_owner_email: Optional[str] = "", + new_owner_email: str = "", ): account_email = account.email if account else email if account_email is None: raise ValueError("Email must be provided.") + workspace_name = workspace_name or "" send_old_owner_transfer_notify_email_task.delay( language=language, @@ -544,12 +550,13 @@ class AccountService: cls, account: Optional[Account] = None, email: Optional[str] = None, - language: Optional[str] = "en-US", + language: str = "en-US", workspace_name: Optional[str] = "", ): account_email = account.email if account else email if account_email is None: raise ValueError("Email must be provided.") + workspace_name = workspace_name or "" send_new_owner_transfer_notify_email_task.delay( language=language, @@ -633,7 +640,10 @@ class AccountService: @classmethod def 
send_email_code_login_email( - cls, account: Optional[Account] = None, email: Optional[str] = None, language: Optional[str] = "en-US" + cls, + account: Optional[Account] = None, + email: Optional[str] = None, + language: str = "en-US", ): email = account.email if account else email if email is None: @@ -917,7 +927,7 @@ class TenantService: """Create tenant member""" if role == TenantAccountRole.OWNER.value: if TenantService.has_roles(tenant, [TenantAccountRole.OWNER]): - logging.error("Tenant %s has already an owner.", tenant.id) + logger.error("Tenant %s has already an owner.", tenant.id) raise Exception("Tenant already has an owner.") ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() @@ -1169,7 +1179,7 @@ class RegisterService: db.session.query(Tenant).delete() db.session.commit() - logging.exception("Setup account failed, email: %s, name: %s", email, name) + logger.exception("Setup account failed, email: %s, name: %s", email, name) raise ValueError(f"Setup failed: {e}") @classmethod @@ -1214,15 +1224,15 @@ class RegisterService: db.session.commit() except WorkSpaceNotAllowedCreateError: db.session.rollback() - logging.exception("Register failed") + logger.exception("Register failed") raise AccountRegisterError("Workspace is not allowed to create.") except AccountRegisterError as are: db.session.rollback() - logging.exception("Register failed") + logger.exception("Register failed") raise are except Exception as e: db.session.rollback() - logging.exception("Register failed") + logger.exception("Register failed") raise AccountRegisterError(f"Registration failed: {e}") from e return account @@ -1260,10 +1270,11 @@ class RegisterService: raise AccountAlreadyInTenantError("Account already in tenant.") token = cls.generate_invite_token(tenant, account) + language = account.interface_language or "en-US" # send email send_invite_member_mail_task.delay( - language=account.interface_language, + language=language, to=email, token=token, inviter_name=inviter.name if inviter else "Dify", diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index b7a047914e..45b246af1e 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -1,4 +1,3 @@ -import datetime import uuid from typing import cast @@ -10,6 +9,7 @@ from werkzeug.exceptions import NotFound from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.model import App, AppAnnotationHitHistory, AppAnnotationSetting, Message, MessageAnnotation from services.feature_service import FeatureService from tasks.annotation.add_annotation_to_index_task import add_annotation_to_index_task @@ -293,7 +293,7 @@ class AppAnnotationService: annotation_ids_to_delete = [annotation.id for annotation, _ in annotations_to_delete] # Step 2: Bulk delete hit histories in a single query - db.session.query(AppAnnotationHitHistory).filter( + db.session.query(AppAnnotationHitHistory).where( AppAnnotationHitHistory.annotation_id.in_(annotation_ids_to_delete) ).delete(synchronize_session=False) @@ -307,7 +307,7 @@ class AppAnnotationService: # Step 4: Bulk delete annotations in a single query deleted_count = ( db.session.query(MessageAnnotation) - .filter(MessageAnnotation.id.in_(annotation_ids_to_delete)) + .where(MessageAnnotation.id.in_(annotation_ids_to_delete)) .delete(synchronize_session=False) ) @@ -473,7 +473,7 @@ class AppAnnotationService: raise NotFound("App 
annotation not found") annotation_setting.score_threshold = args["score_threshold"] annotation_setting.updated_user_id = current_user.id - annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + annotation_setting.updated_at = naive_utc_now() db.session.add(annotation_setting) db.session.commit() @@ -505,9 +505,9 @@ class AppAnnotationService: db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() ) - annotations_query = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id) + annotations_query = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id) for annotation in annotations_query.yield_per(100): - annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).filter( + annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).where( AppAnnotationHitHistory.annotation_id == annotation.id ) for annotation_hit_history in annotation_hit_histories_query.yield_per(100): diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index 2aa9f6cabd..18c72ebde2 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -42,7 +42,7 @@ IMPORT_INFO_REDIS_KEY_PREFIX = "app_import_info:" CHECK_DEPENDENCIES_REDIS_KEY_PREFIX = "app_check_dependencies:" IMPORT_INFO_REDIS_EXPIRY = 10 * 60 # 10 minutes DSL_MAX_SIZE = 10 * 1024 * 1024 # 10MB -CURRENT_DSL_VERSION = "0.3.1" +CURRENT_DSL_VERSION = "0.4.0" class ImportMode(StrEnum): diff --git a/api/services/app_service.py b/api/services/app_service.py index 0f22666d5a..80fe45aa21 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -25,6 +25,8 @@ from services.feature_service import FeatureService from services.tag_service import TagService from tasks.remove_app_and_related_data_task import remove_app_and_related_data_task +logger = logging.getLogger(__name__) + class AppService: def get_paginate_apps(self, user_id: str, tenant_id: str, args: dict) -> Pagination | None: @@ -95,7 +97,7 @@ class AppService: except (ProviderTokenNotInitError, LLMBadRequestError): model_instance = None except Exception as e: - logging.exception("Get default model instance failed, tenant_id: %s", tenant_id) + logger.exception("Get default model instance failed, tenant_id: %s", tenant_id) model_instance = None if model_instance: diff --git a/api/services/billing_service.py b/api/services/billing_service.py index 476fce0057..40d45af376 100644 --- a/api/services/billing_service.py +++ b/api/services/billing_service.py @@ -123,7 +123,7 @@ class BillingService: return BillingService._send_request("GET", "/education/verify", params=params) @classmethod - def is_active(cls, account_id: str): + def status(cls, account_id: str): params = {"account_id": account_id} return BillingService._send_request("GET", "/education/status", params=params) diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py index 4f3dd3c762..ac603d3cc9 100644 --- a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -1,3 +1,5 @@ +import contextlib +import logging from collections.abc import Callable, Sequence from typing import Any, Optional, Union @@ -22,6 +24,9 @@ from services.errors.conversation import ( LastConversationNotExistsError, ) from services.errors.message import MessageNotExistsError +from tasks.delete_conversation_task import delete_conversation_related_data + +logger = logging.getLogger(__name__) class 
ConversationService: @@ -142,13 +147,11 @@ class ConversationService: raise MessageNotExistsError() # generate conversation name - try: + with contextlib.suppress(Exception): name = LLMGenerator.generate_conversation_name( app_model.tenant_id, message.query, conversation.id, app_model.id ) conversation.name = name - except Exception: - pass db.session.commit() @@ -176,11 +179,21 @@ class ConversationService: @classmethod def delete(cls, app_model: App, conversation_id: str, user: Optional[Union[Account, EndUser]]): - conversation = cls.get_conversation(app_model, conversation_id, user) + try: + logger.info( + "Initiating conversation deletion for app_name %s, conversation_id: %s", + app_model.name, + conversation_id, + ) - conversation.is_deleted = True - conversation.updated_at = naive_utc_now() - db.session.commit() + db.session.query(Conversation).where(Conversation.id == conversation_id).delete(synchronize_session=False) + db.session.commit() + + delete_conversation_related_data.delay(conversation_id) + + except Exception as e: + db.session.rollback() + raise e @classmethod def get_conversational_variable( diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 8934608da1..84860fd170 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -6,10 +6,10 @@ import secrets import time import uuid from collections import Counter -from typing import Any, Optional +from typing import Any, Literal, Optional from flask_login import current_user -from sqlalchemy import func, select +from sqlalchemy import exists, func, select from sqlalchemy.orm import Session from werkzeug.exceptions import NotFound @@ -51,7 +51,7 @@ from services.entities.knowledge_entities.knowledge_entities import ( RetrievalModel, SegmentUpdateArgs, ) -from services.errors.account import InvalidActionError, NoPermissionError +from services.errors.account import NoPermissionError from services.errors.chunk import ChildChunkDeleteIndexError, ChildChunkIndexingError from services.errors.dataset import DatasetNameDuplicateError from services.errors.document import DocumentIndexingError @@ -76,6 +76,8 @@ from tasks.remove_document_from_index_task import remove_document_from_index_tas from tasks.retry_document_indexing_task import retry_document_indexing_task from tasks.sync_website_document_indexing_task import sync_website_document_indexing_task +logger = logging.getLogger(__name__) + class DatasetService: @staticmethod @@ -250,6 +252,11 @@ class DatasetService: dataset: Optional[Dataset] = db.session.query(Dataset).filter_by(id=dataset_id).first() return dataset + @staticmethod + def check_doc_form(dataset: Dataset, doc_form: str): + if dataset.doc_form and doc_form != dataset.doc_form: + raise ValueError("doc_form is different from the dataset doc_form.") + @staticmethod def check_dataset_model_setting(dataset): if dataset.indexing_technique == "high_quality": @@ -610,7 +617,7 @@ class DatasetService: ) except ProviderTokenNotInitError: # If we can't get the embedding model, preserve existing settings - logging.warning( + logger.warning( "Failed to initialize embedding model %s/%s, preserving existing settings", data["embedding_model_provider"], data["embedding_model"], @@ -648,19 +655,17 @@ class DatasetService: @staticmethod def dataset_use_check(dataset_id) -> bool: - count = db.session.query(AppDatasetJoin).filter_by(dataset_id=dataset_id).count() - if count > 0: - return True - return False + stmt = select(exists().where(AppDatasetJoin.dataset_id == dataset_id)) + 
return db.session.execute(stmt).scalar_one() @staticmethod def check_dataset_permission(dataset, user): if dataset.tenant_id != user.current_tenant_id: - logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) + logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") if user.current_role != TenantAccountRole.OWNER: if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id: - logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) + logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM: # For partial team permission, user needs explicit permission or be the creator @@ -669,7 +674,7 @@ class DatasetService: db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first() ) if not user_permission: - logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) + logger.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") @staticmethod @@ -1085,6 +1090,8 @@ class DocumentService: dataset_process_rule: Optional[DatasetProcessRule] = None, created_from: str = "web", ): + # check doc_form + DatasetService.check_doc_form(dataset, knowledge_config.doc_form) # check document limit features = FeatureService.get_features(current_user.current_tenant_id) @@ -1183,7 +1190,7 @@ class DocumentService: created_by=account.id, ) else: - logging.warning( + logger.warning( "Invalid process rule mode: %s, can not find dataset process rule", process_rule.mode, ) @@ -1227,7 +1234,7 @@ class DocumentService: ) if document: document.dataset_process_rule_id = dataset_process_rule.id # type: ignore - document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.updated_at = naive_utc_now() document.created_from = created_from document.doc_form = knowledge_config.doc_form document.doc_language = knowledge_config.doc_language @@ -1545,7 +1552,7 @@ class DocumentService: document.parsing_completed_at = None document.cleaning_completed_at = None document.splitting_completed_at = None - document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.updated_at = naive_utc_now() document.created_from = created_from document.doc_form = document_data.doc_form db.session.add(document) @@ -1800,14 +1807,16 @@ class DocumentService: raise ValueError("Process rule segmentation max_tokens is invalid") @staticmethod - def batch_update_document_status(dataset: Dataset, document_ids: list[str], action: str, user): + def batch_update_document_status( + dataset: Dataset, document_ids: list[str], action: Literal["enable", "disable", "archive", "un_archive"], user + ): """ Batch update document status. 
Args: dataset (Dataset): The dataset object document_ids (list[str]): List of document IDs to update - action (str): Action to perform (enable, disable, archive, un_archive) + action (Literal["enable", "disable", "archive", "un_archive"]): Action to perform user: Current user performing the action Raises: @@ -1873,7 +1882,7 @@ class DocumentService: task_func.delay(*task_args) except Exception as e: # Log the error but do not rollback the transaction - logging.exception("Error executing async task for document %s", update_info["document"].id) + logger.exception("Error executing async task for document %s", update_info["document"].id) # don't raise the error immediately, but capture it for later propagation_error = e try: @@ -1884,15 +1893,16 @@ class DocumentService: redis_client.setex(indexing_cache_key, 600, 1) except Exception as e: # Log the error but do not rollback the transaction - logging.exception("Error setting cache for document %s", update_info["document"].id) + logger.exception("Error setting cache for document %s", update_info["document"].id) # Raise any propagation error after all updates if propagation_error: raise propagation_error @staticmethod - def _prepare_document_status_update(document, action: str, user): - """ - Prepare document status update information. + def _prepare_document_status_update( + document: Document, action: Literal["enable", "disable", "archive", "un_archive"], user + ): + """Prepare document status update information. Args: document: Document object to update @@ -1902,7 +1912,7 @@ class DocumentService: Returns: dict: Update information or None if no update needed """ - now = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + now = naive_utc_now() if action == "enable": return DocumentService._prepare_enable_update(document, now) @@ -2030,8 +2040,8 @@ class SegmentService: word_count=len(content), tokens=tokens, status="completed", - indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + indexing_at=naive_utc_now(), + completed_at=naive_utc_now(), created_by=current_user.id, ) if document.doc_form == "qa_model": @@ -2049,9 +2059,9 @@ class SegmentService: try: VectorService.create_segments_vector([args["keywords"]], [segment_document], dataset, document.doc_form) except Exception as e: - logging.exception("create segment index failed") + logger.exception("create segment index failed") segment_document.enabled = False - segment_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment_document.disabled_at = naive_utc_now() segment_document.status = "error" segment_document.error = str(e) db.session.commit() @@ -2107,8 +2117,8 @@ class SegmentService: tokens=tokens, keywords=segment_item.get("keywords", []), status="completed", - indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), - completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + indexing_at=naive_utc_now(), + completed_at=naive_utc_now(), created_by=current_user.id, ) if document.doc_form == "qa_model": @@ -2132,10 +2142,10 @@ class SegmentService: # save vector index VectorService.create_segments_vector(keywords_list, pre_segment_data_list, dataset, document.doc_form) except Exception as e: - logging.exception("create segment index failed") + logger.exception("create segment index failed") for segment_document in segment_data_list: segment_document.enabled = False - segment_document.disabled_at = 
datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment_document.disabled_at = naive_utc_now() segment_document.status = "error" segment_document.error = str(e) db.session.commit() @@ -2152,7 +2162,7 @@ class SegmentService: if segment.enabled != action: if not action: segment.enabled = action - segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.disabled_at = naive_utc_now() segment.disabled_by = current_user.id db.session.add(segment) db.session.commit() @@ -2250,10 +2260,10 @@ class SegmentService: segment.word_count = len(content) segment.tokens = tokens segment.status = "completed" - segment.indexing_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - segment.completed_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.indexing_at = naive_utc_now() + segment.completed_at = naive_utc_now() segment.updated_by = current_user.id - segment.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.updated_at = naive_utc_now() segment.enabled = True segment.disabled_at = None segment.disabled_by = None @@ -2304,9 +2314,9 @@ class SegmentService: VectorService.update_segment_vector(args.keywords, segment, dataset) except Exception as e: - logging.exception("update segment index failed") + logger.exception("update segment index failed") segment.enabled = False - segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.disabled_at = naive_utc_now() segment.status = "error" segment.error = str(e) db.session.commit() @@ -2334,13 +2344,9 @@ class SegmentService: @classmethod def delete_segments(cls, segment_ids: list, document: Document, dataset: Dataset): - # Check if segment_ids is not empty to avoid WHERE false condition - if not segment_ids or len(segment_ids) == 0: - return - index_node_ids = ( - db.session.query(DocumentSegment) - .with_entities(DocumentSegment.index_node_id) - .where( + segments = ( + db.session.query(DocumentSegment.index_node_id, DocumentSegment.word_count) + .filter( DocumentSegment.id.in_(segment_ids), DocumentSegment.dataset_id == dataset.id, DocumentSegment.document_id == document.id, @@ -2348,14 +2354,24 @@ class SegmentService: ) .all() ) - index_node_ids = [index_node_id[0] for index_node_id in index_node_ids] + + if not segments: + return + + index_node_ids = [seg.index_node_id for seg in segments] + total_words = sum(seg.word_count for seg in segments) + + document.word_count -= total_words + db.session.add(document) delete_segment_from_index_task.delay(index_node_ids, dataset.id, document.id) db.session.query(DocumentSegment).where(DocumentSegment.id.in_(segment_ids)).delete() db.session.commit() @classmethod - def update_segments_status(cls, segment_ids: list, action: str, dataset: Dataset, document: Document): + def update_segments_status( + cls, segment_ids: list, action: Literal["enable", "disable"], dataset: Dataset, document: Document + ): # Check if segment_ids is not empty to avoid WHERE false condition if not segment_ids or len(segment_ids) == 0: return @@ -2406,15 +2422,13 @@ class SegmentService: if cache_result is not None: continue segment.enabled = False - segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.disabled_at = naive_utc_now() segment.disabled_by = current_user.id db.session.add(segment) real_deal_segment_ids.append(segment.id) db.session.commit() disable_segments_from_index_task.delay(real_deal_segment_ids, dataset.id, document.id) - else: - raise InvalidActionError() 
@classmethod def create_child_chunk( @@ -2424,16 +2438,6 @@ class SegmentService: with redis_client.lock(lock_name, timeout=20): index_node_id = str(uuid.uuid4()) index_node_hash = helper.generate_text_hash(content) - child_chunk_count = ( - db.session.query(ChildChunk) - .where( - ChildChunk.tenant_id == current_user.current_tenant_id, - ChildChunk.dataset_id == dataset.id, - ChildChunk.document_id == document.id, - ChildChunk.segment_id == segment.id, - ) - .count() - ) max_position = ( db.session.query(func.max(ChildChunk.position)) .where( @@ -2462,7 +2466,7 @@ class SegmentService: try: VectorService.create_child_chunk_vector(child_chunk, dataset) except Exception as e: - logging.exception("create child chunk index failed") + logger.exception("create child chunk index failed") db.session.rollback() raise ChildChunkIndexingError(str(e)) db.session.commit() @@ -2498,7 +2502,7 @@ class SegmentService: child_chunk.content = child_chunk_update_args.content child_chunk.word_count = len(child_chunk.content) child_chunk.updated_by = current_user.id - child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + child_chunk.updated_at = naive_utc_now() child_chunk.type = "customized" update_child_chunks.append(child_chunk) else: @@ -2537,7 +2541,7 @@ class SegmentService: VectorService.update_child_chunk_vector(new_child_chunks, update_child_chunks, delete_child_chunks, dataset) db.session.commit() except Exception as e: - logging.exception("update child chunk index failed") + logger.exception("update child chunk index failed") db.session.rollback() raise ChildChunkIndexingError(str(e)) return sorted(new_child_chunks + update_child_chunks, key=lambda x: x.position) @@ -2555,13 +2559,13 @@ class SegmentService: child_chunk.content = content child_chunk.word_count = len(content) child_chunk.updated_by = current_user.id - child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + child_chunk.updated_at = naive_utc_now() child_chunk.type = "customized" db.session.add(child_chunk) VectorService.update_child_chunk_vector([], [child_chunk], [], dataset) db.session.commit() except Exception as e: - logging.exception("update child chunk index failed") + logger.exception("update child chunk index failed") db.session.rollback() raise ChildChunkIndexingError(str(e)) return child_chunk @@ -2572,7 +2576,7 @@ class SegmentService: try: VectorService.delete_child_chunk_vector(child_chunk, dataset) except Exception as e: - logging.exception("delete child chunk index failed") + logger.exception("delete child chunk index failed") db.session.rollback() raise ChildChunkDeleteIndexError(str(e)) db.session.commit() diff --git a/api/services/enterprise/mail_service.py b/api/services/enterprise/mail_service.py deleted file mode 100644 index 630e7679ac..0000000000 --- a/api/services/enterprise/mail_service.py +++ /dev/null @@ -1,18 +0,0 @@ -from pydantic import BaseModel - -from tasks.mail_enterprise_task import send_enterprise_email_task - - -class DifyMail(BaseModel): - to: list[str] - subject: str - body: str - substitutions: dict[str, str] = {} - - -class EnterpriseMailService: - @classmethod - def send_mail(cls, mail: DifyMail): - send_enterprise_email_task.delay( - to=mail.to, subject=mail.subject, body=mail.body, substitutions=mail.substitutions - ) diff --git a/api/services/entities/model_provider_entities.py b/api/services/entities/model_provider_entities.py index bc385b2e22..056decda26 100644 --- a/api/services/entities/model_provider_entities.py +++ 
b/api/services/entities/model_provider_entities.py @@ -8,7 +8,12 @@ from core.entities.model_entities import ( ModelWithProviderEntity, ProviderModelWithStatusEntity, ) -from core.entities.provider_entities import ProviderQuotaType, QuotaConfiguration +from core.entities.provider_entities import ( + CredentialConfiguration, + CustomModelConfiguration, + ProviderQuotaType, + QuotaConfiguration, +) from core.model_runtime.entities.common_entities import I18nObject from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.provider_entities import ( @@ -36,6 +41,10 @@ class CustomConfigurationResponse(BaseModel): """ status: CustomConfigurationStatus + current_credential_id: Optional[str] = None + current_credential_name: Optional[str] = None + available_credentials: Optional[list[CredentialConfiguration]] = None + custom_models: Optional[list[CustomModelConfiguration]] = None class SystemConfigurationResponse(BaseModel): diff --git a/api/services/errors/app_model_config.py b/api/services/errors/app_model_config.py index c0669ed231..bb5eb62b75 100644 --- a/api/services/errors/app_model_config.py +++ b/api/services/errors/app_model_config.py @@ -3,3 +3,7 @@ from services.errors.base import BaseServiceError class AppModelConfigBrokenError(BaseServiceError): pass + + +class ProviderNotFoundError(BaseServiceError): + pass diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py index 2f1babba6f..fcf57070ee 100644 --- a/api/services/external_knowledge_service.py +++ b/api/services/external_knowledge_service.py @@ -9,6 +9,7 @@ from sqlalchemy import select from constants import HIDDEN_VALUE from core.helper import ssrf_proxy from core.rag.entities.metadata_entities import MetadataCondition +from core.workflow.nodes.http_request.exc import InvalidHttpMethodError from extensions.ext_database import db from libs.datetime_utils import naive_utc_now from models.dataset import ( @@ -185,9 +186,19 @@ class ExternalDatasetService: "follow_redirects": True, } - response: httpx.Response = getattr(ssrf_proxy, settings.request_method)( - data=json.dumps(settings.params), files=files, **kwargs - ) + _METHOD_MAP = { + "get": ssrf_proxy.get, + "head": ssrf_proxy.head, + "post": ssrf_proxy.post, + "put": ssrf_proxy.put, + "delete": ssrf_proxy.delete, + "patch": ssrf_proxy.patch, + } + method_lc = settings.request_method.lower() + if method_lc not in _METHOD_MAP: + raise InvalidHttpMethodError(f"Invalid http method {settings.request_method}") + + response: httpx.Response = _METHOD_MAP[method_lc](data=json.dumps(settings.params), files=files, **kwargs) return response @staticmethod diff --git a/api/services/file_service.py b/api/services/file_service.py index e234c2f325..4c0a0f451c 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -1,4 +1,3 @@ -import datetime import hashlib import os import uuid @@ -18,6 +17,7 @@ from core.file import helpers as file_helpers from core.rag.extractor.extract_processor import ExtractProcessor from extensions.ext_database import db from extensions.ext_storage import storage +from libs.datetime_utils import naive_utc_now from libs.helper import extract_tenant_id from models.account import Account from models.enums import CreatorUserRole @@ -80,7 +80,7 @@ class FileService: mime_type=mimetype, created_by_role=(CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER), created_by=user.id, - 
created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + created_at=naive_utc_now(), used=False, hash=hashlib.sha3_256(content).hexdigest(), source_url=source_url, @@ -131,10 +131,10 @@ class FileService: mime_type="text/plain", created_by=current_user.id, created_by_role=CreatorUserRole.ACCOUNT, - created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + created_at=naive_utc_now(), used=True, used_by=current_user.id, - used_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + used_at=naive_utc_now(), ) db.session.add(upload_file) diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index 5a3f504035..1517ca6594 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -12,6 +12,8 @@ from extensions.ext_database import db from models.account import Account from models.dataset import Dataset, DatasetQuery +logger = logging.getLogger(__name__) + default_retrieval_model = { "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, "reranking_enable": False, @@ -77,7 +79,7 @@ class HitTestingService: ) end = time.perf_counter() - logging.debug("Hit testing retrieve in %s seconds", end - start) + logger.debug("Hit testing retrieve in %s seconds", end - start) dataset_query = DatasetQuery( dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id @@ -113,7 +115,7 @@ class HitTestingService: ) end = time.perf_counter() - logging.debug("External knowledge hit testing retrieve in %s seconds", end - start) + logger.debug("External knowledge hit testing retrieve in %s seconds", end - start) dataset_query = DatasetQuery( dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py index 2a83588f41..05fa5a95bc 100644 --- a/api/services/metadata_service.py +++ b/api/services/metadata_service.py @@ -1,5 +1,4 @@ import copy -import datetime import logging from typing import Optional @@ -8,6 +7,7 @@ from flask_login import current_user from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding from services.dataset_service import DocumentService from services.entities.knowledge_entities.knowledge_entities import ( @@ -15,6 +15,8 @@ from services.entities.knowledge_entities.knowledge_entities import ( MetadataOperationData, ) +logger = logging.getLogger(__name__) + class MetadataService: @staticmethod @@ -69,7 +71,7 @@ class MetadataService: old_name = metadata.name metadata.name = name metadata.updated_by = current_user.id - metadata.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + metadata.updated_at = naive_utc_now() # update related documents dataset_metadata_bindings = ( @@ -90,7 +92,7 @@ class MetadataService: db.session.commit() return metadata # type: ignore except Exception: - logging.exception("Update metadata name failed") + logger.exception("Update metadata name failed") finally: redis_client.delete(lock_key) @@ -122,7 +124,7 @@ class MetadataService: db.session.commit() return metadata except Exception: - logging.exception("Delete metadata failed") + logger.exception("Delete metadata failed") finally: redis_client.delete(lock_key) @@ -161,7 +163,7 @@ 
class MetadataService: dataset.built_in_field_enabled = True db.session.commit() except Exception: - logging.exception("Enable built-in field failed") + logger.exception("Enable built-in field failed") finally: redis_client.delete(lock_key) @@ -192,7 +194,7 @@ class MetadataService: dataset.built_in_field_enabled = False db.session.commit() except Exception: - logging.exception("Disable built-in field failed") + logger.exception("Disable built-in field failed") finally: redis_client.delete(lock_key) @@ -230,7 +232,7 @@ class MetadataService: db.session.add(dataset_metadata_binding) db.session.commit() except Exception: - logging.exception("Update documents metadata failed") + logger.exception("Update documents metadata failed") finally: redis_client.delete(lock_key) diff --git a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py index fe28aa006e..2145b4cdd5 100644 --- a/api/services/model_load_balancing_service.py +++ b/api/services/model_load_balancing_service.py @@ -1,4 +1,3 @@ -import datetime import json import logging from json import JSONDecodeError @@ -17,7 +16,8 @@ from core.model_runtime.entities.provider_entities import ( from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager from extensions.ext_database import db -from models.provider import LoadBalancingModelConfig +from libs.datetime_utils import naive_utc_now +from models.provider import LoadBalancingModelConfig, ProviderCredential, ProviderModelCredential logger = logging.getLogger(__name__) @@ -185,6 +185,7 @@ class ModelLoadBalancingService: "id": load_balancing_config.id, "name": load_balancing_config.name, "credentials": credentials, + "credential_id": load_balancing_config.credential_id, "enabled": load_balancing_config.enabled, "in_cooldown": in_cooldown, "ttl": ttl, @@ -280,7 +281,7 @@ class ModelLoadBalancingService: return inherit_config def update_load_balancing_configs( - self, tenant_id: str, provider: str, model: str, model_type: str, configs: list[dict] + self, tenant_id: str, provider: str, model: str, model_type: str, configs: list[dict], config_from: str ) -> None: """ Update load balancing configurations. 
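Many hunks in this section swap the repeated expression `datetime.datetime.now(datetime.UTC).replace(tzinfo=None)` for `naive_utc_now()` from `libs.datetime_utils`. That module is not shown in this diff, so the following is only a plausible sketch of the helper, not its actual source:

```python
import datetime


def naive_utc_now() -> datetime.datetime:
    """Current UTC wall-clock time as a naive datetime.

    Assumed to be equivalent to the expression it replaces throughout
    this diff: UTC "now" with the tzinfo stripped, matching columns
    that store naive UTC timestamps.
    """
    return datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
```

Centralizing the expression keeps the naive-UTC convention in one place, so a later migration to timezone-aware timestamps would touch a single helper rather than every model and service.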
@@ -289,6 +290,7 @@ class ModelLoadBalancingService: :param model: model name :param model_type: model type :param configs: load balancing configs + :param config_from: predefined-model or custom-model :return: """ # Get all provider configurations of the current workspace @@ -327,8 +329,37 @@ class ModelLoadBalancingService: config_id = config.get("id") name = config.get("name") credentials = config.get("credentials") + credential_id = config.get("credential_id") enabled = config.get("enabled") + if credential_id: + credential_record: ProviderCredential | ProviderModelCredential | None = None + if config_from == "predefined-model": + credential_record = ( + db.session.query(ProviderCredential) + .filter_by( + id=credential_id, + tenant_id=tenant_id, + provider_name=provider_configuration.provider.provider, + ) + .first() + ) + else: + credential_record = ( + db.session.query(ProviderModelCredential) + .filter_by( + id=credential_id, + tenant_id=tenant_id, + provider_name=provider_configuration.provider.provider, + model_name=model, + model_type=model_type_enum.to_origin_model_type(), + ) + .first() + ) + if not credential_record: + raise ValueError(f"Provider credential with id {credential_id} not found") + name = credential_record.credential_name + if not name: raise ValueError("Invalid load balancing config name") @@ -346,11 +377,6 @@ class ModelLoadBalancingService: load_balancing_config = current_load_balancing_configs_dict[config_id] - # check duplicate name - for current_load_balancing_config in current_load_balancing_configs: - if current_load_balancing_config.id != config_id and current_load_balancing_config.name == name: - raise ValueError(f"Load balancing config name {name} already exists") - if credentials: if not isinstance(credentials, dict): raise ValueError("Invalid load balancing config credentials") @@ -371,45 +397,54 @@ class ModelLoadBalancingService: load_balancing_config.name = name load_balancing_config.enabled = enabled - load_balancing_config.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + load_balancing_config.updated_at = naive_utc_now() db.session.commit() self._clear_credentials_cache(tenant_id, config_id) else: # create load balancing config - if name == "__inherit__": + if name in {"__inherit__", "__delete__"}: raise ValueError("Invalid load balancing config name") - # check duplicate name - for current_load_balancing_config in current_load_balancing_configs: - if current_load_balancing_config.name == name: - raise ValueError(f"Load balancing config name {name} already exists") + if credential_id: + credential_source = "provider" if config_from == "predefined-model" else "custom_model" + assert credential_record is not None + load_balancing_model_config = LoadBalancingModelConfig( + tenant_id=tenant_id, + provider_name=provider_configuration.provider.provider, + model_type=model_type_enum.to_origin_model_type(), + model_name=model, + name=credential_record.credential_name, + encrypted_config=credential_record.encrypted_config, + credential_id=credential_id, + credential_source_type=credential_source, + ) + else: + if not credentials: + raise ValueError("Invalid load balancing config credentials") - if not credentials: - raise ValueError("Invalid load balancing config credentials") + if not isinstance(credentials, dict): + raise ValueError("Invalid load balancing config credentials") - if not isinstance(credentials, dict): - raise ValueError("Invalid load balancing config credentials") + # validate custom provider config + credentials = 
self._custom_credentials_validate( + tenant_id=tenant_id, + provider_configuration=provider_configuration, + model_type=model_type_enum, + model=model, + credentials=credentials, + validate=False, + ) - # validate custom provider config - credentials = self._custom_credentials_validate( - tenant_id=tenant_id, - provider_configuration=provider_configuration, - model_type=model_type_enum, - model=model, - credentials=credentials, - validate=False, - ) - - # create load balancing config - load_balancing_model_config = LoadBalancingModelConfig( - tenant_id=tenant_id, - provider_name=provider_configuration.provider.provider, - model_type=model_type_enum.to_origin_model_type(), - model_name=model, - name=name, - encrypted_config=json.dumps(credentials), - ) + # create load balancing config + load_balancing_model_config = LoadBalancingModelConfig( + tenant_id=tenant_id, + provider_name=provider_configuration.provider.provider, + model_type=model_type_enum.to_origin_model_type(), + model_name=model, + name=name, + encrypted_config=json.dumps(credentials), + ) db.session.add(load_balancing_model_config) db.session.commit() diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py index 54197bf949..9e9422f9f7 100644 --- a/api/services/model_provider_service.py +++ b/api/services/model_provider_service.py @@ -1,7 +1,7 @@ import logging from typing import Optional -from core.entities.model_entities import ModelStatus, ModelWithProviderEntity, ProviderModelWithStatusEntity +from core.entities.model_entities import ModelWithProviderEntity, ProviderModelWithStatusEntity from core.model_runtime.entities.model_entities import ModelType, ParameterRule from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory from core.provider_manager import ProviderManager @@ -16,6 +16,7 @@ from services.entities.model_provider_entities import ( SimpleProviderEntityResponse, SystemConfigurationResponse, ) +from services.errors.app_model_config import ProviderNotFoundError logger = logging.getLogger(__name__) @@ -28,6 +29,29 @@ class ModelProviderService: def __init__(self) -> None: self.provider_manager = ProviderManager() + def _get_provider_configuration(self, tenant_id: str, provider: str): + """ + Get provider configuration or raise exception if not found. + + Args: + tenant_id: Workspace identifier + provider: Provider name + + Returns: + Provider configuration instance + + Raises: + ProviderNotFoundError: If provider doesn't exist + """ + # Get all provider configurations of the current workspace + provider_configurations = self.provider_manager.get_configurations(tenant_id) + provider_configuration = provider_configurations.get(provider) + + if not provider_configuration: + raise ProviderNotFoundError(f"Provider {provider} does not exist.") + + return provider_configuration + def get_provider_list(self, tenant_id: str, model_type: Optional[str] = None) -> list[ProviderResponse]: """ get provider list. 
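The hunk above extracts the "fetch the provider configuration or fail" boilerplate that every public method of `ModelProviderService` used to repeat, raising the new typed `ProviderNotFoundError` instead of an ad-hoc `ValueError`. The self-contained sketch below shows the shape of that refactor; only the `_get_provider_configuration` and `ProviderNotFoundError` names mirror the diff, and the stub types stand in for the real configuration objects.

```python
class ProviderNotFoundError(Exception):
    """Raised when a workspace has no configuration for a provider."""


class ProviderService:
    def __init__(self, configurations: dict[str, dict]) -> None:
        self._configurations = configurations

    def _get_provider_configuration(self, provider: str) -> dict:
        # One shared lookup replaces the repeated get() + None-check + raise.
        configuration = self._configurations.get(provider)
        if configuration is None:
            raise ProviderNotFoundError(f"Provider {provider} does not exist.")
        return configuration

    def get_provider_credential(self, provider: str) -> dict:
        # Public methods become thin wrappers over the shared lookup.
        return self._get_provider_configuration(provider)


service = ProviderService({"openai": {"api_key": "sk-..."}})
print(service.get_provider_credential("openai"))
```

A caller-visible benefit is that controllers can catch one well-typed error (in the diff, `ProviderNotFoundError` derives from `BaseServiceError`) instead of string-matching a generic `ValueError`.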
@@ -46,6 +70,9 @@ class ModelProviderService: if model_type_entity not in provider_configuration.provider.supported_model_types: continue + provider_config = provider_configuration.custom_configuration.provider + model_config = provider_configuration.custom_configuration.models + provider_response = ProviderResponse( tenant_id=tenant_id, provider=provider_configuration.provider.provider, @@ -63,7 +90,11 @@ class ModelProviderService: custom_configuration=CustomConfigurationResponse( status=CustomConfigurationStatus.ACTIVE if provider_configuration.is_custom_configuration_available() - else CustomConfigurationStatus.NO_CONFIGURE + else CustomConfigurationStatus.NO_CONFIGURE, + current_credential_id=getattr(provider_config, "current_credential_id", None), + current_credential_name=getattr(provider_config, "current_credential_name", None), + available_credentials=getattr(provider_config, "available_credentials", []), + custom_models=model_config, ), system_configuration=SystemConfigurationResponse( enabled=provider_configuration.system_configuration.enabled, @@ -82,8 +113,8 @@ class ModelProviderService: For the model provider page, only supports passing in a single provider to query the list of supported models. - :param tenant_id: - :param provider: + :param tenant_id: workspace id + :param provider: provider name :return: """ # Get all provider configurations of the current workspace @@ -95,98 +126,111 @@ class ModelProviderService: for model in provider_configurations.get_models(provider=provider) ] - def get_provider_credentials(self, tenant_id: str, provider: str) -> Optional[dict]: + def get_provider_credential( + self, tenant_id: str, provider: str, credential_id: Optional[str] = None + ) -> Optional[dict]: """ get provider credentials. - """ - provider_configurations = self.provider_manager.get_configurations(tenant_id) - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - return provider_configuration.get_custom_credentials(obfuscated=True) - - def provider_credentials_validate(self, tenant_id: str, provider: str, credentials: dict) -> None: - """ - validate provider credentials. - - :param tenant_id: - :param provider: - :param credentials: - """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - provider_configuration.custom_credentials_validate(credentials) - - def save_provider_credentials(self, tenant_id: str, provider: str, credentials: dict) -> None: - """ - save custom provider config. 
        :param tenant_id: workspace id
        :param provider: provider name
-        :param credentials: provider credentials
+        :param credential_id: credential id, if not provided, return the currently used credential
         :return:
         """
-        # Get all provider configurations of the current workspace
-        provider_configurations = self.provider_manager.get_configurations(tenant_id)
+        provider_configuration = self._get_provider_configuration(tenant_id, provider)
+        return provider_configuration.get_provider_credential(credential_id=credential_id)  # type: ignore
-        # Get provider configuration
-        provider_configuration = provider_configurations.get(provider)
-        if not provider_configuration:
-            raise ValueError(f"Provider {provider} does not exist.")
-
-        # Add or update custom provider credentials.
-        provider_configuration.add_or_update_custom_credentials(credentials)
-
-    def remove_provider_credentials(self, tenant_id: str, provider: str) -> None:
+    def validate_provider_credentials(self, tenant_id: str, provider: str, credentials: dict) -> None:
         """
-        remove custom provider config.
+        validate provider credentials before saving.
 
         :param tenant_id: workspace id
         :param provider: provider name
+        :param credentials: provider credentials dict
+        """
+        provider_configuration = self._get_provider_configuration(tenant_id, provider)
+        provider_configuration.validate_provider_credentials(credentials)
+
+    def create_provider_credential(
+        self, tenant_id: str, provider: str, credentials: dict, credential_name: str
+    ) -> None:
+        """
+        Create and save new provider credentials.
+
+        :param tenant_id: workspace id
+        :param provider: provider name
+        :param credentials: provider credentials dict
+        :param credential_name: credential name
         :return:
         """
-        # Get all provider configurations of the current workspace
-        provider_configurations = self.provider_manager.get_configurations(tenant_id)
+        provider_configuration = self._get_provider_configuration(tenant_id, provider)
+        provider_configuration.create_provider_credential(credentials, credential_name)
 
-        # Get provider configuration
-        provider_configuration = provider_configurations.get(provider)
-        if not provider_configuration:
-            raise ValueError(f"Provider {provider} does not exist.")
-
-        # Remove custom provider credentials.
-        provider_configuration.delete_custom_credentials()
-
-    def get_model_credentials(self, tenant_id: str, provider: str, model_type: str, model: str) -> Optional[dict]:
+    def update_provider_credential(
+        self,
+        tenant_id: str,
+        provider: str,
+        credentials: dict,
+        credential_id: str,
+        credential_name: str,
+    ) -> None:
         """
-        get model credentials.
+        update a saved provider credential (by credential_id).
+
+        :param tenant_id: workspace id
+        :param provider: provider name
+        :param credentials: provider credentials dict
+        :param credential_id: credential id
+        :param credential_name: credential name
+        :return:
+        """
+        provider_configuration = self._get_provider_configuration(tenant_id, provider)
+        provider_configuration.update_provider_credential(
+            credential_id=credential_id,
+            credentials=credentials,
+            credential_name=credential_name,
+        )
+
+    def remove_provider_credential(self, tenant_id: str, provider: str, credential_id: str) -> None:
+        """
+        remove a saved provider credential (by credential_id).
+ :param tenant_id: workspace id + :param provider: provider name + :param credential_id: credential id + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.delete_provider_credential(credential_id=credential_id) + + def switch_active_provider_credential(self, tenant_id: str, provider: str, credential_id: str) -> None: + """ + :param tenant_id: workspace id + :param provider: provider name + :param credential_id: credential id + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.switch_active_provider_credential(credential_id=credential_id) + + def get_model_credential( + self, tenant_id: str, provider: str, model_type: str, model: str, credential_id: str | None + ) -> Optional[dict]: + """ + Retrieve model-specific credentials. :param tenant_id: workspace id :param provider: provider name :param model_type: model type :param model: model name + :param credential_id: Optional credential ID, uses current if not provided :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Get model custom credentials from ProviderModel if exists - return provider_configuration.get_custom_model_credentials( - model_type=ModelType.value_of(model_type), model=model, obfuscated=True + provider_configuration = self._get_provider_configuration(tenant_id, provider) + return provider_configuration.get_custom_model_credential( # type: ignore + model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id ) - def model_credentials_validate( + def validate_model_credentials( self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict ) -> None: """ @@ -196,49 +240,122 @@ class ModelProviderService: :param provider: provider name :param model_type: model type :param model: model name - :param credentials: model credentials + :param credentials: model credentials dict :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Validate model credentials - provider_configuration.custom_model_credentials_validate( + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.validate_custom_model_credentials( model_type=ModelType.value_of(model_type), model=model, credentials=credentials ) - def save_model_credentials( - self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict + def create_model_credential( + self, tenant_id: str, provider: str, model_type: str, model: str, credentials: dict, credential_name: str ) -> None: """ - save model credentials. + create and save model credentials. 
:param tenant_id: workspace id :param provider: provider name :param model_type: model type :param model: model name - :param credentials: model credentials + :param credentials: model credentials dict + :param credential_name: credential name :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Add or update custom model credentials - provider_configuration.add_or_update_custom_model_credentials( - model_type=ModelType.value_of(model_type), model=model, credentials=credentials + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.create_custom_model_credential( + model_type=ModelType.value_of(model_type), + model=model, + credentials=credentials, + credential_name=credential_name, ) - def remove_model_credentials(self, tenant_id: str, provider: str, model_type: str, model: str) -> None: + def update_model_credential( + self, + tenant_id: str, + provider: str, + model_type: str, + model: str, + credentials: dict, + credential_id: str, + credential_name: str, + ) -> None: + """ + update model credentials. + + :param tenant_id: workspace id + :param provider: provider name + :param model_type: model type + :param model: model name + :param credentials: model credentials dict + :param credential_id: credential id + :param credential_name: credential name + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.update_custom_model_credential( + model_type=ModelType.value_of(model_type), + model=model, + credentials=credentials, + credential_id=credential_id, + credential_name=credential_name, + ) + + def remove_model_credential( + self, tenant_id: str, provider: str, model_type: str, model: str, credential_id: str + ) -> None: + """ + remove model credentials. + + :param tenant_id: workspace id + :param provider: provider name + :param model_type: model type + :param model: model name + :param credential_id: credential id + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.delete_custom_model_credential( + model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id + ) + + def switch_active_custom_model_credential( + self, tenant_id: str, provider: str, model_type: str, model: str, credential_id: str + ) -> None: + """ + switch model credentials. + + :param tenant_id: workspace id + :param provider: provider name + :param model_type: model type + :param model: model name + :param credential_id: credential id + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.switch_custom_model_credential( + model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id + ) + + def add_model_credential_to_model_list( + self, tenant_id: str, provider: str, model_type: str, model: str, credential_id: str + ) -> None: + """ + add model credentials to model list. 
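Taken together, the per-model credential methods above and the `add_model_credential_to_model_list` method whose docstring continues below mirror the provider-level set. A hypothetical end-to-end sequence, assuming an initialized application context; only the method names and signatures come from this patch, and every identifier value is an invented placeholder:

service = ModelProviderService()

# Create a named credential for one model, then make it the active one.
service.create_model_credential(
    tenant_id="tenant-1", provider="openai", model_type="llm", model="gpt-4o",
    credentials={"api_key": "sk-..."}, credential_name="primary",
)
service.switch_active_custom_model_credential(
    tenant_id="tenant-1", provider="openai", model_type="llm", model="gpt-4o",
    credential_id="cred-1",  # hypothetical id of the stored credential
)

# Attach the same stored credential to another model entry, then retire it.
service.add_model_credential_to_model_list(
    tenant_id="tenant-1", provider="openai", model_type="llm", model="gpt-4o-mini",
    credential_id="cred-1",
)
service.remove_model_credential(
    tenant_id="tenant-1", provider="openai", model_type="llm", model="gpt-4o-mini",
    credential_id="cred-1",
)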
+ + :param tenant_id: workspace id + :param provider: provider name + :param model_type: model type + :param model: model name + :param credential_id: credential id + :return: + """ + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.add_model_credential_to_model( + model_type=ModelType.value_of(model_type), model=model, credential_id=credential_id + ) + + def remove_model(self, tenant_id: str, provider: str, model_type: str, model: str) -> None: """ remove model credentials. @@ -248,16 +365,8 @@ class ModelProviderService: :param model: model name :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Remove custom model credentials - provider_configuration.delete_custom_model_credentials(model_type=ModelType.value_of(model_type), model=model) + provider_configuration = self._get_provider_configuration(tenant_id, provider) + provider_configuration.delete_custom_model(model_type=ModelType.value_of(model_type), model=model) def get_models_by_model_type(self, tenant_id: str, model_type: str) -> list[ProviderWithModelsResponse]: """ @@ -271,7 +380,7 @@ class ModelProviderService: provider_configurations = self.provider_manager.get_configurations(tenant_id) # Get provider available models - models = provider_configurations.get_models(model_type=ModelType.value_of(model_type)) + models = provider_configurations.get_models(model_type=ModelType.value_of(model_type), only_active=True) # Group models by provider provider_models: dict[str, list[ModelWithProviderEntity]] = {} @@ -282,9 +391,6 @@ class ModelProviderService: if model.deprecated: continue - if model.status != ModelStatus.ACTIVE: - continue - provider_models[model.provider.provider].append(model) # convert to ProviderWithModelsResponse list @@ -331,13 +437,7 @@ class ModelProviderService: :param model: model name :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") + provider_configuration = self._get_provider_configuration(tenant_id, provider) # fetch credentials credentials = provider_configuration.get_current_credentials(model_type=ModelType.LLM, model=model) @@ -424,17 +524,11 @@ class ModelProviderService: :param preferred_provider_type: preferred provider type :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) + provider_configuration = self._get_provider_configuration(tenant_id, provider) # Convert preferred_provider_type to ProviderType preferred_provider_type_enum = ProviderType.value_of(preferred_provider_type) - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - # Switch preferred provider type provider_configuration.switch_preferred_provider_type(preferred_provider_type_enum) @@ -448,15 +542,7 @@ class ModelProviderService: :param model_type: model type :return: 
""" - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Enable model + provider_configuration = self._get_provider_configuration(tenant_id, provider) provider_configuration.enable_model(model=model, model_type=ModelType.value_of(model_type)) def disable_model(self, tenant_id: str, provider: str, model: str, model_type: str) -> None: @@ -469,13 +555,5 @@ class ModelProviderService: :param model_type: model type :return: """ - # Get all provider configurations of the current workspace - provider_configurations = self.provider_manager.get_configurations(tenant_id) - - # Get provider configuration - provider_configuration = provider_configurations.get(provider) - if not provider_configuration: - raise ValueError(f"Provider {provider} does not exist.") - - # Enable model + provider_configuration = self._get_provider_configuration(tenant_id, provider) provider_configuration.disable_model(model=model, model_type=ModelType.value_of(model_type)) diff --git a/api/services/ops_service.py b/api/services/ops_service.py index 7a9db7273e..2596e9f711 100644 --- a/api/services/ops_service.py +++ b/api/services/ops_service.py @@ -134,12 +134,21 @@ class OpsService: # get project url if tracing_provider in ("arize", "phoenix"): - project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) + try: + project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) + except Exception: + project_url = None elif tracing_provider == "langfuse": - project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider) - project_url = f"{tracing_config.get('host')}/project/{project_key}" + try: + project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider) + project_url = f"{tracing_config.get('host')}/project/{project_key}" + except Exception: + project_url = None elif tracing_provider in ("langsmith", "opik"): - project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) + try: + project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider) + except Exception: + project_url = None else: project_url = None diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py index da0fc58566..71bc50017f 100644 --- a/api/services/tools/builtin_tools_manage_service.py +++ b/api/services/tools/builtin_tools_manage_service.py @@ -5,6 +5,7 @@ from collections.abc import Mapping from pathlib import Path from typing import Any, Optional +from sqlalchemy import exists, select from sqlalchemy.orm import Session from configs import dify_config @@ -190,11 +191,14 @@ class BuiltinToolManageService: # update name if provided if name and name != db_provider.name: # check if the name is already used - if ( - session.query(BuiltinToolProvider) - .filter_by(tenant_id=tenant_id, provider=provider, name=name) - .count() - > 0 + if session.scalar( + select( + exists().where( + BuiltinToolProvider.tenant_id == tenant_id, + BuiltinToolProvider.provider == provider, + BuiltinToolProvider.name == name, + ) + ) ): raise ValueError(f"the credential name '{name}' is already used") @@ -246,11 +250,14 @@ class BuiltinToolManageService: ) else: 
# check if the name is already used - if ( - session.query(BuiltinToolProvider) - .filter_by(tenant_id=tenant_id, provider=provider, name=name) - .count() - > 0 + if session.scalar( + select( + exists().where( + BuiltinToolProvider.tenant_id == tenant_id, + BuiltinToolProvider.provider == provider, + BuiltinToolProvider.name == name, + ) + ) ): raise ValueError(f"the credential name '{name}' is already used") @@ -453,7 +460,7 @@ class BuiltinToolManageService: check if oauth system client exists """ tool_provider = ToolProviderID(provider_name) - with Session(db.engine).no_autoflush as session: + with Session(db.engine, autoflush=False) as session: system_client: ToolOAuthSystemClient | None = ( session.query(ToolOAuthSystemClient) .filter_by(plugin_id=tool_provider.plugin_id, provider=tool_provider.provider_name) @@ -467,7 +474,7 @@ class BuiltinToolManageService: check if oauth custom client is enabled """ tool_provider = ToolProviderID(provider) - with Session(db.engine).no_autoflush as session: + with Session(db.engine, autoflush=False) as session: user_client: ToolOAuthTenantClient | None = ( session.query(ToolOAuthTenantClient) .filter_by( @@ -492,7 +499,7 @@ class BuiltinToolManageService: config=[x.to_basic_provider_config() for x in provider_controller.get_oauth_client_schema()], cache=NoOpProviderCredentialCache(), ) - with Session(db.engine).no_autoflush as session: + with Session(db.engine, autoflush=False) as session: user_client: ToolOAuthTenantClient | None = ( session.query(ToolOAuthTenantClient) .filter_by( @@ -546,54 +553,53 @@ class BuiltinToolManageService: # get all builtin providers provider_controllers = ToolManager.list_builtin_providers(tenant_id) - with db.session.no_autoflush: - # get all user added providers - db_providers: list[BuiltinToolProvider] = ToolManager.list_default_builtin_providers(tenant_id) + # get all user added providers + db_providers: list[BuiltinToolProvider] = ToolManager.list_default_builtin_providers(tenant_id) - # rewrite db_providers - for db_provider in db_providers: - db_provider.provider = str(ToolProviderID(db_provider.provider)) + # rewrite db_providers + for db_provider in db_providers: + db_provider.provider = str(ToolProviderID(db_provider.provider)) - # find provider - def find_provider(provider): - return next(filter(lambda db_provider: db_provider.provider == provider, db_providers), None) + # find provider + def find_provider(provider): + return next(filter(lambda db_provider: db_provider.provider == provider, db_providers), None) - result: list[ToolProviderApiEntity] = [] + result: list[ToolProviderApiEntity] = [] - for provider_controller in provider_controllers: - try: - # handle include, exclude - if is_filtered( - include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore - exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore - data=provider_controller, - name_func=lambda x: x.identity.name, - ): - continue + for provider_controller in provider_controllers: + try: + # handle include, exclude + if is_filtered( + include_set=dify_config.POSITION_TOOL_INCLUDES_SET, # type: ignore + exclude_set=dify_config.POSITION_TOOL_EXCLUDES_SET, # type: ignore + data=provider_controller, + name_func=lambda x: x.identity.name, + ): + continue - # convert provider controller to user provider - user_builtin_provider = ToolTransformService.builtin_provider_to_user_provider( - provider_controller=provider_controller, - db_provider=find_provider(provider_controller.entity.identity.name), - decrypt_credentials=True, + # 
convert provider controller to user provider + user_builtin_provider = ToolTransformService.builtin_provider_to_user_provider( + provider_controller=provider_controller, + db_provider=find_provider(provider_controller.entity.identity.name), + decrypt_credentials=True, + ) + + # add icon + ToolTransformService.repack_provider(tenant_id=tenant_id, provider=user_builtin_provider) + + tools = provider_controller.get_tools() + for tool in tools or []: + user_builtin_provider.tools.append( + ToolTransformService.convert_tool_entity_to_api_entity( + tenant_id=tenant_id, + tool=tool, + labels=ToolLabelManager.get_tool_labels(provider_controller), + ) ) - # add icon - ToolTransformService.repack_provider(tenant_id=tenant_id, provider=user_builtin_provider) - - tools = provider_controller.get_tools() - for tool in tools or []: - user_builtin_provider.tools.append( - ToolTransformService.convert_tool_entity_to_api_entity( - tenant_id=tenant_id, - tool=tool, - labels=ToolLabelManager.get_tool_labels(provider_controller), - ) - ) - - result.append(user_builtin_provider) - except Exception as e: - raise e + result.append(user_builtin_provider) + except Exception as e: + raise e return BuiltinToolProviderSort.sort(result) @@ -604,7 +610,7 @@ class BuiltinToolManageService: 1.if the default provider exists, return the default provider 2.if the default provider does not exist, return the oldest provider """ - with Session(db.engine) as session: + with Session(db.engine, autoflush=False) as session: try: full_provider_name = provider_name provider_id_entity = ToolProviderID(provider_name) diff --git a/api/services/tools/tools_manage_service.py b/api/services/tools/tools_manage_service.py index 59d5b50e23..f245dd7527 100644 --- a/api/services/tools/tools_manage_service.py +++ b/api/services/tools/tools_manage_service.py @@ -1,4 +1,5 @@ import logging +from typing import Optional from core.tools.entities.api_entities import ToolProviderTypeApiLiteral from core.tools.tool_manager import ToolManager @@ -9,7 +10,7 @@ logger = logging.getLogger(__name__) class ToolCommonService: @staticmethod - def list_tool_providers(user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral = None): + def list_tool_providers(user_id: str, tenant_id: str, typ: Optional[ToolProviderTypeApiLiteral] = None): """ list tool providers diff --git a/api/services/vector_service.py b/api/services/vector_service.py index f9ec054593..428abdde17 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -13,7 +13,7 @@ from models.dataset import ChildChunk, Dataset, DatasetProcessRule, DocumentSegm from models.dataset import Document as DatasetDocument from services.entities.knowledge_entities.knowledge_entities import ParentMode -_logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) class VectorService: @@ -27,7 +27,7 @@ class VectorService: if doc_form == IndexType.PARENT_CHILD_INDEX: dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first() if not dataset_document: - _logger.warning( + logger.warning( "Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s", segment.document_id, segment.id, diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index a9df8d0d73..bb46bf3090 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -1,7 +1,7 @@ import enum import secrets from datetime import UTC, datetime, timedelta -from typing import Any, Optional, 
cast +from typing import Any, Optional from werkzeug.exceptions import NotFound, Unauthorized @@ -42,7 +42,7 @@ class WebAppAuthService: if account.password is None or not compare_password(password, account.password, account.password_salt): raise AccountPasswordError("Invalid email or password.") - return cast(Account, account) + return account @classmethod def login(cls, account: Account) -> str: @@ -63,7 +63,7 @@ class WebAppAuthService: @classmethod def send_email_code_login_email( - cls, account: Optional[Account] = None, email: Optional[str] = None, language: Optional[str] = "en-US" + cls, account: Optional[Account] = None, email: Optional[str] = None, language: str = "en-US" ): email = account.email if account else email if email is None: @@ -113,7 +113,7 @@ class WebAppAuthService: @classmethod def _get_account_jwt_token(cls, account: Account) -> str: - exp_dt = datetime.now(UTC) + timedelta(hours=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES * 24) + exp_dt = datetime.now(UTC) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES * 24) exp = int(exp_dt.timestamp()) payload = { diff --git a/api/services/workflow/workflow_converter.py b/api/services/workflow/workflow_converter.py index afcf1f7621..00b02f8091 100644 --- a/api/services/workflow/workflow_converter.py +++ b/api/services/workflow/workflow_converter.py @@ -402,7 +402,7 @@ class WorkflowConverter: ) role_prefix = None - prompts: Any = None + prompts: Optional[Any] = None # Chat Model if model_config.mode == LLMMode.CHAT.value: diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py index b52f4924ba..b3b581093e 100644 --- a/api/services/workflow_draft_variable_service.py +++ b/api/services/workflow_draft_variable_service.py @@ -1,5 +1,4 @@ import dataclasses -import datetime import logging from collections.abc import Mapping, Sequence from enum import StrEnum @@ -23,12 +22,13 @@ from core.workflow.nodes.variable_assigner.common.helpers import get_updated_var from core.workflow.variable_loader import VariableLoader from factories.file_factory import StorageKeyLoader from factories.variable_factory import build_segment, segment_to_variable +from libs.datetime_utils import naive_utc_now from models import App, Conversation from models.enums import DraftVariableType from models.workflow import Workflow, WorkflowDraftVariable, is_system_variable_editable from repositories.factory import DifyAPIRepositoryFactory -_logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__) @dataclasses.dataclass(frozen=True) @@ -231,7 +231,7 @@ class WorkflowDraftVariableService: variable.set_name(name) if value is not None: variable.set_value(value) - variable.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + variable.last_edited_at = naive_utc_now() self._session.flush() return variable @@ -242,7 +242,7 @@ class WorkflowDraftVariableService: if conv_var is None: self._session.delete(instance=variable) self._session.flush() - _logger.warning( + logger.warning( "Conversation variable not found for draft variable, id=%s, name=%s", variable.id, variable.name ) return None @@ -263,12 +263,12 @@ class WorkflowDraftVariableService: if variable.node_execution_id is None: self._session.delete(instance=variable) self._session.flush() - _logger.warning("draft variable has no node_execution_id, id=%s, name=%s", variable.id, variable.name) + logger.warning("draft variable has no node_execution_id, id=%s, name=%s", variable.id, variable.name) return None node_exec = 
self._api_node_execution_repo.get_execution_by_id(variable.node_execution_id) if node_exec is None: - _logger.warning( + logger.warning( "Node exectution not found for draft variable, id=%s, name=%s, node_execution_id=%s", variable.id, variable.name, @@ -351,7 +351,7 @@ class WorkflowDraftVariableService: return None segment = draft_var.get_value() if not isinstance(segment, StringSegment): - _logger.warning( + logger.warning( "sys.conversation_id variable is not a string: app_id=%s, id=%s", app_id, draft_var.id, @@ -681,7 +681,7 @@ class DraftVariableSaver: draft_vars = [] for name, value in output.items(): if not self._should_variable_be_saved(name): - _logger.debug( + logger.debug( "Skip saving variable as it has been excluded by its node_type, name=%s, node_type=%s", name, self._node_type, diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index d2715a61fe..3a68379789 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -5,7 +5,7 @@ from collections.abc import Callable, Generator, Mapping, Sequence from typing import Any, Optional, cast from uuid import uuid4 -from sqlalchemy import select +from sqlalchemy import exists, select from sqlalchemy.orm import Session, sessionmaker from core.app.app_config.entities import VariableEntityType @@ -87,15 +87,14 @@ class WorkflowService: ) def is_workflow_exist(self, app_model: App) -> bool: - return ( - db.session.query(Workflow) - .where( + stmt = select( + exists().where( Workflow.tenant_id == app_model.tenant_id, Workflow.app_id == app_model.id, Workflow.version == Workflow.VERSION_DRAFT, ) - .count() - ) > 0 + ) + return db.session.execute(stmt).scalar_one() def get_draft_workflow(self, app_model: App) -> Optional[Workflow]: """ diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py index c5ee4ce3f9..5df9888acc 100644 --- a/api/tasks/add_document_to_index_task.py +++ b/api/tasks/add_document_to_index_task.py @@ -1,18 +1,20 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.rag.models.document import ChildDocument, Document from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import DatasetAutoDisableLog, DocumentSegment from models.dataset import Document as DatasetDocument +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def add_document_to_index_task(dataset_document_id: str): @@ -22,12 +24,12 @@ def add_document_to_index_task(dataset_document_id: str): Usage: add_document_to_index_task.delay(dataset_document_id) """ - logging.info(click.style(f"Start add document to index: {dataset_document_id}", fg="green")) + logger.info(click.style(f"Start add document to index: {dataset_document_id}", fg="green")) start_at = time.perf_counter() dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first() if not dataset_document: - logging.info(click.style(f"Document not found: {dataset_document_id}", fg="red")) + logger.info(click.style(f"Document not found: {dataset_document_id}", fg="red")) db.session.close() return @@ -95,19 +97,19 @@ def add_document_to_index_task(dataset_document_id: str): DocumentSegment.enabled: True, 
DocumentSegment.disabled_at: None, DocumentSegment.disabled_by: None, - DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.updated_at: naive_utc_now(), } ) db.session.commit() end_at = time.perf_counter() - logging.info( + logger.info( click.style(f"Document added to index: {dataset_document.id} latency: {end_at - start_at}", fg="green") ) except Exception as e: - logging.exception("add document to index failed") + logger.exception("add document to index failed") dataset_document.enabled = False - dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + dataset_document.disabled_at = naive_utc_now() dataset_document.indexing_status = "error" dataset_document.error = str(e) db.session.commit() diff --git a/api/tasks/annotation/add_annotation_to_index_task.py b/api/tasks/annotation/add_annotation_to_index_task.py index e436f00133..23c49f2742 100644 --- a/api/tasks/annotation/add_annotation_to_index_task.py +++ b/api/tasks/annotation/add_annotation_to_index_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.datasource.vdb.vector_factory import Vector from core.rag.models.document import Document @@ -10,6 +10,8 @@ from extensions.ext_database import db from models.dataset import Dataset from services.dataset_service import DatasetCollectionBindingService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def add_annotation_to_index_task( @@ -25,7 +27,7 @@ def add_annotation_to_index_task( Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct) """ - logging.info(click.style(f"Start build index for annotation: {annotation_id}", fg="green")) + logger.info(click.style(f"Start build index for annotation: {annotation_id}", fg="green")) start_at = time.perf_counter() try: @@ -48,13 +50,13 @@ def add_annotation_to_index_task( vector.create([document], duplicate_check=True) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("Build index for annotation failed") + logger.exception("Build index for annotation failed") finally: db.session.close() diff --git a/api/tasks/annotation/batch_import_annotations_task.py b/api/tasks/annotation/batch_import_annotations_task.py index 47dc3ee90e..8e46e8d0e3 100644 --- a/api/tasks/annotation/batch_import_annotations_task.py +++ b/api/tasks/annotation/batch_import_annotations_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from werkzeug.exceptions import NotFound from core.rag.datasource.vdb.vector_factory import Vector @@ -13,6 +13,8 @@ from models.dataset import Dataset from models.model import App, AppAnnotationSetting, MessageAnnotation from services.dataset_service import DatasetCollectionBindingService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str, user_id: str): @@ -25,7 +27,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: :param user_id: user_id """ - logging.info(click.style(f"Start batch import annotation: {job_id}", fg="green")) + logger.info(click.style(f"Start batch import annotation: {job_id}", 
fg="green")) start_at = time.perf_counter() indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" # get app info @@ -74,7 +76,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: db.session.commit() redis_client.setex(indexing_cache_key, 600, "completed") end_at = time.perf_counter() - logging.info( + logger.info( click.style( "Build index successful for batch import annotation: {} latency: {}".format( job_id, end_at - start_at @@ -87,6 +89,6 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: redis_client.setex(indexing_cache_key, 600, "error") indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}" redis_client.setex(indexing_error_msg_key, 600, str(e)) - logging.exception("Build index for batch import annotations failed") + logger.exception("Build index for batch import annotations failed") finally: db.session.close() diff --git a/api/tasks/annotation/delete_annotation_index_task.py b/api/tasks/annotation/delete_annotation_index_task.py index f016400e16..aa79c48878 100644 --- a/api/tasks/annotation/delete_annotation_index_task.py +++ b/api/tasks/annotation/delete_annotation_index_task.py @@ -2,20 +2,22 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.datasource.vdb.vector_factory import Vector from extensions.ext_database import db from models.dataset import Dataset from services.dataset_service import DatasetCollectionBindingService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str, collection_binding_id: str): """ Async delete annotation index task """ - logging.info(click.style(f"Start delete app annotation index: {app_id}", fg="green")) + logger.info(click.style(f"Start delete app annotation index: {app_id}", fg="green")) start_at = time.perf_counter() try: dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type( @@ -33,10 +35,10 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"]) vector.delete_by_metadata_field("annotation_id", annotation_id) except Exception: - logging.exception("Delete annotation index failed when annotation deleted.") + logger.exception("Delete annotation index failed when annotation deleted.") end_at = time.perf_counter() - logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("Annotation deleted index failed") + logger.exception("Annotation deleted index failed") finally: db.session.close() diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py index 0076113ce8..c0020b29ed 100644 --- a/api/tasks/annotation/disable_annotation_reply_task.py +++ b/api/tasks/annotation/disable_annotation_reply_task.py @@ -2,7 +2,8 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task +from sqlalchemy import exists, select from core.rag.datasource.vdb.vector_factory import Vector from extensions.ext_database import db @@ -10,26 +11,28 @@ from extensions.ext_redis import redis_client from 
models.dataset import Dataset from models.model import App, AppAnnotationSetting, MessageAnnotation +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): """ Async enable annotation reply task """ - logging.info(click.style(f"Start delete app annotations index: {app_id}", fg="green")) + logger.info(click.style(f"Start delete app annotations index: {app_id}", fg="green")) start_at = time.perf_counter() # get app info app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() - annotations_count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).count() + annotations_exists = db.session.scalar(select(exists().where(MessageAnnotation.app_id == app_id))) if not app: - logging.info(click.style(f"App not found: {app_id}", fg="red")) + logger.info(click.style(f"App not found: {app_id}", fg="red")) db.session.close() return app_annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() if not app_annotation_setting: - logging.info(click.style(f"App annotation setting not found: {app_id}", fg="red")) + logger.info(click.style(f"App annotation setting not found: {app_id}", fg="red")) db.session.close() return @@ -45,11 +48,11 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): ) try: - if annotations_count > 0: + if annotations_exists: vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"]) vector.delete() except Exception: - logging.exception("Delete annotation index failed when annotation deleted.") + logger.exception("Delete annotation index failed when annotation deleted.") redis_client.setex(disable_app_annotation_job_key, 600, "completed") # delete annotation setting @@ -57,9 +60,9 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str): db.session.commit() end_at = time.perf_counter() - logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("Annotation batch deleted index failed") + logger.exception("Annotation batch deleted index failed") redis_client.setex(disable_app_annotation_job_key, 600, "error") disable_app_annotation_error_key = f"disable_app_annotation_error_{str(job_id)}" redis_client.setex(disable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index 44c65c0783..3498e08426 100644 --- a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -1,18 +1,20 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.datasource.vdb.vector_factory import Vector from core.rag.models.document import Document from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset from models.model import App, AppAnnotationSetting, MessageAnnotation from services.dataset_service import DatasetCollectionBindingService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def enable_annotation_reply_task( @@ -27,13 +29,13 
@@ def enable_annotation_reply_task( """ Async enable annotation reply task """ - logging.info(click.style(f"Start add app annotation to index: {app_id}", fg="green")) + logger.info(click.style(f"Start add app annotation to index: {app_id}", fg="green")) start_at = time.perf_counter() # get app info app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() if not app: - logging.info(click.style(f"App not found: {app_id}", fg="red")) + logger.info(click.style(f"App not found: {app_id}", fg="red")) db.session.close() return @@ -68,11 +70,11 @@ def enable_annotation_reply_task( try: old_vector.delete() except Exception as e: - logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) + logger.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) annotation_setting.score_threshold = score_threshold annotation_setting.collection_binding_id = dataset_collection_binding.id annotation_setting.updated_user_id = user_id - annotation_setting.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + annotation_setting.updated_at = naive_utc_now() db.session.add(annotation_setting) else: new_app_annotation_setting = AppAnnotationSetting( @@ -104,14 +106,14 @@ def enable_annotation_reply_task( try: vector.delete_by_metadata_field("app_id", app_id) except Exception as e: - logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) + logger.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) vector.create(documents) db.session.commit() redis_client.setex(enable_app_annotation_job_key, 600, "completed") end_at = time.perf_counter() - logging.info(click.style(f"App annotations added to index: {app_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"App annotations added to index: {app_id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("Annotation batch created index failed") + logger.exception("Annotation batch created index failed") redis_client.setex(enable_app_annotation_job_key, 600, "error") enable_app_annotation_error_key = f"enable_app_annotation_error_{str(job_id)}" redis_client.setex(enable_app_annotation_error_key, 600, str(e)) diff --git a/api/tasks/annotation/update_annotation_to_index_task.py b/api/tasks/annotation/update_annotation_to_index_task.py index 5f11d5aa00..957d8f7e45 100644 --- a/api/tasks/annotation/update_annotation_to_index_task.py +++ b/api/tasks/annotation/update_annotation_to_index_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.datasource.vdb.vector_factory import Vector from core.rag.models.document import Document @@ -10,6 +10,8 @@ from extensions.ext_database import db from models.dataset import Dataset from services.dataset_service import DatasetCollectionBindingService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def update_annotation_to_index_task( @@ -25,7 +27,7 @@ def update_annotation_to_index_task( Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct) """ - logging.info(click.style(f"Start update index for annotation: {annotation_id}", fg="green")) + logger.info(click.style(f"Start update index for annotation: {annotation_id}", fg="green")) start_at = time.perf_counter() try: @@ -49,13 +51,13 @@ def update_annotation_to_index_task( vector.delete_by_metadata_field("annotation_id", 
annotation_id) vector.add_texts([document]) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("Build index for annotation failed") + logger.exception("Build index for annotation failed") finally: db.session.close() diff --git a/api/tasks/batch_clean_document_task.py b/api/tasks/batch_clean_document_task.py index e64a799146..08e2c4a556 100644 --- a/api/tasks/batch_clean_document_task.py +++ b/api/tasks/batch_clean_document_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.tools.utils.web_reader_tool import get_image_upload_file_ids @@ -11,6 +11,8 @@ from extensions.ext_storage import storage from models.dataset import Dataset, DocumentSegment from models.model import UploadFile +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str, file_ids: list[str]): @@ -23,7 +25,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form Usage: batch_clean_document_task.delay(document_ids, dataset_id) """ - logging.info(click.style("Start batch clean documents when documents deleted", fg="green")) + logger.info(click.style("Start batch clean documents when documents deleted", fg="green")) start_at = time.perf_counter() try: @@ -47,7 +49,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form if image_file and image_file.key: storage.delete(image_file.key) except Exception: - logging.exception( + logger.exception( "Delete image_files failed when storage deleted, \ image_upload_file_is: %s", upload_file_id, @@ -62,18 +64,18 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form try: storage.delete(file.key) except Exception: - logging.exception("Delete file failed when document deleted, file_id: %s", file.id) + logger.exception("Delete file failed when document deleted, file_id: %s", file.id) db.session.delete(file) db.session.commit() end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Cleaned documents when documents deleted latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("Cleaned documents when documents deleted failed") + logger.exception("Cleaned documents when documents deleted failed") finally: db.session.close() diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index dee43cd854..8f393ba019 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -1,4 +1,3 @@ -import datetime import logging import tempfile import time @@ -7,7 +6,7 @@ from pathlib import Path import click import pandas as pd -from celery import shared_task # type: ignore +from celery import shared_task from sqlalchemy import func from sqlalchemy.orm import Session @@ -17,10 +16,13 @@ from extensions.ext_database import db from extensions.ext_redis import redis_client from extensions.ext_storage import storage from libs import helper +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment from models.model import UploadFile from services.vector_service import VectorService 
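Two refactors repeat across these task modules: a module-level logger named after the module replaces calls on the root `logging` module, and `naive_utc_now()` replaces the inline `datetime.datetime.now(datetime.UTC).replace(tzinfo=None)` expression. In miniature, under one stated assumption: `libs/datetime_utils.py` itself is not shown in this patch, so the helper body below is inferred from the expression it replaces.

import logging
from datetime import UTC, datetime

logger = logging.getLogger(__name__)  # named per module, so handlers and levels can be configured per package


def naive_utc_now() -> datetime:
    # Assumed implementation: current UTC time with tzinfo stripped,
    # matching the naive-datetime convention of the columns it feeds.
    return datetime.now(UTC).replace(tzinfo=None)


logger.info("segment completed at %s", naive_utc_now())

Routing through a named logger also means these dataset tasks no longer mutate the root logger's behavior, which keeps Celery worker log configuration predictable.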
+logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def batch_create_segment_to_index_task( @@ -42,7 +44,7 @@ def batch_create_segment_to_index_task( Usage: batch_create_segment_to_index_task.delay(job_id, upload_file_id, dataset_id, document_id, tenant_id, user_id) """ - logging.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green")) + logger.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green")) start_at = time.perf_counter() indexing_cache_key = f"segment_batch_import_{job_id}" @@ -123,9 +125,9 @@ def batch_create_segment_to_index_task( word_count=len(content), tokens=tokens, created_by=user_id, - indexing_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + indexing_at=naive_utc_now(), status="completed", - completed_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + completed_at=naive_utc_now(), ) if dataset_document.doc_form == "qa_model": segment_document.answer = segment["answer"] @@ -142,14 +144,14 @@ def batch_create_segment_to_index_task( db.session.commit() redis_client.setex(indexing_cache_key, 600, "completed") end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Segment batch created job: {job_id} latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("Segments batch created index failed") + logger.exception("Segments batch created index failed") redis_client.setex(indexing_cache_key, 600, "error") finally: db.session.close() diff --git a/api/tasks/clean_dataset_task.py b/api/tasks/clean_dataset_task.py index 9a45115b05..a0a19042a3 100644 --- a/api/tasks/clean_dataset_task.py +++ b/api/tasks/clean_dataset_task.py @@ -2,10 +2,10 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory -from core.tools.utils.rag_web_reader import get_image_upload_file_ids +from core.tools.utils.web_reader_tool import get_image_upload_file_ids from extensions.ext_database import db from extensions.ext_storage import storage from models.dataset import ( @@ -20,6 +20,8 @@ from models.dataset import ( ) from models.model import UploadFile +logger = logging.getLogger(__name__) + # Add import statement for ValueError @shared_task(queue="dataset") @@ -42,7 +44,7 @@ def clean_dataset_task( Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct) """ - logging.info(click.style(f"Start clean dataset when dataset deleted: {dataset_id}", fg="green")) + logger.info(click.style(f"Start clean dataset when dataset deleted: {dataset_id}", fg="green")) start_at = time.perf_counter() try: @@ -63,7 +65,7 @@ def clean_dataset_task( from core.rag.index_processor.constant.index_type import IndexType doc_form = IndexType.PARAGRAPH_INDEX - logging.info( + logger.info( click.style(f"Invalid doc_form detected, using default index type for cleanup: {doc_form}", fg="yellow") ) @@ -72,18 +74,18 @@ def clean_dataset_task( try: index_processor = IndexProcessorFactory(doc_form).init_index_processor() index_processor.clean(dataset, None, with_keywords=True, delete_child_chunks=True) - logging.info(click.style(f"Successfully cleaned vector database for dataset: {dataset_id}", fg="green")) + logger.info(click.style(f"Successfully cleaned vector database for dataset: {dataset_id}", fg="green")) except Exception as index_cleanup_error: - logging.exception(click.style(f"Failed to clean vector database for dataset 
{dataset_id}", fg="red")) + logger.exception(click.style(f"Failed to clean vector database for dataset {dataset_id}", fg="red")) # Continue with document and segment deletion even if vector cleanup fails - logging.info( + logger.info( click.style(f"Continuing with document and segment deletion for dataset: {dataset_id}", fg="yellow") ) if documents is None or len(documents) == 0: - logging.info(click.style(f"No documents found for dataset: {dataset_id}", fg="green")) + logger.info(click.style(f"No documents found for dataset: {dataset_id}", fg="green")) else: - logging.info(click.style(f"Cleaning documents for dataset: {dataset_id}", fg="green")) + logger.info(click.style(f"Cleaning documents for dataset: {dataset_id}", fg="green")) for document in documents: db.session.delete(document) @@ -97,7 +99,7 @@ def clean_dataset_task( try: storage.delete(image_file.key) except Exception: - logging.exception( + logger.exception( "Delete image_files failed when storage deleted, \ image_upload_file_is: %s", upload_file_id, @@ -134,7 +136,7 @@ def clean_dataset_task( db.session.commit() end_at = time.perf_counter() - logging.info( + logger.info( click.style(f"Cleaned dataset when dataset deleted: {dataset_id} latency: {end_at - start_at}", fg="green") ) except Exception: @@ -142,10 +144,10 @@ def clean_dataset_task( # This ensures the database session is properly cleaned up try: db.session.rollback() - logging.info(click.style(f"Rolled back database session for dataset: {dataset_id}", fg="yellow")) + logger.info(click.style(f"Rolled back database session for dataset: {dataset_id}", fg="yellow")) except Exception as rollback_error: - logging.exception("Failed to rollback database session") + logger.exception("Failed to rollback database session") - logging.exception("Cleaned dataset when dataset deleted failed") + logger.exception("Cleaned dataset when dataset deleted failed") finally: db.session.close() diff --git a/api/tasks/clean_document_task.py b/api/tasks/clean_document_task.py index d690106d17..6549ad04b5 100644 --- a/api/tasks/clean_document_task.py +++ b/api/tasks/clean_document_task.py @@ -3,15 +3,17 @@ import time from typing import Optional import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory -from core.tools.utils.rag_web_reader import get_image_upload_file_ids +from core.tools.utils.web_reader_tool import get_image_upload_file_ids from extensions.ext_database import db from extensions.ext_storage import storage from models.dataset import Dataset, DatasetMetadataBinding, DocumentSegment from models.model import UploadFile +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_id: Optional[str]): @@ -24,7 +26,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i Usage: clean_document_task.delay(document_id, dataset_id) """ - logging.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green")) + logger.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green")) start_at = time.perf_counter() try: @@ -49,7 +51,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i try: storage.delete(image_file.key) except Exception: - logging.exception( + logger.exception( "Delete image_files failed when storage deleted, \ image_upload_file_is: %s", upload_file_id, @@ 
-64,7 +66,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
             try:
                 storage.delete(file.key)
             except Exception:
-                logging.exception("Delete file failed when document deleted, file_id: %s", file_id)
+                logger.exception("Delete file failed when document deleted, file_id: %s", file_id)
             db.session.delete(file)
             db.session.commit()
@@ -76,13 +78,13 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
             db.session.commit()
 
         end_at = time.perf_counter()
-        logging.info(
+        logger.info(
             click.style(
                 f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
                 fg="green",
             )
         )
     except Exception:
-        logging.exception("Cleaned document when document deleted failed")
+        logger.exception("Cleaned document when document deleted failed")
     finally:
         db.session.close()
diff --git a/api/tasks/clean_notion_document_task.py b/api/tasks/clean_notion_document_task.py
index bf1a92f038..e7a61e22f2 100644
--- a/api/tasks/clean_notion_document_task.py
+++ b/api/tasks/clean_notion_document_task.py
@@ -2,12 +2,14 @@ import logging
 import time
 
 import click
-from celery import shared_task  # type: ignore
+from celery import shared_task
 
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from extensions.ext_database import db
 from models.dataset import Dataset, Document, DocumentSegment
 
+logger = logging.getLogger(__name__)
+
 
 @shared_task(queue="dataset")
 def clean_notion_document_task(document_ids: list[str], dataset_id: str):
@@ -18,9 +20,7 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
 
     Usage: clean_notion_document_task.delay(document_ids, dataset_id)
     """
-    logging.info(
-        click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green")
-    )
+    logger.info(click.style(f"Start clean document when import from notion document deleted: {dataset_id}", fg="green"))
     start_at = time.perf_counter()
 
     try:
@@ -43,7 +43,7 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
             db.session.delete(segment)
         db.session.commit()
         end_at = time.perf_counter()
-        logging.info(
+        logger.info(
             click.style(
                 "Clean document when import form notion document deleted end :: {} latency: {}".format(
                     dataset_id, end_at - start_at
@@ -52,6 +52,6 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
             )
         )
     except Exception:
-        logging.exception("Cleaned document when import form notion document deleted failed")
+        logger.exception("Cleaned document when import from notion document deleted failed")
     finally:
         db.session.close()
diff --git a/api/tasks/create_segment_to_index_task.py b/api/tasks/create_segment_to_index_task.py
index 543a512851..986e9dbc3c 100644
--- a/api/tasks/create_segment_to_index_task.py
+++ b/api/tasks/create_segment_to_index_task.py
@@ -1,17 +1,19 @@
-import datetime
 import logging
 import time
 from typing import Optional
 
 import click
-from celery import shared_task  # type: ignore
+from celery import shared_task
 
 from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
 from core.rag.models.document import Document
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
+from libs.datetime_utils import naive_utc_now
 from models.dataset import DocumentSegment
 
+logger = logging.getLogger(__name__)
+
 
 @shared_task(queue="dataset")
 def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] = None):
@@ -21,12 +23,12 @@ def 
create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] :param keywords: Usage: create_segment_to_index_task.delay(segment_id) """ - logging.info(click.style(f"Start create segment to index: {segment_id}", fg="green")) + logger.info(click.style(f"Start create segment to index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) + logger.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return @@ -41,7 +43,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] db.session.query(DocumentSegment).filter_by(id=segment.id).update( { DocumentSegment.status: "indexing", - DocumentSegment.indexing_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.indexing_at: naive_utc_now(), } ) db.session.commit() @@ -58,17 +60,17 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] dataset = segment.dataset if not dataset: - logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_type = dataset.doc_form @@ -79,17 +81,17 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] db.session.query(DocumentSegment).filter_by(id=segment.id).update( { DocumentSegment.status: "completed", - DocumentSegment.completed_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.completed_at: naive_utc_now(), } ) db.session.commit() end_at = time.perf_counter() - logging.info(click.style(f"Segment created to index: {segment.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segment created to index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("create segment to index failed") + logger.exception("create segment to index failed") segment.enabled = False - segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.disabled_at = naive_utc_now() segment.status = "error" segment.error = str(e) db.session.commit() diff --git a/api/tasks/deal_dataset_vector_index_task.py b/api/tasks/deal_dataset_vector_index_task.py index 8c4c1876ad..23e929c57e 100644 --- a/api/tasks/deal_dataset_vector_index_task.py +++ b/api/tasks/deal_dataset_vector_index_task.py @@ -1,8 +1,9 @@ import logging import time +from typing import Literal import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_factory import IndexProcessorFactory @@ -11,16 +12,18 @@ from extensions.ext_database import db from models.dataset import Dataset, DocumentSegment from models.dataset 
import Document as DatasetDocument +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") -def deal_dataset_vector_index_task(dataset_id: str, action: str): +def deal_dataset_vector_index_task(dataset_id: str, action: Literal["remove", "add", "update"]): """ Async deal dataset from index :param dataset_id: dataset_id :param action: action Usage: deal_dataset_vector_index_task.delay(dataset_id, action) """ - logging.info(click.style(f"Start deal dataset vector index: {dataset_id}", fg="green")) + logger.info(click.style(f"Start deal dataset vector index: {dataset_id}", fg="green")) start_at = time.perf_counter() try: @@ -162,8 +165,8 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): index_processor.clean(dataset, None, with_keywords=False, delete_child_chunks=False) end_at = time.perf_counter() - logging.info(click.style(f"Deal dataset vector index: {dataset_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Deal dataset vector index: {dataset_id} latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("Deal dataset vector index failed") + logger.exception("Deal dataset vector index failed") finally: db.session.close() diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index ef50adf8d5..29f5a2450d 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -1,6 +1,6 @@ import logging -from celery import shared_task # type: ignore +from celery import shared_task from extensions.ext_database import db from models.account import Account diff --git a/api/tasks/delete_conversation_task.py b/api/tasks/delete_conversation_task.py new file mode 100644 index 0000000000..2ba9104a05 --- /dev/null +++ b/api/tasks/delete_conversation_task.py @@ -0,0 +1,70 @@ +import logging +import time + +import click +from celery import shared_task + +from extensions.ext_database import db +from models import ConversationVariable +from models.model import Message, MessageAnnotation, MessageFeedback +from models.tools import ToolConversationVariables, ToolFile +from models.web import PinnedConversation + +logger = logging.getLogger(__name__) + + +@shared_task(queue="conversation") +def delete_conversation_related_data(conversation_id: str) -> None: + """ + Delete conversation-related data in the correct order from the database to respect foreign key constraints + + Args: + conversation_id: conversation ID + """ + + logger.info( + click.style(f"Starting to delete conversation data from db for conversation_id {conversation_id}", fg="green") + ) + start_at = time.perf_counter() + + try: + db.session.query(MessageAnnotation).where(MessageAnnotation.conversation_id == conversation_id).delete( + synchronize_session=False + ) + + db.session.query(MessageFeedback).where(MessageFeedback.conversation_id == conversation_id).delete( + synchronize_session=False + ) + + db.session.query(ToolConversationVariables).where( + ToolConversationVariables.conversation_id == conversation_id + ).delete(synchronize_session=False) + + db.session.query(ToolFile).where(ToolFile.conversation_id == conversation_id).delete(synchronize_session=False) + + db.session.query(ConversationVariable).where(ConversationVariable.conversation_id == conversation_id).delete( + synchronize_session=False + ) + + db.session.query(Message).where(Message.conversation_id == conversation_id).delete(synchronize_session=False) + + db.session.query(PinnedConversation).where(PinnedConversation.conversation_id == 
conversation_id).delete( + synchronize_session=False + ) + + db.session.commit() + + end_at = time.perf_counter() + logger.info( + click.style( + f"Succeeded cleaning data from db for conversation_id {conversation_id} latency: {end_at - start_at}", + fg="green", + ) + ) + + except Exception as e: + logger.exception("Failed to delete data from db for conversation_id: %s", conversation_id) + db.session.rollback() + raise e + finally: + db.session.close() diff --git a/api/tasks/delete_segment_from_index_task.py b/api/tasks/delete_segment_from_index_task.py index da12355d23..0b750cf4db 100644 --- a/api/tasks/delete_segment_from_index_task.py +++ b/api/tasks/delete_segment_from_index_task.py @@ -2,12 +2,14 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db from models.dataset import Dataset, Document +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, document_id: str): @@ -19,7 +21,7 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume Usage: delete_segment_from_index_task.delay(index_node_ids, dataset_id, document_id) """ - logging.info(click.style("Start delete segment from index", fg="green")) + logger.info(click.style("Start delete segment from index", fg="green")) start_at = time.perf_counter() try: dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() @@ -38,8 +40,8 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True) end_at = time.perf_counter() - logging.info(click.style(f"Segment deleted from index latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segment deleted from index latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("delete segment from index failed") + logger.exception("delete segment from index failed") finally: db.session.close() diff --git a/api/tasks/disable_segment_from_index_task.py b/api/tasks/disable_segment_from_index_task.py index fa4ec15f8a..6b5f01b416 100644 --- a/api/tasks/disable_segment_from_index_task.py +++ b/api/tasks/disable_segment_from_index_task.py @@ -2,13 +2,15 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db from extensions.ext_redis import redis_client from models.dataset import DocumentSegment +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def disable_segment_from_index_task(segment_id: str): @@ -18,17 +20,17 @@ def disable_segment_from_index_task(segment_id: str): Usage: disable_segment_from_index_task.delay(segment_id) """ - logging.info(click.style(f"Start disable segment from index: {segment_id}", fg="green")) + logger.info(click.style(f"Start disable segment from index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) + logger.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return if 
segment.status != "completed": - logging.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red")) + logger.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red")) db.session.close() return @@ -38,17 +40,17 @@ def disable_segment_from_index_task(segment_id: str): dataset = segment.dataset if not dataset: - logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_type = dataset_document.doc_form @@ -56,9 +58,9 @@ def disable_segment_from_index_task(segment_id: str): index_processor.clean(dataset, [segment.index_node_id]) end_at = time.perf_counter() - logging.info(click.style(f"Segment removed from index: {segment.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segment removed from index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("remove segment from index failed") + logger.exception("remove segment from index failed") segment.enabled = True db.session.commit() finally: diff --git a/api/tasks/disable_segments_from_index_task.py b/api/tasks/disable_segments_from_index_task.py index f033f05084..d4899fe0e4 100644 --- a/api/tasks/disable_segments_from_index_task.py +++ b/api/tasks/disable_segments_from_index_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db @@ -10,6 +10,8 @@ from extensions.ext_redis import redis_client from models.dataset import Dataset, DocumentSegment from models.dataset import Document as DatasetDocument +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def disable_segments_from_index_task(segment_ids: list, dataset_id: str, document_id: str): @@ -25,18 +27,18 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) + logger.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) db.session.close() return dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: - logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) + logger.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) db.session.close() return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) + logger.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) 
db.session.close() return # sync index processor @@ -61,7 +63,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False) end_at = time.perf_counter() - logging.info(click.style(f"Segments removed from index latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segments removed from index latency: {end_at - start_at}", fg="green")) except Exception: # update segment error msg db.session.query(DocumentSegment).where( diff --git a/api/tasks/document_indexing_sync_task.py b/api/tasks/document_indexing_sync_task.py index 993b2ac404..687e3e9551 100644 --- a/api/tasks/document_indexing_sync_task.py +++ b/api/tasks/document_indexing_sync_task.py @@ -1,17 +1,19 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.indexing_runner import DocumentIsPausedError, IndexingRunner from core.rag.extractor.notion_extractor import NotionExtractor from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment from models.source import DataSourceOauthBinding +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def document_indexing_sync_task(dataset_id: str, document_id: str): @@ -22,13 +24,13 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): Usage: document_indexing_sync_task.delay(dataset_id, document_id) """ - logging.info(click.style(f"Start sync document: {document_id}", fg="green")) + logger.info(click.style(f"Start sync document: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style(f"Document not found: {document_id}", fg="red")) + logger.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return @@ -72,7 +74,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): # check the page is updated if last_edited_time != page_edited_time: document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.processing_started_at = naive_utc_now() db.session.commit() # delete all document segment and index @@ -93,7 +95,7 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): db.session.delete(segment) end_at = time.perf_counter() - logging.info( + logger.info( click.style( "Cleaned document when document update data source or process rule: {} latency: {}".format( document_id, end_at - start_at @@ -102,16 +104,16 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): ) ) except Exception: - logging.exception("Cleaned document when document update data source or process rule failed") + logger.exception("Cleaned document when document update data source or process rule failed") try: indexing_runner = IndexingRunner() indexing_runner.run([document]) end_at = time.perf_counter() - logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) + 
logger.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("document_indexing_sync_task failed, document_id: %s", document_id) + logger.exception("document_indexing_sync_task failed, document_id: %s", document_id) finally: db.session.close() diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py index 728db2e2dc..012ae8f706 100644 --- a/api/tasks/document_indexing_task.py +++ b/api/tasks/document_indexing_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from configs import dify_config from core.indexing_runner import DocumentIsPausedError, IndexingRunner @@ -11,6 +11,8 @@ from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def document_indexing_task(dataset_id: str, document_ids: list): @@ -26,7 +28,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style(f"Dataset is not found: {dataset_id}", fg="yellow")) + logger.info(click.style(f"Dataset is not found: {dataset_id}", fg="yellow")) db.session.close() return # check document limit @@ -60,7 +62,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): return for document_id in document_ids: - logging.info(click.style(f"Start process document: {document_id}", fg="green")) + logger.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() @@ -77,10 +79,10 @@ def document_indexing_task(dataset_id: str, document_ids: list): indexing_runner = IndexingRunner() indexing_runner.run(documents) end_at = time.perf_counter() - logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("Document indexing task failed, dataset_id: %s", dataset_id) + logger.exception("Document indexing task failed, dataset_id: %s", dataset_id) finally: db.session.close() diff --git a/api/tasks/document_indexing_update_task.py b/api/tasks/document_indexing_update_task.py index 053c0c5f41..48566b6104 100644 --- a/api/tasks/document_indexing_update_task.py +++ b/api/tasks/document_indexing_update_task.py @@ -1,15 +1,17 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.indexing_runner import DocumentIsPausedError, IndexingRunner from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def document_indexing_update_task(dataset_id: str, document_id: str): @@ -20,18 +22,18 @@ def document_indexing_update_task(dataset_id: str, document_id: str): Usage: document_indexing_update_task.delay(dataset_id, document_id) """ - logging.info(click.style(f"Start 
update document: {document_id}", fg="green")) + logger.info(click.style(f"Start update document: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style(f"Document not found: {document_id}", fg="red")) + logger.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.processing_started_at = naive_utc_now() db.session.commit() # delete all document segment and index @@ -54,7 +56,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str): db.session.delete(segment) db.session.commit() end_at = time.perf_counter() - logging.info( + logger.info( click.style( "Cleaned document when document update data source or process rule: {} latency: {}".format( document_id, end_at - start_at @@ -63,16 +65,16 @@ def document_indexing_update_task(dataset_id: str, document_id: str): ) ) except Exception: - logging.exception("Cleaned document when document update data source or process rule failed") + logger.exception("Cleaned document when document update data source or process rule failed") try: indexing_runner = IndexingRunner() indexing_runner.run([document]) end_at = time.perf_counter() - logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("document_indexing_update_task failed, document_id: %s", document_id) + logger.exception("document_indexing_update_task failed, document_id: %s", document_id) finally: db.session.close() diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py index faa7e2b8d0..88e8697d17 100644 --- a/api/tasks/duplicate_document_indexing_task.py +++ b/api/tasks/duplicate_document_indexing_task.py @@ -1,17 +1,19 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from configs import dify_config from core.indexing_runner import DocumentIsPausedError, IndexingRunner from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def duplicate_document_indexing_task(dataset_id: str, document_ids: list): @@ -27,7 +29,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset is None: - logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red")) + logger.info(click.style(f"Dataset not found: {dataset_id}", fg="red")) db.session.close() return @@ -55,7 +57,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): if document: document.indexing_status = "error" document.error = str(e) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + 
document.stopped_at = naive_utc_now() db.session.add(document) db.session.commit() return @@ -63,7 +65,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): db.session.close() for document_id in document_ids: - logging.info(click.style(f"Start process document: {document_id}", fg="green")) + logger.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() @@ -86,7 +88,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): db.session.commit() document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.processing_started_at = naive_utc_now() documents.append(document) db.session.add(document) db.session.commit() @@ -95,10 +97,10 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): indexing_runner = IndexingRunner() indexing_runner.run(documents) end_at = time.perf_counter() - logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("duplicate_document_indexing_task failed, dataset_id: %s", dataset_id) + logger.exception("duplicate_document_indexing_task failed, dataset_id: %s", dataset_id) finally: db.session.close() diff --git a/api/tasks/enable_segment_to_index_task.py b/api/tasks/enable_segment_to_index_task.py index f801c9d9ee..07c44f333e 100644 --- a/api/tasks/enable_segment_to_index_task.py +++ b/api/tasks/enable_segment_to_index_task.py @@ -1,17 +1,19 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.rag.models.document import ChildDocument, Document from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import DocumentSegment +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def enable_segment_to_index_task(segment_id: str): @@ -21,17 +23,17 @@ def enable_segment_to_index_task(segment_id: str): Usage: enable_segment_to_index_task.delay(segment_id) """ - logging.info(click.style(f"Start enable segment to index: {segment_id}", fg="green")) + logger.info(click.style(f"Start enable segment to index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) + logger.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return if segment.status != "completed": - logging.info(click.style(f"Segment is not completed, enable is not allowed: {segment_id}", fg="red")) + logger.info(click.style(f"Segment is not completed, enable is not allowed: {segment_id}", fg="red")) db.session.close() return @@ -51,17 +53,17 @@ def enable_segment_to_index_task(segment_id: str): dataset = segment.dataset if not dataset: - 
logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) + logger.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor() @@ -85,11 +87,11 @@ def enable_segment_to_index_task(segment_id: str): index_processor.load(dataset, [document]) end_at = time.perf_counter() - logging.info(click.style(f"Segment enabled to index: {segment.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segment enabled to index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("enable segment to index failed") + logger.exception("enable segment to index failed") segment.enabled = False - segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + segment.disabled_at = naive_utc_now() segment.status = "error" segment.error = str(e) db.session.commit() diff --git a/api/tasks/enable_segments_to_index_task.py b/api/tasks/enable_segments_to_index_task.py index 777380631f..647664641d 100644 --- a/api/tasks/enable_segments_to_index_task.py +++ b/api/tasks/enable_segments_to_index_task.py @@ -1,18 +1,20 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.constant.index_type import IndexType from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from core.rag.models.document import ChildDocument, Document from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, DocumentSegment from models.dataset import Document as DatasetDocument +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_id: str): @@ -27,17 +29,17 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i start_at = time.perf_counter() dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) + logger.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) return dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: - logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) + logger.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) db.session.close() return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) + logger.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) db.session.close() 
return # sync index processor @@ -53,7 +55,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i .all() ) if not segments: - logging.info(click.style(f"Segments not found: {segment_ids}", fg="cyan")) + logger.info(click.style(f"Segments not found: {segment_ids}", fg="cyan")) db.session.close() return @@ -91,9 +93,9 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i index_processor.load(dataset, documents) end_at = time.perf_counter() - logging.info(click.style(f"Segments enabled to index latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Segments enabled to index latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception("enable segments to index failed") + logger.exception("enable segments to index failed") # update segment error msg db.session.query(DocumentSegment).where( DocumentSegment.id.in_(segment_ids), @@ -103,7 +105,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i { "error": str(e), "status": "error", - "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + "disabled_at": naive_utc_now(), "enabled": False, } ) diff --git a/api/tasks/mail_account_deletion_task.py b/api/tasks/mail_account_deletion_task.py index 38b5ca1800..41e8bc9320 100644 --- a/api/tasks/mail_account_deletion_task.py +++ b/api/tasks/mail_account_deletion_task.py @@ -2,11 +2,13 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_deletion_success_task(to: str, language: str = "en-US") -> None: @@ -20,7 +22,7 @@ def send_deletion_success_task(to: str, language: str = "en-US") -> None: if not mail.is_inited(): return - logging.info(click.style(f"Start send account deletion success email to {to}", fg="green")) + logger.info(click.style(f"Start send account deletion success email to {to}", fg="green")) start_at = time.perf_counter() try: @@ -36,11 +38,11 @@ def send_deletion_success_task(to: str, language: str = "en-US") -> None: ) end_at = time.perf_counter() - logging.info( + logger.info( click.style(f"Send account deletion success email to {to}: latency: {end_at - start_at}", fg="green") ) except Exception: - logging.exception("Send account deletion success email to %s failed", to) + logger.exception("Send account deletion success email to %s failed", to) @shared_task(queue="mail") @@ -56,7 +58,7 @@ def send_account_deletion_verification_code(to: str, code: str, language: str = if not mail.is_inited(): return - logging.info(click.style(f"Start send account deletion verification code email to {to}", fg="green")) + logger.info(click.style(f"Start send account deletion verification code email to {to}", fg="green")) start_at = time.perf_counter() try: @@ -72,7 +74,7 @@ def send_account_deletion_verification_code(to: str, code: str, language: str = ) end_at = time.perf_counter() - logging.info( + logger.info( click.style( "Send account deletion verification code email to {} succeeded: latency: {}".format( to, end_at - start_at @@ -81,4 +83,4 @@ def send_account_deletion_verification_code(to: str, code: str, language: str = ) ) except Exception: - logging.exception("Send account deletion verification code email to %s failed", to) + logger.exception("Send account deletion verification code email to %s failed", to) 
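A pattern repeated across the dataset tasks above is replacing the inline expression `datetime.datetime.now(datetime.UTC).replace(tzinfo=None)` with `naive_utc_now()` imported from `libs.datetime_utils`. The helper's definition is not part of this diff; a minimal sketch, assuming it simply centralizes the expression it replaces:

```python
from datetime import UTC, datetime


def naive_utc_now() -> datetime:
    """Current UTC time as a tz-naive datetime.

    Equivalent to datetime.now(UTC).replace(tzinfo=None), which these
    tasks previously inlined; the models store naive UTC timestamps.
    """
    return datetime.now(UTC).replace(tzinfo=None)
```

Centralizing the conversion keeps the naive-UTC convention in one place, so a later move to tz-aware storage touches a single function instead of every task.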
diff --git a/api/tasks/mail_change_mail_task.py b/api/tasks/mail_change_mail_task.py index 054053558d..c090a84923 100644 --- a/api/tasks/mail_change_mail_task.py +++ b/api/tasks/mail_change_mail_task.py @@ -2,11 +2,13 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None: @@ -22,7 +24,7 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None if not mail.is_inited(): return - logging.info(click.style(f"Start change email mail to {to}", fg="green")) + logger.info(click.style(f"Start change email mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -35,9 +37,9 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None ) end_at = time.perf_counter() - logging.info(click.style(f"Send change email mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Send change email mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("Send change email mail to %s failed", to) + logger.exception("Send change email mail to %s failed", to) @shared_task(queue="mail") @@ -52,7 +54,7 @@ def send_change_mail_completed_notification_task(language: str, to: str) -> None if not mail.is_inited(): return - logging.info(click.style(f"Start change email completed notify mail to {to}", fg="green")) + logger.info(click.style(f"Start change email completed notify mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -68,11 +70,11 @@ def send_change_mail_completed_notification_task(language: str, to: str) -> None ) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Send change email completed mail to {to} succeeded: latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("Send change email completed mail to %s failed", to) + logger.exception("Send change email completed mail to %s failed", to) diff --git a/api/tasks/mail_email_code_login.py b/api/tasks/mail_email_code_login.py index a82ab55384..126c169d04 100644 --- a/api/tasks/mail_email_code_login.py +++ b/api/tasks/mail_email_code_login.py @@ -2,11 +2,13 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_email_code_login_mail_task(language: str, to: str, code: str) -> None: @@ -21,7 +23,7 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None: if not mail.is_inited(): return - logging.info(click.style(f"Start email code login mail to {to}", fg="green")) + logger.info(click.style(f"Start email code login mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -37,8 +39,8 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None: ) end_at = time.perf_counter() - logging.info( + logger.info( click.style(f"Send email code login mail to {to} succeeded: latency: {end_at - start_at}", fg="green") ) except Exception: - logging.exception("Send email code login mail to %s failed", to) + logger.exception("Send email code login mail to %s failed", to) 
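The change applied uniformly in every file here is swapping calls on the root `logging` module for a module-bound logger created once with `logging.getLogger(__name__)`. A minimal sketch of the idiom as these task modules use it (the task below is a hypothetical stand-in, not one from this PR):

```python
import logging

from celery import shared_task

# Bound once at import time; records emitted through it carry this module's
# dotted path, so levels, handlers, and filters can be tuned per module
# instead of only on the root logger.
logger = logging.getLogger(__name__)


@shared_task(queue="mail")
def example_mail_task(to: str) -> None:
    logger.info("Start example mail to %s", to)
    try:
        ...  # send the mail here
    except Exception:
        # logger.exception logs at ERROR level and appends the traceback,
        # matching how the tasks above report failures.
        logger.exception("Send example mail to %s failed", to)
```

With the root logger configured as before, output is unchanged; the gain is that each module's records become individually addressable.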
diff --git a/api/tasks/mail_enterprise_task.py b/api/tasks/mail_inner_task.py similarity index 56% rename from api/tasks/mail_enterprise_task.py rename to api/tasks/mail_inner_task.py index 9c80da06e5..8149bfb156 100644 --- a/api/tasks/mail_enterprise_task.py +++ b/api/tasks/mail_inner_task.py @@ -3,19 +3,21 @@ import time from collections.abc import Mapping import click -from celery import shared_task # type: ignore +from celery import shared_task from flask import render_template_string from extensions.ext_mail import mail from libs.email_i18n import get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") -def send_enterprise_email_task(to: list[str], subject: str, body: str, substitutions: Mapping[str, str]): +def send_inner_email_task(to: list[str], subject: str, body: str, substitutions: Mapping[str, str]): if not mail.is_inited(): return - logging.info(click.style(f"Start enterprise mail to {to} with subject {subject}", fg="green")) + logger.info(click.style(f"Start inner mail to {to} with subject {subject}", fg="green")) start_at = time.perf_counter() try: @@ -25,6 +27,6 @@ def send_enterprise_email_task(to: list[str], subject: str, body: str, substitut email_service.send_raw_email(to=to, subject=subject, html_content=html_content) end_at = time.perf_counter() - logging.info(click.style(f"Send enterprise mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Send inner mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("Send enterprise mail to %s failed", to) + logger.exception("Send inner mail to %s failed", to) diff --git a/api/tasks/mail_invite_member_task.py b/api/tasks/mail_invite_member_task.py index ff351f08af..a5d59d7452 100644 --- a/api/tasks/mail_invite_member_task.py +++ b/api/tasks/mail_invite_member_task.py @@ -2,12 +2,14 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from configs import dify_config from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_invite_member_mail_task(language: str, to: str, token: str, inviter_name: str, workspace_name: str) -> None: @@ -24,7 +26,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam if not mail.is_inited(): return - logging.info(click.style(f"Start send invite member mail to {to} in workspace {workspace_name}", fg="green")) + logger.info(click.style(f"Start send invite member mail to {to} in workspace {workspace_name}", fg="green")) start_at = time.perf_counter() try: @@ -43,8 +45,6 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam ) end_at = time.perf_counter() - logging.info( - click.style(f"Send invite member mail to {to} succeeded: latency: {end_at - start_at}", fg="green") - ) + logger.info(click.style(f"Send invite member mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("Send invite member mail to %s failed", to) + logger.exception("Send invite member mail to %s failed", to) diff --git a/api/tasks/mail_owner_transfer_task.py b/api/tasks/mail_owner_transfer_task.py index 3856bf294a..33a8e17436 100644 --- a/api/tasks/mail_owner_transfer_task.py +++ b/api/tasks/mail_owner_transfer_task.py @@ -2,11 +2,13 @@ import logging import time import click -from celery import 
shared_task # type: ignore +from celery import shared_task from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspace: str) -> None: @@ -22,7 +24,7 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac if not mail.is_inited(): return - logging.info(click.style(f"Start owner transfer confirm mail to {to}", fg="green")) + logger.info(click.style(f"Start owner transfer confirm mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -39,14 +41,14 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac ) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Send owner transfer confirm mail to {to} succeeded: latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("owner transfer confirm email mail to %s failed", to) + logger.exception("owner transfer confirm mail to %s failed", to) @shared_task(queue="mail") @@ -63,7 +65,7 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace: if not mail.is_inited(): return - logging.info(click.style(f"Start old owner transfer notify mail to {to}", fg="green")) + logger.info(click.style(f"Start old owner transfer notify mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -80,14 +82,14 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace: ) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Send old owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("old owner transfer notify email mail to %s failed", to) + logger.exception("old owner transfer notify mail to %s failed", to) @shared_task(queue="mail") @@ -103,7 +105,7 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace: if not mail.is_inited(): return - logging.info(click.style(f"Start new owner transfer notify mail to {to}", fg="green")) + logger.info(click.style(f"Start new owner transfer notify mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -119,11 +121,11 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace: ) end_at = time.perf_counter() - logging.info( + logger.info( click.style( f"Send new owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}", fg="green", ) ) except Exception: - logging.exception("new owner transfer notify email mail to %s failed", to) + logger.exception("new owner transfer notify mail to %s failed", to) diff --git a/api/tasks/mail_reset_password_task.py b/api/tasks/mail_reset_password_task.py index b01af7827b..1fcc2bfbaa 100644 --- a/api/tasks/mail_reset_password_task.py +++ b/api/tasks/mail_reset_password_task.py @@ -2,11 +2,13 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from extensions.ext_mail import mail from libs.email_i18n import EmailType, get_email_i18n_service +logger = logging.getLogger(__name__) + @shared_task(queue="mail") def send_reset_password_mail_task(language: str, to: str, code: str) -> None: @@ -21,7 +23,7 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None: if not mail.is_inited(): return - logging.info(click.style(f"Start password reset mail to {to}", fg="green")) + 
logger.info(click.style(f"Start password reset mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -37,8 +39,8 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None: ) end_at = time.perf_counter() - logging.info( + logger.info( click.style(f"Send password reset mail to {to} succeeded: latency: {end_at - start_at}", fg="green") ) except Exception: - logging.exception("Send password reset mail to %s failed", to) + logger.exception("Send password reset mail to %s failed", to) diff --git a/api/tasks/ops_trace_task.py b/api/tasks/ops_trace_task.py index c7e0047664..7b254ac3b5 100644 --- a/api/tasks/ops_trace_task.py +++ b/api/tasks/ops_trace_task.py @@ -1,7 +1,7 @@ import json import logging -from celery import shared_task # type: ignore +from celery import shared_task from flask import current_app from core.ops.entities.config_entity import OPS_FILE_PATH, OPS_TRACE_FAILED_KEY @@ -12,6 +12,8 @@ from extensions.ext_storage import storage from models.model import Message from models.workflow import WorkflowRun +logger = logging.getLogger(__name__) + @shared_task(queue="ops_trace") def process_trace_tasks(file_info): @@ -43,11 +45,11 @@ def process_trace_tasks(file_info): if trace_type: trace_info = trace_type(**trace_info) trace_instance.trace(trace_info) - logging.info("Processing trace tasks success, app_id: %s", app_id) + logger.info("Processing trace tasks success, app_id: %s", app_id) except Exception as e: - logging.info("error:\n\n\n%s\n\n\n\n", e) + logger.info("error:\n\n\n%s\n\n\n\n", e) failed_key = f"{OPS_TRACE_FAILED_KEY}_{app_id}" redis_client.incr(failed_key) - logging.info("Processing trace tasks failed, app_id: %s", app_id) + logger.info("Processing trace tasks failed, app_id: %s", app_id) finally: storage.delete(file_path) diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py index 9ea6aa6214..ec0b534546 100644 --- a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py +++ b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py @@ -2,7 +2,7 @@ import traceback import typing import click -from celery import shared_task # type: ignore +from celery import shared_task from core.helper import marketplace from core.helper.marketplace import MarketplacePluginDeclaration diff --git a/api/tasks/recover_document_indexing_task.py b/api/tasks/recover_document_indexing_task.py index ff489340cd..1b2a653c01 100644 --- a/api/tasks/recover_document_indexing_task.py +++ b/api/tasks/recover_document_indexing_task.py @@ -2,12 +2,14 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.indexing_runner import DocumentIsPausedError, IndexingRunner from extensions.ext_database import db from models.dataset import Document +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def recover_document_indexing_task(dataset_id: str, document_id: str): @@ -18,13 +20,13 @@ def recover_document_indexing_task(dataset_id: str, document_id: str): Usage: recover_document_indexing_task.delay(dataset_id, document_id) """ - logging.info(click.style(f"Recover document: {document_id}", fg="green")) + logger.info(click.style(f"Recover document: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style(f"Document not found: {document_id}", 
fg="red")) + logger.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return @@ -37,10 +39,10 @@ def recover_document_indexing_task(dataset_id: str, document_id: str): elif document.indexing_status == "indexing": indexing_runner.run_in_indexing_status(document) end_at = time.perf_counter() - logging.info(click.style(f"Processed document: {document.id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Processed document: {document.id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("recover_document_indexing_task failed, document_id: %s", document_id) + logger.exception("recover_document_indexing_task failed, document_id: %s", document_id) finally: db.session.close() diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py index 828c52044f..7bfda3d740 100644 --- a/api/tasks/remove_app_and_related_data_task.py +++ b/api/tasks/remove_app_and_related_data_task.py @@ -4,7 +4,7 @@ from collections.abc import Callable import click import sqlalchemy as sa -from celery import shared_task # type: ignore +from celery import shared_task from sqlalchemy import delete from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import sessionmaker @@ -40,10 +40,12 @@ from models.workflow import ( ) from repositories.factory import DifyAPIRepositoryFactory +logger = logging.getLogger(__name__) + @shared_task(queue="app_deletion", bind=True, max_retries=3) def remove_app_and_related_data_task(self, tenant_id: str, app_id: str): - logging.info(click.style(f"Start deleting app and related data: {tenant_id}:{app_id}", fg="green")) + logger.info(click.style(f"Start deleting app and related data: {tenant_id}:{app_id}", fg="green")) start_at = time.perf_counter() try: # Delete related data @@ -69,14 +71,12 @@ def remove_app_and_related_data_task(self, tenant_id: str, app_id: str): _delete_draft_variables(app_id) end_at = time.perf_counter() - logging.info(click.style(f"App and related data deleted: {app_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"App and related data deleted: {app_id} latency: {end_at - start_at}", fg="green")) except SQLAlchemyError as e: - logging.exception( - click.style(f"Database error occurred while deleting app {app_id} and related data", fg="red") - ) + logger.exception(click.style(f"Database error occurred while deleting app {app_id} and related data", fg="red")) raise self.retry(exc=e, countdown=60) # Retry after 60 seconds except Exception as e: - logging.exception(click.style(f"Error occurred while deleting app {app_id} and related data", fg="red")) + logger.exception(click.style(f"Error occurred while deleting app {app_id} and related data", fg="red")) raise self.retry(exc=e, countdown=60) # Retry after 60 seconds @@ -215,7 +215,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str): batch_size=1000, ) - logging.info("Deleted %s workflow runs for app %s", deleted_count, app_id) + logger.info("Deleted %s workflow runs for app %s", deleted_count, app_id) def _delete_app_workflow_node_executions(tenant_id: str, app_id: str): @@ -229,7 +229,7 @@ def _delete_app_workflow_node_executions(tenant_id: str, app_id: str): batch_size=1000, ) - logging.info("Deleted %s workflow node executions for app %s", deleted_count, app_id) + logger.info("Deleted %s workflow node executions 
for app %s", deleted_count, app_id) def _delete_app_workflow_app_logs(tenant_id: str, app_id: str): @@ -266,7 +266,7 @@ def _delete_conversation_variables(*, app_id: str): with db.engine.connect() as conn: conn.execute(stmt) conn.commit() - logging.info(click.style(f"Deleted conversation variables for app {app_id}", fg="green")) + logger.info(click.style(f"Deleted conversation variables for app {app_id}", fg="green")) def _delete_app_messages(tenant_id: str, app_id: str): @@ -370,8 +370,8 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int: with db.engine.begin() as conn: # Get a batch of draft variable IDs query_sql = """ - SELECT id FROM workflow_draft_variables - WHERE app_id = :app_id + SELECT id FROM workflow_draft_variables + WHERE app_id = :app_id LIMIT :batch_size """ result = conn.execute(sa.text(query_sql), {"app_id": app_id, "batch_size": batch_size}) @@ -382,16 +382,16 @@ def delete_draft_variables_batch(app_id: str, batch_size: int = 1000) -> int: # Delete the batch delete_sql = """ - DELETE FROM workflow_draft_variables + DELETE FROM workflow_draft_variables WHERE id IN :ids """ deleted_result = conn.execute(sa.text(delete_sql), {"ids": tuple(draft_var_ids)}) batch_deleted = deleted_result.rowcount total_deleted += batch_deleted - logging.info(click.style(f"Deleted {batch_deleted} draft variables (batch) for app {app_id}", fg="green")) + logger.info(click.style(f"Deleted {batch_deleted} draft variables (batch) for app {app_id}", fg="green")) - logging.info(click.style(f"Deleted {total_deleted} total draft variables for app {app_id}", fg="green")) + logger.info(click.style(f"Deleted {total_deleted} total draft variables for app {app_id}", fg="green")) return total_deleted @@ -407,8 +407,8 @@ def _delete_records(query_sql: str, params: dict, delete_func: Callable, name: s try: delete_func(record_id) db.session.commit() - logging.info(click.style(f"Deleted {name} {record_id}", fg="green")) + logger.info(click.style(f"Deleted {name} {record_id}", fg="green")) except Exception: - logging.exception("Error occurred while deleting %s %s", name, record_id) + logger.exception("Error occurred while deleting %s %s", name, record_id) continue rs.close() diff --git a/api/tasks/remove_document_from_index_task.py b/api/tasks/remove_document_from_index_task.py index 524130a297..ec56ab583b 100644 --- a/api/tasks/remove_document_from_index_task.py +++ b/api/tasks/remove_document_from_index_task.py @@ -1,15 +1,17 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Document, DocumentSegment +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def remove_document_from_index_task(document_id: str): @@ -19,17 +21,17 @@ def remove_document_from_index_task(document_id: str): Usage: remove_document_from_index.delay(document_id) """ - logging.info(click.style(f"Start remove document segments from index: {document_id}", fg="green")) + logger.info(click.style(f"Start remove document segments from index: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id).first() if not document: - logging.info(click.style(f"Document not found: {document_id}", fg="red")) + 
logger.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return if document.indexing_status != "completed": - logging.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red")) + logger.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red")) db.session.close() return @@ -49,24 +51,22 @@ def remove_document_from_index_task(document_id: str): try: index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False) except Exception: - logging.exception("clean dataset %s from index failed", dataset.id) + logger.exception("clean dataset %s from index failed", dataset.id) # update segment to disable db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update( { DocumentSegment.enabled: False, - DocumentSegment.disabled_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.disabled_at: naive_utc_now(), DocumentSegment.disabled_by: document.disabled_by, - DocumentSegment.updated_at: datetime.datetime.now(datetime.UTC).replace(tzinfo=None), + DocumentSegment.updated_at: naive_utc_now(), } ) db.session.commit() end_at = time.perf_counter() - logging.info( - click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green") - ) + logger.info(click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green")) except Exception: - logging.exception("remove document from index failed") + logger.exception("remove document from index failed") if not document.archived: document.enabled = True db.session.commit() diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py index 26b41aff2e..c52218caae 100644 --- a/api/tasks/retry_document_indexing_task.py +++ b/api/tasks/retry_document_indexing_task.py @@ -1,17 +1,19 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.indexing_runner import IndexingRunner from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): @@ -22,12 +24,11 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): Usage: retry_document_indexing_task.delay(dataset_id, document_ids) """ - documents: list[Document] = [] start_at = time.perf_counter() try: dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red")) + logger.info(click.style(f"Dataset not found: {dataset_id}", fg="red")) return tenant_id = dataset.tenant_id for document_id in document_ids: @@ -51,18 +52,18 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): if document: document.indexing_status = "error" document.error = str(e) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.stopped_at = naive_utc_now() db.session.add(document) db.session.commit() redis_client.delete(retry_indexing_cache_key) return - logging.info(click.style(f"Start 
retry document: {document_id}", fg="green")) + logger.info(click.style(f"Start retry document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) if not document: - logging.info(click.style(f"Document not found: {document_id}", fg="yellow")) + logger.info(click.style(f"Document not found: {document_id}", fg="yellow")) return try: # clean old data @@ -79,7 +80,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): db.session.commit() document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.processing_started_at = naive_utc_now() db.session.add(document) db.session.commit() @@ -89,16 +90,16 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]): except Exception as ex: document.indexing_status = "error" document.error = str(ex) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.stopped_at = naive_utc_now() db.session.add(document) db.session.commit() - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) redis_client.delete(retry_indexing_cache_key) - logging.exception("retry_document_indexing_task failed, document_id: %s", document_id) + logger.exception("retry_document_indexing_task failed, document_id: %s", document_id) end_at = time.perf_counter() - logging.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except Exception as e: - logging.exception( + logger.exception( "retry_document_indexing_task failed, dataset_id: %s, document_ids: %s", dataset_id, document_ids ) raise e diff --git a/api/tasks/sync_website_document_indexing_task.py b/api/tasks/sync_website_document_indexing_task.py index f112a97d2f..3c7c69e3c8 100644 --- a/api/tasks/sync_website_document_indexing_task.py +++ b/api/tasks/sync_website_document_indexing_task.py @@ -1,17 +1,19 @@ -import datetime import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.indexing_runner import IndexingRunner from core.rag.index_processor.index_processor_factory import IndexProcessorFactory from extensions.ext_database import db from extensions.ext_redis import redis_client +from libs.datetime_utils import naive_utc_now from models.dataset import Dataset, Document, DocumentSegment from services.feature_service import FeatureService +logger = logging.getLogger(__name__) + @shared_task(queue="dataset") def sync_website_document_indexing_task(dataset_id: str, document_id: str): @@ -46,16 +48,16 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): if document: document.indexing_status = "error" document.error = str(e) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.stopped_at = naive_utc_now() db.session.add(document) db.session.commit() redis_client.delete(sync_indexing_cache_key) return - logging.info(click.style(f"Start sync website document: {document_id}", fg="green")) + logger.info(click.style(f"Start sync website document: {document_id}", fg="green")) document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style(f"Document not found: {document_id}", 
fg="yellow")) + logger.info(click.style(f"Document not found: {document_id}", fg="yellow")) return try: # clean old data @@ -72,7 +74,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): db.session.commit() document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.processing_started_at = naive_utc_now() db.session.add(document) db.session.commit() @@ -82,11 +84,11 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): except Exception as ex: document.indexing_status = "error" document.error = str(ex) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + document.stopped_at = naive_utc_now() db.session.add(document) db.session.commit() - logging.info(click.style(str(ex), fg="yellow")) + logger.info(click.style(str(ex), fg="yellow")) redis_client.delete(sync_indexing_cache_key) - logging.exception("sync_website_document_indexing_task failed, document_id: %s", document_id) + logger.exception("sync_website_document_indexing_task failed, document_id: %s", document_id) end_at = time.perf_counter() - logging.info(click.style(f"Sync document: {document_id} latency: {end_at - start_at}", fg="green")) + logger.info(click.style(f"Sync document: {document_id} latency: {end_at - start_at}", fg="green")) diff --git a/api/tasks/workflow_execution_tasks.py b/api/tasks/workflow_execution_tasks.py index 2f9fb628ca..77ddf83023 100644 --- a/api/tasks/workflow_execution_tasks.py +++ b/api/tasks/workflow_execution_tasks.py @@ -8,7 +8,7 @@ improving performance by offloading storage operations to background workers. import json import logging -from celery import shared_task # type: ignore[import-untyped] +from celery import shared_task from sqlalchemy import select from sqlalchemy.orm import sessionmaker diff --git a/api/tasks/workflow_node_execution_tasks.py b/api/tasks/workflow_node_execution_tasks.py index dfc8a33564..16356086cf 100644 --- a/api/tasks/workflow_node_execution_tasks.py +++ b/api/tasks/workflow_node_execution_tasks.py @@ -8,7 +8,7 @@ improving performance by offloading storage operations to background workers. 
 import json
 import logging

-from celery import shared_task  # type: ignore[import-untyped]
+from celery import shared_task
 from sqlalchemy import select
 from sqlalchemy.orm import sessionmaker
diff --git a/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py b/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py
index e3c592b583..c8cb7528e1 100644
--- a/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py
+++ b/api/tests/integration_tests/model_runtime/__mock/plugin_daemon.py
@@ -3,15 +3,12 @@ from collections.abc import Callable

 import pytest

-# import monkeypatch
-from _pytest.monkeypatch import MonkeyPatch
-
 from core.plugin.impl.model import PluginModelClient
 from tests.integration_tests.model_runtime.__mock.plugin_model import MockModelClass


 def mock_plugin_daemon(
-    monkeypatch: MonkeyPatch,
+    monkeypatch: pytest.MonkeyPatch,
 ) -> Callable[[], None]:
     """
     mock openai module
@@ -34,7 +31,7 @@ MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true"


 @pytest.fixture
-def setup_model_mock(monkeypatch):
+def setup_model_mock(monkeypatch: pytest.MonkeyPatch):
     if MOCK:
         unpatch = mock_plugin_daemon(monkeypatch)
diff --git a/api/tests/integration_tests/plugin/__mock/http.py b/api/tests/integration_tests/plugin/__mock/http.py
index 25177274c6..8f8988899b 100644
--- a/api/tests/integration_tests/plugin/__mock/http.py
+++ b/api/tests/integration_tests/plugin/__mock/http.py
@@ -3,7 +3,6 @@ from typing import Literal

 import pytest
 import requests
-from _pytest.monkeypatch import MonkeyPatch

 from core.plugin.entities.plugin_daemon import PluginDaemonBasicResponse
 from core.tools.entities.common_entities import I18nObject
@@ -53,7 +52,7 @@ MOCK_SWITCH = os.getenv("MOCK_SWITCH", "false").lower() == "true"


 @pytest.fixture
-def setup_http_mock(request, monkeypatch: MonkeyPatch):
+def setup_http_mock(request, monkeypatch: pytest.MonkeyPatch):
     if MOCK_SWITCH:
         monkeypatch.setattr(requests, "request", MockedHttp.requests_request)
diff --git a/api/tests/integration_tests/tools/__mock/http.py b/api/tests/integration_tests/tools/__mock/http.py
index de9711ab38..fb2e3abcee 100644
--- a/api/tests/integration_tests/tools/__mock/http.py
+++ b/api/tests/integration_tests/tools/__mock/http.py
@@ -3,7 +3,6 @@ from typing import Literal

 import httpx
 import pytest
-from _pytest.monkeypatch import MonkeyPatch

 from core.helper import ssrf_proxy
@@ -30,7 +29,7 @@ class MockedHttp:


 @pytest.fixture
-def setup_http_mock(request, monkeypatch: MonkeyPatch):
+def setup_http_mock(request, monkeypatch: pytest.MonkeyPatch):
     monkeypatch.setattr(ssrf_proxy, "make_request", MockedHttp.httpx_request)
     yield
     monkeypatch.undo()
diff --git a/api/tests/integration_tests/tools/__mock_server/openapi_todo.py b/api/tests/integration_tests/tools/__mock_server/openapi_todo.py
index 83f4d70ce9..2f0f38e0b8 100644
--- a/api/tests/integration_tests/tools/__mock_server/openapi_todo.py
+++ b/api/tests/integration_tests/tools/__mock_server/openapi_todo.py
@@ -1,5 +1,5 @@
 from flask import Flask, request
-from flask_restful import Api, Resource
+from flask_restx import Api, Resource

 app = Flask(__name__)
 api = Api(app)
diff --git a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py
index 4af35a8bef..be5b4de5a2 100644
--- a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py
+++ b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py
@@ -1,5 +1,6 @@
 import os
 from collections import UserDict
+from typing import Optional
 from unittest.mock import MagicMock

 import pytest
@@ -21,7 +22,7 @@ class MockBaiduVectorDBClass:
     def mock_vector_db_client(
         self,
         config=None,
-        adapter: HTTPAdapter = None,
+        adapter: Optional[HTTPAdapter] = None,
     ):
         self.conn = MagicMock()
         self._config = MagicMock()
diff --git a/api/tests/integration_tests/vdb/__mock/tcvectordb.py b/api/tests/integration_tests/vdb/__mock/tcvectordb.py
index ae5f9761b4..02f658aad6 100644
--- a/api/tests/integration_tests/vdb/__mock/tcvectordb.py
+++ b/api/tests/integration_tests/vdb/__mock/tcvectordb.py
@@ -23,7 +23,7 @@ class MockTcvectordbClass:
         key="",
         read_consistency: ReadConsistency = ReadConsistency.EVENTUAL_CONSISTENCY,
         timeout=10,
-        adapter: HTTPAdapter = None,
+        adapter: Optional[HTTPAdapter] = None,
         pool_size: int = 2,
         proxies: Optional[dict] = None,
         password: Optional[str] = None,
@@ -72,11 +72,11 @@ class MockTcvectordbClass:
         shard: int,
         replicas: int,
         description: Optional[str] = None,
-        index: Index = None,
-        embedding: Embedding = None,
+        index: Optional[Index] = None,
+        embedding: Optional[Embedding] = None,
         timeout: Optional[float] = None,
         ttl_config: Optional[dict] = None,
-        filter_index_config: FilterIndexConfig = None,
+        filter_index_config: Optional[FilterIndexConfig] = None,
         indexes: Optional[list[IndexField]] = None,
     ) -> RPCCollection:
         return RPCCollection(
@@ -113,7 +113,7 @@ class MockTcvectordbClass:
         database_name: str,
         collection_name: str,
         vectors: list[list[float]],
-        filter: Filter = None,
+        filter: Optional[Filter] = None,
         params=None,
         retrieve_vector: bool = False,
         limit: int = 10,
@@ -128,7 +128,7 @@ class MockTcvectordbClass:
         collection_name: str,
         ann: Optional[Union[list[AnnSearch], AnnSearch]] = None,
         match: Optional[Union[list[KeywordSearch], KeywordSearch]] = None,
-        filter: Union[Filter, str] = None,
+        filter: Optional[Union[Filter, str]] = None,
         rerank: Optional[Rerank] = None,
         retrieve_vector: Optional[bool] = None,
         output_fields: Optional[list[str]] = None,
@@ -158,7 +158,7 @@ class MockTcvectordbClass:
         database_name: str,
         collection_name: str,
         document_ids: Optional[list[str]] = None,
-        filter: Filter = None,
+        filter: Optional[Filter] = None,
         timeout: Optional[float] = None,
     ):
         return {"code": 0, "msg": "operation success"}
diff --git a/api/tests/integration_tests/vdb/clickzetta/test_clickzetta.py b/api/tests/integration_tests/vdb/clickzetta/test_clickzetta.py
index 8b57132772..21de8be6e3 100644
--- a/api/tests/integration_tests/vdb/clickzetta/test_clickzetta.py
+++ b/api/tests/integration_tests/vdb/clickzetta/test_clickzetta.py
@@ -1,3 +1,4 @@
+import contextlib
 import os

 import pytest
@@ -44,10 +45,8 @@ class TestClickzettaVector(AbstractVectorTest):
         yield vector

         # Cleanup: delete the test collection
-        try:
+        with contextlib.suppress(Exception):
             vector.delete()
-        except Exception:
-            pass

     def test_clickzetta_vector_basic_operations(self, vector_store):
         """Test basic CRUD operations on Clickzetta vector store."""
diff --git a/api/tests/test_containers_integration_tests/conftest.py b/api/tests/test_containers_integration_tests/conftest.py
index 0369a5cbd0..66ddc0ba4c 100644
--- a/api/tests/test_containers_integration_tests/conftest.py
+++ b/api/tests/test_containers_integration_tests/conftest.py
@@ -10,11 +10,13 @@ more reliable and realistic test scenarios.
 import logging
 import os
 from collections.abc import Generator
+from pathlib import Path
 from typing import Optional

 import pytest
 from flask import Flask
 from flask.testing import FlaskClient
+from sqlalchemy import Engine, text
 from sqlalchemy.orm import Session
 from testcontainers.core.container import DockerContainer
 from testcontainers.core.waiting_utils import wait_for_logs
@@ -43,6 +45,7 @@ class DifyTestContainers:
         self.postgres: Optional[PostgresContainer] = None
         self.redis: Optional[RedisContainer] = None
         self.dify_sandbox: Optional[DockerContainer] = None
+        self.dify_plugin_daemon: Optional[DockerContainer] = None
         self._containers_started = False
         logger.info("DifyTestContainers initialized - ready to manage test containers")
@@ -64,7 +67,7 @@ class DifyTestContainers:
         # PostgreSQL is used for storing user data, workflows, and application state
         logger.info("Initializing PostgreSQL container...")
         self.postgres = PostgresContainer(
-            image="postgres:16-alpine",
+            image="postgres:14-alpine",
         )
         self.postgres.start()
         db_host = self.postgres.get_container_host_ip()
@@ -108,6 +111,25 @@ class DifyTestContainers:
         except Exception as e:
             logger.warning("Failed to install uuid-ossp extension: %s", e)

+        # Create plugin database for dify-plugin-daemon
+        logger.info("Creating plugin database...")
+        try:
+            conn = psycopg2.connect(
+                host=db_host,
+                port=db_port,
+                user=self.postgres.username,
+                password=self.postgres.password,
+                database=self.postgres.dbname,
+            )
+            conn.autocommit = True
+            cursor = conn.cursor()
+            cursor.execute("CREATE DATABASE dify_plugin;")
+            cursor.close()
+            conn.close()
+            logger.info("Plugin database created successfully")
+        except Exception as e:
+            logger.warning("Failed to create plugin database: %s", e)
+
         # Set up storage environment variables
         os.environ["STORAGE_TYPE"] = "opendal"
         os.environ["OPENDAL_SCHEME"] = "fs"
@@ -116,7 +138,7 @@ class DifyTestContainers:
         # Start Redis container for caching and session management
         # Redis is used for storing session data, cache entries, and temporary data
         logger.info("Initializing Redis container...")
-        self.redis = RedisContainer(image="redis:latest", port=6379)
+        self.redis = RedisContainer(image="redis:6-alpine", port=6379)
         self.redis.start()
         redis_host = self.redis.get_container_host_ip()
         redis_port = self.redis.get_exposed_port(6379)
@@ -149,6 +171,62 @@ class DifyTestContainers:
         wait_for_logs(self.dify_sandbox, "config init success", timeout=60)
         logger.info("Dify Sandbox container is ready and accepting connections")

+        # Start Dify Plugin Daemon container for plugin management
+        # Dify Plugin Daemon provides plugin lifecycle management and execution
+        logger.info("Initializing Dify Plugin Daemon container...")
+        self.dify_plugin_daemon = DockerContainer(image="langgenius/dify-plugin-daemon:0.2.0-local")
+        self.dify_plugin_daemon.with_exposed_ports(5002)
+        self.dify_plugin_daemon.env = {
+            "DB_HOST": db_host,
+            "DB_PORT": str(db_port),
+            "DB_USERNAME": self.postgres.username,
+            "DB_PASSWORD": self.postgres.password,
+            "DB_DATABASE": "dify_plugin",
+            "REDIS_HOST": redis_host,
+            "REDIS_PORT": str(redis_port),
+            "REDIS_PASSWORD": "",
+            "SERVER_PORT": "5002",
+            "SERVER_KEY": "test_plugin_daemon_key",
+            "MAX_PLUGIN_PACKAGE_SIZE": "52428800",
+            "PPROF_ENABLED": "false",
+            "DIFY_INNER_API_URL": f"http://{db_host}:5001",
+            "DIFY_INNER_API_KEY": "test_inner_api_key",
+            "PLUGIN_REMOTE_INSTALLING_HOST": "0.0.0.0",
+            "PLUGIN_REMOTE_INSTALLING_PORT": "5003",
+            "PLUGIN_WORKING_PATH": "/app/storage/cwd",
+            "FORCE_VERIFYING_SIGNATURE": "false",
+            "PYTHON_ENV_INIT_TIMEOUT": "120",
+            "PLUGIN_MAX_EXECUTION_TIMEOUT": "600",
+            "PLUGIN_STDIO_BUFFER_SIZE": "1024",
+            "PLUGIN_STDIO_MAX_BUFFER_SIZE": "5242880",
+            "PLUGIN_STORAGE_TYPE": "local",
+            "PLUGIN_STORAGE_LOCAL_ROOT": "/app/storage",
+            "PLUGIN_INSTALLED_PATH": "plugin",
+            "PLUGIN_PACKAGE_CACHE_PATH": "plugin_packages",
+            "PLUGIN_MEDIA_CACHE_PATH": "assets",
+        }
+
+        try:
+            self.dify_plugin_daemon.start()
+            plugin_daemon_host = self.dify_plugin_daemon.get_container_host_ip()
+            plugin_daemon_port = self.dify_plugin_daemon.get_exposed_port(5002)
+            os.environ["PLUGIN_DAEMON_URL"] = f"http://{plugin_daemon_host}:{plugin_daemon_port}"
+            os.environ["PLUGIN_DAEMON_KEY"] = "test_plugin_daemon_key"
+            logger.info(
+                "Dify Plugin Daemon container started successfully - Host: %s, Port: %s",
+                plugin_daemon_host,
+                plugin_daemon_port,
+            )
+
+            # Wait for Dify Plugin Daemon to be ready
+            logger.info("Waiting for Dify Plugin Daemon to be ready to accept connections...")
+            wait_for_logs(self.dify_plugin_daemon, "start plugin manager daemon", timeout=60)
+            logger.info("Dify Plugin Daemon container is ready and accepting connections")
+        except Exception as e:
+            logger.warning("Failed to start Dify Plugin Daemon container: %s", e)
+            logger.info("Continuing without plugin daemon - some tests may be limited")
+            self.dify_plugin_daemon = None
+
         self._containers_started = True
         logger.info("All test containers started successfully")
@@ -164,7 +242,7 @@
             return
         logger.info("Stopping and cleaning up test containers...")
-        containers = [self.redis, self.postgres, self.dify_sandbox]
+        containers = [self.redis, self.postgres, self.dify_sandbox, self.dify_plugin_daemon]
         for container in containers:
             if container:
                 try:
@@ -184,6 +262,57 @@
 _container_manager = DifyTestContainers()

+
+def _get_migration_dir() -> Path:
+    conftest_dir = Path(__file__).parent
+    return conftest_dir.parent.parent / "migrations"
+
+
+def _get_engine_url(engine: Engine):
+    try:
+        return engine.url.render_as_string(hide_password=False).replace("%", "%%")
+    except AttributeError:
+        return str(engine.url).replace("%", "%%")
+
+
+_UUIDv7SQL = r"""
+/* Main function to generate a uuidv7 value with millisecond precision */
+CREATE FUNCTION uuidv7() RETURNS uuid
+AS
+$$
+    -- Replace the first 48 bits of a uuidv4 with the current
+    -- number of milliseconds since 1970-01-01 UTC
+    -- and set the "ver" field to 7 by setting additional bits
+SELECT encode(
+           set_bit(
+               set_bit(
+                   overlay(uuid_send(gen_random_uuid()) placing
+                           substring(int8send((extract(epoch from clock_timestamp()) * 1000)::bigint) from
+                                     3)
+                           from 1 for 6),
+                   52, 1),
+               53, 1), 'hex')::uuid;
+$$ LANGUAGE SQL VOLATILE PARALLEL SAFE;
+
+COMMENT ON FUNCTION uuidv7 IS
+    'Generate a uuid-v7 value with a 48-bit timestamp (millisecond precision) and 74 bits of randomness';
+
+CREATE FUNCTION uuidv7_boundary(timestamptz) RETURNS uuid
+AS
+$$
+    /* uuid fields: version=0b0111, variant=0b10 */
+SELECT encode(
+           overlay('\x00000000000070008000000000000000'::bytea
+                   placing substring(int8send(floor(extract(epoch from $1) * 1000)::bigint) from 3)
+                   from 1 for 6),
+           'hex')::uuid;
+$$ LANGUAGE SQL STABLE STRICT PARALLEL SAFE;
+
+COMMENT ON FUNCTION uuidv7_boundary(timestamptz) IS
+    'Generate a non-random uuidv7 with the given timestamp (first 48 bits) and all random bits to 0.
+     As the smallest possible uuidv7 for that timestamp, it may be used as a boundary for partitions.';
+"""
+
+
 def _create_app_with_containers() -> Flask:
     """
     Create Flask application configured to use test containers.
@@ -211,7 +340,10 @@
     # Initialize database schema
     logger.info("Creating database schema...")
+
     with app.app_context():
+        with db.engine.connect() as conn, conn.begin():
+            conn.execute(text(_UUIDv7SQL))
         db.create_all()
     logger.info("Database schema created successfully")
diff --git a/api/tests/test_containers_integration_tests/services/test_account_service.py b/api/tests/test_containers_integration_tests/services/test_account_service.py
index 3d7be0df7d..415e65ce51 100644
--- a/api/tests/test_containers_integration_tests/services/test_account_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_account_service.py
@@ -1639,7 +1639,7 @@ class TestTenantService:
         email = fake.email()
         name = fake.name()
         password = fake.password(length=12)
-        invalid_action = fake.word()
+        invalid_action = "invalid_action_that_doesnt_exist"
         # Setup mocks
         mock_external_service_dependencies[
             "feature_service"
diff --git a/api/tests/test_containers_integration_tests/services/test_annotation_service.py b/api/tests/test_containers_integration_tests/services/test_annotation_service.py
index 0ab5f398e3..92d93d601e 100644
--- a/api/tests/test_containers_integration_tests/services/test_annotation_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_annotation_service.py
@@ -410,18 +410,18 @@ class TestAnnotationService:
         app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)

         # Create annotations with specific keywords
-        unique_keyword = fake.word()
+        unique_keyword = f"unique_{fake.uuid4()[:8]}"
         annotation_args = {
             "question": f"Question with {unique_keyword} keyword",
             "answer": f"Answer with {unique_keyword} keyword",
         }
         AppAnnotationService.insert_app_annotation_directly(annotation_args, app.id)

-        # Create another annotation without the keyword
         other_args = {
-            "question": "Question without keyword",
-            "answer": "Answer without keyword",
+            "question": "Different question without special term",
+            "answer": "Different answer without special content",
         }
+
         AppAnnotationService.insert_app_annotation_directly(other_args, app.id)

         # Search with keyword
@@ -471,7 +471,7 @@ class TestAnnotationService:
         # Verify annotation was deleted
         from extensions.ext_database import db

-        deleted_annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first()
+        deleted_annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
         assert deleted_annotation is None

         # Verify delete_annotation_index_task was called (when annotation setting exists)
@@ -1175,7 +1175,7 @@ class TestAnnotationService:
         AppAnnotationService.delete_app_annotation(app.id, annotation_id)

         # Verify annotation was deleted
-        deleted_annotation = db.session.query(MessageAnnotation).filter(MessageAnnotation.id == annotation_id).first()
+        deleted_annotation = db.session.query(MessageAnnotation).where(MessageAnnotation.id == annotation_id).first()
         assert deleted_annotation is None

         # Verify delete_annotation_index_task was called
diff --git a/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py b/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py
index 38f532fd64..6cd8337ff9 100644
--- a/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py
@@ -234,7 +234,7 @@ class TestAPIBasedExtensionService:
         # Verify extension was deleted
         from extensions.ext_database import db

-        deleted_extension = db.session.query(APIBasedExtension).filter(APIBasedExtension.id == extension_id).first()
+        deleted_extension = db.session.query(APIBasedExtension).where(APIBasedExtension.id == extension_id).first()
         assert deleted_extension is None

     def test_save_extension_duplicate_name(self, db_session_with_containers, mock_external_service_dependencies):
diff --git a/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py b/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py
index f2bd9f8084..fc614b2296 100644
--- a/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py
+++ b/api/tests/test_containers_integration_tests/services/test_app_dsl_service.py
@@ -144,127 +144,6 @@ class TestAppDslService:
         }
         return yaml.dump(yaml_data, allow_unicode=True)

-    def test_import_app_yaml_content_success(self, db_session_with_containers, mock_external_service_dependencies):
-        """
-        Test successful app import from YAML content.
-        """
-        fake = Faker()
-        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
-
-        # Create YAML content
-        yaml_content = self._create_simple_yaml_content(fake.company(), "chat")
-
-        # Import app
-        dsl_service = AppDslService(db_session_with_containers)
-        result = dsl_service.import_app(
-            account=account,
-            import_mode=ImportMode.YAML_CONTENT,
-            yaml_content=yaml_content,
-            name="Imported App",
-            description="Imported app description",
-        )
-
-        # Verify import result
-        assert result.status == ImportStatus.COMPLETED
-        assert result.app_id is not None
-        assert result.app_mode == "chat"
-        assert result.imported_dsl_version == "0.3.0"
-        assert result.error == ""
-
-        # Verify app was created in database
-        imported_app = db_session_with_containers.query(App).filter(App.id == result.app_id).first()
-        assert imported_app is not None
-        assert imported_app.name == "Imported App"
-        assert imported_app.description == "Imported app description"
-        assert imported_app.mode == "chat"
-        assert imported_app.tenant_id == account.current_tenant_id
-        assert imported_app.created_by == account.id
-
-        # Verify model config was created
-        model_config = (
-            db_session_with_containers.query(AppModelConfig).filter(AppModelConfig.app_id == result.app_id).first()
-        )
-        assert model_config is not None
-        # The provider and model_id are stored in the model field as JSON
-        model_dict = model_config.model_dict
-        assert model_dict["provider"] == "openai"
-        assert model_dict["name"] == "gpt-3.5-turbo"
-
-    def test_import_app_yaml_url_success(self, db_session_with_containers, mock_external_service_dependencies):
-        """
-        Test successful app import from YAML URL.
-        """
-        fake = Faker()
-        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
-
-        # Create YAML content for mock response
-        yaml_content = self._create_simple_yaml_content(fake.company(), "chat")
-
-        # Setup mock response
-        mock_response = MagicMock()
-        mock_response.content = yaml_content.encode("utf-8")
-        mock_response.raise_for_status.return_value = None
-        mock_external_service_dependencies["ssrf_proxy"].get.return_value = mock_response
-
-        # Import app from URL
-        dsl_service = AppDslService(db_session_with_containers)
-        result = dsl_service.import_app(
-            account=account,
-            import_mode=ImportMode.YAML_URL,
-            yaml_url="https://example.com/app.yaml",
-            name="URL Imported App",
-            description="App imported from URL",
-        )
-
-        # Verify import result
-        assert result.status == ImportStatus.COMPLETED
-        assert result.app_id is not None
-        assert result.app_mode == "chat"
-        assert result.imported_dsl_version == "0.3.0"
-        assert result.error == ""
-
-        # Verify app was created in database
-        imported_app = db_session_with_containers.query(App).filter(App.id == result.app_id).first()
-        assert imported_app is not None
-        assert imported_app.name == "URL Imported App"
-        assert imported_app.description == "App imported from URL"
-        assert imported_app.mode == "chat"
-        assert imported_app.tenant_id == account.current_tenant_id
-
-        # Verify ssrf_proxy was called
-        mock_external_service_dependencies["ssrf_proxy"].get.assert_called_once_with(
-            "https://example.com/app.yaml", follow_redirects=True, timeout=(10, 10)
-        )
-
-    def test_import_app_invalid_yaml_format(self, db_session_with_containers, mock_external_service_dependencies):
-        """
-        Test app import with invalid YAML format.
-        """
-        fake = Faker()
-        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
-
-        # Create invalid YAML content
-        invalid_yaml = "invalid: yaml: content: ["
-
-        # Import app with invalid YAML
-        dsl_service = AppDslService(db_session_with_containers)
-        result = dsl_service.import_app(
-            account=account,
-            import_mode=ImportMode.YAML_CONTENT,
-            yaml_content=invalid_yaml,
-            name="Invalid App",
-        )
-
-        # Verify import failed
-        assert result.status == ImportStatus.FAILED
-        assert result.app_id is None
-        assert "Invalid YAML format" in result.error
-        assert result.imported_dsl_version == ""
-
-        # Verify no app was created in database
-        apps_count = db_session_with_containers.query(App).filter(App.tenant_id == account.current_tenant_id).count()
-        assert apps_count == 1  # Only the original test app
-
     def test_import_app_missing_yaml_content(self, db_session_with_containers, mock_external_service_dependencies):
         """
         Test app import with missing YAML content.
diff --git a/api/tests/test_containers_integration_tests/services/test_feature_service.py b/api/tests/test_containers_integration_tests/services/test_feature_service.py
new file mode 100644
index 0000000000..8bd5440411
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/services/test_feature_service.py
@@ -0,0 +1,1785 @@
+from unittest.mock import patch
+
+import pytest
+from faker import Faker
+
+from services.feature_service import FeatureModel, FeatureService, KnowledgeRateLimitModel, SystemFeatureModel
+
+
+class TestFeatureService:
+    """Integration tests for FeatureService using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("services.feature_service.BillingService") as mock_billing_service,
+            patch("services.feature_service.EnterpriseService") as mock_enterprise_service,
+        ):
+            # Setup default mock returns for BillingService
+            mock_billing_service.get_info.return_value = {
+                "enabled": True,
+                "subscription": {"plan": "pro", "interval": "monthly", "education": True},
+                "members": {"size": 5, "limit": 10},
+                "apps": {"size": 3, "limit": 20},
+                "vector_space": {"size": 2, "limit": 10},
+                "documents_upload_quota": {"size": 15, "limit": 100},
+                "annotation_quota_limit": {"size": 8, "limit": 50},
+                "docs_processing": "enhanced",
+                "can_replace_logo": True,
+                "model_load_balancing_enabled": True,
+                "knowledge_rate_limit": {"limit": 100},
+            }
+
+            mock_billing_service.get_knowledge_rate_limit.return_value = {"limit": 100, "subscription_plan": "pro"}
+
+            # Setup default mock returns for EnterpriseService
+            mock_enterprise_service.get_workspace_info.return_value = {
+                "WorkspaceMembers": {"used": 5, "limit": 10, "enabled": True}
+            }
+
+            mock_enterprise_service.get_info.return_value = {
+                "SSOEnforcedForSignin": True,
+                "SSOEnforcedForSigninProtocol": "saml",
+                "EnableEmailCodeLogin": True,
+                "EnableEmailPasswordLogin": False,
+                "IsAllowRegister": False,
+                "IsAllowCreateWorkspace": False,
+                "Branding": {
+                    "applicationTitle": "Test Enterprise",
+                    "loginPageLogo": "https://example.com/logo.png",
+                    "workspaceLogo": "https://example.com/workspace.png",
+                    "favicon": "https://example.com/favicon.ico",
+                },
+                "WebAppAuth": {"allowSso": True, "allowEmailCodeLogin": True, "allowEmailPasswordLogin": False},
+                "SSOEnforcedForWebProtocol": "oidc",
+                "License": {
+                    "status": "active",
+                    "expiredAt": "2025-12-31",
+                    "workspaces": {"enabled": True, "limit": 5, "used": 2},
+                },
+                "PluginInstallationPermission": {
+                    "pluginInstallationScope": "official_only",
+                    "restrictToMarketplaceOnly": True,
+                },
+            }
+
+            yield {
+                "billing_service": mock_billing_service,
+                "enterprise_service": mock_enterprise_service,
+            }
+
+    def _create_test_tenant_id(self):
+        """Helper method to create a test tenant ID."""
+        fake = Faker()
+        return fake.uuid4()
+
+    def test_get_features_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful feature retrieval with billing and enterprise enabled.
+
+        This test verifies:
+        - Proper feature model creation with all required fields
+        - Correct integration with billing service
+        - Proper enterprise workspace information handling
+        - Return value correctness and structure
+        """
+        # Arrange: Setup test data with proper config mocking
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+            mock_config.ENTERPRISE_ENABLED = True
+            mock_config.CAN_REPLACE_LOGO = True
+            mock_config.MODEL_LB_ENABLED = True
+            mock_config.DATASET_OPERATOR_ENABLED = True
+            mock_config.EDUCATION_ENABLED = True
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify billing features
+            assert result.billing.enabled is True
+            assert result.billing.subscription.plan == "pro"
+            assert result.billing.subscription.interval == "monthly"
+            assert result.education.activated is True
+
+            # Verify member limitations
+            assert result.members.size == 5
+            assert result.members.limit == 10
+
+            # Verify app limitations
+            assert result.apps.size == 3
+            assert result.apps.limit == 20
+
+            # Verify vector space limitations
+            assert result.vector_space.size == 2
+            assert result.vector_space.limit == 10
+
+            # Verify document upload quota
+            assert result.documents_upload_quota.size == 15
+            assert result.documents_upload_quota.limit == 100
+
+            # Verify annotation quota
+            assert result.annotation_quota_limit.size == 8
+            assert result.annotation_quota_limit.limit == 50
+
+            # Verify other features
+            assert result.docs_processing == "enhanced"
+            assert result.can_replace_logo is True
+            assert result.model_load_balancing_enabled is True
+            assert result.knowledge_rate_limit == 100
+
+            # Verify enterprise features
+            assert result.workspace_members.enabled is True
+            assert result.workspace_members.size == 5
+            assert result.workspace_members.limit == 10
+
+            # Verify webapp copyright is enabled for non-sandbox plans
+            assert result.webapp_copyright_enabled is True
+            assert result.is_allow_transfer_workspace is True
+
+            # Verify mock interactions
+            mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id)
+            mock_external_service_dependencies["enterprise_service"].get_workspace_info.assert_called_once_with(
+                tenant_id
+            )
+
+    def test_get_features_sandbox_plan(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval for sandbox plan with specific limitations.
+
+        This test verifies:
+        - Proper handling of sandbox plan limitations
+        - Correct webapp copyright settings for sandbox
+        - Transfer workspace restrictions for sandbox plans
+        - Proper billing service integration
+        """
+        # Arrange: Setup sandbox plan mock with proper config
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.CAN_REPLACE_LOGO = False
+            mock_config.MODEL_LB_ENABLED = False
+            mock_config.DATASET_OPERATOR_ENABLED = False
+            mock_config.EDUCATION_ENABLED = False
+
+            # Set mock return value inside the patch context
+            mock_external_service_dependencies["billing_service"].get_info.return_value = {
+                "enabled": True,
+                "subscription": {"plan": "sandbox", "interval": "monthly", "education": False},
+                "members": {"size": 1, "limit": 3},
+                "apps": {"size": 1, "limit": 5},
+                "vector_space": {"size": 1, "limit": 2},
+                "documents_upload_quota": {"size": 5, "limit": 20},
+                "annotation_quota_limit": {"size": 2, "limit": 10},
+                "docs_processing": "standard",
+                "can_replace_logo": False,
+                "model_load_balancing_enabled": False,
+                "knowledge_rate_limit": {"limit": 10},
+            }
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify sandbox-specific limitations
+            assert result.billing.subscription.plan == "sandbox"
+            assert result.education.activated is False
+
+            # Verify sandbox limitations
+            assert result.members.size == 1
+            assert result.members.limit == 3
+            assert result.apps.size == 1
+            assert result.apps.limit == 5
+            assert result.vector_space.size == 1
+            assert result.vector_space.limit == 2
+            assert result.documents_upload_quota.size == 5
+            assert result.documents_upload_quota.limit == 20
+            assert result.annotation_quota_limit.size == 2
+            assert result.annotation_quota_limit.limit == 10
+
+            # Verify sandbox-specific restrictions
+            assert result.webapp_copyright_enabled is False
+            assert result.is_allow_transfer_workspace is False
+            assert result.can_replace_logo is False
+            assert result.model_load_balancing_enabled is False
+            assert result.docs_processing == "standard"
+            assert result.knowledge_rate_limit == 10
+
+            # Verify mock interactions
+            mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id)
+
+    def test_get_knowledge_rate_limit_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful knowledge rate limit retrieval with billing enabled.
+
+        This test verifies:
+        - Proper knowledge rate limit model creation
+        - Correct integration with billing service
+        - Proper rate limit configuration
+        - Return value correctness and structure
+        """
+        # Arrange: Setup test data with proper config
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+
+            # Act: Execute the method under test
+            result = FeatureService.get_knowledge_rate_limit(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, KnowledgeRateLimitModel)
+
+            # Verify rate limit configuration
+            assert result.enabled is True
+            assert result.limit == 100
+            assert result.subscription_plan == "pro"
+
+            # Verify mock interactions
+            mock_external_service_dependencies["billing_service"].get_knowledge_rate_limit.assert_called_once_with(
+                tenant_id
+            )
+
+    def test_get_system_features_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful system features retrieval with enterprise and marketplace enabled.
+
+        This test verifies:
+        - Proper system feature model creation
+        - Correct integration with enterprise service
+        - Proper marketplace configuration
+        - Return value correctness and structure
+        """
+        # Arrange: Setup test data with proper config
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.ENTERPRISE_ENABLED = True
+            mock_config.MARKETPLACE_ENABLED = True
+            mock_config.ENABLE_EMAIL_CODE_LOGIN = True
+            mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
+            mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
+            mock_config.ALLOW_REGISTER = False
+            mock_config.ALLOW_CREATE_WORKSPACE = False
+            mock_config.MAIL_TYPE = "smtp"
+            mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100
+
+            # Act: Execute the method under test
+            result = FeatureService.get_system_features()
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, SystemFeatureModel)
+
+            # Verify enterprise features
+            assert result.branding.enabled is True
+            assert result.webapp_auth.enabled is True
+            assert result.enable_change_email is False
+
+            # Verify SSO configuration
+            assert result.sso_enforced_for_signin is True
+            assert result.sso_enforced_for_signin_protocol == "saml"
+
+            # Verify authentication settings
+            assert result.enable_email_code_login is True
+            assert result.enable_email_password_login is False
+            assert result.is_allow_register is False
+            assert result.is_allow_create_workspace is False
+
+            # Verify branding configuration
+            assert result.branding.application_title == "Test Enterprise"
+            assert result.branding.login_page_logo == "https://example.com/logo.png"
+            assert result.branding.workspace_logo == "https://example.com/workspace.png"
+            assert result.branding.favicon == "https://example.com/favicon.ico"
+
+            # Verify webapp auth configuration
+            assert result.webapp_auth.allow_sso is True
+            assert result.webapp_auth.allow_email_code_login is True
+            assert result.webapp_auth.allow_email_password_login is False
+            assert result.webapp_auth.sso_config.protocol == "oidc"
+
+            # Verify license configuration
+            assert result.license.status.value == "active"
+            assert result.license.expired_at == "2025-12-31"
+            assert result.license.workspaces.enabled is True
+            assert result.license.workspaces.limit == 5
+            assert result.license.workspaces.size == 2
+
+            # Verify plugin installation permission
+            assert result.plugin_installation_permission.plugin_installation_scope == "official_only"
+            assert result.plugin_installation_permission.restrict_to_marketplace_only is True
+
+            # Verify marketplace configuration
+            assert result.enable_marketplace is True
+
+            # Verify mock interactions
+            mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once()
+
+    def test_get_system_features_basic_config(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test system features retrieval with basic configuration (no enterprise).
+
+        This test verifies:
+        - Proper system feature model creation without enterprise
+        - Correct environment variable handling
+        - Default configuration values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup basic config mock (no enterprise)
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.MARKETPLACE_ENABLED = False
+            mock_config.ENABLE_EMAIL_CODE_LOGIN = True
+            mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
+            mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
+            mock_config.ALLOW_REGISTER = True
+            mock_config.ALLOW_CREATE_WORKSPACE = True
+            mock_config.MAIL_TYPE = "smtp"
+            mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100
+
+            # Act: Execute the method under test
+            result = FeatureService.get_system_features()
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, SystemFeatureModel)
+
+            # Verify basic configuration
+            assert result.branding.enabled is False
+            assert result.webapp_auth.enabled is False
+            assert result.enable_change_email is True
+
+            # Verify authentication settings from config
+            assert result.enable_email_code_login is True
+            assert result.enable_email_password_login is True
+            assert result.enable_social_oauth_login is False
+            assert result.is_allow_register is True
+            assert result.is_allow_create_workspace is True
+            assert result.is_email_setup is True
+
+            # Verify marketplace configuration
+            assert result.enable_marketplace is False
+
+            # Verify plugin package size (uses default value from dify_config)
+            assert result.max_plugin_package_size == 15728640
+
+    def test_get_features_billing_disabled(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval when billing is disabled.
+
+        This test verifies:
+        - Proper feature model creation without billing
+        - Correct environment variable handling
+        - Default configuration values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup billing disabled mock
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = False
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.CAN_REPLACE_LOGO = True
+            mock_config.MODEL_LB_ENABLED = True
+            mock_config.DATASET_OPERATOR_ENABLED = True
+            mock_config.EDUCATION_ENABLED = True
+
+            tenant_id = self._create_test_tenant_id()
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify billing is disabled
+            assert result.billing.enabled is False
+
+            # Verify environment-based features
+            assert result.can_replace_logo is True
+            assert result.model_load_balancing_enabled is True
+            assert result.dataset_operator_enabled is True
+            assert result.education.enabled is True
+
+            # Verify default limitations
+            assert result.members.size == 0
+            assert result.members.limit == 1
+            assert result.apps.size == 0
+            assert result.apps.limit == 10
+            assert result.vector_space.size == 0
+            assert result.vector_space.limit == 5
+            assert result.documents_upload_quota.size == 0
+            assert result.documents_upload_quota.limit == 50
+            assert result.annotation_quota_limit.size == 0
+            assert result.annotation_quota_limit.limit == 10
+            assert result.knowledge_rate_limit == 10
+            assert result.docs_processing == "standard"
+
+            # Verify no enterprise features
+            assert result.workspace_members.enabled is False
+            assert result.webapp_copyright_enabled is False
+
+    def test_get_knowledge_rate_limit_billing_disabled(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test knowledge rate limit retrieval when billing is disabled.
+
+        This test verifies:
+        - Proper knowledge rate limit model creation without billing
+        - Default rate limit configuration
+        - Return value correctness and structure
+        """
+        # Arrange: Setup billing disabled mock
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = False
+
+            tenant_id = self._create_test_tenant_id()
+
+            # Act: Execute the method under test
+            result = FeatureService.get_knowledge_rate_limit(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, KnowledgeRateLimitModel)
+
+            # Verify default configuration
+            assert result.enabled is False
+            assert result.limit == 10
+            assert result.subscription_plan == ""  # Empty string when billing is disabled
+
+            # Verify no billing service calls
+            mock_external_service_dependencies["billing_service"].get_knowledge_rate_limit.assert_not_called()
+
+    def test_get_features_enterprise_only(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval with enterprise enabled but billing disabled.
+
+        This test verifies:
+        - Proper feature model creation with enterprise only
+        - Correct enterprise service integration
+        - Proper workspace member handling
+        - Return value correctness and structure
+        """
+        # Arrange: Setup enterprise only mock
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = False
+            mock_config.ENTERPRISE_ENABLED = True
+            mock_config.CAN_REPLACE_LOGO = False
+            mock_config.MODEL_LB_ENABLED = False
+            mock_config.DATASET_OPERATOR_ENABLED = False
+            mock_config.EDUCATION_ENABLED = False
+
+            tenant_id = self._create_test_tenant_id()
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify billing is disabled
+            assert result.billing.enabled is False
+
+            # Verify enterprise features
+            assert result.webapp_copyright_enabled is True
+
+            # Verify workspace members from enterprise
+            assert result.workspace_members.enabled is True
+            assert result.workspace_members.size == 5
+            assert result.workspace_members.limit == 10
+
+            # Verify environment-based features
+            assert result.can_replace_logo is False
+            assert result.model_load_balancing_enabled is False
+            assert result.dataset_operator_enabled is False
+            assert result.education.enabled is False
+
+            # Verify default limitations
+            assert result.members.size == 0
+            assert result.members.limit == 1
+            assert result.apps.size == 0
+            assert result.apps.limit == 10
+            assert result.vector_space.size == 0
+            assert result.vector_space.limit == 5
+
+            # Verify mock interactions
+            mock_external_service_dependencies["enterprise_service"].get_workspace_info.assert_called_once_with(
+                tenant_id
+            )
+            mock_external_service_dependencies["billing_service"].get_info.assert_not_called()
+
+    def test_get_system_features_enterprise_disabled(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test system features retrieval when enterprise is disabled.
+
+        This test verifies:
+        - Proper system feature model creation without enterprise
+        - Correct environment variable handling
+        - Default configuration values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup enterprise disabled mock
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.MARKETPLACE_ENABLED = True
+            mock_config.ENABLE_EMAIL_CODE_LOGIN = False
+            mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
+            mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = True
+            mock_config.ALLOW_REGISTER = False
+            mock_config.ALLOW_CREATE_WORKSPACE = False
+            mock_config.MAIL_TYPE = None
+            mock_config.PLUGIN_MAX_PACKAGE_SIZE = 50
+
+            # Act: Execute the method under test
+            result = FeatureService.get_system_features()
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, SystemFeatureModel)
+
+            # Verify enterprise features are disabled
+            assert result.branding.enabled is False
+            assert result.webapp_auth.enabled is False
+            assert result.enable_change_email is True
+
+            # Verify authentication settings from config
+            assert result.enable_email_code_login is False
+            assert result.enable_email_password_login is True
+            assert result.enable_social_oauth_login is True
+            assert result.is_allow_register is False
+            assert result.is_allow_create_workspace is False
+            assert result.is_email_setup is False
+
+            # Verify marketplace configuration
+            assert result.enable_marketplace is True
+
+            # Verify plugin package size (uses default value from dify_config)
+            assert result.max_plugin_package_size == 15728640
+
+            # Verify default license status
+            assert result.license.status.value == "none"
+            assert result.license.expired_at == ""
+            assert result.license.workspaces.enabled is False
+
+            # Verify no enterprise service calls
+            mock_external_service_dependencies["enterprise_service"].get_info.assert_not_called()
+
+    def test_get_features_no_tenant_id(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval without tenant ID (billing disabled).
+
+        This test verifies:
+        - Proper feature model creation without tenant ID
+        - Correct handling when billing is disabled
+        - Default configuration values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup no tenant ID scenario
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.CAN_REPLACE_LOGO = True
+            mock_config.MODEL_LB_ENABLED = False
+            mock_config.DATASET_OPERATOR_ENABLED = True
+            mock_config.EDUCATION_ENABLED = False
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features("")
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify billing is disabled due to no tenant ID
+            assert result.billing.enabled is False
+
+            # Verify environment-based features
+            assert result.can_replace_logo is True
+            assert result.model_load_balancing_enabled is False
+            assert result.dataset_operator_enabled is True
+            assert result.education.enabled is False
+
+            # Verify default limitations
+            assert result.members.size == 0
+            assert result.members.limit == 1
+            assert result.apps.size == 0
+            assert result.apps.limit == 10
+            assert result.vector_space.size == 0
+            assert result.vector_space.limit == 5
+
+            # Verify no billing service calls
+            mock_external_service_dependencies["billing_service"].get_info.assert_not_called()
+
+    def test_get_features_partial_billing_info(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval with partial billing information.
+
+        This test verifies:
+        - Proper handling of partial billing data
+        - Correct fallback to default values
+        - Proper billing service integration
+        - Return value correctness and structure
+        """
+        # Arrange: Setup partial billing info mock with proper config
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.CAN_REPLACE_LOGO = True
+            mock_config.MODEL_LB_ENABLED = False
+            mock_config.DATASET_OPERATOR_ENABLED = True
+            mock_config.EDUCATION_ENABLED = False
+
+            mock_external_service_dependencies["billing_service"].get_info.return_value = {
+                "enabled": True,
+                "subscription": {"plan": "basic", "interval": "yearly"},
+                # Missing members, apps, vector_space, etc.
+            }
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify billing features
+            assert result.billing.enabled is True
+            assert result.billing.subscription.plan == "basic"
+            assert result.billing.subscription.interval == "yearly"
+
+            # Verify default values for missing billing info
+            assert result.members.size == 0
+            assert result.members.limit == 1
+            assert result.apps.size == 0
+            assert result.apps.limit == 10
+            assert result.vector_space.size == 0
+            assert result.vector_space.limit == 5
+            assert result.documents_upload_quota.size == 0
+            assert result.documents_upload_quota.limit == 50
+            assert result.annotation_quota_limit.size == 0
+            assert result.annotation_quota_limit.limit == 10
+            assert result.knowledge_rate_limit == 10
+            assert result.docs_processing == "standard"
+
+            # Verify basic plan restrictions (non-sandbox plans have webapp copyright enabled)
+            assert result.webapp_copyright_enabled is True
+            assert result.is_allow_transfer_workspace is True
+
+            # Verify mock interactions
+            mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id)
+
+    def test_get_features_edge_case_vector_space(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval with edge case vector space configuration.
+
+        This test verifies:
+        - Proper handling of vector space quota limits
+        - Correct integration with billing service
+        - Proper fallback to default values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup edge case vector space mock with proper config
+        tenant_id = self._create_test_tenant_id()
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = True
+            mock_config.ENTERPRISE_ENABLED = False
+            mock_config.CAN_REPLACE_LOGO = True
+            mock_config.MODEL_LB_ENABLED = False
+            mock_config.DATASET_OPERATOR_ENABLED = True
+            mock_config.EDUCATION_ENABLED = False
+
+            mock_external_service_dependencies["billing_service"].get_info.return_value = {
+                "enabled": True,
+                "subscription": {"plan": "pro", "interval": "monthly"},
+                "vector_space": {"size": 0, "limit": 0},
+                "apps": {"size": 5, "limit": 10},
+            }
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify vector space configuration
+            assert result.vector_space.size == 0
+            assert result.vector_space.limit == 0
+
+            # Verify apps configuration
+            assert result.apps.size == 5
+            assert result.apps.limit == 10
+
+            # Verify pro plan features
+            assert result.webapp_copyright_enabled is True
+            assert result.is_allow_transfer_workspace is True
+
+            # Verify default values for missing billing info
+            assert result.members.size == 0
+            assert result.members.limit == 1
+            assert result.documents_upload_quota.size == 0
+            assert result.documents_upload_quota.limit == 50
+            assert result.annotation_quota_limit.size == 0
+            assert result.annotation_quota_limit.limit == 10
+            assert result.knowledge_rate_limit == 10
+            assert result.docs_processing == "standard"
+
+            # Verify mock interactions
+            mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id)
+
+    def test_get_system_features_edge_case_webapp_auth(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test system features retrieval with edge case webapp auth configuration.
+
+        This test verifies:
+        - Proper handling of webapp auth configuration
+        - Correct enterprise service integration
+        - Proper fallback to default values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup edge case webapp auth mock with proper config
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.ENTERPRISE_ENABLED = True
+            mock_config.MARKETPLACE_ENABLED = False
+            mock_config.ENABLE_EMAIL_CODE_LOGIN = False
+            mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
+            mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
+            mock_config.ALLOW_REGISTER = False
+            mock_config.ALLOW_CREATE_WORKSPACE = False
+            mock_config.MAIL_TYPE = "smtp"
+            mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100
+
+            mock_external_service_dependencies["enterprise_service"].get_info.return_value = {
+                "WebAppAuth": {"allowSso": False, "allowEmailCodeLogin": True, "allowEmailPasswordLogin": False}
+            }
+
+            # Act: Execute the method under test
+            result = FeatureService.get_system_features()
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, SystemFeatureModel)
+
+            # Verify webapp auth configuration
+            assert result.webapp_auth.allow_sso is False
+            assert result.webapp_auth.allow_email_code_login is True
+            assert result.webapp_auth.allow_email_password_login is False
+            assert result.webapp_auth.sso_config.protocol == ""
+
+            # Verify enterprise features
+            assert result.branding.enabled is True
+            assert result.webapp_auth.enabled is True
+            assert result.enable_change_email is False
+
+            # Verify default values for missing enterprise info
+            assert result.sso_enforced_for_signin is False
+            assert result.sso_enforced_for_signin_protocol == ""
+            assert result.enable_email_code_login is False
+            assert result.enable_email_password_login is True
+            assert result.is_allow_register is False
+            assert result.is_allow_create_workspace is False
+
+            # Verify mock interactions
+            mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once()
+
+    def test_get_features_edge_case_members_quota(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test feature retrieval with edge case members quota configuration.
+ + This test verifies: + - Proper handling of members quota limits + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case members quota mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = False + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "basic", "interval": "yearly"}, + "members": {"size": 10, "limit": 10}, + "vector_space": {"size": 3, "limit": 5}, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify members configuration + assert result.members.size == 10 + assert result.members.limit == 10 + + # Verify vector space configuration + assert result.vector_space.size == 3 + assert result.vector_space.limit == 5 + + # Verify basic plan features (non-sandbox plans have webapp copyright enabled) + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify default values for missing billing info + assert result.apps.size == 0 + assert result.apps.limit == 10 + assert result.documents_upload_quota.size == 0 + assert result.documents_upload_quota.limit == 50 + assert result.annotation_quota_limit.size == 0 + assert result.annotation_quota_limit.limit == 10 + assert result.knowledge_rate_limit == 10 + assert result.docs_processing == "standard" + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_plugin_installation_permission_scopes( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test system features retrieval with different plugin installation permission scopes. 
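+        Four scopes are exercised in turn below: official_only, all,
+        official_and_specific_partners, and none.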
+ + This test verifies: + - Proper handling of different plugin installation scopes + - Correct enterprise service integration + - Proper permission configuration + - Return value correctness and structure + """ + + # Test case 1: Official only scope + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "PluginInstallationPermission": { + "pluginInstallationScope": "official_only", + "restrictToMarketplaceOnly": True, + } + } + + result = FeatureService.get_system_features() + assert result.plugin_installation_permission.plugin_installation_scope == "official_only" + assert result.plugin_installation_permission.restrict_to_marketplace_only is True + + # Test case 2: All plugins scope + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "PluginInstallationPermission": {"pluginInstallationScope": "all", "restrictToMarketplaceOnly": False} + } + + result = FeatureService.get_system_features() + assert result.plugin_installation_permission.plugin_installation_scope == "all" + assert result.plugin_installation_permission.restrict_to_marketplace_only is False + + # Test case 3: Specific partners scope + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "PluginInstallationPermission": { + "pluginInstallationScope": "official_and_specific_partners", + "restrictToMarketplaceOnly": False, + } + } + + result = FeatureService.get_system_features() + assert result.plugin_installation_permission.plugin_installation_scope == "official_and_specific_partners" + assert result.plugin_installation_permission.restrict_to_marketplace_only is False + + # Test case 4: None scope + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + 
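+            # "none" is the most restrictive scope; paired with
+            # restrictToMarketplaceOnly=True it should describe a fully
+            # locked-down installation policy, as the assertions below check.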
mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "PluginInstallationPermission": {"pluginInstallationScope": "none", "restrictToMarketplaceOnly": True} + } + + result = FeatureService.get_system_features() + assert result.plugin_installation_permission.plugin_installation_scope == "none" + assert result.plugin_installation_permission.restrict_to_marketplace_only is True + + def test_get_features_workspace_members_missing( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test feature retrieval when workspace members info is missing from enterprise. + + This test verifies: + - Proper handling of missing workspace members data + - Correct enterprise service integration + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup missing workspace members mock + tenant_id = self._create_test_tenant_id() + mock_external_service_dependencies["enterprise_service"].get_workspace_info.return_value = { + # Missing WorkspaceMembers key + } + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = False + mock_config.ENTERPRISE_ENABLED = True + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify workspace members use default values + assert result.workspace_members.enabled is False + assert result.workspace_members.size == 0 + assert result.workspace_members.limit == 0 + + # Verify enterprise features + assert result.webapp_copyright_enabled is True + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_workspace_info.assert_called_once_with( + tenant_id + ) + + def test_get_system_features_license_inactive(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test system features retrieval with inactive license. 
+ + This test verifies: + - Proper handling of inactive license status + - Correct enterprise service integration + - Proper license status handling + - Return value correctness and structure + """ + # Arrange: Setup inactive license mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "License": { + "status": "inactive", + "expiredAt": "", + "workspaces": {"enabled": False, "limit": 0, "used": 0}, + } + } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify license status + assert result.license.status == "inactive" + assert result.license.expired_at == "" + assert result.license.workspaces.enabled is False + assert result.license.workspaces.size == 0 + assert result.license.workspaces.limit == 0 + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_system_features_partial_enterprise_info( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test system features retrieval with partial enterprise information. + + This test verifies: + - Proper handling of partial enterprise data + - Correct fallback to default values + - Proper enterprise service integration + - Return value correctness and structure + """ + # Arrange: Setup partial enterprise info mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "SSOEnforcedForSignin": True, + "Branding": {"applicationTitle": "Partial Enterprise"}, + # Missing WebAppAuth, License, PluginInstallationPermission, etc. 
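+                # (every key omitted here is expected to fall back to the
+                # SystemFeatureModel defaults asserted further down)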
+ } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify SSO configuration + assert result.sso_enforced_for_signin is True + assert result.sso_enforced_for_signin_protocol == "" + + # Verify branding configuration (partial) + assert result.branding.application_title == "Partial Enterprise" + assert result.branding.login_page_logo == "" + assert result.branding.workspace_logo == "" + assert result.branding.favicon == "" + + # Verify default values for missing enterprise info + assert result.webapp_auth.allow_sso is False + assert result.webapp_auth.allow_email_code_login is False + assert result.webapp_auth.allow_email_password_login is False + assert result.webapp_auth.sso_config.protocol == "" + + # Verify default license status + assert result.license.status == "none" + assert result.license.expired_at == "" + assert result.license.workspaces.enabled is False + + # Verify default plugin installation permission + assert result.plugin_installation_permission.plugin_installation_scope == "all" + assert result.plugin_installation_permission.restrict_to_marketplace_only is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_features_edge_case_limits(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test feature retrieval with edge case limit values. + + This test verifies: + - Proper handling of zero and negative limits + - Correct handling of very large limits + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case limits mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = False + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "enterprise", "interval": "yearly"}, + "members": {"size": 0, "limit": 0}, + "apps": {"size": 0, "limit": -1}, + "vector_space": {"size": 0, "limit": 999999}, + "documents_upload_quota": {"size": 0, "limit": 0}, + "annotation_quota_limit": {"size": 0, "limit": 1}, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify edge case limits + assert result.members.size == 0 + assert result.members.limit == 0 + assert result.apps.size == 0 + assert result.apps.limit == -1 + assert result.vector_space.size == 0 + assert result.vector_space.limit == 999999 + assert result.documents_upload_quota.size == 0 + assert result.documents_upload_quota.limit == 0 + assert result.annotation_quota_limit.size == 0 + assert result.annotation_quota_limit.limit == 1 + + # Verify enterprise plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify mock interactions + 
mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_get_system_features_edge_case_protocols( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test system features retrieval with edge case protocol values. + + This test verifies: + - Proper handling of empty protocol strings + - Correct handling of special protocol values + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case protocols mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "SSOEnforcedForSigninProtocol": "", + "SSOEnforcedForWebProtocol": " ", + "WebAppAuth": {"allowSso": True, "allowEmailCodeLogin": False, "allowEmailPasswordLogin": True}, + } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify edge case protocols + assert result.sso_enforced_for_signin_protocol == "" + assert result.webapp_auth.sso_config.protocol == " " + + # Verify webapp auth configuration + assert result.webapp_auth.allow_sso is True + assert result.webapp_auth.allow_email_code_login is False + assert result.webapp_auth.allow_email_password_login is True + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_features_edge_case_education(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test feature retrieval with edge case education configuration. 
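+        The billing payload marks the subscription with education=True, which
+        should surface as education.activated on the returned FeatureModel.
+        Note that only EDUCATION_ENABLED is set on the patched config; the
+        remaining flags fall back to truthy MagicMock attributes.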
+ + This test verifies: + - Proper handling of education feature flags + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case education mock + tenant_id = self._create_test_tenant_id() + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "education", "interval": "semester", "education": True}, + "members": {"size": 100, "limit": 200}, + "apps": {"size": 50, "limit": 100}, + "vector_space": {"size": 20, "limit": 50}, + "documents_upload_quota": {"size": 500, "limit": 1000}, + "annotation_quota_limit": {"size": 200, "limit": 500}, + } + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.EDUCATION_ENABLED = True + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify education features + assert result.education.enabled is True + assert result.education.activated is True + + # Verify education plan limits + assert result.members.size == 100 + assert result.members.limit == 200 + assert result.apps.size == 50 + assert result.apps.limit == 100 + assert result.vector_space.size == 20 + assert result.vector_space.limit == 50 + assert result.documents_upload_quota.size == 500 + assert result.documents_upload_quota.limit == 1000 + assert result.annotation_quota_limit.size == 200 + assert result.annotation_quota_limit.limit == 500 + + # Verify education plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_license_limitation_model_is_available( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test LicenseLimitationModel.is_available method with various scenarios. 
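+        The availability rule inferred from the asserts below (a sketch, not
+        the actual implementation):
+
+            def is_available(self, required: int = 1) -> bool:
+                if not self.enabled or self.limit == 0:
+                    return True  # disabled or unlimited
+                return self.size + required <= self.limit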
+
+        This test verifies:
+        - Proper quota availability calculation
+        - Correct handling of unlimited limits
+        - Proper handling of disabled limits
+        - Return value correctness for different scenarios
+        """
+        from services.feature_service import LicenseLimitationModel
+
+        # Test case 1: Limit disabled
+        disabled_limit = LicenseLimitationModel(enabled=False, size=5, limit=10)
+        assert disabled_limit.is_available(3) is True
+        assert disabled_limit.is_available(10) is True
+
+        # Test case 2: Unlimited limit
+        unlimited_limit = LicenseLimitationModel(enabled=True, size=5, limit=0)
+        assert unlimited_limit.is_available(3) is True
+        assert unlimited_limit.is_available(100) is True
+
+        # Test case 3: Available quota
+        available_limit = LicenseLimitationModel(enabled=True, size=5, limit=10)
+        assert available_limit.is_available(3) is True
+        assert available_limit.is_available(5) is True
+        assert available_limit.is_available(1) is True
+
+        # Test case 4: Insufficient quota
+        insufficient_limit = LicenseLimitationModel(enabled=True, size=8, limit=10)
+        assert insufficient_limit.is_available(3) is False
+        assert insufficient_limit.is_available(2) is True
+        assert insufficient_limit.is_available(1) is True
+
+        # Test case 5: Exact quota usage
+        exact_limit = LicenseLimitationModel(enabled=True, size=7, limit=10)
+        assert exact_limit.is_available(3) is True  # exactly the remaining quota
+        assert exact_limit.is_available(4) is False  # one unit past the remaining quota
+
+    def test_get_features_workspace_members_disabled(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test feature retrieval when workspace members are disabled in enterprise.
+
+        This test verifies:
+        - Proper handling of disabled workspace members
+        - Correct enterprise service integration
+        - Proper fallback to default values
+        - Return value correctness and structure
+        """
+        # Arrange: Setup workspace members disabled mock
+        tenant_id = self._create_test_tenant_id()
+        mock_external_service_dependencies["enterprise_service"].get_workspace_info.return_value = {
+            "WorkspaceMembers": {"used": 0, "limit": 0, "enabled": False}
+        }
+
+        with patch("services.feature_service.dify_config") as mock_config:
+            mock_config.BILLING_ENABLED = False
+            mock_config.ENTERPRISE_ENABLED = True
+
+            # Act: Execute the method under test
+            result = FeatureService.get_features(tenant_id)
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert isinstance(result, FeatureModel)
+
+            # Verify workspace members are disabled
+            assert result.workspace_members.enabled is False
+            assert result.workspace_members.size == 0
+            assert result.workspace_members.limit == 0
+
+            # Verify enterprise features
+            assert result.webapp_copyright_enabled is True
+
+            # Verify mock interactions
+            mock_external_service_dependencies["enterprise_service"].get_workspace_info.assert_called_once_with(tenant_id)
+
+    def test_get_system_features_license_expired(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test system features retrieval with expired license.
+ + This test verifies: + - Proper handling of expired license status + - Correct enterprise service integration + - Proper license status handling + - Return value correctness and structure + """ + # Arrange: Setup expired license mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "License": { + "status": "expired", + "expiredAt": "2023-12-31", + "workspaces": {"enabled": False, "limit": 0, "used": 0}, + } + } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify license status + assert result.license.status == "expired" + assert result.license.expired_at == "2023-12-31" + assert result.license.workspaces.enabled is False + assert result.license.workspaces.size == 0 + assert result.license.workspaces.limit == 0 + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_features_edge_case_docs_processing( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test feature retrieval with edge case document processing configuration. 
+ + This test verifies: + - Proper handling of different document processing modes + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case docs processing mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = True + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "premium", "interval": "monthly"}, + "docs_processing": "advanced", + "can_replace_logo": True, + "model_load_balancing_enabled": True, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify docs processing configuration + assert result.docs_processing == "advanced" + assert result.can_replace_logo is True + assert result.model_load_balancing_enabled is True + + # Verify premium plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify default limitations (no specific billing info) + assert result.members.size == 0 + assert result.members.limit == 1 + assert result.apps.size == 0 + assert result.apps.limit == 10 + assert result.vector_space.size == 0 + assert result.vector_space.limit == 5 + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_get_system_features_edge_case_branding( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test system features retrieval with edge case branding configuration. 
+ + This test verifies: + - Proper handling of partial branding information + - Correct enterprise service integration + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case branding mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "Branding": { + "applicationTitle": "Edge Case App", + "loginPageLogo": None, + "workspaceLogo": "", + "favicon": "https://example.com/favicon.ico", + } + } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify branding configuration (edge cases) + assert result.branding.application_title == "Edge Case App" + assert result.branding.login_page_logo is None # None value from mock + assert result.branding.workspace_logo == "" + assert result.branding.favicon == "https://example.com/favicon.ico" + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify default values for missing enterprise info + assert result.sso_enforced_for_signin is False + assert result.sso_enforced_for_signin_protocol == "" + assert result.enable_email_code_login is False + assert result.enable_email_password_login is True + assert result.is_allow_register is False + assert result.is_allow_create_workspace is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_features_edge_case_annotation_quota( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test feature retrieval with edge case annotation quota configuration. 
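+        (edge case: the quota sits one unit below its limit, 999 of 1000, and a
+        custom knowledge rate limit of 500 is supplied)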
+ + This test verifies: + - Proper handling of annotation quota limits + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case annotation quota mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = False + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "enterprise", "interval": "yearly"}, + "annotation_quota_limit": {"size": 999, "limit": 1000}, + "knowledge_rate_limit": {"limit": 500}, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify annotation quota configuration + assert result.annotation_quota_limit.size == 999 + assert result.annotation_quota_limit.limit == 1000 + + # Verify knowledge rate limit + assert result.knowledge_rate_limit == 500 + + # Verify enterprise plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify default values for missing billing info + assert result.members.size == 0 + assert result.members.limit == 1 + assert result.apps.size == 0 + assert result.apps.limit == 10 + assert result.vector_space.size == 0 + assert result.vector_space.limit == 5 + assert result.documents_upload_quota.size == 0 + assert result.documents_upload_quota.limit == 50 + assert result.docs_processing == "standard" + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_get_features_edge_case_documents_upload( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test feature retrieval with edge case documents upload settings. 
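+        (edge case: both size and limit of the upload quota are zero, the fully
+        zeroed quota configuration)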
+ + This test verifies: + - Proper handling of edge case documents upload configuration + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup edge case documents upload mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = False + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": {"plan": "pro", "interval": "monthly"}, + "documents_upload_quota": { + "size": 0, # Edge case: zero current size + "limit": 0, # Edge case: zero limit + }, + "knowledge_rate_limit": {"limit": 100}, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify documents upload quota configuration (edge cases) + assert result.documents_upload_quota.size == 0 + assert result.documents_upload_quota.limit == 0 + + # Verify knowledge rate limit + assert result.knowledge_rate_limit == 100 + + # Verify pro plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify default values for missing billing info + assert result.members.size == 0 + assert result.members.limit == 1 + assert result.apps.size == 0 + assert result.apps.limit == 10 + assert result.vector_space.size == 0 + assert result.vector_space.limit == 5 + assert result.annotation_quota_limit.size == 0 + assert result.annotation_quota_limit.limit == 10 # Default value when not provided + assert result.docs_processing == "standard" + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) + + def test_get_system_features_edge_case_license_lost( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test system features with lost license status. 
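+        Note: the mocked payload uses a lowercase "license" key rather than the
+        "License" key the other tests use, so the license section should not be
+        picked up and only the default values are asserted below.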
+ + This test verifies: + - Proper handling of lost license status + - Correct enterprise service integration + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup lost license mock with proper config + with patch("services.feature_service.dify_config") as mock_config: + mock_config.ENTERPRISE_ENABLED = True + mock_config.MARKETPLACE_ENABLED = False + mock_config.ENABLE_EMAIL_CODE_LOGIN = False + mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True + mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False + mock_config.ALLOW_REGISTER = False + mock_config.ALLOW_CREATE_WORKSPACE = False + mock_config.MAIL_TYPE = "smtp" + mock_config.PLUGIN_MAX_PACKAGE_SIZE = 100 + + mock_external_service_dependencies["enterprise_service"].get_info.return_value = { + "license": {"status": "lost", "expired_at": None, "plan": None} + } + + # Act: Execute the method under test + result = FeatureService.get_system_features() + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, SystemFeatureModel) + + # Verify enterprise features + assert result.branding.enabled is True + assert result.webapp_auth.enabled is True + assert result.enable_change_email is False + + # Verify default values for missing enterprise info + assert result.sso_enforced_for_signin is False + assert result.sso_enforced_for_signin_protocol == "" + assert result.enable_email_code_login is False + assert result.enable_email_password_login is True + assert result.is_allow_register is False + assert result.is_allow_create_workspace is False + + # Verify mock interactions + mock_external_service_dependencies["enterprise_service"].get_info.assert_called_once() + + def test_get_features_edge_case_education_disabled( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test feature retrieval with education feature disabled. 
+ + This test verifies: + - Proper handling of disabled education features + - Correct integration with billing service + - Proper fallback to default values + - Return value correctness and structure + """ + # Arrange: Setup education disabled mock with proper config + tenant_id = self._create_test_tenant_id() + + with patch("services.feature_service.dify_config") as mock_config: + mock_config.BILLING_ENABLED = True + mock_config.ENTERPRISE_ENABLED = False + mock_config.CAN_REPLACE_LOGO = True + mock_config.MODEL_LB_ENABLED = False + mock_config.DATASET_OPERATOR_ENABLED = True + mock_config.EDUCATION_ENABLED = False + + mock_external_service_dependencies["billing_service"].get_info.return_value = { + "enabled": True, + "subscription": { + "plan": "pro", + "interval": "monthly", + "education": False, # Education explicitly disabled + }, + "knowledge_rate_limit": {"limit": 100}, + } + + # Act: Execute the method under test + result = FeatureService.get_features(tenant_id) + + # Assert: Verify the expected outcomes + assert result is not None + assert isinstance(result, FeatureModel) + + # Verify education configuration + assert result.education.activated is False + + # Verify knowledge rate limit + assert result.knowledge_rate_limit == 100 + + # Verify pro plan features + assert result.webapp_copyright_enabled is True + assert result.is_allow_transfer_workspace is True + + # Verify default values for missing billing info + assert result.members.size == 0 + assert result.members.limit == 1 + assert result.apps.size == 0 + assert result.apps.limit == 10 + assert result.vector_space.size == 0 + assert result.vector_space.limit == 5 + assert result.documents_upload_quota.size == 0 + assert result.documents_upload_quota.limit == 50 + assert result.annotation_quota_limit.size == 0 + assert result.annotation_quota_limit.limit == 10 # Default value when not provided + assert result.docs_processing == "standard" + + # Verify mock interactions + mock_external_service_dependencies["billing_service"].get_info.assert_called_once_with(tenant_id) diff --git a/api/tests/test_containers_integration_tests/services/test_message_service.py b/api/tests/test_containers_integration_tests/services/test_message_service.py index 25ba0d03ef..ece6de6cdf 100644 --- a/api/tests/test_containers_integration_tests/services/test_message_service.py +++ b/api/tests/test_containers_integration_tests/services/test_message_service.py @@ -484,7 +484,7 @@ class TestMessageService: # Verify feedback was deleted from extensions.ext_database import db - deleted_feedback = db.session.query(MessageFeedback).filter(MessageFeedback.id == feedback.id).first() + deleted_feedback = db.session.query(MessageFeedback).where(MessageFeedback.id == feedback.id).first() assert deleted_feedback is None def test_create_feedback_no_rating_when_not_exists( diff --git a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py index a8a36b2565..cb20238f0c 100644 --- a/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py +++ b/api/tests/test_containers_integration_tests/services/test_model_load_balancing_service.py @@ -469,6 +469,6 @@ class TestModelLoadBalancingService: # Verify inherit config was created in database inherit_configs = ( - db.session.query(LoadBalancingModelConfig).filter(LoadBalancingModelConfig.name == "__inherit__").all() + 
db.session.query(LoadBalancingModelConfig).where(LoadBalancingModelConfig.name == "__inherit__").all() ) assert len(inherit_configs) == 1 diff --git a/api/tests/test_containers_integration_tests/services/test_model_provider_service.py b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py new file mode 100644 index 0000000000..2196da8b3e --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_model_provider_service.py @@ -0,0 +1,1209 @@ +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from core.entities.model_entities import ModelStatus +from core.model_runtime.entities.model_entities import FetchFrom, ModelType +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models.provider import Provider, ProviderModel, ProviderModelSetting, ProviderType +from services.model_provider_service import ModelProviderService + + +class TestModelProviderService: + """Integration tests for ModelProviderService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.model_provider_service.ProviderManager") as mock_provider_manager, + patch("services.model_provider_service.ModelProviderFactory") as mock_model_provider_factory, + ): + # Setup default mock returns + mock_provider_manager.return_value.get_configurations.return_value = MagicMock() + mock_model_provider_factory.return_value.get_provider_icon.return_value = (None, None) + + yield { + "provider_manager": mock_provider_manager, + "model_provider_factory": mock_model_provider_factory, + } + + def _create_test_account_and_tenant(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test account and tenant for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Create tenant for the account + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Set current tenant for account + account.current_tenant = tenant + + return account, tenant + + def _create_test_provider( + self, + db_session_with_containers, + mock_external_service_dependencies, + tenant_id: str, + provider_name: str = "openai", + ): + """ + Helper method to create a test provider for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the provider + provider_name: Name of the provider + + Returns: + Provider: Created provider instance + """ + fake = Faker() + + provider = Provider( + tenant_id=tenant_id, + provider_name=provider_name, + provider_type="custom", + is_valid=True, + quota_type="free", + quota_limit=1000, + quota_used=0, + ) + + from extensions.ext_database import db + + db.session.add(provider) + db.session.commit() + + return provider + + def _create_test_provider_model( + self, + db_session_with_containers, + mock_external_service_dependencies, + tenant_id: str, + provider_name: str, + model_name: str = "gpt-3.5-turbo", + model_type: str = "llm", + ): + """ + Helper method to create a test provider model for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the provider model + provider_name: Name of the provider + model_name: Name of the model + model_type: Type of the model + + Returns: + ProviderModel: Created provider model instance + """ + fake = Faker() + + provider_model = ProviderModel( + tenant_id=tenant_id, + provider_name=provider_name, + model_name=model_name, + model_type=model_type, + is_valid=True, + ) + + from extensions.ext_database import db + + db.session.add(provider_model) + db.session.commit() + + return provider_model + + def _create_test_provider_model_setting( + self, + db_session_with_containers, + mock_external_service_dependencies, + tenant_id: str, + provider_name: str, + model_name: str = "gpt-3.5-turbo", + model_type: str = "llm", + ): + """ + Helper method to create a test provider model setting for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the provider model setting + provider_name: Name of the provider + model_name: Name of the model + model_type: Type of the model + + Returns: + ProviderModelSetting: Created provider model setting instance + """ + fake = Faker() + + provider_model_setting = ProviderModelSetting( + tenant_id=tenant_id, + provider_name=provider_name, + model_name=model_name, + model_type=model_type, + enabled=True, + load_balancing_enabled=False, + ) + + from extensions.ext_database import db + + db.session.add(provider_model_setting) + db.session.commit() + + return provider_model_setting + + def test_get_provider_list_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful provider list retrieval. 
+ + This test verifies: + - Proper provider list retrieval with all required fields + - Correct filtering by model type + - Proper response structure and data mapping + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration + mock_provider_entity = MagicMock() + mock_provider_entity.provider = "openai" + mock_provider_entity.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"} + mock_provider_entity.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"} + mock_provider_entity.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} + mock_provider_entity.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} + mock_provider_entity.background = "#FF6B6B" + mock_provider_entity.help = None + mock_provider_entity.supported_model_types = [ModelType.LLM, ModelType.TEXT_EMBEDDING] + mock_provider_entity.configurate_methods = [] + mock_provider_entity.provider_credential_schema = None + mock_provider_entity.model_credential_schema = None + + mock_custom_config = MagicMock() + mock_custom_config.provider.current_credential_id = "credential-123" + mock_custom_config.provider.current_credential_name = "test-credential" + mock_custom_config.provider.available_credentials = [] + mock_custom_config.models = [] + + mock_provider_config = MagicMock() + mock_provider_config.provider = mock_provider_entity + mock_provider_config.preferred_provider_type = ProviderType.CUSTOM + mock_provider_config.is_custom_configuration_available.return_value = True + mock_provider_config.custom_configuration = mock_custom_config + mock_provider_config.system_configuration.enabled = True + mock_provider_config.system_configuration.current_quota_type = "free" + mock_provider_config.system_configuration.quota_configurations = [] + + mock_configurations = MagicMock() + mock_configurations.values.return_value = [mock_provider_config] + mock_provider_manager.get_configurations.return_value = mock_configurations + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_provider_list(tenant.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 1 + + provider_response = result[0] + assert provider_response.tenant_id == tenant.id + assert provider_response.provider == "openai" + assert provider_response.background == "#FF6B6B" + assert len(provider_response.supported_model_types) == 2 + assert ModelType.LLM in provider_response.supported_model_types + assert ModelType.TEXT_EMBEDDING in provider_response.supported_model_types + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_config.is_custom_configuration_available.assert_called_once() + + def test_get_provider_list_with_model_type_filter( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test provider list retrieval with model type filtering. 
+ + This test verifies: + - Proper filtering by model type + - Only providers supporting the specified model type are returned + - Correct handling of unsupported model types + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Mock ProviderManager to return multiple provider configurations + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configurations with different supported model types + mock_provider_entity_llm = MagicMock() + mock_provider_entity_llm.provider = "openai" + mock_provider_entity_llm.label = {"en_US": "OpenAI", "zh_Hans": "OpenAI"} + mock_provider_entity_llm.description = {"en_US": "OpenAI provider", "zh_Hans": "OpenAI 提供商"} + mock_provider_entity_llm.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} + mock_provider_entity_llm.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} + mock_provider_entity_llm.background = "#FF6B6B" + mock_provider_entity_llm.help = None + mock_provider_entity_llm.supported_model_types = [ModelType.LLM] + mock_provider_entity_llm.configurate_methods = [] + mock_provider_entity_llm.provider_credential_schema = None + mock_provider_entity_llm.model_credential_schema = None + + mock_provider_entity_embedding = MagicMock() + mock_provider_entity_embedding.provider = "cohere" + mock_provider_entity_embedding.label = {"en_US": "Cohere", "zh_Hans": "Cohere"} + mock_provider_entity_embedding.description = {"en_US": "Cohere provider", "zh_Hans": "Cohere 提供商"} + mock_provider_entity_embedding.icon_small = {"en_US": "icon_small.png", "zh_Hans": "icon_small.png"} + mock_provider_entity_embedding.icon_large = {"en_US": "icon_large.png", "zh_Hans": "icon_large.png"} + mock_provider_entity_embedding.background = "#4ECDC4" + mock_provider_entity_embedding.help = None + mock_provider_entity_embedding.supported_model_types = [ModelType.TEXT_EMBEDDING] + mock_provider_entity_embedding.configurate_methods = [] + mock_provider_entity_embedding.provider_credential_schema = None + mock_provider_entity_embedding.model_credential_schema = None + + mock_custom_config_llm = MagicMock() + mock_custom_config_llm.provider.current_credential_id = "credential-123" + mock_custom_config_llm.provider.current_credential_name = "test-credential" + mock_custom_config_llm.provider.available_credentials = [] + mock_custom_config_llm.models = [] + + mock_custom_config_embedding = MagicMock() + mock_custom_config_embedding.provider.current_credential_id = "credential-456" + mock_custom_config_embedding.provider.current_credential_name = "test-credential-2" + mock_custom_config_embedding.provider.available_credentials = [] + mock_custom_config_embedding.models = [] + + mock_provider_config_llm = MagicMock() + mock_provider_config_llm.provider = mock_provider_entity_llm + mock_provider_config_llm.preferred_provider_type = ProviderType.CUSTOM + mock_provider_config_llm.is_custom_configuration_available.return_value = True + mock_provider_config_llm.custom_configuration = mock_custom_config_llm + mock_provider_config_llm.system_configuration.enabled = True + mock_provider_config_llm.system_configuration.current_quota_type = "free" + mock_provider_config_llm.system_configuration.quota_configurations = [] + + mock_provider_config_embedding = MagicMock() + mock_provider_config_embedding.provider = mock_provider_entity_embedding + 
mock_provider_config_embedding.preferred_provider_type = ProviderType.CUSTOM + mock_provider_config_embedding.is_custom_configuration_available.return_value = True + mock_provider_config_embedding.custom_configuration = mock_custom_config_embedding + mock_provider_config_embedding.system_configuration.enabled = True + mock_provider_config_embedding.system_configuration.current_quota_type = "free" + mock_provider_config_embedding.system_configuration.quota_configurations = [] + + mock_configurations = MagicMock() + mock_configurations.values.return_value = [mock_provider_config_llm, mock_provider_config_embedding] + mock_provider_manager.get_configurations.return_value = mock_configurations + + # Act: Execute the method under test with LLM filter + service = ModelProviderService() + result = service.get_provider_list(tenant.id, model_type="llm") + + # Assert: Verify only LLM providers are returned + assert result is not None + assert len(result) == 1 + assert result[0].provider == "openai" + assert ModelType.LLM in result[0].supported_model_types + + # Act: Execute the method under test with TEXT_EMBEDDING filter + result = service.get_provider_list(tenant.id, model_type="text-embedding") + + # Assert: Verify only TEXT_EMBEDDING providers are returned + assert result is not None + assert len(result) == 1 + assert result[0].provider == "cohere" + assert ModelType.TEXT_EMBEDDING in result[0].supported_model_types + + def test_get_models_by_provider_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of models by provider. + + This test verifies: + - Proper model retrieval for a specific provider + - Correct response structure with tenant_id and model data + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider and models + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + provider_model_1 = self._create_test_provider_model( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai", "gpt-3.5-turbo", "llm" + ) + + provider_model_2 = self._create_test_provider_model( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai", "gpt-4", "llm" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock models + from core.entities.model_entities import ModelWithProviderEntity, SimpleModelProviderEntity + from core.model_runtime.entities.common_entities import I18nObject + from core.model_runtime.entities.provider_entities import ProviderEntity + + # Create real model objects instead of mocks + provider_entity_1 = SimpleModelProviderEntity( + ProviderEntity( + provider="openai", + label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), + icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), + icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), + supported_model_types=[ModelType.LLM], + configurate_methods=[], + models=[], + ) + ) + + provider_entity_2 = SimpleModelProviderEntity( + ProviderEntity( + provider="openai", + label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), + icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), + 
icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), + supported_model_types=[ModelType.LLM], + configurate_methods=[], + models=[], + ) + ) + + mock_model_1 = ModelWithProviderEntity( + model="gpt-3.5-turbo", + label=I18nObject(en_US="GPT-3.5 Turbo", zh_Hans="GPT-3.5 Turbo"), + model_type=ModelType.LLM, + features=[], + fetch_from=FetchFrom.PREDEFINED_MODEL, + model_properties={}, + deprecated=False, + provider=provider_entity_1, + status="active", + load_balancing_enabled=False, + ) + + mock_model_2 = ModelWithProviderEntity( + model="gpt-4", + label=I18nObject(en_US="GPT-4", zh_Hans="GPT-4"), + model_type=ModelType.LLM, + features=[], + fetch_from=FetchFrom.PREDEFINED_MODEL, + model_properties={}, + deprecated=False, + provider=provider_entity_2, + status="active", + load_balancing_enabled=False, + ) + + mock_configurations = MagicMock() + mock_configurations.get_models.return_value = [mock_model_1, mock_model_2] + mock_provider_manager.get_configurations.return_value = mock_configurations + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_models_by_provider(tenant.id, "openai") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 2 + + # Verify first model + assert result[0].provider.tenant_id == tenant.id + assert result[0].model == "gpt-3.5-turbo" + assert result[0].provider.provider == "openai" + + # Verify second model + assert result[1].provider.tenant_id == tenant.id + assert result[1].model == "gpt-4" + assert result[1].provider.provider == "openai" + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_configurations.get_models.assert_called_once_with(provider="openai") + + def test_get_provider_credentials_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of provider credentials. 
+
+        This test verifies:
+        - Proper credential retrieval for existing provider
+        - Correct handling of obfuscated credentials
+        - Mock interactions with ProviderManager
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create test provider
+        provider = self._create_test_provider(
+            db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai"
+        )
+
+        # Mock ProviderManager to return realistic configuration
+        mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value
+
+        # Create mock provider configuration with credentials
+        mock_provider_configuration = MagicMock()
+        mock_provider_configuration.get_custom_credentials.return_value = {
+            "api_key": "sk-***123",
+            "base_url": "https://api.openai.com",
+        }
+        mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration}
+
+        # Expected result structure
+        expected_credentials = {
+            "credentials": {
+                "api_key": "sk-***123",
+                "base_url": "https://api.openai.com",
+            }
+        }
+
+        # Act: call the method through a stub. get_provider_credential itself is
+        # patched here, so this test pins down the expected response contract
+        # (shape and obfuscated values) rather than exercising the implementation.
+        service = ModelProviderService()
+        with patch.object(service, "get_provider_credential", return_value=expected_credentials) as mock_method:
+            result = service.get_provider_credential(tenant.id, "openai")
+
+            # Assert: Verify the expected outcomes
+            assert result is not None
+            assert "credentials" in result
+            assert "api_key" in result["credentials"]
+            assert "base_url" in result["credentials"]
+            assert result["credentials"]["api_key"] == "sk-***123"
+            assert result["credentials"]["base_url"] == "https://api.openai.com"
+
+            # Verify the stub was called with the expected parameters
+            mock_method.assert_called_once_with(tenant.id, "openai")
+
+    def test_provider_credentials_validate_success(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test successful validation of provider credentials.
+
+        This test verifies:
+        - Proper credential validation for existing provider
+        - Correct handling of valid credentials
+        - Mock interactions with ProviderManager
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create test provider
+        provider = self._create_test_provider(
+            db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai"
+        )
+
+        # Mock ProviderManager to return realistic configuration
+        mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value
+
+        # Create mock provider configuration with validation method
+        # (stub the same method the test asserts on below)
+        mock_provider_configuration = MagicMock()
+        mock_provider_configuration.validate_provider_credentials.return_value = True
+        mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration}
+
+        # Test credentials
+        test_credentials = {"api_key": "sk-test123", "base_url": "https://api.openai.com"}
+
+        # Act: Execute the method under test
+        service = ModelProviderService()
+        # This should not raise an exception
+        service.validate_provider_credentials(tenant.id, "openai", test_credentials)
+
+        # Assert: Verify mock interactions
+        mock_provider_manager.get_configurations.assert_called_once_with(tenant.id)
+        mock_provider_configuration.validate_provider_credentials.assert_called_once_with(test_credentials)
+
+    def test_provider_credentials_validate_invalid_provider(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test validation failure for non-existent provider.
+
+        This test verifies:
+        - Proper error handling for non-existent provider
+        - Correct exception raising
+        - Mock interactions with ProviderManager
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Mock ProviderManager to return empty configurations
+        mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value
+        mock_provider_manager.get_configurations.return_value = {}
+
+        # Test credentials
+        test_credentials = {"api_key": "sk-test123", "base_url": "https://api.openai.com"}
+
+        # Act & Assert: Execute the method under test and verify exception
+        service = ModelProviderService()
+        with pytest.raises(ValueError, match="Provider nonexistent does not exist."):
+            service.validate_provider_credentials(tenant.id, "nonexistent", test_credentials)
+
+        # Verify mock interactions
+        mock_provider_manager.get_configurations.assert_called_once_with(tenant.id)
+
+    def test_get_default_model_of_model_type_success(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test successful retrieval of default model for a specific model type.
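+
+        Sketch of the exercised call, with the values this test's mocks return:
+
+            result = service.get_default_model_of_model_type(tenant.id, "llm")
+            # result.model == "gpt-3.5-turbo"; result.provider.provider == "openai"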
+ + This test verifies: + - Proper default model retrieval for tenant and model type + - Correct response structure with tenant_id and model data + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic default model + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock default model response + from core.entities.model_entities import DefaultModelEntity, DefaultModelProviderEntity + from core.model_runtime.entities.common_entities import I18nObject + + mock_default_model = DefaultModelEntity( + model="gpt-3.5-turbo", + model_type=ModelType.LLM, + provider=DefaultModelProviderEntity( + provider="openai", + label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), + icon_small=I18nObject(en_US="icon_small.png", zh_Hans="icon_small.png"), + icon_large=I18nObject(en_US="icon_large.png", zh_Hans="icon_large.png"), + supported_model_types=[ModelType.LLM], + ), + ) + + mock_provider_manager.get_default_model.return_value = mock_default_model + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_default_model_of_model_type(tenant.id, "llm") + + # Assert: Verify the expected outcomes + assert result is not None + assert result.model == "gpt-3.5-turbo" + assert result.model_type == ModelType.LLM + assert result.provider.tenant_id == tenant.id + assert result.provider.provider == "openai" + + # Verify mock interactions + mock_provider_manager.get_default_model.assert_called_once_with(tenant_id=tenant.id, model_type=ModelType.LLM) + + def test_update_default_model_of_model_type_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful update of default model for a specific model type. + + This test verifies: + - Proper default model update for tenant and model type + - Correct mock interactions with ProviderManager + - Database state management + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Act: Execute the method under test + service = ModelProviderService() + service.update_default_model_of_model_type(tenant.id, "llm", "openai", "gpt-4") + + # Assert: Verify mock interactions + mock_provider_manager.update_default_model_record.assert_called_once_with( + tenant_id=tenant.id, model_type=ModelType.LLM, provider="openai", model="gpt-4" + ) + + def test_get_model_provider_icon_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of model provider icon. 
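+
+        Sketch of the exercised call; the (bytes, mime_type) pair is the shape
+        this test's mock returns:
+
+            icon, mime_type = service.get_model_provider_icon(
+                tenant.id, "openai", "icon_small", "en_US"
+            )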
+ + This test verifies: + - Proper icon retrieval for provider and icon type + - Correct response structure with byte data and mime type + - Mock interactions with ModelProviderFactory + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ModelProviderFactory to return realistic icon data + mock_model_provider_factory = mock_external_service_dependencies["model_provider_factory"].return_value + mock_model_provider_factory.get_provider_icon.return_value = (b"fake_icon_data", "image/png") + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_model_provider_icon(tenant.id, "openai", "icon_small", "en_US") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 2 + assert result[0] == b"fake_icon_data" + assert result[1] == "image/png" + + # Verify mock interactions + mock_model_provider_factory.get_provider_icon.assert_called_once_with("openai", "icon_small", "en_US") + + def test_switch_preferred_provider_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful switching of preferred provider type. + + This test verifies: + - Proper provider type switching for tenant and provider + - Correct mock interactions with ProviderManager + - Provider configuration management + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with switch method + mock_provider_configuration = MagicMock() + mock_provider_configuration.switch_preferred_provider_type.return_value = None + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Act: Execute the method under test + service = ModelProviderService() + service.switch_preferred_provider(tenant.id, "openai", "custom") + + # Assert: Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configuration.switch_preferred_provider_type.assert_called_once() + + def test_enable_model_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful enabling of a model. 
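+
+        Sketch of the exercised call and the delegation asserted below:
+
+            service.enable_model(tenant.id, "openai", "gpt-4", "llm")
+            # -> provider_configuration.enable_model(model_type=ModelType.LLM, model="gpt-4")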
+ + This test verifies: + - Proper model enabling for tenant, provider, and model + - Correct mock interactions with ProviderManager + - Model configuration management + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with enable method + mock_provider_configuration = MagicMock() + mock_provider_configuration.enable_model.return_value = None + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Act: Execute the method under test + service = ModelProviderService() + service.enable_model(tenant.id, "openai", "gpt-4", "llm") + + # Assert: Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configuration.enable_model.assert_called_once_with(model_type=ModelType.LLM, model="gpt-4") + + def test_get_model_credentials_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of model credentials. + + This test verifies: + - Proper credential retrieval for model + - Correct response structure with obfuscated credentials + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with model credentials + mock_provider_configuration = MagicMock() + mock_provider_configuration.get_custom_model_credentials.return_value = { + "api_key": "sk-***123", + "base_url": "https://api.openai.com", + } + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Expected result structure + expected_credentials = { + "credentials": { + "api_key": "sk-***123", + "base_url": "https://api.openai.com", + } + } + + # Act: Execute the method under test + service = ModelProviderService() + with patch.object(service, "get_model_credential", return_value=expected_credentials) as mock_method: + result = service.get_model_credential(tenant.id, "openai", "llm", "gpt-4", None) + + # Assert: Verify the expected outcomes + assert result is not None + assert "credentials" in result + assert "api_key" in result["credentials"] + assert "base_url" in result["credentials"] + assert result["credentials"]["api_key"] == "sk-***123" + assert result["credentials"]["base_url"] == "https://api.openai.com" + + # Verify the method was called with correct parameters + mock_method.assert_called_once_with(tenant.id, "openai", "llm", "gpt-4", None) + + def test_model_credentials_validate_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful validation of model credentials. 
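+
+        Sketch of the exercised call (argument order mirrors the test body):
+
+            service.validate_model_credentials(
+                tenant.id, "openai", "llm", "gpt-4", test_credentials
+            )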
+
+        This test verifies:
+        - Proper credential validation for model
+        - Correct mock interactions with ProviderManager
+        - Model credential validation process
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create test provider
+        provider = self._create_test_provider(
+            db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai"
+        )
+
+        # Mock ProviderManager to return realistic configuration
+        mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value
+
+        # Create mock provider configuration with validation method
+        # (stub the same method the test asserts on below)
+        mock_provider_configuration = MagicMock()
+        mock_provider_configuration.validate_custom_model_credentials.return_value = True
+        mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration}
+
+        # Test credentials
+        test_credentials = {"api_key": "sk-test123", "base_url": "https://api.openai.com"}
+
+        # Act: Execute the method under test
+        service = ModelProviderService()
+        # This should not raise an exception
+        service.validate_model_credentials(tenant.id, "openai", "llm", "gpt-4", test_credentials)
+
+        # Assert: Verify mock interactions
+        mock_provider_manager.get_configurations.assert_called_once_with(tenant.id)
+        mock_provider_configuration.validate_custom_model_credentials.assert_called_once_with(
+            model_type=ModelType.LLM, model="gpt-4", credentials=test_credentials
+        )
+
+    def test_save_model_credentials_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful saving of model credentials.
+
+        This test verifies:
+        - Proper credential saving for model
+        - Correct mock interactions with ProviderManager
+        - Model credential management
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        account, tenant = self._create_test_account_and_tenant(
+            db_session_with_containers, mock_external_service_dependencies
+        )
+
+        # Create test provider
+        provider = self._create_test_provider(
+            db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai"
+        )
+
+        # Mock ProviderManager to return realistic configuration
+        mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value
+
+        # Create mock provider configuration with save method
+        # (stub the same method the test asserts on below)
+        mock_provider_configuration = MagicMock()
+        mock_provider_configuration.create_custom_model_credential.return_value = None
+        mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration}
+
+        # Test credentials
+        test_credentials = {"api_key": "sk-test123", "base_url": "https://api.openai.com"}
+
+        # Act: Execute the method under test
+        service = ModelProviderService()
+        service.create_model_credential(tenant.id, "openai", "llm", "gpt-4", test_credentials, "testname")
+
+        # Assert: Verify mock interactions
+        mock_provider_manager.get_configurations.assert_called_once_with(tenant.id)
+        mock_provider_configuration.create_custom_model_credential.assert_called_once_with(
+            model_type=ModelType.LLM, model="gpt-4", credentials=test_credentials, credential_name="testname"
+        )
+
+    def test_remove_model_credentials_success(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful removal of model credentials.
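+
+        Sketch of the exercised call; the credential to remove is addressed by
+        its ID, as asserted below:
+
+            service.remove_model_credential(tenant.id, "openai", "llm", "gpt-4", credential_id)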
+ + This test verifies: + - Proper credential removal for model + - Correct mock interactions with ProviderManager + - Model credential cleanup + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with remove method + mock_provider_configuration = MagicMock() + mock_provider_configuration.delete_custom_model_credential.return_value = None + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Act: Execute the method under test + service = ModelProviderService() + service.remove_model_credential(tenant.id, "openai", "llm", "gpt-4", "5540007c-b988-46e0-b1c7-9b5fb9f330d6") + + # Assert: Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configuration.delete_custom_model_credential.assert_called_once_with( + model_type=ModelType.LLM, model="gpt-4", credential_id="5540007c-b988-46e0-b1c7-9b5fb9f330d6" + ) + + def test_get_models_by_model_type_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of models by model type. + + This test verifies: + - Proper model retrieval for specific model type + - Correct response structure with provider grouping + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configurations object with get_models method + mock_provider_configurations = MagicMock() + mock_provider_configurations.get_models.return_value = [ + MagicMock( + provider=MagicMock( + provider="openai", + label={"en_US": "OpenAI", "zh_Hans": "OpenAI"}, + icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}, + icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}, + ), + model="gpt-3.5-turbo", + model_type=ModelType.LLM, + status=ModelStatus.ACTIVE, + deprecated=False, + label={"en_US": "GPT-3.5 Turbo", "zh_Hans": "GPT-3.5 Turbo"}, + features=[], + fetch_from="predefined-model", + model_properties={}, + load_balancing_enabled=False, + ), + MagicMock( + provider=MagicMock( + provider="openai", + label={"en_US": "OpenAI", "zh_Hans": "OpenAI"}, + icon_small={"en_US": "icon_small.png", "zh_Hans": "icon_small.png"}, + icon_large={"en_US": "icon_large.png", "zh_Hans": "icon_large.png"}, + ), + model="gpt-4", + model_type=ModelType.LLM, + status=ModelStatus.ACTIVE, + deprecated=False, + label={"en_US": "GPT-4", "zh_Hans": "GPT-4"}, + features=[], + fetch_from="predefined-model", + model_properties={}, + load_balancing_enabled=False, + ), + ] + mock_provider_manager.get_configurations.return_value = mock_provider_configurations + + # 
Act: Execute the method under test + service = ModelProviderService() + result = service.get_models_by_model_type(tenant.id, "llm") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 1 # One provider group + assert result[0].provider == "openai" + assert len(result[0].models) == 2 # Two models in the provider + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configurations.get_models.assert_called_once_with(model_type=ModelType.LLM, only_active=True) + + def test_get_model_parameter_rules_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of model parameter rules. + + This test verifies: + - Proper parameter rules retrieval for model + - Correct mock interactions with ProviderManager + - Model schema handling + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with parameter rules + mock_provider_configuration = MagicMock() + mock_credentials = {"api_key": "sk-test123"} + mock_model_schema = MagicMock() + + # Create mock parameter rules with proper return values + mock_temperature_rule = MagicMock() + mock_temperature_rule.name = "temperature" + mock_temperature_rule.type = "float" + mock_temperature_rule.min = 0.0 + mock_temperature_rule.max = 2.0 + + mock_max_tokens_rule = MagicMock() + mock_max_tokens_rule.name = "max_tokens" + mock_max_tokens_rule.type = "integer" + mock_max_tokens_rule.min = 1 + mock_max_tokens_rule.max = 4096 + + mock_model_schema.parameter_rules = [mock_temperature_rule, mock_max_tokens_rule] + + mock_provider_configuration.get_current_credentials.return_value = mock_credentials + mock_provider_configuration.get_model_schema.return_value = mock_model_schema + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_model_parameter_rules(tenant.id, "openai", "gpt-4") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 2 + assert result[0].name == "temperature" + assert result[1].name == "max_tokens" + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configuration.get_current_credentials.assert_called_once_with( + model_type=ModelType.LLM, model="gpt-4" + ) + mock_provider_configuration.get_model_schema.assert_called_once_with( + model_type=ModelType.LLM, model="gpt-4", credentials=mock_credentials + ) + + def test_get_model_parameter_rules_no_credentials( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test parameter rules retrieval when no credentials are available. 
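+
+        Expected shape for this edge case, as asserted below:
+
+            service.get_model_parameter_rules(tenant.id, "openai", "gpt-4")
+            # -> [] when get_current_credentials returns None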
+ + This test verifies: + - Proper handling of missing credentials + - Empty result when no credentials exist + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create test provider + provider = self._create_test_provider( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "openai" + ) + + # Mock ProviderManager to return realistic configuration + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + + # Create mock provider configuration with no credentials + mock_provider_configuration = MagicMock() + mock_provider_configuration.get_current_credentials.return_value = None + mock_provider_manager.get_configurations.return_value = {"openai": mock_provider_configuration} + + # Act: Execute the method under test + service = ModelProviderService() + result = service.get_model_parameter_rules(tenant.id, "openai", "gpt-4") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) + mock_provider_configuration.get_current_credentials.assert_called_once_with( + model_type=ModelType.LLM, model="gpt-4" + ) + + def test_get_model_parameter_rules_provider_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test parameter rules retrieval when provider does not exist. + + This test verifies: + - Proper error handling for non-existent provider + - ValueError is raised with appropriate message + - Mock interactions with ProviderManager + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Mock ProviderManager to return empty configurations + mock_provider_manager = mock_external_service_dependencies["provider_manager"].return_value + mock_provider_manager.get_configurations.return_value = {} + + # Act & Assert: Execute the method under test and expect ValueError + service = ModelProviderService() + with pytest.raises(ValueError, match="Provider openai does not exist."): + service.get_model_parameter_rules(tenant.id, "openai", "gpt-4") + + # Verify mock interactions + mock_provider_manager.get_configurations.assert_called_once_with(tenant.id) diff --git a/api/tests/test_containers_integration_tests/services/test_saved_message_service.py b/api/tests/test_containers_integration_tests/services/test_saved_message_service.py new file mode 100644 index 0000000000..9e6b9837ae --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_saved_message_service.py @@ -0,0 +1,620 @@ +from unittest.mock import patch + +import pytest +from faker import Faker + +from models.model import EndUser, Message +from models.web import SavedMessage +from services.app_service import AppService +from services.saved_message_service import SavedMessageService + + +class TestSavedMessageService: + """Integration tests for SavedMessageService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.account_service.FeatureService") as mock_account_feature_service, + patch("services.app_service.ModelManager") as mock_model_manager, + 
patch("services.saved_message_service.MessageService") as mock_message_service, + ): + # Setup default mock returns + mock_account_feature_service.get_system_features.return_value.is_allow_register = True + + # Mock ModelManager for app creation + mock_model_instance = mock_model_manager.return_value + mock_model_instance.get_default_model_instance.return_value = None + mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo") + + # Mock MessageService + mock_message_service.get_message.return_value = None + mock_message_service.pagination_by_last_id.return_value = None + + yield { + "account_feature_service": mock_account_feature_service, + "model_manager": mock_model_manager, + "message_service": mock_message_service, + } + + def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test app and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (app, account) - Created app and account instances + """ + fake = Faker() + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + + # Create account and tenant first + from services.account_service import AccountService, TenantService + + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Create app with realistic data + app_args = { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + } + + app_service = AppService() + app = app_service.create_app(tenant.id, app_args, account) + + return app, account + + def _create_test_end_user(self, db_session_with_containers, app): + """ + Helper method to create a test end user for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance to associate the end user with + + Returns: + EndUser: Created end user instance + """ + fake = Faker() + + end_user = EndUser( + tenant_id=app.tenant_id, + app_id=app.id, + external_user_id=fake.uuid4(), + name=fake.name(), + type="normal", + session_id=fake.uuid4(), + is_anonymous=False, + ) + + from extensions.ext_database import db + + db.session.add(end_user) + db.session.commit() + + return end_user + + def _create_test_message(self, db_session_with_containers, app, user): + """ + Helper method to create a test message for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance to associate the message with + user: User instance (Account or EndUser) to associate the message with + + Returns: + Message: Created message instance + """ + fake = Faker() + + # Create a simple conversation first + from models.model import Conversation + + conversation = Conversation( + app_id=app.id, + from_source="account" if hasattr(user, "current_tenant") else "end_user", + from_end_user_id=user.id if not hasattr(user, "current_tenant") else None, + from_account_id=user.id if hasattr(user, "current_tenant") else None, + name=fake.sentence(nb_words=3), + inputs={}, + status="normal", + mode="chat", + ) + + from extensions.ext_database import db + + db.session.add(conversation) + db.session.commit() + + # Create message + message = Message( + app_id=app.id, + conversation_id=conversation.id, + from_source="account" if hasattr(user, "current_tenant") else "end_user", + from_end_user_id=user.id if not hasattr(user, "current_tenant") else None, + from_account_id=user.id if hasattr(user, "current_tenant") else None, + inputs={}, + query=fake.sentence(nb_words=5), + message=fake.text(max_nb_chars=100), + answer=fake.text(max_nb_chars=200), + message_tokens=50, + answer_tokens=100, + message_unit_price=0.001, + answer_unit_price=0.002, + total_price=0.003, + currency="USD", + status="success", + ) + + db.session.add(message) + db.session.commit() + + return message + + def test_pagination_by_last_id_success_with_account_user( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful pagination by last ID with account user. + + This test verifies: + - Proper pagination with account user + - Correct filtering by app_id and user + - Proper role identification for account users + - MessageService integration + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create test messages + message1 = self._create_test_message(db_session_with_containers, app, account) + message2 = self._create_test_message(db_session_with_containers, app, account) + + # Create saved messages + saved_message1 = SavedMessage( + app_id=app.id, + message_id=message1.id, + created_by_role="account", + created_by=account.id, + ) + saved_message2 = SavedMessage( + app_id=app.id, + message_id=message2.id, + created_by_role="account", + created_by=account.id, + ) + + from extensions.ext_database import db + + db.session.add_all([saved_message1, saved_message2]) + db.session.commit() + + # Mock MessageService.pagination_by_last_id return value + from libs.infinite_scroll_pagination import InfiniteScrollPagination + + mock_pagination = InfiniteScrollPagination(data=[message1, message2], limit=10, has_more=False) + mock_external_service_dependencies["message_service"].pagination_by_last_id.return_value = mock_pagination + + # Act: Execute the method under test + result = SavedMessageService.pagination_by_last_id(app_model=app, user=account, last_id=None, limit=10) + + # Assert: Verify the expected outcomes + assert result is not None + assert result.data == [message1, message2] + assert result.limit == 10 + assert result.has_more is False + + # Verify MessageService was called with correct parameters + # Sort the IDs to handle database query order variations + expected_include_ids = sorted([message1.id, message2.id]) + actual_call = 
mock_external_service_dependencies["message_service"].pagination_by_last_id.call_args + actual_include_ids = sorted(actual_call.kwargs.get("include_ids", [])) + + assert actual_call.kwargs["app_model"] == app + assert actual_call.kwargs["user"] == account + assert actual_call.kwargs["last_id"] is None + assert actual_call.kwargs["limit"] == 10 + assert actual_include_ids == expected_include_ids + + # Verify database state + db.session.refresh(saved_message1) + db.session.refresh(saved_message2) + assert saved_message1.id is not None + assert saved_message2.id is not None + assert saved_message1.created_by_role == "account" + assert saved_message2.created_by_role == "account" + + def test_pagination_by_last_id_success_with_end_user( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful pagination by last ID with end user. + + This test verifies: + - Proper pagination with end user + - Correct filtering by app_id and user + - Proper role identification for end users + - MessageService integration + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + end_user = self._create_test_end_user(db_session_with_containers, app) + + # Create test messages + message1 = self._create_test_message(db_session_with_containers, app, end_user) + message2 = self._create_test_message(db_session_with_containers, app, end_user) + + # Create saved messages + saved_message1 = SavedMessage( + app_id=app.id, + message_id=message1.id, + created_by_role="end_user", + created_by=end_user.id, + ) + saved_message2 = SavedMessage( + app_id=app.id, + message_id=message2.id, + created_by_role="end_user", + created_by=end_user.id, + ) + + from extensions.ext_database import db + + db.session.add_all([saved_message1, saved_message2]) + db.session.commit() + + # Mock MessageService.pagination_by_last_id return value + from libs.infinite_scroll_pagination import InfiniteScrollPagination + + mock_pagination = InfiniteScrollPagination(data=[message1, message2], limit=5, has_more=True) + mock_external_service_dependencies["message_service"].pagination_by_last_id.return_value = mock_pagination + + # Act: Execute the method under test + result = SavedMessageService.pagination_by_last_id( + app_model=app, user=end_user, last_id="test_last_id", limit=5 + ) + + # Assert: Verify the expected outcomes + assert result is not None + assert result.data == [message1, message2] + assert result.limit == 5 + assert result.has_more is True + + # Verify MessageService was called with correct parameters + # Sort the IDs to handle database query order variations + expected_include_ids = sorted([message1.id, message2.id]) + actual_call = mock_external_service_dependencies["message_service"].pagination_by_last_id.call_args + actual_include_ids = sorted(actual_call.kwargs.get("include_ids", [])) + + assert actual_call.kwargs["app_model"] == app + assert actual_call.kwargs["user"] == end_user + assert actual_call.kwargs["last_id"] == "test_last_id" + assert actual_call.kwargs["limit"] == 5 + assert actual_include_ids == expected_include_ids + + # Verify database state + db.session.refresh(saved_message1) + db.session.refresh(saved_message2) + assert saved_message1.id is not None + assert saved_message2.id is not None + assert saved_message1.created_by_role == "end_user" + assert saved_message2.created_by_role == "end_user" + + def test_save_success_with_new_message(self, 
db_session_with_containers, mock_external_service_dependencies): + """ + Test successful save of a new message. + + This test verifies: + - Proper creation of new saved message + - Correct database state after save + - Proper relationship establishment + - MessageService integration for message retrieval + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + message = self._create_test_message(db_session_with_containers, app, account) + + # Mock MessageService.get_message return value + mock_external_service_dependencies["message_service"].get_message.return_value = message + + # Act: Execute the method under test + SavedMessageService.save(app_model=app, user=account, message_id=message.id) + + # Assert: Verify the expected outcomes + # Check if saved message was created in database + from extensions.ext_database import db + + saved_message = ( + db.session.query(SavedMessage) + .where( + SavedMessage.app_id == app.id, + SavedMessage.message_id == message.id, + SavedMessage.created_by_role == "account", + SavedMessage.created_by == account.id, + ) + .first() + ) + + assert saved_message is not None + assert saved_message.app_id == app.id + assert saved_message.message_id == message.id + assert saved_message.created_by_role == "account" + assert saved_message.created_by == account.id + assert saved_message.created_at is not None + + # Verify MessageService.get_message was called + mock_external_service_dependencies["message_service"].get_message.assert_called_once_with( + app_model=app, user=account, message_id=message.id + ) + + # Verify database state + db.session.refresh(saved_message) + assert saved_message.id is not None + + def test_pagination_by_last_id_error_no_user(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test error handling when no user is provided. + + This test verifies: + - Proper error handling for missing user + - ValueError is raised when user is None + - No database operations are performed + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + SavedMessageService.pagination_by_last_id(app_model=app, user=None, last_id=None, limit=10) + + assert "User is required" in str(exc_info.value) + + # Verify no database operations were performed + from extensions.ext_database import db + + saved_messages = db.session.query(SavedMessage).all() + assert len(saved_messages) == 0 + + def test_save_error_no_user(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test error handling when saving message with no user. 
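+
+        Exercised call shape (returns None and persists nothing):
+
+            SavedMessageService.save(app_model=app, user=None, message_id=message.id)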
+
+        This test verifies:
+        - Method returns early when user is None
+        - No database operations are performed
+        - No exceptions are raised
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
+        message = self._create_test_message(db_session_with_containers, app, account)
+
+        # Act: Execute the method under test with None user
+        result = SavedMessageService.save(app_model=app, user=None, message_id=message.id)
+
+        # Assert: Verify the expected outcomes
+        assert result is None
+
+        # Verify no saved message was created
+        from extensions.ext_database import db
+
+        saved_message = (
+            db.session.query(SavedMessage)
+            .where(
+                SavedMessage.app_id == app.id,
+                SavedMessage.message_id == message.id,
+            )
+            .first()
+        )
+
+        assert saved_message is None
+
+    def test_delete_success_existing_message(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Test successful deletion of an existing saved message.
+
+        This test verifies:
+        - Proper deletion of existing saved message
+        - Correct database state after deletion
+        - No errors during deletion process
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
+        message = self._create_test_message(db_session_with_containers, app, account)
+
+        # Create a saved message first
+        saved_message = SavedMessage(
+            app_id=app.id,
+            message_id=message.id,
+            created_by_role="account",
+            created_by=account.id,
+        )
+
+        from extensions.ext_database import db
+
+        db.session.add(saved_message)
+        db.session.commit()
+
+        # Verify saved message exists
+        assert (
+            db.session.query(SavedMessage)
+            .where(
+                SavedMessage.app_id == app.id,
+                SavedMessage.message_id == message.id,
+                SavedMessage.created_by_role == "account",
+                SavedMessage.created_by == account.id,
+            )
+            .first()
+            is not None
+        )
+
+        # Act: Execute the method under test
+        SavedMessageService.delete(app_model=app, user=account, message_id=message.id)
+
+        # Assert: Verify the expected outcomes
+        # Check if saved message was deleted from database
+        deleted_saved_message = (
+            db.session.query(SavedMessage)
+            .where(
+                SavedMessage.app_id == app.id,
+                SavedMessage.message_id == message.id,
+                SavedMessage.created_by_role == "account",
+                SavedMessage.created_by == account.id,
+            )
+            .first()
+        )
+
+        assert deleted_saved_message is None
+
+        # Verify database state
+        db.session.commit()
+        # The message should still exist, only the saved_message should be deleted
+        assert db.session.query(Message).where(Message.id == message.id).first() is not None
diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py b/api/tests/test_containers_integration_tests/services/test_tag_service.py
new file mode 100644
index 0000000000..2d5cdf426d
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py
@@ -0,0 +1,1192 @@
+from unittest.mock import patch
+
+import pytest
+from faker import Faker
+from werkzeug.exceptions import NotFound
+
+from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
+from models.dataset import Dataset
+from models.model import App, Tag, TagBinding
+from services.tag_service import TagService
+
+
+class TestTagService:
+    """Integration tests for TagService using testcontainers."""
+
+    @pytest.fixture
+    def mock_external_service_dependencies(self):
+        """Mock setup for external service dependencies."""
+        with (
+            patch("services.tag_service.current_user") as mock_current_user,
+        ):
+            # Setup default mock returns
+            mock_current_user.current_tenant_id = "test-tenant-id"
+            mock_current_user.id = "test-user-id"
+
+            yield {
+                "current_user": mock_current_user,
+            }
+
+    def _create_test_account_and_tenant(self, db_session_with_containers, mock_external_service_dependencies):
+        """
+        Helper method to create a test account and tenant for testing.
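+
+        Typical usage in the tests below (this helper also points the mocked
+        current_user at the created tenant and account):
+
+            account, tenant = self._create_test_account_and_tenant(
+                db_session_with_containers, mock_external_service_dependencies
+            )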
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Create tenant for the account + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Set current tenant for account + account.current_tenant = tenant + + # Update mock to use real tenant ID + mock_external_service_dependencies["current_user"].current_tenant_id = tenant.id + mock_external_service_dependencies["current_user"].id = account.id + + return account, tenant + + def _create_test_dataset(self, db_session_with_containers, mock_external_service_dependencies, tenant_id): + """ + Helper method to create a test dataset for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the dataset + + Returns: + Dataset: Created dataset instance + """ + fake = Faker() + + dataset = Dataset( + name=fake.company(), + description=fake.text(max_nb_chars=100), + provider="vendor", + permission="only_me", + data_source_type="upload", + indexing_technique="high_quality", + tenant_id=tenant_id, + created_by=mock_external_service_dependencies["current_user"].id, + ) + + from extensions.ext_database import db + + db.session.add(dataset) + db.session.commit() + + return dataset + + def _create_test_app(self, db_session_with_containers, mock_external_service_dependencies, tenant_id): + """ + Helper method to create a test app for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the app + + Returns: + App: Created app instance + """ + fake = Faker() + + app = App( + name=fake.company(), + description=fake.text(max_nb_chars=100), + mode="chat", + icon_type="emoji", + icon="🤖", + icon_background="#FF6B6B", + enable_site=False, + enable_api=False, + tenant_id=tenant_id, + created_by=mock_external_service_dependencies["current_user"].id, + ) + + from extensions.ext_database import db + + db.session.add(app) + db.session.commit() + + return app + + def _create_test_tags( + self, db_session_with_containers, mock_external_service_dependencies, tenant_id, tag_type, count=3 + ): + """ + Helper method to create test tags for testing. 
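+
+        Typical usage in the tests below:
+
+            tags = self._create_test_tags(
+                db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 3
+            )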
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant_id: Tenant ID for the tags + tag_type: Type of tags to create + count: Number of tags to create + + Returns: + list: List of created tag instances + """ + fake = Faker() + tags = [] + + for i in range(count): + tag = Tag( + name=f"tag_{tag_type}_{i}_{fake.word()}", + type=tag_type, + tenant_id=tenant_id, + created_by=mock_external_service_dependencies["current_user"].id, + ) + tags.append(tag) + + from extensions.ext_database import db + + for tag in tags: + db.session.add(tag) + db.session.commit() + + return tags + + def _create_test_tag_bindings( + self, db_session_with_containers, mock_external_service_dependencies, tags, target_id, tenant_id + ): + """ + Helper method to create test tag bindings for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tags: List of tags to bind + target_id: Target ID to bind tags to + tenant_id: Tenant ID for the bindings + + Returns: + list: List of created tag binding instances + """ + tag_bindings = [] + + for tag in tags: + tag_binding = TagBinding( + tag_id=tag.id, + target_id=target_id, + tenant_id=tenant_id, + created_by=mock_external_service_dependencies["current_user"].id, + ) + tag_bindings.append(tag_binding) + + from extensions.ext_database import db + + for tag_binding in tag_bindings: + db.session.add(tag_binding) + db.session.commit() + + return tag_bindings + + def test_get_tags_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of tags with binding count. + + This test verifies: + - Proper tag retrieval with binding count + - Correct filtering by tag type and tenant + - Proper ordering by creation date + - Binding count calculation + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 3 + ) + + # Create dataset and bind tags + dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id) + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, tags[:2], dataset.id, tenant.id + ) + + # Act: Execute the method under test + result = TagService.get_tags("knowledge", tenant.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 3 + + # Verify tag data structure + for tag_result in result: + assert hasattr(tag_result, "id") + assert hasattr(tag_result, "type") + assert hasattr(tag_result, "name") + assert hasattr(tag_result, "binding_count") + assert tag_result.type == "knowledge" + + # Verify binding count + tag_with_bindings = next((t for t in result if t.binding_count > 0), None) + assert tag_with_bindings is not None + assert tag_with_bindings.binding_count >= 1 + + # Verify ordering (newest first) - note: created_at is not in SELECT but used in ORDER BY + # The ordering is handled by the database, we just verify the results are returned + assert len(result) == 3 + + def test_get_tags_with_keyword_filter(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag retrieval with keyword 
filtering. + + This test verifies: + - Proper keyword filtering functionality + - Case-insensitive search + - Partial match functionality + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags with specific names + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 3 + ) + + # Update tag names to make them searchable + from extensions.ext_database import db + + tags[0].name = "python_development" + tags[1].name = "machine_learning" + tags[2].name = "web_development" + db.session.commit() + + # Act: Execute the method under test with keyword filter + result = TagService.get_tags("app", tenant.id, keyword="development") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 2 # Should find python_development and web_development + + # Verify filtered results contain the keyword + for tag_result in result: + assert "development" in tag_result.name.lower() + + # Verify no results for non-matching keyword + result_no_match = TagService.get_tags("app", tenant.id, keyword="nonexistent") + assert len(result_no_match) == 0 + + def test_get_tags_empty_result(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag retrieval when no tags exist. + + This test verifies: + - Proper handling of empty tag sets + - Correct return value for no results + """ + # Arrange: Create test data without tags + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute the method under test + result = TagService.get_tags("knowledge", tenant.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + assert isinstance(result, list) + + def test_get_target_ids_by_tag_ids_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of target IDs by tag IDs. 
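+
+        Exercised call shape (tag_ids are IDs of tags created above; a target
+        ID appears once per matching binding):
+
+            TagService.get_target_ids_by_tag_ids("knowledge", tenant.id, tag_ids)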
+ + This test verifies: + - Proper target ID retrieval for valid tag IDs + - Correct filtering by tag type and tenant + - Proper handling of tag bindings + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 3 + ) + + # Create multiple datasets and bind tags + datasets = [] + for i in range(2): + dataset = self._create_test_dataset( + db_session_with_containers, mock_external_service_dependencies, tenant.id + ) + datasets.append(dataset) + # Bind first two tags to first dataset, last tag to second dataset + tags_to_bind = tags[:2] if i == 0 else tags[2:] + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, tags_to_bind, dataset.id, tenant.id + ) + + # Act: Execute the method under test + tag_ids = [tag.id for tag in tags] + result = TagService.get_target_ids_by_tag_ids("knowledge", tenant.id, tag_ids) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 3 # Should find 3 target IDs (2 from first dataset, 1 from second) + + # Verify all dataset IDs are returned + dataset_ids = [dataset.id for dataset in datasets] + for target_id in result: + assert target_id in dataset_ids + + # Verify the first dataset appears twice (for the first two tags) + first_dataset_count = result.count(datasets[0].id) + assert first_dataset_count == 2 + + # Verify the second dataset appears once (for the last tag) + second_dataset_count = result.count(datasets[1].id) + assert second_dataset_count == 1 + + def test_get_target_ids_by_tag_ids_empty_tag_ids( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test target ID retrieval with empty tag IDs list. + + This test verifies: + - Proper handling of empty tag IDs + - Correct return value for empty input + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute the method under test with empty tag IDs + result = TagService.get_target_ids_by_tag_ids("knowledge", tenant.id, []) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + assert isinstance(result, list) + + def test_get_target_ids_by_tag_ids_no_matching_tags( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test target ID retrieval when no tags match the criteria. + + This test verifies: + - Proper handling of non-existent tag IDs + - Correct return value for no matches + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent tag IDs + import uuid + + non_existent_tag_ids = [str(uuid.uuid4()), str(uuid.uuid4())] + + # Act: Execute the method under test + result = TagService.get_target_ids_by_tag_ids("knowledge", tenant.id, non_existent_tag_ids) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + assert isinstance(result, list) + + def test_get_tag_by_tag_name_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of tags by tag name. 
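+
+        Exercised call shape (returns the list of matching Tag rows):
+
+            TagService.get_tag_by_tag_name("app", tenant.id, "python_tag")
+            # -> [Tag(name="python_tag", type="app", ...)]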
+ + This test verifies: + - Proper tag retrieval by name + - Correct filtering by tag type and tenant + - Proper return value structure + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags with specific names + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 2 + ) + + # Update tag names to make them searchable + from extensions.ext_database import db + + tags[0].name = "python_tag" + tags[1].name = "ml_tag" + db.session.commit() + + # Act: Execute the method under test + result = TagService.get_tag_by_tag_name("app", tenant.id, "python_tag") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 1 + assert result[0].name == "python_tag" + assert result[0].type == "app" + assert result[0].tenant_id == tenant.id + + def test_get_tag_by_tag_name_no_matches(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag retrieval by name when no matches exist. + + This test verifies: + - Proper handling of non-existent tag names + - Correct return value for no matches + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute the method under test with non-existent tag name + result = TagService.get_tag_by_tag_name("knowledge", tenant.id, "nonexistent_tag") + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + assert isinstance(result, list) + + def test_get_tag_by_tag_name_empty_parameters(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag retrieval by name with empty parameters. + + This test verifies: + - Proper handling of empty tag type + - Proper handling of empty tag name + - Correct return value for invalid input + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute the method under test with empty parameters + result_empty_type = TagService.get_tag_by_tag_name("", tenant.id, "test_tag") + result_empty_name = TagService.get_tag_by_tag_name("knowledge", tenant.id, "") + + # Assert: Verify the expected outcomes + assert result_empty_type is not None + assert len(result_empty_type) == 0 + assert result_empty_name is not None + assert len(result_empty_name) == 0 + + def test_get_tags_by_target_id_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of tags by target ID. 
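+
+        The lookup is presumably a join of tags onto their bindings, roughly
+        (assumed shape, not the actual service code):
+
+            db.session.query(Tag)
+                .join(TagBinding, TagBinding.tag_id == Tag.id)
+                .where(TagBinding.target_id == target_id,
+                       Tag.type == "app", Tag.tenant_id == tenant.id)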
+ + This test verifies: + - Proper tag retrieval for a specific target + - Correct filtering by tag type and tenant + - Proper join with tag bindings + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 3 + ) + + # Create app and bind tags + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, tags, app.id, tenant.id + ) + + # Act: Execute the method under test + result = TagService.get_tags_by_target_id("app", tenant.id, app.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 3 + + # Verify all tags are returned + for tag in result: + assert tag.type == "app" + assert tag.tenant_id == tenant.id + assert tag.id in [t.id for t in tags] + + def test_get_tags_by_target_id_no_bindings(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag retrieval by target ID when no tags are bound. + + This test verifies: + - Proper handling of targets with no tag bindings + - Correct return value for no results + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create app without binding any tags + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Act: Execute the method under test + result = TagService.get_tags_by_target_id("app", tenant.id, app.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 0 + assert isinstance(result, list) + + def test_save_tags_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful tag creation. + + This test verifies: + - Proper tag creation with all required fields + - Correct database state after creation + - Proper UUID generation + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + tag_args = {"name": "test_tag_name", "type": "knowledge"} + + # Act: Execute the method under test + result = TagService.save_tags(tag_args) + + # Assert: Verify the expected outcomes + assert result is not None + assert result.name == "test_tag_name" + assert result.type == "knowledge" + assert result.tenant_id == tenant.id + assert result.created_by == account.id + assert result.id is not None + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(result) + assert result.id is not None + + # Verify tag was actually saved to database + saved_tag = db.session.query(Tag).where(Tag.id == result.id).first() + assert saved_tag is not None + assert saved_tag.name == "test_tag_name" + + def test_save_tags_duplicate_name_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag creation with duplicate name. 
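+
+        Expected failure mode, as exercised below:
+
+            TagService.save_tags({"name": "duplicate_tag", "type": "app"})
+            TagService.save_tags({"name": "duplicate_tag", "type": "app"})
+            # second call raises ValueError("Tag name already exists")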
+ + This test verifies: + - Proper error handling for duplicate tag names + - Correct exception type and message + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create first tag + tag_args = {"name": "duplicate_tag", "type": "app"} + TagService.save_tags(tag_args) + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + TagService.save_tags(tag_args) + assert "Tag name already exists" in str(exc_info.value) + + def test_update_tags_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful tag update. + + This test verifies: + - Proper tag update with new name + - Correct database state after update + - Proper error handling for non-existent tags + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create a tag to update + tag_args = {"name": "original_name", "type": "knowledge"} + tag = TagService.save_tags(tag_args) + + # Update args + update_args = {"name": "updated_name", "type": "knowledge"} + + # Act: Execute the method under test + result = TagService.update_tags(update_args, tag.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert result.name == "updated_name" + assert result.type == "knowledge" + assert result.id == tag.id + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(result) + assert result.name == "updated_name" + + # Verify tag was actually updated in database + updated_tag = db.session.query(Tag).where(Tag.id == tag.id).first() + assert updated_tag is not None + assert updated_tag.name == "updated_name" + + def test_update_tags_not_found_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag update for non-existent tag. + + This test verifies: + - Proper error handling for non-existent tags + - Correct exception type + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent tag ID + import uuid + + non_existent_tag_id = str(uuid.uuid4()) + + update_args = {"name": "updated_name", "type": "knowledge"} + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + TagService.update_tags(update_args, non_existent_tag_id) + assert "Tag not found" in str(exc_info.value) + + def test_update_tags_duplicate_name_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag update with duplicate name. 
+ + This test verifies: + - Proper error handling for duplicate tag names during update + - Correct exception type and message + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create two tags + tag1_args = {"name": "first_tag", "type": "app"} + tag1 = TagService.save_tags(tag1_args) + + tag2_args = {"name": "second_tag", "type": "app"} + tag2 = TagService.save_tags(tag2_args) + + # Try to update second tag with first tag's name + update_args = {"name": "first_tag", "type": "app"} + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + TagService.update_tags(update_args, tag2.id) + assert "Tag name already exists" in str(exc_info.value) + + def test_get_tag_binding_count_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of tag binding count. + + This test verifies: + - Proper binding count calculation + - Correct handling of tags with no bindings + - Proper database query execution + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 2 + ) + + # Create dataset and bind first tag + dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id) + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, [tags[0]], dataset.id, tenant.id + ) + + # Act: Execute the method under test + result_tag_with_bindings = TagService.get_tag_binding_count(tags[0].id) + result_tag_without_bindings = TagService.get_tag_binding_count(tags[1].id) + + # Assert: Verify the expected outcomes + assert result_tag_with_bindings == 1 + assert result_tag_without_bindings == 0 + + def test_get_tag_binding_count_non_existent_tag( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test binding count retrieval for non-existent tag. + + This test verifies: + - Proper handling of non-existent tag IDs + - Correct return value for non-existent tags + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent tag ID + import uuid + + non_existent_tag_id = str(uuid.uuid4()) + + # Act: Execute the method under test + result = TagService.get_tag_binding_count(non_existent_tag_id) + + # Assert: Verify the expected outcomes + assert result == 0 + + def test_delete_tag_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful tag deletion. 
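+
+        Deletion is expected to cascade to bindings: after
+        TagService.delete_tag(tag.id), both of the following return None:
+
+            db.session.query(Tag).where(Tag.id == tag.id).first()
+            db.session.query(TagBinding).where(TagBinding.tag_id == tag.id).first()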
+ + This test verifies: + - Proper tag deletion from database + - Proper cleanup of associated tag bindings + - Correct database state after deletion + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tag with bindings + tag = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 1 + )[0] + + # Create app and bind tag + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, [tag], app.id, tenant.id + ) + + # Verify tag and binding exist before deletion + from extensions.ext_database import db + + tag_before = db.session.query(Tag).where(Tag.id == tag.id).first() + assert tag_before is not None + + binding_before = db.session.query(TagBinding).where(TagBinding.tag_id == tag.id).first() + assert binding_before is not None + + # Act: Execute the method under test + TagService.delete_tag(tag.id) + + # Assert: Verify the expected outcomes + # Verify tag was deleted + tag_after = db.session.query(Tag).where(Tag.id == tag.id).first() + assert tag_after is None + + # Verify tag binding was deleted + binding_after = db.session.query(TagBinding).where(TagBinding.tag_id == tag.id).first() + assert binding_after is None + + def test_delete_tag_not_found_error(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag deletion for non-existent tag. + + This test verifies: + - Proper error handling for non-existent tags + - Correct exception type + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent tag ID + import uuid + + non_existent_tag_id = str(uuid.uuid4()) + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + TagService.delete_tag(non_existent_tag_id) + assert "Tag not found" in str(exc_info.value) + + def test_save_tag_binding_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful tag binding creation. 
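+
+        Illustrative call shape (taken from the Act step below):
+
+            TagService.save_tag_binding(
+                {"type": "knowledge", "target_id": dataset.id, "tag_ids": [t.id for t in tags]}
+            )
+            # creates one TagBinding row per (tag_id, target_id) pair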
+ + This test verifies: + - Proper tag binding creation + - Correct handling of duplicate bindings + - Proper database state after creation + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tags + tags = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 2 + ) + + # Create dataset + dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Act: Execute the method under test + binding_args = {"type": "knowledge", "target_id": dataset.id, "tag_ids": [tag.id for tag in tags]} + TagService.save_tag_binding(binding_args) + + # Assert: Verify the expected outcomes + from extensions.ext_database import db + + # Verify tag bindings were created + for tag in tags: + binding = ( + db.session.query(TagBinding) + .where(TagBinding.tag_id == tag.id, TagBinding.target_id == dataset.id) + .first() + ) + assert binding is not None + assert binding.tenant_id == tenant.id + assert binding.created_by == account.id + + def test_save_tag_binding_duplicate_handling(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag binding creation with duplicate bindings. + + This test verifies: + - Proper handling of duplicate tag bindings + - No errors when trying to create existing bindings + - Correct database state after operation + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tag + tag = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 1 + )[0] + + # Create app + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Create first binding + binding_args = {"type": "app", "target_id": app.id, "tag_ids": [tag.id]} + TagService.save_tag_binding(binding_args) + + # Act: Try to create duplicate binding + TagService.save_tag_binding(binding_args) + + # Assert: Verify the expected outcomes + from extensions.ext_database import db + + # Verify only one binding exists + bindings = db.session.query(TagBinding).where(TagBinding.tag_id == tag.id, TagBinding.target_id == app.id).all() + assert len(bindings) == 1 + + def test_save_tag_binding_invalid_target_type(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test tag binding creation with invalid target type. 
+ + This test verifies: + - Proper error handling for invalid target types + - Correct exception type + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tag + tag = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 1 + )[0] + + # Create non-existent target ID + import uuid + + non_existent_target_id = str(uuid.uuid4()) + + # Act & Assert: Verify proper error handling + binding_args = {"type": "invalid_type", "target_id": non_existent_target_id, "tag_ids": [tag.id]} + + with pytest.raises(NotFound) as exc_info: + TagService.save_tag_binding(binding_args) + assert "Invalid binding type" in str(exc_info.value) + + def test_delete_tag_binding_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful tag binding deletion. + + This test verifies: + - Proper tag binding deletion from database + - Correct database state after deletion + - Proper error handling for non-existent bindings + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tag + tag = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "knowledge", 1 + )[0] + + # Create dataset and bind tag + dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id) + self._create_test_tag_bindings( + db_session_with_containers, mock_external_service_dependencies, [tag], dataset.id, tenant.id + ) + + # Verify binding exists before deletion + from extensions.ext_database import db + + binding_before = ( + db.session.query(TagBinding).where(TagBinding.tag_id == tag.id, TagBinding.target_id == dataset.id).first() + ) + assert binding_before is not None + + # Act: Execute the method under test + delete_args = {"type": "knowledge", "target_id": dataset.id, "tag_id": tag.id} + TagService.delete_tag_binding(delete_args) + + # Assert: Verify the expected outcomes + # Verify tag binding was deleted + binding_after = ( + db.session.query(TagBinding).where(TagBinding.tag_id == tag.id, TagBinding.target_id == dataset.id).first() + ) + assert binding_after is None + + def test_delete_tag_binding_non_existent_binding( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test tag binding deletion for non-existent binding. 
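+
+        delete_tag_binding is expected to be idempotent, i.e. deleting a
+        binding that does not exist is a silent no-op:
+
+            TagService.delete_tag_binding(
+                {"type": "app", "target_id": app.id, "tag_id": tag.id}
+            )  # no exception, database unchanged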
+ + This test verifies: + - Proper handling of non-existent tag bindings + - No errors when trying to delete non-existent bindings + - Correct database state after operation + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create tag and dataset without binding + tag = self._create_test_tags( + db_session_with_containers, mock_external_service_dependencies, tenant.id, "app", 1 + )[0] + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Act: Try to delete non-existent binding + delete_args = {"type": "app", "target_id": app.id, "tag_id": tag.id} + TagService.delete_tag_binding(delete_args) + + # Assert: Verify the expected outcomes + # No error should be raised, and database state should remain unchanged + from extensions.ext_database import db + + bindings = db.session.query(TagBinding).where(TagBinding.tag_id == tag.id, TagBinding.target_id == app.id).all() + assert len(bindings) == 0 + + def test_check_target_exists_knowledge_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful target existence check for knowledge type. + + This test verifies: + - Proper validation of knowledge dataset existence + - Correct error handling for non-existent datasets + - Proper tenant filtering + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create dataset + dataset = self._create_test_dataset(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Act: Execute the method under test + TagService.check_target_exists("knowledge", dataset.id) + + # Assert: Verify the expected outcomes + # No exception should be raised for existing dataset + + def test_check_target_exists_knowledge_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test target existence check for non-existent knowledge dataset. + + This test verifies: + - Proper error handling for non-existent knowledge datasets + - Correct exception type and message + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent dataset ID + import uuid + + non_existent_dataset_id = str(uuid.uuid4()) + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + TagService.check_target_exists("knowledge", non_existent_dataset_id) + assert "Dataset not found" in str(exc_info.value) + + def test_check_target_exists_app_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful target existence check for app type. 
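+
+        check_target_exists returns nothing on success and raises NotFound on
+        failure, so the happy path is simply:
+
+            TagService.check_target_exists("app", app.id)  # no exception raised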
+ + This test verifies: + - Proper validation of app existence + - Correct error handling for non-existent apps + - Proper tenant filtering + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create app + app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, tenant.id) + + # Act: Execute the method under test + TagService.check_target_exists("app", app.id) + + # Assert: Verify the expected outcomes + # No exception should be raised for existing app + + def test_check_target_exists_app_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test target existence check for non-existent app. + + This test verifies: + - Proper error handling for non-existent apps + - Correct exception type and message + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent app ID + import uuid + + non_existent_app_id = str(uuid.uuid4()) + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + TagService.check_target_exists("app", non_existent_app_id) + assert "App not found" in str(exc_info.value) + + def test_check_target_exists_invalid_type(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test target existence check for invalid type. + + This test verifies: + - Proper error handling for invalid target types + - Correct exception type and message + """ + # Arrange: Create test data + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Create non-existent target ID + import uuid + + non_existent_target_id = str(uuid.uuid4()) + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + TagService.check_target_exists("invalid_type", non_existent_target_id) + assert "Invalid binding type" in str(exc_info.value) diff --git a/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py new file mode 100644 index 0000000000..6d6f1dab72 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_web_conversation_service.py @@ -0,0 +1,574 @@ +from unittest.mock import patch + +import pytest +from faker import Faker + +from core.app.entities.app_invoke_entities import InvokeFrom +from models.account import Account +from models.model import Conversation, EndUser +from models.web import PinnedConversation +from services.account_service import AccountService, TenantService +from services.app_service import AppService +from services.web_conversation_service import WebConversationService + + +class TestWebConversationService: + """Integration tests for WebConversationService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.app_service.FeatureService") as mock_feature_service, + patch("services.app_service.EnterpriseService") as mock_enterprise_service, + patch("services.app_service.ModelManager") as mock_model_manager, + patch("services.account_service.FeatureService") as mock_account_feature_service, + ): + # Setup default mock 
returns for app service + mock_feature_service.get_system_features.return_value.webapp_auth.enabled = False + mock_enterprise_service.WebAppAuth.update_app_access_mode.return_value = None + mock_enterprise_service.WebAppAuth.cleanup_webapp.return_value = None + + # Setup default mock returns for account service + mock_account_feature_service.get_system_features.return_value.is_allow_register = True + + # Mock ModelManager for model configuration + mock_model_instance = mock_model_manager.return_value + mock_model_instance.get_default_model_instance.return_value = None + mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo") + + yield { + "feature_service": mock_feature_service, + "enterprise_service": mock_enterprise_service, + "model_manager": mock_model_manager, + "account_feature_service": mock_account_feature_service, + } + + def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test app and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (app, account) - Created app and account instances + """ + fake = Faker() + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Create app with realistic data + app_args = { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + } + + app_service = AppService() + app = app_service.create_app(tenant.id, app_args, account) + + return app, account + + def _create_test_end_user(self, db_session_with_containers, app): + """ + Helper method to create a test end user for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance + + Returns: + EndUser: Created end user instance + """ + fake = Faker() + + end_user = EndUser( + session_id=fake.uuid4(), + app_id=app.id, + type="normal", + is_anonymous=False, + tenant_id=app.tenant_id, + ) + + from extensions.ext_database import db + + db.session.add(end_user) + db.session.commit() + + return end_user + + def _create_test_conversation(self, db_session_with_containers, app, user, fake): + """ + Helper method to create a test conversation for testing. 
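+
+        The from_* columns are derived from the user type, via the branching
+        used in the constructor call below:
+
+            from_source = "console" if isinstance(user, Account) else "api"
+            from_account_id = user.id if isinstance(user, Account) else None
+            from_end_user_id = user.id if isinstance(user, EndUser) else None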
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance + user: User instance (Account or EndUser) + fake: Faker instance + + Returns: + Conversation: Created conversation instance + """ + conversation = Conversation( + app_id=app.id, + app_model_config_id=app.app_model_config_id, + model_provider="openai", + model_id="gpt-3.5-turbo", + mode="chat", + name=fake.sentence(nb_words=3), + summary=fake.text(max_nb_chars=100), + inputs={}, + introduction=fake.text(max_nb_chars=200), + system_instruction=fake.text(max_nb_chars=300), + system_instruction_tokens=50, + status="normal", + invoke_from=InvokeFrom.WEB_APP.value, + from_source="console" if isinstance(user, Account) else "api", + from_end_user_id=user.id if isinstance(user, EndUser) else None, + from_account_id=user.id if isinstance(user, Account) else None, + dialogue_count=0, + is_deleted=False, + ) + + from extensions.ext_database import db + + db.session.add(conversation) + db.session.commit() + + return conversation + + def test_pagination_by_last_id_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful pagination by last ID with basic parameters. + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create multiple conversations + conversations = [] + for i in range(5): + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + conversations.append(conversation) + + # Test pagination without pinned filter + result = WebConversationService.pagination_by_last_id( + session=db_session_with_containers, + app_model=app, + user=account, + last_id=None, + limit=3, + invoke_from=InvokeFrom.WEB_APP, + pinned=None, + sort_by="-updated_at", + ) + + # Verify results + assert result.limit == 3 + assert len(result.data) == 3 + assert result.has_more is True + + # Verify conversations are in descending order by updated_at + assert result.data[0].updated_at >= result.data[1].updated_at + assert result.data[1].updated_at >= result.data[2].updated_at + + def test_pagination_by_last_id_with_pinned_filter( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test pagination by last ID with pinned conversation filter. 
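+
+        With pinned=True, only conversations whose IDs appear in
+        PinnedConversation for this app and user should be returned. A rough
+        sketch of the assumed filter (this test only checks observable
+        behavior, not the implementation):
+
+            pinned_ids = (
+                db.session.query(PinnedConversation.conversation_id)
+                .where(PinnedConversation.app_id == app.id,
+                       PinnedConversation.created_by == user.id)
+                .scalar_subquery()
+            )
+            stmt = stmt.where(Conversation.id.in_(pinned_ids))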
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create conversations + conversations = [] + for i in range(5): + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + conversations.append(conversation) + + # Pin some conversations + pinned_conversation1 = PinnedConversation( + app_id=app.id, + conversation_id=conversations[0].id, + created_by_role="account", + created_by=account.id, + ) + pinned_conversation2 = PinnedConversation( + app_id=app.id, + conversation_id=conversations[2].id, + created_by_role="account", + created_by=account.id, + ) + + from extensions.ext_database import db + + db.session.add(pinned_conversation1) + db.session.add(pinned_conversation2) + db.session.commit() + + # Test pagination with pinned filter + result = WebConversationService.pagination_by_last_id( + session=db_session_with_containers, + app_model=app, + user=account, + last_id=None, + limit=10, + invoke_from=InvokeFrom.WEB_APP, + pinned=True, + sort_by="-updated_at", + ) + + # Verify only pinned conversations are returned + assert result.limit == 10 + assert len(result.data) == 2 + assert result.has_more is False + + # Verify the returned conversations are the pinned ones + returned_ids = [conv.id for conv in result.data] + expected_ids = [conversations[0].id, conversations[2].id] + assert set(returned_ids) == set(expected_ids) + + def test_pagination_by_last_id_with_unpinned_filter( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test pagination by last ID with unpinned conversation filter. + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create conversations + conversations = [] + for i in range(5): + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + conversations.append(conversation) + + # Pin one conversation + pinned_conversation = PinnedConversation( + app_id=app.id, + conversation_id=conversations[0].id, + created_by_role="account", + created_by=account.id, + ) + + from extensions.ext_database import db + + db.session.add(pinned_conversation) + db.session.commit() + + # Test pagination with unpinned filter + result = WebConversationService.pagination_by_last_id( + session=db_session_with_containers, + app_model=app, + user=account, + last_id=None, + limit=10, + invoke_from=InvokeFrom.WEB_APP, + pinned=False, + sort_by="-updated_at", + ) + + # Verify unpinned conversations are returned (should be 4 out of 5) + assert result.limit == 10 + assert len(result.data) == 4 + assert result.has_more is False + + # Verify the pinned conversation is not in the results + returned_ids = [conv.id for conv in result.data] + assert conversations[0].id not in returned_ids + + # Verify all other conversations are in the results + expected_unpinned_ids = [conv.id for conv in conversations[1:]] + assert set(returned_ids) == set(expected_unpinned_ids) + + def test_pin_conversation_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful pinning of a conversation. 
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Pin the conversation + WebConversationService.pin(app, conversation.id, account) + + # Verify the conversation was pinned + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is not None + assert pinned_conversation.app_id == app.id + assert pinned_conversation.conversation_id == conversation.id + assert pinned_conversation.created_by_role == "account" + assert pinned_conversation.created_by == account.id + + def test_pin_conversation_already_pinned(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test pinning a conversation that is already pinned (should not create duplicate). + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Pin the conversation first time + WebConversationService.pin(app, conversation.id, account) + + # Pin the conversation again + WebConversationService.pin(app, conversation.id, account) + + # Verify only one pinned conversation record exists + from extensions.ext_database import db + + pinned_conversations = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .all() + ) + + assert len(pinned_conversations) == 1 + + def test_pin_conversation_with_end_user(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test pinning a conversation with an end user. + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create an end user + end_user = self._create_test_end_user(db_session_with_containers, app) + + # Create a conversation for the end user + conversation = self._create_test_conversation(db_session_with_containers, app, end_user, fake) + + # Pin the conversation + WebConversationService.pin(app, conversation.id, end_user) + + # Verify the conversation was pinned + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "end_user", + PinnedConversation.created_by == end_user.id, + ) + .first() + ) + + assert pinned_conversation is not None + assert pinned_conversation.app_id == app.id + assert pinned_conversation.conversation_id == conversation.id + assert pinned_conversation.created_by_role == "end_user" + assert pinned_conversation.created_by == end_user.id + + def test_unpin_conversation_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful unpinning of a conversation. 
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Pin the conversation first + WebConversationService.pin(app, conversation.id, account) + + # Verify it was pinned + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is not None + + # Unpin the conversation + WebConversationService.unpin(app, conversation.id, account) + + # Verify it was unpinned + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is None + + def test_unpin_conversation_not_pinned(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test unpinning a conversation that is not pinned (should not cause error). + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Try to unpin a conversation that was never pinned + WebConversationService.unpin(app, conversation.id, account) + + # Verify no pinned conversation record exists + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is None + + def test_pagination_by_last_id_user_required_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test that pagination_by_last_id raises ValueError when user is None. + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Test with None user + with pytest.raises(ValueError, match="User is required"): + WebConversationService.pagination_by_last_id( + session=db_session_with_containers, + app_model=app, + user=None, + last_id=None, + limit=10, + invoke_from=InvokeFrom.WEB_APP, + pinned=None, + sort_by="-updated_at", + ) + + def test_pin_conversation_user_none(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test that pin method returns early when user is None. 
+ """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Try to pin with None user + WebConversationService.pin(app, conversation.id, None) + + # Verify no pinned conversation was created + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + ) + .first() + ) + + assert pinned_conversation is None + + def test_unpin_conversation_user_none(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test that unpin method returns early when user is None. + """ + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create a conversation + conversation = self._create_test_conversation(db_session_with_containers, app, account, fake) + + # Pin the conversation first + WebConversationService.pin(app, conversation.id, account) + + # Verify it was pinned + from extensions.ext_database import db + + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is not None + + # Try to unpin with None user + WebConversationService.unpin(app, conversation.id, None) + + # Verify the conversation is still pinned + pinned_conversation = ( + db.session.query(PinnedConversation) + .where( + PinnedConversation.app_id == app.id, + PinnedConversation.conversation_id == conversation.id, + PinnedConversation.created_by_role == "account", + PinnedConversation.created_by == account.id, + ) + .first() + ) + + assert pinned_conversation is not None diff --git a/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py new file mode 100644 index 0000000000..666b083ba6 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_webapp_auth_service.py @@ -0,0 +1,877 @@ +from unittest.mock import patch + +import pytest +from faker import Faker +from werkzeug.exceptions import NotFound, Unauthorized + +from libs.password import hash_password +from models.account import Account, AccountStatus, Tenant, TenantAccountJoin, TenantAccountRole +from models.model import App, Site +from services.errors.account import AccountLoginError, AccountNotFoundError, AccountPasswordError +from services.webapp_auth_service import WebAppAuthService, WebAppAuthType + + +class TestWebAppAuthService: + """Integration tests for WebAppAuthService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.webapp_auth_service.PassportService") as mock_passport_service, + patch("services.webapp_auth_service.TokenManager") as mock_token_manager, + patch("services.webapp_auth_service.send_email_code_login_mail_task") as mock_mail_task, + patch("services.webapp_auth_service.AppService") as mock_app_service, + patch("services.webapp_auth_service.EnterpriseService") as mock_enterprise_service, + ): + # 
Setup default mock returns + mock_passport_service.return_value.issue.return_value = "mock_jwt_token" + mock_token_manager.generate_token.return_value = "mock_token" + mock_token_manager.get_token_data.return_value = {"code": "123456"} + mock_mail_task.delay.return_value = None + mock_app_service.get_app_id_by_code.return_value = "mock_app_id" + mock_enterprise_service.WebAppAuth.get_app_access_mode_by_id.return_value = type( + "MockWebAppAuth", (), {"access_mode": "private"} + )() + mock_enterprise_service.WebAppAuth.get_app_access_mode_by_code.return_value = type( + "MockWebAppAuth", (), {"access_mode": "private"} + )() + + yield { + "passport_service": mock_passport_service, + "token_manager": mock_token_manager, + "mail_task": mock_mail_task, + "app_service": mock_app_service, + "enterprise_service": mock_enterprise_service, + } + + def _create_test_account_and_tenant(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test account and tenant for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (account, tenant) - Created account and tenant instances + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Create tenant for the account + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Set current tenant for account + account.current_tenant = tenant + + return account, tenant + + def _create_test_account_with_password(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test account with password for testing. 
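+
+        Credentials are stored the way the auth code expects them: the
+        password is hashed with a salt and both values are base64-encoded,
+        as done below:
+
+            password_hash = hash_password(password, salt)
+            account.password = base64.b64encode(password_hash).decode()
+            account.password_salt = base64.b64encode(salt).decode()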
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (account, tenant, password) - Created account, tenant and password + """ + fake = Faker() + password = fake.password(length=12) + + # Create account with password + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + # Hash password + salt = b"test_salt_16_bytes" + password_hash = hash_password(password, salt) + + # Convert to base64 for storage + import base64 + + account.password = base64.b64encode(password_hash).decode() + account.password_salt = base64.b64encode(salt).decode() + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Create tenant for the account + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Set current tenant for account + account.current_tenant = tenant + + return account, tenant, password + + def _create_test_app_and_site(self, db_session_with_containers, mock_external_service_dependencies, tenant): + """ + Helper method to create a test app and site for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + tenant: Tenant instance to associate with + + Returns: + tuple: (app, site) - Created app and site instances + """ + fake = Faker() + + # Create app + app = App( + tenant_id=tenant.id, + name=fake.company(), + description=fake.text(max_nb_chars=100), + mode="chat", + icon_type="emoji", + icon="🤖", + icon_background="#FF6B6B", + api_rph=100, + api_rpm=10, + enable_site=True, + enable_api=True, + ) + + from extensions.ext_database import db + + db.session.add(app) + db.session.commit() + + # Create site + site = Site( + app_id=app.id, + title=fake.company(), + code=fake.unique.lexify(text="??????"), + description=fake.text(max_nb_chars=100), + default_language="en-US", + status="normal", + customize_token_strategy="not_allow", + ) + db.session.add(site) + db.session.commit() + + return app, site + + def test_authenticate_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful authentication with valid email and password. 
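+
+        Happy-path call shape:
+
+            account = WebAppAuthService.authenticate(email, password)
+            # returns the matching active Account; otherwise raises
+            # AccountNotFoundError, AccountPasswordError, or AccountLoginError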
+ + This test verifies: + - Proper authentication with valid credentials + - Correct account return + - Database state consistency + """ + # Arrange: Create test data + account, tenant, password = self._create_test_account_with_password( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute authentication + result = WebAppAuthService.authenticate(account.email, password) + + # Assert: Verify successful authentication + assert result is not None + assert result.id == account.id + assert result.email == account.email + assert result.name == account.name + assert result.status == AccountStatus.ACTIVE.value + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(result) + assert result.id is not None + assert result.password is not None + assert result.password_salt is not None + + def test_authenticate_account_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test authentication with non-existent email. + + This test verifies: + - Proper error handling for non-existent accounts + - Correct exception type and message + """ + # Arrange: Use non-existent email + fake = Faker() + non_existent_email = fake.email() + + # Act & Assert: Verify proper error handling + with pytest.raises(AccountNotFoundError): + WebAppAuthService.authenticate(non_existent_email, "any_password") + + def test_authenticate_account_banned(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test authentication with banned account. + + This test verifies: + - Proper error handling for banned accounts + - Correct exception type and message + """ + # Arrange: Create banned account + fake = Faker() + password = fake.password(length=12) + + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status=AccountStatus.BANNED.value, + ) + + # Hash password + salt = b"test_salt_16_bytes" + password_hash = hash_password(password, salt) + + # Convert to base64 for storage + import base64 + + account.password = base64.b64encode(password_hash).decode() + account.password_salt = base64.b64encode(salt).decode() + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Act & Assert: Verify proper error handling + with pytest.raises(AccountLoginError) as exc_info: + WebAppAuthService.authenticate(account.email, password) + + assert "Account is banned." in str(exc_info.value) + + def test_authenticate_invalid_password(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test authentication with invalid password. + + This test verifies: + - Proper error handling for invalid passwords + - Correct exception type and message + """ + # Arrange: Create account with password + account, tenant, correct_password = self._create_test_account_with_password( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act & Assert: Verify proper error handling with wrong password + with pytest.raises(AccountPasswordError) as exc_info: + WebAppAuthService.authenticate(account.email, "wrong_password") + + assert "Invalid email or password." in str(exc_info.value) + + def test_authenticate_account_without_password( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test authentication for account without password. 
+ + This test verifies: + - Proper error handling for accounts without password + - Correct exception type and message + """ + # Arrange: Create account without password + fake = Faker() + + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Act & Assert: Verify proper error handling + with pytest.raises(AccountPasswordError) as exc_info: + WebAppAuthService.authenticate(account.email, "any_password") + + assert "Invalid email or password." in str(exc_info.value) + + def test_login_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful login and JWT token generation. + + This test verifies: + - Proper JWT token generation + - Correct token format and content + - Mock service integration + """ + # Arrange: Create test account + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute login + result = WebAppAuthService.login(account) + + # Assert: Verify successful login + assert result is not None + assert result == "mock_jwt_token" + + # Verify mock service was called correctly + mock_external_service_dependencies["passport_service"].return_value.issue.assert_called_once() + call_args = mock_external_service_dependencies["passport_service"].return_value.issue.call_args[0][0] + + assert call_args["sub"] == "Web API Passport" + assert call_args["user_id"] == account.id + assert call_args["session_id"] == account.email + assert call_args["token_source"] == "webapp_login_token" + assert call_args["auth_type"] == "internal" + assert "exp" in call_args + + def test_get_user_through_email_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful user retrieval through email. + + This test verifies: + - Proper user retrieval by email + - Correct account return + - Database state consistency + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute user retrieval + result = WebAppAuthService.get_user_through_email(account.email) + + # Assert: Verify successful retrieval + assert result is not None + assert result.id == account.id + assert result.email == account.email + assert result.name == account.name + assert result.status == AccountStatus.ACTIVE.value + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(result) + assert result.id is not None + + def test_get_user_through_email_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test user retrieval with non-existent email. + + This test verifies: + - Proper handling for non-existent users + - Correct return value (None) + """ + # Arrange: Use non-existent email + fake = Faker() + non_existent_email = fake.email() + + # Act: Execute user retrieval + result = WebAppAuthService.get_user_through_email(non_existent_email) + + # Assert: Verify proper handling + assert result is None + + def test_get_user_through_email_banned(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test user retrieval with banned account. 
+ + This test verifies: + - Proper error handling for banned accounts + - Correct exception type and message + """ + # Arrange: Create banned account + fake = Faker() + + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status=AccountStatus.BANNED.value, + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Act & Assert: Verify proper error handling + with pytest.raises(Unauthorized) as exc_info: + WebAppAuthService.get_user_through_email(account.email) + + assert "Account is banned." in str(exc_info.value) + + def test_send_email_code_login_email_with_account( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test sending email code login email with account. + + This test verifies: + - Proper email code generation + - Token generation with correct data + - Mail task scheduling + - Mock service integration + """ + # Arrange: Create test account + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + # Act: Execute email code login email sending + result = WebAppAuthService.send_email_code_login_email(account=account, language="en-US") + + # Assert: Verify successful email sending + assert result is not None + assert result == "mock_token" + + # Verify mock services were called correctly + mock_external_service_dependencies["token_manager"].generate_token.assert_called_once() + mock_external_service_dependencies["mail_task"].delay.assert_called_once() + + # Verify token generation parameters + token_call_args = mock_external_service_dependencies["token_manager"].generate_token.call_args + assert token_call_args[1]["account"] == account + assert token_call_args[1]["email"] == account.email + assert token_call_args[1]["token_type"] == "email_code_login" + assert "code" in token_call_args[1]["additional_data"] + + # Verify mail task parameters + mail_call_args = mock_external_service_dependencies["mail_task"].delay.call_args + assert mail_call_args[1]["language"] == "en-US" + assert mail_call_args[1]["to"] == account.email + assert "code" in mail_call_args[1] + + def test_send_email_code_login_email_with_email_only( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test sending email code login email with email only. 
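+
+        Call shape exercised below; the account argument is optional, an
+        email alone is sufficient:
+
+            token = WebAppAuthService.send_email_code_login_email(
+                email=test_email, language="zh-Hans"
+            )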
+ + This test verifies: + - Proper email code generation without account + - Token generation with email only + - Mail task scheduling + - Mock service integration + """ + # Arrange: Use test email + fake = Faker() + test_email = fake.email() + + # Act: Execute email code login email sending + result = WebAppAuthService.send_email_code_login_email(email=test_email, language="zh-Hans") + + # Assert: Verify successful email sending + assert result is not None + assert result == "mock_token" + + # Verify mock services were called correctly + mock_external_service_dependencies["token_manager"].generate_token.assert_called_once() + mock_external_service_dependencies["mail_task"].delay.assert_called_once() + + # Verify token generation parameters + token_call_args = mock_external_service_dependencies["token_manager"].generate_token.call_args + assert token_call_args[1]["account"] is None + assert token_call_args[1]["email"] == test_email + assert token_call_args[1]["token_type"] == "email_code_login" + assert "code" in token_call_args[1]["additional_data"] + + # Verify mail task parameters + mail_call_args = mock_external_service_dependencies["mail_task"].delay.call_args + assert mail_call_args[1]["language"] == "zh-Hans" + assert mail_call_args[1]["to"] == test_email + assert "code" in mail_call_args[1] + + def test_send_email_code_login_email_no_email_provided( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test sending email code login email without providing email. + + This test verifies: + - Proper error handling when no email is provided + - Correct exception type and message + """ + # Arrange: No email provided + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebAppAuthService.send_email_code_login_email() + + assert "Email must be provided." in str(exc_info.value) + + def test_get_email_code_login_data_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of email code login data. + + This test verifies: + - Proper token data retrieval + - Correct data format + - Mock service integration + """ + # Arrange: Setup mock return + expected_data = {"code": "123456", "email": "test@example.com"} + mock_external_service_dependencies["token_manager"].get_token_data.return_value = expected_data + + # Act: Execute data retrieval + result = WebAppAuthService.get_email_code_login_data("mock_token") + + # Assert: Verify successful retrieval + assert result is not None + assert result == expected_data + assert result["code"] == "123456" + assert result["email"] == "test@example.com" + + # Verify mock service was called correctly + mock_external_service_dependencies["token_manager"].get_token_data.assert_called_once_with( + "mock_token", "email_code_login" + ) + + def test_get_email_code_login_data_no_data(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test email code login data retrieval when no data exists. 
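+ An unknown or expired token should yield None rather than raising an exception.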
+ + This test verifies: + - Proper handling when no token data exists + - Correct return value (None) + - Mock service integration + """ + # Arrange: Setup mock return for no data + mock_external_service_dependencies["token_manager"].get_token_data.return_value = None + + # Act: Execute data retrieval + result = WebAppAuthService.get_email_code_login_data("invalid_token") + + # Assert: Verify proper handling + assert result is None + + # Verify mock service was called correctly + mock_external_service_dependencies["token_manager"].get_token_data.assert_called_once_with( + "invalid_token", "email_code_login" + ) + + def test_revoke_email_code_login_token_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful revocation of email code login token. + + This test verifies: + - Proper token revocation + - Mock service integration + """ + # Arrange: Setup mock + + # Act: Execute token revocation + WebAppAuthService.revoke_email_code_login_token("mock_token") + + # Assert: Verify mock service was called correctly + mock_external_service_dependencies["token_manager"].revoke_token.assert_called_once_with( + "mock_token", "email_code_login" + ) + + def test_create_end_user_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful end user creation. + + This test verifies: + - Proper end user creation with valid app code + - Correct database state after creation + - Proper relationship establishment + - Mock service integration + """ + # Arrange: Create test data + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + app, site = self._create_test_app_and_site( + db_session_with_containers, mock_external_service_dependencies, tenant + ) + + # Act: Execute end user creation + result = WebAppAuthService.create_end_user(site.code, "test@example.com") + + # Assert: Verify successful creation + assert result is not None + assert result.tenant_id == app.tenant_id + assert result.app_id == app.id + assert result.type == "browser" + assert result.is_anonymous is False + assert result.session_id == "test@example.com" + assert result.name == "enterpriseuser" + assert result.external_user_id == "enterpriseuser" + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(result) + assert result.id is not None + assert result.created_at is not None + assert result.updated_at is not None + + def test_create_end_user_site_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test end user creation with non-existent site code. + + This test verifies: + - Proper error handling for non-existent sites + - Correct exception type and message + """ + # Arrange: Use non-existent site code + fake = Faker() + non_existent_code = fake.unique.lexify(text="??????") + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + WebAppAuthService.create_end_user(non_existent_code, "test@example.com") + + assert "Site not found." in str(exc_info.value) + + def test_create_end_user_app_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test end user creation when app is not found. 
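+ The site created below references a placeholder all-zero app_id that matches no App row, so the lookup should fail.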
+ + This test verifies: + - Proper error handling when app is missing + - Correct exception type and message + """ + # Arrange: Create site without app + fake = Faker() + tenant = Tenant( + name=fake.company(), + status="normal", + ) + + from extensions.ext_database import db + + db.session.add(tenant) + db.session.commit() + + site = Site( + app_id="00000000-0000-0000-0000-000000000000", + title=fake.company(), + code=fake.unique.lexify(text="??????"), + description=fake.text(max_nb_chars=100), + default_language="en-US", + status="normal", + customize_token_strategy="not_allow", + ) + db.session.add(site) + db.session.commit() + + # Act & Assert: Verify proper error handling + with pytest.raises(NotFound) as exc_info: + WebAppAuthService.create_end_user(site.code, "test@example.com") + + assert "App not found." in str(exc_info.value) + + def test_is_app_require_permission_check_with_access_mode_private( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test permission check requirement for private access mode. + + This test verifies: + - Proper permission check requirement for private mode + - Correct return value + - Mock service integration + """ + # Arrange: Setup test with private access mode + + # Act: Execute permission check requirement test + result = WebAppAuthService.is_app_require_permission_check(access_mode="private") + + # Assert: Verify correct result + assert result is True + + def test_is_app_require_permission_check_with_access_mode_public( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test permission check requirement for public access mode. + + This test verifies: + - Proper permission check requirement for public mode + - Correct return value + - Mock service integration + """ + # Arrange: Setup test with public access mode + + # Act: Execute permission check requirement test + result = WebAppAuthService.is_app_require_permission_check(access_mode="public") + + # Assert: Verify correct result + assert result is False + + def test_is_app_require_permission_check_with_app_code( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test permission check requirement using app code. + + This test verifies: + - Proper permission check requirement using app code + - Correct return value + - Mock service integration + """ + # Arrange: Setup mock for app service + mock_external_service_dependencies["app_service"].get_app_id_by_code.return_value = "mock_app_id" + + # Act: Execute permission check requirement test + result = WebAppAuthService.is_app_require_permission_check(app_code="mock_app_code") + + # Assert: Verify correct result + assert result is True + + # Verify mock service was called correctly + mock_external_service_dependencies["app_service"].get_app_id_by_code.assert_called_once_with("mock_app_code") + mock_external_service_dependencies[ + "enterprise_service" + ].WebAppAuth.get_app_access_mode_by_id.assert_called_once_with("mock_app_id") + + def test_is_app_require_permission_check_no_parameters( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test permission check requirement with no parameters. 
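+ Calling the method with neither app_code nor app_id is a usage error and should raise ValueError.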
+ + This test verifies: + - Proper error handling when no parameters provided + - Correct exception type and message + """ + # Arrange: No parameters provided + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebAppAuthService.is_app_require_permission_check() + + assert "Either app_code or app_id must be provided." in str(exc_info.value) + + def test_get_app_auth_type_with_access_mode_public( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test app authentication type for public access mode. + + This test verifies: + - Proper authentication type determination for public mode + - Correct return value + - Mock service integration + """ + # Arrange: Setup test with public access mode + + # Act: Execute authentication type determination + result = WebAppAuthService.get_app_auth_type(access_mode="public") + + # Assert: Verify correct result + assert result == WebAppAuthType.PUBLIC + + def test_get_app_auth_type_with_access_mode_private( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test app authentication type for private access mode. + + This test verifies: + - Proper authentication type determination for private mode + - Correct return value + - Mock service integration + """ + # Arrange: Setup test with private access mode + + # Act: Execute authentication type determination + result = WebAppAuthService.get_app_auth_type(access_mode="private") + + # Assert: Verify correct result + assert result == WebAppAuthType.INTERNAL + + def test_get_app_auth_type_with_app_code(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test app authentication type using app code. + + This test verifies: + - Proper authentication type determination using app code + - Correct return value + - Mock service integration + """ + # Arrange: Setup mock for enterprise service + mock_webapp_auth = type("MockWebAppAuth", (), {"access_mode": "sso_verified"})() + mock_external_service_dependencies[ + "enterprise_service" + ].WebAppAuth.get_app_access_mode_by_code.return_value = mock_webapp_auth + + # Act: Execute authentication type determination + result = WebAppAuthService.get_app_auth_type(app_code="mock_app_code") + + # Assert: Verify correct result + assert result == WebAppAuthType.EXTERNAL + + # Verify mock service was called correctly + mock_external_service_dependencies[ + "enterprise_service" + ].WebAppAuth.get_app_access_mode_by_code.assert_called_once_with("mock_app_code") + + def test_get_app_auth_type_no_parameters(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test app authentication type with no parameters. + + This test verifies: + - Proper error handling when no parameters provided + - Correct exception type and message + """ + # Arrange: No parameters provided + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebAppAuthService.get_app_auth_type() + + assert "Either app_code or access_mode must be provided." 
in str(exc_info.value) diff --git a/api/tests/test_containers_integration_tests/services/test_website_service.py b/api/tests/test_containers_integration_tests/services/test_website_service.py new file mode 100644 index 0000000000..ec2f1556af --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_website_service.py @@ -0,0 +1,1437 @@ +from datetime import datetime +from unittest.mock import MagicMock, patch + +import pytest +from faker import Faker + +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from services.website_service import ( + CrawlOptions, + ScrapeRequest, + WebsiteCrawlApiRequest, + WebsiteCrawlStatusApiRequest, + WebsiteService, +) + + +class TestWebsiteService: + """Integration tests for WebsiteService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.website_service.ApiKeyAuthService") as mock_api_key_auth_service, + patch("services.website_service.FirecrawlApp") as mock_firecrawl_app, + patch("services.website_service.WaterCrawlProvider") as mock_watercrawl_provider, + patch("services.website_service.requests") as mock_requests, + patch("services.website_service.redis_client") as mock_redis_client, + patch("services.website_service.storage") as mock_storage, + patch("services.website_service.encrypter") as mock_encrypter, + ): + # Setup default mock returns + mock_api_key_auth_service.get_auth_credentials.return_value = { + "config": {"api_key": "encrypted_api_key", "base_url": "https://api.example.com"} + } + mock_encrypter.decrypt_token.return_value = "decrypted_api_key" + + # Mock FirecrawlApp + mock_firecrawl_instance = MagicMock() + mock_firecrawl_instance.crawl_url.return_value = "test_job_id_123" + mock_firecrawl_instance.check_crawl_status.return_value = { + "status": "completed", + "total": 5, + "current": 5, + "data": [{"source_url": "https://example.com", "title": "Test Page"}], + } + mock_firecrawl_app.return_value = mock_firecrawl_instance + + # Mock WaterCrawlProvider + mock_watercrawl_instance = MagicMock() + mock_watercrawl_instance.crawl_url.return_value = {"status": "active", "job_id": "watercrawl_job_123"} + mock_watercrawl_instance.get_crawl_status.return_value = { + "status": "completed", + "job_id": "watercrawl_job_123", + "total": 3, + "current": 3, + "data": [], + } + mock_watercrawl_instance.get_crawl_url_data.return_value = { + "title": "WaterCrawl Page", + "source_url": "https://example.com", + "description": "Test description", + "markdown": "# Test Content", + } + mock_watercrawl_instance.scrape_url.return_value = { + "title": "Scraped Page", + "content": "Test content", + "url": "https://example.com", + } + mock_watercrawl_provider.return_value = mock_watercrawl_instance + + # Mock requests + mock_response = MagicMock() + mock_response.json.return_value = {"code": 200, "data": {"taskId": "jina_job_123"}} + mock_requests.get.return_value = mock_response + mock_requests.post.return_value = mock_response + + # Mock Redis + mock_redis_client.setex.return_value = None + mock_redis_client.get.return_value = str(datetime.now().timestamp()) + mock_redis_client.delete.return_value = None + + # Mock Storage + mock_storage.exists.return_value = False + mock_storage.load_once.return_value = None + + yield { + "api_key_auth_service": mock_api_key_auth_service, + "firecrawl_app": mock_firecrawl_app, + "watercrawl_provider": mock_watercrawl_provider, + "requests": mock_requests, + 
"redis_client": mock_redis_client, + "storage": mock_storage, + "encrypter": mock_encrypter, + } + + def _create_test_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test account with proper tenant setup. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + Account: Created account instance + """ + fake = Faker() + + # Create account + account = Account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + status="active", + ) + + from extensions.ext_database import db + + db.session.add(account) + db.session.commit() + + # Create tenant for the account + tenant = Tenant( + name=fake.company(), + status="normal", + ) + db.session.add(tenant) + db.session.commit() + + # Create tenant-account join + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER.value, + current=True, + ) + db.session.add(join) + db.session.commit() + + # Set current tenant for account + account.current_tenant = tenant + + return account + + def test_document_create_args_validate_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful argument validation for document creation. + + This test verifies: + - Valid arguments are accepted without errors + - All required fields are properly validated + - Optional fields are handled correctly + """ + # Arrange: Prepare valid arguments + valid_args = { + "provider": "firecrawl", + "url": "https://example.com", + "options": { + "limit": 5, + "crawl_sub_pages": True, + "only_main_content": False, + "includes": "blog,news", + "excludes": "admin,private", + "max_depth": 3, + "use_sitemap": True, + }, + } + + # Act: Validate arguments + WebsiteService.document_create_args_validate(valid_args) + + # Assert: No exception should be raised + # If we reach here, validation passed successfully + + def test_document_create_args_validate_missing_provider( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test argument validation fails when provider is missing. + + This test verifies: + - Missing provider raises ValueError + - Proper error message is provided + - Validation stops at first missing required field + """ + # Arrange: Prepare arguments without provider + invalid_args = {"url": "https://example.com", "options": {"limit": 5, "crawl_sub_pages": True}} + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.document_create_args_validate(invalid_args) + + assert "Provider is required" in str(exc_info.value) + + def test_document_create_args_validate_missing_url( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test argument validation fails when URL is missing. 
+ + This test verifies: + - Missing URL raises ValueError + - Proper error message is provided + - Validation continues after provider check + """ + # Arrange: Prepare arguments without URL + invalid_args = {"provider": "firecrawl", "options": {"limit": 5, "crawl_sub_pages": True}} + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.document_create_args_validate(invalid_args) + + assert "URL is required" in str(exc_info.value) + + def test_crawl_url_firecrawl_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful URL crawling with Firecrawl provider. + + This test verifies: + - Firecrawl provider is properly initialized + - API credentials are retrieved and decrypted + - Crawl parameters are correctly formatted + - Job ID is returned with active status + - Redis cache is properly set + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + fake = Faker() + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlApiRequest( + provider="firecrawl", + url="https://example.com", + options={ + "limit": 10, + "crawl_sub_pages": True, + "only_main_content": True, + "includes": "blog,news", + "excludes": "admin,private", + "max_depth": 2, + "use_sitemap": True, + }, + ) + + # Act: Execute crawl operation + result = WebsiteService.crawl_url(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "active" + assert result["job_id"] == "test_job_id_123" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "firecrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + mock_external_service_dependencies["firecrawl_app"].assert_called_once_with( + api_key="decrypted_api_key", base_url="https://api.example.com" + ) + + # Verify Redis cache was set + mock_external_service_dependencies["redis_client"].setex.assert_called_once() + + def test_crawl_url_watercrawl_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful URL crawling with WaterCrawl provider. 
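+ The fixture's WaterCrawlProvider mock returns an active job ("watercrawl_job_123"), which the assertions below expect.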
+ + This test verifies: + - WaterCrawl provider is properly initialized + - API credentials are retrieved and decrypted + - Crawl options are correctly passed to provider + - Provider returns expected response format + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlApiRequest( + provider="watercrawl", + url="https://example.com", + options={ + "limit": 5, + "crawl_sub_pages": False, + "only_main_content": False, + "includes": None, + "excludes": None, + "max_depth": None, + "use_sitemap": False, + }, + ) + + # Act: Execute crawl operation + result = WebsiteService.crawl_url(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "active" + assert result["job_id"] == "watercrawl_job_123" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "watercrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + mock_external_service_dependencies["watercrawl_provider"].assert_called_once_with( + api_key="decrypted_api_key", base_url="https://api.example.com" + ) + + def test_crawl_url_jinareader_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful URL crawling with JinaReader provider. 
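+ With crawl_sub_pages disabled, JinaReader should fetch the single page via https://r.jina.ai/ instead of scheduling a crawl job.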
+ + This test verifies: + - JinaReader provider handles single page crawling + - API credentials are retrieved and decrypted + - HTTP requests are made with proper headers + - Response is properly parsed and returned + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request for single page crawling + api_request = WebsiteCrawlApiRequest( + provider="jinareader", + url="https://example.com", + options={ + "limit": 1, + "crawl_sub_pages": False, + "only_main_content": True, + "includes": None, + "excludes": None, + "max_depth": None, + "use_sitemap": False, + }, + ) + + # Act: Execute crawl operation + result = WebsiteService.crawl_url(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "active" + assert result["data"] is not None + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "jinareader" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify HTTP request was made + mock_external_service_dependencies["requests"].get.assert_called_once_with( + "https://r.jina.ai/https://example.com", + headers={"Accept": "application/json", "Authorization": "Bearer decrypted_api_key"}, + ) + + def test_crawl_url_invalid_provider(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test crawl operation fails with invalid provider. + + This test verifies: + - Invalid provider raises ValueError + - Proper error message is provided + - Service handles unsupported providers gracefully + """ + # Arrange: Create test account and prepare request with invalid provider + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request with invalid provider + api_request = WebsiteCrawlApiRequest( + provider="invalid_provider", + url="https://example.com", + options={"limit": 5, "crawl_sub_pages": False, "only_main_content": False}, + ) + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.crawl_url(api_request) + + assert "Invalid provider" in str(exc_info.value) + + def test_get_crawl_status_firecrawl_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful crawl status retrieval with Firecrawl provider. 
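+ For a completed job the service should also compute time_consuming from the cached start time and remove the Redis entry.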
+ + This test verifies: + - Firecrawl status is properly retrieved + - API credentials are retrieved and decrypted + - Status data includes all required fields + - Redis cache is properly managed for completed jobs + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="firecrawl", job_id="test_job_id_123") + + # Act: Get crawl status + result = WebsiteService.get_crawl_status_typed(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "completed" + assert result["job_id"] == "test_job_id_123" + assert result["total"] == 5 + assert result["current"] == 5 + assert "data" in result + assert "time_consuming" in result + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "firecrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify Redis cache was accessed and cleaned up + mock_external_service_dependencies["redis_client"].get.assert_called_once() + mock_external_service_dependencies["redis_client"].delete.assert_called_once() + + def test_get_crawl_status_watercrawl_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful crawl status retrieval with WaterCrawl provider. + + This test verifies: + - WaterCrawl status is properly retrieved + - API credentials are retrieved and decrypted + - Provider returns expected status format + - All required status fields are present + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="watercrawl", job_id="watercrawl_job_123") + + # Act: Get crawl status + result = WebsiteService.get_crawl_status_typed(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "completed" + assert result["job_id"] == "watercrawl_job_123" + assert result["total"] == 3 + assert result["current"] == 3 + assert "data" in result + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "watercrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + def test_get_crawl_status_jinareader_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful crawl status retrieval with JinaReader provider. 
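+ The mocked POST response carries no completed status, so the job should be reported as still active.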
+ + This test verifies: + - JinaReader status is properly retrieved + - API credentials are retrieved and decrypted + - HTTP requests are made with proper parameters + - Status data is properly formatted and returned + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="jinareader", job_id="jina_job_123") + + # Act: Get crawl status + result = WebsiteService.get_crawl_status_typed(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "active" + assert result["job_id"] == "jina_job_123" + assert "total" in result + assert "current" in result + assert "data" in result + assert "time_consuming" in result + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "jinareader" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify HTTP request was made + mock_external_service_dependencies["requests"].post.assert_called_once() + + def test_get_crawl_status_invalid_provider(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test crawl status retrieval fails with invalid provider. + + This test verifies: + - Invalid provider raises ValueError + - Proper error message is provided + - Service handles unsupported providers gracefully + """ + # Arrange: Create test account and prepare request with invalid provider + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request with invalid provider + api_request = WebsiteCrawlStatusApiRequest(provider="invalid_provider", job_id="test_job_id_123") + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_crawl_status_typed(api_request) + + assert "Invalid provider" in str(exc_info.value) + + def test_get_crawl_status_missing_credentials(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test crawl status retrieval fails when credentials are missing. 
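+ get_auth_credentials returning None means the tenant never configured the provider, which should surface as a ValueError.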
+ + This test verifies: + - Missing credentials raises ValueError + - Proper error message is provided + - Service handles authentication failures gracefully + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Mock missing credentials + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.return_value = None + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="firecrawl", job_id="test_job_id_123") + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_crawl_status_typed(api_request) + + assert "No valid credentials found for the provider" in str(exc_info.value) + + def test_get_crawl_status_missing_api_key(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test crawl status retrieval fails when API key is missing from config. + + This test verifies: + - Missing API key raises ValueError + - Proper error message is provided + - Service handles configuration failures gracefully + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Mock missing API key in config + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.return_value = { + "config": {"base_url": "https://api.example.com"} + } + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="firecrawl", job_id="test_job_id_123") + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_crawl_status_typed(api_request) + + assert "API key not found in configuration" in str(exc_info.value) + + def test_get_crawl_url_data_firecrawl_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful URL data retrieval with Firecrawl provider. 
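+ Crawl results should be served from storage when available; the mocked bytes below decode to a JSON list of page records.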
+ + This test verifies: + - Firecrawl URL data is properly retrieved + - API credentials are retrieved and decrypted + - Data is returned for matching URL + - Storage fallback works when needed + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock storage to return existing data + mock_external_service_dependencies["storage"].exists.return_value = True + mock_external_service_dependencies["storage"].load_once.return_value = ( + b"[" + b'{"source_url": "https://example.com", "title": "Test Page", ' + b'"description": "Test Description", "markdown": "# Test Content"}' + b"]" + ) + + # Act: Get URL data + result = WebsiteService.get_crawl_url_data( + job_id="test_job_id_123", + provider="firecrawl", + url="https://example.com", + tenant_id=account.current_tenant.id, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["source_url"] == "https://example.com" + assert result["title"] == "Test Page" + assert result["description"] == "Test Description" + assert result["markdown"] == "# Test Content" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "firecrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify storage was accessed + mock_external_service_dependencies["storage"].exists.assert_called_once() + mock_external_service_dependencies["storage"].load_once.assert_called_once() + + def test_get_crawl_url_data_watercrawl_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful URL data retrieval with WaterCrawl provider. + + This test verifies: + - WaterCrawl URL data is properly retrieved + - API credentials are retrieved and decrypted + - Provider returns expected data format + - All required data fields are present + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Act: Get URL data + result = WebsiteService.get_crawl_url_data( + job_id="watercrawl_job_123", + provider="watercrawl", + url="https://example.com", + tenant_id=account.current_tenant.id, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["title"] == "WaterCrawl Page" + assert result["source_url"] == "https://example.com" + assert result["description"] == "Test description" + assert result["markdown"] == "# Test Content" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "watercrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + def test_get_crawl_url_data_jinareader_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful URL data retrieval with JinaReader provider. 
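+ With an empty job_id, JinaReader should fall back to scraping the single URL directly via r.jina.ai.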
+ + This test verifies: + - JinaReader URL data is properly retrieved + - API credentials are retrieved and decrypted + - HTTP requests are made with proper parameters + - Data is properly formatted and returned + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock successful response for JinaReader + mock_response = MagicMock() + mock_response.json.return_value = { + "code": 200, + "data": { + "title": "JinaReader Page", + "url": "https://example.com", + "description": "Test description", + "content": "# Test Content", + }, + } + mock_external_service_dependencies["requests"].get.return_value = mock_response + + # Act: Get URL data without job_id (single page scraping) + result = WebsiteService.get_crawl_url_data( + job_id="", provider="jinareader", url="https://example.com", tenant_id=account.current_tenant.id + ) + + # Assert: Verify successful operation + assert result is not None + assert result["title"] == "JinaReader Page" + assert result["url"] == "https://example.com" + assert result["description"] == "Test description" + assert result["content"] == "# Test Content" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "jinareader" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify HTTP request was made + mock_external_service_dependencies["requests"].get.assert_called_once_with( + "https://r.jina.ai/https://example.com", + headers={"Accept": "application/json", "Authorization": "Bearer decrypted_api_key"}, + ) + + def test_get_scrape_url_data_firecrawl_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful URL scraping with Firecrawl provider. 
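+ The test overrides the fixture's FirecrawlApp mock so scrape_url returns a full page record, then checks it is passed through unchanged.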
+ + This test verifies: + - Firecrawl scraping is properly executed + - API credentials are retrieved and decrypted + - Scraping parameters are correctly passed + - Scraped data is returned in expected format + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock FirecrawlApp scraping response + mock_firecrawl_instance = MagicMock() + mock_firecrawl_instance.scrape_url.return_value = { + "title": "Scraped Page Title", + "content": "This is the scraped content", + "url": "https://example.com", + "description": "Page description", + } + mock_external_service_dependencies["firecrawl_app"].return_value = mock_firecrawl_instance + + # Act: Scrape URL + result = WebsiteService.get_scrape_url_data( + provider="firecrawl", url="https://example.com", tenant_id=account.current_tenant.id, only_main_content=True + ) + + # Assert: Verify successful operation + assert result is not None + assert result["title"] == "Scraped Page Title" + assert result["content"] == "This is the scraped content" + assert result["url"] == "https://example.com" + assert result["description"] == "Page description" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "firecrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify FirecrawlApp was called with correct parameters + mock_external_service_dependencies["firecrawl_app"].assert_called_once_with( + api_key="decrypted_api_key", base_url="https://api.example.com" + ) + mock_firecrawl_instance.scrape_url.assert_called_once_with( + url="https://example.com", params={"onlyMainContent": True} + ) + + def test_get_scrape_url_data_watercrawl_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful URL scraping with WaterCrawl provider. 
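+ The fixture's WaterCrawlProvider.scrape_url mock supplies the page record asserted below.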
+ + This test verifies: + - WaterCrawl scraping is properly executed + - API credentials are retrieved and decrypted + - Provider returns expected scraping format + - All required data fields are present + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Act: Scrape URL + result = WebsiteService.get_scrape_url_data( + provider="watercrawl", + url="https://example.com", + tenant_id=account.current_tenant.id, + only_main_content=False, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["title"] == "Scraped Page" + assert result["content"] == "Test content" + assert result["url"] == "https://example.com" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "watercrawl" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify WaterCrawlProvider was called with correct parameters + mock_external_service_dependencies["watercrawl_provider"].assert_called_once_with( + api_key="decrypted_api_key", base_url="https://api.example.com" + ) + + def test_get_scrape_url_data_invalid_provider(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test URL scraping fails with invalid provider. + + This test verifies: + - Invalid provider raises ValueError + - Proper error message is provided + - Service handles unsupported providers gracefully + """ + # Arrange: Create test account and prepare request with invalid provider + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_scrape_url_data( + provider="invalid_provider", + url="https://example.com", + tenant_id=account.current_tenant.id, + only_main_content=False, + ) + + assert "Invalid provider" in str(exc_info.value) + + def test_crawl_options_include_exclude_paths(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test CrawlOptions include and exclude path methods. + + This test verifies: + - Include paths are properly parsed from comma-separated string + - Exclude paths are properly parsed from comma-separated string + - Empty or None values are handled correctly + - Path lists are returned in expected format + """ + # Arrange: Create CrawlOptions with various path configurations + options_with_paths = CrawlOptions(includes="blog,news,articles", excludes="admin,private,test") + + options_without_paths = CrawlOptions(includes=None, excludes="") + + # Act: Get include and exclude paths + include_paths = options_with_paths.get_include_paths() + exclude_paths = options_with_paths.get_exclude_paths() + + empty_include_paths = options_without_paths.get_include_paths() + empty_exclude_paths = options_without_paths.get_exclude_paths() + + # Assert: Verify path parsing + assert include_paths == ["blog", "news", "articles"] + assert exclude_paths == ["admin", "private", "test"] + assert empty_include_paths == [] + assert empty_exclude_paths == [] + + def test_website_crawl_api_request_conversion(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test WebsiteCrawlApiRequest conversion to CrawlRequest. 
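+ to_crawl_request should carry every option across verbatim, including the comma-separated include/exclude strings.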
+ + This test verifies: + - API request is properly converted to internal CrawlRequest + - All options are correctly mapped + - Default values are applied when options are missing + - Conversion maintains data integrity + """ + # Arrange: Create API request with various options + api_request = WebsiteCrawlApiRequest( + provider="firecrawl", + url="https://example.com", + options={ + "limit": 10, + "crawl_sub_pages": True, + "only_main_content": True, + "includes": "blog,news", + "excludes": "admin,private", + "max_depth": 3, + "use_sitemap": False, + }, + ) + + # Act: Convert to CrawlRequest + crawl_request = api_request.to_crawl_request() + + # Assert: Verify conversion + assert crawl_request.url == "https://example.com" + assert crawl_request.provider == "firecrawl" + assert crawl_request.options.limit == 10 + assert crawl_request.options.crawl_sub_pages is True + assert crawl_request.options.only_main_content is True + assert crawl_request.options.includes == "blog,news" + assert crawl_request.options.excludes == "admin,private" + assert crawl_request.options.max_depth == 3 + assert crawl_request.options.use_sitemap is False + + def test_website_crawl_api_request_from_args(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test WebsiteCrawlApiRequest creation from Flask arguments. + + This test verifies: + - Request is properly created from parsed arguments + - Required fields are validated + - Optional fields are handled correctly + - Validation errors are properly raised + """ + # Arrange: Prepare valid arguments + valid_args = {"provider": "watercrawl", "url": "https://example.com", "options": {"limit": 5}} + + # Act: Create request from args + request = WebsiteCrawlApiRequest.from_args(valid_args) + + # Assert: Verify request creation + assert request.provider == "watercrawl" + assert request.url == "https://example.com" + assert request.options == {"limit": 5} + + # Test missing provider + invalid_args = {"url": "https://example.com", "options": {}} + with pytest.raises(ValueError) as exc_info: + WebsiteCrawlApiRequest.from_args(invalid_args) + assert "Provider is required" in str(exc_info.value) + + # Test missing URL + invalid_args = {"provider": "watercrawl", "options": {}} + with pytest.raises(ValueError) as exc_info: + WebsiteCrawlApiRequest.from_args(invalid_args) + assert "URL is required" in str(exc_info.value) + + # Test missing options + invalid_args = {"provider": "watercrawl", "url": "https://example.com"} + with pytest.raises(ValueError) as exc_info: + WebsiteCrawlApiRequest.from_args(invalid_args) + assert "Options are required" in str(exc_info.value) + + def test_crawl_url_jinareader_sub_pages_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful URL crawling with JinaReader provider for sub-pages. 
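+ Sub-page crawling should POST to JinaReader's adaptive-crawl endpoint with maxPages and useSitemap derived from the options.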
+ + This test verifies: + - JinaReader provider handles sub-page crawling correctly + - HTTP POST request is made with proper parameters + - Job ID is returned for multi-page crawling + - All required parameters are passed correctly + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request for sub-page crawling + api_request = WebsiteCrawlApiRequest( + provider="jinareader", + url="https://example.com", + options={ + "limit": 5, + "crawl_sub_pages": True, + "only_main_content": False, + "includes": None, + "excludes": None, + "max_depth": None, + "use_sitemap": True, + }, + ) + + # Act: Execute crawl operation + result = WebsiteService.crawl_url(api_request) + + # Assert: Verify successful operation + assert result is not None + assert result["status"] == "active" + assert result["job_id"] == "jina_job_123" + + # Verify external service interactions + mock_external_service_dependencies["api_key_auth_service"].get_auth_credentials.assert_called_once_with( + account.current_tenant.id, "website", "jinareader" + ) + mock_external_service_dependencies["encrypter"].decrypt_token.assert_called_once_with( + tenant_id=account.current_tenant.id, token="encrypted_api_key" + ) + + # Verify HTTP POST request was made for sub-page crawling + mock_external_service_dependencies["requests"].post.assert_called_once_with( + "https://adaptivecrawl-kir3wx7b3a-uc.a.run.app", + json={"url": "https://example.com", "maxPages": 5, "useSitemap": True}, + headers={"Content-Type": "application/json", "Authorization": "Bearer decrypted_api_key"}, + ) + + def test_crawl_url_jinareader_failed_response(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test JinaReader crawling fails when API returns error. + + This test verifies: + - Failed API response raises ValueError + - Proper error message is provided + - Service handles API failures gracefully + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock failed response + mock_failed_response = MagicMock() + mock_failed_response.json.return_value = {"code": 500, "error": "Internal server error"} + mock_external_service_dependencies["requests"].get.return_value = mock_failed_response + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlApiRequest( + provider="jinareader", + url="https://example.com", + options={"limit": 1, "crawl_sub_pages": False, "only_main_content": True}, + ) + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.crawl_url(api_request) + + assert "Failed to crawl" in str(exc_info.value) + + def test_get_crawl_status_firecrawl_active_job( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test Firecrawl status retrieval for active (not completed) job. 
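+ While a job is still running, the cached start time must remain in Redis so time_consuming can be computed once it completes.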
+ + This test verifies: + - Active job status is properly returned + - Redis cache is not deleted for active jobs + - Time consuming is not calculated for active jobs + - All required status fields are present + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock active job status + mock_firecrawl_instance = MagicMock() + mock_firecrawl_instance.check_crawl_status.return_value = { + "status": "active", + "total": 10, + "current": 3, + "data": [], + } + mock_external_service_dependencies["firecrawl_app"].return_value = mock_firecrawl_instance + + # Mock current_user for the test + with patch("services.website_service.current_user") as mock_current_user: + mock_current_user.current_tenant_id = account.current_tenant.id + + # Create API request + api_request = WebsiteCrawlStatusApiRequest(provider="firecrawl", job_id="active_job_123") + + # Act: Get crawl status + result = WebsiteService.get_crawl_status_typed(api_request) + + # Assert: Verify active job status + assert result is not None + assert result["status"] == "active" + assert result["job_id"] == "active_job_123" + assert result["total"] == 10 + assert result["current"] == 3 + assert "data" in result + assert "time_consuming" not in result + + # Verify Redis cache was not accessed for active jobs + mock_external_service_dependencies["redis_client"].get.assert_not_called() + mock_external_service_dependencies["redis_client"].delete.assert_not_called() + + def test_get_crawl_url_data_firecrawl_storage_fallback( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test Firecrawl URL data retrieval with storage fallback. + + This test verifies: + - Storage fallback works when storage has data + - API call is not made when storage has data + - Data is properly parsed from storage + - Correct URL data is returned + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock storage to return existing data + mock_external_service_dependencies["storage"].exists.return_value = True + mock_external_service_dependencies["storage"].load_once.return_value = ( + b"[" + b'{"source_url": "https://example.com/page1", ' + b'"title": "Page 1", "description": "Description 1", "markdown": "# Page 1"}, ' + b'{"source_url": "https://example.com/page2", "title": "Page 2", ' + b'"description": "Description 2", "markdown": "# Page 2"}' + b"]" + ) + + # Act: Get URL data for specific URL + result = WebsiteService.get_crawl_url_data( + job_id="test_job_id_123", + provider="firecrawl", + url="https://example.com/page1", + tenant_id=account.current_tenant.id, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["source_url"] == "https://example.com/page1" + assert result["title"] == "Page 1" + assert result["description"] == "Description 1" + assert result["markdown"] == "# Page 1" + + # Verify storage was accessed + mock_external_service_dependencies["storage"].exists.assert_called_once() + mock_external_service_dependencies["storage"].load_once.assert_called_once() + + def test_get_crawl_url_data_firecrawl_api_fallback( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test Firecrawl URL data retrieval with API fallback when storage is empty. 
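+ When storage has no cached results, the service should re-query Firecrawl's crawl status and read the page data from the API response.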
+ + This test verifies: + - API fallback works when storage has no data + - FirecrawlApp is called to get data + - Completed job status is checked + - Data is returned from API response + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock storage to return no data + mock_external_service_dependencies["storage"].exists.return_value = False + + # Mock FirecrawlApp for API fallback + mock_firecrawl_instance = MagicMock() + mock_firecrawl_instance.check_crawl_status.return_value = { + "status": "completed", + "data": [ + { + "source_url": "https://example.com/api_page", + "title": "API Page", + "description": "API Description", + "markdown": "# API Content", + } + ], + } + mock_external_service_dependencies["firecrawl_app"].return_value = mock_firecrawl_instance + + # Act: Get URL data + result = WebsiteService.get_crawl_url_data( + job_id="test_job_id_123", + provider="firecrawl", + url="https://example.com/api_page", + tenant_id=account.current_tenant.id, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["source_url"] == "https://example.com/api_page" + assert result["title"] == "API Page" + assert result["description"] == "API Description" + assert result["markdown"] == "# API Content" + + # Verify API was called + mock_external_service_dependencies["firecrawl_app"].assert_called_once() + + def test_get_crawl_url_data_firecrawl_incomplete_job( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test Firecrawl URL data retrieval fails for incomplete job. + + This test verifies: + - Incomplete job raises ValueError + - Proper error message is provided + - Service handles incomplete jobs gracefully + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock storage to return no data + mock_external_service_dependencies["storage"].exists.return_value = False + + # Mock incomplete job status + mock_firecrawl_instance = MagicMock() + mock_firecrawl_instance.check_crawl_status.return_value = {"status": "active", "data": []} + mock_external_service_dependencies["firecrawl_app"].return_value = mock_firecrawl_instance + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_crawl_url_data( + job_id="test_job_id_123", + provider="firecrawl", + url="https://example.com/page", + tenant_id=account.current_tenant.id, + ) + + assert "Crawl job is not completed" in str(exc_info.value) + + def test_get_crawl_url_data_jinareader_with_job_id( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test JinaReader URL data retrieval with job ID for multi-page crawling. 
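+ The processed mapping in the status response is keyed by URL; the service should return the record matching the requested URL.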
+ + This test verifies: + - JinaReader handles job ID-based data retrieval + - Status check is performed before data retrieval + - Processed data is properly formatted + - Correct URL data is returned + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock successful status response + mock_status_response = MagicMock() + mock_status_response.json.return_value = { + "code": 200, + "data": { + "status": "completed", + "processed": { + "https://example.com/page1": { + "data": { + "title": "Page 1", + "url": "https://example.com/page1", + "description": "Description 1", + "content": "# Content 1", + } + } + }, + }, + } + mock_external_service_dependencies["requests"].post.return_value = mock_status_response + + # Act: Get URL data with job ID + result = WebsiteService.get_crawl_url_data( + job_id="jina_job_123", + provider="jinareader", + url="https://example.com/page1", + tenant_id=account.current_tenant.id, + ) + + # Assert: Verify successful operation + assert result is not None + assert result["title"] == "Page 1" + assert result["url"] == "https://example.com/page1" + assert result["description"] == "Description 1" + assert result["content"] == "# Content 1" + + # Verify HTTP requests were made + assert mock_external_service_dependencies["requests"].post.call_count == 2 + + def test_get_crawl_url_data_jinareader_incomplete_job( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test JinaReader URL data retrieval fails for incomplete job. + + This test verifies: + - Incomplete job raises ValueError + - Proper error message is provided + - Service handles incomplete jobs gracefully + """ + # Arrange: Create test account and prepare request + account = self._create_test_account(db_session_with_containers, mock_external_service_dependencies) + + # Mock incomplete job status + mock_status_response = MagicMock() + mock_status_response.json.return_value = {"code": 200, "data": {"status": "active", "processed": {}}} + mock_external_service_dependencies["requests"].post.return_value = mock_status_response + + # Act & Assert: Verify proper error handling + with pytest.raises(ValueError) as exc_info: + WebsiteService.get_crawl_url_data( + job_id="jina_job_123", + provider="jinareader", + url="https://example.com/page", + tenant_id=account.current_tenant.id, + ) + + assert "Crawl job is not completed" in str(exc_info.value) + + def test_crawl_options_default_values(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test CrawlOptions default values and initialization. 
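+
+        Quick reference for the defaults asserted below:
+
+            options = CrawlOptions()
+            assert options.limit == 1
+            assert options.crawl_sub_pages is False
+            assert options.use_sitemap is True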
+ + This test verifies: + - Default values are properly set + - Optional fields can be None + - Boolean fields have correct defaults + - Integer fields have correct defaults + """ + # Arrange: Create CrawlOptions with minimal parameters + options = CrawlOptions() + + # Assert: Verify default values + assert options.limit == 1 + assert options.crawl_sub_pages is False + assert options.only_main_content is False + assert options.includes is None + assert options.excludes is None + assert options.max_depth is None + assert options.use_sitemap is True + + # Test with custom values + custom_options = CrawlOptions( + limit=10, + crawl_sub_pages=True, + only_main_content=True, + includes="blog,news", + excludes="admin", + max_depth=3, + use_sitemap=False, + ) + + assert custom_options.limit == 10 + assert custom_options.crawl_sub_pages is True + assert custom_options.only_main_content is True + assert custom_options.includes == "blog,news" + assert custom_options.excludes == "admin" + assert custom_options.max_depth == 3 + assert custom_options.use_sitemap is False + + def test_website_crawl_status_api_request_from_args( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test WebsiteCrawlStatusApiRequest creation from Flask arguments. + + This test verifies: + - Request is properly created from parsed arguments + - Required fields are validated + - Job ID is properly handled + - Validation errors are properly raised + """ + # Arrange: Prepare valid arguments + valid_args = {"provider": "firecrawl"} + job_id = "test_job_123" + + # Act: Create request from args + request = WebsiteCrawlStatusApiRequest.from_args(valid_args, job_id) + + # Assert: Verify request creation + assert request.provider == "firecrawl" + assert request.job_id == "test_job_123" + + # Test missing provider + invalid_args = {} + with pytest.raises(ValueError) as exc_info: + WebsiteCrawlStatusApiRequest.from_args(invalid_args, job_id) + assert "Provider is required" in str(exc_info.value) + + # Test missing job ID + with pytest.raises(ValueError) as exc_info: + WebsiteCrawlStatusApiRequest.from_args(valid_args, "") + assert "Job ID is required" in str(exc_info.value) + + def test_scrape_request_initialization(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test ScrapeRequest dataclass initialization and properties. 
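+
+        Quick reference mirroring the construction exercised below:
+
+            request = ScrapeRequest(
+                provider="firecrawl",
+                url="https://example.com",
+                tenant_id="tenant_123",
+                only_main_content=True,
+            )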
+ + This test verifies: + - ScrapeRequest is properly initialized + - All fields are correctly set + - Boolean field works correctly + - String fields are properly assigned + """ + # Arrange: Create ScrapeRequest + request = ScrapeRequest( + provider="firecrawl", url="https://example.com", tenant_id="tenant_123", only_main_content=True + ) + + # Assert: Verify initialization + assert request.provider == "firecrawl" + assert request.url == "https://example.com" + assert request.tenant_id == "tenant_123" + assert request.only_main_content is True + + # Test with different values + request2 = ScrapeRequest( + provider="watercrawl", url="https://test.com", tenant_id="tenant_456", only_main_content=False + ) + + assert request2.provider == "watercrawl" + assert request2.url == "https://test.com" + assert request2.tenant_id == "tenant_456" + assert request2.only_main_content is False diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py new file mode 100644 index 0000000000..2e18184aea --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py @@ -0,0 +1,1358 @@ +import json +import uuid +from datetime import UTC, datetime, timedelta +from unittest.mock import patch + +import pytest +from faker import Faker + +from core.workflow.entities.workflow_execution import WorkflowExecutionStatus +from models import EndUser, Workflow, WorkflowAppLog, WorkflowRun +from models.enums import CreatorUserRole +from services.account_service import AccountService, TenantService +from services.app_service import AppService +from services.workflow_app_service import WorkflowAppService + + +class TestWorkflowAppService: + """Integration tests for WorkflowAppService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.app_service.FeatureService") as mock_feature_service, + patch("services.app_service.EnterpriseService") as mock_enterprise_service, + patch("services.app_service.ModelManager") as mock_model_manager, + patch("services.account_service.FeatureService") as mock_account_feature_service, + ): + # Setup default mock returns for app service + mock_feature_service.get_system_features.return_value.webapp_auth.enabled = False + mock_enterprise_service.WebAppAuth.update_app_access_mode.return_value = None + mock_enterprise_service.WebAppAuth.cleanup_webapp.return_value = None + + # Setup default mock returns for account service + mock_account_feature_service.get_system_features.return_value.is_allow_register = True + + # Mock ModelManager for model configuration + mock_model_instance = mock_model_manager.return_value + mock_model_instance.get_default_model_instance.return_value = None + mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo") + + yield { + "feature_service": mock_feature_service, + "enterprise_service": mock_enterprise_service, + "model_manager": mock_model_manager, + "account_feature_service": mock_account_feature_service, + } + + def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test app and account for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (app, account) - Created app and account instances + """ + fake = Faker() + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Create app with realistic data + app_args = { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "workflow", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + } + + app_service = AppService() + app = app_service.create_app(tenant.id, app_args, account) + + return app, account + + def _create_test_tenant_and_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test tenant and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (tenant, account) - Created tenant and account instances + """ + fake = Faker() + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + return tenant, account + + def _create_test_app(self, db_session_with_containers, tenant, account): + """ + Helper method to create a test app for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + tenant: Tenant instance + account: Account instance + + Returns: + App: Created app instance + """ + fake = Faker() + + # Create app with realistic data + app_args = { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "workflow", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + } + + app_service = AppService() + app = app_service.create_app(tenant.id, app_args, account) + + return app + + def _create_test_workflow_data(self, db_session_with_containers, app, account): + """ + Helper method to create test workflow data for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance + account: Account instance + + Returns: + tuple: (workflow, workflow_run, workflow_app_log) - Created workflow entities + """ + fake = Faker() + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create workflow run + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input1": "test_value"}), + outputs=json.dumps({"output1": "result_value"}), + status="succeeded", + elapsed_time=1.5, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + finished_at=datetime.now(UTC), + ) + db.session.add(workflow_run) + db.session.commit() + + # Create workflow app log + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_app_log) + db.session.commit() + + return workflow, workflow_run, workflow_app_log + + def test_get_paginate_workflow_app_logs_basic_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful pagination of workflow app logs with basic parameters. + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + workflow, workflow_run, workflow_app_log = self._create_test_workflow_data( + db_session_with_containers, app, account + ) + + # Act: Execute the method under test + service = WorkflowAppService() + result = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=20 + ) + + # Assert: Verify the expected outcomes + assert result is not None + assert result["page"] == 1 + assert result["limit"] == 20 + assert result["total"] == 1 + assert result["has_more"] is False + assert len(result["data"]) == 1 + + # Verify the returned data + log_entry = result["data"][0] + assert log_entry.id == workflow_app_log.id + assert log_entry.tenant_id == app.tenant_id + assert log_entry.app_id == app.id + assert log_entry.workflow_id == workflow.id + assert log_entry.workflow_run_id == workflow_run.id + + # Verify database state + from extensions.ext_database import db + + db.session.refresh(workflow_app_log) + assert workflow_app_log.id is not None + + def test_get_paginate_workflow_app_logs_with_keyword_search( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with keyword search functionality. 
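+
+        Illustrative sketch of the keyword-filtered call (names are
+        hypothetical; the keyword is matched against run inputs/outputs):
+
+            result = service.get_paginate_workflow_app_logs(
+                session=session, app_model=app, keyword="test_keyword",
+                page=1, limit=20,
+            )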
+ """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + workflow, workflow_run, workflow_app_log = self._create_test_workflow_data( + db_session_with_containers, app, account + ) + + # Update workflow run with searchable content + from extensions.ext_database import db + + workflow_run.inputs = json.dumps({"search_term": "test_keyword", "input2": "other_value"}) + workflow_run.outputs = json.dumps({"result": "test_keyword_found", "status": "success"}) + db.session.commit() + + # Act: Execute the method under test with keyword search + service = WorkflowAppService() + result = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="test_keyword", page=1, limit=20 + ) + + # Assert: Verify keyword search results + assert result is not None + assert result["total"] == 1 + assert len(result["data"]) == 1 + + # Verify the returned data contains the searched keyword + log_entry = result["data"][0] + assert log_entry.workflow_run_id == workflow_run.id + + # Test with non-matching keyword + result_no_match = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="non_existent_keyword", page=1, limit=20 + ) + + assert result_no_match["total"] == 0 + assert len(result_no_match["data"]) == 0 + + def test_get_paginate_workflow_app_logs_with_status_filter( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with status filtering. + """ + # Arrange: Create test data with different statuses + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create workflow runs with different statuses + statuses = ["succeeded", "failed", "running", "stopped"] + workflow_runs = [] + workflow_app_logs = [] + + for i, status in enumerate(statuses): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": f"test_{i}"}), + outputs=json.dumps({"output": f"result_{i}"}), + status=status, + elapsed_time=1.0 + i, + total_tokens=100 + i * 10, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, + ) + db.session.add(workflow_run) + db.session.commit() + + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + ) + db.session.add(workflow_app_log) + db.session.commit() + + workflow_runs.append(workflow_run) + workflow_app_logs.append(workflow_app_log) + + # Act 
& Assert: Test filtering by different statuses + service = WorkflowAppService() + + # Test succeeded status filter + result_succeeded = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + status=WorkflowExecutionStatus.SUCCEEDED, + page=1, + limit=20, + ) + assert result_succeeded["total"] == 1 + assert result_succeeded["data"][0].workflow_run.status == "succeeded" + + # Test failed status filter + result_failed = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, status=WorkflowExecutionStatus.FAILED, page=1, limit=20 + ) + assert result_failed["total"] == 1 + assert result_failed["data"][0].workflow_run.status == "failed" + + # Test running status filter + result_running = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, status=WorkflowExecutionStatus.RUNNING, page=1, limit=20 + ) + assert result_running["total"] == 1 + assert result_running["data"][0].workflow_run.status == "running" + + def test_get_paginate_workflow_app_logs_with_time_filtering( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with time-based filtering. + """ + # Arrange: Create test data with different timestamps + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create workflow runs with different timestamps + base_time = datetime.now(UTC) + timestamps = [ + base_time - timedelta(hours=3), # 3 hours ago + base_time - timedelta(hours=2), # 2 hours ago + base_time - timedelta(hours=1), # 1 hour ago + base_time, # now + ] + + workflow_runs = [] + workflow_app_logs = [] + + for i, timestamp in enumerate(timestamps): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": f"test_{i}"}), + outputs=json.dumps({"output": f"result_{i}"}), + status="succeeded", + elapsed_time=1.0, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=timestamp, + finished_at=timestamp + timedelta(minutes=1), + ) + db.session.add(workflow_run) + db.session.commit() + + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=timestamp, + ) + db.session.add(workflow_app_log) + db.session.commit() + + workflow_runs.append(workflow_run) + workflow_app_logs.append(workflow_app_log) + + # Act & Assert: Test time-based filtering + service = WorkflowAppService() + + # Test filtering logs created after 2 hours ago + result_after = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_at_after=base_time - timedelta(hours=2), + page=1, + limit=20, + ) + 
assert result_after["total"] == 3 # Should get logs from 2 hours ago, 1 hour ago, and now + + # Test filtering logs created before 1 hour ago + result_before = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_at_before=base_time - timedelta(hours=1), + page=1, + limit=20, + ) + assert result_before["total"] == 3 # Should get logs from 3 hours ago, 2 hours ago, and 1 hour ago + + # Test filtering logs within a time range + result_range = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_at_after=base_time - timedelta(hours=2), + created_at_before=base_time - timedelta(hours=1), + page=1, + limit=20, + ) + assert result_range["total"] == 2 # Should get logs from 2 hours ago and 1 hour ago + + def test_get_paginate_workflow_app_logs_with_pagination( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with different page sizes and limits. + """ + # Arrange: Create test data with multiple logs + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create 25 workflow runs and logs + total_logs = 25 + workflow_runs = [] + workflow_app_logs = [] + + for i in range(total_logs): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": f"test_{i}"}), + outputs=json.dumps({"output": f"result_{i}"}), + status="succeeded", + elapsed_time=1.0, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), + ) + db.session.add(workflow_run) + db.session.commit() + + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + ) + db.session.add(workflow_app_log) + db.session.commit() + + workflow_runs.append(workflow_run) + workflow_app_logs.append(workflow_app_log) + + # Act & Assert: Test pagination + service = WorkflowAppService() + + # Test first page with limit 10 + result_page1 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=10 + ) + assert result_page1["page"] == 1 + assert result_page1["limit"] == 10 + assert result_page1["total"] == total_logs + assert result_page1["has_more"] is True + assert len(result_page1["data"]) == 10 + + # Test second page with limit 10 + result_page2 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=2, limit=10 + ) + assert result_page2["page"] == 2 + assert result_page2["limit"] == 10 + assert 
result_page2["total"] == total_logs + assert result_page2["has_more"] is True + assert len(result_page2["data"]) == 10 + + # Test third page with limit 10 + result_page3 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=3, limit=10 + ) + assert result_page3["page"] == 3 + assert result_page3["limit"] == 10 + assert result_page3["total"] == total_logs + assert result_page3["has_more"] is False + assert len(result_page3["data"]) == 5 # Remaining 5 logs + + # Test with larger limit + result_large_limit = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=50 + ) + assert result_large_limit["page"] == 1 + assert result_large_limit["limit"] == 50 + assert result_large_limit["total"] == total_logs + assert result_large_limit["has_more"] is False + assert len(result_large_limit["data"]) == total_logs + + def test_get_paginate_workflow_app_logs_with_user_role_filtering( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with user role and session filtering. + """ + # Arrange: Create test data with different user roles + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create end user + end_user = EndUser( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="web", + is_anonymous=False, + session_id="test_session_123", + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + ) + db.session.add(end_user) + db.session.commit() + + # Create workflow runs and logs for both account and end user + workflow_runs = [] + workflow_app_logs = [] + + # Account user logs + for i in range(3): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": f"account_test_{i}"}), + outputs=json.dumps({"output": f"account_result_{i}"}), + status="succeeded", + elapsed_time=1.0, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 1), + ) + db.session.add(workflow_run) + db.session.commit() + + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + ) + db.session.add(workflow_app_log) + db.session.commit() + + workflow_runs.append(workflow_run) + workflow_app_logs.append(workflow_app_log) + + # End user logs + for i in range(2): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + 
graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": f"end_user_test_{i}"}), + outputs=json.dumps({"output": f"end_user_result_{i}"}), + status="succeeded", + elapsed_time=1.0, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.END_USER.value, + created_by=end_user.id, + created_at=datetime.now(UTC) + timedelta(minutes=i + 10), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 11), + ) + db.session.add(workflow_run) + db.session.commit() + + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="web-app", + created_by_role=CreatorUserRole.END_USER.value, + created_by=end_user.id, + created_at=datetime.now(UTC) + timedelta(minutes=i + 10), + ) + db.session.add(workflow_app_log) + db.session.commit() + + workflow_runs.append(workflow_run) + workflow_app_logs.append(workflow_app_log) + + # Act & Assert: Test user role filtering + service = WorkflowAppService() + + # Test filtering by end user session ID + result_session_filter = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_end_user_session_id="test_session_123", + page=1, + limit=20, + ) + assert result_session_filter["total"] == 2 + assert all(log.created_by_role == CreatorUserRole.END_USER.value for log in result_session_filter["data"]) + + # Test filtering by account email + result_account_filter = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_by_account=account.email, page=1, limit=20 + ) + assert result_account_filter["total"] == 3 + assert all(log.created_by_role == CreatorUserRole.ACCOUNT.value for log in result_account_filter["data"]) + + # Test filtering by non-existent session ID + result_no_session = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_end_user_session_id="non_existent_session", + page=1, + limit=20, + ) + assert result_no_session["total"] == 0 + + # Test filtering by non-existent account email + result_no_account = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert result_no_account["total"] == 0 + + def test_get_paginate_workflow_app_logs_with_uuid_keyword_search( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with UUID keyword search functionality. 
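+
+        Illustrative sketch: a full workflow-run UUID passed as the keyword
+        should match exactly one log, while partial or malformed UUIDs match
+        nothing (names are hypothetical):
+
+            result = service.get_paginate_workflow_app_logs(
+                session=session, app_model=app, keyword=workflow_run_id,
+                page=1, limit=20,
+            )
+            assert result["total"] == 1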
+ """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create workflow run with specific UUID + workflow_run_id = str(uuid.uuid4()) + workflow_run = WorkflowRun( + id=workflow_run_id, + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": "test_input"}), + outputs=json.dumps({"output": "test_output"}), + status="succeeded", + elapsed_time=1.0, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + finished_at=datetime.now(UTC) + timedelta(minutes=1), + ) + db.session.add(workflow_run) + db.session.commit() + + # Create workflow app log + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_app_log) + db.session.commit() + + # Act & Assert: Test UUID keyword search + service = WorkflowAppService() + + # Test searching by workflow run UUID + result_uuid_search = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword=workflow_run_id, page=1, limit=20 + ) + assert result_uuid_search["total"] == 1 + assert result_uuid_search["data"][0].workflow_run_id == workflow_run_id + + # Test searching by partial UUID (should not match) + partial_uuid = workflow_run_id[:8] + result_partial_uuid = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword=partial_uuid, page=1, limit=20 + ) + assert result_partial_uuid["total"] == 0 + + # Test searching by invalid UUID format + invalid_uuid = "invalid-uuid-format" + result_invalid_uuid = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword=invalid_uuid, page=1, limit=20 + ) + assert result_invalid_uuid["total"] == 0 + + def test_get_paginate_workflow_app_logs_with_edge_cases( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with edge cases and boundary conditions. 
+ """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + from extensions.ext_database import db + + # Create workflow + workflow = Workflow( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + type="workflow", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + db.session.add(workflow) + db.session.commit() + + # Create workflow run with edge case data + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": "test_input"}), + outputs=json.dumps({"output": "test_output"}), + status="succeeded", + elapsed_time=0.0, # Edge case: 0 elapsed time + total_tokens=0, # Edge case: 0 tokens + total_steps=0, # Edge case: 0 steps + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + finished_at=datetime.now(UTC), + ) + db.session.add(workflow_run) + db.session.commit() + + # Create workflow app log + workflow_app_log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(workflow_app_log) + db.session.commit() + + # Act & Assert: Test edge cases + service = WorkflowAppService() + + # Test with page 1 (normal case) + result_page_one = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=20 + ) + assert result_page_one["page"] == 1 + assert result_page_one["total"] == 1 + + # Test with very large limit + result_large_limit = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=10000 + ) + assert result_large_limit["limit"] == 10000 + assert result_large_limit["total"] == 1 + + # Test with limit 0 (should return empty result) + result_zero_limit = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=0 + ) + assert result_zero_limit["limit"] == 0 + assert result_zero_limit["total"] == 1 + assert len(result_zero_limit["data"]) == 0 + + # Test with very high page number (should return empty result) + result_high_page = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=999999, limit=20 + ) + assert result_high_page["page"] == 999999 + assert result_high_page["total"] == 1 + assert len(result_high_page["data"]) == 0 + assert result_high_page["has_more"] is False + + def test_get_paginate_workflow_app_logs_with_empty_results( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with empty results and no data scenarios. 
+ """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Act & Assert: Test empty results + service = WorkflowAppService() + + # Test with no workflow logs + result_no_logs = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=20 + ) + assert result_no_logs["page"] == 1 + assert result_no_logs["limit"] == 20 + assert result_no_logs["total"] == 0 + assert result_no_logs["has_more"] is False + assert len(result_no_logs["data"]) == 0 + + # Test with status filter that matches no logs + result_no_status_match = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, status=WorkflowExecutionStatus.FAILED, page=1, limit=20 + ) + assert result_no_status_match["total"] == 0 + assert len(result_no_status_match["data"]) == 0 + + # Test with keyword that matches no logs + result_no_keyword_match = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, keyword="nonexistent_keyword", page=1, limit=20 + ) + assert result_no_keyword_match["total"] == 0 + assert len(result_no_keyword_match["data"]) == 0 + + # Test with time filter that matches no logs + future_time = datetime.now(UTC) + timedelta(days=1) + result_future_time = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, created_at_after=future_time, page=1, limit=20 + ) + assert result_future_time["total"] == 0 + assert len(result_future_time["data"]) == 0 + + # Test with end user session that doesn't exist + result_no_session = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_end_user_session_id="nonexistent_session", + page=1, + limit=20, + ) + assert result_no_session["total"] == 0 + assert len(result_no_session["data"]) == 0 + + # Test with account email that doesn't exist + result_no_account = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account="nonexistent@example.com", + page=1, + limit=20, + ) + assert result_no_account["total"] == 0 + assert len(result_no_account["data"]) == 0 + + def test_get_paginate_workflow_app_logs_with_complex_query_combinations( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with complex query combinations. 
+ """ + # Arrange: Create test data with various combinations + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + workflow, _, _ = self._create_test_workflow_data(db_session_with_containers, app, account) + + # Create multiple logs with different characteristics + logs_data = [] + for i in range(5): + status = "succeeded" if i % 2 == 0 else "failed" + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + status=status, + inputs=json.dumps({"input": f"test_input_{i}"}), + outputs=json.dumps({"output": f"test_output_{i}"}) if status == "succeeded" else None, + error=json.dumps({"error": f"test_error_{i}"}) if status == "failed" else None, + elapsed_time=1.5, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status == "succeeded" else None, + ) + db_session_with_containers.add(workflow_run) + db_session_with_containers.flush() + + log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + ) + db_session_with_containers.add(log) + logs_data.append((log, workflow_run)) + + db_session_with_containers.commit() + + service = WorkflowAppService() + + # Test complex combination: keyword + status + time range + pagination + result_complex = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + keyword="test_input_1", + status=WorkflowExecutionStatus.SUCCEEDED, + created_at_after=datetime.now(UTC) - timedelta(minutes=10), + created_at_before=datetime.now(UTC) + timedelta(minutes=10), + page=1, + limit=3, + ) + + # Should find logs matching all criteria + assert result_complex["total"] >= 0 # At least 0, could be more depending on timing + assert len(result_complex["data"]) <= 3 # Respects limit + + # Test combination: user role + keyword + status + result_user_keyword_status = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_by_account=account.email, + keyword="test_input", + status=WorkflowExecutionStatus.FAILED, + page=1, + limit=20, + ) + + # Should find failed logs created by the account with "test_input" in inputs + assert result_user_keyword_status["total"] >= 0 + + # Test combination: time range + status + pagination with small limit + result_time_status_limit = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app, + created_at_after=datetime.now(UTC) - timedelta(minutes=10), + status=WorkflowExecutionStatus.SUCCEEDED, + page=1, + limit=2, + ) + + assert result_time_status_limit["total"] >= 0 + assert len(result_time_status_limit["data"]) <= 2 + + def test_get_paginate_workflow_app_logs_with_large_dataset_performance( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with large dataset for performance validation. 
+ """ + # Arrange: Create a larger dataset + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + workflow, _, _ = self._create_test_workflow_data(db_session_with_containers, app, account) + + # Create 50 logs to test performance with larger datasets + logs_data = [] + for i in range(50): + status = "succeeded" if i % 3 == 0 else "failed" if i % 3 == 1 else "running" + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + status=status, + inputs=json.dumps({"input": f"performance_test_input_{i}", "index": i}), + outputs=json.dumps({"output": f"performance_test_output_{i}"}) if status == "succeeded" else None, + error=json.dumps({"error": f"performance_test_error_{i}"}) if status == "failed" else None, + elapsed_time=1.5, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + finished_at=datetime.now(UTC) + timedelta(minutes=i + 1) if status != "running" else None, + ) + db_session_with_containers.add(workflow_run) + db_session_with_containers.flush() + + log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i), + ) + db_session_with_containers.add(log) + logs_data.append((log, workflow_run)) + + db_session_with_containers.commit() + + service = WorkflowAppService() + + # Test performance with large dataset and pagination + import time + + start_time = time.time() + + result_large = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=1, limit=20 + ) + + end_time = time.time() + execution_time = end_time - start_time + + # Performance assertions + assert result_large["total"] == 51 # 50 new logs + 1 from _create_test_workflow_data + assert len(result_large["data"]) == 20 + assert execution_time < 5.0 # Should complete within 5 seconds + + # Test pagination through large dataset + result_page_2 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=2, limit=20 + ) + + assert result_page_2["total"] == 51 # 50 new logs + 1 from _create_test_workflow_data + assert len(result_page_2["data"]) == 20 + assert result_page_2["page"] == 2 + + # Test last page + result_last_page = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app, page=3, limit=20 + ) + + assert result_last_page["total"] == 51 # 50 new logs + 1 from _create_test_workflow_data + assert len(result_last_page["data"]) == 11 # Last page should have remaining items (10 + 1) + assert result_last_page["page"] == 3 + + def test_get_paginate_workflow_app_logs_with_tenant_isolation( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test workflow app logs pagination with proper tenant isolation. 
+ """ + # Arrange: Create multiple tenants and apps + fake = Faker() + + # Create first tenant and app + tenant1, account1 = self._create_test_tenant_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + app1 = self._create_test_app(db_session_with_containers, tenant1, account1) + workflow1, _, _ = self._create_test_workflow_data(db_session_with_containers, app1, account1) + + # Create second tenant and app + tenant2, account2 = self._create_test_tenant_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + app2 = self._create_test_app(db_session_with_containers, tenant2, account2) + workflow2, _, _ = self._create_test_workflow_data(db_session_with_containers, app2, account2) + + # Create logs for both tenants + for i, (app, workflow, account) in enumerate([(app1, workflow1, account1), (app2, workflow2, account2)]): + for j in range(3): + workflow_run = WorkflowRun( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + type="workflow", + triggered_from="app-run", + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + status="succeeded", + inputs=json.dumps({"input": f"tenant_{i}_input_{j}"}), + outputs=json.dumps({"output": f"tenant_{i}_output_{j}"}), + elapsed_time=1.5, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), + finished_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j + 1), + ) + db_session_with_containers.add(workflow_run) + db_session_with_containers.flush() + + log = WorkflowAppLog( + id=str(uuid.uuid4()), + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow.id, + workflow_run_id=workflow_run.id, + created_from="service-api", + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC) + timedelta(minutes=i * 10 + j), + ) + db_session_with_containers.add(log) + + db_session_with_containers.commit() + + service = WorkflowAppService() + + # Test tenant isolation: tenant1 should only see its own logs + result_tenant1 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app1, page=1, limit=20 + ) + + assert result_tenant1["total"] == 4 # 3 new logs + 1 from _create_test_workflow_data + for log in result_tenant1["data"]: + assert log.tenant_id == app1.tenant_id + assert log.app_id == app1.id + + # Test tenant isolation: tenant2 should only see its own logs + result_tenant2 = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, app_model=app2, page=1, limit=20 + ) + + assert result_tenant2["total"] == 4 # 3 new logs + 1 from _create_test_workflow_data + for log in result_tenant2["data"]: + assert log.tenant_id == app2.tenant_id + assert log.app_id == app2.id + + # Test cross-tenant search should not work + result_cross_tenant = service.get_paginate_workflow_app_logs( + session=db_session_with_containers, + app_model=app1, + keyword="tenant_1_input", # Search for tenant2's data from tenant1's context + page=1, + limit=20, + ) + + # Should not find tenant2's data when searching from tenant1's context + assert result_cross_tenant["total"] == 0 diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py new file mode 100644 index 0000000000..4cb21ef6bd --- /dev/null +++ 
b/api/tests/test_containers_integration_tests/services/test_workflow_run_service.py @@ -0,0 +1,713 @@ +import json +import uuid +from datetime import UTC, datetime, timedelta +from unittest.mock import patch + +import pytest +from faker import Faker + +from models.enums import CreatorUserRole +from models.model import ( + Message, +) +from models.workflow import WorkflowRun +from services.account_service import AccountService, TenantService +from services.app_service import AppService +from services.workflow_run_service import WorkflowRunService + + +class TestWorkflowRunService: + """Integration tests for WorkflowRunService using testcontainers.""" + + @pytest.fixture + def mock_external_service_dependencies(self): + """Mock setup for external service dependencies.""" + with ( + patch("services.app_service.FeatureService") as mock_feature_service, + patch("services.app_service.EnterpriseService") as mock_enterprise_service, + patch("services.app_service.ModelManager") as mock_model_manager, + patch("services.account_service.FeatureService") as mock_account_feature_service, + ): + # Setup default mock returns for app service + mock_feature_service.get_system_features.return_value.webapp_auth.enabled = False + mock_enterprise_service.WebAppAuth.update_app_access_mode.return_value = None + mock_enterprise_service.WebAppAuth.cleanup_webapp.return_value = None + + # Setup default mock returns for account service + mock_account_feature_service.get_system_features.return_value.is_allow_register = True + + # Mock ModelManager for model configuration + mock_model_instance = mock_model_manager.return_value + mock_model_instance.get_default_model_instance.return_value = None + mock_model_instance.get_default_provider_model_name.return_value = ("openai", "gpt-3.5-turbo") + + yield { + "feature_service": mock_feature_service, + "enterprise_service": mock_enterprise_service, + "model_manager": mock_model_manager, + "account_feature_service": mock_account_feature_service, + } + + def _create_test_app_and_account(self, db_session_with_containers, mock_external_service_dependencies): + """ + Helper method to create a test app and account for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + mock_external_service_dependencies: Mock dependencies + + Returns: + tuple: (app, account) - Created app and account instances + """ + fake = Faker() + + # Setup mocks for account creation + mock_external_service_dependencies[ + "account_feature_service" + ].get_system_features.return_value.is_allow_register = True + + # Create account and tenant + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=fake.password(length=12), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + # Create app with realistic data + app_args = { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🤖", + "icon_background": "#FF6B6B", + "api_rph": 100, + "api_rpm": 10, + } + + app_service = AppService() + app = app_service.create_app(tenant.id, app_args, account) + + return app, account + + def _create_test_workflow_run( + self, db_session_with_containers, app, account, triggered_from="debugging", offset_minutes=0 + ): + """ + Helper method to create a test workflow run for testing. 
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance + account: Account instance + triggered_from: Trigger source for workflow run + + Returns: + WorkflowRun: Created workflow run instance + """ + fake = Faker() + + from extensions.ext_database import db + + # Create workflow run with offset timestamp + base_time = datetime.now(UTC) + created_time = base_time - timedelta(minutes=offset_minutes) + + workflow_run = WorkflowRun( + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=str(uuid.uuid4()), + type="chat", + triggered_from=triggered_from, + version="1.0.0", + graph=json.dumps({"nodes": [], "edges": []}), + inputs=json.dumps({"input": "test"}), + status="succeeded", + outputs=json.dumps({"output": "test result"}), + elapsed_time=1.5, + total_tokens=100, + total_steps=3, + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=created_time, + finished_at=created_time, + ) + + db.session.add(workflow_run) + db.session.commit() + + return workflow_run + + def _create_test_message(self, db_session_with_containers, app, account, workflow_run): + """ + Helper method to create a test message for testing. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: App instance + account: Account instance + workflow_run: WorkflowRun instance + + Returns: + Message: Created message instance + """ + fake = Faker() + + from extensions.ext_database import db + + # Create conversation first (required for message) + from models.model import Conversation + + conversation = Conversation( + app_id=app.id, + name=fake.sentence(), + inputs={}, + status="normal", + mode="chat", + from_source=CreatorUserRole.ACCOUNT.value, + from_account_id=account.id, + ) + db.session.add(conversation) + db.session.commit() + + # Create message + message = Message() + message.app_id = app.id + message.conversation_id = conversation.id + message.query = fake.text(max_nb_chars=100) + message.message = {"type": "text", "content": fake.text(max_nb_chars=100)} + message.answer = fake.text(max_nb_chars=200) + message.message_tokens = 50 + message.answer_tokens = 100 + message.message_unit_price = 0.001 + message.answer_unit_price = 0.002 + message.message_price_unit = 0.001 + message.answer_price_unit = 0.001 + message.currency = "USD" + message.status = "normal" + message.from_source = CreatorUserRole.ACCOUNT.value + message.from_account_id = account.id + message.workflow_run_id = workflow_run.id + message.inputs = {"input": "test input"} + + db.session.add(message) + db.session.commit() + + return message + + def test_get_paginate_workflow_runs_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful pagination of workflow runs with debugging trigger. 
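+
+        Illustrative sketch of the paginated call (hypothetical values; the
+        args dict carries the request parameters):
+
+            result = workflow_run_service.get_paginate_workflow_runs(
+                app, {"limit": 3, "last_id": None}
+            )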
+ + This test verifies: + - Proper pagination of workflow runs + - Correct filtering by triggered_from + - Proper limit and last_id handling + - Repository method calls + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create multiple workflow runs + workflow_runs = [] + for i in range(5): + workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging") + workflow_runs.append(workflow_run) + + # Act: Execute the method under test + workflow_run_service = WorkflowRunService() + args = {"limit": 3, "last_id": None} + result = workflow_run_service.get_paginate_workflow_runs(app, args) + + # Assert: Verify the expected outcomes + assert result is not None + assert hasattr(result, "data") + assert len(result.data) == 3 # Should return 3 items due to limit + + # Verify pagination properties + assert hasattr(result, "has_more") + assert hasattr(result, "limit") + + # Verify all returned items are debugging runs + for workflow_run in result.data: + assert workflow_run.triggered_from == "debugging" + assert workflow_run.app_id == app.id + assert workflow_run.tenant_id == app.tenant_id + + def test_get_paginate_workflow_runs_with_last_id( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test pagination of workflow runs with last_id parameter. + + This test verifies: + - Proper pagination with last_id parameter + - Correct handling of pagination state + - Repository method calls with proper parameters + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create multiple workflow runs with different timestamps + workflow_runs = [] + for i in range(5): + workflow_run = self._create_test_workflow_run( + db_session_with_containers, app, account, "debugging", offset_minutes=i + ) + workflow_runs.append(workflow_run) + + # Act: Execute the method under test with last_id + workflow_run_service = WorkflowRunService() + args = {"limit": 2, "last_id": workflow_runs[1].id} + result = workflow_run_service.get_paginate_workflow_runs(app, args) + + # Assert: Verify the expected outcomes + assert result is not None + assert hasattr(result, "data") + assert len(result.data) == 2 # Should return 2 items due to limit + + # Verify pagination properties + assert hasattr(result, "has_more") + assert hasattr(result, "limit") + + # Verify all returned items are debugging runs + for workflow_run in result.data: + assert workflow_run.triggered_from == "debugging" + assert workflow_run.app_id == app.id + assert workflow_run.tenant_id == app.tenant_id + + def test_get_paginate_workflow_runs_default_limit( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test pagination of workflow runs with default limit. 
+ + This test verifies: + - Default limit of 20 when not specified + - Proper handling of missing limit parameter + - Repository method calls with default values + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create workflow runs + workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging") + + # Act: Execute the method under test without limit + workflow_run_service = WorkflowRunService() + args = {} # No limit specified + result = workflow_run_service.get_paginate_workflow_runs(app, args) + + # Assert: Verify the expected outcomes + assert result is not None + assert hasattr(result, "data") + + # Verify pagination properties + assert hasattr(result, "has_more") + assert hasattr(result, "limit") + + # Verify the returned workflow run + if result.data: + workflow_run_result = result.data[0] + assert workflow_run_result.triggered_from == "debugging" + assert workflow_run_result.app_id == app.id + assert workflow_run_result.tenant_id == app.tenant_id + + def test_get_paginate_advanced_chat_workflow_runs_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful pagination of advanced chat workflow runs with message information. + + This test verifies: + - Proper pagination of advanced chat workflow runs + - Correct filtering by triggered_from + - Message information enrichment + - WorkflowWithMessage wrapper functionality + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create workflow runs with messages + workflow_runs = [] + for i in range(3): + workflow_run = self._create_test_workflow_run( + db_session_with_containers, app, account, "debugging", offset_minutes=i + ) + message = self._create_test_message(db_session_with_containers, app, account, workflow_run) + workflow_runs.append(workflow_run) + + # Act: Execute the method under test + workflow_run_service = WorkflowRunService() + args = {"limit": 2, "last_id": None} + result = workflow_run_service.get_paginate_advanced_chat_workflow_runs(app, args) + + # Assert: Verify the expected outcomes + assert result is not None + assert hasattr(result, "data") + assert len(result.data) == 2 # Should return 2 items due to limit + + # Verify pagination properties + assert hasattr(result, "has_more") + assert hasattr(result, "limit") + + # Verify all returned items have message information + for workflow_run in result.data: + assert hasattr(workflow_run, "message_id") + assert hasattr(workflow_run, "conversation_id") + assert workflow_run.app_id == app.id + assert workflow_run.tenant_id == app.tenant_id + + def test_get_workflow_run_success(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test successful retrieval of workflow run by ID. 
+ + This test verifies: + - Proper workflow run retrieval by ID + - Correct tenant and app isolation + - Repository method calls with proper parameters + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create workflow run + workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging") + + # Act: Execute the method under test + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_run(app, workflow_run.id) + + # Assert: Verify the expected outcomes + assert result is not None + assert result.id == workflow_run.id + assert result.tenant_id == app.tenant_id + assert result.app_id == app.id + assert result.triggered_from == "debugging" + assert result.status == "succeeded" + assert result.type == "chat" + assert result.version == "1.0.0" + + def test_get_workflow_run_not_found(self, db_session_with_containers, mock_external_service_dependencies): + """ + Test workflow run retrieval when run ID does not exist. + + This test verifies: + - Proper handling of non-existent workflow run IDs + - Repository method calls with proper parameters + - Return value for missing records + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Use a non-existent UUID + non_existent_id = str(uuid.uuid4()) + + # Act: Execute the method under test + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_run(app, non_existent_id) + + # Assert: Verify the expected outcomes + assert result is None + + def test_get_workflow_run_node_executions_success( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test successful retrieval of workflow run node executions. 
+ + This test verifies: + - Proper node execution retrieval for workflow run + - Correct tenant and app isolation + - Repository method calls with proper parameters + - Context setup for plugin tool providers + """ + # Arrange: Create test data + fake = Faker() + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Create workflow run + workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging") + + # Create node executions + from extensions.ext_database import db + from models.workflow import WorkflowNodeExecutionModel + + node_executions = [] + for i in range(3): + node_execution = WorkflowNodeExecutionModel( + tenant_id=app.tenant_id, + app_id=app.id, + workflow_id=workflow_run.workflow_id, + triggered_from="workflow-run", + workflow_run_id=workflow_run.id, + index=i, + node_id=f"node_{i}", + node_type="llm" if i == 0 else "tool", + title=f"Node {i}", + inputs=json.dumps({"input": f"test_input_{i}"}), + process_data=json.dumps({"process": f"test_process_{i}"}), + status="succeeded", + elapsed_time=0.5, + execution_metadata=json.dumps({"tokens": 50}), + created_by_role=CreatorUserRole.ACCOUNT.value, + created_by=account.id, + created_at=datetime.now(UTC), + ) + db.session.add(node_execution) + node_executions.append(node_execution) + + db.session.commit() + + # Act: Execute the method under test + workflow_run_service = WorkflowRunService() + result = workflow_run_service.get_workflow_run_node_executions(app, workflow_run.id, account) + + # Assert: Verify the expected outcomes + assert result is not None + assert len(result) == 3 + + # Verify node execution properties + for node_execution in result: + assert node_execution.tenant_id == app.tenant_id + assert node_execution.app_id == app.id + assert node_execution.workflow_run_id == workflow_run.id + assert node_execution.index in [0, 1, 2] # Check that index is one of the expected values + assert node_execution.node_id.startswith("node_") # Check that node_id starts with "node_" + assert node_execution.status == "succeeded" + + def test_get_workflow_run_node_executions_empty( + self, db_session_with_containers, mock_external_service_dependencies + ): + """ + Test getting node executions for a workflow run with no executions. 
+
+        This test verifies:
+        - Empty result when no node executions exist
+        - Proper handling of empty data
+        - No errors when querying non-existent executions
+        """
+        # Arrange: Setup test data
+        account_service = AccountService()
+        app_service = AppService()
+        workflow_run_service = WorkflowRunService()
+
+        # Create account and tenant
+        account = account_service.create_account(
+            email="test@example.com",
+            name="Test User",
+            password="password123",
+            interface_language="en-US",
+        )
+        TenantService.create_owner_tenant_if_not_exist(account, name="test_tenant")
+        tenant = account.current_tenant
+
+        # Create app
+        app_args = {
+            "name": "Test App",
+            "mode": "chat",
+            "icon_type": "emoji",
+            "icon": "🚀",
+            "icon_background": "#4ECDC4",
+        }
+        app = app_service.create_app(tenant.id, app_args, account)
+
+        # Create workflow run without node executions
+        workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging")
+
+        # Act: Get node executions
+        result = workflow_run_service.get_workflow_run_node_executions(
+            app_model=app,
+            run_id=workflow_run.id,
+            user=account,
+        )
+
+        # Assert: Verify empty result
+        assert result is not None
+        assert len(result) == 0
+
+    def test_get_workflow_run_node_executions_invalid_workflow_run_id(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test getting node executions with invalid workflow run ID.
+
+        This test verifies:
+        - Proper handling of invalid workflow run ID
+        - Empty result when workflow run doesn't exist
+        - No errors when querying with invalid ID
+        """
+        # Arrange: Setup test data
+        account_service = AccountService()
+        app_service = AppService()
+        workflow_run_service = WorkflowRunService()
+
+        # Create account and tenant
+        account = account_service.create_account(
+            email="test@example.com",
+            name="Test User",
+            password="password123",
+            interface_language="en-US",
+        )
+        TenantService.create_owner_tenant_if_not_exist(account, name="test_tenant")
+        tenant = account.current_tenant
+
+        # Create app
+        app_args = {
+            "name": "Test App",
+            "mode": "chat",
+            "icon_type": "emoji",
+            "icon": "🚀",
+            "icon_background": "#4ECDC4",
+        }
+        app = app_service.create_app(tenant.id, app_args, account)
+
+        # Use invalid workflow run ID
+        invalid_workflow_run_id = str(uuid.uuid4())
+
+        # Act: Get node executions with invalid ID
+        result = workflow_run_service.get_workflow_run_node_executions(
+            app_model=app,
+            run_id=invalid_workflow_run_id,
+            user=account,
+        )
+
+        # Assert: Verify empty result
+        assert result is not None
+        assert len(result) == 0
+
+    def test_get_workflow_run_node_executions_database_error(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test getting node executions when database encounters an error.
+
+        This test verifies:
+        - Proper error handling when database operations fail
+        - Graceful degradation in error scenarios
+        - Error propagation to calling code
+        """
+        # Arrange: Setup test data
+        account_service = AccountService()
+        app_service = AppService()
+        workflow_run_service = WorkflowRunService()
+
+        # Create account and tenant
+        account = account_service.create_account(
+            email="test@example.com",
+            name="Test User",
+            password="password123",
+            interface_language="en-US",
+        )
+        TenantService.create_owner_tenant_if_not_exist(account, name="test_tenant")
+        tenant = account.current_tenant
+
+        # Create app
+        app_args = {
+            "name": "Test App",
+            "mode": "chat",
+            "icon_type": "emoji",
+            "icon": "🚀",
+            "icon_background": "#4ECDC4",
+        }
+        app = app_service.create_app(tenant.id, app_args, account)
+
+        # Create workflow run
+        workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging")
+
+        # Mock database error by closing the session
+        db_session_with_containers.close()
+
+        # Act & Assert: Verify error handling
+        with pytest.raises(Exception):
+            workflow_run_service.get_workflow_run_node_executions(
+                app_model=app,
+                run_id=workflow_run.id,
+                user=account,
+            )
+
+    def test_get_workflow_run_node_executions_end_user(
+        self, db_session_with_containers, mock_external_service_dependencies
+    ):
+        """
+        Test node execution retrieval for end user.
+
+        This test verifies:
+        - Proper handling of end user vs account user
+        - Correct tenant ID extraction for end users
+        - Repository method calls with proper parameters
+        """
+        # Arrange: Create test data
+        fake = Faker()
+        app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies)
+
+        # Create workflow run
+        workflow_run = self._create_test_workflow_run(db_session_with_containers, app, account, "debugging")
+
+        # Create end user
+        from extensions.ext_database import db
+        from models.model import EndUser
+
+        end_user = EndUser(
+            tenant_id=app.tenant_id,
+            app_id=app.id,
+            type="web_app",
+            is_anonymous=False,
+            session_id=str(uuid.uuid4()),
+            external_user_id=str(uuid.uuid4()),
+            name=fake.name(),
+        )
+        db.session.add(end_user)
+        db.session.commit()
+
+        # Create node execution
+        from models.workflow import WorkflowNodeExecutionModel
+
+        node_execution = WorkflowNodeExecutionModel(
+            tenant_id=app.tenant_id,
+            app_id=app.id,
+            workflow_id=workflow_run.workflow_id,
+            triggered_from="workflow-run",
+            workflow_run_id=workflow_run.id,
+            index=0,
+            node_id="node_0",
+            node_type="llm",
+            title="Node 0",
+            inputs=json.dumps({"input": "test_input"}),
+            process_data=json.dumps({"process": "test_process"}),
+            status="succeeded",
+            elapsed_time=0.5,
+            execution_metadata=json.dumps({"tokens": 50}),
+            created_by_role=CreatorUserRole.END_USER.value,
+            created_by=end_user.id,
+            created_at=datetime.now(UTC),
+        )
+        db.session.add(node_execution)
+        db.session.commit()
+
+        # Act: Execute the method under test
+        workflow_run_service = WorkflowRunService()
+        result = workflow_run_service.get_workflow_run_node_executions(app, workflow_run.id, end_user)
+
+        # Assert: Verify the expected outcomes
+        assert result is not None
+        assert len(result) == 1
+
+        # Verify node execution properties
+        node_exec = result[0]
+        assert node_exec.tenant_id == app.tenant_id
+        assert node_exec.app_id == app.id
+        assert node_exec.workflow_run_id == workflow_run.id
+        assert node_exec.created_by == end_user.id
+        assert node_exec.created_by_role == CreatorUserRole.END_USER.value
diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_service.py
new file mode 100644
index 0000000000..018eb6d896
--- /dev/null
+++ b/api/tests/test_containers_integration_tests/services/test_workflow_service.py
@@ -0,0 +1,1585 @@
+"""
+TestContainers-based integration tests for WorkflowService.
+
+This module provides comprehensive integration testing for WorkflowService using
+TestContainers to ensure realistic database interactions and proper isolation.
+"""
+
+import json
+from unittest.mock import MagicMock
+
+import pytest
+from faker import Faker
+
+from models import Account, App, Workflow
+from models.model import AppMode
+from models.workflow import WorkflowType
+from services.workflow_service import WorkflowService
+
+
+class TestWorkflowService:
+    """
+    Comprehensive integration tests for WorkflowService using testcontainers.
+
+    This test class covers all major functionality of the WorkflowService:
+    - Workflow CRUD operations (Create, Read, Update, Delete)
+    - Workflow publishing and versioning
+    - Node execution and workflow running
+    - Workflow conversion and validation
+    - Error handling for various edge cases
+
+    All tests use the testcontainers infrastructure to ensure proper database isolation
+    and a realistic testing environment with actual database interactions.
+    """
+
+    def _create_test_account(self, db_session_with_containers, fake=None):
+        """
+        Helper method to create a test account with realistic data.
+
+        Args:
+            db_session_with_containers: Database session from testcontainers infrastructure
+            fake: Faker instance for generating test data
+
+        Returns:
+            Account: Created test account instance
+        """
+        fake = fake or Faker()
+        account = Account()
+        account.id = fake.uuid4()
+        account.email = fake.email()
+        account.name = fake.name()
+        account.avatar_url = fake.url()
+        account.tenant_id = fake.uuid4()
+        account.status = "active"
+        account.type = "normal"
+        account.role = "owner"
+        account.interface_language = "en-US"  # Set interface language for Site creation
+        account.created_at = fake.date_time_this_year()
+        account.updated_at = account.created_at
+
+        # Create a tenant for the account
+        from models.account import Tenant
+
+        tenant = Tenant()
+        tenant.id = account.tenant_id
+        tenant.name = f"Test Tenant {fake.company()}"
+        tenant.plan = "basic"
+        tenant.status = "active"
+        tenant.created_at = fake.date_time_this_year()
+        tenant.updated_at = tenant.created_at
+
+        from extensions.ext_database import db
+
+        db.session.add(tenant)
+        db.session.add(account)
+        db.session.commit()
+
+        # Set the current tenant for the account
+        account.current_tenant = tenant
+
+        return account
+
+    def _create_test_app(self, db_session_with_containers, fake=None):
+        """
+        Helper method to create a test app with realistic data.
+ + Args: + db_session_with_containers: Database session from testcontainers infrastructure + fake: Faker instance for generating test data + + Returns: + App: Created test app instance + """ + fake = fake or Faker() + app = App() + app.id = fake.uuid4() + app.tenant_id = fake.uuid4() + app.name = fake.company() + app.description = fake.text() + app.mode = AppMode.WORKFLOW.value + app.icon_type = "emoji" + app.icon = "🤖" + app.icon_background = "#FFEAD5" + app.enable_site = True + app.enable_api = True + app.created_by = fake.uuid4() + app.updated_by = app.created_by + app.workflow_id = None # Will be set when workflow is created + + from extensions.ext_database import db + + db.session.add(app) + db.session.commit() + return app + + def _create_test_workflow(self, db_session_with_containers, app, account, fake=None): + """ + Helper method to create a test workflow associated with an app. + + Args: + db_session_with_containers: Database session from testcontainers infrastructure + app: The app to associate the workflow with + account: The account creating the workflow + fake: Faker instance for generating test data + + Returns: + Workflow: Created test workflow instance + """ + fake = fake or Faker() + workflow = Workflow() + workflow.id = fake.uuid4() + workflow.tenant_id = app.tenant_id + workflow.app_id = app.id + workflow.type = WorkflowType.WORKFLOW.value + workflow.version = Workflow.VERSION_DRAFT + workflow.graph = json.dumps({"nodes": [], "edges": []}) + workflow.features = json.dumps({"features": []}) + # unique_hash is a computed property based on graph and features + workflow.created_by = account.id + workflow.updated_by = account.id + workflow.environment_variables = [] + workflow.conversation_variables = [] + + from extensions.ext_database import db + + db.session.add(workflow) + db.session.commit() + return workflow + + def test_get_node_last_run_success(self, db_session_with_containers): + """ + Test successful retrieval of the most recent execution for a specific node. + + This test verifies that the service can correctly retrieve the last execution + record for a workflow node, which is essential for debugging and monitoring + workflow execution history. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + + # Create a mock node execution record + from models.enums import CreatorUserRole + from models.workflow import WorkflowNodeExecutionModel + + node_execution = WorkflowNodeExecutionModel() + node_execution.id = fake.uuid4() + node_execution.tenant_id = app.tenant_id + node_execution.app_id = app.id + node_execution.workflow_id = workflow.id + node_execution.triggered_from = "single-step" # Required field + node_execution.index = 1 # Required field + node_execution.node_id = "test-node-1" + node_execution.node_type = "test_node" + node_execution.title = "Test Node" # Required field + node_execution.status = "succeeded" + node_execution.created_by_role = CreatorUserRole.ACCOUNT.value # Required field + node_execution.created_by = account.id # Required field + node_execution.created_at = fake.date_time_this_year() + + from extensions.ext_database import db + + db.session.add(node_execution) + db.session.commit() + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_node_last_run(app, workflow, "test-node-1") + + # Assert + assert result is not None + assert result.node_id == "test-node-1" + assert result.workflow_id == workflow.id + assert result.status == "succeeded" + + def test_get_node_last_run_not_found(self, db_session_with_containers): + """ + Test retrieval when no execution record exists for the specified node. + + This test ensures that the service correctly handles cases where there are + no previous executions for a node, returning None as expected. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_node_last_run(app, workflow, "non-existent-node") + + # Assert + assert result is None + + def test_is_workflow_exist_true(self, db_session_with_containers): + """ + Test workflow existence check when a draft workflow exists. + + This test verifies that the service correctly identifies when a draft workflow + exists for an application, which is important for workflow management operations. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + + workflow_service = WorkflowService() + + # Act + result = workflow_service.is_workflow_exist(app) + + # Assert + assert result is True + + def test_is_workflow_exist_false(self, db_session_with_containers): + """ + Test workflow existence check when no draft workflow exists. + + This test ensures that the service correctly identifies when no draft workflow + exists for an application, which is the initial state for new apps. 
+ """ + # Arrange + fake = Faker() + app = self._create_test_app(db_session_with_containers, fake) + # Don't create any workflow + + workflow_service = WorkflowService() + + # Act + result = workflow_service.is_workflow_exist(app) + + # Assert + assert result is False + + def test_get_draft_workflow_success(self, db_session_with_containers): + """ + Test successful retrieval of a draft workflow. + + This test verifies that the service can correctly retrieve an existing + draft workflow for an application, which is essential for workflow editing + and development workflows. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_draft_workflow(app) + + # Assert + assert result is not None + assert result.id == workflow.id + assert result.version == Workflow.VERSION_DRAFT + assert result.app_id == app.id + assert result.tenant_id == app.tenant_id + + def test_get_draft_workflow_not_found(self, db_session_with_containers): + """ + Test draft workflow retrieval when no draft workflow exists. + + This test ensures that the service correctly handles cases where there is + no draft workflow for an application, returning None as expected. + """ + # Arrange + fake = Faker() + app = self._create_test_app(db_session_with_containers, fake) + # Don't create any workflow + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_draft_workflow(app) + + # Assert + assert result is None + + def test_get_published_workflow_by_id_success(self, db_session_with_containers): + """ + Test successful retrieval of a published workflow by ID. + + This test verifies that the service can correctly retrieve a published + workflow using its ID, which is essential for workflow execution and + reference operations. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create a published workflow (not draft) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = "2024.01.01.001" # Published version + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_published_workflow_by_id(app, workflow.id) + + # Assert + assert result is not None + assert result.id == workflow.id + assert result.version != Workflow.VERSION_DRAFT + assert result.app_id == app.id + + def test_get_published_workflow_by_id_draft_error(self, db_session_with_containers): + """ + Test error when trying to retrieve a draft workflow as published. + + This test ensures that the service correctly prevents access to draft + workflows when a published version is requested, maintaining proper + workflow version control. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + # Keep as draft version + + workflow_service = WorkflowService() + + # Act & Assert + from services.errors.app import IsDraftWorkflowError + + with pytest.raises(IsDraftWorkflowError): + workflow_service.get_published_workflow_by_id(app, workflow.id) + + def test_get_published_workflow_by_id_not_found(self, db_session_with_containers): + """ + Test retrieval when no workflow exists with the specified ID. + + This test ensures that the service correctly handles cases where the + requested workflow ID doesn't exist in the system. + """ + # Arrange + fake = Faker() + app = self._create_test_app(db_session_with_containers, fake) + non_existent_workflow_id = fake.uuid4() + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_published_workflow_by_id(app, non_existent_workflow_id) + + # Assert + assert result is None + + def test_get_published_workflow_success(self, db_session_with_containers): + """ + Test successful retrieval of the current published workflow for an app. + + This test verifies that the service can correctly retrieve the published + workflow that is currently associated with an application. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create a published workflow and associate it with the app + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = "2024.01.01.001" # Published version + + from extensions.ext_database import db + + app.workflow_id = workflow.id + db.session.commit() + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_published_workflow(app) + + # Assert + assert result is not None + assert result.id == workflow.id + assert result.version != Workflow.VERSION_DRAFT + assert result.app_id == app.id + + def test_get_published_workflow_no_workflow_id(self, db_session_with_containers): + """ + Test retrieval when app has no associated workflow ID. + + This test ensures that the service correctly handles cases where an + application doesn't have any published workflow associated with it. + """ + # Arrange + fake = Faker() + app = self._create_test_app(db_session_with_containers, fake) + # app.workflow_id is None by default + + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_published_workflow(app) + + # Assert + assert result is None + + def test_get_all_published_workflow_pagination(self, db_session_with_containers): + """ + Test pagination of published workflows. + + This test verifies that the service can correctly paginate through + published workflows, supporting large workflow collections and + efficient data retrieval. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create multiple published workflows + workflows = [] + for i in range(5): + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = f"2024.01.0{i + 1}.001" # Published version + workflow.marked_name = f"Workflow {i + 1}" + workflows.append(workflow) + + # Set the app's workflow_id to the first workflow + app.workflow_id = workflows[0].id + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act - First page + result_workflows, has_more = workflow_service.get_all_published_workflow( + session=db.session, + app_model=app, + page=1, + limit=3, + user_id=None, # Show all workflows + ) + + # Assert + assert len(result_workflows) == 3 + assert has_more is True + + # Act - Second page + result_workflows, has_more = workflow_service.get_all_published_workflow( + session=db.session, + app_model=app, + page=2, + limit=3, + user_id=None, # Show all workflows + ) + + # Assert + assert len(result_workflows) == 2 + assert has_more is False + + def test_get_all_published_workflow_user_filter(self, db_session_with_containers): + """ + Test filtering published workflows by user. + + This test verifies that the service can correctly filter workflows + by the user who created them, supporting user-specific workflow + management and access control. + """ + # Arrange + fake = Faker() + account1 = self._create_test_account(db_session_with_containers, fake) + account2 = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create workflows by different users + workflow1 = self._create_test_workflow(db_session_with_containers, app, account1, fake) + workflow1.version = "2024.01.01.001" # Published version + workflow1.created_by = account1.id + + workflow2 = self._create_test_workflow(db_session_with_containers, app, account2, fake) + workflow2.version = "2024.01.02.001" # Published version + workflow2.created_by = account2.id + + # Set the app's workflow_id to the first workflow + app.workflow_id = workflow1.id + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act - Filter by account1 + result_workflows, has_more = workflow_service.get_all_published_workflow( + session=db.session, app_model=app, page=1, limit=10, user_id=account1.id + ) + + # Assert + assert len(result_workflows) == 1 + assert result_workflows[0].created_by == account1.id + + def test_get_all_published_workflow_named_only_filter(self, db_session_with_containers): + """ + Test filtering published workflows to show only named workflows. + + This test verifies that the service correctly filters workflows + to show only those with marked names, supporting workflow + organization and management features. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create workflows with and without names + workflow1 = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow1.version = "2024.01.01.001" # Published version + workflow1.marked_name = "Named Workflow 1" + + workflow2 = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow2.version = "2024.01.02.001" # Published version + workflow2.marked_name = "" # No name + + workflow3 = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow3.version = "2024.01.03.001" # Published version + workflow3.marked_name = "Named Workflow 3" + + # Set the app's workflow_id to the first workflow + app.workflow_id = workflow1.id + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act - Filter named only + result_workflows, has_more = workflow_service.get_all_published_workflow( + session=db.session, app_model=app, page=1, limit=10, user_id=None, named_only=True + ) + + # Assert + assert len(result_workflows) == 2 + assert all(wf.marked_name for wf in result_workflows) + + def test_sync_draft_workflow_create_new(self, db_session_with_containers): + """ + Test creating a new draft workflow through sync operation. + + This test verifies that the service can correctly create a new draft + workflow when none exists, which is the initial workflow setup process. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + graph = {"nodes": [{"id": "start", "type": "start"}], "edges": []} + features = {"features": ["feature1", "feature2"]} + # Don't pre-calculate hash, let the service generate it + unique_hash = None + + environment_variables = [] + conversation_variables = [] + + workflow_service = WorkflowService() + + # Act + result = workflow_service.sync_draft_workflow( + app_model=app, + graph=graph, + features=features, + unique_hash=unique_hash, + account=account, + environment_variables=environment_variables, + conversation_variables=conversation_variables, + ) + + # Assert + assert result is not None + assert result.version == Workflow.VERSION_DRAFT + assert result.app_id == app.id + assert result.tenant_id == app.tenant_id + assert result.unique_hash is not None # Should have a hash generated + assert result.graph == json.dumps(graph) + assert result.features == json.dumps(features) + assert result.created_by == account.id + + def test_sync_draft_workflow_update_existing(self, db_session_with_containers): + """ + Test updating an existing draft workflow through sync operation. + + This test verifies that the service can correctly update an existing + draft workflow with new graph and features data. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create existing draft workflow + existing_workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + # Get the actual hash that was generated + original_hash = existing_workflow.unique_hash + + new_graph = {"nodes": [{"id": "start", "type": "start"}, {"id": "end", "type": "end"}], "edges": []} + new_features = {"features": ["feature1", "feature2", "feature3"]} + + environment_variables = [] + conversation_variables = [] + + workflow_service = WorkflowService() + + # Act + result = workflow_service.sync_draft_workflow( + app_model=app, + graph=new_graph, + features=new_features, + unique_hash=original_hash, # Use original hash to allow update + account=account, + environment_variables=environment_variables, + conversation_variables=conversation_variables, + ) + + # Assert + assert result is not None + assert result.id == existing_workflow.id # Same workflow updated + assert result.version == Workflow.VERSION_DRAFT + # Hash should be updated to reflect new content + assert result.unique_hash != original_hash # Hash should change after update + assert result.graph == json.dumps(new_graph) + assert result.features == json.dumps(new_features) + assert result.updated_by == account.id + + def test_sync_draft_workflow_hash_mismatch_error(self, db_session_with_containers): + """ + Test error when sync is attempted with mismatched hash. + + This test ensures that the service correctly prevents workflow sync + when the hash doesn't match, maintaining workflow consistency and + preventing concurrent modification conflicts. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create existing draft workflow + existing_workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + # Get the actual hash that was generated + original_hash = existing_workflow.unique_hash + + new_graph = {"nodes": [{"id": "start", "type": "start"}], "edges": []} + new_features = {"features": ["feature1"]} + # Use a different hash to trigger the error + mismatched_hash = "different_hash_12345" + environment_variables = [] + conversation_variables = [] + + workflow_service = WorkflowService() + + # Act & Assert + from services.errors.app import WorkflowHashNotEqualError + + with pytest.raises(WorkflowHashNotEqualError): + workflow_service.sync_draft_workflow( + app_model=app, + graph=new_graph, + features=new_features, + unique_hash=mismatched_hash, + account=account, + environment_variables=environment_variables, + conversation_variables=conversation_variables, + ) + + def test_publish_workflow_success(self, db_session_with_containers): + """ + Test successful workflow publishing. + + This test verifies that the service can correctly publish a draft + workflow, creating a new published version with proper versioning + and status management. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create draft workflow + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = Workflow.VERSION_DRAFT + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act - Mock current_user context and pass session + from unittest.mock import patch + + with patch("flask_login.utils._get_user", return_value=account): + result = workflow_service.publish_workflow( + session=db_session_with_containers, app_model=app, account=account + ) + + # Assert + assert result is not None + assert result.version != Workflow.VERSION_DRAFT + # Version should be a timestamp format like '2025-08-22 00:10:24.722051' + assert isinstance(result.version, str) + assert len(result.version) > 10 # Should be a reasonable timestamp length + assert result.created_by == account.id + + def test_publish_workflow_no_draft_error(self, db_session_with_containers): + """ + Test error when publishing workflow without draft. + + This test ensures that the service correctly prevents publishing + when no draft workflow exists, maintaining workflow state consistency. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Don't create any workflow - app should have no draft + + workflow_service = WorkflowService() + + # Act & Assert + with pytest.raises(ValueError, match="No valid workflow found"): + workflow_service.publish_workflow(session=db_session_with_containers, app_model=app, account=account) + + def test_publish_workflow_already_published_error(self, db_session_with_containers): + """ + Test error when publishing already published workflow. + + This test ensures that the service correctly prevents re-publishing + of already published workflows, maintaining version control integrity. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create already published workflow + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = "2024.01.01.001" # Already published + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act & Assert + with pytest.raises(ValueError, match="No valid workflow found"): + workflow_service.publish_workflow(session=db_session_with_containers, app_model=app, account=account) + + def test_get_default_block_configs(self, db_session_with_containers): + """ + Test retrieval of default block configurations for all node types. + + This test verifies that the service can correctly retrieve default + configurations for all available workflow node types, which is + essential for workflow design and configuration. 
+ """ + # Arrange + workflow_service = WorkflowService() + + # Act + result = workflow_service.get_default_block_configs() + + # Assert + assert isinstance(result, list) + # The list might be empty if no default configs are available + # This is acceptable behavior + + # Check that each config has required structure if any exist + for config in result: + assert isinstance(config, dict) + # The structure can vary, so we just check it's a dict + + def test_get_default_block_config_specific_type(self, db_session_with_containers): + """ + Test retrieval of default block configuration for a specific node type. + + This test verifies that the service can correctly retrieve default + configuration for a specific workflow node type, supporting targeted + workflow node configuration. + """ + # Arrange + workflow_service = WorkflowService() + node_type = "start" # Common node type + + # Act + result = workflow_service.get_default_block_config(node_type=node_type) + + # Assert + # The result might be None if no default config is available for this node type + # This is acceptable behavior + assert result is None or isinstance(result, dict) + + def test_get_default_block_config_invalid_type(self, db_session_with_containers): + """ + Test retrieval of default block configuration for invalid node type. + + This test ensures that the service correctly handles requests for + invalid or non-existent node types, returning None as expected. + """ + # Arrange + workflow_service = WorkflowService() + invalid_node_type = "invalid_node_type_12345" + + # Act + try: + result = workflow_service.get_default_block_config(node_type=invalid_node_type) + # If we get here, the service should return None for invalid types + assert result is None + except ValueError: + # It's also acceptable for the service to raise a ValueError for invalid types + pass + + def test_get_default_block_config_with_filters(self, db_session_with_containers): + """ + Test retrieval of default block configuration with filters. + + This test verifies that the service can correctly apply filters + when retrieving default configurations, supporting conditional + configuration retrieval. + """ + # Arrange + workflow_service = WorkflowService() + node_type = "start" + filters = {"category": "input"} + + # Act + result = workflow_service.get_default_block_config(node_type=node_type, filters=filters) + + # Assert + # Result might be None if filters don't match, but should not raise error + assert result is None or isinstance(result, dict) + + def test_convert_to_workflow_chat_mode_success(self, db_session_with_containers): + """ + Test successful conversion from chat mode app to workflow mode. + + This test verifies that the service can correctly convert a chatbot + application to workflow mode, which is essential for app mode migration. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + + # Create chat mode app + app = self._create_test_app(db_session_with_containers, fake) + app.mode = AppMode.CHAT.value + + # Create app model config (required for conversion) + from models.model import AppModelConfig + + app_model_config = AppModelConfig() + app_model_config.id = fake.uuid4() + app_model_config.app_id = app.id + app_model_config.tenant_id = app.tenant_id + app_model_config.provider = "openai" + app_model_config.model_id = "gpt-3.5-turbo" + # Set the model field directly - this is what model_dict property returns + app_model_config.model = json.dumps( + { + "provider": "openai", + "name": "gpt-3.5-turbo", + "completion_params": {"max_tokens": 1000, "temperature": 0.7}, + } + ) + # Set pre_prompt for PromptTemplateConfigManager + app_model_config.pre_prompt = "You are a helpful assistant." + app_model_config.created_by = account.id + app_model_config.updated_by = account.id + + from extensions.ext_database import db + + db.session.add(app_model_config) + app.app_model_config_id = app_model_config.id + db.session.commit() + + workflow_service = WorkflowService() + conversion_args = { + "name": "Converted Workflow App", + "icon_type": "emoji", + "icon": "🚀", + "icon_background": "#FF5733", + } + + # Act + result = workflow_service.convert_to_workflow(app_model=app, account=account, args=conversion_args) + + # Assert + assert result is not None + assert result.mode == AppMode.ADVANCED_CHAT.value # CHAT mode converts to ADVANCED_CHAT, not WORKFLOW + assert result.name == conversion_args["name"] + assert result.icon == conversion_args["icon"] + assert result.icon_type == conversion_args["icon_type"] + assert result.icon_background == conversion_args["icon_background"] + + def test_convert_to_workflow_completion_mode_success(self, db_session_with_containers): + """ + Test successful conversion from completion mode app to workflow mode. + + This test verifies that the service can correctly convert a completion + application to workflow mode, supporting different app type migrations. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + + # Create completion mode app + app = self._create_test_app(db_session_with_containers, fake) + app.mode = AppMode.COMPLETION.value + + # Create app model config (required for conversion) + from models.model import AppModelConfig + + app_model_config = AppModelConfig() + app_model_config.id = fake.uuid4() + app_model_config.app_id = app.id + app_model_config.tenant_id = app.tenant_id + app_model_config.provider = "openai" + app_model_config.model_id = "gpt-3.5-turbo" + # Set the model field directly - this is what model_dict property returns + app_model_config.model = json.dumps( + { + "provider": "openai", + "name": "gpt-3.5-turbo", + "completion_params": {"max_tokens": 1000, "temperature": 0.7}, + } + ) + # Set pre_prompt for PromptTemplateConfigManager + app_model_config.pre_prompt = "Complete the following text:" + app_model_config.created_by = account.id + app_model_config.updated_by = account.id + + from extensions.ext_database import db + + db.session.add(app_model_config) + app.app_model_config_id = app_model_config.id + db.session.commit() + + workflow_service = WorkflowService() + conversion_args = { + "name": "Converted Workflow App", + "icon_type": "emoji", + "icon": "🚀", + "icon_background": "#FF5733", + } + + # Act + result = workflow_service.convert_to_workflow(app_model=app, account=account, args=conversion_args) + + # Assert + assert result is not None + assert result.mode == AppMode.WORKFLOW.value + assert result.name == conversion_args["name"] + assert result.icon == conversion_args["icon"] + assert result.icon_type == conversion_args["icon_type"] + assert result.icon_background == conversion_args["icon_background"] + + def test_convert_to_workflow_unsupported_mode_error(self, db_session_with_containers): + """ + Test error when attempting to convert unsupported app mode. + + This test ensures that the service correctly prevents conversion + of apps that are not in supported modes for workflow conversion. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + + # Create workflow mode app (already in workflow mode) + app = self._create_test_app(db_session_with_containers, fake) + app.mode = AppMode.WORKFLOW.value + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + conversion_args = {"name": "Test"} + + # Act & Assert + with pytest.raises(ValueError, match="Current App mode: workflow is not supported convert to workflow"): + workflow_service.convert_to_workflow(app_model=app, account=account, args=conversion_args) + + def test_validate_features_structure_advanced_chat(self, db_session_with_containers): + """ + Test feature structure validation for advanced chat mode apps. + + This test verifies that the service can correctly validate feature + structures for advanced chat applications, ensuring proper configuration. 
+        """
+        # Arrange
+        fake = Faker()
+        app = self._create_test_app(db_session_with_containers, fake)
+        app.mode = AppMode.ADVANCED_CHAT.value
+
+        from extensions.ext_database import db
+
+        db.session.commit()
+
+        workflow_service = WorkflowService()
+        features = {
+            "opening_statement": "Hello!",
+            "suggested_questions": ["Question 1", "Question 2"],
+            "more_like_this": True,
+        }
+
+        # Act
+        result = workflow_service.validate_features_structure(app_model=app, features=features)
+
+        # Assert
+        # The validation should return the validated config or raise an error
+        # The exact behavior depends on the AdvancedChatAppConfigManager implementation
+        assert result is not None
+
+    def test_validate_features_structure_workflow(self, db_session_with_containers):
+        """
+        Test feature structure validation for workflow mode apps.
+
+        This test verifies that the service can correctly validate feature
+        structures for workflow applications, ensuring proper configuration.
+        """
+        # Arrange
+        fake = Faker()
+        app = self._create_test_app(db_session_with_containers, fake)
+        app.mode = AppMode.WORKFLOW.value
+
+        from extensions.ext_database import db
+
+        db.session.commit()
+
+        workflow_service = WorkflowService()
+        features = {"workflow_config": {"max_steps": 10, "timeout": 300}}
+
+        # Act
+        result = workflow_service.validate_features_structure(app_model=app, features=features)
+
+        # Assert
+        # The validation should return the validated config or raise an error
+        # The exact behavior depends on the WorkflowAppConfigManager implementation
+        assert result is not None
+
+    def test_validate_features_structure_invalid_mode(self, db_session_with_containers):
+        """
+        Test error when validating features for invalid app mode.
+
+        This test ensures that the service correctly handles feature validation
+        for unsupported app modes, preventing invalid operations.
+        """
+        # Arrange
+        fake = Faker()
+        app = self._create_test_app(db_session_with_containers, fake)
+        app.mode = "invalid_mode"  # Invalid mode
+
+        from extensions.ext_database import db
+
+        db.session.commit()
+
+        workflow_service = WorkflowService()
+        features = {"test": "value"}
+
+        # Act & Assert
+        with pytest.raises(ValueError, match="Invalid app mode: invalid_mode"):
+            workflow_service.validate_features_structure(app_model=app, features=features)
+
+    def test_update_workflow_success(self, db_session_with_containers):
+        """
+        Test successful workflow update with allowed fields.
+
+        This test verifies that the service can correctly update workflow
+        attributes like marked_name and marked_comment, supporting workflow
+        metadata management.
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + update_data = {"marked_name": "Updated Workflow Name", "marked_comment": "Updated workflow comment"} + + # Act + result = workflow_service.update_workflow( + session=db.session, + workflow_id=workflow.id, + tenant_id=workflow.tenant_id, + account_id=account.id, + data=update_data, + ) + + # Assert + assert result is not None + assert result.marked_name == update_data["marked_name"] + assert result.marked_comment == update_data["marked_comment"] + assert result.updated_by == account.id + + def test_update_workflow_not_found(self, db_session_with_containers): + """ + Test workflow update when workflow doesn't exist. + + This test ensures that the service correctly handles update attempts + on non-existent workflows, returning None as expected. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + from extensions.ext_database import db + + workflow_service = WorkflowService() + non_existent_workflow_id = fake.uuid4() + update_data = {"marked_name": "Test"} + + # Act + result = workflow_service.update_workflow( + session=db.session, + workflow_id=non_existent_workflow_id, + tenant_id=app.tenant_id, + account_id=account.id, + data=update_data, + ) + + # Assert + assert result is None + + def test_update_workflow_ignores_disallowed_fields(self, db_session_with_containers): + """ + Test that workflow update ignores disallowed fields. + + This test verifies that the service correctly filters update data, + only allowing modifications to permitted fields and ignoring others. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + original_name = workflow.marked_name + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + update_data = { + "marked_name": "Allowed Update", + "graph": "disallowed_field", # Should be ignored + "features": "disallowed_field", # Should be ignored + } + + # Act + result = workflow_service.update_workflow( + session=db.session, + workflow_id=workflow.id, + tenant_id=workflow.tenant_id, + account_id=account.id, + data=update_data, + ) + + # Assert + assert result is not None + assert result.marked_name == "Allowed Update" # Allowed field updated + # Disallowed fields should not be changed + assert result.graph == workflow.graph + assert result.features == workflow.features + + def test_delete_workflow_success(self, db_session_with_containers): + """ + Test successful workflow deletion. + + This test verifies that the service can correctly delete a workflow + when it's not in use and not a draft version, supporting workflow + lifecycle management. 
+ """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create a published workflow (not draft) + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = "2024.01.01.001" # Published version + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act + result = workflow_service.delete_workflow( + session=db.session, workflow_id=workflow.id, tenant_id=workflow.tenant_id + ) + + # Assert + assert result is True + + # Verify workflow is actually deleted + deleted_workflow = db.session.query(Workflow).filter_by(id=workflow.id).first() + assert deleted_workflow is None + + def test_delete_workflow_draft_error(self, db_session_with_containers): + """ + Test error when attempting to delete a draft workflow. + + This test ensures that the service correctly prevents deletion + of draft workflows, maintaining workflow development integrity. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create draft workflow + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + # Keep as draft version + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act & Assert + from services.errors.workflow_service import DraftWorkflowDeletionError + + with pytest.raises(DraftWorkflowDeletionError, match="Cannot delete draft workflow versions"): + workflow_service.delete_workflow(session=db.session, workflow_id=workflow.id, tenant_id=workflow.tenant_id) + + def test_delete_workflow_in_use_error(self, db_session_with_containers): + """ + Test error when attempting to delete a workflow that's in use by an app. + + This test ensures that the service correctly prevents deletion + of workflows that are currently referenced by applications. + """ + # Arrange + fake = Faker() + account = self._create_test_account(db_session_with_containers, fake) + app = self._create_test_app(db_session_with_containers, fake) + + # Create a published workflow + workflow = self._create_test_workflow(db_session_with_containers, app, account, fake) + workflow.version = "2024.01.01.001" # Published version + + # Associate workflow with app + app.workflow_id = workflow.id + + from extensions.ext_database import db + + db.session.commit() + + workflow_service = WorkflowService() + + # Act & Assert + from services.errors.workflow_service import WorkflowInUseError + + with pytest.raises(WorkflowInUseError, match="Cannot delete workflow that is currently in use by app"): + workflow_service.delete_workflow(session=db.session, workflow_id=workflow.id, tenant_id=workflow.tenant_id) + + def test_delete_workflow_not_found_error(self, db_session_with_containers): + """ + Test error when attempting to delete a non-existent workflow. + + This test ensures that the service correctly handles deletion + attempts on workflows that don't exist in the system. 
+ """ + # Arrange + fake = Faker() + app = self._create_test_app(db_session_with_containers, fake) + non_existent_workflow_id = fake.uuid4() + + from extensions.ext_database import db + + workflow_service = WorkflowService() + + # Act & Assert + with pytest.raises(ValueError, match=f"Workflow with ID {non_existent_workflow_id} not found"): + workflow_service.delete_workflow( + session=db.session, workflow_id=non_existent_workflow_id, tenant_id=app.tenant_id + ) + + def test_run_free_workflow_node_success(self, db_session_with_containers): + """ + Test successful execution of a free workflow node. + + This test verifies that the service can correctly execute a standalone + workflow node without requiring a full workflow context, supporting + node testing and development workflows. + """ + # Arrange + fake = Faker() + tenant_id = fake.uuid4() + user_id = fake.uuid4() + node_id = "test-node-1" + node_data = { + "type": "parameter-extractor", # Use supported NodeType + "title": "Parameter Extractor Node", # Required by BaseNodeData + "model": { + "provider": "openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {"max_tokens": 1000, "temperature": 0.7}, + }, + "query": ["Extract parameters from the input"], + "parameters": [{"name": "param1", "type": "string", "description": "First parameter", "required": True}], + "reasoning_mode": "function_call", + } + user_inputs = {"input1": "test_value"} + + workflow_service = WorkflowService() + + # Act + result = workflow_service.run_free_workflow_node( + node_data=node_data, tenant_id=tenant_id, user_id=user_id, node_id=node_id, user_inputs=user_inputs + ) + + # Assert + assert result is not None + assert result.node_id == node_id + assert result.workflow_id == "" # No workflow ID for free nodes + assert result.index == 1 + + def test_run_free_workflow_node_with_complex_inputs(self, db_session_with_containers): + """ + Test execution of a free workflow node with complex input data. + + This test verifies that the service can handle complex input structures + when executing free workflow nodes, supporting realistic workflow scenarios. + + Note: This test is currently simplified to avoid external service dependencies + that are not available in the test environment. + """ + # Arrange + fake = Faker() + tenant_id = fake.uuid4() + user_id = fake.uuid4() + node_id = "complex-node-1" + + # Use a simple node type that doesn't require external services + node_data = { + "type": "start", # Use start node type which has minimal dependencies + "title": "Start Node", # Required by BaseNodeData + } + user_inputs = { + "text_input": "Sample text", + "number_input": 42, + "list_input": ["item1", "item2", "item3"], + "dict_input": {"key1": "value1", "key2": "value2"}, + } + + workflow_service = WorkflowService() + + # Act + # Since start nodes are not supported in run_free_node, we expect an error + with pytest.raises(Exception) as exc_info: + workflow_service.run_free_workflow_node( + node_data=node_data, tenant_id=tenant_id, user_id=user_id, node_id=node_id, user_inputs=user_inputs + ) + + # Verify the error message indicates the expected issue + error_msg = str(exc_info.value).lower() + assert any(keyword in error_msg for keyword in ["start", "not supported", "external"]) + + def test_handle_node_run_result_success(self, db_session_with_containers): + """ + Test successful handling of node run results. 
+ + This test verifies that the service can correctly process and format + successful node execution results, ensuring proper data structure + for workflow execution tracking. + """ + # Arrange + fake = Faker() + node_id = "test-node-1" + start_at = fake.unix_time() + + # Mock successful node execution + def mock_successful_invoke(): + from core.workflow.entities.node_entities import NodeRunResult + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + from core.workflow.nodes.base.node import BaseNode + from core.workflow.nodes.event import RunCompletedEvent + + # Create mock node + mock_node = MagicMock(spec=BaseNode) + mock_node.type_ = "start" # Use valid NodeType + mock_node.title = "Test Node" + mock_node.continue_on_error = False + + # Create mock result with valid metadata + mock_result = NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + process_data={"process1": "data1"}, + metadata={"total_tokens": 100}, # Use valid metadata field + ) + + # Create mock event + mock_event = RunCompletedEvent(run_result=mock_result) + + return mock_node, [mock_event] + + workflow_service = WorkflowService() + + # Act + result = workflow_service._handle_node_run_result( + invoke_node_fn=mock_successful_invoke, start_at=start_at, node_id=node_id + ) + + # Assert + assert result is not None + assert result.node_id == node_id + assert result.node_type == "start" # Should match the mock node type + assert result.title == "Test Node" + # Import the enum for comparison + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.inputs is not None + assert result.outputs is not None + assert result.process_data is not None + + def test_handle_node_run_result_failure(self, db_session_with_containers): + """ + Test handling of failed node run results. + + This test verifies that the service can correctly process and format + failed node execution results, ensuring proper error handling and + status tracking for workflow execution. 
+ """ + # Arrange + fake = Faker() + node_id = "test-node-1" + start_at = fake.unix_time() + + # Mock failed node execution + def mock_failed_invoke(): + from core.workflow.entities.node_entities import NodeRunResult + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + from core.workflow.nodes.base.node import BaseNode + from core.workflow.nodes.event import RunCompletedEvent + + # Create mock node + mock_node = MagicMock(spec=BaseNode) + mock_node.type_ = "llm" # Use valid NodeType + mock_node.title = "Test Node" + mock_node.continue_on_error = False + + # Create mock failed result + mock_result = NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs={"input1": "value1"}, + error="Test error message", + error_type="TestError", + ) + + # Create mock event + mock_event = RunCompletedEvent(run_result=mock_result) + + return mock_node, [mock_event] + + workflow_service = WorkflowService() + + # Act + result = workflow_service._handle_node_run_result( + invoke_node_fn=mock_failed_invoke, start_at=start_at, node_id=node_id + ) + + # Assert + assert result is not None + assert result.node_id == node_id + # Import the enum for comparison + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + + assert result.status == WorkflowNodeExecutionStatus.FAILED + assert result.error is not None + assert "Test error message" in str(result.error) + + def test_handle_node_run_result_continue_on_error(self, db_session_with_containers): + """ + Test handling of node run results with continue_on_error strategy. + + This test verifies that the service can correctly handle nodes + configured to continue execution even when errors occur, supporting + resilient workflow execution strategies. 
+ """ + # Arrange + fake = Faker() + node_id = "test-node-1" + start_at = fake.unix_time() + + # Mock node execution with continue_on_error + def mock_continue_on_error_invoke(): + from core.workflow.entities.node_entities import NodeRunResult + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + from core.workflow.nodes.base.node import BaseNode + from core.workflow.nodes.enums import ErrorStrategy + from core.workflow.nodes.event import RunCompletedEvent + + # Create mock node with continue_on_error + mock_node = MagicMock(spec=BaseNode) + mock_node.type_ = "tool" # Use valid NodeType + mock_node.title = "Test Node" + mock_node.continue_on_error = True + mock_node.error_strategy = ErrorStrategy.DEFAULT_VALUE + mock_node.default_value_dict = {"default_output": "default_value"} + + # Create mock failed result + mock_result = NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs={"input1": "value1"}, + error="Test error message", + error_type="TestError", + ) + + # Create mock event + mock_event = RunCompletedEvent(run_result=mock_result) + + return mock_node, [mock_event] + + workflow_service = WorkflowService() + + # Act + result = workflow_service._handle_node_run_result( + invoke_node_fn=mock_continue_on_error_invoke, start_at=start_at, node_id=node_id + ) + + # Assert + assert result is not None + assert result.node_id == node_id + # Import the enum for comparison + from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus + + assert result.status == WorkflowNodeExecutionStatus.EXCEPTION # Should be EXCEPTION, not FAILED + assert result.outputs is not None + assert "default_output" in result.outputs + assert result.outputs["default_output"] == "default_value" + assert "error_message" in result.outputs + assert "error_type" in result.outputs diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index 0ae6a09f5b..0c7473019a 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -8,7 +8,7 @@ from yarl import URL from configs.app_config import DifyConfig -def test_dify_config(monkeypatch): +def test_dify_config(monkeypatch: pytest.MonkeyPatch): # clear system environment variables os.environ.clear() @@ -48,7 +48,7 @@ def test_dify_config(monkeypatch): # NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected. # This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`. 
-def test_flask_configs(monkeypatch): +def test_flask_configs(monkeypatch: pytest.MonkeyPatch): flask_app = Flask("app") # clear system environment variables os.environ.clear() @@ -90,6 +90,7 @@ def test_flask_configs(monkeypatch): "pool_recycle": 3600, "pool_size": 30, "pool_use_lifo": False, + "pool_reset_on_return": None, } assert config["CONSOLE_WEB_URL"] == "https://example.com" @@ -100,7 +101,7 @@ def test_flask_configs(monkeypatch): assert str(URL(str(config["CODE_EXECUTION_ENDPOINT"])) / "v1") == "http://127.0.0.1:8194/v1" -def test_inner_api_config_exist(monkeypatch): +def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") @@ -118,7 +119,7 @@ def test_inner_api_config_exist(monkeypatch): assert len(config.INNER_API_KEY) > 0 -def test_db_extras_options_merging(monkeypatch): +def test_db_extras_options_merging(monkeypatch: pytest.MonkeyPatch): """Test that DB_EXTRAS options are properly merged with default timezone setting""" # Set environment variables monkeypatch.setenv("DB_USERNAME", "postgres") @@ -163,7 +164,13 @@ def test_db_extras_options_merging(monkeypatch): ], ) def test_celery_broker_url_with_special_chars_password( - monkeypatch, broker_url, expected_host, expected_port, expected_username, expected_password, expected_db + monkeypatch: pytest.MonkeyPatch, + broker_url, + expected_host, + expected_port, + expected_username, + expected_password, + expected_db, ): """Test that CELERY_BROKER_URL with various formats are handled correctly.""" from kombu.utils.url import parse_url diff --git a/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py b/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py index f26be6702a..ac3c8e45c9 100644 --- a/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py +++ b/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py @@ -1,9 +1,8 @@ -import datetime import uuid from collections import OrderedDict from typing import Any, NamedTuple -from flask_restful import marshal +from flask_restx import marshal from controllers.console.app.workflow_draft_variable import ( _WORKFLOW_DRAFT_VARIABLE_FIELDS, @@ -13,6 +12,7 @@ from controllers.console.app.workflow_draft_variable import ( ) from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID from factories.variable_factory import build_segment +from libs.datetime_utils import naive_utc_now from models.workflow import WorkflowDraftVariable from services.workflow_draft_variable_service import WorkflowDraftVariableList @@ -57,7 +57,7 @@ class TestWorkflowDraftVariableFields: ) sys_var.id = str(uuid.uuid4()) - sys_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + sys_var.last_edited_at = naive_utc_now() sys_var.visible = True expected_without_value = OrderedDict( @@ -88,7 +88,7 @@ class TestWorkflowDraftVariableFields: ) node_var.id = str(uuid.uuid4()) - node_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + node_var.last_edited_at = naive_utc_now() expected_without_value: OrderedDict[str, Any] = OrderedDict( { diff --git a/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py b/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py new file mode 100644 index 0000000000..aefb4bf8b0 --- /dev/null +++ 
b/api/tests/unit_tests/controllers/console/auth/test_authentication_security.py @@ -0,0 +1,134 @@ +"""Test authentication security to prevent user enumeration.""" + +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask +from flask_restx import Api + +import services.errors.account +from controllers.console.auth.error import AuthenticationFailedError +from controllers.console.auth.login import LoginApi +from controllers.console.error import AccountNotFound + + +class TestAuthenticationSecurity: + """Test authentication endpoints for security against user enumeration.""" + + def setup_method(self): + """Set up test fixtures.""" + self.app = Flask(__name__) + self.api = Api(self.app) + self.api.add_resource(LoginApi, "/login") + self.client = self.app.test_client() + self.app.config["TESTING"] = True + + @patch("controllers.console.wraps.db") + @patch("controllers.console.auth.login.FeatureService.get_system_features") + @patch("controllers.console.auth.login.AccountService.is_login_error_rate_limit") + @patch("controllers.console.auth.login.AccountService.authenticate") + @patch("controllers.console.auth.login.AccountService.send_reset_password_email") + @patch("controllers.console.auth.login.dify_config.BILLING_ENABLED", False) + @patch("controllers.console.auth.login.RegisterService.get_invitation_if_token_valid") + def test_login_invalid_email_with_registration_allowed( + self, mock_get_invitation, mock_send_email, mock_authenticate, mock_is_rate_limit, mock_features, mock_db + ): + """Test that invalid email sends reset password email when registration is allowed.""" + # Arrange + mock_is_rate_limit.return_value = False + mock_get_invitation.return_value = None + mock_authenticate.side_effect = services.errors.account.AccountNotFoundError("Account not found") + mock_db.session.query.return_value.first.return_value = MagicMock() # Mock setup exists + mock_features.return_value.is_allow_register = True + mock_send_email.return_value = "token123" + + # Act + with self.app.test_request_context( + "/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"} + ): + login_api = LoginApi() + result = login_api.post() + + # Assert + assert result == {"result": "fail", "data": "token123", "code": "account_not_found"} + mock_send_email.assert_called_once_with(email="nonexistent@example.com", language="en-US") + + @patch("controllers.console.wraps.db") + @patch("controllers.console.auth.login.AccountService.is_login_error_rate_limit") + @patch("controllers.console.auth.login.AccountService.authenticate") + @patch("controllers.console.auth.login.AccountService.add_login_error_rate_limit") + @patch("controllers.console.auth.login.dify_config.BILLING_ENABLED", False) + @patch("controllers.console.auth.login.RegisterService.get_invitation_if_token_valid") + def test_login_wrong_password_returns_error( + self, mock_get_invitation, mock_add_rate_limit, mock_authenticate, mock_is_rate_limit, mock_db + ): + """Test that wrong password returns AuthenticationFailedError.""" + # Arrange + mock_is_rate_limit.return_value = False + mock_get_invitation.return_value = None + mock_authenticate.side_effect = services.errors.account.AccountPasswordError("Wrong password") + mock_db.session.query.return_value.first.return_value = MagicMock() # Mock setup exists + + # Act + with self.app.test_request_context( + "/login", method="POST", json={"email": "existing@example.com", "password": "WrongPass123!"} + ): + login_api = LoginApi() + + # Assert + with 
pytest.raises(AuthenticationFailedError) as exc_info: + login_api.post() + + assert exc_info.value.error_code == "authentication_failed" + assert exc_info.value.description == "Invalid email or password." + mock_add_rate_limit.assert_called_once_with("existing@example.com") + + @patch("controllers.console.wraps.db") + @patch("controllers.console.auth.login.FeatureService.get_system_features") + @patch("controllers.console.auth.login.AccountService.is_login_error_rate_limit") + @patch("controllers.console.auth.login.AccountService.authenticate") + @patch("controllers.console.auth.login.dify_config.BILLING_ENABLED", False) + @patch("controllers.console.auth.login.RegisterService.get_invitation_if_token_valid") + def test_login_invalid_email_with_registration_disabled( + self, mock_get_invitation, mock_authenticate, mock_is_rate_limit, mock_features, mock_db + ): + """Test that invalid email raises AccountNotFound when registration is disabled.""" + # Arrange + mock_is_rate_limit.return_value = False + mock_get_invitation.return_value = None + mock_authenticate.side_effect = services.errors.account.AccountNotFoundError("Account not found") + mock_db.session.query.return_value.first.return_value = MagicMock() # Mock setup exists + mock_features.return_value.is_allow_register = False + + # Act + with self.app.test_request_context( + "/login", method="POST", json={"email": "nonexistent@example.com", "password": "WrongPass123!"} + ): + login_api = LoginApi() + + # Assert + with pytest.raises(AccountNotFound) as exc_info: + login_api.post() + + assert exc_info.value.error_code == "account_not_found" + + @patch("controllers.console.wraps.db") + @patch("controllers.console.auth.login.FeatureService.get_system_features") + @patch("controllers.console.auth.login.AccountService.get_user_through_email") + @patch("controllers.console.auth.login.AccountService.send_reset_password_email") + def test_reset_password_with_existing_account(self, mock_send_email, mock_get_user, mock_features, mock_db): + """Test that reset password returns success with token for existing accounts.""" + # Mock the setup check + mock_db.session.query.return_value.first.return_value = MagicMock() # Mock setup exists + + # Test with existing account + mock_get_user.return_value = MagicMock(email="existing@example.com") + mock_send_email.return_value = "token123" + + with self.app.test_request_context("/reset-password", method="POST", json={"email": "existing@example.com"}): + from controllers.console.auth.login import ResetPasswordSendEmailApi + + api = ResetPasswordSendEmailApi() + result = api.post() + + assert result == {"result": "success", "data": "token123"} diff --git a/api/tests/unit_tests/core/mcp/client/test_sse.py b/api/tests/unit_tests/core/mcp/client/test_sse.py index 880a0d4940..aadd366762 100644 --- a/api/tests/unit_tests/core/mcp/client/test_sse.py +++ b/api/tests/unit_tests/core/mcp/client/test_sse.py @@ -1,3 +1,4 @@ +import contextlib import json import queue import threading @@ -124,13 +125,10 @@ def test_sse_client_connection_validation(): mock_event_source.iter_sse.return_value = [endpoint_event] # Test connection - try: + with contextlib.suppress(Exception): with sse_client(test_url) as (read_queue, write_queue): assert read_queue is not None assert write_queue is not None - except Exception as e: - # Connection might fail due to mocking, but we're testing the validation logic - pass def test_sse_client_error_handling(): @@ -178,7 +176,7 @@ def test_sse_client_timeout_configuration(): 
mock_event_source.iter_sse.return_value = [] mock_sse_connect.return_value.__enter__.return_value = mock_event_source - try: + with contextlib.suppress(Exception): with sse_client( test_url, headers=custom_headers, timeout=custom_timeout, sse_read_timeout=custom_sse_timeout ) as (read_queue, write_queue): @@ -190,9 +188,6 @@ def test_sse_client_timeout_configuration(): assert call_args is not None timeout_arg = call_args[1]["timeout"] assert timeout_arg.read == custom_sse_timeout - except Exception: - # Connection might fail due to mocking, but we tested the configuration - pass def test_sse_transport_endpoint_validation(): @@ -251,12 +246,10 @@ def test_sse_client_queue_cleanup(): # Mock connection that raises an exception mock_sse_connect.side_effect = Exception("Connection failed") - try: + with contextlib.suppress(Exception): with sse_client(test_url) as (rq, wq): read_queue = rq write_queue = wq - except Exception: - pass # Expected to fail # Queues should be cleaned up even on exception # Note: In real implementation, cleanup should put None to signal shutdown @@ -283,11 +276,9 @@ def test_sse_client_headers_propagation(): mock_event_source.iter_sse.return_value = [] mock_sse_connect.return_value.__enter__.return_value = mock_event_source - try: + with contextlib.suppress(Exception): with sse_client(test_url, headers=custom_headers): pass - except Exception: - pass # Expected due to mocking # Verify headers were passed to client factory mock_client_factory.assert_called_with(headers=custom_headers) diff --git a/api/tests/unit_tests/core/model_runtime/entities/test_llm_entities.py b/api/tests/unit_tests/core/model_runtime/entities/test_llm_entities.py new file mode 100644 index 0000000000..c10f7b89c3 --- /dev/null +++ b/api/tests/unit_tests/core/model_runtime/entities/test_llm_entities.py @@ -0,0 +1,148 @@ +"""Tests for LLMUsage entity.""" + +from decimal import Decimal + +from core.model_runtime.entities.llm_entities import LLMUsage, LLMUsageMetadata + + +class TestLLMUsage: + """Test cases for LLMUsage class.""" + + def test_from_metadata_with_all_tokens(self): + """Test from_metadata when all token types are provided.""" + metadata: LLMUsageMetadata = { + "prompt_tokens": 100, + "completion_tokens": 50, + "total_tokens": 150, + "prompt_unit_price": 0.001, + "completion_unit_price": 0.002, + "total_price": 0.2, + "currency": "USD", + "latency": 1.5, + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 100 + assert usage.completion_tokens == 50 + assert usage.total_tokens == 150 + assert usage.prompt_unit_price == Decimal("0.001") + assert usage.completion_unit_price == Decimal("0.002") + assert usage.total_price == Decimal("0.2") + assert usage.currency == "USD" + assert usage.latency == 1.5 + + def test_from_metadata_with_prompt_tokens_only(self): + """Test from_metadata when only prompt_tokens is provided.""" + metadata: LLMUsageMetadata = { + "prompt_tokens": 100, + "total_tokens": 100, + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 100 + assert usage.completion_tokens == 0 + assert usage.total_tokens == 100 + + def test_from_metadata_with_completion_tokens_only(self): + """Test from_metadata when only completion_tokens is provided.""" + metadata: LLMUsageMetadata = { + "completion_tokens": 50, + "total_tokens": 50, + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 0 + assert usage.completion_tokens == 50 + assert usage.total_tokens == 50 + + def 
test_from_metadata_calculates_total_when_missing(self): + """Test from_metadata calculates total_tokens when not provided.""" + metadata: LLMUsageMetadata = { + "prompt_tokens": 100, + "completion_tokens": 50, + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 100 + assert usage.completion_tokens == 50 + assert usage.total_tokens == 150 # Should be calculated + + def test_from_metadata_with_total_but_no_completion(self): + """ + Test from_metadata when total_tokens is provided but completion_tokens is 0. + This tests the fix for issue #24360 - prompt tokens should NOT be assigned to completion_tokens. + """ + metadata: LLMUsageMetadata = { + "prompt_tokens": 479, + "completion_tokens": 0, + "total_tokens": 521, + } + + usage = LLMUsage.from_metadata(metadata) + + # This is the key fix - prompt tokens should remain as prompt tokens + assert usage.prompt_tokens == 479 + assert usage.completion_tokens == 0 + assert usage.total_tokens == 521 + + def test_from_metadata_with_empty_metadata(self): + """Test from_metadata with empty metadata.""" + metadata: LLMUsageMetadata = {} + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 0 + assert usage.completion_tokens == 0 + assert usage.total_tokens == 0 + assert usage.currency == "USD" + assert usage.latency == 0.0 + + def test_from_metadata_preserves_zero_completion_tokens(self): + """ + Test that zero completion_tokens are preserved when explicitly set. + This is important for agent nodes that only use prompt tokens. + """ + metadata: LLMUsageMetadata = { + "prompt_tokens": 1000, + "completion_tokens": 0, + "total_tokens": 1000, + "prompt_unit_price": 0.15, + "completion_unit_price": 0.60, + "prompt_price": 0.00015, + "completion_price": 0, + "total_price": 0.00015, + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_tokens == 1000 + assert usage.completion_tokens == 0 + assert usage.total_tokens == 1000 + assert usage.prompt_price == Decimal("0.00015") + assert usage.completion_price == Decimal(0) + assert usage.total_price == Decimal("0.00015") + + def test_from_metadata_with_decimal_values(self): + """Test from_metadata handles decimal values correctly.""" + metadata: LLMUsageMetadata = { + "prompt_tokens": 100, + "completion_tokens": 50, + "total_tokens": 150, + "prompt_unit_price": "0.001", + "completion_unit_price": "0.002", + "prompt_price": "0.1", + "completion_price": "0.1", + "total_price": "0.2", + } + + usage = LLMUsage.from_metadata(metadata) + + assert usage.prompt_unit_price == Decimal("0.001") + assert usage.completion_unit_price == Decimal("0.002") + assert usage.prompt_price == Decimal("0.1") + assert usage.completion_price == Decimal("0.1") + assert usage.total_price == Decimal("0.2") diff --git a/api/tests/unit_tests/core/plugin/__init__.py b/api/tests/unit_tests/core/plugin/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/plugin/utils/__init__.py b/api/tests/unit_tests/core/plugin/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/core/plugin/utils/test_chunk_merger.py b/api/tests/unit_tests/core/plugin/utils/test_chunk_merger.py new file mode 100644 index 0000000000..e0eace0f2d --- /dev/null +++ b/api/tests/unit_tests/core/plugin/utils/test_chunk_merger.py @@ -0,0 +1,460 @@ +from collections.abc import Generator + +import pytest + +from core.agent.entities import AgentInvokeMessage +from core.plugin.utils.chunk_merger import FileChunk, 
merge_blob_chunks +from core.tools.entities.tool_entities import ToolInvokeMessage + + +class TestChunkMerger: + def test_file_chunk_initialization(self): + """Test FileChunk initialization.""" + chunk = FileChunk(1024) + assert chunk.bytes_written == 0 + assert chunk.total_length == 1024 + assert len(chunk.data) == 1024 + + def test_merge_blob_chunks_with_single_complete_chunk(self): + """Test merging a single complete blob chunk.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # First chunk (partial) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=10, blob=b"Hello", end=False + ), + ) + # Second chunk (final) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=1, total_length=10, blob=b"World", end=True + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert result[0].type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + # The buffer should contain the complete data + assert result[0].message.blob[:10] == b"HelloWorld" + + def test_merge_blob_chunks_with_multiple_files(self): + """Test merging chunks from multiple files.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # File 1, chunk 1 + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=4, blob=b"AB", end=False + ), + ) + # File 2, chunk 1 + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file2", sequence=0, total_length=4, blob=b"12", end=False + ), + ) + # File 1, chunk 2 (final) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=1, total_length=4, blob=b"CD", end=True + ), + ) + # File 2, chunk 2 (final) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file2", sequence=1, total_length=4, blob=b"34", end=True + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 2 + # Check that both files are properly merged + assert all(r.type == ToolInvokeMessage.MessageType.BLOB for r in result) + + def test_merge_blob_chunks_passes_through_non_blob_messages(self): + """Test that non-blob messages pass through unchanged.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Text message + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.TEXT, + message=ToolInvokeMessage.TextMessage(text="Hello"), + ) + # Blob chunk + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=5, blob=b"Test", end=True + ), + ) + # Another text message + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.TEXT, + message=ToolInvokeMessage.TextMessage(text="World"), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 3 + assert result[0].type == ToolInvokeMessage.MessageType.TEXT + assert isinstance(result[0].message, ToolInvokeMessage.TextMessage) + assert result[0].message.text == "Hello" + assert result[1].type == ToolInvokeMessage.MessageType.BLOB 
+ assert result[2].type == ToolInvokeMessage.MessageType.TEXT + assert isinstance(result[2].message, ToolInvokeMessage.TextMessage) + assert result[2].message.text == "World" + + def test_merge_blob_chunks_file_too_large(self): + """Test that error is raised when file exceeds max size.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Send a chunk that would exceed the limit + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=100, blob=b"x" * 1024, end=False + ), + ) + + with pytest.raises(ValueError) as exc_info: + list(merge_blob_chunks(mock_generator(), max_file_size=1000)) + assert "File is too large" in str(exc_info.value) + + def test_merge_blob_chunks_chunk_too_large(self): + """Test that error is raised when chunk exceeds max chunk size.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Send a chunk that exceeds the max chunk size + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=10000, blob=b"x" * 9000, end=False + ), + ) + + with pytest.raises(ValueError) as exc_info: + list(merge_blob_chunks(mock_generator(), max_chunk_size=8192)) + assert "File chunk is too large" in str(exc_info.value) + + def test_merge_blob_chunks_with_agent_invoke_message(self): + """Test that merge_blob_chunks works with AgentInvokeMessage.""" + + def mock_generator() -> Generator[AgentInvokeMessage, None, None]: + # First chunk + yield AgentInvokeMessage( + type=AgentInvokeMessage.MessageType.BLOB_CHUNK, + message=AgentInvokeMessage.BlobChunkMessage( + id="agent_file", sequence=0, total_length=8, blob=b"Agent", end=False + ), + ) + # Final chunk + yield AgentInvokeMessage( + type=AgentInvokeMessage.MessageType.BLOB_CHUNK, + message=AgentInvokeMessage.BlobChunkMessage( + id="agent_file", sequence=1, total_length=8, blob=b"Data", end=True + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert isinstance(result[0], AgentInvokeMessage) + assert result[0].type == AgentInvokeMessage.MessageType.BLOB + + def test_merge_blob_chunks_preserves_meta(self): + """Test that meta information is preserved in merged messages.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=4, blob=b"Test", end=True + ), + meta={"key": "value"}, + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert result[0].meta == {"key": "value"} + + def test_merge_blob_chunks_custom_limits(self): + """Test merge_blob_chunks with custom size limits.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # This should work with custom limits + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=500, blob=b"x" * 400, end=False + ), + ) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=1, total_length=500, blob=b"y" * 100, end=True + ), + ) + + # Should work with custom limits + result = list(merge_blob_chunks(mock_generator(), max_file_size=1000, max_chunk_size=500)) + assert len(result) == 1 + + # 
Should fail with smaller file size limit + def mock_generator2() -> Generator[ToolInvokeMessage, None, None]: + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=500, blob=b"x" * 400, end=False + ), + ) + + with pytest.raises(ValueError): + list(merge_blob_chunks(mock_generator2(), max_file_size=300)) + + def test_merge_blob_chunks_data_integrity(self): + """Test that merged chunks exactly match the original data.""" + # Create original data + original_data = b"This is a test message that will be split into chunks for testing purposes." + chunk_size = 20 + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Split original data into chunks + chunks = [] + for i in range(0, len(original_data), chunk_size): + chunk_data = original_data[i : i + chunk_size] + is_last = (i + chunk_size) >= len(original_data) + chunks.append((i // chunk_size, chunk_data, is_last)) + + # Yield chunks + for sequence, data, is_end in chunks: + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="test_file", + sequence=sequence, + total_length=len(original_data), + blob=data, + end=is_end, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert result[0].type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + # Verify the merged data exactly matches the original + assert result[0].message.blob == original_data + + def test_merge_blob_chunks_empty_chunk(self): + """Test handling of empty chunks.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # First chunk with data + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=0, total_length=10, blob=b"Hello", end=False + ), + ) + # Empty chunk in the middle + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=1, total_length=10, blob=b"", end=False + ), + ) + # Final chunk with data + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="file1", sequence=2, total_length=10, blob=b"World", end=True + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert result[0].type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + # The final blob should contain "Hello" followed by "World" + assert result[0].message.blob[:10] == b"HelloWorld" + + def test_merge_blob_chunks_single_chunk_file(self): + """Test file that arrives as a single complete chunk.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Single chunk that is both first and last + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="single_chunk_file", + sequence=0, + total_length=11, + blob=b"Single Data", + end=True, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert result[0].type == ToolInvokeMessage.MessageType.BLOB + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + assert result[0].message.blob == b"Single Data" + + def test_merge_blob_chunks_concurrent_files(self): + """Test 
that chunks from different files are properly separated.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Interleave chunks from three different files + files_data = { + "file1": b"First file content", + "file2": b"Second file data", + "file3": b"Third file", + } + + # First chunk from each file + for file_id, data in files_data.items(): + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id=file_id, + sequence=0, + total_length=len(data), + blob=data[:6], + end=False, + ), + ) + + # Second chunk from each file (final) + for file_id, data in files_data.items(): + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id=file_id, + sequence=1, + total_length=len(data), + blob=data[6:], + end=True, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 3 + + # Extract the blob data from results + blobs = set() + for r in result: + assert isinstance(r.message, ToolInvokeMessage.BlobMessage) + blobs.add(r.message.blob) + expected = {b"First file content", b"Second file data", b"Third file"} + assert blobs == expected + + def test_merge_blob_chunks_exact_buffer_size(self): + """Test that data fitting exactly in buffer works correctly.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Create data that exactly fills the declared buffer + exact_data = b"X" * 100 + + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="exact_file", + sequence=0, + total_length=100, + blob=exact_data[:50], + end=False, + ), + ) + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="exact_file", + sequence=1, + total_length=100, + blob=exact_data[50:], + end=True, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + assert len(result[0].message.blob) == 100 + assert result[0].message.blob == b"X" * 100 + + def test_merge_blob_chunks_large_file_simulation(self): + """Test handling of a large file split into many chunks.""" + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Simulate a 1MB file split into 128 chunks of 8KB each + chunk_size = 8192 + num_chunks = 128 + total_size = chunk_size * num_chunks + + for i in range(num_chunks): + # Create unique data for each chunk to verify ordering + chunk_data = bytes([i % 256]) * chunk_size + is_last = i == num_chunks - 1 + + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="large_file", + sequence=i, + total_length=total_size, + blob=chunk_data, + end=is_last, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + assert len(result[0].message.blob) == 1024 * 1024 + + # Verify the data pattern is correct + merged_data = result[0].message.blob + chunk_size = 8192 + num_chunks = 128 + for i in range(num_chunks): + chunk_start = i * chunk_size + chunk_end = chunk_start + chunk_size + expected_byte = i % 256 + chunk = merged_data[chunk_start:chunk_end] + assert all(b == expected_byte for b in chunk), f"Chunk {i} has incorrect data" + + def test_merge_blob_chunks_sequential_order_required(self): + """ + 
Test note: The current implementation assumes chunks arrive in sequential order. + Out-of-order chunks would need additional logic to handle properly. + This test documents the expected behavior with sequential chunks. + """ + + def mock_generator() -> Generator[ToolInvokeMessage, None, None]: + # Chunks arriving in correct sequential order + data_parts = [b"First", b"Second", b"Third"] + total_length = sum(len(part) for part in data_parts) + + for i, part in enumerate(data_parts): + is_last = i == len(data_parts) - 1 + yield ToolInvokeMessage( + type=ToolInvokeMessage.MessageType.BLOB_CHUNK, + message=ToolInvokeMessage.BlobChunkMessage( + id="ordered_file", + sequence=i, + total_length=total_length, + blob=part, + end=is_last, + ), + ) + + result = list(merge_blob_chunks(mock_generator())) + assert len(result) == 1 + assert isinstance(result[0].message, ToolInvokeMessage.BlobMessage) + assert result[0].message.blob == b"FirstSecondThird" diff --git a/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py b/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py index f6d22690d1..8abed0a3f9 100644 --- a/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py +++ b/api/tests/unit_tests/core/prompt/test_advanced_prompt_transform.py @@ -164,7 +164,7 @@ def test__get_chat_model_prompt_messages_with_files_no_memory(get_chat_model_arg ) assert isinstance(prompt_messages[3].content, list) assert len(prompt_messages[3].content) == 2 - assert prompt_messages[3].content[1].data == files[0].remote_url + assert prompt_messages[3].content[0].data == files[0].remote_url @pytest.fixture diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py index 450501c256..e7733b2317 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_execution_repository.py @@ -5,7 +5,6 @@ These tests verify the Celery-based asynchronous storage functionality for workflow execution data. 
""" -from datetime import UTC, datetime from unittest.mock import Mock, patch from uuid import uuid4 @@ -13,6 +12,7 @@ import pytest from core.repositories.celery_workflow_execution_repository import CeleryWorkflowExecutionRepository from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowType +from libs.datetime_utils import naive_utc_now from models import Account, EndUser from models.enums import WorkflowRunTriggeredFrom @@ -56,7 +56,7 @@ def sample_workflow_execution(): workflow_version="1.0", graph={"nodes": [], "edges": []}, inputs={"input1": "value1"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) @@ -199,7 +199,7 @@ class TestCeleryWorkflowExecutionRepository: workflow_version="1.0", graph={"nodes": [], "edges": []}, inputs={"input1": "value1"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) exec2 = WorkflowExecution.new( id_=str(uuid4()), @@ -208,7 +208,7 @@ class TestCeleryWorkflowExecutionRepository: workflow_version="1.0", graph={"nodes": [], "edges": []}, inputs={"input2": "value2"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Save both executions @@ -235,7 +235,7 @@ class TestCeleryWorkflowExecutionRepository: workflow_version="1.0", graph={"nodes": [], "edges": []}, inputs={"input1": "value1"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) repo.save(execution) diff --git a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py index b38d994f03..0c6fdc8f92 100644 --- a/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py +++ b/api/tests/unit_tests/core/repositories/test_celery_workflow_node_execution_repository.py @@ -5,7 +5,6 @@ These tests verify the Celery-based asynchronous storage functionality for workflow node execution data. 
""" -from datetime import UTC, datetime from unittest.mock import Mock, patch from uuid import uuid4 @@ -18,6 +17,7 @@ from core.workflow.entities.workflow_node_execution import ( ) from core.workflow.nodes.enums import NodeType from core.workflow.repositories.workflow_node_execution_repository import OrderConfig +from libs.datetime_utils import naive_utc_now from models import Account, EndUser from models.workflow import WorkflowNodeExecutionTriggeredFrom @@ -65,7 +65,7 @@ def sample_workflow_node_execution(): title="Test Node", inputs={"input1": "value1"}, status=WorkflowNodeExecutionStatus.RUNNING, - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) @@ -263,7 +263,7 @@ class TestCeleryWorkflowNodeExecutionRepository: title="Node 1", inputs={"input1": "value1"}, status=WorkflowNodeExecutionStatus.RUNNING, - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) exec2 = WorkflowNodeExecution( id=str(uuid4()), @@ -276,7 +276,7 @@ class TestCeleryWorkflowNodeExecutionRepository: title="Node 2", inputs={"input2": "value2"}, status=WorkflowNodeExecutionStatus.RUNNING, - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) # Save both executions @@ -314,7 +314,7 @@ class TestCeleryWorkflowNodeExecutionRepository: title="Node 2", inputs={}, status=WorkflowNodeExecutionStatus.RUNNING, - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) exec2 = WorkflowNodeExecution( id=str(uuid4()), @@ -327,7 +327,7 @@ class TestCeleryWorkflowNodeExecutionRepository: title="Node 1", inputs={}, status=WorkflowNodeExecutionStatus.RUNNING, - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) # Save in random order diff --git a/api/tests/unit_tests/core/repositories/test_factory.py b/api/tests/unit_tests/core/repositories/test_factory.py index 5146e82e8f..30f51902ef 100644 --- a/api/tests/unit_tests/core/repositories/test_factory.py +++ b/api/tests/unit_tests/core/repositories/test_factory.py @@ -2,19 +2,19 @@ Unit tests for the RepositoryFactory. This module tests the factory pattern implementation for creating repository instances -based on configuration, including error handling and validation. +based on configuration, including error handling. 
""" from unittest.mock import MagicMock, patch import pytest -from pytest_mock import MockerFixture from sqlalchemy.engine import Engine from sqlalchemy.orm import sessionmaker from core.repositories.factory import DifyCoreRepositoryFactory, RepositoryImportError from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from libs.module_loading import import_string from models import Account, EndUser from models.enums import WorkflowRunTriggeredFrom from models.workflow import WorkflowNodeExecutionTriggeredFrom @@ -23,98 +23,30 @@ from models.workflow import WorkflowNodeExecutionTriggeredFrom class TestRepositoryFactory: """Test cases for RepositoryFactory.""" - def test_import_class_success(self): + def test_import_string_success(self): """Test successful class import.""" # Test importing a real class class_path = "unittest.mock.MagicMock" - result = DifyCoreRepositoryFactory._import_class(class_path) + result = import_string(class_path) assert result is MagicMock - def test_import_class_invalid_path(self): + def test_import_string_invalid_path(self): """Test import with invalid module path.""" - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory._import_class("invalid.module.path") - assert "Cannot import repository class" in str(exc_info.value) + with pytest.raises(ImportError) as exc_info: + import_string("invalid.module.path") + assert "No module named" in str(exc_info.value) - def test_import_class_invalid_class_name(self): + def test_import_string_invalid_class_name(self): """Test import with invalid class name.""" - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory._import_class("unittest.mock.NonExistentClass") - assert "Cannot import repository class" in str(exc_info.value) + with pytest.raises(ImportError) as exc_info: + import_string("unittest.mock.NonExistentClass") + assert "does not define" in str(exc_info.value) - def test_import_class_malformed_path(self): + def test_import_string_malformed_path(self): """Test import with malformed path (no dots).""" - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory._import_class("invalidpath") - assert "Cannot import repository class" in str(exc_info.value) - - def test_validate_repository_interface_success(self): - """Test successful interface validation.""" - - # Create a mock class that implements the required methods - class MockRepository: - def save(self): - pass - - def get_by_id(self): - pass - - # Create a mock interface class - class MockInterface: - def save(self): - pass - - def get_by_id(self): - pass - - # Should not raise an exception when all methods are present - DifyCoreRepositoryFactory._validate_repository_interface(MockRepository, MockInterface) - - def test_validate_repository_interface_missing_methods(self): - """Test interface validation with missing methods.""" - - # Create a mock class that's missing required methods - class IncompleteRepository: - def save(self): - pass - - # Missing get_by_id method - - # Create a mock interface that requires both methods - class MockInterface: - def save(self): - pass - - def get_by_id(self): - pass - - def missing_method(self): - pass - - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory._validate_repository_interface(IncompleteRepository, MockInterface) - assert "does not implement required methods" 
in str(exc_info.value) - - def test_validate_repository_interface_with_private_methods(self): - """Test that private methods are ignored during interface validation.""" - - class MockRepository: - def save(self): - pass - - def _private_method(self): - pass - - # Create a mock interface with private methods - class MockInterface: - def save(self): - pass - - def _private_method(self): - pass - - # Should not raise exception - private methods should be ignored - DifyCoreRepositoryFactory._validate_repository_interface(MockRepository, MockInterface) + with pytest.raises(ImportError) as exc_info: + import_string("invalidpath") + assert "doesn't look like a module path" in str(exc_info.value) @patch("core.repositories.factory.dify_config") def test_create_workflow_execution_repository_success(self, mock_config): @@ -133,11 +65,8 @@ class TestRepositoryFactory: mock_repository_instance = MagicMock(spec=WorkflowExecutionRepository) mock_repository_class.return_value = mock_repository_instance - # Mock the validation methods - with ( - patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class), - patch.object(DifyCoreRepositoryFactory, "_validate_repository_interface"), - ): + # Mock import_string + with patch("core.repositories.factory.import_string", return_value=mock_repository_class): result = DifyCoreRepositoryFactory.create_workflow_execution_repository( session_factory=mock_session_factory, user=mock_user, @@ -170,34 +99,7 @@ class TestRepositoryFactory: app_id="test-app-id", triggered_from=WorkflowRunTriggeredFrom.APP_RUN, ) - assert "Cannot import repository class" in str(exc_info.value) - - @patch("core.repositories.factory.dify_config") - def test_create_workflow_execution_repository_validation_error(self, mock_config, mocker: MockerFixture): - """Test WorkflowExecutionRepository creation with validation error.""" - # Setup mock configuration - mock_config.CORE_WORKFLOW_EXECUTION_REPOSITORY = "unittest.mock.MagicMock" - - mock_session_factory = MagicMock(spec=sessionmaker) - mock_user = MagicMock(spec=Account) - - # Mock the import to succeed but validation to fail - mock_repository_class = MagicMock() - mocker.patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class) - mocker.patch.object( - DifyCoreRepositoryFactory, - "_validate_repository_interface", - side_effect=RepositoryImportError("Interface validation failed"), - ) - - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory.create_workflow_execution_repository( - session_factory=mock_session_factory, - user=mock_user, - app_id="test-app-id", - triggered_from=WorkflowRunTriggeredFrom.APP_RUN, - ) - assert "Interface validation failed" in str(exc_info.value) + assert "Failed to create WorkflowExecutionRepository" in str(exc_info.value) @patch("core.repositories.factory.dify_config") def test_create_workflow_execution_repository_instantiation_error(self, mock_config): @@ -212,11 +114,8 @@ class TestRepositoryFactory: mock_repository_class = MagicMock() mock_repository_class.side_effect = Exception("Instantiation failed") - # Mock the validation methods to succeed - with ( - patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class), - patch.object(DifyCoreRepositoryFactory, "_validate_repository_interface"), - ): + # Mock import_string to return a failing class + with patch("core.repositories.factory.import_string", return_value=mock_repository_class): with pytest.raises(RepositoryImportError) as 
exc_info: DifyCoreRepositoryFactory.create_workflow_execution_repository( session_factory=mock_session_factory, @@ -243,11 +142,8 @@ class TestRepositoryFactory: mock_repository_instance = MagicMock(spec=WorkflowNodeExecutionRepository) mock_repository_class.return_value = mock_repository_instance - # Mock the validation methods - with ( - patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class), - patch.object(DifyCoreRepositoryFactory, "_validate_repository_interface"), - ): + # Mock import_string + with patch("core.repositories.factory.import_string", return_value=mock_repository_class): result = DifyCoreRepositoryFactory.create_workflow_node_execution_repository( session_factory=mock_session_factory, user=mock_user, @@ -280,34 +176,7 @@ class TestRepositoryFactory: app_id="test-app-id", triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, ) - assert "Cannot import repository class" in str(exc_info.value) - - @patch("core.repositories.factory.dify_config") - def test_create_workflow_node_execution_repository_validation_error(self, mock_config, mocker: MockerFixture): - """Test WorkflowNodeExecutionRepository creation with validation error.""" - # Setup mock configuration - mock_config.CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY = "unittest.mock.MagicMock" - - mock_session_factory = MagicMock(spec=sessionmaker) - mock_user = MagicMock(spec=EndUser) - - # Mock the import to succeed but validation to fail - mock_repository_class = MagicMock() - mocker.patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class) - mocker.patch.object( - DifyCoreRepositoryFactory, - "_validate_repository_interface", - side_effect=RepositoryImportError("Interface validation failed"), - ) - - with pytest.raises(RepositoryImportError) as exc_info: - DifyCoreRepositoryFactory.create_workflow_node_execution_repository( - session_factory=mock_session_factory, - user=mock_user, - app_id="test-app-id", - triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, - ) - assert "Interface validation failed" in str(exc_info.value) + assert "Failed to create WorkflowNodeExecutionRepository" in str(exc_info.value) @patch("core.repositories.factory.dify_config") def test_create_workflow_node_execution_repository_instantiation_error(self, mock_config): @@ -322,11 +191,8 @@ class TestRepositoryFactory: mock_repository_class = MagicMock() mock_repository_class.side_effect = Exception("Instantiation failed") - # Mock the validation methods to succeed - with ( - patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class), - patch.object(DifyCoreRepositoryFactory, "_validate_repository_interface"), - ): + # Mock import_string to return a failing class + with patch("core.repositories.factory.import_string", return_value=mock_repository_class): with pytest.raises(RepositoryImportError) as exc_info: DifyCoreRepositoryFactory.create_workflow_node_execution_repository( session_factory=mock_session_factory, @@ -359,11 +225,8 @@ class TestRepositoryFactory: mock_repository_instance = MagicMock(spec=WorkflowExecutionRepository) mock_repository_class.return_value = mock_repository_instance - # Mock the validation methods - with ( - patch.object(DifyCoreRepositoryFactory, "_import_class", return_value=mock_repository_class), - patch.object(DifyCoreRepositoryFactory, "_validate_repository_interface"), - ): + # Mock import_string + with patch("core.repositories.factory.import_string", return_value=mock_repository_class): result = 
DifyCoreRepositoryFactory.create_workflow_execution_repository( session_factory=mock_engine, # Using Engine instead of sessionmaker user=mock_user, diff --git a/api/tests/unit_tests/core/test_provider_configuration.py b/api/tests/unit_tests/core/test_provider_configuration.py new file mode 100644 index 0000000000..75621ecb6a --- /dev/null +++ b/api/tests/unit_tests/core/test_provider_configuration.py @@ -0,0 +1,308 @@ +from unittest.mock import Mock, patch + +import pytest + +from core.entities.provider_configuration import ProviderConfiguration, SystemConfigurationStatus +from core.entities.provider_entities import ( + CustomConfiguration, + ModelSettings, + ProviderQuotaType, + QuotaConfiguration, + QuotaUnit, + RestrictModel, + SystemConfiguration, +) +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.entities.provider_entities import ConfigurateMethod, ProviderEntity +from models.provider import Provider, ProviderType + + +@pytest.fixture +def mock_provider_entity(): + """Mock provider entity with basic configuration""" + provider_entity = ProviderEntity( + provider="openai", + label=I18nObject(en_US="OpenAI", zh_Hans="OpenAI"), + description=I18nObject(en_US="OpenAI provider", zh_Hans="OpenAI 提供商"), + icon_small=I18nObject(en_US="icon.png", zh_Hans="icon.png"), + icon_large=I18nObject(en_US="icon.png", zh_Hans="icon.png"), + background="background.png", + help=None, + supported_model_types=[ModelType.LLM], + configurate_methods=[ConfigurateMethod.PREDEFINED_MODEL], + provider_credential_schema=None, + model_credential_schema=None, + ) + + return provider_entity + + +@pytest.fixture +def mock_system_configuration(): + """Mock system configuration""" + quota_config = QuotaConfiguration( + quota_type=ProviderQuotaType.TRIAL, + quota_unit=QuotaUnit.TOKENS, + quota_limit=1000, + quota_used=0, + is_valid=True, + restrict_models=[RestrictModel(model="gpt-4", reason="Experimental", model_type=ModelType.LLM)], + ) + + system_config = SystemConfiguration( + enabled=True, + credentials={"openai_api_key": "test_key"}, + quota_configurations=[quota_config], + current_quota_type=ProviderQuotaType.TRIAL, + ) + + return system_config + + +@pytest.fixture +def mock_custom_configuration(): + """Mock custom configuration""" + custom_config = CustomConfiguration(provider=None, models=[]) + return custom_config + + +@pytest.fixture +def provider_configuration(mock_provider_entity, mock_system_configuration, mock_custom_configuration): + """Create a test provider configuration instance""" + with patch("core.entities.provider_configuration.original_provider_configurate_methods", {}): + return ProviderConfiguration( + tenant_id="test_tenant", + provider=mock_provider_entity, + preferred_provider_type=ProviderType.SYSTEM, + using_provider_type=ProviderType.SYSTEM, + system_configuration=mock_system_configuration, + custom_configuration=mock_custom_configuration, + model_settings=[], + ) + + +class TestProviderConfiguration: + """Test cases for ProviderConfiguration class""" + + def test_get_current_credentials_system_provider_success(self, provider_configuration): + """Test successfully getting credentials from system provider""" + # Arrange + provider_configuration.using_provider_type = ProviderType.SYSTEM + + # Act + credentials = provider_configuration.get_current_credentials(ModelType.LLM, "gpt-4") + + # Assert + assert credentials == {"openai_api_key": "test_key"} + + def 
test_get_current_credentials_model_disabled(self, provider_configuration): + """Test getting credentials when model is disabled""" + # Arrange + model_setting = ModelSettings( + model="gpt-4", + model_type=ModelType.LLM, + enabled=False, + load_balancing_configs=[], + has_invalid_load_balancing_configs=False, + ) + provider_configuration.model_settings = [model_setting] + + # Act & Assert + with pytest.raises(ValueError, match="Model gpt-4 is disabled"): + provider_configuration.get_current_credentials(ModelType.LLM, "gpt-4") + + def test_get_current_credentials_custom_provider_with_models(self, provider_configuration): + """Test getting credentials from custom provider with model configurations""" + # Arrange + provider_configuration.using_provider_type = ProviderType.CUSTOM + + mock_model_config = Mock() + mock_model_config.model_type = ModelType.LLM + mock_model_config.model = "gpt-4" + mock_model_config.credentials = {"openai_api_key": "custom_key"} + provider_configuration.custom_configuration.models = [mock_model_config] + + # Act + credentials = provider_configuration.get_current_credentials(ModelType.LLM, "gpt-4") + + # Assert + assert credentials == {"openai_api_key": "custom_key"} + + def test_get_system_configuration_status_active(self, provider_configuration): + """Test getting active system configuration status""" + # Arrange + provider_configuration.system_configuration.enabled = True + + # Act + status = provider_configuration.get_system_configuration_status() + + # Assert + assert status == SystemConfigurationStatus.ACTIVE + + def test_get_system_configuration_status_unsupported(self, provider_configuration): + """Test getting unsupported system configuration status""" + # Arrange + provider_configuration.system_configuration.enabled = False + + # Act + status = provider_configuration.get_system_configuration_status() + + # Assert + assert status == SystemConfigurationStatus.UNSUPPORTED + + def test_get_system_configuration_status_quota_exceeded(self, provider_configuration): + """Test getting quota exceeded system configuration status""" + # Arrange + provider_configuration.system_configuration.enabled = True + quota_config = provider_configuration.system_configuration.quota_configurations[0] + quota_config.is_valid = False + + # Act + status = provider_configuration.get_system_configuration_status() + + # Assert + assert status == SystemConfigurationStatus.QUOTA_EXCEEDED + + def test_is_custom_configuration_available_with_provider(self, provider_configuration): + """Test custom configuration availability with provider credentials""" + # Arrange + mock_provider = Mock() + mock_provider.available_credentials = ["openai_api_key"] + provider_configuration.custom_configuration.provider = mock_provider + provider_configuration.custom_configuration.models = [] + + # Act + result = provider_configuration.is_custom_configuration_available() + + # Assert + assert result is True + + def test_is_custom_configuration_available_with_models(self, provider_configuration): + """Test custom configuration availability with model configurations""" + # Arrange + provider_configuration.custom_configuration.provider = None + provider_configuration.custom_configuration.models = [Mock()] + + # Act + result = provider_configuration.is_custom_configuration_available() + + # Assert + assert result is True + + def test_is_custom_configuration_available_false(self, provider_configuration): + """Test custom configuration not available""" + # Arrange + 
provider_configuration.custom_configuration.provider = None + provider_configuration.custom_configuration.models = [] + + # Act + result = provider_configuration.is_custom_configuration_available() + + # Assert + assert result is False + + @patch("core.entities.provider_configuration.Session") + def test_get_provider_record_found(self, mock_session, provider_configuration): + """Test getting provider record successfully""" + # Arrange + mock_provider = Mock(spec=Provider) + mock_session_instance = Mock() + mock_session.return_value.__enter__.return_value = mock_session_instance + mock_session_instance.execute.return_value.scalar_one_or_none.return_value = mock_provider + + # Act + result = provider_configuration._get_provider_record(mock_session_instance) + + # Assert + assert result == mock_provider + + @patch("core.entities.provider_configuration.Session") + def test_get_provider_record_not_found(self, mock_session, provider_configuration): + """Test getting provider record when not found""" + # Arrange + mock_session_instance = Mock() + mock_session.return_value.__enter__.return_value = mock_session_instance + mock_session_instance.execute.return_value.scalar_one_or_none.return_value = None + + # Act + result = provider_configuration._get_provider_record(mock_session_instance) + + # Assert + assert result is None + + def test_init_with_customizable_model_only( + self, mock_provider_entity, mock_system_configuration, mock_custom_configuration + ): + """Test initialization with customizable model only configuration""" + # Arrange + mock_provider_entity.configurate_methods = [ConfigurateMethod.CUSTOMIZABLE_MODEL] + + # Act + with patch("core.entities.provider_configuration.original_provider_configurate_methods", {}): + config = ProviderConfiguration( + tenant_id="test_tenant", + provider=mock_provider_entity, + preferred_provider_type=ProviderType.SYSTEM, + using_provider_type=ProviderType.SYSTEM, + system_configuration=mock_system_configuration, + custom_configuration=mock_custom_configuration, + model_settings=[], + ) + + # Assert + assert ConfigurateMethod.PREDEFINED_MODEL in config.provider.configurate_methods + + def test_get_current_credentials_with_restricted_models(self, provider_configuration): + """Test getting credentials with model restrictions""" + # Arrange + provider_configuration.using_provider_type = ProviderType.SYSTEM + + # Act + credentials = provider_configuration.get_current_credentials(ModelType.LLM, "gpt-3.5-turbo") + + # Assert + assert credentials is not None + assert "openai_api_key" in credentials + + @patch("core.entities.provider_configuration.Session") + def test_get_specific_provider_credential_success(self, mock_session, provider_configuration): + """Test getting specific provider credential successfully""" + # Arrange + credential_id = "test_credential_id" + mock_credential = Mock() + mock_credential.encrypted_config = '{"openai_api_key": "encrypted_key"}' + + mock_session_instance = Mock() + mock_session.return_value.__enter__.return_value = mock_session_instance + mock_session_instance.execute.return_value.scalar_one_or_none.return_value = mock_credential + + # Act + with patch.object(provider_configuration, "_get_specific_provider_credential") as mock_get: + mock_get.return_value = {"openai_api_key": "test_key"} + result = provider_configuration._get_specific_provider_credential(credential_id) + + # Assert + assert result == {"openai_api_key": "test_key"} + + @patch("core.entities.provider_configuration.Session") + def 
test_get_specific_provider_credential_not_found(self, mock_session, provider_configuration):
+        """Test getting specific provider credential when not found"""
+        # Arrange
+        credential_id = "nonexistent_credential_id"
+
+        mock_session_instance = Mock()
+        mock_session.return_value.__enter__.return_value = mock_session_instance
+        mock_session_instance.execute.return_value.scalar_one_or_none.return_value = None
+
+        # Act & Assert
+        with patch.object(provider_configuration, "_get_specific_provider_credential") as mock_get:
+            mock_get.return_value = None
+            result = provider_configuration._get_specific_provider_credential(credential_id)
+            assert result is None
diff --git a/api/tests/unit_tests/core/test_provider_manager.py b/api/tests/unit_tests/core/test_provider_manager.py
index 90d5a6f15b..2dab394029 100644
--- a/api/tests/unit_tests/core/test_provider_manager.py
+++ b/api/tests/unit_tests/core/test_provider_manager.py
@@ -1,190 +1,185 @@
-# from core.entities.provider_entities import ModelSettings
-# from core.model_runtime.entities.model_entities import ModelType
-# from core.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
-# from core.provider_manager import ProviderManager
-# from models.provider import LoadBalancingModelConfig, ProviderModelSetting
+import pytest
+
+from core.entities.provider_entities import ModelSettings
+from core.model_runtime.entities.model_entities import ModelType
+from core.provider_manager import ProviderManager
+from models.provider import LoadBalancingModelConfig, ProviderModelSetting

-# def test__to_model_settings(mocker):
-#     # Get all provider entities
-#     model_provider_factory = ModelProviderFactory("test_tenant")
-#     provider_entities = model_provider_factory.get_providers()
+@pytest.fixture
+def mock_provider_entity(mocker):
+    mock_entity = mocker.Mock()
+    mock_entity.provider = "openai"
+    mock_entity.configurate_methods = ["predefined-model"]
+    mock_entity.supported_model_types = [ModelType.LLM]

-#     provider_entity = None
-#     for provider in provider_entities:
-#         if provider.provider == "openai":
-#             provider_entity = provider
+    mock_entity.model_credential_schema = mocker.Mock()
+    mock_entity.model_credential_schema.credential_form_schemas = []

-#     # Mocking the inputs
-#     provider_model_settings = [
-#         ProviderModelSetting(
-#             id="id",
-#             tenant_id="tenant_id",
-#             provider_name="openai",
-#             model_name="gpt-4",
-#             model_type="text-generation",
-#             enabled=True,
-#             load_balancing_enabled=True,
-#         )
-#     ]
-#     load_balancing_model_configs = [
-#         LoadBalancingModelConfig(
-#             id="id1",
-#             tenant_id="tenant_id",
-#             provider_name="openai",
-#             model_name="gpt-4",
-#             model_type="text-generation",
-#             name="__inherit__",
-#             encrypted_config=None,
-#             enabled=True,
-#         ),
-#         LoadBalancingModelConfig(
-#             id="id2",
-#             tenant_id="tenant_id",
-#             provider_name="openai",
-#             model_name="gpt-4",
-#             model_type="text-generation",
-#             name="first",
-#             encrypted_config='{"openai_api_key": "fake_key"}',
-#             enabled=True,
-#         ),
-#     ]
-
-#     mocker.patch(
-#         "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"}
-#     )
-
-#     provider_manager = ProviderManager()
-
-#     # Running the method
-#     result = provider_manager._to_model_settings(provider_entity,
-#                                                  provider_model_settings, load_balancing_model_configs)
-
-#     # Asserting that the result is as 
expected -# assert len(result) == 1 -# assert isinstance(result[0], ModelSettings) -# assert result[0].model == "gpt-4" -# assert result[0].model_type == ModelType.LLM -# assert result[0].enabled is True -# assert len(result[0].load_balancing_configs) == 2 -# assert result[0].load_balancing_configs[0].name == "__inherit__" -# assert result[0].load_balancing_configs[1].name == "first" + return mock_entity -# def test__to_model_settings_only_one_lb(mocker): -# # Get all provider entities -# model_provider_factory = ModelProviderFactory("test_tenant") -# provider_entities = model_provider_factory.get_providers() +def test__to_model_settings(mocker, mock_provider_entity): + # Mocking the inputs + provider_model_settings = [ + ProviderModelSetting( + id="id", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + enabled=True, + load_balancing_enabled=True, + ) + ] + load_balancing_model_configs = [ + LoadBalancingModelConfig( + id="id1", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + name="__inherit__", + encrypted_config=None, + enabled=True, + ), + LoadBalancingModelConfig( + id="id2", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + name="first", + encrypted_config='{"openai_api_key": "fake_key"}', + enabled=True, + ), + ] -# provider_entity = None -# for provider in provider_entities: -# if provider.provider == "openai": -# provider_entity = provider + mocker.patch( + "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} + ) -# # Mocking the inputs -# provider_model_settings = [ -# ProviderModelSetting( -# id="id", -# tenant_id="tenant_id", -# provider_name="openai", -# model_name="gpt-4", -# model_type="text-generation", -# enabled=True, -# load_balancing_enabled=True, -# ) -# ] -# load_balancing_model_configs = [ -# LoadBalancingModelConfig( -# id="id1", -# tenant_id="tenant_id", -# provider_name="openai", -# model_name="gpt-4", -# model_type="text-generation", -# name="__inherit__", -# encrypted_config=None, -# enabled=True, -# ) -# ] + provider_manager = ProviderManager() -# mocker.patch( -# "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} -# ) + # Running the method + result = provider_manager._to_model_settings( + provider_entity=mock_provider_entity, + provider_model_settings=provider_model_settings, + load_balancing_model_configs=load_balancing_model_configs, + ) -# provider_manager = ProviderManager() - -# # Running the method -# result = provider_manager._to_model_settings( -# provider_entity, provider_model_settings, load_balancing_model_configs) - -# # Asserting that the result is as expected -# assert len(result) == 1 -# assert isinstance(result[0], ModelSettings) -# assert result[0].model == "gpt-4" -# assert result[0].model_type == ModelType.LLM -# assert result[0].enabled is True -# assert len(result[0].load_balancing_configs) == 0 + # Asserting that the result is as expected + assert len(result) == 1 + assert isinstance(result[0], ModelSettings) + assert result[0].model == "gpt-4" + assert result[0].model_type == ModelType.LLM + assert result[0].enabled is True + assert len(result[0].load_balancing_configs) == 2 + assert result[0].load_balancing_configs[0].name == "__inherit__" + assert result[0].load_balancing_configs[1].name == "first" -# def test__to_model_settings_lb_disabled(mocker): -# # 
Get all provider entities -# model_provider_factory = ModelProviderFactory("test_tenant") -# provider_entities = model_provider_factory.get_providers() +def test__to_model_settings_only_one_lb(mocker, mock_provider_entity): + # Mocking the inputs + provider_model_settings = [ + ProviderModelSetting( + id="id", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + enabled=True, + load_balancing_enabled=True, + ) + ] + load_balancing_model_configs = [ + LoadBalancingModelConfig( + id="id1", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + name="__inherit__", + encrypted_config=None, + enabled=True, + ) + ] -# provider_entity = None -# for provider in provider_entities: -# if provider.provider == "openai": -# provider_entity = provider + mocker.patch( + "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} + ) -# # Mocking the inputs -# provider_model_settings = [ -# ProviderModelSetting( -# id="id", -# tenant_id="tenant_id", -# provider_name="openai", -# model_name="gpt-4", -# model_type="text-generation", -# enabled=True, -# load_balancing_enabled=False, -# ) -# ] -# load_balancing_model_configs = [ -# LoadBalancingModelConfig( -# id="id1", -# tenant_id="tenant_id", -# provider_name="openai", -# model_name="gpt-4", -# model_type="text-generation", -# name="__inherit__", -# encrypted_config=None, -# enabled=True, -# ), -# LoadBalancingModelConfig( -# id="id2", -# tenant_id="tenant_id", -# provider_name="openai", -# model_name="gpt-4", -# model_type="text-generation", -# name="first", -# encrypted_config='{"openai_api_key": "fake_key"}', -# enabled=True, -# ), -# ] + provider_manager = ProviderManager() -# mocker.patch( -# "core.helper.model_provider_cache.ProviderCredentialsCache.get", -# return_value={"openai_api_key": "fake_key"} -# ) + # Running the method + result = provider_manager._to_model_settings( + provider_entity=mock_provider_entity, + provider_model_settings=provider_model_settings, + load_balancing_model_configs=load_balancing_model_configs, + ) -# provider_manager = ProviderManager() + # Asserting that the result is as expected + assert len(result) == 1 + assert isinstance(result[0], ModelSettings) + assert result[0].model == "gpt-4" + assert result[0].model_type == ModelType.LLM + assert result[0].enabled is True + assert len(result[0].load_balancing_configs) == 0 -# # Running the method -# result = provider_manager._to_model_settings(provider_entity, -# provider_model_settings, load_balancing_model_configs) -# # Asserting that the result is as expected -# assert len(result) == 1 -# assert isinstance(result[0], ModelSettings) -# assert result[0].model == "gpt-4" -# assert result[0].model_type == ModelType.LLM -# assert result[0].enabled is True -# assert len(result[0].load_balancing_configs) == 0 +def test__to_model_settings_lb_disabled(mocker, mock_provider_entity): + # Mocking the inputs + provider_model_settings = [ + ProviderModelSetting( + id="id", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + enabled=True, + load_balancing_enabled=False, + ) + ] + load_balancing_model_configs = [ + LoadBalancingModelConfig( + id="id1", + tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + name="__inherit__", + encrypted_config=None, + enabled=True, + ), + LoadBalancingModelConfig( + id="id2", + 
tenant_id="tenant_id", + provider_name="openai", + model_name="gpt-4", + model_type="text-generation", + name="first", + encrypted_config='{"openai_api_key": "fake_key"}', + enabled=True, + ), + ] + + mocker.patch( + "core.helper.model_provider_cache.ProviderCredentialsCache.get", return_value={"openai_api_key": "fake_key"} + ) + + provider_manager = ProviderManager() + + # Running the method + result = provider_manager._to_model_settings( + provider_entity=mock_provider_entity, + provider_model_settings=provider_model_settings, + load_balancing_model_configs=load_balancing_model_configs, + ) + + # Asserting that the result is as expected + assert len(result) == 1 + assert isinstance(result[0], ModelSettings) + assert result[0].model == "gpt-4" + assert result[0].model_type == ModelType.LLM + assert result[0].enabled is True + assert len(result[0].load_balancing_configs) == 0 diff --git a/api/tests/unit_tests/core/tools/utils/test_encryption.py b/api/tests/unit_tests/core/tools/utils/test_encryption.py new file mode 100644 index 0000000000..6425ab0b8d --- /dev/null +++ b/api/tests/unit_tests/core/tools/utils/test_encryption.py @@ -0,0 +1,181 @@ +import copy +from unittest.mock import patch + +import pytest + +from core.entities.provider_entities import BasicProviderConfig +from core.tools.utils.encryption import ProviderConfigEncrypter + + +# --------------------------- +# A no-op cache +# --------------------------- +class NoopCache: + """Simple cache stub: always returns None, does nothing for set/delete.""" + + def get(self): + return None + + def set(self, config): + pass + + def delete(self): + pass + + +@pytest.fixture +def secret_field() -> BasicProviderConfig: + """A SECRET_INPUT field named 'password'.""" + return BasicProviderConfig( + name="password", + type=BasicProviderConfig.Type.SECRET_INPUT, + ) + + +@pytest.fixture +def normal_field() -> BasicProviderConfig: + """A TEXT_INPUT field named 'username'.""" + return BasicProviderConfig( + name="username", + type=BasicProviderConfig.Type.TEXT_INPUT, + ) + + +@pytest.fixture +def encrypter_obj(secret_field, normal_field): + """ + Build ProviderConfigEncrypter with: + - tenant_id = tenant123 + - one secret field (password) and one normal field (username) + - NoopCache as cache + """ + return ProviderConfigEncrypter( + tenant_id="tenant123", + config=[secret_field, normal_field], + provider_config_cache=NoopCache(), + ) + + +# ============================================================ +# ProviderConfigEncrypter.encrypt() +# ============================================================ + + +def test_encrypt_only_secret_is_encrypted_and_non_secret_unchanged(encrypter_obj): + """ + Secret field should be encrypted, non-secret field unchanged. + Verify encrypt_token called only for secret field. + Also check deep copy (input not modified). 
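+
+    Expected shape, using the stubbed values from this test ("CIPHERTEXT" is the
+    patched return value of encrypt_token, not real cipher output):
+
+        {"username": "alice", "password": "plain_pwd"}
+        -> {"username": "alice", "password": "CIPHERTEXT"}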
+ """ + data_in = {"username": "alice", "password": "plain_pwd"} + data_copy = copy.deepcopy(data_in) + + with patch("core.tools.utils.encryption.encrypter.encrypt_token", return_value="CIPHERTEXT") as mock_encrypt: + out = encrypter_obj.encrypt(data_in) + + assert out["username"] == "alice" + assert out["password"] == "CIPHERTEXT" + mock_encrypt.assert_called_once_with("tenant123", "plain_pwd") + assert data_in == data_copy # deep copy semantics + + +def test_encrypt_missing_secret_key_is_ok(encrypter_obj): + """If secret field missing in input, no error and no encryption called.""" + with patch("core.tools.utils.encryption.encrypter.encrypt_token") as mock_encrypt: + out = encrypter_obj.encrypt({"username": "alice"}) + assert out["username"] == "alice" + mock_encrypt.assert_not_called() + + +# ============================================================ +# ProviderConfigEncrypter.mask_tool_credentials() +# ============================================================ + + +@pytest.mark.parametrize( + ("raw", "prefix", "suffix"), + [ + ("longsecret", "lo", "et"), + ("abcdefg", "ab", "fg"), + ("1234567", "12", "67"), + ], +) +def test_mask_tool_credentials_long_secret(encrypter_obj, raw, prefix, suffix): + """ + For length > 6: keep first 2 and last 2, mask middle with '*'. + """ + data_in = {"username": "alice", "password": raw} + data_copy = copy.deepcopy(data_in) + + out = encrypter_obj.mask_tool_credentials(data_in) + masked = out["password"] + + assert masked.startswith(prefix) + assert masked.endswith(suffix) + assert "*" in masked + assert len(masked) == len(raw) + assert data_in == data_copy # deep copy semantics + + +@pytest.mark.parametrize("raw", ["", "1", "12", "123", "123456"]) +def test_mask_tool_credentials_short_secret(encrypter_obj, raw): + """ + For length <= 6: fully mask with '*' of same length. 
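+
+    For example (these pairs restate the parametrized cases below):
+
+        "123456" -> "******"
+        "1"      -> "*"
+        ""       -> ""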
+ """ + out = encrypter_obj.mask_tool_credentials({"password": raw}) + assert out["password"] == ("*" * len(raw)) + + +def test_mask_tool_credentials_missing_key_noop(encrypter_obj): + """If secret key missing, leave other fields unchanged.""" + data_in = {"username": "alice"} + data_copy = copy.deepcopy(data_in) + + out = encrypter_obj.mask_tool_credentials(data_in) + assert out["username"] == "alice" + assert data_in == data_copy + + +# ============================================================ +# ProviderConfigEncrypter.decrypt() +# ============================================================ + + +def test_decrypt_normal_flow(encrypter_obj): + """ + Normal decrypt flow: + - decrypt_token called for secret field + - secret replaced with decrypted value + - non-secret unchanged + """ + data_in = {"username": "alice", "password": "ENC"} + data_copy = copy.deepcopy(data_in) + + with patch("core.tools.utils.encryption.encrypter.decrypt_token", return_value="PLAIN") as mock_decrypt: + out = encrypter_obj.decrypt(data_in) + + assert out["username"] == "alice" + assert out["password"] == "PLAIN" + mock_decrypt.assert_called_once_with("tenant123", "ENC") + assert data_in == data_copy # deep copy semantics + + +@pytest.mark.parametrize("empty_val", ["", None]) +def test_decrypt_skip_empty_values(encrypter_obj, empty_val): + """Skip decrypt if value is empty or None, keep original.""" + with patch("core.tools.utils.encryption.encrypter.decrypt_token") as mock_decrypt: + out = encrypter_obj.decrypt({"password": empty_val}) + + mock_decrypt.assert_not_called() + assert out["password"] == empty_val + + +def test_decrypt_swallow_exception_and_keep_original(encrypter_obj): + """ + If decrypt_token raises, exception should be swallowed, + and original value preserved. + """ + with patch("core.tools.utils.encryption.encrypter.decrypt_token", side_effect=Exception("boom")): + out = encrypter_obj.decrypt({"password": "ENC_ERR"}) + + assert out["password"] == "ENC_ERR" diff --git a/api/tests/unit_tests/core/tools/utils/test_web_reader_tool.py b/api/tests/unit_tests/core/tools/utils/test_web_reader_tool.py new file mode 100644 index 0000000000..57ddacd13d --- /dev/null +++ b/api/tests/unit_tests/core/tools/utils/test_web_reader_tool.py @@ -0,0 +1,312 @@ +import pytest + +from core.tools.utils.web_reader_tool import ( + extract_using_readabilipy, + get_image_upload_file_ids, + get_url, + page_result, +) + + +class FakeResponse: + """Minimal fake response object for ssrf_proxy / cloudscraper.""" + + def __init__(self, *, status_code=200, headers=None, content=b"", text=""): + self.status_code = status_code + self.headers = headers or {} + self.content = content + self.text = text if text else content.decode("utf-8", errors="ignore") + + +# --------------------------- +# Tests: page_result +# --------------------------- +@pytest.mark.parametrize( + ("text", "cursor", "maxlen", "expected"), + [ + ("abcdef", 0, 3, "abc"), + ("abcdef", 2, 10, "cdef"), # maxlen beyond end + ("abcdef", 6, 5, ""), # cursor at end + ("abcdef", 7, 5, ""), # cursor beyond end + ("", 0, 5, ""), # empty text + ], +) +def test_page_result(text, cursor, maxlen, expected): + assert page_result(text, cursor, maxlen) == expected + + +# --------------------------- +# Tests: get_url +# --------------------------- +@pytest.fixture +def stub_support_types(monkeypatch: pytest.MonkeyPatch): + """Stub supported content types list.""" + import core.tools.utils.web_reader_tool as mod + + # e.g. 
binary types supported by ExtractProcessor + monkeypatch.setattr(mod.extract_processor, "SUPPORT_URL_CONTENT_TYPES", ["application/pdf", "text/plain"]) + return mod + + +def test_get_url_unsupported_content_type(monkeypatch: pytest.MonkeyPatch, stub_support_types): + # HEAD 200 but content-type not supported and not text/html + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse( + status_code=200, + headers={"Content-Type": "image/png"}, # not supported + ) + + monkeypatch.setattr(stub_support_types.ssrf_proxy, "head", fake_head) + + result = get_url("https://x.test/file.png") + assert result == "Unsupported content-type [image/png] of URL." + + +def test_get_url_supported_binary_type_uses_extract_processor(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """ + When content-type is in SUPPORT_URL_CONTENT_TYPES, + should call ExtractProcessor.load_from_url and return its text. + """ + calls = {"load": 0} + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse( + status_code=200, + headers={"Content-Type": "application/pdf"}, + ) + + def fake_load_from_url(url, return_text=False): + calls["load"] += 1 + assert return_text is True + return "PDF extracted text" + + monkeypatch.setattr(stub_support_types.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(stub_support_types.ExtractProcessor, "load_from_url", staticmethod(fake_load_from_url)) + + result = get_url("https://x.test/doc.pdf") + assert calls["load"] == 1 + assert result == "PDF extracted text" + + +def test_get_url_html_flow_with_chardet_and_readability(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """200 + text/html → GET, chardet detects encoding, readability returns article which is templated.""" + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}) + + def fake_get(url, headers=None, follow_redirects=True, timeout=None): + html = b"xhello" + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=html) + + # chardet.detect returns utf-8 + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get) + monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"}) + + # readability → a dict that maps to Article, then FULL_TEMPLATE + def fake_simple_json_from_html_string(html, use_readability=True): + return { + "title": "My Title", + "byline": "Bob", + "plain_text": [{"type": "text", "text": "Hello world"}], + } + + monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string) + + out = get_url("https://x.test/page") + assert "TITLE: My Title" in out + assert "AUTHOR: Bob" in out + assert "Hello world" in out + + +def test_get_url_html_flow_empty_article_text_returns_empty(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """If readability returns no text, should return empty string.""" + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}) + + def fake_get(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=b"") + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get) + 
monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"}) + # readability returns empty plain_text + monkeypatch.setattr(mod, "simple_json_from_html_string", lambda html, use_readability=True: {"plain_text": []}) + + out = get_url("https://x.test/empty") + assert out == "" + + +def test_get_url_403_cloudscraper_fallback(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """HEAD 403 → use cloudscraper.get via ssrf_proxy.make_request, then proceed.""" + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=403, headers={}) + + # cloudscraper.create_scraper() → object with .get() + class FakeScraper: + def __init__(self): + pass # removed unused attribute + + def get(self, url, headers=None, follow_redirects=True, timeout=None): + # mimic html 200 + html = b"hi" + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=html) + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(mod.cloudscraper, "create_scraper", lambda: FakeScraper()) + monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"}) + monkeypatch.setattr( + mod, + "simple_json_from_html_string", + lambda html, use_readability=True: {"title": "T", "byline": "A", "plain_text": [{"type": "text", "text": "X"}]}, + ) + + out = get_url("https://x.test/403") + assert "TITLE: T" in out + assert "AUTHOR: A" in out + assert "X" in out + + +def test_get_url_head_non_200_returns_status(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """HEAD returns non-200 and non-403 → should directly return code message.""" + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=500) + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + + out = get_url("https://x.test/fail") + assert out == "URL returned status code 500." + + +def test_get_url_content_disposition_filename_detection(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """ + If HEAD 200 with no Content-Type but Content-Disposition filename suggests a supported type, + it should route to ExtractProcessor.load_from_url. + """ + calls = {"load": 0} + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=200, headers={"Content-Disposition": 'attachment; filename="doc.pdf"'}) + + def fake_load_from_url(url, return_text=False): + calls["load"] += 1 + return "From ExtractProcessor via filename" + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(mod.ExtractProcessor, "load_from_url", staticmethod(fake_load_from_url)) + + out = get_url("https://x.test/fname") + assert calls["load"] == 1 + assert out == "From ExtractProcessor via filename" + + +def test_get_url_html_encoding_fallback_when_decode_fails(monkeypatch: pytest.MonkeyPatch, stub_support_types): + """ + If chardet returns an encoding but content.decode raises, should fallback to response.text. 
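+
+    Rough sketch of the assumed fallback path (hypothetical; inferred from this
+    test's fake response rather than copied from the implementation):
+
+        try:
+            body = response.content.decode(detected_encoding)
+        except UnicodeDecodeError:
+            body = response.text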
+ """ + + def fake_head(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}) + + # Return bytes that will raise with the chosen encoding + def fake_get(url, headers=None, follow_redirects=True, timeout=None): + return FakeResponse( + status_code=200, + headers={"Content-Type": "text/html"}, + content=b"\xff\xfe\xfa", # likely to fail under utf-8 + text="fallback text", + ) + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head) + monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get) + monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"}) + monkeypatch.setattr( + mod, + "simple_json_from_html_string", + lambda html, use_readability=True: {"title": "", "byline": "", "plain_text": [{"type": "text", "text": "ok"}]}, + ) + + out = get_url("https://x.test/enc-fallback") + assert "ok" in out + + +# --------------------------- +# Tests: extract_using_readabilipy +# --------------------------- + + +def test_extract_using_readabilipy_field_mapping_and_defaults(monkeypatch: pytest.MonkeyPatch): + # stub readabilipy.simple_json_from_html_string + def fake_simple_json_from_html_string(html, use_readability=True): + return { + "title": "Hello", + "byline": "Alice", + "plain_text": [{"type": "text", "text": "world"}], + } + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string) + + article = extract_using_readabilipy("...") + assert article.title == "Hello" + assert article.author == "Alice" + assert isinstance(article.text, list) + assert article.text + assert article.text[0]["text"] == "world" + + +def test_extract_using_readabilipy_defaults_when_missing(monkeypatch: pytest.MonkeyPatch): + def fake_simple_json_from_html_string(html, use_readability=True): + return {} # all missing + + import core.tools.utils.web_reader_tool as mod + + monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string) + + article = extract_using_readabilipy("...") + assert article.title == "" + assert article.author == "" + assert article.text == [] + + +# --------------------------- +# Tests: get_image_upload_file_ids +# --------------------------- +def test_get_image_upload_file_ids(): + # should extract id from https + file-preview + content = "![image](https://example.com/a/b/files/abc123/file-preview)" + assert get_image_upload_file_ids(content) == ["abc123"] + + # should extract id from http + image-preview + content = "![image](http://host/files/xyz789/image-preview)" + assert get_image_upload_file_ids(content) == ["xyz789"] + + # should not match invalid scheme 'htt://' + content = "![image](htt://host/files/bad/file-preview)" + assert get_image_upload_file_ids(content) == [] + + # should extract multiple ids in order + content = """ + some text + ![image](https://h/files/id1/file-preview) + middle + ![image](http://h/files/id2/image-preview) + end + """ + assert get_image_upload_file_ids(content) == ["id1", "id2"] diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py index fa6fc3ba32..5348f729f9 100644 --- a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py +++ b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py @@ -8,7 +8,7 @@ from core.tools.errors import ToolInvokeError from core.tools.workflow_as_tool.tool import WorkflowTool -def 
test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_field(monkeypatch): +def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_field(monkeypatch: pytest.MonkeyPatch): """Ensure that WorkflowTool will throw a `ToolInvokeError` exception when `WorkflowAppGenerator.generate` returns a result with `error` key inside the `data` element. @@ -40,7 +40,7 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate", lambda *args, **kwargs: {"data": {"error": "oops"}}, ) - monkeypatch.setattr("flask_login.current_user", lambda *args, **kwargs: None) + monkeypatch.setattr("libs.login.current_user", lambda *args, **kwargs: None) with pytest.raises(ToolInvokeError) as exc_info: # WorkflowTool always returns a generator, so we need to iterate to diff --git a/api/tests/unit_tests/core/variables/test_segment_type.py b/api/tests/unit_tests/core/variables/test_segment_type.py index b33a83ba77..a197b617f3 100644 --- a/api/tests/unit_tests/core/variables/test_segment_type.py +++ b/api/tests/unit_tests/core/variables/test_segment_type.py @@ -23,6 +23,7 @@ class TestSegmentTypeIsArrayType: SegmentType.ARRAY_NUMBER, SegmentType.ARRAY_OBJECT, SegmentType.ARRAY_FILE, + SegmentType.ARRAY_BOOLEAN, ] expected_non_array_types = [ SegmentType.INTEGER, @@ -34,6 +35,7 @@ class TestSegmentTypeIsArrayType: SegmentType.FILE, SegmentType.NONE, SegmentType.GROUP, + SegmentType.BOOLEAN, ] for seg_type in expected_array_types: diff --git a/api/tests/unit_tests/core/variables/test_segment_type_validation.py b/api/tests/unit_tests/core/variables/test_segment_type_validation.py new file mode 100644 index 0000000000..e0541280d3 --- /dev/null +++ b/api/tests/unit_tests/core/variables/test_segment_type_validation.py @@ -0,0 +1,729 @@ +""" +Comprehensive unit tests for SegmentType.is_valid and SegmentType._validate_array methods. + +This module provides thorough testing of the validation logic for all SegmentType values, +including edge cases, error conditions, and different ArrayValidation strategies. 
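+
+Illustrative usage, restating parametrized cases defined below (the is_valid
+signature is assumed from how these tests call it):
+
+    SegmentType.ARRAY_STRING.is_valid(["hello", "world", "test"], ArrayValidation.ALL)  # True
+    SegmentType.ARRAY_STRING.is_valid(["hello", 123, True], ArrayValidation.FIRST)  # True
+    SegmentType.ARRAY_STRING.is_valid([123, "hello", "world"], ArrayValidation.FIRST)  # False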
+""" + +from dataclasses import dataclass +from typing import Any + +import pytest + +from core.file.enums import FileTransferMethod, FileType +from core.file.models import File +from core.variables.types import ArrayValidation, SegmentType + + +def create_test_file( + file_type: FileType = FileType.DOCUMENT, + transfer_method: FileTransferMethod = FileTransferMethod.LOCAL_FILE, + filename: str = "test.txt", + extension: str = ".txt", + mime_type: str = "text/plain", + size: int = 1024, +) -> File: + """Factory function to create File objects for testing.""" + return File( + tenant_id="test-tenant", + type=file_type, + transfer_method=transfer_method, + filename=filename, + extension=extension, + mime_type=mime_type, + size=size, + related_id="test-file-id" if transfer_method != FileTransferMethod.REMOTE_URL else None, + remote_url="https://example.com/file.txt" if transfer_method == FileTransferMethod.REMOTE_URL else None, + storage_key="test-storage-key", + ) + + +@dataclass +class ValidationTestCase: + """Test case data structure for validation tests.""" + + segment_type: SegmentType + value: Any + expected: bool + description: str + + def get_id(self): + return self.description + + +@dataclass +class ArrayValidationTestCase: + """Test case data structure for array validation tests.""" + + segment_type: SegmentType + value: Any + array_validation: ArrayValidation + expected: bool + description: str + + def get_id(self): + return self.description + + +# Test data construction functions +def get_boolean_cases() -> list[ValidationTestCase]: + return [ + # valid values + ValidationTestCase(SegmentType.BOOLEAN, True, True, "True boolean"), + ValidationTestCase(SegmentType.BOOLEAN, False, True, "False boolean"), + # Invalid values + ValidationTestCase(SegmentType.BOOLEAN, 1, False, "Integer 1 (not boolean)"), + ValidationTestCase(SegmentType.BOOLEAN, 0, False, "Integer 0 (not boolean)"), + ValidationTestCase(SegmentType.BOOLEAN, "true", False, "String 'true'"), + ValidationTestCase(SegmentType.BOOLEAN, "false", False, "String 'false'"), + ValidationTestCase(SegmentType.BOOLEAN, None, False, "None value"), + ValidationTestCase(SegmentType.BOOLEAN, [], False, "Empty list"), + ValidationTestCase(SegmentType.BOOLEAN, {}, False, "Empty dict"), + ] + + +def get_number_cases() -> list[ValidationTestCase]: + """Get test cases for valid number values.""" + return [ + # valid values + ValidationTestCase(SegmentType.NUMBER, 42, True, "Positive integer"), + ValidationTestCase(SegmentType.NUMBER, -42, True, "Negative integer"), + ValidationTestCase(SegmentType.NUMBER, 0, True, "Zero integer"), + ValidationTestCase(SegmentType.NUMBER, 3.14, True, "Positive float"), + ValidationTestCase(SegmentType.NUMBER, -3.14, True, "Negative float"), + ValidationTestCase(SegmentType.NUMBER, 0.0, True, "Zero float"), + ValidationTestCase(SegmentType.NUMBER, float("inf"), True, "Positive infinity"), + ValidationTestCase(SegmentType.NUMBER, float("-inf"), True, "Negative infinity"), + ValidationTestCase(SegmentType.NUMBER, float("nan"), True, "float(NaN)"), + # invalid number values + ValidationTestCase(SegmentType.NUMBER, "42", False, "String number"), + ValidationTestCase(SegmentType.NUMBER, None, False, "None value"), + ValidationTestCase(SegmentType.NUMBER, [], False, "Empty list"), + ValidationTestCase(SegmentType.NUMBER, {}, False, "Empty dict"), + ValidationTestCase(SegmentType.NUMBER, "3.14", False, "String float"), + ] + + +def get_string_cases() -> list[ValidationTestCase]: + """Get test cases for valid string 
values.""" + return [ + # valid values + ValidationTestCase(SegmentType.STRING, "", True, "Empty string"), + ValidationTestCase(SegmentType.STRING, "hello", True, "Simple string"), + ValidationTestCase(SegmentType.STRING, "🚀", True, "Unicode emoji"), + ValidationTestCase(SegmentType.STRING, "line1\nline2", True, "Multiline string"), + # invalid values + ValidationTestCase(SegmentType.STRING, 123, False, "Integer"), + ValidationTestCase(SegmentType.STRING, 3.14, False, "Float"), + ValidationTestCase(SegmentType.STRING, True, False, "Boolean"), + ValidationTestCase(SegmentType.STRING, None, False, "None value"), + ValidationTestCase(SegmentType.STRING, [], False, "Empty list"), + ValidationTestCase(SegmentType.STRING, {}, False, "Empty dict"), + ] + + +def get_object_cases() -> list[ValidationTestCase]: + """Get test cases for valid object values.""" + return [ + # valid cases + ValidationTestCase(SegmentType.OBJECT, {}, True, "Empty dict"), + ValidationTestCase(SegmentType.OBJECT, {"key": "value"}, True, "Simple dict"), + ValidationTestCase(SegmentType.OBJECT, {"a": 1, "b": 2}, True, "Dict with numbers"), + ValidationTestCase(SegmentType.OBJECT, {"nested": {"key": "value"}}, True, "Nested dict"), + ValidationTestCase(SegmentType.OBJECT, {"list": [1, 2, 3]}, True, "Dict with list"), + ValidationTestCase(SegmentType.OBJECT, {"mixed": [1, "two", {"three": 3}]}, True, "Complex dict"), + # invalid cases + ValidationTestCase(SegmentType.OBJECT, "not a dict", False, "String"), + ValidationTestCase(SegmentType.OBJECT, 123, False, "Integer"), + ValidationTestCase(SegmentType.OBJECT, 3.14, False, "Float"), + ValidationTestCase(SegmentType.OBJECT, True, False, "Boolean"), + ValidationTestCase(SegmentType.OBJECT, None, False, "None value"), + ValidationTestCase(SegmentType.OBJECT, [], False, "Empty list"), + ValidationTestCase(SegmentType.OBJECT, [1, 2, 3], False, "List with values"), + ] + + +def get_secret_cases() -> list[ValidationTestCase]: + """Get test cases for valid secret values.""" + return [ + # valid cases + ValidationTestCase(SegmentType.SECRET, "", True, "Empty secret"), + ValidationTestCase(SegmentType.SECRET, "secret", True, "Simple secret"), + ValidationTestCase(SegmentType.SECRET, "api_key_123", True, "API key format"), + ValidationTestCase(SegmentType.SECRET, "very_long_secret_key_with_special_chars!@#", True, "Complex secret"), + # invalid cases + ValidationTestCase(SegmentType.SECRET, 123, False, "Integer"), + ValidationTestCase(SegmentType.SECRET, 3.14, False, "Float"), + ValidationTestCase(SegmentType.SECRET, True, False, "Boolean"), + ValidationTestCase(SegmentType.SECRET, None, False, "None value"), + ValidationTestCase(SegmentType.SECRET, [], False, "Empty list"), + ValidationTestCase(SegmentType.SECRET, {}, False, "Empty dict"), + ] + + +def get_file_cases() -> list[ValidationTestCase]: + """Get test cases for valid file values.""" + test_file = create_test_file() + image_file = create_test_file( + file_type=FileType.IMAGE, filename="image.jpg", extension=".jpg", mime_type="image/jpeg" + ) + remote_file = create_test_file( + transfer_method=FileTransferMethod.REMOTE_URL, filename="remote.pdf", extension=".pdf" + ) + + return [ + # valid cases + ValidationTestCase(SegmentType.FILE, test_file, True, "Document file"), + ValidationTestCase(SegmentType.FILE, image_file, True, "Image file"), + ValidationTestCase(SegmentType.FILE, remote_file, True, "Remote file"), + # invalid cases + ValidationTestCase(SegmentType.FILE, "not a file", False, "String"), + 
ValidationTestCase(SegmentType.FILE, 123, False, "Integer"), + ValidationTestCase(SegmentType.FILE, {"filename": "test.txt"}, False, "Dict resembling file"), + ValidationTestCase(SegmentType.FILE, None, False, "None value"), + ValidationTestCase(SegmentType.FILE, [], False, "Empty list"), + ValidationTestCase(SegmentType.FILE, True, False, "Boolean"), + ] + + +def get_none_cases() -> list[ValidationTestCase]: + """Get test cases for valid none values.""" + return [ + # valid cases + ValidationTestCase(SegmentType.NONE, None, True, "None value"), + # invalid cases + ValidationTestCase(SegmentType.NONE, "", False, "Empty string"), + ValidationTestCase(SegmentType.NONE, 0, False, "Zero integer"), + ValidationTestCase(SegmentType.NONE, 0.0, False, "Zero float"), + ValidationTestCase(SegmentType.NONE, False, False, "False boolean"), + ValidationTestCase(SegmentType.NONE, [], False, "Empty list"), + ValidationTestCase(SegmentType.NONE, {}, False, "Empty dict"), + ValidationTestCase(SegmentType.NONE, "null", False, "String 'null'"), + ] + + +def get_array_any_validation_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_ANY validation.""" + return [ + ArrayValidationTestCase( + SegmentType.ARRAY_ANY, + [1, "string", 3.14, {"key": "value"}, True], + ArrayValidation.NONE, + True, + "Mixed types with NONE validation", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_ANY, + [1, "string", 3.14, {"key": "value"}, True], + ArrayValidation.FIRST, + True, + "Mixed types with FIRST validation", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_ANY, + [1, "string", 3.14, {"key": "value"}, True], + ArrayValidation.ALL, + True, + "Mixed types with ALL validation", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_ANY, [None, None, None], ArrayValidation.ALL, True, "All None values" + ), + ] + + +def get_array_string_validation_none_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_STRING validation with NONE strategy.""" + return [ + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, + ["hello", "world"], + ArrayValidation.NONE, + True, + "Valid strings with NONE validation", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, + [123, 456], + ArrayValidation.NONE, + True, + "Invalid elements with NONE validation", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, + ["valid", 123, True], + ArrayValidation.NONE, + True, + "Mixed types with NONE validation", + ), + ] + + +def get_array_string_validation_first_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_STRING validation with FIRST strategy.""" + return [ + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, ["hello", "world"], ArrayValidation.FIRST, True, "All valid strings" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, + ["hello", 123, True], + ArrayValidation.FIRST, + True, + "First valid, others invalid", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, + [123, "hello", "world"], + ArrayValidation.FIRST, + False, + "First invalid, others valid", + ), + ArrayValidationTestCase(SegmentType.ARRAY_STRING, [None, "hello"], ArrayValidation.FIRST, False, "First None"), + ] + + +def get_array_string_validation_all_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_STRING validation with ALL strategy.""" + return [ + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, ["hello", "world", "test"], ArrayValidation.ALL, True, "All valid strings" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, ["hello", 123, "world"], 
ArrayValidation.ALL, False, "One invalid element" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, [123, 456, 789], ArrayValidation.ALL, False, "All invalid elements" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_STRING, ["valid", None, "also_valid"], ArrayValidation.ALL, False, "Contains None" + ), + ] + + +def get_array_number_validation_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_NUMBER validation with different strategies.""" + return [ + # NONE strategy + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, [1, 2.5, 3], ArrayValidation.NONE, True, "Valid numbers with NONE" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, ["not", "numbers"], ArrayValidation.NONE, True, "Invalid elements with NONE" + ), + # FIRST strategy + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, [42, "not a number"], ArrayValidation.FIRST, True, "First valid number" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, ["not a number", 42], ArrayValidation.FIRST, False, "First invalid" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, [3.14, 2.71, 1.41], ArrayValidation.FIRST, True, "All valid floats" + ), + # ALL strategy + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, [1, 2, 3, 4.5], ArrayValidation.ALL, True, "All valid numbers" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, [1, "invalid", 3], ArrayValidation.ALL, False, "One invalid element" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_NUMBER, + [float("inf"), float("-inf"), float("nan")], + ArrayValidation.ALL, + True, + "Special float values", + ), + ] + + +def get_array_object_validation_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_OBJECT validation with different strategies.""" + return [ + # NONE strategy + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, [{}, {"key": "value"}], ArrayValidation.NONE, True, "Valid objects with NONE" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, ["not", "objects"], ArrayValidation.NONE, True, "Invalid elements with NONE" + ), + # FIRST strategy + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, + [{"valid": "object"}, "not an object"], + ArrayValidation.FIRST, + True, + "First valid object", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, + ["not an object", {"valid": "object"}], + ArrayValidation.FIRST, + False, + "First invalid", + ), + # ALL strategy + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, + [{}, {"a": 1}, {"nested": {"key": "value"}}], + ArrayValidation.ALL, + True, + "All valid objects", + ), + ArrayValidationTestCase( + SegmentType.ARRAY_OBJECT, + [{"valid": "object"}, "invalid", {"another": "object"}], + ArrayValidation.ALL, + False, + "One invalid element", + ), + ] + + +def get_array_file_validation_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_FILE validation with different strategies.""" + file1 = create_test_file(filename="file1.txt") + file2 = create_test_file(filename="file2.txt") + + return [ + # NONE strategy + ArrayValidationTestCase( + SegmentType.ARRAY_FILE, [file1, file2], ArrayValidation.NONE, True, "Valid files with NONE" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_FILE, ["not", "files"], ArrayValidation.NONE, True, "Invalid elements with NONE" + ), + # FIRST strategy + ArrayValidationTestCase( + SegmentType.ARRAY_FILE, [file1, "not a file"], ArrayValidation.FIRST, True, "First valid file" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_FILE, ["not a file", file1], 
ArrayValidation.FIRST, False, "First invalid" + ), + # ALL strategy + ArrayValidationTestCase(SegmentType.ARRAY_FILE, [file1, file2], ArrayValidation.ALL, True, "All valid files"), + ArrayValidationTestCase( + SegmentType.ARRAY_FILE, [file1, "invalid", file2], ArrayValidation.ALL, False, "One invalid element" + ), + ] + + +def get_array_boolean_validation_cases() -> list[ArrayValidationTestCase]: + """Get test cases for ARRAY_BOOLEAN validation with different strategies.""" + return [ + # NONE strategy + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [True, False, True], ArrayValidation.NONE, True, "Valid booleans with NONE" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [1, 0, "true"], ArrayValidation.NONE, True, "Invalid elements with NONE" + ), + # FIRST strategy + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [True, 1, 0], ArrayValidation.FIRST, True, "First valid boolean" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [1, True, False], ArrayValidation.FIRST, False, "First invalid (integer 1)" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [0, True, False], ArrayValidation.FIRST, False, "First invalid (integer 0)" + ), + # ALL strategy + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [True, False, True, False], ArrayValidation.ALL, True, "All valid booleans" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, [True, 1, False], ArrayValidation.ALL, False, "One invalid element (integer)" + ), + ArrayValidationTestCase( + SegmentType.ARRAY_BOOLEAN, + [True, "false", False], + ArrayValidation.ALL, + False, + "One invalid element (string)", + ), + ] + + +class TestSegmentTypeIsValid: + """Test suite for SegmentType.is_valid method covering all non-array types.""" + + @pytest.mark.parametrize("case", get_boolean_cases(), ids=lambda case: case.description) + def test_boolean_validation(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_number_cases(), ids=lambda case: case.description) + def test_number_validation(self, case: ValidationTestCase): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_string_cases(), ids=lambda case: case.description) + def test_string_validation(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_object_cases(), ids=lambda case: case.description) + def test_object_validation(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_secret_cases(), ids=lambda case: case.description) + def test_secret_validation(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_file_cases(), ids=lambda case: case.description) + def test_file_validation(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + @pytest.mark.parametrize("case", get_none_cases(), ids=lambda case: case.description) + def test_none_validation_valid_cases(self, case): + assert case.segment_type.is_valid(case.value) == case.expected + + def test_unsupported_segment_type_raises_assertion_error(self): + """Test that unsupported SegmentType values raise AssertionError.""" + # GROUP is not handled in is_valid method + with pytest.raises(AssertionError, match="this statement should be unreachable"): + SegmentType.GROUP.is_valid("any value") + + +class TestSegmentTypeArrayValidation: + """Test suite for 
SegmentType._validate_array method and array type validation.""" + + def test_array_validation_non_list_values(self): + """Test that non-list values return False for all array types.""" + array_types = [ + SegmentType.ARRAY_ANY, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_FILE, + SegmentType.ARRAY_BOOLEAN, + ] + + non_list_values = [ + "not a list", + 123, + 3.14, + True, + None, + {"key": "value"}, + create_test_file(), + ] + + for array_type in array_types: + for value in non_list_values: + assert array_type.is_valid(value) is False, f"{array_type} should reject {type(value).__name__}" + + def test_empty_array_validation(self): + """Test that empty arrays are valid for all array types regardless of validation strategy.""" + array_types = [ + SegmentType.ARRAY_ANY, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_FILE, + SegmentType.ARRAY_BOOLEAN, + ] + + validation_strategies = [ArrayValidation.NONE, ArrayValidation.FIRST, ArrayValidation.ALL] + + for array_type in array_types: + for strategy in validation_strategies: + assert array_type.is_valid([], strategy) is True, ( + f"{array_type} should accept empty array with {strategy}" + ) + + @pytest.mark.parametrize("case", get_array_any_validation_cases(), ids=lambda case: case.description) + def test_array_any_validation(self, case): + """Test ARRAY_ANY validation accepts any list regardless of content.""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_string_validation_none_cases(), ids=lambda case: case.description) + def test_array_string_validation_with_none_strategy(self, case): + """Test ARRAY_STRING validation with NONE strategy (no element validation).""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_string_validation_first_cases(), ids=lambda case: case.description) + def test_array_string_validation_with_first_strategy(self, case): + """Test ARRAY_STRING validation with FIRST strategy (validate first element only).""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_string_validation_all_cases(), ids=lambda case: case.description) + def test_array_string_validation_with_all_strategy(self, case): + """Test ARRAY_STRING validation with ALL strategy (validate all elements).""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_number_validation_cases(), ids=lambda case: case.description) + def test_array_number_validation_with_different_strategies(self, case): + """Test ARRAY_NUMBER validation with different validation strategies.""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_object_validation_cases(), ids=lambda case: case.description) + def test_array_object_validation_with_different_strategies(self, case): + """Test ARRAY_OBJECT validation with different validation strategies.""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_file_validation_cases(), ids=lambda case: case.description) + def test_array_file_validation_with_different_strategies(self, case): + """Test ARRAY_FILE validation with 
different validation strategies.""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + @pytest.mark.parametrize("case", get_array_boolean_validation_cases(), ids=lambda case: case.description) + def test_array_boolean_validation_with_different_strategies(self, case): + """Test ARRAY_BOOLEAN validation with different validation strategies.""" + assert case.segment_type.is_valid(case.value, case.array_validation) == case.expected + + def test_default_array_validation_strategy(self): + """Test that the default array validation strategy validates all elements.""" + # When no array_validation parameter is provided, every element is checked, + # so an invalid element at any position makes the array invalid. + assert SegmentType.ARRAY_STRING.is_valid(["valid", 123]) is False # second element invalid + assert SegmentType.ARRAY_STRING.is_valid([123, "valid"]) is False # first element invalid + + assert SegmentType.ARRAY_NUMBER.is_valid([42, "invalid"]) is False # second element invalid + assert SegmentType.ARRAY_NUMBER.is_valid(["invalid", 42]) is False # first element invalid + + def test_array_validation_edge_cases(self): + """Test edge cases for array validation.""" + # Test with nested arrays (should be invalid for specific array types) + nested_array = [["nested", "array"], ["another", "nested"]] + + assert SegmentType.ARRAY_STRING.is_valid(nested_array, ArrayValidation.FIRST) is False + assert SegmentType.ARRAY_STRING.is_valid(nested_array, ArrayValidation.ALL) is False + assert SegmentType.ARRAY_ANY.is_valid(nested_array, ArrayValidation.ALL) is True + + # Test with very large arrays (performance consideration) + large_valid_array = ["string"] * 1000 + large_mixed_array = ["string"] * 999 + [123] # Last element invalid + + assert SegmentType.ARRAY_STRING.is_valid(large_valid_array, ArrayValidation.ALL) is True + assert SegmentType.ARRAY_STRING.is_valid(large_mixed_array, ArrayValidation.ALL) is False + assert SegmentType.ARRAY_STRING.is_valid(large_mixed_array, ArrayValidation.FIRST) is True + + +class TestSegmentTypeValidationIntegration: + """Integration tests for SegmentType validation covering interactions between methods.""" + + def test_non_array_types_ignore_array_validation_parameter(self): + """Test that non-array types ignore the array_validation parameter.""" + non_array_types = [ + SegmentType.STRING, + SegmentType.NUMBER, + SegmentType.BOOLEAN, + SegmentType.OBJECT, + SegmentType.SECRET, + SegmentType.FILE, + SegmentType.NONE, + ] + + for segment_type in non_array_types: + # Create appropriate valid value for each type + valid_value: Any + if segment_type == SegmentType.STRING: + valid_value = "test" + elif segment_type == SegmentType.NUMBER: + valid_value = 42 + elif segment_type == SegmentType.BOOLEAN: + valid_value = True + elif segment_type == SegmentType.OBJECT: + valid_value = {"key": "value"} + elif segment_type == SegmentType.SECRET: + valid_value = "secret" + elif segment_type == SegmentType.FILE: + valid_value = create_test_file() + elif segment_type == SegmentType.NONE: + valid_value = None + else: + continue # Skip unsupported types + + # All array validation strategies should give the same result + result_none = segment_type.is_valid(valid_value, ArrayValidation.NONE) + result_first = segment_type.is_valid(valid_value, ArrayValidation.FIRST) + result_all = segment_type.is_valid(valid_value, ArrayValidation.ALL) + + assert result_none is True and result_first is True and result_all is True, ( + f"{segment_type} should ignore array_validation parameter" + ) + 
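The three strategies exercised above reduce to a small amount of logic. As a reference, here is an illustrative sketch of that dispatch, reconstructed from the asserted behavior rather than taken from the actual `_validate_array` source; `element_is_valid` stands in for the per-element type check:

```python
from collections.abc import Callable
from typing import Any


# Illustrative reconstruction of the array-validation dispatch the tests above
# exercise: non-lists are rejected, empty lists always pass, and the strategy
# decides how many elements receive the per-element check.
def validate_array(element_is_valid: Callable[[Any], bool], values: Any, strategy: str) -> bool:
    if not isinstance(values, list):
        return False
    if not values:
        return True  # empty arrays are valid under every strategy
    if strategy == "none":
        return True  # only the container type is checked
    if strategy == "first":
        return element_is_valid(values[0])
    return all(element_is_valid(v) for v in values)  # "all"


assert validate_array(lambda v: isinstance(v, str), ["a", 1], "first") is True
assert validate_array(lambda v: isinstance(v, str), ["a", 1], "all") is False
```

+ def test_comprehensive_type_coverage(self): + """Test that all 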
SegmentType enum values are covered in validation tests.""" + all_segment_types = set(SegmentType) + + # Types that should be handled by is_valid method + handled_types = { + # Non-array types + SegmentType.STRING, + SegmentType.NUMBER, + SegmentType.BOOLEAN, + SegmentType.OBJECT, + SegmentType.SECRET, + SegmentType.FILE, + SegmentType.NONE, + # Array types + SegmentType.ARRAY_ANY, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_FILE, + SegmentType.ARRAY_BOOLEAN, + } + + # Types that is_valid does not dispatch on directly: GROUP hits the + # unreachable assertion, while integer and float values are validated + # through SegmentType.NUMBER rather than through their own branches. + unhandled_types = { + SegmentType.GROUP, + SegmentType.INTEGER, # values validated via SegmentType.NUMBER + SegmentType.FLOAT, # values validated via SegmentType.NUMBER + } + + # Verify all types are accounted for + assert handled_types | unhandled_types == all_segment_types, "All SegmentType values should be categorized" + + # Test that handled types work correctly + for segment_type in handled_types: + if segment_type.is_array_type(): + # Test with empty array (should always be valid) + assert segment_type.is_valid([]) is True, f"{segment_type} should accept empty array" + else: + # Test with appropriate valid value + if segment_type == SegmentType.STRING: + assert segment_type.is_valid("test") is True + elif segment_type == SegmentType.NUMBER: + assert segment_type.is_valid(42) is True + elif segment_type == SegmentType.BOOLEAN: + assert segment_type.is_valid(True) is True + elif segment_type == SegmentType.OBJECT: + assert segment_type.is_valid({}) is True + elif segment_type == SegmentType.SECRET: + assert segment_type.is_valid("secret") is True + elif segment_type == SegmentType.FILE: + assert segment_type.is_valid(create_test_file()) is True + elif segment_type == SegmentType.NONE: + assert segment_type.is_valid(None) is True + + def test_boolean_vs_integer_type_distinction(self): + """Test the important distinction between boolean and integer types in validation.""" + # In Python, bool is a subclass of int, so validation must distinguish the two explicitly + + # Boolean type should only accept actual booleans, not integers + assert SegmentType.BOOLEAN.is_valid(True) is True + assert SegmentType.BOOLEAN.is_valid(False) is True + assert SegmentType.BOOLEAN.is_valid(1) is False # Integer 1, not boolean + assert SegmentType.BOOLEAN.is_valid(0) is False # Integer 0, not boolean + + # Number type should accept both integers and floats, including booleans (since bool is subclass of int) + assert SegmentType.NUMBER.is_valid(42) is True + assert SegmentType.NUMBER.is_valid(3.14) is True + assert SegmentType.NUMBER.is_valid(True) is True # bool is subclass of int + assert SegmentType.NUMBER.is_valid(False) is True # bool is subclass of int + + def test_array_validation_recursive_behavior(self): + """Test that array validation correctly handles recursive validation calls.""" + # When validating array elements, _validate_array calls is_valid recursively + # with ArrayValidation.NONE to avoid infinite recursion + + # Test nested validation doesn't cause issues + nested_arrays = [["inner", "array"], ["another", "inner"]] + + # ARRAY_ANY should accept nested arrays + assert SegmentType.ARRAY_ANY.is_valid(nested_arrays, ArrayValidation.ALL) is True + + # ARRAY_STRING should reject nested arrays (first element is not a string) + assert SegmentType.ARRAY_STRING.is_valid(nested_arrays, ArrayValidation.FIRST) is False + assert SegmentType.ARRAY_STRING.is_valid(nested_arrays, ArrayValidation.ALL) is False 
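Since both this suite and the variable-factory changes later in this PR hinge on Python's bool/int relationship, a minimal standalone illustration may help; this is plain Python, not the Dify source:

```python
# bool is a subclass of int, so isinstance(True, int) is True. Any dispatcher
# that wants "boolean" and "number" to stay distinct must check bool first.
def classify(value: object) -> str:
    if isinstance(value, bool):
        return "boolean"
    if isinstance(value, (int, float)):
        return "number"
    raise TypeError(f"unsupported: {type(value).__name__}")


assert classify(True) == "boolean"  # despite isinstance(True, int)
assert classify(1) == "number"
assert classify(3.14) == "number"
```

diff --git 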
a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py index 137e8b889d..8b1b9a55bc 100644 --- a/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py +++ b/api/tests/unit_tests/core/workflow/nodes/answer/test_answer_stream_processor.py @@ -1,6 +1,5 @@ import uuid from collections.abc import Generator -from datetime import UTC, datetime from core.workflow.entities.variable_pool import VariablePool from core.workflow.graph_engine.entities.event import ( @@ -15,6 +14,7 @@ from core.workflow.nodes.answer.answer_stream_processor import AnswerStreamProce from core.workflow.nodes.enums import NodeType from core.workflow.nodes.start.entities import StartNodeData from core.workflow.system_variable import SystemVariable +from libs.datetime_utils import naive_utc_now def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]: @@ -29,7 +29,7 @@ def _recursive_process(graph: Graph, next_node_id: str) -> Generator[GraphEngine def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEvent, None, None]: - route_node_state = RouteNodeState(node_id=next_node_id, start_at=datetime.now(UTC).replace(tzinfo=None)) + route_node_state = RouteNodeState(node_id=next_node_id, start_at=naive_utc_now()) parallel_id = graph.node_parallel_mapping.get(next_node_id) parallel_start_node_id = None @@ -68,7 +68,7 @@ def _publish_events(graph: Graph, next_node_id: str) -> Generator[GraphEngineEve ) route_node_state.status = RouteNodeState.Status.SUCCESS - route_node_state.finished_at = datetime.now(UTC).replace(tzinfo=None) + route_node_state.finished_at = naive_utc_now() yield NodeRunSucceededEvent( id=node_execution_id, node_id=next_node_id, diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py index 71b3a8f7d8..2d8d433c46 100644 --- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py @@ -1,4 +1,5 @@ import httpx +import pytest from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File, FileTransferMethod, FileType @@ -20,7 +21,7 @@ from models.enums import UserFrom from models.workflow import WorkflowType -def test_http_request_node_binary_file(monkeypatch): +def test_http_request_node_binary_file(monkeypatch: pytest.MonkeyPatch): data = HttpRequestNodeData( title="test", method="post", @@ -110,7 +111,7 @@ def test_http_request_node_binary_file(monkeypatch): assert result.outputs["body"] == "test" -def test_http_request_node_form_with_file(monkeypatch): +def test_http_request_node_form_with_file(monkeypatch: pytest.MonkeyPatch): data = HttpRequestNodeData( title="test", method="post", @@ -211,7 +212,7 @@ def test_http_request_node_form_with_file(monkeypatch): assert result.outputs["body"] == "" -def test_http_request_node_form_with_multiple_files(monkeypatch): +def test_http_request_node_form_with_multiple_files(monkeypatch: pytest.MonkeyPatch): data = HttpRequestNodeData( title="test", method="post", diff --git a/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/__init__.py b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git 
a/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_entities.py b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_entities.py new file mode 100644 index 0000000000..b28d1d3d0a --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_entities.py @@ -0,0 +1,27 @@ +from core.variables.types import SegmentType +from core.workflow.nodes.parameter_extractor.entities import ParameterConfig + + +class TestParameterConfig: + def test_select_type(self): + data = { + "name": "yes_or_no", + "type": "select", + "options": ["yes", "no"], + "description": "a simple select made of `yes` and `no`", + "required": True, + } + + pc = ParameterConfig.model_validate(data) + assert pc.type == SegmentType.STRING + assert pc.options == data["options"] + + def test_validate_bool_type(self): + data = { + "name": "boolean", + "type": "bool", + "description": "a simple boolean parameter", + "required": True, + } + pc = ParameterConfig.model_validate(data) + assert pc.type == SegmentType.BOOLEAN diff --git a/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py new file mode 100644 index 0000000000..b9947d4693 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/parameter_extractor/test_parameter_extractor_node.py @@ -0,0 +1,567 @@ +""" +Test cases for ParameterExtractorNode._validate_result and _transform_result methods. +""" + +from dataclasses import dataclass +from typing import Any + +import pytest + +from core.model_runtime.entities import LLMMode +from core.variables.types import SegmentType +from core.workflow.nodes.llm import ModelConfig, VisionConfig +from core.workflow.nodes.parameter_extractor.entities import ParameterConfig, ParameterExtractorNodeData +from core.workflow.nodes.parameter_extractor.exc import ( + InvalidNumberOfParametersError, + InvalidSelectValueError, + InvalidValueTypeError, + RequiredParameterMissingError, +) +from core.workflow.nodes.parameter_extractor.parameter_extractor_node import ParameterExtractorNode +from factories.variable_factory import build_segment_with_type + + +@dataclass +class ValidTestCase: + """Test case data for valid scenarios.""" + + name: str + parameters: list[ParameterConfig] + result: dict[str, Any] + + def get_name(self) -> str: + return self.name + + +@dataclass +class ErrorTestCase: + """Test case data for error scenarios.""" + + name: str + parameters: list[ParameterConfig] + result: dict[str, Any] + expected_exception: type[Exception] + expected_message: str + + def get_name(self) -> str: + return self.name + + +@dataclass +class TransformTestCase: + """Test case data for transformation scenarios.""" + + name: str + parameters: list[ParameterConfig] + input_result: dict[str, Any] + expected_result: dict[str, Any] + + def get_name(self) -> str: + return self.name + + +class TestParameterExtractorNodeMethods: + """Test helper class that provides access to the methods under test.""" + + def validate_result(self, data: ParameterExtractorNodeData, result: dict[str, Any]) -> dict[str, Any]: + """Wrapper to call _validate_result method.""" + node = ParameterExtractorNode.__new__(ParameterExtractorNode) + return node._validate_result(data=data, result=result) + + def transform_result(self, data: ParameterExtractorNodeData, result: dict[str, Any]) -> dict[str, Any]: + """Wrapper to call _transform_result method.""" + node = 
ParameterExtractorNode.__new__(ParameterExtractorNode) + return node._transform_result(data=data, result=result) + + +class TestValidateResult: + """Test cases for _validate_result method.""" + + @staticmethod + def get_valid_test_cases() -> list[ValidTestCase]: + """Get test cases that should pass validation.""" + return [ + ValidTestCase( + name="single_string_parameter", + parameters=[ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True)], + result={"name": "John"}, + ), + ValidTestCase( + name="single_number_parameter_int", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + result={"age": 25}, + ), + ValidTestCase( + name="single_number_parameter_float", + parameters=[ParameterConfig(name="price", type=SegmentType.NUMBER, description="Price", required=True)], + result={"price": 19.99}, + ), + ValidTestCase( + name="single_bool_parameter_true", + parameters=[ + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True) + ], + result={"active": True}, + ), + ValidTestCase( + name="single_bool_parameter_false", + parameters=[ + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True) + ], + result={"active": False}, + ), + ValidTestCase( + name="select_parameter_valid_option", + parameters=[ + ParameterConfig( + name="status", + type="select", # pyright: ignore[reportArgumentType] + description="Status", + required=True, + options=["active", "inactive"], + ) + ], + result={"status": "active"}, + ), + ValidTestCase( + name="array_string_parameter", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + result={"tags": ["tag1", "tag2", "tag3"]}, + ), + ValidTestCase( + name="array_number_parameter", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + result={"scores": [85, 92.5, 78]}, + ), + ValidTestCase( + name="array_object_parameter", + parameters=[ + ParameterConfig(name="items", type=SegmentType.ARRAY_OBJECT, description="Items", required=True) + ], + result={"items": [{"name": "item1"}, {"name": "item2"}]}, + ), + ValidTestCase( + name="multiple_parameters", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True), + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True), + ], + result={"name": "John", "age": 25, "active": True}, + ), + ValidTestCase( + name="optional_parameter_present", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ParameterConfig(name="nickname", type=SegmentType.STRING, description="Nickname", required=False), + ], + result={"name": "John", "nickname": "Johnny"}, + ), + ValidTestCase( + name="empty_array_parameter", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + result={"tags": []}, + ), + ] + 
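Taken together, the valid cases above and the error cases below pin down the contract of `_validate_result`. A condensed sketch of that contract, reconstructed from the expected exceptions (an assumption, not the node's actual implementation):

```python
from core.variables.types import ArrayValidation
from core.workflow.nodes.parameter_extractor.entities import ParameterConfig
from core.workflow.nodes.parameter_extractor.exc import (
    InvalidNumberOfParametersError,
    InvalidSelectValueError,
    InvalidValueTypeError,
    RequiredParameterMissingError,
)


# Reconstructed from the test expectations: parameter count is checked first,
# then each present value against its select options or segment type, and
# finally each absent parameter against its required flag.
def validate_result_sketch(parameters: list[ParameterConfig], result: dict) -> dict:
    if len(parameters) != len(result):
        raise InvalidNumberOfParametersError("Invalid number of parameters")
    for param in parameters:
        if param.name in result:
            value = result[param.name]
            if param.options:  # "select" parameters validate against their options
                if value not in param.options:
                    raise InvalidSelectValueError(f"Invalid `select` value for parameter {param.name}")
            elif not param.type.is_valid(value, ArrayValidation.ALL):
                raise InvalidValueTypeError(f"Invalid value for parameter {param.name}")
        elif param.required:
            raise RequiredParameterMissingError(f"Parameter {param.name} is required")
    return result
```

+ @staticmethod + def get_error_test_cases() -> list[ErrorTestCase]: + """Get test cases that should raise exceptions.""" + return [ + ErrorTestCase( 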
name="invalid_number_of_parameters_too_few", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True), + ], + result={"name": "John"}, + expected_exception=InvalidNumberOfParametersError, + expected_message="Invalid number of parameters", + ), + ErrorTestCase( + name="invalid_number_of_parameters_too_many", + parameters=[ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True)], + result={"name": "John", "age": 25}, + expected_exception=InvalidNumberOfParametersError, + expected_message="Invalid number of parameters", + ), + ErrorTestCase( + name="invalid_string_value_none", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ], + result={"name": None}, # Parameter present but None value, will trigger type check first + expected_exception=InvalidValueTypeError, + expected_message="Invalid value for parameter name, expected segment type: string, actual_type: none", + ), + ErrorTestCase( + name="invalid_select_value", + parameters=[ + ParameterConfig( + name="status", + type="select", # type: ignore + description="Status", + required=True, + options=["active", "inactive"], + ) + ], + result={"status": "pending"}, + expected_exception=InvalidSelectValueError, + expected_message="Invalid `select` value for parameter status", + ), + ErrorTestCase( + name="invalid_number_value_string", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + result={"age": "twenty-five"}, + expected_exception=InvalidValueTypeError, + expected_message="Invalid value for parameter age, expected segment type: number, actual_type: string", + ), + ErrorTestCase( + name="invalid_bool_value_string", + parameters=[ + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True) + ], + result={"active": "yes"}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter active, expected segment type: boolean, actual_type: string" + ), + ), + ErrorTestCase( + name="invalid_string_value_number", + parameters=[ + ParameterConfig( + name="description", type=SegmentType.STRING, description="Description", required=True + ) + ], + result={"description": 123}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter description, expected segment type: string, actual_type: integer" + ), + ), + ErrorTestCase( + name="invalid_array_value_not_list", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + result={"tags": "tag1,tag2,tag3"}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter tags, expected segment type: array[string], actual_type: string" + ), + ), + ErrorTestCase( + name="invalid_array_number_wrong_element_type", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + result={"scores": [85, "ninety-two", 78]}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter scores, expected segment type: array[number], actual_type: array[any]" + ), + ), + ErrorTestCase( + name="invalid_array_string_wrong_element_type", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + 
result={"tags": ["tag1", 123, "tag3"]}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter tags, expected segment type: array[string], actual_type: array[any]" + ), + ), + ErrorTestCase( + name="invalid_array_object_wrong_element_type", + parameters=[ + ParameterConfig(name="items", type=SegmentType.ARRAY_OBJECT, description="Items", required=True) + ], + result={"items": [{"name": "item1"}, "item2"]}, + expected_exception=InvalidValueTypeError, + expected_message=( + "Invalid value for parameter items, expected segment type: array[object], actual_type: array[any]" + ), + ), + ErrorTestCase( + name="required_parameter_missing", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=False), + ], + result={"age": 25, "other": "value"}, # Missing required 'name' parameter, but has correct count + expected_exception=RequiredParameterMissingError, + expected_message="Parameter name is required", + ), + ] + + @pytest.mark.parametrize("test_case", get_valid_test_cases(), ids=ValidTestCase.get_name) + def test_validate_result_valid_cases(self, test_case): + """Test _validate_result with valid inputs.""" + helper = TestParameterExtractorNodeMethods() + + node_data = ParameterExtractorNodeData( + title="Test Node", + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + query=["test_query"], + parameters=test_case.parameters, + reasoning_mode="function_call", + vision=VisionConfig(), + ) + + result = helper.validate_result(data=node_data, result=test_case.result) + assert result == test_case.result, f"Failed for case: {test_case.name}" + + @pytest.mark.parametrize("test_case", get_error_test_cases(), ids=ErrorTestCase.get_name) + def test_validate_result_error_cases(self, test_case): + """Test _validate_result with invalid inputs that should raise exceptions.""" + helper = TestParameterExtractorNodeMethods() + + node_data = ParameterExtractorNodeData( + title="Test Node", + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + query=["test_query"], + parameters=test_case.parameters, + reasoning_mode="function_call", + vision=VisionConfig(), + ) + + with pytest.raises(test_case.expected_exception) as exc_info: + helper.validate_result(data=node_data, result=test_case.result) + + assert test_case.expected_message in str(exc_info.value), f"Failed for case: {test_case.name}" + + +class TestTransformResult: + """Test cases for _transform_result method.""" + + @staticmethod + def get_transform_test_cases() -> list[TransformTestCase]: + """Get test cases for result transformation.""" + return [ + # String parameter transformation + TransformTestCase( + name="string_parameter_present", + parameters=[ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True)], + input_result={"name": "John"}, + expected_result={"name": "John"}, + ), + TransformTestCase( + name="string_parameter_missing", + parameters=[ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True)], + input_result={}, + expected_result={"name": ""}, + ), + # Number parameter transformation + TransformTestCase( + name="number_parameter_int_present", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + input_result={"age": 25}, + expected_result={"age": 25}, + ), + 
TransformTestCase( + name="number_parameter_float_present", + parameters=[ParameterConfig(name="price", type=SegmentType.NUMBER, description="Price", required=True)], + input_result={"price": 19.99}, + expected_result={"price": 19.99}, + ), + TransformTestCase( + name="number_parameter_missing", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + input_result={}, + expected_result={"age": 0}, + ), + # Bool parameter transformation + TransformTestCase( + name="bool_parameter_missing", + parameters=[ + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True) + ], + input_result={}, + expected_result={"active": False}, + ), + # Select parameter transformation + TransformTestCase( + name="select_parameter_present", + parameters=[ + ParameterConfig( + name="status", + type="select", # type: ignore + description="Status", + required=True, + options=["active", "inactive"], + ) + ], + input_result={"status": "active"}, + expected_result={"status": "active"}, + ), + TransformTestCase( + name="select_parameter_missing", + parameters=[ + ParameterConfig( + name="status", + type="select", # type: ignore + description="Status", + required=True, + options=["active", "inactive"], + ) + ], + input_result={}, + expected_result={"status": ""}, + ), + # Array parameter transformation - present cases + TransformTestCase( + name="array_string_parameter_present", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + input_result={"tags": ["tag1", "tag2"]}, + expected_result={ + "tags": build_segment_with_type(segment_type=SegmentType.ARRAY_STRING, value=["tag1", "tag2"]) + }, + ), + TransformTestCase( + name="array_number_parameter_present", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + input_result={"scores": [85, 92.5]}, + expected_result={ + "scores": build_segment_with_type(segment_type=SegmentType.ARRAY_NUMBER, value=[85, 92.5]) + }, + ), + TransformTestCase( + name="array_number_parameter_with_string_conversion", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + input_result={"scores": [85, "92.5", "78"]}, + expected_result={ + "scores": build_segment_with_type(segment_type=SegmentType.ARRAY_NUMBER, value=[85, 92.5, 78]) + }, + ), + TransformTestCase( + name="array_object_parameter_present", + parameters=[ + ParameterConfig(name="items", type=SegmentType.ARRAY_OBJECT, description="Items", required=True) + ], + input_result={"items": [{"name": "item1"}, {"name": "item2"}]}, + expected_result={ + "items": build_segment_with_type( + segment_type=SegmentType.ARRAY_OBJECT, value=[{"name": "item1"}, {"name": "item2"}] + ) + }, + ), + # Array parameter transformation - missing cases + TransformTestCase( + name="array_string_parameter_missing", + parameters=[ + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True) + ], + input_result={}, + expected_result={"tags": build_segment_with_type(segment_type=SegmentType.ARRAY_STRING, value=[])}, + ), + TransformTestCase( + name="array_number_parameter_missing", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + input_result={}, + expected_result={"scores": build_segment_with_type(segment_type=SegmentType.ARRAY_NUMBER, value=[])}, + ), + TransformTestCase( + 
name="array_object_parameter_missing", + parameters=[ + ParameterConfig(name="items", type=SegmentType.ARRAY_OBJECT, description="Items", required=True) + ], + input_result={}, + expected_result={"items": build_segment_with_type(segment_type=SegmentType.ARRAY_OBJECT, value=[])}, + ), + # Multiple parameters transformation + TransformTestCase( + name="multiple_parameters_mixed", + parameters=[ + ParameterConfig(name="name", type=SegmentType.STRING, description="Name", required=True), + ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True), + ParameterConfig(name="active", type=SegmentType.BOOLEAN, description="Active", required=True), + ParameterConfig(name="tags", type=SegmentType.ARRAY_STRING, description="Tags", required=True), + ], + input_result={"name": "John", "age": 25}, + expected_result={ + "name": "John", + "age": 25, + "active": False, + "tags": build_segment_with_type(segment_type=SegmentType.ARRAY_STRING, value=[]), + }, + ), + # Number parameter transformation with string conversion + TransformTestCase( + name="number_parameter_string_to_float", + parameters=[ParameterConfig(name="price", type=SegmentType.NUMBER, description="Price", required=True)], + input_result={"price": "19.99"}, + expected_result={"price": 19.99}, # String not converted, falls back to default + ), + TransformTestCase( + name="number_parameter_string_to_int", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + input_result={"age": "25"}, + expected_result={"age": 25}, # String not converted, falls back to default + ), + TransformTestCase( + name="number_parameter_invalid_string", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + input_result={"age": "invalid_number"}, + expected_result={"age": 0}, # Invalid string conversion fails, falls back to default + ), + TransformTestCase( + name="number_parameter_non_string_non_number", + parameters=[ParameterConfig(name="age", type=SegmentType.NUMBER, description="Age", required=True)], + input_result={"age": ["not_a_number"]}, # Non-string, non-number value + expected_result={"age": 0}, # Falls back to default + ), + TransformTestCase( + name="array_number_parameter_with_invalid_string_conversion", + parameters=[ + ParameterConfig(name="scores", type=SegmentType.ARRAY_NUMBER, description="Scores", required=True) + ], + input_result={"scores": [85, "invalid", "78"]}, + expected_result={ + "scores": build_segment_with_type( + segment_type=SegmentType.ARRAY_NUMBER, value=[85, 78] + ) # Invalid string skipped + }, + ), + ] + + @pytest.mark.parametrize("test_case", get_transform_test_cases(), ids=TransformTestCase.get_name) + def test_transform_result_cases(self, test_case): + """Test _transform_result with various inputs.""" + helper = TestParameterExtractorNodeMethods() + + node_data = ParameterExtractorNodeData( + title="Test Node", + model=ModelConfig(provider="openai", name="gpt-3.5-turbo", mode=LLMMode.CHAT, completion_params={}), + query=["test_query"], + parameters=test_case.parameters, + reasoning_mode="function_call", + vision=VisionConfig(), + ) + + result = helper.transform_result(data=node_data, result=test_case.input_result) + assert result == test_case.expected_result, ( + f"Failed for case: {test_case.name}. 
Expected: {test_case.expected_result}, Got: {result}" + ) diff --git a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py index 8383aee0e4..36a6fbb53e 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_if_else.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_if_else.py @@ -2,6 +2,8 @@ import time import uuid from unittest.mock import MagicMock, Mock +import pytest + from core.app.entities.app_invoke_entities import InvokeFrom from core.file import File, FileTransferMethod, FileType from core.variables import ArrayFileSegment @@ -272,3 +274,220 @@ def test_array_file_contains_file_name(): assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED assert result.outputs is not None assert result.outputs["result"] is True + + +def _get_test_conditions() -> list: + conditions = [ + # Test boolean "is" operator + {"comparison_operator": "is", "variable_selector": ["start", "bool_true"], "value": "true"}, + # Test boolean "is not" operator + {"comparison_operator": "is not", "variable_selector": ["start", "bool_false"], "value": "true"}, + # Test boolean "=" operator + {"comparison_operator": "=", "variable_selector": ["start", "bool_true"], "value": "1"}, + # Test boolean "≠" operator + {"comparison_operator": "≠", "variable_selector": ["start", "bool_false"], "value": "1"}, + # Test boolean "not null" operator + {"comparison_operator": "not null", "variable_selector": ["start", "bool_true"]}, + # Test boolean array "contains" operator + {"comparison_operator": "contains", "variable_selector": ["start", "bool_array"], "value": "true"}, + # Test boolean "in" operator + { + "comparison_operator": "in", + "variable_selector": ["start", "bool_true"], + "value": ["true", "false"], + }, + ] + return [Condition.model_validate(i) for i in conditions] + + +def _get_condition_test_id(c: Condition): + return c.comparison_operator + + +@pytest.mark.parametrize("condition", _get_test_conditions(), ids=_get_condition_test_id) +def test_execute_if_else_boolean_conditions(condition: Condition): + """Test IfElseNode with boolean conditions using various operators""" + graph_config = {"edges": [], "nodes": [{"data": {"type": "start"}, "id": "start"}]} + + graph = Graph.init(graph_config=graph_config) + + init_params = GraphInitParams( + tenant_id="1", + app_id="1", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="1", + graph_config=graph_config, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ) + + # construct variable pool with boolean values + pool = VariablePool( + system_variables=SystemVariable(files=[], user_id="aaa"), + ) + pool.add(["start", "bool_true"], True) + pool.add(["start", "bool_false"], False) + pool.add(["start", "bool_array"], [True, False, True]) + pool.add(["start", "mixed_array"], [True, "false", 1, 0]) + + node_data = { + "title": "Boolean Test", + "type": "if-else", + "logical_operator": "and", + "conditions": [condition.model_dump()], + } + node = IfElseNode( + id=str(uuid.uuid4()), + graph_init_params=init_params, + graph=graph, + graph_runtime_state=GraphRuntimeState(variable_pool=pool, start_at=time.perf_counter()), + config={"id": "if-else", "data": node_data}, + ) + node.init_node_data(node_data) + + # Mock db.session.close() + db.session.close = MagicMock() + + # execute node + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs is not None + assert result.outputs["result"] is True + + 
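The condition fixtures above compare boolean variables against string literals such as "true" and "1". A rough sketch of the coercion those operators imply — a hypothetical helper for illustration, not the IfElseNode condition processor itself:

```python
# Hypothetical helper showing how a string condition value could be normalized
# before comparison with a boolean variable; the real condition processing in
# IfElseNode may differ in detail.
def parse_bool_literal(value: str) -> bool:
    normalized = value.strip().lower()
    if normalized in {"true", "1"}:
        return True
    if normalized in {"false", "0"}:
        return False
    raise ValueError(f"not a boolean literal: {value!r}")


assert parse_bool_literal("true") is True
assert parse_bool_literal("1") is True
assert parse_bool_literal("0") is False
```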
+def test_execute_if_else_boolean_false_conditions(): + """Test IfElseNode with boolean conditions that should evaluate to false""" + graph_config = {"edges": [], "nodes": [{"data": {"type": "start"}, "id": "start"}]} + + graph = Graph.init(graph_config=graph_config) + + init_params = GraphInitParams( + tenant_id="1", + app_id="1", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="1", + graph_config=graph_config, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ) + + # construct variable pool with boolean values + pool = VariablePool( + system_variables=SystemVariable(files=[], user_id="aaa"), + ) + pool.add(["start", "bool_true"], True) + pool.add(["start", "bool_false"], False) + pool.add(["start", "bool_array"], [True, False, True]) + + node_data = { + "title": "Boolean False Test", + "type": "if-else", + "logical_operator": "or", + "conditions": [ + # Test boolean "is" operator (should be false) + {"comparison_operator": "is", "variable_selector": ["start", "bool_true"], "value": "false"}, + # Test boolean "=" operator (should be false) + {"comparison_operator": "=", "variable_selector": ["start", "bool_false"], "value": "1"}, + # Test boolean "not contains" operator (should be false) + { + "comparison_operator": "not contains", + "variable_selector": ["start", "bool_array"], + "value": "true", + }, + ], + } + + node = IfElseNode( + id=str(uuid.uuid4()), + graph_init_params=init_params, + graph=graph, + graph_runtime_state=GraphRuntimeState(variable_pool=pool, start_at=time.perf_counter()), + config={ + "id": "if-else", + "data": node_data, + }, + ) + node.init_node_data(node_data) + + # Mock db.session.close() + db.session.close = MagicMock() + + # execute node + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs is not None + assert result.outputs["result"] is False + + +def test_execute_if_else_boolean_cases_structure(): + """Test IfElseNode with boolean conditions using the new cases structure""" + graph_config = {"edges": [], "nodes": [{"data": {"type": "start"}, "id": "start"}]} + + graph = Graph.init(graph_config=graph_config) + + init_params = GraphInitParams( + tenant_id="1", + app_id="1", + workflow_type=WorkflowType.WORKFLOW, + workflow_id="1", + graph_config=graph_config, + user_id="1", + user_from=UserFrom.ACCOUNT, + invoke_from=InvokeFrom.DEBUGGER, + call_depth=0, + ) + + # construct variable pool with boolean values + pool = VariablePool( + system_variables=SystemVariable(files=[], user_id="aaa"), + ) + pool.add(["start", "bool_true"], True) + pool.add(["start", "bool_false"], False) + + node_data = { + "title": "Boolean Cases Test", + "type": "if-else", + "cases": [ + { + "case_id": "true", + "logical_operator": "and", + "conditions": [ + { + "comparison_operator": "is", + "variable_selector": ["start", "bool_true"], + "value": "true", + }, + { + "comparison_operator": "is not", + "variable_selector": ["start", "bool_false"], + "value": "true", + }, + ], + } + ], + } + node = IfElseNode( + id=str(uuid.uuid4()), + graph_init_params=init_params, + graph=graph, + graph_runtime_state=GraphRuntimeState(variable_pool=pool, start_at=time.perf_counter()), + config={"id": "if-else", "data": node_data}, + ) + node.init_node_data(node_data) + + # Mock db.session.close() + db.session.close = MagicMock() + + # execute node + result = node._run() + + assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert result.outputs is not None + assert 
result.outputs["result"] is True + assert result.outputs["selected_case_id"] == "true" diff --git a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py index 5fc9eab2df..d4d6aa0387 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py @@ -11,7 +11,8 @@ from core.workflow.nodes.list_operator.entities import ( FilterCondition, Limit, ListOperatorNodeData, - OrderBy, + Order, + OrderByConfig, ) from core.workflow.nodes.list_operator.exc import InvalidKeyError from core.workflow.nodes.list_operator.node import ListOperatorNode, _get_file_extract_string_func @@ -27,7 +28,7 @@ def list_operator_node(): FilterCondition(key="type", comparison_operator="in", value=[FileType.IMAGE, FileType.DOCUMENT]) ], ), - "order_by": OrderBy(enabled=False, value="asc"), + "order_by": OrderByConfig(enabled=False, value=Order.ASC), "limit": Limit(enabled=False, size=0), "extract_by": ExtractConfig(enabled=False, serial="1"), "title": "Test Title", diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py index 4866db1fdb..1d2eba1e71 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py @@ -1,5 +1,4 @@ import json -from datetime import UTC, datetime from unittest.mock import MagicMock import pytest @@ -23,6 +22,7 @@ from core.workflow.repositories.workflow_execution_repository import WorkflowExe from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.system_variable import SystemVariable from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager +from libs.datetime_utils import naive_utc_now from models.enums import CreatorUserRole from models.model import AppMode from models.workflow import Workflow, WorkflowRun @@ -145,8 +145,8 @@ def real_workflow(): workflow.graph = json.dumps(graph_data) workflow.features = json.dumps({"file_upload": {"enabled": False}}) workflow.created_by = "test-user-id" - workflow.created_at = datetime.now(UTC).replace(tzinfo=None) - workflow.updated_at = datetime.now(UTC).replace(tzinfo=None) + workflow.created_at = naive_utc_now() + workflow.updated_at = naive_utc_now() workflow._environment_variables = "{}" workflow._conversation_variables = "{}" @@ -169,7 +169,7 @@ def real_workflow_run(): workflow_run.outputs = json.dumps({"answer": "test answer"}) workflow_run.created_by_role = CreatorUserRole.ACCOUNT workflow_run.created_by = "test-user-id" - workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None) + workflow_run.created_at = naive_utc_now() return workflow_run @@ -211,7 +211,7 @@ def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execu workflow_type=WorkflowType.CHAT, graph={"nodes": [], "edges": []}, inputs={"query": "test query"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Pre-populate the cache with the workflow execution @@ -245,7 +245,7 @@ def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execut workflow_type=WorkflowType.CHAT, graph={"nodes": [], "edges": []}, inputs={"query": "test query"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Pre-populate the cache with the workflow execution 
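These hunks mechanically replace the inline expression with the `naive_utc_now()` helper. Judging from the expression being replaced, the helper is presumably equivalent to the following sketch:

```python
from datetime import UTC, datetime


# Presumed shape of libs.datetime_utils.naive_utc_now, inferred from the
# expression it replaces in these hunks: the current UTC time with the
# timezone info stripped off.
def naive_utc_now() -> datetime:
    return datetime.now(UTC).replace(tzinfo=None)
```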
@@ -282,7 +282,7 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execu workflow_type=WorkflowType.CHAT, graph={"nodes": [], "edges": []}, inputs={"query": "test query"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Pre-populate the cache with the workflow execution @@ -335,7 +335,7 @@ def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_work workflow_type=WorkflowType.CHAT, graph={"nodes": [], "edges": []}, inputs={"query": "test query"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Pre-populate the cache with the workflow execution @@ -366,7 +366,7 @@ def test_handle_workflow_node_execution_success(workflow_cycle_manager): event.process_data = {"process": "test process"} event.outputs = {"output": "test output"} event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = datetime.now(UTC).replace(tzinfo=None) + event.start_at = naive_utc_now() # Create a real node execution @@ -379,7 +379,7 @@ def test_handle_workflow_node_execution_success(workflow_cycle_manager): node_id="test-node-id", node_type=NodeType.LLM, title="Test Node", - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) # Pre-populate the cache with the node execution @@ -409,7 +409,7 @@ def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workfl workflow_type=WorkflowType.CHAT, graph={"nodes": [], "edges": []}, inputs={"query": "test query"}, - started_at=datetime.now(UTC).replace(tzinfo=None), + started_at=naive_utc_now(), ) # Pre-populate the cache with the workflow execution @@ -443,7 +443,7 @@ def test_handle_workflow_node_execution_failed(workflow_cycle_manager): event.process_data = {"process": "test process"} event.outputs = {"output": "test output"} event.execution_metadata = {WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: 100} - event.start_at = datetime.now(UTC).replace(tzinfo=None) + event.start_at = naive_utc_now() event.error = "Test error message" # Create a real node execution @@ -457,7 +457,7 @@ def test_handle_workflow_node_execution_failed(workflow_cycle_manager): node_id="test-node-id", node_type=NodeType.LLM, title="Test Node", - created_at=datetime.now(UTC).replace(tzinfo=None), + created_at=naive_utc_now(), ) # Pre-populate the cache with the node execution diff --git a/api/tests/unit_tests/extensions/storage/__init__.py b/api/tests/unit_tests/extensions/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/extensions/storage/test_supabase_storage.py b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py new file mode 100644 index 0000000000..7d295cecf2 --- /dev/null +++ b/api/tests/unit_tests/extensions/storage/test_supabase_storage.py @@ -0,0 +1,313 @@ +from collections.abc import Generator +from unittest.mock import Mock, patch + +import pytest + +from extensions.storage.supabase_storage import SupabaseStorage + + +class TestSupabaseStorage: + """Test suite for SupabaseStorage class.""" + + def test_init_success_with_all_config(self): + """Test successful initialization when all required config is provided.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with patch("extensions.storage.supabase_storage.Client") as 
mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + # Mock bucket_exists to return True so create_bucket is not called + with patch.object(SupabaseStorage, "bucket_exists", return_value=True): + storage = SupabaseStorage() + + assert storage.bucket_name == "test-bucket" + mock_client_class.assert_called_once_with( + supabase_url="https://test.supabase.co", supabase_key="test-api-key" + ) + + def test_init_raises_error_when_url_missing(self): + """Test initialization raises ValueError when SUPABASE_URL is None.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = None + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with pytest.raises(ValueError, match="SUPABASE_URL is not set"): + SupabaseStorage() + + def test_init_raises_error_when_api_key_missing(self): + """Test initialization raises ValueError when SUPABASE_API_KEY is None.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = None + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with pytest.raises(ValueError, match="SUPABASE_API_KEY is not set"): + SupabaseStorage() + + def test_init_raises_error_when_bucket_name_missing(self): + """Test initialization raises ValueError when SUPABASE_BUCKET_NAME is None.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = None + + with pytest.raises(ValueError, match="SUPABASE_BUCKET_NAME is not set"): + SupabaseStorage() + + def test_create_bucket_when_not_exists(self): + """Test create_bucket creates bucket when it doesn't exist.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with patch("extensions.storage.supabase_storage.Client") as mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + with patch.object(SupabaseStorage, "bucket_exists", return_value=False): + storage = SupabaseStorage() + + mock_client.storage.create_bucket.assert_called_once_with(id="test-bucket", name="test-bucket") + + def test_create_bucket_when_exists(self): + """Test create_bucket does not create bucket when it already exists.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with patch("extensions.storage.supabase_storage.Client") as mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + with patch.object(SupabaseStorage, "bucket_exists", return_value=True): + storage = SupabaseStorage() + + mock_client.storage.create_bucket.assert_not_called() + + @pytest.fixture + def storage_with_mock_client(self): + """Fixture providing SupabaseStorage with mocked client.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with 
patch("extensions.storage.supabase_storage.Client") as mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + with patch.object(SupabaseStorage, "bucket_exists", return_value=True): + storage = SupabaseStorage() + # Create fresh mock for each test + mock_client.reset_mock() + yield storage, mock_client + + def test_save(self, storage_with_mock_client): + """Test save calls client.storage.from_(bucket).upload(path, data).""" + storage, mock_client = storage_with_mock_client + + filename = "test.txt" + data = b"test data" + + storage.save(filename, data) + + mock_client.storage.from_.assert_called_once_with("test-bucket") + mock_client.storage.from_().upload.assert_called_once_with(filename, data) + + def test_load_once_returns_bytes(self, storage_with_mock_client): + """Test load_once returns bytes.""" + storage, mock_client = storage_with_mock_client + + expected_data = b"test content" + mock_client.storage.from_().download.return_value = expected_data + + result = storage.load_once("test.txt") + + assert result == expected_data + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().download.assert_called_with("test.txt") + + def test_load_stream_yields_chunks(self, storage_with_mock_client): + """Test load_stream yields chunks.""" + storage, mock_client = storage_with_mock_client + + test_data = b"test content for streaming" + mock_client.storage.from_().download.return_value = test_data + + result = storage.load_stream("test.txt") + + assert isinstance(result, Generator) + + # Collect all chunks + chunks = list(result) + + # Verify chunks contain the expected data + assert b"".join(chunks) == test_data + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().download.assert_called_with("test.txt") + + def test_download_writes_bytes_to_disk(self, storage_with_mock_client, tmp_path): + """Test download writes expected bytes to disk.""" + storage, mock_client = storage_with_mock_client + + test_data = b"test file content" + mock_client.storage.from_().download.return_value = test_data + + target_file = tmp_path / "downloaded_file.txt" + + storage.download("test.txt", str(target_file)) + + # Verify file was written with correct content + assert target_file.read_bytes() == test_data + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().download.assert_called_with("test.txt") + + def test_exists_with_list_containing_items(self, storage_with_mock_client): + """Test exists returns True when list() returns items (using len() > 0).""" + storage, mock_client = storage_with_mock_client + + # Mock list return with special object that has count() method + mock_list_result = Mock() + mock_list_result.count.return_value = 1 + mock_client.storage.from_().list.return_value = mock_list_result + + result = storage.exists("test.txt") + + assert result is True + # from_ gets called during init too, so just check it was called with the right bucket + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().list.assert_called_with("test.txt") + + def test_exists_with_count_method_greater_than_zero(self, storage_with_mock_client): + """Test exists returns 
True when list result has count() > 0.""" + storage, mock_client = storage_with_mock_client + + # Mock list return with count() method + mock_list_result = Mock() + mock_list_result.count.return_value = 1 + mock_client.storage.from_().list.return_value = mock_list_result + + result = storage.exists("test.txt") + + assert result is True + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().list.assert_called_with("test.txt") + mock_list_result.count.assert_called() + + def test_exists_with_count_method_zero(self, storage_with_mock_client): + """Test exists returns False when list result has count() == 0.""" + storage, mock_client = storage_with_mock_client + + # Mock list return with count() method returning 0 + mock_list_result = Mock() + mock_list_result.count.return_value = 0 + mock_client.storage.from_().list.return_value = mock_list_result + + result = storage.exists("test.txt") + + assert result is False + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().list.assert_called_with("test.txt") + mock_list_result.count.assert_called() + + def test_exists_with_empty_list(self, storage_with_mock_client): + """Test exists returns False when the mocked list result reports a count() of zero.""" + storage, mock_client = storage_with_mock_client + + # Mock list return with special object that has count() method returning 0 + mock_list_result = Mock() + mock_list_result.count.return_value = 0 + mock_client.storage.from_().list.return_value = mock_list_result + + result = storage.exists("test.txt") + + assert result is False + # Verify the correct calls were made + assert "test-bucket" in [call[0][0] for call in mock_client.storage.from_.call_args_list if call[0]] + mock_client.storage.from_().list.assert_called_with("test.txt") + + def test_delete_calls_remove_with_filename(self, storage_with_mock_client): + """Test delete calls remove() with the filename on the configured bucket.""" + storage, mock_client = storage_with_mock_client + + filename = "test.txt" + + storage.delete(filename) + + mock_client.storage.from_.assert_called_once_with("test-bucket") + mock_client.storage.from_().remove.assert_called_once_with(filename) + 
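The three bucket_exists tests that follow all mock `list_buckets()` and match on the bucket name. Inferred from those mocks, the method presumably behaves like this sketch (an assumption, not the actual extension code):

```python
# Presumed logic of SupabaseStorage.bucket_exists, inferred from the mocked
# list_buckets() return values in the tests below: each bucket object exposes
# a .name attribute that is compared against the configured bucket name.
def bucket_exists(client, bucket_name: str) -> bool:
    buckets = client.storage.list_buckets()
    return any(bucket.name == bucket_name for bucket in buckets)
```

+ def test_bucket_exists_returns_true_when_bucket_found(self): + """Test bucket_exists returns True when bucket is found in list.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with 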
patch("extensions.storage.supabase_storage.Client") as mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + # Mock different bucket + mock_bucket = Mock() + mock_bucket.name = "different-bucket" + mock_client.storage.list_buckets.return_value = [mock_bucket] + mock_client.storage.create_bucket = Mock() + + storage = SupabaseStorage() + result = storage.bucket_exists() + + assert result is False + assert mock_client.storage.list_buckets.call_count >= 1 + + def test_bucket_exists_returns_false_when_no_buckets(self): + """Test bucket_exists returns False when no buckets exist.""" + with patch("extensions.storage.supabase_storage.dify_config") as mock_config: + mock_config.SUPABASE_URL = "https://test.supabase.co" + mock_config.SUPABASE_API_KEY = "test-api-key" + mock_config.SUPABASE_BUCKET_NAME = "test-bucket" + + with patch("extensions.storage.supabase_storage.Client") as mock_client_class: + mock_client = Mock() + mock_client_class.return_value = mock_client + + mock_client.storage.list_buckets.return_value = [] + mock_client.storage.create_bucket = Mock() + + storage = SupabaseStorage() + result = storage.bucket_exists() + + assert result is False + assert mock_client.storage.list_buckets.call_count >= 1 diff --git a/api/tests/unit_tests/extensions/test_ext_request_logging.py b/api/tests/unit_tests/extensions/test_ext_request_logging.py index 4e71469bcc..cf6e172e4d 100644 --- a/api/tests/unit_tests/extensions/test_ext_request_logging.py +++ b/api/tests/unit_tests/extensions/test_ext_request_logging.py @@ -43,28 +43,28 @@ def _get_test_app(): @pytest.fixture -def mock_request_receiver(monkeypatch) -> mock.Mock: +def mock_request_receiver(monkeypatch: pytest.MonkeyPatch) -> mock.Mock: mock_log_request_started = mock.Mock() monkeypatch.setattr(ext_request_logging, "_log_request_started", mock_log_request_started) return mock_log_request_started @pytest.fixture -def mock_response_receiver(monkeypatch) -> mock.Mock: +def mock_response_receiver(monkeypatch: pytest.MonkeyPatch) -> mock.Mock: mock_log_request_finished = mock.Mock() monkeypatch.setattr(ext_request_logging, "_log_request_finished", mock_log_request_finished) return mock_log_request_finished @pytest.fixture -def mock_logger(monkeypatch) -> logging.Logger: +def mock_logger(monkeypatch: pytest.MonkeyPatch) -> logging.Logger: _logger = mock.MagicMock(spec=logging.Logger) - monkeypatch.setattr(ext_request_logging, "_logger", _logger) + monkeypatch.setattr(ext_request_logging, "logger", _logger) return _logger @pytest.fixture -def enable_request_logging(monkeypatch): +def enable_request_logging(monkeypatch: pytest.MonkeyPatch): monkeypatch.setattr(dify_config, "ENABLE_REQUEST_LOGGING", True) diff --git a/api/tests/unit_tests/factories/test_variable_factory.py b/api/tests/unit_tests/factories/test_variable_factory.py index 4f2542a323..2a193ef2d7 100644 --- a/api/tests/unit_tests/factories/test_variable_factory.py +++ b/api/tests/unit_tests/factories/test_variable_factory.py @@ -24,16 +24,18 @@ from core.variables.segments import ( ArrayNumberSegment, ArrayObjectSegment, ArrayStringSegment, + BooleanSegment, FileSegment, FloatSegment, IntegerSegment, NoneSegment, ObjectSegment, + Segment, StringSegment, ) from core.variables.types import SegmentType from factories import variable_factory -from factories.variable_factory import TypeMismatchError, build_segment_with_type +from factories.variable_factory import TypeMismatchError, build_segment, build_segment_with_type def test_string_variable(): @@ 
-139,6 +141,26 @@ def test_array_number_variable(): assert isinstance(variable.value[1], float) +def test_build_segment_scalar_values(): + @dataclass + class TestCase: + value: Any + expected: Segment + description: str + + cases = [ + TestCase( + value=True, + expected=BooleanSegment(value=True), + description="build_segment with boolean should yield BooleanSegment", + ) + ] + + for idx, c in enumerate(cases, 1): + seg = build_segment(c.value) + assert seg == c.expected, f"Test case {idx} failed: {c.description}" + + def test_array_object_variable(): mapping = { "id": str(uuid4()), @@ -847,15 +869,22 @@ class TestBuildSegmentValueErrors: f"but got: {error_message}" ) - def test_build_segment_boolean_type_note(self): - """Note: Boolean values are actually handled as integers in Python, so they don't raise ValueError.""" - # Boolean values in Python are subclasses of int, so they get processed as integers - # True becomes IntegerSegment(value=1) and False becomes IntegerSegment(value=0) + def test_build_segment_boolean_type(self): + """Test that Boolean values are correctly handled as boolean type, not integers.""" + # Boolean values should now be processed as BooleanSegment, not IntegerSegment + # This is because the bool check now comes before the int check in build_segment true_segment = variable_factory.build_segment(True) false_segment = variable_factory.build_segment(False) - # Verify they are processed as integers, not as errors - assert true_segment.value == 1, "Test case 1 (boolean_true): Expected True to be processed as integer 1" - assert false_segment.value == 0, "Test case 2 (boolean_false): Expected False to be processed as integer 0" - assert true_segment.value_type == SegmentType.INTEGER - assert false_segment.value_type == SegmentType.INTEGER + # Verify they are processed as booleans, not integers + assert true_segment.value is True, "Test case 1 (boolean_true): Expected True to be processed as boolean True" + assert false_segment.value is False, ( + "Test case 2 (boolean_false): Expected False to be processed as boolean False" + ) + assert true_segment.value_type == SegmentType.BOOLEAN + assert false_segment.value_type == SegmentType.BOOLEAN + + # Test array of booleans + bool_array_segment = variable_factory.build_segment([True, False, True]) + assert bool_array_segment.value_type == SegmentType.ARRAY_BOOLEAN + assert bool_array_segment.value == [True, False, True] diff --git a/api/tests/unit_tests/libs/test_datetime_utils.py b/api/tests/unit_tests/libs/test_datetime_utils.py index e7781a5821..e914ca4816 100644 --- a/api/tests/unit_tests/libs/test_datetime_utils.py +++ b/api/tests/unit_tests/libs/test_datetime_utils.py @@ -1,9 +1,11 @@ import datetime +import pytest + from libs.datetime_utils import naive_utc_now -def test_naive_utc_now(monkeypatch): +def test_naive_utc_now(monkeypatch: pytest.MonkeyPatch): tz_aware_utc_now = datetime.datetime.now(tz=datetime.UTC) def _now_func(tz: datetime.timezone | None) -> datetime.datetime: diff --git a/api/tests/unit_tests/libs/test_jwt_imports.py b/api/tests/unit_tests/libs/test_jwt_imports.py new file mode 100644 index 0000000000..4acd901b1b --- /dev/null +++ b/api/tests/unit_tests/libs/test_jwt_imports.py @@ -0,0 +1,63 @@ +"""Test PyJWT import paths to catch changes in library structure.""" + +import pytest + + +class TestPyJWTImports: + """Test PyJWT import paths used throughout the codebase.""" + + def test_invalid_token_error_import(self): + """Test that InvalidTokenError can be imported as used in login controller.""" + # 
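
The bool-before-int ordering noted in the test_build_segment_boolean_type comments above is easy to reproduce in isolation: bool is a subclass of int in Python, so an isinstance(value, int) branch swallows True and False unless the bool branch runs first. A standalone illustration — classify() is a stand-in for build_segment's type dispatch, not the factory's actual code:

```python
# bool subclasses int, so the bool branch must precede the int branch.
# classify() is an illustrative stand-in for build_segment's dispatch.
def classify(value):
    if isinstance(value, bool):  # checked first, or True/False fall through as ints
        return "boolean"
    if isinstance(value, int):
        return "integer"
    raise TypeError(f"unsupported type: {type(value)!r}")

assert isinstance(True, int)        # True: the subclass relationship
assert classify(True) == "boolean"  # correct dispatch with bool checked first
assert classify(1) == "integer"
```
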
This test verifies the import path used in controllers/web/login.py:2 + # If PyJWT changes this import path, this test will fail early + try: + from jwt import InvalidTokenError + + # Verify it's the correct exception class + assert issubclass(InvalidTokenError, Exception) + + # Test that it can be instantiated + error = InvalidTokenError("test error") + assert str(error) == "test error" + + except ImportError as e: + pytest.fail(f"Failed to import InvalidTokenError from jwt: {e}") + + def test_jwt_exceptions_import(self): + """Test that jwt.exceptions imports work as expected.""" + # Alternative import path that might be used + try: + # Verify it's the same class as the direct import + from jwt import InvalidTokenError + from jwt.exceptions import InvalidTokenError as InvalidTokenErrorAlt + + assert InvalidTokenError is InvalidTokenErrorAlt + + except ImportError as e: + pytest.fail(f"Failed to import InvalidTokenError from jwt.exceptions: {e}") + + def test_other_jwt_exceptions_available(self): + """Test that other common JWT exceptions are available.""" + # Test other exceptions that might be used in the codebase + try: + from jwt import DecodeError, ExpiredSignatureError, InvalidSignatureError + + # Verify they are exception classes + assert issubclass(DecodeError, Exception) + assert issubclass(ExpiredSignatureError, Exception) + assert issubclass(InvalidSignatureError, Exception) + + except ImportError as e: + pytest.fail(f"Failed to import JWT exceptions: {e}") + + def test_jwt_main_functions_available(self): + """Test that main JWT functions are available.""" + try: + from jwt import decode, encode + + # Verify they are callable + assert callable(decode) + assert callable(encode) + + except ImportError as e: + pytest.fail(f"Failed to import JWT main functions: {e}") diff --git a/api/tests/unit_tests/libs/test_uuid_utils.py b/api/tests/unit_tests/libs/test_uuid_utils.py index 7dbda95f45..9e040efb62 100644 --- a/api/tests/unit_tests/libs/test_uuid_utils.py +++ b/api/tests/unit_tests/libs/test_uuid_utils.py @@ -143,7 +143,7 @@ def test_uuidv7_with_custom_timestamp(): assert extracted_timestamp == custom_timestamp # Exact match for integer milliseconds -def test_uuidv7_with_none_timestamp(monkeypatch): +def test_uuidv7_with_none_timestamp(monkeypatch: pytest.MonkeyPatch): """Test UUID generation with None timestamp uses current time.""" mock_time = 1609459200 mock_time_func = mock.Mock(return_value=mock_time) diff --git a/api/tests/unit_tests/models/test_workflow.py b/api/tests/unit_tests/models/test_workflow.py index 5bc77ad0ef..4c61320c29 100644 --- a/api/tests/unit_tests/models/test_workflow.py +++ b/api/tests/unit_tests/models/test_workflow.py @@ -9,7 +9,6 @@ from core.file.models import File from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable from core.variables.segments import IntegerSegment, Segment from factories.variable_factory import build_segment -from models.model import EndUser from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable @@ -43,14 +42,9 @@ def test_environment_variables(): {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]} ) - # Mock current_user as an EndUser - mock_user = mock.Mock(spec=EndUser) - mock_user.tenant_id = "tenant_id" - with ( mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), - 
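
The PyJWT import guards above all protect one round trip. For reference, a small self-contained example of the import path and exception hierarchy those tests exercise — the secret and payload are placeholders, not values from the codebase:

```python
# Illustrative PyJWT round trip using the import paths the tests guard.
import jwt
from jwt import ExpiredSignatureError, InvalidTokenError

secret = "illustrative-secret"
token = jwt.encode({"sub": "user-1"}, secret, algorithm="HS256")

try:
    claims = jwt.decode(token, secret, algorithms=["HS256"])
    assert claims["sub"] == "user-1"
except ExpiredSignatureError:
    ...  # expired tokens; a subclass of InvalidTokenError
except InvalidTokenError:
    ...  # tampered or malformed tokens land here, as in controllers/web/login.py
```
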
mock.patch("models.workflow.current_user", mock_user), ): # Set the environment_variables property of the Workflow instance variables = [variable1, variable2, variable3, variable4] @@ -90,14 +84,9 @@ def test_update_environment_variables(): {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]} ) - # Mock current_user as an EndUser - mock_user = mock.Mock(spec=EndUser) - mock_user.tenant_id = "tenant_id" - with ( mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), - mock.patch("models.workflow.current_user", mock_user), ): variables = [variable1, variable2, variable3, variable4] @@ -136,14 +125,9 @@ def test_to_dict(): # Create some EnvironmentVariable instances - # Mock current_user as an EndUser - mock_user = mock.Mock(spec=EndUser) - mock_user.tenant_id = "tenant_id" - with ( mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), - mock.patch("models.workflow.current_user", mock_user), ): # Set the environment_variables property of the Workflow instance workflow.environment_variables = [ diff --git a/api/tests/unit_tests/services/test_dataset_permission.py b/api/tests/unit_tests/services/test_dataset_permission.py index c1e4981325..4974d6c1ef 100644 --- a/api/tests/unit_tests/services/test_dataset_permission.py +++ b/api/tests/unit_tests/services/test_dataset_permission.py @@ -83,7 +83,7 @@ class TestDatasetPermissionService: @pytest.fixture def mock_logging_dependencies(self): """Mock setup for logging tests.""" - with patch("services.dataset_service.logging") as mock_logging: + with patch("services.dataset_service.logger") as mock_logging: yield { "logging": mock_logging, } diff --git a/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py b/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py index dc09aca5b2..1881ceac26 100644 --- a/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py +++ b/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py @@ -93,16 +93,15 @@ class TestDatasetServiceBatchUpdateDocumentStatus: with ( patch("services.dataset_service.DocumentService.get_document") as mock_get_doc, patch("extensions.ext_database.db.session") as mock_db, - patch("services.dataset_service.datetime") as mock_datetime, + patch("services.dataset_service.naive_utc_now") as mock_naive_utc_now, ): current_time = datetime.datetime(2023, 1, 1, 12, 0, 0) - mock_datetime.datetime.now.return_value = current_time - mock_datetime.UTC = datetime.UTC + mock_naive_utc_now.return_value = current_time yield { "get_document": mock_get_doc, "db_session": mock_db, - "datetime": mock_datetime, + "naive_utc_now": mock_naive_utc_now, "current_time": current_time, } @@ -120,21 +119,21 @@ class TestDatasetServiceBatchUpdateDocumentStatus: assert document.enabled == True assert document.disabled_at is None assert document.disabled_by is None - assert document.updated_at == current_time.replace(tzinfo=None) + assert document.updated_at == current_time def _assert_document_disabled(self, document: Mock, user_id: str, current_time: datetime.datetime): """Helper method to verify document was disabled correctly.""" assert document.enabled == False - assert document.disabled_at == current_time.replace(tzinfo=None) + assert document.disabled_at == current_time assert 
document.disabled_by == user_id - assert document.updated_at == current_time.replace(tzinfo=None) + assert document.updated_at == current_time def _assert_document_archived(self, document: Mock, user_id: str, current_time: datetime.datetime): """Helper method to verify document was archived correctly.""" assert document.archived == True - assert document.archived_at == current_time.replace(tzinfo=None) + assert document.archived_at == current_time assert document.archived_by == user_id - assert document.updated_at == current_time.replace(tzinfo=None) + assert document.updated_at == current_time def _assert_document_unarchived(self, document: Mock): """Helper method to verify document was unarchived correctly.""" @@ -430,7 +429,7 @@ class TestDatasetServiceBatchUpdateDocumentStatus: # Verify document attributes were updated correctly self._assert_document_unarchived(archived_doc) - assert archived_doc.updated_at == mock_document_service_dependencies["current_time"].replace(tzinfo=None) + assert archived_doc.updated_at == mock_document_service_dependencies["current_time"] # Verify Redis cache was set (because document is enabled) redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1) @@ -495,9 +494,7 @@ class TestDatasetServiceBatchUpdateDocumentStatus: # Verify document was unarchived self._assert_document_unarchived(archived_disabled_doc) - assert archived_disabled_doc.updated_at == mock_document_service_dependencies["current_time"].replace( - tzinfo=None - ) + assert archived_disabled_doc.updated_at == mock_document_service_dependencies["current_time"] # Verify no Redis cache was set (document is disabled) redis_mock.setex.assert_not_called() diff --git a/api/tests/unit_tests/services/test_metadata_bug_complete.py b/api/tests/unit_tests/services/test_metadata_bug_complete.py index c4c7579e83..0fc36510b9 100644 --- a/api/tests/unit_tests/services/test_metadata_bug_complete.py +++ b/api/tests/unit_tests/services/test_metadata_bug_complete.py @@ -1,7 +1,7 @@ from unittest.mock import Mock, patch import pytest -from flask_restful import reqparse +from flask_restx import reqparse from werkzeug.exceptions import BadRequest from services.entities.knowledge_entities.knowledge_entities import MetadataArgs diff --git a/api/tests/unit_tests/services/test_metadata_nullable_bug.py b/api/tests/unit_tests/services/test_metadata_nullable_bug.py index ef4d05c1d9..7f6344f942 100644 --- a/api/tests/unit_tests/services/test_metadata_nullable_bug.py +++ b/api/tests/unit_tests/services/test_metadata_nullable_bug.py @@ -1,7 +1,7 @@ from unittest.mock import Mock, patch import pytest -from flask_restful import reqparse +from flask_restx import reqparse from services.entities.knowledge_entities.knowledge_entities import MetadataArgs from services.metadata_service import MetadataService diff --git a/api/tests/unit_tests/tasks/test_remove_app_and_related_data_task.py b/api/tests/unit_tests/tasks/test_remove_app_and_related_data_task.py index d8003570b5..673282a6f4 100644 --- a/api/tests/unit_tests/tasks/test_remove_app_and_related_data_task.py +++ b/api/tests/unit_tests/tasks/test_remove_app_and_related_data_task.py @@ -179,7 +179,7 @@ class TestDeleteDraftVariablesBatch: delete_draft_variables_batch(app_id, 0) @patch("tasks.remove_app_and_related_data_task.db") - @patch("tasks.remove_app_and_related_data_task.logging") + @patch("tasks.remove_app_and_related_data_task.logger") def test_delete_draft_variables_batch_logs_progress(self, mock_logging, mock_db): """Test that batch deletion logs 
progress correctly.""" app_id = "test-app-id" diff --git a/api/uv.lock b/api/uv.lock index cecce2bc43..dabca0d0de 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1,5 +1,4 @@ version = 1 -revision = 2 requires-python = ">=3.11, <3.13" resolution-markers = [ "python_full_version >= '3.12.4' and platform_python_implementation != 'PyPy' and sys_platform == 'linux'", @@ -20,18 +19,18 @@ resolution-markers = [ name = "aiofiles" version = "24.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, ] [[package]] name = "aiohappyeyeballs" version = "2.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, ] [[package]] @@ -47,42 +46,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6a/65/5566b49553bf20ffed6041c665a5504fb047cefdef1b701407b8ce1a47c4/aiohttp-3.12.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c229b1437aa2576b99384e4be668af1db84b31a45305d02f61f5497cfa6f60c", size = 709401, upload-time = "2025-06-14T15:13:30.774Z" }, - { url = "https://files.pythonhosted.org/packages/14/b5/48e4cc61b54850bdfafa8fe0b641ab35ad53d8e5a65ab22b310e0902fa42/aiohttp-3.12.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04076d8c63471e51e3689c93940775dc3d12d855c0c80d18ac5a1c68f0904358", size = 481669, upload-time = "2025-06-14T15:13:32.316Z" }, - { url = "https://files.pythonhosted.org/packages/04/4f/e3f95c8b2a20a0437d51d41d5ccc4a02970d8ad59352efb43ea2841bd08e/aiohttp-3.12.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55683615813ce3601640cfaa1041174dc956d28ba0511c8cbd75273eb0587014", size = 469933, upload-time = "2025-06-14T15:13:34.104Z" }, - { url = "https://files.pythonhosted.org/packages/41/c9/c5269f3b6453b1cfbd2cfbb6a777d718c5f086a3727f576c51a468b03ae2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:921bc91e602d7506d37643e77819cb0b840d4ebb5f8d6408423af3d3bf79a7b7", size = 1740128, upload-time = "2025-06-14T15:13:35.604Z" }, - { url = "https://files.pythonhosted.org/packages/6f/49/a3f76caa62773d33d0cfaa842bdf5789a78749dbfe697df38ab1badff369/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e72d17fe0974ddeae8ed86db297e23dba39c7ac36d84acdbb53df2e18505a013", size = 1688796, upload-time = "2025-06-14T15:13:37.125Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e4/556fccc4576dc22bf18554b64cc873b1a3e5429a5bdb7bbef7f5d0bc7664/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0653d15587909a52e024a261943cf1c5bdc69acb71f411b0dd5966d065a51a47", size = 1787589, upload-time = "2025-06-14T15:13:38.745Z" }, - { url = "https://files.pythonhosted.org/packages/b9/3d/d81b13ed48e1a46734f848e26d55a7391708421a80336e341d2aef3b6db2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a77b48997c66722c65e157c06c74332cdf9c7ad00494b85ec43f324e5c5a9b9a", size = 1826635, upload-time = "2025-06-14T15:13:40.733Z" }, - { url = "https://files.pythonhosted.org/packages/75/a5/472e25f347da88459188cdaadd1f108f6292f8a25e62d226e63f860486d1/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6946bae55fd36cfb8e4092c921075cde029c71c7cb571d72f1079d1e4e013bc", size = 1729095, upload-time = "2025-06-14T15:13:42.312Z" }, - { url = "https://files.pythonhosted.org/packages/b9/fe/322a78b9ac1725bfc59dfc301a5342e73d817592828e4445bd8f4ff83489/aiohttp-3.12.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f95db8c8b219bcf294a53742c7bda49b80ceb9d577c8e7aa075612b7f39ffb7", size = 1666170, upload-time = "2025-06-14T15:13:44.884Z" }, - { url = "https://files.pythonhosted.org/packages/7a/77/ec80912270e231d5e3839dbd6c065472b9920a159ec8a1895cf868c2708e/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03d5eb3cfb4949ab4c74822fb3326cd9655c2b9fe22e4257e2100d44215b2e2b", size = 1714444, upload-time = "2025-06-14T15:13:46.401Z" }, - { url = "https://files.pythonhosted.org/packages/21/b2/fb5aedbcb2b58d4180e58500e7c23ff8593258c27c089abfbcc7db65bd40/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:6383dd0ffa15515283c26cbf41ac8e6705aab54b4cbb77bdb8935a713a89bee9", size = 1709604, upload-time = "2025-06-14T15:13:48.377Z" }, - { url = "https://files.pythonhosted.org/packages/e3/15/a94c05f7c4dc8904f80b6001ad6e07e035c58a8ebfcc15e6b5d58500c858/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6548a411bc8219b45ba2577716493aa63b12803d1e5dc70508c539d0db8dbf5a", size = 1689786, upload-time = "2025-06-14T15:13:50.401Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fd/0d2e618388f7a7a4441eed578b626bda9ec6b5361cd2954cfc5ab39aa170/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81b0fcbfe59a4ca41dc8f635c2a4a71e63f75168cc91026c61be665945739e2d", size = 1783389, upload-time = "2025-06-14T15:13:51.945Z" }, - { url = "https://files.pythonhosted.org/packages/a6/6b/6986d0c75996ef7e64ff7619b9b7449b1d1cbbe05c6755e65d92f1784fe9/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a83797a0174e7995e5edce9dcecc517c642eb43bc3cba296d4512edf346eee2", size = 1803853, upload-time = "2025-06-14T15:13:53.533Z" }, - { url = "https://files.pythonhosted.org/packages/21/65/cd37b38f6655d95dd07d496b6d2f3924f579c43fd64b0e32b547b9c24df5/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5734d8469a5633a4e9ffdf9983ff7cdb512524645c7a3d4bc8a3de45b935ac3", size = 1716909, upload-time = "2025-06-14T15:13:55.148Z" }, - { url = "https://files.pythonhosted.org/packages/fd/20/2de7012427dc116714c38ca564467f6143aec3d5eca3768848d62aa43e62/aiohttp-3.12.13-cp311-cp311-win32.whl", hash = "sha256:fef8d50dfa482925bb6b4c208b40d8e9fa54cecba923dc65b825a72eed9a5dbd", size = 427036, upload-time = "2025-06-14T15:13:57.076Z" }, - { url = "https://files.pythonhosted.org/packages/f8/b6/98518bcc615ef998a64bef371178b9afc98ee25895b4f476c428fade2220/aiohttp-3.12.13-cp311-cp311-win_amd64.whl", hash = "sha256:9a27da9c3b5ed9d04c36ad2df65b38a96a37e9cfba6f1381b842d05d98e6afe9", size = 451427, upload-time = "2025-06-14T15:13:58.505Z" }, - { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, - { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, - { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, - { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, - { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, - { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, - { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, - { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, - { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, - { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" }, - { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, - { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, - { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, - { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, - { url = 
"https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, - { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, + { url = "https://files.pythonhosted.org/packages/6a/65/5566b49553bf20ffed6041c665a5504fb047cefdef1b701407b8ce1a47c4/aiohttp-3.12.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c229b1437aa2576b99384e4be668af1db84b31a45305d02f61f5497cfa6f60c", size = 709401 }, + { url = "https://files.pythonhosted.org/packages/14/b5/48e4cc61b54850bdfafa8fe0b641ab35ad53d8e5a65ab22b310e0902fa42/aiohttp-3.12.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04076d8c63471e51e3689c93940775dc3d12d855c0c80d18ac5a1c68f0904358", size = 481669 }, + { url = "https://files.pythonhosted.org/packages/04/4f/e3f95c8b2a20a0437d51d41d5ccc4a02970d8ad59352efb43ea2841bd08e/aiohttp-3.12.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55683615813ce3601640cfaa1041174dc956d28ba0511c8cbd75273eb0587014", size = 469933 }, + { url = "https://files.pythonhosted.org/packages/41/c9/c5269f3b6453b1cfbd2cfbb6a777d718c5f086a3727f576c51a468b03ae2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:921bc91e602d7506d37643e77819cb0b840d4ebb5f8d6408423af3d3bf79a7b7", size = 1740128 }, + { url = "https://files.pythonhosted.org/packages/6f/49/a3f76caa62773d33d0cfaa842bdf5789a78749dbfe697df38ab1badff369/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e72d17fe0974ddeae8ed86db297e23dba39c7ac36d84acdbb53df2e18505a013", size = 1688796 }, + { url = "https://files.pythonhosted.org/packages/ad/e4/556fccc4576dc22bf18554b64cc873b1a3e5429a5bdb7bbef7f5d0bc7664/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0653d15587909a52e024a261943cf1c5bdc69acb71f411b0dd5966d065a51a47", size = 1787589 }, + { url = "https://files.pythonhosted.org/packages/b9/3d/d81b13ed48e1a46734f848e26d55a7391708421a80336e341d2aef3b6db2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a77b48997c66722c65e157c06c74332cdf9c7ad00494b85ec43f324e5c5a9b9a", size = 1826635 }, + { url = "https://files.pythonhosted.org/packages/75/a5/472e25f347da88459188cdaadd1f108f6292f8a25e62d226e63f860486d1/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6946bae55fd36cfb8e4092c921075cde029c71c7cb571d72f1079d1e4e013bc", size = 1729095 }, + { url = "https://files.pythonhosted.org/packages/b9/fe/322a78b9ac1725bfc59dfc301a5342e73d817592828e4445bd8f4ff83489/aiohttp-3.12.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f95db8c8b219bcf294a53742c7bda49b80ceb9d577c8e7aa075612b7f39ffb7", size = 1666170 }, + { url = "https://files.pythonhosted.org/packages/7a/77/ec80912270e231d5e3839dbd6c065472b9920a159ec8a1895cf868c2708e/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03d5eb3cfb4949ab4c74822fb3326cd9655c2b9fe22e4257e2100d44215b2e2b", size = 1714444 }, + { url = 
"https://files.pythonhosted.org/packages/21/b2/fb5aedbcb2b58d4180e58500e7c23ff8593258c27c089abfbcc7db65bd40/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6383dd0ffa15515283c26cbf41ac8e6705aab54b4cbb77bdb8935a713a89bee9", size = 1709604 }, + { url = "https://files.pythonhosted.org/packages/e3/15/a94c05f7c4dc8904f80b6001ad6e07e035c58a8ebfcc15e6b5d58500c858/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6548a411bc8219b45ba2577716493aa63b12803d1e5dc70508c539d0db8dbf5a", size = 1689786 }, + { url = "https://files.pythonhosted.org/packages/1d/fd/0d2e618388f7a7a4441eed578b626bda9ec6b5361cd2954cfc5ab39aa170/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81b0fcbfe59a4ca41dc8f635c2a4a71e63f75168cc91026c61be665945739e2d", size = 1783389 }, + { url = "https://files.pythonhosted.org/packages/a6/6b/6986d0c75996ef7e64ff7619b9b7449b1d1cbbe05c6755e65d92f1784fe9/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a83797a0174e7995e5edce9dcecc517c642eb43bc3cba296d4512edf346eee2", size = 1803853 }, + { url = "https://files.pythonhosted.org/packages/21/65/cd37b38f6655d95dd07d496b6d2f3924f579c43fd64b0e32b547b9c24df5/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5734d8469a5633a4e9ffdf9983ff7cdb512524645c7a3d4bc8a3de45b935ac3", size = 1716909 }, + { url = "https://files.pythonhosted.org/packages/fd/20/2de7012427dc116714c38ca564467f6143aec3d5eca3768848d62aa43e62/aiohttp-3.12.13-cp311-cp311-win32.whl", hash = "sha256:fef8d50dfa482925bb6b4c208b40d8e9fa54cecba923dc65b825a72eed9a5dbd", size = 427036 }, + { url = "https://files.pythonhosted.org/packages/f8/b6/98518bcc615ef998a64bef371178b9afc98ee25895b4f476c428fade2220/aiohttp-3.12.13-cp311-cp311-win_amd64.whl", hash = "sha256:9a27da9c3b5ed9d04c36ad2df65b38a96a37e9cfba6f1381b842d05d98e6afe9", size = 451427 }, + { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491 }, + { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104 }, + { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948 }, + { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742 }, + { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393 }, + { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486 }, + { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643 }, + { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082 }, + { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884 }, + { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943 }, + { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398 }, + { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051 }, + { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611 }, + { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586 }, + { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197 }, + { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771 }, + { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869 }, ] [[package]] @@ -92,9 +91,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pymysql" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/76/2c5b55e4406a1957ffdfd933a94c2517455291c97d2b81cec6813754791a/aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67", size = 114706, upload-time = 
"2023-06-11T19:57:53.608Z" } +sdist = { url = "https://files.pythonhosted.org/packages/67/76/2c5b55e4406a1957ffdfd933a94c2517455291c97d2b81cec6813754791a/aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67", size = 114706 } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/87/c982ee8b333c85b8ae16306387d703a1fcdfc81a2f3f15a24820ab1a512d/aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a", size = 44215, upload-time = "2023-06-11T19:57:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/42/87/c982ee8b333c85b8ae16306387d703a1fcdfc81a2f3f15a24820ab1a512d/aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a", size = 44215 }, ] [[package]] @@ -105,9 +104,9 @@ dependencies = [ { name = "frozenlist" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, ] [[package]] @@ -119,9 +118,9 @@ dependencies = [ { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/40/28683414cc8711035a65256ca689e159471aa9ef08e8741ad1605bc01066/alembic-1.16.3.tar.gz", hash = "sha256:18ad13c1f40a5796deee4b2346d1a9c382f44b8af98053897484fa6cf88025e4", size = 1967462, upload-time = "2025-07-08T18:57:50.991Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/40/28683414cc8711035a65256ca689e159471aa9ef08e8741ad1605bc01066/alembic-1.16.3.tar.gz", hash = "sha256:18ad13c1f40a5796deee4b2346d1a9c382f44b8af98053897484fa6cf88025e4", size = 1967462 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/68/1dea77887af7304528ea944c355d769a7ccc4599d3a23bd39182486deb42/alembic-1.16.3-py3-none-any.whl", hash = "sha256:70a7c7829b792de52d08ca0e3aefaf060687cb8ed6bebfa557e597a1a5e5a481", size = 246933, upload-time = "2025-07-08T18:57:52.793Z" }, + { url = "https://files.pythonhosted.org/packages/e6/68/1dea77887af7304528ea944c355d769a7ccc4599d3a23bd39182486deb42/alembic-1.16.3-py3-none-any.whl", hash = "sha256:70a7c7829b792de52d08ca0e3aefaf060687cb8ed6bebfa557e597a1a5e5a481", size = 246933 }, ] [[package]] @@ -134,19 +133,19 @@ dependencies = [ { name = "alibabacloud-tea" }, { name = "apscheduler" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/0c/1b0c5f4c2170165719b336616ac0a88f1666fd8690fda41e2e8ae3139fd9/alibabacloud-credentials-1.0.2.tar.gz", hash = 
"sha256:d2368eb70bd02db9143b2bf531a27a6fecd2cde9601db6e5b48cd6dbe25720ce", size = 30804, upload-time = "2025-05-06T12:30:35.46Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/0c/1b0c5f4c2170165719b336616ac0a88f1666fd8690fda41e2e8ae3139fd9/alibabacloud-credentials-1.0.2.tar.gz", hash = "sha256:d2368eb70bd02db9143b2bf531a27a6fecd2cde9601db6e5b48cd6dbe25720ce", size = 30804 } [[package]] name = "alibabacloud-credentials-api" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330, upload-time = "2025-01-13T05:53:04.931Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330 } [[package]] name = "alibabacloud-endpoint-util" version = "0.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813, upload-time = "2025-06-12T07:20:52.572Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813 } [[package]] name = "alibabacloud-gateway-spi" @@ -155,7 +154,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-credentials" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf72ee9bea60bbbeff2bc42cdfe24d2544db52bc517e1a/alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b", size = 4249, upload-time = "2025-02-23T16:29:54.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf72ee9bea60bbbeff2bc42cdfe24d2544db52bc517e1a/alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b", size = 4249 } [[package]] name = "alibabacloud-gpdb20160503" @@ -171,9 +170,9 @@ dependencies = [ { name = "alibabacloud-tea-openapi" }, { name = "alibabacloud-tea-util" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092, upload-time = "2024-07-18T17:09:42.438Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097, upload-time = "2024-07-18T17:09:40.414Z" 
}, + { url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097 }, ] [[package]] @@ -184,7 +183,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201, upload-time = "2023-10-23T07:44:18.523Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201 } [[package]] name = "alibabacloud-openplatform20191219" @@ -196,9 +195,9 @@ dependencies = [ { name = "alibabacloud-tea-openapi" }, { name = "alibabacloud-tea-util" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038, upload-time = "2022-09-21T06:16:10.683Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204, upload-time = "2022-09-21T06:16:07.844Z" }, + { url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204 }, ] [[package]] @@ -212,7 +211,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "alibabacloud-tea-xml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434, upload-time = "2025-04-22T12:40:41.717Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434 } [[package]] name = "alibabacloud-oss-util" @@ -221,7 +220,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008, upload-time = "2021-04-28T09:25:04.056Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008 } [[package]] name = "alibabacloud-tea" @@ -231,7 +230,7 @@ dependencies = [ { name = "aiohttp" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785, upload-time = "2025-03-24T07:34:42.958Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785 } [[package]] name = "alibabacloud-tea-fileform" @@ -240,7 +239,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961, upload-time = "2021-04-28T09:22:54.56Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961 } [[package]] name = "alibabacloud-tea-openapi" @@ -253,7 +252,7 @@ dependencies = [ { name = "alibabacloud-tea-util" }, { name = "alibabacloud-tea-xml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087, upload-time = "2025-07-04T09:30:10.689Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087 } [[package]] name = "alibabacloud-tea-util" @@ -262,7 +261,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/23/18/35be17103c8f40f9eebec3b1567f51b3eec09c3a47a5dd62bcb413f4e619/alibabacloud_tea_util-0.3.13.tar.gz", hash = "sha256:8cbdfd2a03fbbf622f901439fa08643898290dd40e1d928347f6346e43f63c90", size = 6535, upload-time = "2024-07-15T12:25:12.07Z" } +sdist = { url = "https://files.pythonhosted.org/packages/23/18/35be17103c8f40f9eebec3b1567f51b3eec09c3a47a5dd62bcb413f4e619/alibabacloud_tea_util-0.3.13.tar.gz", hash = "sha256:8cbdfd2a03fbbf622f901439fa08643898290dd40e1d928347f6346e43f63c90", size = 6535 } [[package]] name = "alibabacloud-tea-xml" @@ -271,7 +270,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-tea" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466, upload-time = "2025-07-01T08:04:55.144Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466 } [[package]] name = "aliyun-python-sdk-core" @@ -281,7 +280,7 @@ dependencies = [ { name = "cryptography" }, { name = "jmespath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/09/da9f58eb38b4fdb97ba6523274fbf445ef6a06be64b433693da8307b4bec/aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9", size = 449555, upload-time = "2024-10-09T06:01:01.762Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/09/da9f58eb38b4fdb97ba6523274fbf445ef6a06be64b433693da8307b4bec/aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9", size = 449555 } [[package]] name = "aliyun-python-sdk-kms" @@ -290,9 +289,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aliyun-python-sdk-core" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/2c/9877d0e6b18ecf246df671ac65a5d1d9fecbf85bdcb5d43efbde0d4662eb/aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3", size = 12018, upload-time = "2024-08-30T09:01:20.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/2c/9877d0e6b18ecf246df671ac65a5d1d9fecbf85bdcb5d43efbde0d4662eb/aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3", size = 12018 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/5c/0132193d7da2c735669a1ed103b142fd63c9455984d48c5a88a1a516efaa/aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0", size = 99495, upload-time = "2024-08-30T09:01:18.462Z" }, + { url = "https://files.pythonhosted.org/packages/11/5c/0132193d7da2c735669a1ed103b142fd63c9455984d48c5a88a1a516efaa/aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0", size = 99495 }, ] [[package]] @@ -302,27 +301,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" } +sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944 }, ] [[package]] name = "aniso8601" version = "10.0.1" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/52179c4e3f1978d3d9a285f98c706642522750ef343e9738286130423730/aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845", size = 47190, upload-time = "2025-04-18T17:29:42.995Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/52179c4e3f1978d3d9a285f98c706642522750ef343e9738286130423730/aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845", size = 47190 } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/75/e0e10dc7ed1408c28e03a6cb2d7a407f99320eb953f229d008a7a6d05546/aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e", size = 52848, upload-time = "2025-04-18T17:29:41.492Z" }, + { url = "https://files.pythonhosted.org/packages/59/75/e0e10dc7ed1408c28e03a6cb2d7a407f99320eb953f229d008a7a6d05546/aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e", size = 52848 }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, ] [[package]] @@ -334,9 +333,9 @@ dependencies = [ { name = "sniffio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, ] [[package]] @@ 
-346,9 +345,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzlocal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347, upload-time = "2024-11-24T19:39:26.463Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004, upload-time = "2024-11-24T19:39:24.442Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004 }, ] [[package]] @@ -364,36 +363,36 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/27/b9/8c89191eb46915e9ba7bdb473e2fb1c510b7db3635ae5ede5e65b2176b9d/arize_phoenix_otel-0.9.2.tar.gz", hash = "sha256:a48c7d41f3ac60dc75b037f036bf3306d2af4af371cdb55e247e67957749bc31", size = 11599, upload-time = "2025-04-14T22:05:28.637Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/b9/8c89191eb46915e9ba7bdb473e2fb1c510b7db3635ae5ede5e65b2176b9d/arize_phoenix_otel-0.9.2.tar.gz", hash = "sha256:a48c7d41f3ac60dc75b037f036bf3306d2af4af371cdb55e247e67957749bc31", size = 11599 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/3d/f64136a758c649e883315939f30fe51ad0747024b0db05fd78450801a78d/arize_phoenix_otel-0.9.2-py3-none-any.whl", hash = "sha256:5286b33c58b596ef8edd9a4255ee00fd74f774b1e5dbd9393e77e87870a14d76", size = 12560, upload-time = "2025-04-14T22:05:27.162Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3d/f64136a758c649e883315939f30fe51ad0747024b0db05fd78450801a78d/arize_phoenix_otel-0.9.2-py3-none-any.whl", hash = "sha256:5286b33c58b596ef8edd9a4255ee00fd74f774b1e5dbd9393e77e87870a14d76", size = 12560 }, ] [[package]] name = "asgiref" version = "3.9.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790 }, ] [[package]] name = "async-timeout" version = "5.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233 }, ] [[package]] name = "attrs" version = "25.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032 } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815 }, ] [[package]] @@ -403,9 +402,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/09/47/df70ecd34fbf86d69833fe4e25bb9ecbaab995c8e49df726dd416f6bb822/authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917", size = 146074, upload-time = "2024-06-04T14:15:32.06Z" } +sdist = { url = "https://files.pythonhosted.org/packages/09/47/df70ecd34fbf86d69833fe4e25bb9ecbaab995c8e49df726dd416f6bb822/authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917", size = 146074 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/1f/bc95e43ffb57c05b8efcc376dd55a0240bf58f47ddf5a0f92452b6457b75/Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377", size = 223827, 
upload-time = "2024-06-04T14:15:29.218Z" }, + { url = "https://files.pythonhosted.org/packages/87/1f/bc95e43ffb57c05b8efcc376dd55a0240bf58f47ddf5a0f92452b6457b75/Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377", size = 223827 }, ] [[package]] @@ -417,9 +416,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/89/f53968635b1b2e53e4aad2dd641488929fef4ca9dfb0b97927fa7697ddf3/azure_core-1.35.0.tar.gz", hash = "sha256:c0be528489485e9ede59b6971eb63c1eaacf83ef53001bfe3904e475e972be5c", size = 339689, upload-time = "2025-07-03T00:55:23.496Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/89/f53968635b1b2e53e4aad2dd641488929fef4ca9dfb0b97927fa7697ddf3/azure_core-1.35.0.tar.gz", hash = "sha256:c0be528489485e9ede59b6971eb63c1eaacf83ef53001bfe3904e475e972be5c", size = 339689 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", hash = "sha256:8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", size = 210708, upload-time = "2025-07-03T00:55:25.238Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/bf94897361fdd650850f0f2e405b2293e2f12808239046232bdedf554301/azure_core-1.35.0-py3-none-any.whl", hash = "sha256:8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1", size = 210708 }, ] [[package]] @@ -432,9 +431,9 @@ dependencies = [ { name = "msal" }, { name = "msal-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/1c/bd704075e555046e24b069157ca25c81aedb4199c3e0b35acba9243a6ca6/azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e", size = 236726, upload-time = "2024-06-10T22:23:27.46Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/1c/bd704075e555046e24b069157ca25c81aedb4199c3e0b35acba9243a6ca6/azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e", size = 236726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/c5/ca55106564d2044ab90614381368b3756690fb7e3ab04552e17f308e4e4f/azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726", size = 166741, upload-time = "2024-06-10T22:23:30.906Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c5/ca55106564d2044ab90614381368b3756690fb7e3ab04552e17f308e4e4f/azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726", size = 166741 }, ] [[package]] @@ -446,18 +445,18 @@ dependencies = [ { name = "cryptography" }, { name = "msrest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838, upload-time = "2022-07-07T22:35:44.543Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/93/b13bf390e940a79a399981f75ac8d2e05a70112a95ebb7b41e9b752d2921/azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884", size = 684838 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309, upload-time = "2022-07-07T22:35:41.905Z" }, + { url = "https://files.pythonhosted.org/packages/0e/2a/b8246df35af68d64fb7292c93dbbde63cd25036f2f669a9d9ae59e518c76/azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3", size = 377309 }, ] [[package]] name = "backoff" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001, upload-time = "2022-10-05T19:19:32.061Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001 } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, ] [[package]] @@ -469,49 +468,49 @@ dependencies = [ { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/91/c218750fd515fef10d197a2385a81a5f3504d30637fc1268bafa53cc2837/bce_python_sdk-0.9.35.tar.gz", hash = "sha256:024a2b5cd086707c866225cf8631fa126edbccfdd5bc3c8a83fe2ea9aa768bf5", size = 247844, upload-time = "2025-05-19T11:23:35.223Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/91/c218750fd515fef10d197a2385a81a5f3504d30637fc1268bafa53cc2837/bce_python_sdk-0.9.35.tar.gz", hash = "sha256:024a2b5cd086707c866225cf8631fa126edbccfdd5bc3c8a83fe2ea9aa768bf5", size = 247844 } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/81/f574f6b300927a63596fa8e5081f5c0ad66d5cc99004d70d63c523f42ff8/bce_python_sdk-0.9.35-py3-none-any.whl", hash = "sha256:08c1575a0f2ec04b2fc17063fe6e47e1aab48e3bca1f26181cb8bed5528fa5de", size = 344813, upload-time = "2025-05-19T11:23:33.68Z" }, + { url = "https://files.pythonhosted.org/packages/28/81/f574f6b300927a63596fa8e5081f5c0ad66d5cc99004d70d63c523f42ff8/bce_python_sdk-0.9.35-py3-none-any.whl", hash = "sha256:08c1575a0f2ec04b2fc17063fe6e47e1aab48e3bca1f26181cb8bed5528fa5de", size = 344813 }, ] [[package]] name = "bcrypt" version = "4.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697, upload-time = "2025-02-28T01:24:09.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz", hash = 
"sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", size = 25697 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019, upload-time = "2025-02-28T01:23:05.838Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174, upload-time = "2025-02-28T01:23:07.274Z" }, - { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870, upload-time = "2025-02-28T01:23:09.151Z" }, - { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601, upload-time = "2025-02-28T01:23:11.461Z" }, - { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660, upload-time = "2025-02-28T01:23:12.989Z" }, - { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083, upload-time = "2025-02-28T01:23:14.5Z" }, - { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237, upload-time = "2025-02-28T01:23:16.686Z" }, - { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737, upload-time = "2025-02-28T01:23:18.897Z" }, - { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741, upload-time = "2025-02-28T01:23:21.041Z" }, - { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472, upload-time = "2025-02-28T01:23:23.183Z" }, - { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606, upload-time = 
"2025-02-28T01:23:25.361Z" }, - { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867, upload-time = "2025-02-28T01:23:26.875Z" }, - { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589, upload-time = "2025-02-28T01:23:28.381Z" }, - { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794, upload-time = "2025-02-28T01:23:30.187Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969, upload-time = "2025-02-28T01:23:31.945Z" }, - { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158, upload-time = "2025-02-28T01:23:34.161Z" }, - { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285, upload-time = "2025-02-28T01:23:35.765Z" }, - { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583, upload-time = "2025-02-28T01:23:38.021Z" }, - { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896, upload-time = "2025-02-28T01:23:39.575Z" }, - { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492, upload-time = "2025-02-28T01:23:40.901Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213, upload-time = "2025-02-28T01:23:42.653Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162, upload-time = "2025-02-28T01:23:43.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856, upload-time = "2025-02-28T01:23:46.011Z" }, - { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726, upload-time = "2025-02-28T01:23:47.575Z" }, - { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664, upload-time = "2025-02-28T01:23:49.059Z" }, - { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128, upload-time = "2025-02-28T01:23:50.399Z" }, - { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598, upload-time = "2025-02-28T01:23:51.775Z" }, - { url = "https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799, upload-time = "2025-02-28T01:23:53.139Z" }, - { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103, upload-time = "2025-02-28T01:24:00.764Z" }, - { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513, upload-time = "2025-02-28T01:24:02.243Z" }, - { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685, upload-time = "2025-02-28T01:24:04.512Z" }, - { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110, upload-time = "2025-02-28T01:24:05.896Z" }, + { url = "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", size = 498019 }, + { url = 
"https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", size = 279174 }, + { url = "https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", size = 283870 }, + { url = "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", size = 279601 }, + { url = "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", size = 297660 }, + { url = "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", size = 284083 }, + { url = "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", size = 279237 }, + { url = "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", size = 283737 }, + { url = "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", size = 312741 }, + { url = "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", size = 316472 }, + { url = "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", size = 343606 }, + { url = "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", size = 362867 }, + { url = "https://files.pythonhosted.org/packages/a9/97/8d3118efd8354c555a3422d544163f40d9f236be5b96c714086463f11699/bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51", size = 160589 }, + { url = "https://files.pythonhosted.org/packages/29/07/416f0b99f7f3997c69815365babbc2e8754181a4b1899d921b3c7d5b6f12/bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62", size = 152794 }, + { url = 
"https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", size = 498969 }, + { url = "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", size = 279158 }, + { url = "https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", size = 284285 }, + { url = "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", size = 279583 }, + { url = "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", size = 297896 }, + { url = "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", size = 284492 }, + { url = "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", size = 279213 }, + { url = "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", size = 284162 }, + { url = "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", size = 312856 }, + { url = "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", size = 316726 }, + { url = "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", size = 343664 }, + { url = "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", size = 363128 }, + { url = "https://files.pythonhosted.org/packages/11/99/12f6a58eca6dea4be992d6c681b7ec9410a1d9f5cf368c61437e31daa879/bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a", size = 160598 }, + { url = 
"https://files.pythonhosted.org/packages/a9/cf/45fb5261ece3e6b9817d3d82b2f343a505fd58674a92577923bc500bd1aa/bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b", size = 152799 }, + { url = "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", size = 275103 }, + { url = "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", size = 280513 }, + { url = "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", size = 274685 }, + { url = "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", size = 280110 }, ] [[package]] @@ -521,27 +520,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "soupsieve" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/0b/44c39cf3b18a9280950ad63a579ce395dda4c32193ee9da7ff0aed547094/beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da", size = 505113, upload-time = "2023-04-07T15:02:49.038Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/0b/44c39cf3b18a9280950ad63a579ce395dda4c32193ee9da7ff0aed547094/beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da", size = 505113 } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979, upload-time = "2023-04-07T15:02:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979 }, ] [[package]] name = "billiard" version = "4.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031, upload-time = "2024-09-21T13:40:22.491Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/58/1546c970afcd2a2428b1bfafecf2371d8951cc34b46701bea73f4280989e/billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f", size = 155031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766, upload-time = "2024-09-21T13:40:20.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/da/43b15f28fe5f9e027b41c539abc5469052e9d48fd75f8ff094ba2a0ae767/billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb", size = 86766 }, ] [[package]] name = "blinker" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } wheels = [ - { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, ] [[package]] @@ -553,9 +552,9 @@ dependencies = [ { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/99/3e8b48f15580672eda20f33439fc1622bd611f6238b6d05407320e1fb98c/boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca", size = 111028, upload-time = "2025-01-14T20:20:28.636Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/99/3e8b48f15580672eda20f33439fc1622bd611f6238b6d05407320e1fb98c/boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca", size = 111028 } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178, upload-time = "2025-01-14T20:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178 }, ] [[package]] @@ -567,9 +566,9 @@ dependencies = [ { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/ea/85b9940d6eedc04d0c6febf24d27311b6ee54f85ccc37192eb4db0dff5d6/boto3_stubs-1.39.3.tar.gz", hash = "sha256:9aad443b1d690951fd9ccb6fa20ad387bd0b1054c704566ff65dd0043a63fc26", size = 99947, upload-time = "2025-07-03T19:28:15.602Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/ea/85b9940d6eedc04d0c6febf24d27311b6ee54f85ccc37192eb4db0dff5d6/boto3_stubs-1.39.3.tar.gz", hash = "sha256:9aad443b1d690951fd9ccb6fa20ad387bd0b1054c704566ff65dd0043a63fc26", size = 99947 } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/b8/0c56297e5f290de17e838c7e4ff338f5b94351c6566aed70ee197a671dc5/boto3_stubs-1.39.3-py3-none-any.whl", hash = "sha256:4daddb19374efa6d1bef7aded9cede0075f380722a9e60ab129ebba14ae66b69", 
size = 69196, upload-time = "2025-07-03T19:28:09.4Z" }, + { url = "https://files.pythonhosted.org/packages/be/b8/0c56297e5f290de17e838c7e4ff338f5b94351c6566aed70ee197a671dc5/boto3_stubs-1.39.3-py3-none-any.whl", hash = "sha256:4daddb19374efa6d1bef7aded9cede0075f380722a9e60ab129ebba14ae66b69", size = 69196 }, ] [package.optional-dependencies] @@ -586,9 +585,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/9c/1df6deceee17c88f7170bad8325aa91452529d683486273928eecfd946d8/botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3", size = 13490969, upload-time = "2025-01-14T20:20:11.419Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/9c/1df6deceee17c88f7170bad8325aa91452529d683486273928eecfd946d8/botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3", size = 13490969 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216, upload-time = "2025-01-14T20:20:06.427Z" }, + { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216 }, ] [[package]] @@ -598,9 +597,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/05/45/27cabc7c3022dcb12de5098cc646b374065f5e72fae13600ff1756f365ee/botocore_stubs-1.38.46.tar.gz", hash = "sha256:a04e69766ab8bae338911c1897492f88d05cd489cd75f06e6eb4f135f9da8c7b", size = 42299, upload-time = "2025-06-29T22:58:24.765Z" } +sdist = { url = "https://files.pythonhosted.org/packages/05/45/27cabc7c3022dcb12de5098cc646b374065f5e72fae13600ff1756f365ee/botocore_stubs-1.38.46.tar.gz", hash = "sha256:a04e69766ab8bae338911c1897492f88d05cd489cd75f06e6eb4f135f9da8c7b", size = 42299 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/84/06490071e26bab22ac79a684e98445df118adcf80c58c33ba5af184030f2/botocore_stubs-1.38.46-py3-none-any.whl", hash = "sha256:cc21d9a7dd994bdd90872db4664d817c4719b51cda8004fd507a4bf65b085a75", size = 66083, upload-time = "2025-06-29T22:58:22.234Z" }, + { url = "https://files.pythonhosted.org/packages/cc/84/06490071e26bab22ac79a684e98445df118adcf80c58c33ba5af184030f2/botocore_stubs-1.38.46-py3-none-any.whl", hash = "sha256:cc21d9a7dd994bdd90872db4664d817c4719b51cda8004fd507a4bf65b085a75", size = 66083 }, ] [[package]] @@ -610,64 +609,64 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/82/dd20e69b97b9072ed2d26cc95c0a573461986bf62f7fde7ac59143490918/bottleneck-1.5.0.tar.gz", hash = "sha256:c860242cf20e69d5aab2ec3c5d6c8c2a15f19e4b25b28b8fca2c2a12cefae9d8", size = 104177, upload-time = "2025-05-13T21:11:21.158Z" } +sdist = { url = "https://files.pythonhosted.org/packages/80/82/dd20e69b97b9072ed2d26cc95c0a573461986bf62f7fde7ac59143490918/bottleneck-1.5.0.tar.gz", hash = "sha256:c860242cf20e69d5aab2ec3c5d6c8c2a15f19e4b25b28b8fca2c2a12cefae9d8", size = 104177 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/fd/5e/d66b2487c12fa3343013ac87a03bcefbeacf5f13ffa4ad56bb4bce319d09/bottleneck-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9be5dfdf1a662d1d4423d7b7e8dd9a1b7046dcc2ce67b6e94a31d1cc57a8558f", size = 99536, upload-time = "2025-05-13T21:10:34.324Z" }, - { url = "https://files.pythonhosted.org/packages/28/24/e7030fe27c7a9eb9cc8c86a4d74a7422d2c3e3466aecdf658617bea40491/bottleneck-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16fead35c0b5d307815997eef67d03c2151f255ca889e0fc3d68703f41aa5302", size = 357134, upload-time = "2025-05-13T21:10:35.764Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ce/91b0514a7ac456d934ebd90f0cae2314302f33c16e9489c99a4f496b1cff/bottleneck-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049162927cf802208cc8691fb99b108afe74656cdc96b9e2067cf56cb9d84056", size = 361243, upload-time = "2025-05-13T21:10:36.851Z" }, - { url = "https://files.pythonhosted.org/packages/be/f7/1a41889a6c0863b9f6236c14182bfb5f37c964e791b90ba721450817fc24/bottleneck-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2f5e863a4fdaf9c85416789aeb333d1cdd3603037fd854ad58b0e2ac73be16cf", size = 361326, upload-time = "2025-05-13T21:10:37.904Z" }, - { url = "https://files.pythonhosted.org/packages/d3/e8/d4772b5321cf62b53c792253e38db1f6beee4f2de81e65bce5a6fe78df8e/bottleneck-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8d123762f78717fc35ecf10cad45d08273fcb12ab40b3c847190b83fec236f03", size = 371849, upload-time = "2025-05-13T21:10:40.544Z" }, - { url = "https://files.pythonhosted.org/packages/29/dc/f88f6d476d7a3d6bd92f6e66f814d0bf088be20f0c6f716caa2a2ca02e82/bottleneck-1.5.0-cp311-cp311-win32.whl", hash = "sha256:07c2c1aa39917b5c9be77e85791aa598e8b2c00f8597a198b93628bbfde72a3f", size = 107710, upload-time = "2025-05-13T21:10:41.648Z" }, - { url = "https://files.pythonhosted.org/packages/17/03/f89a2eff4f919a7c98433df3be6fd9787c72966a36be289ec180f505b2d5/bottleneck-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:80ef9eea2a92fc5a1c04734aa1bcf317253241062c962eaa6e7f123b583d0109", size = 112055, upload-time = "2025-05-13T21:10:42.549Z" }, - { url = "https://files.pythonhosted.org/packages/8e/64/127e174cec548ab98bc0fa868b4f5d3ae5276e25c856d31d235d83d885a8/bottleneck-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbb0f0d38feda63050aa253cf9435e81a0ecfac954b0df84896636be9eabd9b6", size = 99640, upload-time = "2025-05-13T21:10:43.574Z" }, - { url = "https://files.pythonhosted.org/packages/59/89/6e0b6463a36fd4771a9227d22ea904f892b80d95154399dd3e89fb6001f8/bottleneck-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:613165ce39bf6bd80f5307da0f05842ba534b213a89526f1eba82ea0099592fc", size = 358009, upload-time = "2025-05-13T21:10:45.045Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d6/7d1795a4a9e6383d3710a94c44010c7f2a8ba58cb5f2d9e2834a1c179afe/bottleneck-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f218e4dae6511180dcc4f06d8300e0c81e7f3df382091f464c5a919d289fab8e", size = 362875, upload-time = "2025-05-13T21:10:46.16Z" }, - { url = "https://files.pythonhosted.org/packages/2b/1b/bab35ef291b9379a97e2fb986ce75f32eda38a47fc4954177b43590ee85e/bottleneck-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3886799cceb271eb67d057f6ecb13fb4582bda17a3b13b4fa0334638c59637c6", size = 361194, 
upload-time = "2025-05-13T21:10:47.631Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f3/a416fed726b81d2093578bc2112077f011c9f57b31e7ff3a1a9b00cce3d3/bottleneck-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dc8d553d4bf033d3e025cd32d4c034d2daf10709e31ced3909811d1c843e451c", size = 373253, upload-time = "2025-05-13T21:10:48.634Z" }, - { url = "https://files.pythonhosted.org/packages/0a/40/c372f9e59b3ce340d170fbdc24c12df3d2b3c22c4809b149b7129044180b/bottleneck-1.5.0-cp312-cp312-win32.whl", hash = "sha256:0dca825048a3076f34c4a35409e3277b31ceeb3cbb117bbe2a13ff5c214bcabc", size = 107915, upload-time = "2025-05-13T21:10:50.639Z" }, - { url = "https://files.pythonhosted.org/packages/28/5a/57571a3cd4e356bbd636bb2225fbe916f29adc2235ba3dc77cd4085c91c8/bottleneck-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f26005740e6ef6013eba8a48241606a963e862a601671eab064b7835cd12ef3d", size = 112148, upload-time = "2025-05-13T21:10:51.626Z" }, + { url = "https://files.pythonhosted.org/packages/fd/5e/d66b2487c12fa3343013ac87a03bcefbeacf5f13ffa4ad56bb4bce319d09/bottleneck-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9be5dfdf1a662d1d4423d7b7e8dd9a1b7046dcc2ce67b6e94a31d1cc57a8558f", size = 99536 }, + { url = "https://files.pythonhosted.org/packages/28/24/e7030fe27c7a9eb9cc8c86a4d74a7422d2c3e3466aecdf658617bea40491/bottleneck-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16fead35c0b5d307815997eef67d03c2151f255ca889e0fc3d68703f41aa5302", size = 357134 }, + { url = "https://files.pythonhosted.org/packages/d0/ce/91b0514a7ac456d934ebd90f0cae2314302f33c16e9489c99a4f496b1cff/bottleneck-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049162927cf802208cc8691fb99b108afe74656cdc96b9e2067cf56cb9d84056", size = 361243 }, + { url = "https://files.pythonhosted.org/packages/be/f7/1a41889a6c0863b9f6236c14182bfb5f37c964e791b90ba721450817fc24/bottleneck-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2f5e863a4fdaf9c85416789aeb333d1cdd3603037fd854ad58b0e2ac73be16cf", size = 361326 }, + { url = "https://files.pythonhosted.org/packages/d3/e8/d4772b5321cf62b53c792253e38db1f6beee4f2de81e65bce5a6fe78df8e/bottleneck-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8d123762f78717fc35ecf10cad45d08273fcb12ab40b3c847190b83fec236f03", size = 371849 }, + { url = "https://files.pythonhosted.org/packages/29/dc/f88f6d476d7a3d6bd92f6e66f814d0bf088be20f0c6f716caa2a2ca02e82/bottleneck-1.5.0-cp311-cp311-win32.whl", hash = "sha256:07c2c1aa39917b5c9be77e85791aa598e8b2c00f8597a198b93628bbfde72a3f", size = 107710 }, + { url = "https://files.pythonhosted.org/packages/17/03/f89a2eff4f919a7c98433df3be6fd9787c72966a36be289ec180f505b2d5/bottleneck-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:80ef9eea2a92fc5a1c04734aa1bcf317253241062c962eaa6e7f123b583d0109", size = 112055 }, + { url = "https://files.pythonhosted.org/packages/8e/64/127e174cec548ab98bc0fa868b4f5d3ae5276e25c856d31d235d83d885a8/bottleneck-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbb0f0d38feda63050aa253cf9435e81a0ecfac954b0df84896636be9eabd9b6", size = 99640 }, + { url = "https://files.pythonhosted.org/packages/59/89/6e0b6463a36fd4771a9227d22ea904f892b80d95154399dd3e89fb6001f8/bottleneck-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:613165ce39bf6bd80f5307da0f05842ba534b213a89526f1eba82ea0099592fc", size = 358009 }, + { url = 
"https://files.pythonhosted.org/packages/f7/d6/7d1795a4a9e6383d3710a94c44010c7f2a8ba58cb5f2d9e2834a1c179afe/bottleneck-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f218e4dae6511180dcc4f06d8300e0c81e7f3df382091f464c5a919d289fab8e", size = 362875 }, + { url = "https://files.pythonhosted.org/packages/2b/1b/bab35ef291b9379a97e2fb986ce75f32eda38a47fc4954177b43590ee85e/bottleneck-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3886799cceb271eb67d057f6ecb13fb4582bda17a3b13b4fa0334638c59637c6", size = 361194 }, + { url = "https://files.pythonhosted.org/packages/d5/f3/a416fed726b81d2093578bc2112077f011c9f57b31e7ff3a1a9b00cce3d3/bottleneck-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dc8d553d4bf033d3e025cd32d4c034d2daf10709e31ced3909811d1c843e451c", size = 373253 }, + { url = "https://files.pythonhosted.org/packages/0a/40/c372f9e59b3ce340d170fbdc24c12df3d2b3c22c4809b149b7129044180b/bottleneck-1.5.0-cp312-cp312-win32.whl", hash = "sha256:0dca825048a3076f34c4a35409e3277b31ceeb3cbb117bbe2a13ff5c214bcabc", size = 107915 }, + { url = "https://files.pythonhosted.org/packages/28/5a/57571a3cd4e356bbd636bb2225fbe916f29adc2235ba3dc77cd4085c91c8/bottleneck-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:f26005740e6ef6013eba8a48241606a963e862a601671eab064b7835cd12ef3d", size = 112148 }, ] [[package]] name = "brotli" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270, upload-time = "2023-09-07T14:05:41.643Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/12/ad41e7fadd5db55459c4c401842b47f7fee51068f86dd2894dd0dcfc2d2a/Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc", size = 873068, upload-time = "2023-09-07T14:03:37.779Z" }, - { url = "https://files.pythonhosted.org/packages/95/4e/5afab7b2b4b61a84e9c75b17814198ce515343a44e2ed4488fac314cd0a9/Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6", size = 446244, upload-time = "2023-09-07T14:03:39.223Z" }, - { url = "https://files.pythonhosted.org/packages/9d/e6/f305eb61fb9a8580c525478a4a34c5ae1a9bcb12c3aee619114940bc513d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd", size = 2906500, upload-time = "2023-09-07T14:03:40.858Z" }, - { url = "https://files.pythonhosted.org/packages/3e/4f/af6846cfbc1550a3024e5d3775ede1e00474c40882c7bf5b37a43ca35e91/Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf", size = 2943950, upload-time = "2023-09-07T14:03:42.896Z" }, - { url = "https://files.pythonhosted.org/packages/b3/e7/ca2993c7682d8629b62630ebf0d1f3bb3d579e667ce8e7ca03a0a0576a2d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61", size = 2918527, upload-time = "2023-09-07T14:03:44.552Z" }, - { url = "https://files.pythonhosted.org/packages/b3/96/da98e7bedc4c51104d29cc61e5f449a502dd3dbc211944546a4cc65500d3/Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327", size = 2845489, upload-time = "2023-09-07T14:03:46.594Z" }, - { url = "https://files.pythonhosted.org/packages/e8/ef/ccbc16947d6ce943a7f57e1a40596c75859eeb6d279c6994eddd69615265/Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd", size = 2914080, upload-time = "2023-09-07T14:03:48.204Z" }, - { url = "https://files.pythonhosted.org/packages/80/d6/0bd38d758d1afa62a5524172f0b18626bb2392d717ff94806f741fcd5ee9/Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9", size = 2813051, upload-time = "2023-09-07T14:03:50.348Z" }, - { url = "https://files.pythonhosted.org/packages/14/56/48859dd5d129d7519e001f06dcfbb6e2cf6db92b2702c0c2ce7d97e086c1/Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265", size = 2938172, upload-time = "2023-09-07T14:03:52.395Z" }, - { url = "https://files.pythonhosted.org/packages/3d/77/a236d5f8cd9e9f4348da5acc75ab032ab1ab2c03cc8f430d24eea2672888/Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8", size = 2933023, upload-time = "2023-09-07T14:03:53.96Z" }, - { url = "https://files.pythonhosted.org/packages/f1/87/3b283efc0f5cb35f7f84c0c240b1e1a1003a5e47141a4881bf87c86d0ce2/Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f", size = 2935871, upload-time = "2024-10-18T12:32:16.688Z" }, - { url = "https://files.pythonhosted.org/packages/f3/eb/2be4cc3e2141dc1a43ad4ca1875a72088229de38c68e842746b342667b2a/Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757", size = 2847784, upload-time = "2024-10-18T12:32:18.459Z" }, - { url = "https://files.pythonhosted.org/packages/66/13/b58ddebfd35edde572ccefe6890cf7c493f0c319aad2a5badee134b4d8ec/Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0", size = 3034905, upload-time = "2024-10-18T12:32:20.192Z" }, - { url = "https://files.pythonhosted.org/packages/84/9c/bc96b6c7db824998a49ed3b38e441a2cae9234da6fa11f6ed17e8cf4f147/Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b", size = 2929467, upload-time = "2024-10-18T12:32:21.774Z" }, - { url = "https://files.pythonhosted.org/packages/e7/71/8f161dee223c7ff7fea9d44893fba953ce97cf2c3c33f78ba260a91bcff5/Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50", size = 333169, upload-time = "2023-09-07T14:03:55.404Z" }, - { url = "https://files.pythonhosted.org/packages/02/8a/fece0ee1057643cb2a5bbf59682de13f1725f8482b2c057d4e799d7ade75/Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1", size = 357253, 
upload-time = "2023-09-07T14:03:56.643Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693, upload-time = "2024-10-18T12:32:23.824Z" }, - { url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489, upload-time = "2024-10-18T12:32:25.641Z" }, - { url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081, upload-time = "2023-09-07T14:03:57.967Z" }, - { url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244, upload-time = "2023-09-07T14:03:59.319Z" }, - { url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505, upload-time = "2023-09-07T14:04:01.327Z" }, - { url = "https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152, upload-time = "2023-09-07T14:04:03.033Z" }, - { url = "https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252, upload-time = "2023-09-07T14:04:04.675Z" }, - { url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955, upload-time = "2023-09-07T14:04:06.585Z" }, - { url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304, upload-time = "2023-09-07T14:04:08.668Z" }, - { url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452, upload-time = "2023-09-07T14:04:10.736Z" }, - { url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751, 
upload-time = "2023-09-07T14:04:12.875Z" }, - { url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757, upload-time = "2023-09-07T14:04:14.551Z" }, - { url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146, upload-time = "2024-10-18T12:32:27.257Z" }, - { url = "https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055, upload-time = "2024-10-18T12:32:29.376Z" }, - { url = "https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102, upload-time = "2024-10-18T12:32:31.371Z" }, - { url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029, upload-time = "2024-10-18T12:32:33.293Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276, upload-time = "2023-09-07T14:04:16.49Z" }, - { url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255, upload-time = "2023-09-07T14:04:17.83Z" }, + { url = "https://files.pythonhosted.org/packages/96/12/ad41e7fadd5db55459c4c401842b47f7fee51068f86dd2894dd0dcfc2d2a/Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc", size = 873068 }, + { url = "https://files.pythonhosted.org/packages/95/4e/5afab7b2b4b61a84e9c75b17814198ce515343a44e2ed4488fac314cd0a9/Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6", size = 446244 }, + { url = "https://files.pythonhosted.org/packages/9d/e6/f305eb61fb9a8580c525478a4a34c5ae1a9bcb12c3aee619114940bc513d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd", size = 2906500 }, + { url = "https://files.pythonhosted.org/packages/3e/4f/af6846cfbc1550a3024e5d3775ede1e00474c40882c7bf5b37a43ca35e91/Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf", size = 2943950 }, + { url = "https://files.pythonhosted.org/packages/b3/e7/ca2993c7682d8629b62630ebf0d1f3bb3d579e667ce8e7ca03a0a0576a2d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61", size = 2918527 }, + { url = "https://files.pythonhosted.org/packages/b3/96/da98e7bedc4c51104d29cc61e5f449a502dd3dbc211944546a4cc65500d3/Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327", size = 2845489 }, + { url = "https://files.pythonhosted.org/packages/e8/ef/ccbc16947d6ce943a7f57e1a40596c75859eeb6d279c6994eddd69615265/Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd", size = 2914080 }, + { url = "https://files.pythonhosted.org/packages/80/d6/0bd38d758d1afa62a5524172f0b18626bb2392d717ff94806f741fcd5ee9/Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9", size = 2813051 }, + { url = "https://files.pythonhosted.org/packages/14/56/48859dd5d129d7519e001f06dcfbb6e2cf6db92b2702c0c2ce7d97e086c1/Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265", size = 2938172 }, + { url = "https://files.pythonhosted.org/packages/3d/77/a236d5f8cd9e9f4348da5acc75ab032ab1ab2c03cc8f430d24eea2672888/Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8", size = 2933023 }, + { url = "https://files.pythonhosted.org/packages/f1/87/3b283efc0f5cb35f7f84c0c240b1e1a1003a5e47141a4881bf87c86d0ce2/Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f", size = 2935871 }, + { url = "https://files.pythonhosted.org/packages/f3/eb/2be4cc3e2141dc1a43ad4ca1875a72088229de38c68e842746b342667b2a/Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757", size = 2847784 }, + { url = "https://files.pythonhosted.org/packages/66/13/b58ddebfd35edde572ccefe6890cf7c493f0c319aad2a5badee134b4d8ec/Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0", size = 3034905 }, + { url = "https://files.pythonhosted.org/packages/84/9c/bc96b6c7db824998a49ed3b38e441a2cae9234da6fa11f6ed17e8cf4f147/Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b", size = 2929467 }, + { url = "https://files.pythonhosted.org/packages/e7/71/8f161dee223c7ff7fea9d44893fba953ce97cf2c3c33f78ba260a91bcff5/Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50", size = 333169 }, + { url = "https://files.pythonhosted.org/packages/02/8a/fece0ee1057643cb2a5bbf59682de13f1725f8482b2c057d4e799d7ade75/Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1", size = 357253 }, + { url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693 }, + { url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489 }, + { url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081 }, + { url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244 }, + { url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505 }, + { url = "https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152 }, + { url = "https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252 }, + { url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955 }, + { url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304 }, + { url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452 }, + { url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751 }, + { url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757 }, + { url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146 }, + { url = "https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055 }, + { url = 
"https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102 }, + { url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029 }, + { url = "https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276 }, + { url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255 }, ] [[package]] @@ -677,14 +676,14 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192, upload-time = "2023-09-14T14:22:40.707Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/11/7b96009d3dcc2c931e828ce1e157f03824a69fb728d06bfd7b2fc6f93718/brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851", size = 453786, upload-time = "2023-09-14T14:21:57.72Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e6/a8f46f4a4ee7856fbd6ac0c6fb0dc65ed181ba46cd77875b8d9bbe494d9e/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b", size = 2911165, upload-time = "2023-09-14T14:21:59.613Z" }, - { url = "https://files.pythonhosted.org/packages/be/20/201559dff14e83ba345a5ec03335607e47467b6633c210607e693aefac40/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814", size = 2927895, upload-time = "2023-09-14T14:22:01.22Z" }, - { url = "https://files.pythonhosted.org/packages/cd/15/695b1409264143be3c933f708a3f81d53c4a1e1ebbc06f46331decbf6563/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820", size = 2851834, upload-time = "2023-09-14T14:22:03.571Z" }, - { url = "https://files.pythonhosted.org/packages/b4/40/b961a702463b6005baf952794c2e9e0099bde657d0d7e007f923883b907f/brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb", size = 341731, upload-time = "2023-09-14T14:22:05.74Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/fa/5408a03c041114ceab628ce21766a4ea882aa6f6f0a800e04ee3a30ec6b9/brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613", size = 366783, upload-time = "2023-09-14T14:22:07.096Z" }, + { url = "https://files.pythonhosted.org/packages/a2/11/7b96009d3dcc2c931e828ce1e157f03824a69fb728d06bfd7b2fc6f93718/brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851", size = 453786 }, + { url = "https://files.pythonhosted.org/packages/d6/e6/a8f46f4a4ee7856fbd6ac0c6fb0dc65ed181ba46cd77875b8d9bbe494d9e/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b", size = 2911165 }, + { url = "https://files.pythonhosted.org/packages/be/20/201559dff14e83ba345a5ec03335607e47467b6633c210607e693aefac40/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814", size = 2927895 }, + { url = "https://files.pythonhosted.org/packages/cd/15/695b1409264143be3c933f708a3f81d53c4a1e1ebbc06f46331decbf6563/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820", size = 2851834 }, + { url = "https://files.pythonhosted.org/packages/b4/40/b961a702463b6005baf952794c2e9e0099bde657d0d7e007f923883b907f/brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb", size = 341731 }, + { url = "https://files.pythonhosted.org/packages/1c/fa/5408a03c041114ceab628ce21766a4ea882aa6f6f0a800e04ee3a30ec6b9/brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613", size = 366783 }, ] [[package]] @@ -694,9 +693,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698, upload-time = "2024-01-17T18:15:47.371Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/aa/4acaf814ff901145da37332e05bb510452ebed97bc9602695059dd46ef39/bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925", size = 698 } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189, upload-time = "2024-01-17T18:15:48.613Z" }, + { url = "https://files.pythonhosted.org/packages/51/bb/bf7aab772a159614954d84aa832c129624ba6c32faa559dfb200a534e50b/bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc", size = 1189 }, ] [[package]] @@ -708,18 +707,18 @@ dependencies = [ { name = "packaging" }, { name = "pyproject-hooks" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7", size 
= 46701, upload-time = "2024-10-06T17:22:25.251Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/46/aeab111f8e06793e4f0e421fcad593d547fb8313b50990f31681ee2fb1ad/build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7", size = 46701 } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5", size = 22950, upload-time = "2024-10-06T17:22:23.299Z" }, + { url = "https://files.pythonhosted.org/packages/84/c2/80633736cd183ee4a62107413def345f7e6e3c01563dbca1417363cf957e/build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5", size = 22950 }, ] [[package]] name = "cachetools" version = "5.3.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/4d/27a3e6dd09011649ad5210bdf963765bc8fa81a0827a4fc01bafd2705c5b/cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105", size = 26522, upload-time = "2024-02-26T20:33:23.386Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/4d/27a3e6dd09011649ad5210bdf963765bc8fa81a0827a4fc01bafd2705c5b/cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105", size = 26522 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/2b/a64c2d25a37aeb921fddb929111413049fc5f8b9a4c1aefaffaafe768d54/cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", size = 9325, upload-time = "2024-02-26T20:33:20.308Z" }, + { url = "https://files.pythonhosted.org/packages/fb/2b/a64c2d25a37aeb921fddb929111413049fc5f8b9a4c1aefaffaafe768d54/cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945", size = 9325 }, ] [[package]] @@ -736,18 +735,30 @@ dependencies = [ { name = "python-dateutil" }, { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144, upload-time = "2025-06-01T11:08:12.563Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775, upload-time = "2025-06-01T11:08:09.94Z" }, + { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775 }, +] + +[[package]] +name = "celery-types" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/e9/d1/0823e71c281e4ad0044e278cf1577d1a68e05f2809424bf94e1614925c5d/celery_types-0.23.0.tar.gz", hash = "sha256:402ed0555aea3cd5e1e6248f4632e4f18eec8edb2435173f9e6dc08449fa101e", size = 31479 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/8b/92bb54dd74d145221c3854aa245c84f4dc04cc9366147496182cec8e88e3/celery_types-0.23.0-py3-none-any.whl", hash = "sha256:0cc495b8d7729891b7e070d0ec8d4906d2373209656a6e8b8276fe1ed306af9a", size = 50189 }, ] [[package]] name = "certifi" version = "2025.6.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753 } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, + { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650 }, ] [[package]] @@ -757,75 +768,75 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264, upload-time = "2024-09-04T20:43:51.124Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651, upload-time = "2024-09-04T20:43:52.872Z" }, - { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259, upload-time = "2024-09-04T20:43:56.123Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200, upload-time = "2024-09-04T20:43:57.891Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235, upload-time = "2024-09-04T20:44:00.18Z" }, - { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721, upload-time = "2024-09-04T20:44:01.585Z" }, - { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242, upload-time = "2024-09-04T20:44:03.467Z" }, - { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999, upload-time = "2024-09-04T20:44:05.023Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242, upload-time = "2024-09-04T20:44:06.444Z" }, - { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604, upload-time = "2024-09-04T20:44:08.206Z" }, - { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727, upload-time = "2024-09-04T20:44:09.481Z" }, - { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400, upload-time = "2024-09-04T20:44:10.873Z" }, - { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" }, - { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" }, - { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" }, - { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" }, - { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" }, - { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" }, - { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" }, - { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = 
"https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, ] [[package]] name = "chardet" version = "5.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/32/cdc91dcf83849c7385bf8e2a5693d87376536ed000807fa07f5eab33430d/chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5", size = 2069617, upload-time = "2022-12-01T22:34:18.086Z" } +sdist = { url = "https://files.pythonhosted.org/packages/41/32/cdc91dcf83849c7385bf8e2a5693d87376536ed000807fa07f5eab33430d/chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5", size = 2069617 } wheels = [ - { url = "https://files.pythonhosted.org/packages/74/8f/8fc49109009e8d2169d94d72e6b1f4cd45c13d147ba7d6170fb41f22b08f/chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9", size = 199124, upload-time = "2022-12-01T22:34:14.609Z" }, + { url = "https://files.pythonhosted.org/packages/74/8f/8fc49109009e8d2169d94d72e6b1f4cd45c13d147ba7d6170fb41f22b08f/chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9", size = 199124 }, ] [[package]] name = "charset-normalizer" version = "3.4.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 
126367, upload-time = "2025-05-02T08:34:42.01Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash 
= "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794 }, + { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846 }, + { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350 }, + { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657 }, + { url = 
"https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260 }, + { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164 }, + { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571 }, + { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952 }, + { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959 }, + { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030 }, + { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015 }, + { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106 }, + { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402 }, + { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 }, + { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 }, + { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 }, + { url = 
"https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 }, + { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 }, + { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 }, + { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 }, + { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 }, + { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 }, + { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 }, + { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 }, + { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 }, + { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 }, + { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 }, ] [[package]] @@ -835,17 +846,17 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256, upload-time = "2024-07-22T20:19:29.259Z" } +sdist = 
{ url = "https://files.pythonhosted.org/packages/73/09/10d57569e399ce9cbc5eee2134996581c957f63a9addfa6ca657daf006b8/chroma_hnswlib-0.7.6.tar.gz", hash = "sha256:4dce282543039681160259d29fcde6151cc9106c6461e0485f57cdccd83059b7", size = 32256 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911, upload-time = "2024-07-22T20:18:33.46Z" }, - { url = "https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000, upload-time = "2024-07-22T20:18:36.16Z" }, - { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289, upload-time = "2024-07-22T20:18:37.761Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755, upload-time = "2024-07-22T20:18:39.949Z" }, - { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888, upload-time = "2024-07-22T20:18:45.003Z" }, - { url = "https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804, upload-time = "2024-07-22T20:18:46.442Z" }, - { url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421, upload-time = "2024-07-22T20:18:47.72Z" }, - { url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672, upload-time = "2024-07-22T20:18:49.583Z" }, - { url = "https://files.pythonhosted.org/packages/74/1e/80a033ea4466338824974a34f418e7b034a7748bf906f56466f5caa434b0/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5", size = 2436986, upload-time = "2024-07-22T20:18:51.872Z" }, + { url = "https://files.pythonhosted.org/packages/f5/af/d15fdfed2a204c0f9467ad35084fbac894c755820b203e62f5dcba2d41f1/chroma_hnswlib-0.7.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81181d54a2b1e4727369486a631f977ffc53c5533d26e3d366dda243fb0998ca", size = 196911 }, + { url = 
"https://files.pythonhosted.org/packages/0d/19/aa6f2139f1ff7ad23a690ebf2a511b2594ab359915d7979f76f3213e46c4/chroma_hnswlib-0.7.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b4ab4e11f1083dd0a11ee4f0e0b183ca9f0f2ed63ededba1935b13ce2b3606f", size = 185000 }, + { url = "https://files.pythonhosted.org/packages/79/b1/1b269c750e985ec7d40b9bbe7d66d0a890e420525187786718e7f6b07913/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53db45cd9173d95b4b0bdccb4dbff4c54a42b51420599c32267f3abbeb795170", size = 2377289 }, + { url = "https://files.pythonhosted.org/packages/c7/2d/d5663e134436e5933bc63516a20b5edc08b4c1b1588b9680908a5f1afd04/chroma_hnswlib-0.7.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c093f07a010b499c00a15bc9376036ee4800d335360570b14f7fe92badcdcf9", size = 2411755 }, + { url = "https://files.pythonhosted.org/packages/3e/79/1bce519cf186112d6d5ce2985392a89528c6e1e9332d680bf752694a4cdf/chroma_hnswlib-0.7.6-cp311-cp311-win_amd64.whl", hash = "sha256:0540b0ac96e47d0aa39e88ea4714358ae05d64bbe6bf33c52f316c664190a6a3", size = 151888 }, + { url = "https://files.pythonhosted.org/packages/93/ac/782b8d72de1c57b64fdf5cb94711540db99a92768d93d973174c62d45eb8/chroma_hnswlib-0.7.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e87e9b616c281bfbe748d01705817c71211613c3b063021f7ed5e47173556cb7", size = 197804 }, + { url = "https://files.pythonhosted.org/packages/32/4e/fd9ce0764228e9a98f6ff46af05e92804090b5557035968c5b4198bc7af9/chroma_hnswlib-0.7.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec5ca25bc7b66d2ecbf14502b5729cde25f70945d22f2aaf523c2d747ea68912", size = 185421 }, + { url = "https://files.pythonhosted.org/packages/d9/3d/b59a8dedebd82545d873235ef2d06f95be244dfece7ee4a1a6044f080b18/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305ae491de9d5f3c51e8bd52d84fdf2545a4a2bc7af49765cda286b7bb30b1d4", size = 2389672 }, + { url = "https://files.pythonhosted.org/packages/74/1e/80a033ea4466338824974a34f418e7b034a7748bf906f56466f5caa434b0/chroma_hnswlib-0.7.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:822ede968d25a2c88823ca078a58f92c9b5c4142e38c7c8b4c48178894a0a3c5", size = 2436986 }, ] [[package]] @@ -882,9 +893,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "uvicorn", extra = ["standard"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/03/31/6c8e05405bb02b4a1f71f9aa3eef242415565dabf6afc1bde7f64f726963/chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7", size = 33664540, upload-time = "2024-11-19T05:13:58.678Z" } +sdist = { url = "https://files.pythonhosted.org/packages/03/31/6c8e05405bb02b4a1f71f9aa3eef242415565dabf6afc1bde7f64f726963/chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7", size = 33664540 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/7a/10bf5dc92d13cc03230190fcc5016a0b138d99e5b36b8b89ee0fe1680e10/chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680", size = 617884, upload-time = "2024-11-19T05:13:56.29Z" }, + { url = "https://files.pythonhosted.org/packages/5f/7a/10bf5dc92d13cc03230190fcc5016a0b138d99e5b36b8b89ee0fe1680e10/chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680", size = 617884 }, ] [[package]] @@ -894,9 +905,9 @@ source = { 
registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342 } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215 }, ] [[package]] @@ -906,9 +917,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/ce/edb087fb53de63dad3b36408ca30368f438738098e668b78c87f93cd41df/click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e", size = 3505, upload-time = "2023-08-04T07:54:58.425Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/ce/edb087fb53de63dad3b36408ca30368f438738098e668b78c87f93cd41df/click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e", size = 3505 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/1a/aff8bb287a4b1400f69e09a53bd65de96aa5cee5691925b38731c67fc695/click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f", size = 4123, upload-time = "2023-08-04T07:54:56.875Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1a/aff8bb287a4b1400f69e09a53bd65de96aa5cee5691925b38731c67fc695/click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f", size = 4123 }, ] [[package]] @@ -918,9 +929,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631 }, ] [[package]] @@ -930,9 +941,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" }, + { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051 }, ] [[package]] @@ -943,9 +954,9 @@ dependencies = [ { name = "click" }, { name = "prompt-toolkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449 } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" }, + { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289 }, ] [[package]] @@ -959,28 +970,28 @@ dependencies = [ { name = "urllib3" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/8e/bf6012f7b45dbb74e19ad5c881a7bbcd1e7dd2b990f12cc434294d917800/clickhouse-connect-0.7.19.tar.gz", hash = "sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0", size = 84918, upload-time = "2024-08-21T21:37:16.639Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/8e/bf6012f7b45dbb74e19ad5c881a7bbcd1e7dd2b990f12cc434294d917800/clickhouse-connect-0.7.19.tar.gz", hash = "sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0", size = 84918 } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/6f/a78cad40dc0f1fee19094c40abd7d23ff04bb491732c3a65b3661d426c89/clickhouse_connect-0.7.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:ee47af8926a7ec3a970e0ebf29a82cbbe3b1b7eae43336a81b3a0ca18091de5f", size = 253530, upload-time = "2024-08-21T21:35:53.372Z" }, - { url = "https://files.pythonhosted.org/packages/40/82/419d110149900ace5eb0787c668d11e1657ac0eabb65c1404f039746f4ed/clickhouse_connect-0.7.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce429233b2d21a8a149c8cd836a2555393cbcf23d61233520db332942ffb8964", size = 245691, upload-time = "2024-08-21T21:35:55.074Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9c/ad6708ced6cf9418334d2bf19bbba3c223511ed852eb85f79b1e7c20cdbd/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:617c04f5c46eed3344a7861cd96fb05293e70d3b40d21541b1e459e7574efa96", size = 1055273, upload-time = "2024-08-21T21:35:56.478Z" }, - { url = "https://files.pythonhosted.org/packages/ea/99/88c24542d6218100793cfb13af54d7ad4143d6515b0b3d621ba3b5a2d8af/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08e33b8cc2dc1873edc5ee4088d4fc3c0dbb69b00e057547bcdc7e9680b43e5", size = 1067030, upload-time = "2024-08-21T21:35:58.096Z" }, - { url = "https://files.pythonhosted.org/packages/c8/84/19eb776b4e760317c21214c811f04f612cba7eee0f2818a7d6806898a994/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921886b887f762e5cc3eef57ef784d419a3f66df85fd86fa2e7fbbf464c4c54a", size = 1027207, upload-time = "2024-08-21T21:35:59.832Z" }, - { url = "https://files.pythonhosted.org/packages/22/81/c2982a33b088b6c9af5d0bdc46413adc5fedceae063b1f8b56570bb28887/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ad0cf8552a9e985cfa6524b674ae7c8f5ba51df5bd3ecddbd86c82cdbef41a7", size = 1054850, upload-time = "2024-08-21T21:36:01.559Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a4/4a84ed3e92323d12700011cc8c4039f00a8c888079d65e75a4d4758ba288/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:70f838ef0861cdf0e2e198171a1f3fd2ee05cf58e93495eeb9b17dfafb278186", size = 1022784, upload-time = "2024-08-21T21:36:02.805Z" }, - { url = "https://files.pythonhosted.org/packages/5e/67/3f5cc6f78c9adbbd6a3183a3f9f3196a116be19e958d7eaa6e307b391fed/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c5f0d207cb0dcc1adb28ced63f872d080924b7562b263a9d54d4693b670eb066", size = 1071084, upload-time = "2024-08-21T21:36:04.052Z" }, - { url = "https://files.pythonhosted.org/packages/01/8d/a294e1cc752e22bc6ee08aa421ea31ed9559b09d46d35499449140a5c374/clickhouse_connect-0.7.19-cp311-cp311-win32.whl", hash = "sha256:8c96c4c242b98fcf8005e678a26dbd4361748721b6fa158c1fe84ad15c7edbbe", size = 221156, upload-time = "2024-08-21T21:36:05.72Z" }, - { url = "https://files.pythonhosted.org/packages/68/69/09b3a4e53f5d3d770e9fa70f6f04642cdb37cc76d37279c55fd4e868f845/clickhouse_connect-0.7.19-cp311-cp311-win_amd64.whl", hash = "sha256:bda092bab224875ed7c7683707d63f8a2322df654c4716e6611893a18d83e908", size = 238826, upload-time = "2024-08-21T21:36:06.892Z" }, - { url = "https://files.pythonhosted.org/packages/af/f8/1d48719728bac33c1a9815e0a7230940e078fd985b09af2371715de78a3c/clickhouse_connect-0.7.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f170d08166438d29f0dcfc8a91b672c783dc751945559e65eefff55096f9274", size = 256687, upload-time = "2024-08-21T21:36:08.245Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/0d/3cbbbd204be045c4727f9007679ad97d3d1d559b43ba844373a79af54d16/clickhouse_connect-0.7.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26b80cb8f66bde9149a9a2180e2cc4895c1b7d34f9dceba81630a9b9a9ae66b2", size = 247631, upload-time = "2024-08-21T21:36:09.679Z" }, - { url = "https://files.pythonhosted.org/packages/b6/44/adb55285226d60e9c46331a9980c88dad8c8de12abb895c4e3149a088092/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba80e3598acf916c4d1b2515671f65d9efee612a783c17c56a5a646f4db59b9", size = 1053767, upload-time = "2024-08-21T21:36:11.361Z" }, - { url = "https://files.pythonhosted.org/packages/6c/f3/a109c26a41153768be57374cb823cac5daf74c9098a5c61081ffabeb4e59/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d38c30bd847af0ce7ff738152478f913854db356af4d5824096394d0eab873d", size = 1072014, upload-time = "2024-08-21T21:36:12.752Z" }, - { url = "https://files.pythonhosted.org/packages/51/80/9c200e5e392a538f2444c9a6a93e1cf0e36588c7e8720882ac001e23b246/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41d4b159071c0e4f607563932d4fa5c2a8fc27d3ba1200d0929b361e5191864", size = 1027423, upload-time = "2024-08-21T21:36:14.483Z" }, - { url = "https://files.pythonhosted.org/packages/33/a3/219fcd1572f1ce198dcef86da8c6c526b04f56e8b7a82e21119677f89379/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3682c2426f5dbda574611210e3c7c951b9557293a49eb60a7438552435873889", size = 1053683, upload-time = "2024-08-21T21:36:15.828Z" }, - { url = "https://files.pythonhosted.org/packages/5d/df/687d90fbc0fd8ce586c46400f3791deac120e4c080aa8b343c0f676dfb08/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6d492064dca278eb61be3a2d70a5f082e2ebc8ceebd4f33752ae234116192020", size = 1021120, upload-time = "2024-08-21T21:36:17.184Z" }, - { url = "https://files.pythonhosted.org/packages/c8/3b/39ba71b103275df8ec90d424dbaca2dba82b28398c3d2aeac5a0141b6aae/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:62612da163b934c1ff35df6155a47cf17ac0e2d2f9f0f8f913641e5c02cdf39f", size = 1073652, upload-time = "2024-08-21T21:36:19.053Z" }, - { url = "https://files.pythonhosted.org/packages/b3/92/06df8790a7d93d5d5f1098604fc7d79682784818030091966a3ce3f766a8/clickhouse_connect-0.7.19-cp312-cp312-win32.whl", hash = "sha256:196e48c977affc045794ec7281b4d711e169def00535ecab5f9fdeb8c177f149", size = 221589, upload-time = "2024-08-21T21:36:20.796Z" }, - { url = "https://files.pythonhosted.org/packages/42/1f/935d0810b73184a1d306f92458cb0a2e9b0de2377f536da874e063b8e422/clickhouse_connect-0.7.19-cp312-cp312-win_amd64.whl", hash = "sha256:b771ca6a473d65103dcae82810d3a62475c5372fc38d8f211513c72b954fb020", size = 239584, upload-time = "2024-08-21T21:36:22.105Z" }, + { url = "https://files.pythonhosted.org/packages/68/6f/a78cad40dc0f1fee19094c40abd7d23ff04bb491732c3a65b3661d426c89/clickhouse_connect-0.7.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee47af8926a7ec3a970e0ebf29a82cbbe3b1b7eae43336a81b3a0ca18091de5f", size = 253530 }, + { url = "https://files.pythonhosted.org/packages/40/82/419d110149900ace5eb0787c668d11e1657ac0eabb65c1404f039746f4ed/clickhouse_connect-0.7.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce429233b2d21a8a149c8cd836a2555393cbcf23d61233520db332942ffb8964", size = 245691 }, + { url = 
"https://files.pythonhosted.org/packages/e3/9c/ad6708ced6cf9418334d2bf19bbba3c223511ed852eb85f79b1e7c20cdbd/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:617c04f5c46eed3344a7861cd96fb05293e70d3b40d21541b1e459e7574efa96", size = 1055273 }, + { url = "https://files.pythonhosted.org/packages/ea/99/88c24542d6218100793cfb13af54d7ad4143d6515b0b3d621ba3b5a2d8af/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08e33b8cc2dc1873edc5ee4088d4fc3c0dbb69b00e057547bcdc7e9680b43e5", size = 1067030 }, + { url = "https://files.pythonhosted.org/packages/c8/84/19eb776b4e760317c21214c811f04f612cba7eee0f2818a7d6806898a994/clickhouse_connect-0.7.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:921886b887f762e5cc3eef57ef784d419a3f66df85fd86fa2e7fbbf464c4c54a", size = 1027207 }, + { url = "https://files.pythonhosted.org/packages/22/81/c2982a33b088b6c9af5d0bdc46413adc5fedceae063b1f8b56570bb28887/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ad0cf8552a9e985cfa6524b674ae7c8f5ba51df5bd3ecddbd86c82cdbef41a7", size = 1054850 }, + { url = "https://files.pythonhosted.org/packages/7b/a4/4a84ed3e92323d12700011cc8c4039f00a8c888079d65e75a4d4758ba288/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:70f838ef0861cdf0e2e198171a1f3fd2ee05cf58e93495eeb9b17dfafb278186", size = 1022784 }, + { url = "https://files.pythonhosted.org/packages/5e/67/3f5cc6f78c9adbbd6a3183a3f9f3196a116be19e958d7eaa6e307b391fed/clickhouse_connect-0.7.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c5f0d207cb0dcc1adb28ced63f872d080924b7562b263a9d54d4693b670eb066", size = 1071084 }, + { url = "https://files.pythonhosted.org/packages/01/8d/a294e1cc752e22bc6ee08aa421ea31ed9559b09d46d35499449140a5c374/clickhouse_connect-0.7.19-cp311-cp311-win32.whl", hash = "sha256:8c96c4c242b98fcf8005e678a26dbd4361748721b6fa158c1fe84ad15c7edbbe", size = 221156 }, + { url = "https://files.pythonhosted.org/packages/68/69/09b3a4e53f5d3d770e9fa70f6f04642cdb37cc76d37279c55fd4e868f845/clickhouse_connect-0.7.19-cp311-cp311-win_amd64.whl", hash = "sha256:bda092bab224875ed7c7683707d63f8a2322df654c4716e6611893a18d83e908", size = 238826 }, + { url = "https://files.pythonhosted.org/packages/af/f8/1d48719728bac33c1a9815e0a7230940e078fd985b09af2371715de78a3c/clickhouse_connect-0.7.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f170d08166438d29f0dcfc8a91b672c783dc751945559e65eefff55096f9274", size = 256687 }, + { url = "https://files.pythonhosted.org/packages/ed/0d/3cbbbd204be045c4727f9007679ad97d3d1d559b43ba844373a79af54d16/clickhouse_connect-0.7.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26b80cb8f66bde9149a9a2180e2cc4895c1b7d34f9dceba81630a9b9a9ae66b2", size = 247631 }, + { url = "https://files.pythonhosted.org/packages/b6/44/adb55285226d60e9c46331a9980c88dad8c8de12abb895c4e3149a088092/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba80e3598acf916c4d1b2515671f65d9efee612a783c17c56a5a646f4db59b9", size = 1053767 }, + { url = "https://files.pythonhosted.org/packages/6c/f3/a109c26a41153768be57374cb823cac5daf74c9098a5c61081ffabeb4e59/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d38c30bd847af0ce7ff738152478f913854db356af4d5824096394d0eab873d", size = 1072014 }, + { url = 
"https://files.pythonhosted.org/packages/51/80/9c200e5e392a538f2444c9a6a93e1cf0e36588c7e8720882ac001e23b246/clickhouse_connect-0.7.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d41d4b159071c0e4f607563932d4fa5c2a8fc27d3ba1200d0929b361e5191864", size = 1027423 }, + { url = "https://files.pythonhosted.org/packages/33/a3/219fcd1572f1ce198dcef86da8c6c526b04f56e8b7a82e21119677f89379/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3682c2426f5dbda574611210e3c7c951b9557293a49eb60a7438552435873889", size = 1053683 }, + { url = "https://files.pythonhosted.org/packages/5d/df/687d90fbc0fd8ce586c46400f3791deac120e4c080aa8b343c0f676dfb08/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6d492064dca278eb61be3a2d70a5f082e2ebc8ceebd4f33752ae234116192020", size = 1021120 }, + { url = "https://files.pythonhosted.org/packages/c8/3b/39ba71b103275df8ec90d424dbaca2dba82b28398c3d2aeac5a0141b6aae/clickhouse_connect-0.7.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:62612da163b934c1ff35df6155a47cf17ac0e2d2f9f0f8f913641e5c02cdf39f", size = 1073652 }, + { url = "https://files.pythonhosted.org/packages/b3/92/06df8790a7d93d5d5f1098604fc7d79682784818030091966a3ce3f766a8/clickhouse_connect-0.7.19-cp312-cp312-win32.whl", hash = "sha256:196e48c977affc045794ec7281b4d711e169def00535ecab5f9fdeb8c177f149", size = 221589 }, + { url = "https://files.pythonhosted.org/packages/42/1f/935d0810b73184a1d306f92458cb0a2e9b0de2377f536da874e063b8e422/clickhouse_connect-0.7.19-cp312-cp312-win_amd64.whl", hash = "sha256:b771ca6a473d65103dcae82810d3a62475c5372fc38d8f211513c72b954fb020", size = 239584 }, ] [[package]] @@ -999,7 +1010,7 @@ dependencies = [ { name = "urllib3" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/e5/23dcc950e873127df0135cf45144062a3207f5d2067259c73854e8ce7228/clickzetta_connector_python-0.8.102-py3-none-any.whl", hash = "sha256:c45486ae77fd82df7113ec67ec50e772372588d79c23757f8ee6291a057994a7", size = 77861, upload-time = "2025-07-17T03:11:59.543Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e5/23dcc950e873127df0135cf45144062a3207f5d2067259c73854e8ce7228/clickzetta_connector_python-0.8.102-py3-none-any.whl", hash = "sha256:c45486ae77fd82df7113ec67ec50e772372588d79c23757f8ee6291a057994a7", size = 77861 }, ] [[package]] @@ -1011,18 +1022,18 @@ dependencies = [ { name = "requests" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/25/6d0481860583f44953bd791de0b7c4f6d7ead7223f8a17e776247b34a5b4/cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3", size = 93261, upload-time = "2023-04-25T23:20:19.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/25/6d0481860583f44953bd791de0b7c4f6d7ead7223f8a17e776247b34a5b4/cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3", size = 93261 } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/97/fc88803a451029688dffd7eb446dc1b529657577aec13aceff1cc9628c5d/cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0", size = 99652, upload-time = "2023-04-25T23:20:15.974Z" }, + { url = "https://files.pythonhosted.org/packages/81/97/fc88803a451029688dffd7eb446dc1b529657577aec13aceff1cc9628c5d/cloudscraper-1.2.71-py2.py3-none-any.whl", hash = 
"sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0", size = 99652 }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[package]] @@ -1032,9 +1043,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "humanfriendly" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, ] [[package]] @@ -1048,53 +1059,53 @@ dependencies = [ { name = "six" }, { name = "xmltodict" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384, upload-time = "2024-06-14T08:02:37.063Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/f2/be99b41433b33a76896680920fca621f191875ca410a66778015e47a501b/cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81", size = 108384 } [[package]] name = "couchbase" version = "4.3.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/70/7cf92b2443330e7a4b626a02fe15fbeb1531337d75e6ae6393294e960d18/couchbase-4.3.6.tar.gz", hash = "sha256:d58c5ccdad5d85fc026f328bf4190c4fc0041fdbe68ad900fb32fc5497c3f061", size = 6517695, upload-time = 
"2025-05-15T17:21:38.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/70/7cf92b2443330e7a4b626a02fe15fbeb1531337d75e6ae6393294e960d18/couchbase-4.3.6.tar.gz", hash = "sha256:d58c5ccdad5d85fc026f328bf4190c4fc0041fdbe68ad900fb32fc5497c3f061", size = 6517695 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/0a/eae21d3a9331f7c93e8483f686e1bcb9e3b48f2ce98193beb0637a620926/couchbase-4.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4c10fd26271c5630196b9bcc0dd7e17a45fa9c7e46ed5756e5690d125423160c", size = 4775710, upload-time = "2025-05-15T17:20:29.388Z" }, - { url = "https://files.pythonhosted.org/packages/f6/98/0ca042a42f5807bbf8050f52fff39ebceebc7bea7e5897907758f3e1ad39/couchbase-4.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:811eee7a6013cea7b15a718e201ee1188df162c656d27c7882b618ab57a08f3a", size = 4020743, upload-time = "2025-05-15T17:20:31.515Z" }, - { url = "https://files.pythonhosted.org/packages/f8/0f/c91407cb082d2322217e8f7ca4abb8eda016a81a4db5a74b7ac6b737597d/couchbase-4.3.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc177e0161beb1e6e8c4b9561efcb97c51aed55a77ee11836ca194d33ae22b7", size = 4796091, upload-time = "2025-05-15T17:20:33.818Z" }, - { url = "https://files.pythonhosted.org/packages/8c/02/5567b660543828bdbbc68dcae080e388cb0be391aa8a97cce9d8c8a6c147/couchbase-4.3.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02afb1c1edd6b215f702510412b5177ed609df8135930c23789bbc5901dd1b45", size = 5015684, upload-time = "2025-05-15T17:20:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d1/767908826d5bdd258addab26d7f1d21bc42bafbf5f30d1b556ace06295af/couchbase-4.3.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:594e9eb17bb76ba8e10eeee17a16aef897dd90d33c6771cf2b5b4091da415b32", size = 5673513, upload-time = "2025-05-15T17:20:38.972Z" }, - { url = "https://files.pythonhosted.org/packages/f2/25/39ecde0a06692abce8bb0df4f15542933f05883647a1a57cdc7bbed9c77c/couchbase-4.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:db22c56e38b8313f65807aa48309c8b8c7c44d5517b9ff1d8b4404d4740ec286", size = 4010728, upload-time = "2025-05-15T17:20:43.286Z" }, - { url = "https://files.pythonhosted.org/packages/b1/55/c12b8f626de71363fbe30578f4a0de1b8bb41afbe7646ff8538c3b38ce2a/couchbase-4.3.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a2ae13432b859f513485d4cee691e1e4fce4af23ed4218b9355874b146343f8c", size = 4693517, upload-time = "2025-05-15T17:20:45.433Z" }, - { url = "https://files.pythonhosted.org/packages/a1/aa/2184934d283d99b34a004f577bf724d918278a2962781ca5690d4fa4b6c6/couchbase-4.3.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ea5ca7e34b5d023c8bab406211ab5d71e74a976ba25fa693b4f8e6c74f85aa2", size = 4022393, upload-time = "2025-05-15T17:20:47.442Z" }, - { url = "https://files.pythonhosted.org/packages/80/29/ba6d3b205a51c04c270c1b56ea31da678b7edc565b35a34237ec2cfc708d/couchbase-4.3.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6eaca0a71fd8f9af4344b7d6474d7b74d1784ae9a658f6bc3751df5f9a4185ae", size = 4798396, upload-time = "2025-05-15T17:20:49.473Z" }, - { url = "https://files.pythonhosted.org/packages/4a/94/d7d791808bd9064c01f965015ff40ee76e6bac10eaf2c73308023b9bdedf/couchbase-4.3.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0470378b986f69368caed6d668ac6530e635b0c1abaef3d3f524cfac0dacd878", size = 5018099, upload-time = "2025-05-15T17:20:52.541Z" }, - { url = 
"https://files.pythonhosted.org/packages/a6/04/cec160f9f4b862788e2a0167616472a5695b2f569bd62204938ab674835d/couchbase-4.3.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:374ce392558f1688ac073aa0b15c256b1a441201d965811fd862357ff05d27a9", size = 5672633, upload-time = "2025-05-15T17:20:55.994Z" }, - { url = "https://files.pythonhosted.org/packages/1b/a2/1da2ab45412b9414e2c6a578e0e7a24f29b9261ef7de11707c2fc98045b8/couchbase-4.3.6-cp312-cp312-win_amd64.whl", hash = "sha256:cd734333de34d8594504c163bb6c47aea9cc1f2cefdf8e91875dd9bf14e61e29", size = 4013298, upload-time = "2025-05-15T17:20:59.533Z" }, + { url = "https://files.pythonhosted.org/packages/f3/0a/eae21d3a9331f7c93e8483f686e1bcb9e3b48f2ce98193beb0637a620926/couchbase-4.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4c10fd26271c5630196b9bcc0dd7e17a45fa9c7e46ed5756e5690d125423160c", size = 4775710 }, + { url = "https://files.pythonhosted.org/packages/f6/98/0ca042a42f5807bbf8050f52fff39ebceebc7bea7e5897907758f3e1ad39/couchbase-4.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:811eee7a6013cea7b15a718e201ee1188df162c656d27c7882b618ab57a08f3a", size = 4020743 }, + { url = "https://files.pythonhosted.org/packages/f8/0f/c91407cb082d2322217e8f7ca4abb8eda016a81a4db5a74b7ac6b737597d/couchbase-4.3.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc177e0161beb1e6e8c4b9561efcb97c51aed55a77ee11836ca194d33ae22b7", size = 4796091 }, + { url = "https://files.pythonhosted.org/packages/8c/02/5567b660543828bdbbc68dcae080e388cb0be391aa8a97cce9d8c8a6c147/couchbase-4.3.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02afb1c1edd6b215f702510412b5177ed609df8135930c23789bbc5901dd1b45", size = 5015684 }, + { url = "https://files.pythonhosted.org/packages/dc/d1/767908826d5bdd258addab26d7f1d21bc42bafbf5f30d1b556ace06295af/couchbase-4.3.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:594e9eb17bb76ba8e10eeee17a16aef897dd90d33c6771cf2b5b4091da415b32", size = 5673513 }, + { url = "https://files.pythonhosted.org/packages/f2/25/39ecde0a06692abce8bb0df4f15542933f05883647a1a57cdc7bbed9c77c/couchbase-4.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:db22c56e38b8313f65807aa48309c8b8c7c44d5517b9ff1d8b4404d4740ec286", size = 4010728 }, + { url = "https://files.pythonhosted.org/packages/b1/55/c12b8f626de71363fbe30578f4a0de1b8bb41afbe7646ff8538c3b38ce2a/couchbase-4.3.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a2ae13432b859f513485d4cee691e1e4fce4af23ed4218b9355874b146343f8c", size = 4693517 }, + { url = "https://files.pythonhosted.org/packages/a1/aa/2184934d283d99b34a004f577bf724d918278a2962781ca5690d4fa4b6c6/couchbase-4.3.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ea5ca7e34b5d023c8bab406211ab5d71e74a976ba25fa693b4f8e6c74f85aa2", size = 4022393 }, + { url = "https://files.pythonhosted.org/packages/80/29/ba6d3b205a51c04c270c1b56ea31da678b7edc565b35a34237ec2cfc708d/couchbase-4.3.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6eaca0a71fd8f9af4344b7d6474d7b74d1784ae9a658f6bc3751df5f9a4185ae", size = 4798396 }, + { url = "https://files.pythonhosted.org/packages/4a/94/d7d791808bd9064c01f965015ff40ee76e6bac10eaf2c73308023b9bdedf/couchbase-4.3.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0470378b986f69368caed6d668ac6530e635b0c1abaef3d3f524cfac0dacd878", size = 5018099 }, + { url = 
"https://files.pythonhosted.org/packages/a6/04/cec160f9f4b862788e2a0167616472a5695b2f569bd62204938ab674835d/couchbase-4.3.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:374ce392558f1688ac073aa0b15c256b1a441201d965811fd862357ff05d27a9", size = 5672633 }, + { url = "https://files.pythonhosted.org/packages/1b/a2/1da2ab45412b9414e2c6a578e0e7a24f29b9261ef7de11707c2fc98045b8/couchbase-4.3.6-cp312-cp312-win_amd64.whl", hash = "sha256:cd734333de34d8594504c163bb6c47aea9cc1f2cefdf8e91875dd9bf14e61e29", size = 4013298 }, ] [[package]] name = "coverage" version = "7.2.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575, upload-time = "2023-05-29T20:08:50.273Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/8b/421f30467e69ac0e414214856798d4bc32da1336df745e49e49ae5c1e2a8/coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59", size = 762575 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895, upload-time = "2023-05-29T20:07:21.963Z" }, - { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120, upload-time = "2023-05-29T20:07:23.765Z" }, - { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178, upload-time = "2023-05-29T20:07:25.281Z" }, - { url = "https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754, upload-time = "2023-05-29T20:07:27.044Z" }, - { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 232558, upload-time = "2023-05-29T20:07:28.743Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509, upload-time = "2023-05-29T20:07:30.434Z" }, - { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924, upload-time = "2023-05-29T20:07:32.065Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977, upload-time = "2023-05-29T20:07:34.184Z" }, - { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168, upload-time = "2023-05-29T20:07:35.869Z" }, - { url = "https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185, upload-time = "2023-05-29T20:07:37.39Z" }, - { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020, upload-time = "2023-05-29T20:07:38.724Z" }, - { url = "https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994, upload-time = "2023-05-29T20:07:40.274Z" }, - { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358, upload-time = "2023-05-29T20:07:41.998Z" }, - { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316, upload-time = "2023-05-29T20:07:43.539Z" }, - { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159, upload-time = "2023-05-29T20:07:44.982Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127, upload-time = "2023-05-29T20:07:46.522Z" }, - { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833, upload-time = "2023-05-29T20:07:47.992Z" }, - { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463, upload-time = "2023-05-29T20:07:49.939Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347, upload-time = "2023-05-29T20:07:51.909Z" }, + { url = "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f", size = 200895 }, + { url = "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe", size = 201120 }, + { url = "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3", size = 233178 }, + { url = "https://files.pythonhosted.org/packages/c1/49/4d487e2ad5d54ed82ac1101e467e8994c09d6123c91b2a962145f3d262c2/coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f", size = 230754 }, + { url = "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb", size = 232558 }, + { url = "https://files.pythonhosted.org/packages/8f/a8/12cc7b261f3082cc299ab61f677f7e48d93e35ca5c3c2f7241ed5525ccea/coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833", size = 241509 }, + { url = "https://files.pythonhosted.org/packages/04/fa/43b55101f75a5e9115259e8be70ff9279921cb6b17f04c34a5702ff9b1f7/coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97", size = 239924 }, + { url = "https://files.pythonhosted.org/packages/68/5f/d2bd0f02aa3c3e0311986e625ccf97fdc511b52f4f1a063e4f37b624772f/coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a", size = 240977 }, + { url = "https://files.pythonhosted.org/packages/ba/92/69c0722882643df4257ecc5437b83f4c17ba9e67f15dc6b77bad89b6982e/coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a", size = 203168 }, + { url = "https://files.pythonhosted.org/packages/b1/96/c12ed0dfd4ec587f3739f53eb677b9007853fd486ccb0e7d5512a27bab2e/coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562", size = 204185 }, + { url = "https://files.pythonhosted.org/packages/ff/d5/52fa1891d1802ab2e1b346d37d349cb41cdd4fd03f724ebbf94e80577687/coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4", size = 201020 }, + { url = 
"https://files.pythonhosted.org/packages/24/df/6765898d54ea20e3197a26d26bb65b084deefadd77ce7de946b9c96dfdc5/coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4", size = 233994 }, + { url = "https://files.pythonhosted.org/packages/15/81/b108a60bc758b448c151e5abceed027ed77a9523ecbc6b8a390938301841/coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01", size = 231358 }, + { url = "https://files.pythonhosted.org/packages/61/90/c76b9462f39897ebd8714faf21bc985b65c4e1ea6dff428ea9dc711ed0dd/coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6", size = 233316 }, + { url = "https://files.pythonhosted.org/packages/04/d6/8cba3bf346e8b1a4fb3f084df7d8cea25a6b6c56aaca1f2e53829be17e9e/coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d", size = 240159 }, + { url = "https://files.pythonhosted.org/packages/6e/ea/4a252dc77ca0605b23d477729d139915e753ee89e4c9507630e12ad64a80/coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de", size = 238127 }, + { url = "https://files.pythonhosted.org/packages/9f/5c/d9760ac497c41f9c4841f5972d0edf05d50cad7814e86ee7d133ec4a0ac8/coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d", size = 239833 }, + { url = "https://files.pythonhosted.org/packages/69/8c/26a95b08059db1cbb01e4b0e6d40f2e9debb628c6ca86b78f625ceaf9bab/coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511", size = 203463 }, + { url = "https://files.pythonhosted.org/packages/b7/00/14b00a0748e9eda26e97be07a63cc911108844004687321ddcc213be956c/coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3", size = 204347 }, ] [package.optional-dependencies] @@ -1106,37 +1117,37 @@ toml = [ name = "crc32c" version = "2.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7f/4c/4e40cc26347ac8254d3f25b9f94710b8e8df24ee4dddc1ba41907a88a94d/crc32c-2.7.1.tar.gz", hash = "sha256:f91b144a21eef834d64178e01982bb9179c354b3e9e5f4c803b0e5096384968c", size = 45712, upload-time = "2024-09-24T06:20:17.553Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/4c/4e40cc26347ac8254d3f25b9f94710b8e8df24ee4dddc1ba41907a88a94d/crc32c-2.7.1.tar.gz", hash = "sha256:f91b144a21eef834d64178e01982bb9179c354b3e9e5f4c803b0e5096384968c", size = 45712 } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/8e/2f37f46368bbfd50edfc11b96f0aa135699034b1b020966c70ebaff3463b/crc32c-2.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:19e03a50545a3ef400bd41667d5525f71030488629c57d819e2dd45064f16192", size = 49672, upload-time = "2024-09-24T06:18:18.032Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b8/e52f7c4b045b871c2984d70f37c31d4861b533a8082912dfd107a96cf7c1/crc32c-2.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c03286b1e5ce9bed7090084f206aacd87c5146b4b10de56fe9e86cbbbf851cf", size = 37155, upload-time = 
"2024-09-24T06:18:19.373Z" }, - { url = "https://files.pythonhosted.org/packages/25/ee/0cfa82a68736697f3c7e435ba658c2ef8c997f42b89f6ab4545efe1b2649/crc32c-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ebbf144a1a56a532b353e81fa0f3edca4f4baa1bf92b1dde2c663a32bb6a15", size = 35372, upload-time = "2024-09-24T06:18:20.983Z" }, - { url = "https://files.pythonhosted.org/packages/aa/92/c878aaba81c431fcd93a059e9f6c90db397c585742793f0bf6e0c531cc67/crc32c-2.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96b794fd11945298fdd5eb1290a812efb497c14bc42592c5c992ca077458eeba", size = 54879, upload-time = "2024-09-24T06:18:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/5b/f5/ab828ab3907095e06b18918408748950a9f726ee2b37be1b0839fb925ee1/crc32c-2.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df7194dd3c0efb5a21f5d70595b7a8b4fd9921fbbd597d6d8e7a11eca3e2d27", size = 52588, upload-time = "2024-09-24T06:18:24.463Z" }, - { url = "https://files.pythonhosted.org/packages/6a/2b/9e29e9ac4c4213d60491db09487125db358cd9263490fbadbd55e48fbe03/crc32c-2.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d698eec444b18e296a104d0b9bb6c596c38bdcb79d24eba49604636e9d747305", size = 53674, upload-time = "2024-09-24T06:18:25.624Z" }, - { url = "https://files.pythonhosted.org/packages/79/ed/df3c4c14bf1b29f5c9b52d51fb6793e39efcffd80b2941d994e8f7f5f688/crc32c-2.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e07cf10ef852d219d179333fd706d1c415626f1f05e60bd75acf0143a4d8b225", size = 54691, upload-time = "2024-09-24T06:18:26.578Z" }, - { url = "https://files.pythonhosted.org/packages/0c/47/4917af3c9c1df2fff28bbfa6492673c9adeae5599dcc207bbe209847489c/crc32c-2.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2a051f296e6e92e13efee3b41db388931cdb4a2800656cd1ed1d9fe4f13a086", size = 52896, upload-time = "2024-09-24T06:18:28.174Z" }, - { url = "https://files.pythonhosted.org/packages/1b/6f/26fc3dda5835cda8f6cd9d856afe62bdeae428de4c34fea200b0888e8835/crc32c-2.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1738259802978cdf428f74156175da6a5fdfb7256f647fdc0c9de1bc6cd7173", size = 53554, upload-time = "2024-09-24T06:18:29.104Z" }, - { url = "https://files.pythonhosted.org/packages/56/3e/6f39127f7027c75d130c0ba348d86a6150dff23761fbc6a5f71659f4521e/crc32c-2.7.1-cp311-cp311-win32.whl", hash = "sha256:f7786d219a1a1bf27d0aa1869821d11a6f8e90415cfffc1e37791690d4a848a1", size = 38370, upload-time = "2024-09-24T06:18:30.013Z" }, - { url = "https://files.pythonhosted.org/packages/c9/fb/1587c2705a3a47a3d0067eecf9a6fec510761c96dec45c7b038fb5c8ff46/crc32c-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:887f6844bb3ad35f0778cd10793ad217f7123a5422e40041231b8c4c7329649d", size = 39795, upload-time = "2024-09-24T06:18:31.324Z" }, - { url = "https://files.pythonhosted.org/packages/1d/02/998dc21333413ce63fe4c1ca70eafe61ca26afc7eb353f20cecdb77d614e/crc32c-2.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7d1c4e761fe42bf856130daf8b2658df33fe0ced3c43dadafdfeaa42b57b950", size = 49568, upload-time = "2024-09-24T06:18:32.425Z" }, - { url = "https://files.pythonhosted.org/packages/9c/3e/e3656bfa76e50ef87b7136fef2dbf3c46e225629432fc9184fdd7fd187ff/crc32c-2.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73361c79a6e4605204457f19fda18b042a94508a52e53d10a4239da5fb0f6a34", size = 37019, upload-time = "2024-09-24T06:18:34.097Z" }, - { 
url = "https://files.pythonhosted.org/packages/0b/7d/5ff9904046ad15a08772515db19df43107bf5e3901a89c36a577b5f40ba0/crc32c-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd778fc8ac0ed2ffbfb122a9aa6a0e409a8019b894a1799cda12c01534493e0", size = 35373, upload-time = "2024-09-24T06:18:35.02Z" }, - { url = "https://files.pythonhosted.org/packages/4d/41/4aedc961893f26858ab89fc772d0eaba91f9870f19eaa933999dcacb94ec/crc32c-2.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ef661b34e9f25991fface7f9ad85e81bbc1b3fe3b916fd58c893eabe2fa0b8", size = 54675, upload-time = "2024-09-24T06:18:35.954Z" }, - { url = "https://files.pythonhosted.org/packages/d6/63/8cabf09b7e39b9fec8f7010646c8b33057fc8d67e6093b3cc15563d23533/crc32c-2.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571aa4429444b5d7f588e4377663592145d2d25eb1635abb530f1281794fc7c9", size = 52386, upload-time = "2024-09-24T06:18:36.896Z" }, - { url = "https://files.pythonhosted.org/packages/79/13/13576941bf7cf95026abae43d8427c812c0054408212bf8ed490eda846b0/crc32c-2.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02a3bd67dea95cdb25844aaf44ca2e1b0c1fd70b287ad08c874a95ef4bb38db", size = 53495, upload-time = "2024-09-24T06:18:38.099Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b6/55ffb26d0517d2d6c6f430ce2ad36ae7647c995c5bfd7abce7f32bb2bad1/crc32c-2.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d17637c4867672cb8adeea007294e3c3df9d43964369516cfe2c1f47ce500a", size = 54456, upload-time = "2024-09-24T06:18:39.051Z" }, - { url = "https://files.pythonhosted.org/packages/c2/1a/5562e54cb629ecc5543d3604dba86ddfc7c7b7bf31d64005b38a00d31d31/crc32c-2.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4a400ac3c69a32e180d8753fd7ec7bccb80ade7ab0812855dce8a208e72495f", size = 52647, upload-time = "2024-09-24T06:18:40.021Z" }, - { url = "https://files.pythonhosted.org/packages/48/ec/ce4138eaf356cd9aae60bbe931755e5e0151b3eca5f491fce6c01b97fd59/crc32c-2.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:588587772e55624dd9c7a906ec9e8773ae0b6ac5e270fc0bc84ee2758eba90d5", size = 53332, upload-time = "2024-09-24T06:18:40.925Z" }, - { url = "https://files.pythonhosted.org/packages/5e/b5/144b42cd838a901175a916078781cb2c3c9f977151c9ba085aebd6d15b22/crc32c-2.7.1-cp312-cp312-win32.whl", hash = "sha256:9f14b60e5a14206e8173dd617fa0c4df35e098a305594082f930dae5488da428", size = 38371, upload-time = "2024-09-24T06:18:42.711Z" }, - { url = "https://files.pythonhosted.org/packages/ae/c4/7929dcd5d9b57db0cce4fe6f6c191049380fc6d8c9b9f5581967f4ec018e/crc32c-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:7c810a246660a24dc818047dc5f89c7ce7b2814e1e08a8e99993f4103f7219e8", size = 39805, upload-time = "2024-09-24T06:18:43.6Z" }, + { url = "https://files.pythonhosted.org/packages/45/8e/2f37f46368bbfd50edfc11b96f0aa135699034b1b020966c70ebaff3463b/crc32c-2.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:19e03a50545a3ef400bd41667d5525f71030488629c57d819e2dd45064f16192", size = 49672 }, + { url = "https://files.pythonhosted.org/packages/ed/b8/e52f7c4b045b871c2984d70f37c31d4861b533a8082912dfd107a96cf7c1/crc32c-2.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c03286b1e5ce9bed7090084f206aacd87c5146b4b10de56fe9e86cbbbf851cf", size = 37155 }, + { url = 
"https://files.pythonhosted.org/packages/25/ee/0cfa82a68736697f3c7e435ba658c2ef8c997f42b89f6ab4545efe1b2649/crc32c-2.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ebbf144a1a56a532b353e81fa0f3edca4f4baa1bf92b1dde2c663a32bb6a15", size = 35372 }, + { url = "https://files.pythonhosted.org/packages/aa/92/c878aaba81c431fcd93a059e9f6c90db397c585742793f0bf6e0c531cc67/crc32c-2.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96b794fd11945298fdd5eb1290a812efb497c14bc42592c5c992ca077458eeba", size = 54879 }, + { url = "https://files.pythonhosted.org/packages/5b/f5/ab828ab3907095e06b18918408748950a9f726ee2b37be1b0839fb925ee1/crc32c-2.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df7194dd3c0efb5a21f5d70595b7a8b4fd9921fbbd597d6d8e7a11eca3e2d27", size = 52588 }, + { url = "https://files.pythonhosted.org/packages/6a/2b/9e29e9ac4c4213d60491db09487125db358cd9263490fbadbd55e48fbe03/crc32c-2.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d698eec444b18e296a104d0b9bb6c596c38bdcb79d24eba49604636e9d747305", size = 53674 }, + { url = "https://files.pythonhosted.org/packages/79/ed/df3c4c14bf1b29f5c9b52d51fb6793e39efcffd80b2941d994e8f7f5f688/crc32c-2.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e07cf10ef852d219d179333fd706d1c415626f1f05e60bd75acf0143a4d8b225", size = 54691 }, + { url = "https://files.pythonhosted.org/packages/0c/47/4917af3c9c1df2fff28bbfa6492673c9adeae5599dcc207bbe209847489c/crc32c-2.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d2a051f296e6e92e13efee3b41db388931cdb4a2800656cd1ed1d9fe4f13a086", size = 52896 }, + { url = "https://files.pythonhosted.org/packages/1b/6f/26fc3dda5835cda8f6cd9d856afe62bdeae428de4c34fea200b0888e8835/crc32c-2.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1738259802978cdf428f74156175da6a5fdfb7256f647fdc0c9de1bc6cd7173", size = 53554 }, + { url = "https://files.pythonhosted.org/packages/56/3e/6f39127f7027c75d130c0ba348d86a6150dff23761fbc6a5f71659f4521e/crc32c-2.7.1-cp311-cp311-win32.whl", hash = "sha256:f7786d219a1a1bf27d0aa1869821d11a6f8e90415cfffc1e37791690d4a848a1", size = 38370 }, + { url = "https://files.pythonhosted.org/packages/c9/fb/1587c2705a3a47a3d0067eecf9a6fec510761c96dec45c7b038fb5c8ff46/crc32c-2.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:887f6844bb3ad35f0778cd10793ad217f7123a5422e40041231b8c4c7329649d", size = 39795 }, + { url = "https://files.pythonhosted.org/packages/1d/02/998dc21333413ce63fe4c1ca70eafe61ca26afc7eb353f20cecdb77d614e/crc32c-2.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f7d1c4e761fe42bf856130daf8b2658df33fe0ced3c43dadafdfeaa42b57b950", size = 49568 }, + { url = "https://files.pythonhosted.org/packages/9c/3e/e3656bfa76e50ef87b7136fef2dbf3c46e225629432fc9184fdd7fd187ff/crc32c-2.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73361c79a6e4605204457f19fda18b042a94508a52e53d10a4239da5fb0f6a34", size = 37019 }, + { url = "https://files.pythonhosted.org/packages/0b/7d/5ff9904046ad15a08772515db19df43107bf5e3901a89c36a577b5f40ba0/crc32c-2.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd778fc8ac0ed2ffbfb122a9aa6a0e409a8019b894a1799cda12c01534493e0", size = 35373 }, + { url = "https://files.pythonhosted.org/packages/4d/41/4aedc961893f26858ab89fc772d0eaba91f9870f19eaa933999dcacb94ec/crc32c-2.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:56ef661b34e9f25991fface7f9ad85e81bbc1b3fe3b916fd58c893eabe2fa0b8", size = 54675 }, + { url = "https://files.pythonhosted.org/packages/d6/63/8cabf09b7e39b9fec8f7010646c8b33057fc8d67e6093b3cc15563d23533/crc32c-2.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571aa4429444b5d7f588e4377663592145d2d25eb1635abb530f1281794fc7c9", size = 52386 }, + { url = "https://files.pythonhosted.org/packages/79/13/13576941bf7cf95026abae43d8427c812c0054408212bf8ed490eda846b0/crc32c-2.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c02a3bd67dea95cdb25844aaf44ca2e1b0c1fd70b287ad08c874a95ef4bb38db", size = 53495 }, + { url = "https://files.pythonhosted.org/packages/3d/b6/55ffb26d0517d2d6c6f430ce2ad36ae7647c995c5bfd7abce7f32bb2bad1/crc32c-2.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99d17637c4867672cb8adeea007294e3c3df9d43964369516cfe2c1f47ce500a", size = 54456 }, + { url = "https://files.pythonhosted.org/packages/c2/1a/5562e54cb629ecc5543d3604dba86ddfc7c7b7bf31d64005b38a00d31d31/crc32c-2.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4a400ac3c69a32e180d8753fd7ec7bccb80ade7ab0812855dce8a208e72495f", size = 52647 }, + { url = "https://files.pythonhosted.org/packages/48/ec/ce4138eaf356cd9aae60bbe931755e5e0151b3eca5f491fce6c01b97fd59/crc32c-2.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:588587772e55624dd9c7a906ec9e8773ae0b6ac5e270fc0bc84ee2758eba90d5", size = 53332 }, + { url = "https://files.pythonhosted.org/packages/5e/b5/144b42cd838a901175a916078781cb2c3c9f977151c9ba085aebd6d15b22/crc32c-2.7.1-cp312-cp312-win32.whl", hash = "sha256:9f14b60e5a14206e8173dd617fa0c4df35e098a305594082f930dae5488da428", size = 38371 }, + { url = "https://files.pythonhosted.org/packages/ae/c4/7929dcd5d9b57db0cce4fe6f6c191049380fc6d8c9b9f5581967f4ec018e/crc32c-2.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:7c810a246660a24dc818047dc5f89c7ce7b2814e1e08a8e99993f4103f7219e8", size = 39805 }, ] [[package]] name = "crcmod" version = "1.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c3bcd6c33d2473c1918e0b7f6826a043ca1245dd4e5b/crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e", size = 89670, upload-time = "2010-06-27T14:35:29.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c3bcd6c33d2473c1918e0b7f6826a043ca1245dd4e5b/crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e", size = 89670 } [[package]] name = "cryptography" @@ -1145,38 +1156,38 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903, upload-time = "2025-07-02T13:06:25.941Z" } +sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092, upload-time = "2025-07-02T13:05:01.514Z" }, - { url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926, upload-time = "2025-07-02T13:05:04.741Z" }, - { url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235, upload-time = "2025-07-02T13:05:07.084Z" }, - { url = "https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785, upload-time = "2025-07-02T13:05:09.321Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050, upload-time = "2025-07-02T13:05:11.069Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379, upload-time = "2025-07-02T13:05:13.32Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355, upload-time = "2025-07-02T13:05:15.017Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087, upload-time = "2025-07-02T13:05:16.945Z" }, - { url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873, upload-time = "2025-07-02T13:05:18.743Z" }, - { url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651, upload-time = "2025-07-02T13:05:21.382Z" }, - { url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050, upload-time = "2025-07-02T13:05:23.39Z" }, - { 
url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224, upload-time = "2025-07-02T13:05:25.202Z" }, - { url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143, upload-time = "2025-07-02T13:05:27.229Z" }, - { url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780, upload-time = "2025-07-02T13:05:29.299Z" }, - { url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091, upload-time = "2025-07-02T13:05:31.221Z" }, - { url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711, upload-time = "2025-07-02T13:05:33.062Z" }, - { url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299, upload-time = "2025-07-02T13:05:34.94Z" }, - { url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558, upload-time = "2025-07-02T13:05:37.288Z" }, - { url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020, upload-time = "2025-07-02T13:05:39.102Z" }, - { url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759, upload-time = "2025-07-02T13:05:41.398Z" }, - { url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991, upload-time = "2025-07-02T13:05:43.64Z" }, - { url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189, upload-time = "2025-07-02T13:05:46.045Z" }, - { 
url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769, upload-time = "2025-07-02T13:05:48.329Z" }, - { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" }, - { url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878, upload-time = "2025-07-02T13:06:06.339Z" }, - { url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447, upload-time = "2025-07-02T13:06:08.345Z" }, - { url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778, upload-time = "2025-07-02T13:06:10.263Z" }, - { url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627, upload-time = "2025-07-02T13:06:13.097Z" }, - { url = "https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593, upload-time = "2025-07-02T13:06:15.689Z" }, - { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092 }, + { url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926 }, + { url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235 }, + { url = 
"https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785 }, + { url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050 }, + { url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379 }, + { url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355 }, + { url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087 }, + { url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873 }, + { url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651 }, + { url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050 }, + { url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224 }, + { url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143 }, + { url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780 }, + { url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091 }, + { url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711 }, + { url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299 }, + { url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558 }, + { url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020 }, + { url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759 }, + { url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991 }, + { url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189 }, + { url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769 }, + { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016 }, + { url = "https://files.pythonhosted.org/packages/c0/71/9bdbcfd58d6ff5084687fe722c58ac718ebedbc98b9f8f93781354e6d286/cryptography-45.0.5-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c4a6ff8a30e9e3d38ac0539e9a9e02540ab3f827a3394f8852432f6b0ea152e", size = 3587878 }, + { url = "https://files.pythonhosted.org/packages/f0/63/83516cfb87f4a8756eaa4203f93b283fda23d210fc14e1e594bd5f20edb6/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bd4c45986472694e5121084c6ebbd112aa919a25e783b87eb95953c9573906d6", size = 4152447 }, + { url = "https://files.pythonhosted.org/packages/22/11/d2823d2a5a0bd5802b3565437add16f5c8ce1f0778bf3822f89ad2740a38/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:982518cd64c54fcada9d7e5cf28eabd3ee76bd03ab18e08a48cad7e8b6f31b18", size = 4386778 }, + { url = "https://files.pythonhosted.org/packages/5f/38/6bf177ca6bce4fe14704ab3e93627c5b0ca05242261a2e43ef3168472540/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:12e55281d993a793b0e883066f590c1ae1e802e3acb67f8b442e721e475e6463", size = 4151627 }, + { url = 
"https://files.pythonhosted.org/packages/38/6a/69fc67e5266bff68a91bcb81dff8fb0aba4d79a78521a08812048913e16f/cryptography-45.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:5aa1e32983d4443e310f726ee4b071ab7569f58eedfdd65e9675484a4eb67bd1", size = 4385593 }, + { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106 }, ] [[package]] @@ -1187,27 +1198,27 @@ dependencies = [ { name = "marshmallow" }, { name = "typing-inspect" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686 }, ] [[package]] name = "decorator" version = "5.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, ] [[package]] name = "defusedxml" version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = 
"sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, ] [[package]] @@ -1217,9 +1228,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744, upload-time = "2025-01-27T10:46:25.7Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/97/06afe62762c9a8a86af0cfb7bfdab22a43ad17138b07af5b1a58442690a2/deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d", size = 2928744 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998, upload-time = "2025-01-27T10:46:09.186Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c6/ac0b6c1e2d138f1002bcf799d330bd6d85084fece321e662a14223794041/Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec", size = 9998 }, ] [[package]] @@ -1229,14 +1240,14 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788, upload-time = "2020-04-20T14:23:38.738Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788 } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178, upload-time = "2020-04-20T14:23:36.581Z" }, + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, ] [[package]] name = "dify-api" -version = "1.7.2" +version = "1.8.0" source = { virtual = "." 
} dependencies = [ { name = "arize-phoenix-otel" }, @@ -1254,7 +1265,7 @@ dependencies = [ { name = "flask-login" }, { name = "flask-migrate" }, { name = "flask-orjson" }, - { name = "flask-restful" }, + { name = "flask-restx" }, { name = "flask-sqlalchemy" }, { name = "gevent" }, { name = "gmpy2" }, @@ -1326,6 +1337,7 @@ dependencies = [ [package.dev-dependencies] dev = [ { name = "boto3-stubs" }, + { name = "celery-types" }, { name = "coverage" }, { name = "dotenv-linter" }, { name = "faker" }, @@ -1436,13 +1448,13 @@ requires-dist = [ { name = "cachetools", specifier = "~=5.3.0" }, { name = "celery", specifier = "~=5.5.2" }, { name = "chardet", specifier = "~=5.1.0" }, - { name = "flask", specifier = "~=3.1.0" }, + { name = "flask", specifier = "~=3.1.2" }, { name = "flask-compress", specifier = "~=1.17" }, { name = "flask-cors", specifier = "~=6.0.0" }, { name = "flask-login", specifier = "~=0.6.3" }, { name = "flask-migrate", specifier = "~=4.0.7" }, { name = "flask-orjson", specifier = "~=2.0.0" }, - { name = "flask-restful", specifier = "~=0.3.10" }, + { name = "flask-restx", specifier = ">=1.3.0" }, { name = "flask-sqlalchemy", specifier = "~=3.1.1" }, { name = "gevent", specifier = "~=24.11.1" }, { name = "gmpy2", specifier = "~=2.2.1" }, @@ -1490,7 +1502,7 @@ requires-dist = [ { name = "pydantic", specifier = "~=2.11.4" }, { name = "pydantic-extra-types", specifier = "~=2.10.3" }, { name = "pydantic-settings", specifier = "~=2.9.1" }, - { name = "pyjwt", specifier = "~=2.8.0" }, + { name = "pyjwt", specifier = "~=2.10.1" }, { name = "pypdfium2", specifier = "==4.30.0" }, { name = "python-docx", specifier = "~=1.1.0" }, { name = "python-dotenv", specifier = "==1.0.1" }, @@ -1502,9 +1514,9 @@ requires-dist = [ { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" }, { name = "sqlalchemy", specifier = "~=2.0.29" }, { name = "sseclient-py", specifier = ">=1.8.0" }, - { name = "starlette", specifier = "==0.41.0" }, + { name = "starlette", specifier = "==0.47.2" }, { name = "tiktoken", specifier = "~=0.9.0" }, - { name = "transformers", specifier = "~=4.51.0" }, + { name = "transformers", specifier = "~=4.53.0" }, { name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.16.1" }, { name = "weave", specifier = "~=0.51.0" }, { name = "webvtt-py", specifier = "~=0.5.1" }, @@ -1514,12 +1526,13 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ { name = "boto3-stubs", specifier = ">=1.38.20" }, + { name = "celery-types", specifier = ">=0.23.0" }, { name = "coverage", specifier = "~=7.2.4" }, { name = "dotenv-linter", specifier = "~=0.5.0" }, { name = "faker", specifier = "~=32.1.0" }, { name = "hypothesis", specifier = ">=6.131.15" }, { name = "lxml-stubs", specifier = "~=0.5.1" }, - { name = "mypy", specifier = "~=1.16.0" }, + { name = "mypy", specifier = "~=1.17.1" }, { name = "pandas-stubs", specifier = "~=2.2.3" }, { name = "pytest", specifier = "~=8.3.2" }, { name = "pytest-benchmark", specifier = "~=4.0.0" }, @@ -1580,7 +1593,7 @@ storage = [ { name = "google-cloud-storage", specifier = "==2.16.0" }, { name = "opendal", specifier = "~=0.45.16" }, { name = "oss2", specifier = "==2.18.5" }, - { name = "supabase", specifier = "~=2.8.1" }, + { name = "supabase", specifier = "~=2.18.1" }, { name = "tos", specifier = "~=2.7.1" }, ] tools = [ @@ -1602,7 +1615,7 @@ vdb = [ { name = "pgvector", specifier = "==0.2.5" }, { name = "pymilvus", specifier = "~=2.5.0" }, { name = "pymochow", specifier = "==1.3.1" }, - { name = "pyobvector", 
specifier = "~=0.1.6" }, + { name = "pyobvector", specifier = "~=0.2.15" }, { name = "qdrant-client", specifier = "==1.9.0" }, { name = "tablestore", specifier = "==6.2.0" }, { name = "tcvectordb", specifier = "~=1.6.4" }, @@ -1617,18 +1630,18 @@ vdb = [ name = "diskcache" version = "5.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" }, + { url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550 }, ] [[package]] name = "distro" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, ] [[package]] @@ -1640,18 +1653,18 @@ dependencies = [ { name = "requests" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834, upload-time = "2024-05-23T11:13:57.216Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774, upload-time = "2024-05-23T11:13:55.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, ] [[package]] name = "docstring-parser" version = "0.16" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565, upload-time = "2024-03-15T10:39:44.419Z" } +sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533, upload-time = "2024-03-15T10:39:41.527Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533 }, ] [[package]] @@ -1665,18 +1678,18 @@ dependencies = [ { name = "ply" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/fe/77e184ccc312f6263cbcc48a9579eec99f5c7ff72a9b1bd7812cafc22bbb/dotenv_linter-0.5.0.tar.gz", hash = "sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea", size = 15346, upload-time = "2024-03-13T11:52:10.52Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/fe/77e184ccc312f6263cbcc48a9579eec99f5c7ff72a9b1bd7812cafc22bbb/dotenv_linter-0.5.0.tar.gz", hash = "sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea", size = 15346 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/01/62ed4374340e6cf17c5084828974d96db8085e4018439ac41dc3cbbbcab3/dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd", size = 21770, upload-time = "2024-03-13T11:52:08.607Z" }, + { url = "https://files.pythonhosted.org/packages/f0/01/62ed4374340e6cf17c5084828974d96db8085e4018439ac41dc3cbbbcab3/dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd", size = 21770 }, ] [[package]] name = "durationpy" version = "0.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/a4/e44218c2b394e31a6dd0d6b095c4e1f32d0be54c2a4b250032d717647bab/durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba", size = 3335, upload-time = "2025-05-17T13:52:37.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/a4/e44218c2b394e31a6dd0d6b095c4e1f32d0be54c2a4b250032d717647bab/durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba", size = 3335 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", 
size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922 }, ] [[package]] @@ -1686,9 +1699,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607 }, ] [[package]] @@ -1699,9 +1712,9 @@ dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/54/d498a766ac8fa475f931da85a154666cc81a70f8eb4a780bc8e4e934e9ac/elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2", size = 73425, upload-time = "2025-03-13T07:28:30.776Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/54/d498a766ac8fa475f931da85a154666cc81a70f8eb4a780bc8e4e934e9ac/elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2", size = 73425 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/cd/b71d5bc74cde7fc6fd9b2ff9389890f45d9762cbbbf81dc5e51fd7588c4a/elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8", size = 64969, upload-time = "2025-03-13T07:28:29.031Z" }, + { url = "https://files.pythonhosted.org/packages/cf/cd/b71d5bc74cde7fc6fd9b2ff9389890f45d9762cbbbf81dc5e51fd7588c4a/elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8", size = 64969 }, ] [[package]] @@ -1711,18 +1724,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "elastic-transport" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/63/8dc82cbf1bfbca2a2af8eeaa4a7eccc2cf7a87bf217130f6bc66d33b4d8f/elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b", size = 382506, upload-time = "2024-06-06T13:31:10.205Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/63/8dc82cbf1bfbca2a2af8eeaa4a7eccc2cf7a87bf217130f6bc66d33b4d8f/elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b", size = 382506 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a2/09/c9dec8bd95bff6aaa8fe29a834257a6606608d0b2ed9932a1857683f736f/elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130", size = 480236, upload-time = "2024-06-06T13:31:00.987Z" }, + { url = "https://files.pythonhosted.org/packages/a2/09/c9dec8bd95bff6aaa8fe29a834257a6606608d0b2ed9932a1857683f736f/elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130", size = 480236 }, ] [[package]] name = "emoji" version = "2.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182, upload-time = "2025-01-16T06:31:24.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/7d/01cddcbb6f5cc0ba72e00ddf9b1fa206c802d557fd0a20b18e130edf1336/emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b", size = 597182 } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617, upload-time = "2025-01-16T06:31:23.526Z" }, + { url = "https://files.pythonhosted.org/packages/91/db/a0335710caaa6d0aebdaa65ad4df789c15d89b7babd9a30277838a7d9aac/emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b", size = 590617 }, ] [[package]] @@ -1732,15 +1745,15 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycryptodome" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798, upload-time = "2024-07-26T13:13:22.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/af/d83276f9e288bd6a62f44d67ae1eafd401028ba1b2b643ae4014b51da5bd/esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0", size = 85798 } [[package]] name = "et-xmlfile" version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = 
"sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059 }, ] [[package]] @@ -1751,46 +1764,46 @@ dependencies = [ { name = "python-dateutil" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/2a/dd2c8f55d69013d0eee30ec4c998250fb7da957f5fe860ed077b3df1725b/faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5", size = 1850193, upload-time = "2024-11-12T22:04:34.812Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/2a/dd2c8f55d69013d0eee30ec4c998250fb7da957f5fe860ed077b3df1725b/faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5", size = 1850193 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/fa/4a82dea32d6262a96e6841cdd4a45c11ac09eecdff018e745565410ac70e/Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814", size = 1889123, upload-time = "2024-11-12T22:04:32.298Z" }, + { url = "https://files.pythonhosted.org/packages/7e/fa/4a82dea32d6262a96e6841cdd4a45c11ac09eecdff018e745565410ac70e/Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814", size = 1889123 }, ] [[package]] name = "fastapi" -version = "0.116.0" +version = "0.116.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/38/e1da78736143fd885c36213a3ccc493c384ae8fea6a0f0bc272ef42ebea8/fastapi-0.116.0.tar.gz", hash = "sha256:80dc0794627af0390353a6d1171618276616310d37d24faba6648398e57d687a", size = 296518, upload-time = "2025-07-07T15:09:27.82Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/68/d80347fe2360445b5f58cf290e588a4729746e7501080947e6cdae114b1f/fastapi-0.116.0-py3-none-any.whl", hash = "sha256:fdcc9ed272eaef038952923bef2b735c02372402d1203ee1210af4eea7a78d2b", size = 95625, upload-time = "2025-07-07T15:09:26.348Z" }, + { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631 }, ] [[package]] name = "filelock" version = "3.18.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, + { 
url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215 }, ] [[package]] name = "filetype" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/29/745f7d30d47fe0f251d3ad3dc2978a23141917661998763bebb6da007eb1/filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb", size = 998020, upload-time = "2022-11-02T17:34:04.141Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/29/745f7d30d47fe0f251d3ad3dc2978a23141917661998763bebb6da007eb1/filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb", size = 998020 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/79/1b8fa1bb3568781e84c9200f951c735f3f157429f44be0495da55894d620/filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25", size = 19970, upload-time = "2022-11-02T17:34:01.425Z" }, + { url = "https://files.pythonhosted.org/packages/18/79/1b8fa1bb3568781e84c9200f951c735f3f157429f44be0495da55894d620/filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25", size = 19970 }, ] [[package]] name = "flask" -version = "3.1.1" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "blinker" }, @@ -1800,9 +1813,9 @@ dependencies = [ { name = "markupsafe" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/de/e47735752347f4128bcf354e0da07ef311a78244eba9e3dc1d4a5ab21a98/flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e", size = 753440, upload-time = "2025-05-13T15:01:17.447Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/68/9d4508e893976286d2ead7f8f571314af6c2037af34853a30fd769c02e9d/flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c", size = 103305, upload-time = "2025-05-13T15:01:15.591Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308 }, ] [[package]] @@ -1816,9 +1829,9 @@ dependencies = [ { name = "zstandard" }, { name = "zstandard", extra = ["cffi"], marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733, upload-time = "2024-10-14T08:13:33.196Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/1f/260db5a4517d59bfde7b4a0d71052df68fb84983bda9231100e3b80f5989/flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8", size = 15733 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723, upload-time = "2024-10-14T08:13:31.726Z" }, + { url = "https://files.pythonhosted.org/packages/f7/54/ff08f947d07c0a8a5d8f1c8e57b142c97748ca912b259db6467ab35983cd/Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20", size = 8723 }, ] [[package]] @@ -1829,9 +1842,9 @@ dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463, upload-time = "2025-06-11T01:32:08.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463 } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244, upload-time = "2025-06-11T01:32:07.352Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244 }, ] [[package]] @@ -1842,9 +1855,9 @@ dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/6e/2f4e13e373bb49e68c02c51ceadd22d172715a06716f9299d9df01b6ddb2/Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333", size = 48834, upload-time = "2023-10-30T14:53:21.151Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/6e/2f4e13e373bb49e68c02c51ceadd22d172715a06716f9299d9df01b6ddb2/Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333", size = 48834 } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/f5/67e9cc5c2036f58115f9fe0f00d203cf6780c3ff8ae0e705e7a9d9e8ff9e/Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d", size = 17303, upload-time = "2023-10-30T14:53:19.636Z" }, + { url = "https://files.pythonhosted.org/packages/59/f5/67e9cc5c2036f58115f9fe0f00d203cf6780c3ff8ae0e705e7a9d9e8ff9e/Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d", size = 17303 }, ] [[package]] @@ -1856,9 +1869,9 @@ dependencies = [ { name = "flask" }, { name = "flask-sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3b/e2/4008fc0d298d7ce797021b194bbe151d4d12db670691648a226d4fc8aefc/Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622", size = 21770, upload-time = "2024-03-11T18:43:01.498Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/e2/4008fc0d298d7ce797021b194bbe151d4d12db670691648a226d4fc8aefc/Flask-Migrate-4.0.7.tar.gz", hash = 
"sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622", size = 21770 } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/01/587023575286236f95d2ab8a826c320375ed5ea2102bb103ed89704ffa6b/Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617", size = 21127, upload-time = "2024-03-11T18:42:59.462Z" }, + { url = "https://files.pythonhosted.org/packages/93/01/587023575286236f95d2ab8a826c320375ed5ea2102bb103ed89704ffa6b/Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617", size = 21127 }, ] [[package]] @@ -1869,24 +1882,26 @@ dependencies = [ { name = "flask" }, { name = "orjson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/49/575796f6ddca171d82dbb12762e33166c8b8f8616c946f0a6dfbb9bc3cd6/flask_orjson-2.0.0.tar.gz", hash = "sha256:6df6631437f9bc52cf9821735f896efa5583b5f80712f7d29d9ef69a79986a9c", size = 2974, upload-time = "2024-01-15T00:03:22.236Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/49/575796f6ddca171d82dbb12762e33166c8b8f8616c946f0a6dfbb9bc3cd6/flask_orjson-2.0.0.tar.gz", hash = "sha256:6df6631437f9bc52cf9821735f896efa5583b5f80712f7d29d9ef69a79986a9c", size = 2974 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/ca/53e14be018a2284acf799830e8cd8e0b263c0fd3dff1ad7b35f8417e7067/flask_orjson-2.0.0-py3-none-any.whl", hash = "sha256:5d15f2ba94b8d6c02aee88fc156045016e83db9eda2c30545fabd640aebaec9d", size = 3622, upload-time = "2024-01-15T00:03:17.511Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ca/53e14be018a2284acf799830e8cd8e0b263c0fd3dff1ad7b35f8417e7067/flask_orjson-2.0.0-py3-none-any.whl", hash = "sha256:5d15f2ba94b8d6c02aee88fc156045016e83db9eda2c30545fabd640aebaec9d", size = 3622 }, ] [[package]] -name = "flask-restful" -version = "0.3.10" +name = "flask-restx" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aniso8601" }, { name = "flask" }, + { name = "importlib-resources" }, + { name = "jsonschema" }, { name = "pytz" }, - { name = "six" }, + { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/ce/a0a133db616ea47f78a41e15c4c68b9f08cab3df31eb960f61899200a119/Flask-RESTful-0.3.10.tar.gz", hash = "sha256:fe4af2ef0027df8f9b4f797aba20c5566801b6ade995ac63b588abf1a59cec37", size = 110453, upload-time = "2023-05-21T03:58:55.781Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/4c/2e7d84e2b406b47cf3bf730f521efe474977b404ee170d8ea68dc37e6733/flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728", size = 2814072 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/7b/f0b45f0df7d2978e5ae51804bb5939b7897b2ace24306009da0cc34d8d1f/Flask_RESTful-0.3.10-py2.py3-none-any.whl", hash = "sha256:1cf93c535172f112e080b0d4503a8d15f93a48c88bdd36dd87269bdaf405051b", size = 26217, upload-time = "2023-05-21T03:58:54.004Z" }, + { url = "https://files.pythonhosted.org/packages/a5/bf/1907369f2a7ee614dde5152ff8f811159d357e77962aa3f8c2e937f63731/flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691", size = 2798683 }, ] [[package]] @@ -1897,79 +1912,79 @@ dependencies = [ { name = "flask" }, { name = "sqlalchemy" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899, upload-time = "2023-09-11T21:42:36.147Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/53/b0a9fcc1b1297f51e68b69ed3b7c3c40d8c45be1391d77ae198712914392/flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312", size = 81899 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125, upload-time = "2023-09-11T21:42:34.514Z" }, + { url = "https://files.pythonhosted.org/packages/1d/6a/89963a5c6ecf166e8be29e0d1bf6806051ee8fe6c82e232842e3aeac9204/flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0", size = 25125 }, ] [[package]] name = "flatbuffers" version = "25.2.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170, upload-time = "2025-02-11T04:26:46.257Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953, upload-time = "2025-02-11T04:26:44.484Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 }, ] [[package]] name = "frozenlist" version = "1.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078 } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, - { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 
48183, upload-time = "2025-06-09T23:00:17.698Z" }, - { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, - { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, - { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, - { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, - { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, - { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, - { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, - { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, - { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, - { url = 
"https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, - { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, - { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, - { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, - { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, - { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, - { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, - { url = 
"https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, - { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, - { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, - { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, - { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, - { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, - { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, - { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, - { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" 
}, - { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251 }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183 }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107 }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333 }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724 }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842 }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767 }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130 }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301 }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606 }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372 }, + { url = 
"https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860 }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893 }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323 }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149 }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565 }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019 }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424 }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952 }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688 }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084 }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524 }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493 }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116 }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557 }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820 }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542 }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350 }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093 }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482 }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590 }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785 }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487 }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874 }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106 }, ] [[package]] name = "fsspec" version = "2025.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033, upload-time = "2025-05-24T12:03:23.792Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052, upload-time = "2025-05-24T12:03:21.66Z" }, + { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052 }, ] [[package]] name = "future" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490, upload-time = "2024-02-21T11:52:38.461Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490 } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326, upload-time = "2024-02-21T11:52:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326 }, ] [[package]] @@ -1982,24 +1997,24 @@ dependencies = [ { name = "zope-event" }, { name = "zope-interface" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/75/a53f1cb732420f5e5d79b2563fc3504d22115e7ecfe7966e5cf9b3582ae7/gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca", size = 5976624, upload-time = "2024-11-11T15:36:45.991Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/75/a53f1cb732420f5e5d79b2563fc3504d22115e7ecfe7966e5cf9b3582ae7/gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca", size = 5976624 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/fd/86a170f77ef51a15297573c50dbec4cc67ddc98b677cc2d03cc7f2927f4c/gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62", size = 2951424, upload-time = "2024-11-11T14:32:36.451Z" }, - { url = "https://files.pythonhosted.org/packages/7f/0a/987268c9d446f61883bc627c77c5ed4a97869c0f541f76661a62b2c411f6/gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab", size = 4878504, upload-time = "2024-11-11T15:20:03.521Z" }, - { url = "https://files.pythonhosted.org/packages/dc/d4/2f77ddd837c0e21b4a4460bcb79318b6754d95ef138b7a29f3221c7e9993/gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758", size = 5007668, upload-time = "2024-11-11T15:21:00.422Z" }, - { url = "https://files.pythonhosted.org/packages/80/a0/829e0399a1f9b84c344b72d2be9aa60fe2a64e993cac221edcc14f069679/gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d", size = 5067055, upload-time = "2024-11-11T15:22:44.279Z" }, - { url = "https://files.pythonhosted.org/packages/1e/67/0e693f9ddb7909c2414f8fcfc2409aa4157884c147bc83dab979e9cf717c/gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546", size = 6761883, upload-time = "2024-11-11T14:57:09.359Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b6/b69883fc069d7148dd23c5dda20826044e54e7197f3c8e72b8cc2cd4035a/gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c", size = 5440802, upload-time = "2024-11-11T15:37:04.983Z" }, - { url = "https://files.pythonhosted.org/packages/32/4e/b00094d995ff01fd88b3cf6b9d1d794f935c31c645c431e65cd82d808c9c/gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61", size = 6866992, upload-time = "2024-11-11T15:03:44.208Z" }, - { url = "https://files.pythonhosted.org/packages/37/ed/58dbe9fb09d36f6477ff8db0459ebd3be9a77dc05ae5d96dc91ad657610d/gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897", size = 1543736, upload-time = "2024-11-11T15:03:06.121Z" }, - { url = "https://files.pythonhosted.org/packages/dd/32/301676f67ffa996ff1c4175092fb0c48c83271cc95e5c67650b87156b6cf/gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46", size = 2956467, upload-time = "2024-11-11T14:32:33.238Z" }, - { url = "https://files.pythonhosted.org/packages/6b/84/aef1a598123cef2375b6e2bf9d17606b961040f8a10e3dcc3c3dd2a99f05/gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb", size = 5136486, upload-time = "2024-11-11T15:20:04.972Z" }, - { url = "https://files.pythonhosted.org/packages/92/7b/04f61187ee1df7a913b3fca63b0a1206c29141ab4d2a57e7645237b6feb5/gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85", size = 5299718, upload-time = "2024-11-11T15:21:03.354Z" }, - { url = "https://files.pythonhosted.org/packages/36/2a/ebd12183ac25eece91d084be2111e582b061f4d15ead32239b43ed47e9ba/gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6", size = 5400118, upload-time = "2024-11-11T15:22:45.897Z" }, - { url = "https://files.pythonhosted.org/packages/ec/c9/f006c0cd59f0720fbb62ee11da0ad4c4c0fd12799afd957dd491137e80d9/gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671", size = 6775163, upload-time = "2024-11-11T14:57:11.991Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/f1/5edf00b674b10d67e3b967c2d46b8a124c2bc8cfd59d4722704392206444/gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f", size = 5479886, upload-time = "2024-11-11T15:37:06.558Z" }, - { url = "https://files.pythonhosted.org/packages/22/11/c48e62744a32c0d48984268ae62b99edb81eaf0e03b42de52e2f09855509/gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a", size = 6891452, upload-time = "2024-11-11T15:03:46.892Z" }, - { url = "https://files.pythonhosted.org/packages/11/b2/5d20664ef6a077bec9f27f7a7ee761edc64946d0b1e293726a3d074a9a18/gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae", size = 1541631, upload-time = "2024-11-11T14:55:34.977Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fd/86a170f77ef51a15297573c50dbec4cc67ddc98b677cc2d03cc7f2927f4c/gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62", size = 2951424 }, + { url = "https://files.pythonhosted.org/packages/7f/0a/987268c9d446f61883bc627c77c5ed4a97869c0f541f76661a62b2c411f6/gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab", size = 4878504 }, + { url = "https://files.pythonhosted.org/packages/dc/d4/2f77ddd837c0e21b4a4460bcb79318b6754d95ef138b7a29f3221c7e9993/gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758", size = 5007668 }, + { url = "https://files.pythonhosted.org/packages/80/a0/829e0399a1f9b84c344b72d2be9aa60fe2a64e993cac221edcc14f069679/gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d", size = 5067055 }, + { url = "https://files.pythonhosted.org/packages/1e/67/0e693f9ddb7909c2414f8fcfc2409aa4157884c147bc83dab979e9cf717c/gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546", size = 6761883 }, + { url = "https://files.pythonhosted.org/packages/fa/b6/b69883fc069d7148dd23c5dda20826044e54e7197f3c8e72b8cc2cd4035a/gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c", size = 5440802 }, + { url = "https://files.pythonhosted.org/packages/32/4e/b00094d995ff01fd88b3cf6b9d1d794f935c31c645c431e65cd82d808c9c/gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61", size = 6866992 }, + { url = "https://files.pythonhosted.org/packages/37/ed/58dbe9fb09d36f6477ff8db0459ebd3be9a77dc05ae5d96dc91ad657610d/gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897", size = 1543736 }, + { url = "https://files.pythonhosted.org/packages/dd/32/301676f67ffa996ff1c4175092fb0c48c83271cc95e5c67650b87156b6cf/gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46", size = 2956467 }, + { url = 
"https://files.pythonhosted.org/packages/6b/84/aef1a598123cef2375b6e2bf9d17606b961040f8a10e3dcc3c3dd2a99f05/gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb", size = 5136486 }, + { url = "https://files.pythonhosted.org/packages/92/7b/04f61187ee1df7a913b3fca63b0a1206c29141ab4d2a57e7645237b6feb5/gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85", size = 5299718 }, + { url = "https://files.pythonhosted.org/packages/36/2a/ebd12183ac25eece91d084be2111e582b061f4d15ead32239b43ed47e9ba/gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6", size = 5400118 }, + { url = "https://files.pythonhosted.org/packages/ec/c9/f006c0cd59f0720fbb62ee11da0ad4c4c0fd12799afd957dd491137e80d9/gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671", size = 6775163 }, + { url = "https://files.pythonhosted.org/packages/49/f1/5edf00b674b10d67e3b967c2d46b8a124c2bc8cfd59d4722704392206444/gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f", size = 5479886 }, + { url = "https://files.pythonhosted.org/packages/22/11/c48e62744a32c0d48984268ae62b99edb81eaf0e03b42de52e2f09855509/gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a", size = 6891452 }, + { url = "https://files.pythonhosted.org/packages/11/b2/5d20664ef6a077bec9f27f7a7ee761edc64946d0b1e293726a3d074a9a18/gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae", size = 1541631 }, ] [[package]] @@ -2009,9 +2024,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "smmap" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794 }, ] [[package]] @@ -2021,31 +2036,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gitdb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196, 
upload-time = "2025-01-02T07:32:43.59Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/89/37df0b71473153574a5cdef8f242de422a0f5d26d7a9e231e6f169b4ad14/gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269", size = 214196 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599, upload-time = "2025-01-02T07:32:40.731Z" }, + { url = "https://files.pythonhosted.org/packages/1d/9a/4114a9057db2f1462d5c8f8390ab7383925fe1ac012eaa42402ad65c2963/GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110", size = 207599 }, ] [[package]] name = "gmpy2" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/bd/c6c154ce734a3e6187871b323297d8e5f3bdf9feaafc5212381538bc19e4/gmpy2-2.2.1.tar.gz", hash = "sha256:e83e07567441b78cb87544910cb3cc4fe94e7da987e93ef7622e76fb96650432", size = 234228, upload-time = "2024-07-21T05:33:00.715Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/bd/c6c154ce734a3e6187871b323297d8e5f3bdf9feaafc5212381538bc19e4/gmpy2-2.2.1.tar.gz", hash = "sha256:e83e07567441b78cb87544910cb3cc4fe94e7da987e93ef7622e76fb96650432", size = 234228 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/ec/ab67751ac0c4088ed21cf9a2a7f9966bf702ca8ebfc3204879cf58c90179/gmpy2-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98e947491c67523d3147a500f377bb64d0b115e4ab8a12d628fb324bb0e142bf", size = 880346, upload-time = "2024-07-21T05:31:25.531Z" }, - { url = "https://files.pythonhosted.org/packages/97/7c/bdc4a7a2b0e543787a9354e80fdcf846c4e9945685218cef4ca938d25594/gmpy2-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ccd319a3a87529484167ae1391f937ac4a8724169fd5822bbb541d1eab612b0", size = 694518, upload-time = "2024-07-21T05:31:27.78Z" }, - { url = "https://files.pythonhosted.org/packages/fc/44/ea903003bb4c3af004912fb0d6488e346bd76968f11a7472a1e60dee7dd7/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827bcd433e5d62f1b732f45e6949419da4a53915d6c80a3c7a5a03d5a783a03a", size = 1653491, upload-time = "2024-07-21T05:31:29.968Z" }, - { url = "https://files.pythonhosted.org/packages/c9/70/5bce281b7cd664c04f1c9d47a37087db37b2be887bce738340e912ad86c8/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7131231fc96f57272066295c81cbf11b3233a9471659bca29ddc90a7bde9bfa", size = 1706487, upload-time = "2024-07-21T05:31:32.476Z" }, - { url = "https://files.pythonhosted.org/packages/2a/52/1f773571f21cf0319fc33218a1b384f29de43053965c05ed32f7e6729115/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1cc6f2bb68ee00c20aae554e111dc781a76140e00c31e4eda5c8f2d4168ed06c", size = 1637415, upload-time = "2024-07-21T05:31:34.591Z" }, - { url = "https://files.pythonhosted.org/packages/99/4c/390daf67c221b3f4f10b5b7d9293e61e4dbd48956a38947679c5a701af27/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae388fe46e3d20af4675451a4b6c12fc1bb08e6e0e69ee47072638be21bf42d8", size = 1657781, upload-time = "2024-07-21T05:31:36.81Z" }, - { url = "https://files.pythonhosted.org/packages/61/cd/86e47bccb3636389e29c4654a0e5ac52926d832897f2f64632639b63ffc1/gmpy2-2.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:8b472ee3c123b77979374da2293ebf2c170b88212e173d64213104956d4678fb", size = 1203346, upload-time = "2024-07-21T05:31:39.344Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/8f9f65e2bac334cfe13b3fc3f8962d5fc2858ebcf4517690d2d24afa6d0e/gmpy2-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d03a1be1b1ad3944013fae5250316c3f4e6aec45ecdf189a5c7422d640004d", size = 885231, upload-time = "2024-07-21T05:31:41.471Z" }, - { url = "https://files.pythonhosted.org/packages/07/1c/bf29f6bf8acd72c3cf85d04e7db1bb26dd5507ee2387770bb787bc54e2a5/gmpy2-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd09dd43d199908c1d1d501c5de842b3bf754f99b94af5b5ef0e26e3b716d2d5", size = 696569, upload-time = "2024-07-21T05:31:43.768Z" }, - { url = "https://files.pythonhosted.org/packages/7c/cc/38d33eadeccd81b604a95b67d43c71b246793b7c441f1d7c3b41978cd1cf/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3232859fda3e96fd1aecd6235ae20476ed4506562bcdef6796a629b78bb96acd", size = 1655776, upload-time = "2024-07-21T05:31:46.272Z" }, - { url = "https://files.pythonhosted.org/packages/96/8d/d017599d6db8e9b96d6e84ea5102c33525cb71c82876b1813a2ece5d94ec/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fba6f7cf43fb7f8474216701b5aaddfa5e6a06d560e88a67f814062934e863", size = 1707529, upload-time = "2024-07-21T05:31:48.732Z" }, - { url = "https://files.pythonhosted.org/packages/d0/93/91b4a0af23ae4216fd7ebcfd955dcbe152c5ef170598aee421310834de0a/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b33cae533ede8173bc7d4bb855b388c5b636ca9f22a32c949f2eb7e0cc531b2", size = 1634195, upload-time = "2024-07-21T05:31:50.99Z" }, - { url = "https://files.pythonhosted.org/packages/d7/ba/08ee99f19424cd33d5f0f17b2184e34d2fa886eebafcd3e164ccba15d9f2/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:954e7e1936c26e370ca31bbd49729ebeeb2006a8f9866b1e778ebb89add2e941", size = 1656779, upload-time = "2024-07-21T05:31:53.657Z" }, - { url = "https://files.pythonhosted.org/packages/14/e1/7b32ae2b23c8363d87b7f4bbac9abe9a1f820c2417d2e99ca3b4afd9379b/gmpy2-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c929870137b20d9c3f7dd97f43615b2d2c1a2470e50bafd9a5eea2e844f462e9", size = 1204668, upload-time = "2024-07-21T05:31:56.264Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ec/ab67751ac0c4088ed21cf9a2a7f9966bf702ca8ebfc3204879cf58c90179/gmpy2-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98e947491c67523d3147a500f377bb64d0b115e4ab8a12d628fb324bb0e142bf", size = 880346 }, + { url = "https://files.pythonhosted.org/packages/97/7c/bdc4a7a2b0e543787a9354e80fdcf846c4e9945685218cef4ca938d25594/gmpy2-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ccd319a3a87529484167ae1391f937ac4a8724169fd5822bbb541d1eab612b0", size = 694518 }, + { url = "https://files.pythonhosted.org/packages/fc/44/ea903003bb4c3af004912fb0d6488e346bd76968f11a7472a1e60dee7dd7/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:827bcd433e5d62f1b732f45e6949419da4a53915d6c80a3c7a5a03d5a783a03a", size = 1653491 }, + { url = "https://files.pythonhosted.org/packages/c9/70/5bce281b7cd664c04f1c9d47a37087db37b2be887bce738340e912ad86c8/gmpy2-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7131231fc96f57272066295c81cbf11b3233a9471659bca29ddc90a7bde9bfa", size = 1706487 }, + { url = 
"https://files.pythonhosted.org/packages/2a/52/1f773571f21cf0319fc33218a1b384f29de43053965c05ed32f7e6729115/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1cc6f2bb68ee00c20aae554e111dc781a76140e00c31e4eda5c8f2d4168ed06c", size = 1637415 }, + { url = "https://files.pythonhosted.org/packages/99/4c/390daf67c221b3f4f10b5b7d9293e61e4dbd48956a38947679c5a701af27/gmpy2-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ae388fe46e3d20af4675451a4b6c12fc1bb08e6e0e69ee47072638be21bf42d8", size = 1657781 }, + { url = "https://files.pythonhosted.org/packages/61/cd/86e47bccb3636389e29c4654a0e5ac52926d832897f2f64632639b63ffc1/gmpy2-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:8b472ee3c123b77979374da2293ebf2c170b88212e173d64213104956d4678fb", size = 1203346 }, + { url = "https://files.pythonhosted.org/packages/9a/ee/8f9f65e2bac334cfe13b3fc3f8962d5fc2858ebcf4517690d2d24afa6d0e/gmpy2-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d03a1be1b1ad3944013fae5250316c3f4e6aec45ecdf189a5c7422d640004d", size = 885231 }, + { url = "https://files.pythonhosted.org/packages/07/1c/bf29f6bf8acd72c3cf85d04e7db1bb26dd5507ee2387770bb787bc54e2a5/gmpy2-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd09dd43d199908c1d1d501c5de842b3bf754f99b94af5b5ef0e26e3b716d2d5", size = 696569 }, + { url = "https://files.pythonhosted.org/packages/7c/cc/38d33eadeccd81b604a95b67d43c71b246793b7c441f1d7c3b41978cd1cf/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3232859fda3e96fd1aecd6235ae20476ed4506562bcdef6796a629b78bb96acd", size = 1655776 }, + { url = "https://files.pythonhosted.org/packages/96/8d/d017599d6db8e9b96d6e84ea5102c33525cb71c82876b1813a2ece5d94ec/gmpy2-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fba6f7cf43fb7f8474216701b5aaddfa5e6a06d560e88a67f814062934e863", size = 1707529 }, + { url = "https://files.pythonhosted.org/packages/d0/93/91b4a0af23ae4216fd7ebcfd955dcbe152c5ef170598aee421310834de0a/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b33cae533ede8173bc7d4bb855b388c5b636ca9f22a32c949f2eb7e0cc531b2", size = 1634195 }, + { url = "https://files.pythonhosted.org/packages/d7/ba/08ee99f19424cd33d5f0f17b2184e34d2fa886eebafcd3e164ccba15d9f2/gmpy2-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:954e7e1936c26e370ca31bbd49729ebeeb2006a8f9866b1e778ebb89add2e941", size = 1656779 }, + { url = "https://files.pythonhosted.org/packages/14/e1/7b32ae2b23c8363d87b7f4bbac9abe9a1f820c2417d2e99ca3b4afd9379b/gmpy2-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:c929870137b20d9c3f7dd97f43615b2d2c1a2470e50bafd9a5eea2e844f462e9", size = 1204668 }, ] [[package]] @@ -2055,9 +2070,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "beautifulsoup4" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/97/b49c69893cddea912c7a660a4b6102c6b02cd268f8c7162dd70b7c16f753/google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe", size = 44978, upload-time = "2020-07-11T14:50:45.678Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/97/b49c69893cddea912c7a660a4b6102c6b02cd268f8c7162dd70b7c16f753/google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe", size = 44978 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/35/17c9141c4ae21e9a29a43acdfd848e3e468a810517f862cad07977bf8fe9/google-3.0.0-py2.py3-none-any.whl", hash = 
"sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935", size = 45258, upload-time = "2020-07-11T14:49:58.287Z" }, + { url = "https://files.pythonhosted.org/packages/ac/35/17c9141c4ae21e9a29a43acdfd848e3e468a810517f862cad07977bf8fe9/google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935", size = 45258 }, ] [[package]] @@ -2071,9 +2086,9 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/8f/ecd68579bd2bf5e9321df60dcdee6e575adf77fedacb1d8378760b2b16b6/google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9", size = 148047, upload-time = "2024-03-21T20:16:56.269Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/8f/ecd68579bd2bf5e9321df60dcdee6e575adf77fedacb1d8378760b2b16b6/google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9", size = 148047 } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/75/59a3ad90d9b4ff5b3e0537611dbe885aeb96124521c9d35aa079f1e0f2c9/google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6", size = 138293, upload-time = "2024-03-21T20:16:53.645Z" }, + { url = "https://files.pythonhosted.org/packages/86/75/59a3ad90d9b4ff5b3e0537611dbe885aeb96124521c9d35aa079f1e0f2c9/google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6", size = 138293 }, ] [package.optional-dependencies] @@ -2093,9 +2108,9 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311 } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" }, + { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891 }, ] [[package]] @@ -2107,9 +2122,9 @@ dependencies = [ { name = "pyasn1-modules" }, { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/18/b2/f14129111cfd61793609643a07ecb03651a71dd65c6974f63b0310ff4b45/google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", size = 244326, upload-time = "2024-03-20T17:24:27.72Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/b2/f14129111cfd61793609643a07ecb03651a71dd65c6974f63b0310ff4b45/google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", size = 244326 } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/8d/ddbcf81ec751d8ee5fd18ac11ff38a0e110f39dfbf105e6d9db69d556dd0/google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415", size = 189186, upload-time = "2024-03-20T17:24:24.292Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8d/ddbcf81ec751d8ee5fd18ac11ff38a0e110f39dfbf105e6d9db69d556dd0/google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415", size = 189186 }, ] [[package]] @@ -2120,9 +2135,9 @@ dependencies = [ { name = "google-auth" }, { name = "httplib2" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842, upload-time = "2023-12-12T17:40:30.722Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/be/217a598a818567b28e859ff087f347475c807a5649296fb5a817c58dacef/google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05", size = 10842 } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253, upload-time = "2023-12-12T17:40:13.055Z" }, + { url = "https://files.pythonhosted.org/packages/be/8a/fe34d2f3f9470a27b01c9e76226965863f153d5fbe276f83608562e49c04/google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d", size = 9253 }, ] [[package]] @@ -2142,9 +2157,9 @@ dependencies = [ { name = "pydantic" }, { name = "shapely" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/47/21/5930a1420f82bec246ae09e1b7cc8458544f3befe669193b33a7b5c0691c/google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429", size = 5766450, upload-time = "2024-04-29T17:25:31.646Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/21/5930a1420f82bec246ae09e1b7cc8458544f3befe669193b33a7b5c0691c/google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429", size = 5766450 } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/6a/7d9e1c03c814e760361fe8b0ffd373ead4124ace66ed33bb16d526ae1ecf/google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df", size = 4914049, upload-time = "2024-04-29T17:25:27.625Z" }, + { url = "https://files.pythonhosted.org/packages/39/6a/7d9e1c03c814e760361fe8b0ffd373ead4124ace66ed33bb16d526ae1ecf/google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df", size = 4914049 }, ] [[package]] @@ -2160,9 +2175,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389, upload-time = "2025-02-27T18:49:45.416Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885, upload-time = "2025-02-27T18:49:43.454Z" }, + { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885 }, ] [[package]] @@ -2173,9 +2188,9 @@ dependencies = [ { name = "google-api-core" }, { name = "google-auth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/b8/2b53838d2acd6ec6168fd284a990c76695e84c65deee79c9f3a4276f6b4f/google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53", size = 35861, upload-time = "2025-03-10T21:05:38.948Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/b8/2b53838d2acd6ec6168fd284a990c76695e84c65deee79c9f3a4276f6b4f/google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53", size = 35861 } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/86/bda7241a8da2d28a754aad2ba0f6776e35b67e37c36ae0c45d49370f1014/google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e", size = 29348, upload-time = "2025-03-10T21:05:37.785Z" }, + { url = "https://files.pythonhosted.org/packages/40/86/bda7241a8da2d28a754aad2ba0f6776e35b67e37c36ae0c45d49370f1014/google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e", size = 29348 }, ] [[package]] @@ -2189,9 +2204,9 @@ dependencies = [ { name = "proto-plus" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/ca/a4648f5038cb94af4b3942815942a03aa9398f9fb0bef55b3f1585b9940d/google_cloud_resource_manager-1.14.2.tar.gz", hash = "sha256:962e2d904c550d7bac48372607904ff7bb3277e3bb4a36d80cc9a37e28e6eb74", size = 446370, upload-time = "2025-03-17T11:35:56.343Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/ca/a4648f5038cb94af4b3942815942a03aa9398f9fb0bef55b3f1585b9940d/google_cloud_resource_manager-1.14.2.tar.gz", hash = "sha256:962e2d904c550d7bac48372607904ff7bb3277e3bb4a36d80cc9a37e28e6eb74", size = 446370 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/ea/a92631c358da377af34d3a9682c97af83185c2d66363d5939ab4a1169a7f/google_cloud_resource_manager-1.14.2-py3-none-any.whl", hash = "sha256:d0fa954dedd1d2b8e13feae9099c01b8aac515b648e612834f9942d2795a9900", size = 394344, upload-time = "2025-03-17T11:35:54.722Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ea/a92631c358da377af34d3a9682c97af83185c2d66363d5939ab4a1169a7f/google_cloud_resource_manager-1.14.2-py3-none-any.whl", hash = "sha256:d0fa954dedd1d2b8e13feae9099c01b8aac515b648e612834f9942d2795a9900", size = 394344 }, ] [[package]] @@ -2206,29 +2221,29 @@ dependencies = [ { name = "google-resumable-media" }, { name = "requests" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/17/c5/0bc3f97cf4c14a731ecc5a95c5cde6883aec7289dc74817f9b41f866f77e/google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f", size = 5525307, upload-time = "2024-03-18T23:55:37.102Z" } +sdist = { url = "https://files.pythonhosted.org/packages/17/c5/0bc3f97cf4c14a731ecc5a95c5cde6883aec7289dc74817f9b41f866f77e/google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f", size = 5525307 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/e5/7d045d188f4ef85d94b9e3ae1bf876170c6b9f4c9a950124978efc36f680/google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", size = 125604, upload-time = "2024-03-18T23:55:33.987Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e5/7d045d188f4ef85d94b9e3ae1bf876170c6b9f4c9a950124978efc36f680/google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852", size = 125604 }, ] [[package]] name = "google-crc32c" version = "1.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495, upload-time = "2025-03-26T14:29:13.32Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468, upload-time = "2025-03-26T14:32:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313, upload-time = "2025-03-26T14:57:38.758Z" }, - { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048, upload-time = "2025-03-26T14:41:30.679Z" }, - { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669, upload-time = "2025-03-26T14:41:31.432Z" }, - { url = "https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476, upload-time = "2025-03-26T14:29:10.211Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470, upload-time = "2025-03-26T14:34:31.655Z" }, - { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315, upload-time = "2025-03-26T15:01:54.634Z" }, - { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180, upload-time = "2025-03-26T14:41:32.168Z" }, - { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794, upload-time = "2025-03-26T14:41:33.264Z" }, - { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477, upload-time = "2025-03-26T14:29:10.94Z" }, - { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241, upload-time = "2025-03-26T14:41:45.898Z" }, - { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" }, + { url = "https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468 }, + { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313 }, + { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048 }, + { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669 }, + { url = 
"https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476 }, + { url = "https://files.pythonhosted.org/packages/dd/b7/787e2453cf8639c94b3d06c9d61f512234a82e1d12d13d18584bd3049904/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194", size = 30470 }, + { url = "https://files.pythonhosted.org/packages/ed/b4/6042c2b0cbac3ec3a69bb4c49b28d2f517b7a0f4a0232603c42c58e22b44/google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e", size = 30315 }, + { url = "https://files.pythonhosted.org/packages/29/ad/01e7a61a5d059bc57b702d9ff6a18b2585ad97f720bd0a0dbe215df1ab0e/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337", size = 33180 }, + { url = "https://files.pythonhosted.org/packages/3b/a5/7279055cf004561894ed3a7bfdf5bf90a53f28fadd01af7cd166e88ddf16/google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65", size = 32794 }, + { url = "https://files.pythonhosted.org/packages/0f/d6/77060dbd140c624e42ae3ece3df53b9d811000729a5c821b9fd671ceaac6/google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6", size = 33477 }, + { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241 }, + { url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048 }, ] [[package]] @@ -2238,9 +2253,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-crc32c" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099, upload-time = "2024-08-07T22:20:38.555Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099 } wheels = [ - { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251, upload-time = "2024-08-07T22:20:36.409Z" }, + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251 
}, ] [[package]] @@ -2250,9 +2265,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d2/dc/291cebf3c73e108ef8210f19cb83d671691354f4f7dd956445560d778715/googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", size = 121646, upload-time = "2024-03-11T12:33:15.765Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/dc/291cebf3c73e108ef8210f19cb83d671691354f4f7dd956445560d778715/googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", size = 121646 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/a6/12a0c976140511d8bc8a16ad15793b2aef29ac927baa0786ccb7ddbb6e1c/googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632", size = 229141, upload-time = "2024-03-11T12:33:14.052Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/12a0c976140511d8bc8a16ad15793b2aef29ac927baa0786ccb7ddbb6e1c/googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632", size = 229141 }, ] [package.optional-dependencies] @@ -2260,19 +2275,6 @@ grpc = [ { name = "grpcio" }, ] -[[package]] -name = "gotrue" -version = "2.11.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "httpx", extra = ["http2"] }, - { name = "pydantic" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/9c/62c3241731b59c1c403377abef17b5e3782f6385b0317f6d7083271db501/gotrue-2.11.4.tar.gz", hash = "sha256:a9ced242b16c6d6bedc43bca21bbefea1ba5fb35fcdaad7d529342099d3b1767", size = 35353, upload-time = "2025-02-20T09:02:37.346Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/47/3a/1a7cac16438f4e5319a0c879416d5e5032c98c3db2874e6e5300b3b475e6/gotrue-2.11.4-py3-none-any.whl", hash = "sha256:712e5018acc00d93cfc6d7bfddc3114eb3c420ab03b945757a8ba38c5fc3caa8", size = 41106, upload-time = "2025-02-20T09:02:34.653Z" }, -] - [[package]] name = "gql" version = "3.5.3" @@ -2283,9 +2285,9 @@ dependencies = [ { name = "graphql-core" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/ed/44ffd30b06b3afc8274ee2f38c3c1b61fe4740bf03d92083e43d2c17ac77/gql-3.5.3.tar.gz", hash = "sha256:393b8c049d58e0d2f5461b9d738a2b5f904186a40395500b4a84dd092d56e42b", size = 180504, upload-time = "2025-05-20T12:34:08.954Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/ed/44ffd30b06b3afc8274ee2f38c3c1b61fe4740bf03d92083e43d2c17ac77/gql-3.5.3.tar.gz", hash = "sha256:393b8c049d58e0d2f5461b9d738a2b5f904186a40395500b4a84dd092d56e42b", size = 180504 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/50/2f4e99b216821ac921dbebf91c644ba95818f5d07857acadee17220221f3/gql-3.5.3-py2.py3-none-any.whl", hash = "sha256:e1fcbde2893fcafdd28114ece87ff47f1cc339a31db271fc4e1d528f5a1d4fbc", size = 74348, upload-time = "2025-05-20T12:34:07.687Z" }, + { url = "https://files.pythonhosted.org/packages/cb/50/2f4e99b216821ac921dbebf91c644ba95818f5d07857acadee17220221f3/gql-3.5.3-py2.py3-none-any.whl", hash = "sha256:e1fcbde2893fcafdd28114ece87ff47f1cc339a31db271fc4e1d528f5a1d4fbc", size = 74348 }, ] [package.optional-dependencies] @@ -2301,35 +2303,35 @@ requests = [ name = "graphql-core" version = "3.2.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/c4/16/7574029da84834349b60ed71614d66ca3afe46e9bf9c7b9562102acb7d4f/graphql_core-3.2.6.tar.gz", hash = "sha256:c08eec22f9e40f0bd61d805907e3b3b1b9a320bc606e23dc145eebca07c8fbab", size = 505353, upload-time = "2025-01-26T16:36:27.374Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/16/7574029da84834349b60ed71614d66ca3afe46e9bf9c7b9562102acb7d4f/graphql_core-3.2.6.tar.gz", hash = "sha256:c08eec22f9e40f0bd61d805907e3b3b1b9a320bc606e23dc145eebca07c8fbab", size = 505353 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/4f/7297663840621022bc73c22d7d9d80dbc78b4db6297f764b545cd5dd462d/graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f", size = 203416, upload-time = "2025-01-26T16:36:24.868Z" }, + { url = "https://files.pythonhosted.org/packages/ae/4f/7297663840621022bc73c22d7d9d80dbc78b4db6297f764b545cd5dd462d/graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f", size = 203416 }, ] [[package]] name = "greenlet" version = "3.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, - { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, - { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, - { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, - { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, - { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, - { url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, - { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = 
"2025-06-05T16:36:48.59Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219 }, + { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383 }, + { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422 }, + { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375 }, + { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627 }, + { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502 }, + { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498 }, + { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977 }, + { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017 }, + { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992 }, + { url = 
"https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820 }, + { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046 }, + { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701 }, + { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747 }, + { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461 }, + { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190 }, + { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055 }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817 }, ] [[package]] @@ -2341,35 +2343,35 @@ dependencies = [ { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/4e/8d0ca3b035e41fe0b3f31ebbb638356af720335e5a11154c330169b40777/grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20", size = 16259, upload-time = "2025-03-17T11:40:23.586Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/4e/8d0ca3b035e41fe0b3f31ebbb638356af720335e5a11154c330169b40777/grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20", size = 16259 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/6f/dd9b178aee7835b96c2e63715aba6516a9d50f6bebbd1cc1d32c82a2a6c3/grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351", size = 19242, upload-time = "2025-03-17T11:40:22.648Z" }, + { url = "https://files.pythonhosted.org/packages/66/6f/dd9b178aee7835b96c2e63715aba6516a9d50f6bebbd1cc1d32c82a2a6c3/grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351", size = 19242 }, ] [[package]] name = "grpcio" version = "1.67.1" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/53/d9282a66a5db45981499190b77790570617a604a38f3d103d0400974aeb5/grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732", size = 12580022, upload-time = "2024-10-29T06:30:07.787Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/53/d9282a66a5db45981499190b77790570617a604a38f3d103d0400974aeb5/grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732", size = 12580022 } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/2c/b60d6ea1f63a20a8d09c6db95c4f9a16497913fb3048ce0990ed81aeeca0/grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb", size = 5119075, upload-time = "2024-10-29T06:24:04.696Z" }, - { url = "https://files.pythonhosted.org/packages/b3/9a/e1956f7ca582a22dd1f17b9e26fcb8229051b0ce6d33b47227824772feec/grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e", size = 11009159, upload-time = "2024-10-29T06:24:07.781Z" }, - { url = "https://files.pythonhosted.org/packages/43/a8/35fbbba580c4adb1d40d12e244cf9f7c74a379073c0a0ca9d1b5338675a1/grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f", size = 5629476, upload-time = "2024-10-29T06:24:11.444Z" }, - { url = "https://files.pythonhosted.org/packages/77/c9/864d336e167263d14dfccb4dbfa7fce634d45775609895287189a03f1fc3/grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc", size = 6239901, upload-time = "2024-10-29T06:24:14.2Z" }, - { url = "https://files.pythonhosted.org/packages/f7/1e/0011408ebabf9bd69f4f87cc1515cbfe2094e5a32316f8714a75fd8ddfcb/grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96", size = 5881010, upload-time = "2024-10-29T06:24:17.451Z" }, - { url = "https://files.pythonhosted.org/packages/b4/7d/fbca85ee9123fb296d4eff8df566f458d738186d0067dec6f0aa2fd79d71/grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f", size = 6580706, upload-time = "2024-10-29T06:24:20.038Z" }, - { url = "https://files.pythonhosted.org/packages/75/7a/766149dcfa2dfa81835bf7df623944c1f636a15fcb9b6138ebe29baf0bc6/grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970", size = 6161799, upload-time = "2024-10-29T06:24:22.604Z" }, - { url = "https://files.pythonhosted.org/packages/09/13/5b75ae88810aaea19e846f5380611837de411181df51fd7a7d10cb178dcb/grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744", size = 3616330, upload-time = "2024-10-29T06:24:25.775Z" }, - { url = "https://files.pythonhosted.org/packages/aa/39/38117259613f68f072778c9638a61579c0cfa5678c2558706b10dd1d11d3/grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5", size = 4354535, upload-time = "2024-10-29T06:24:28.614Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/25/6f95bd18d5f506364379eabc0d5874873cc7dbdaf0757df8d1e82bc07a88/grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953", size = 5089809, upload-time = "2024-10-29T06:24:31.24Z" }, - { url = "https://files.pythonhosted.org/packages/10/3f/d79e32e5d0354be33a12db2267c66d3cfeff700dd5ccdd09fd44a3ff4fb6/grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb", size = 10981985, upload-time = "2024-10-29T06:24:34.942Z" }, - { url = "https://files.pythonhosted.org/packages/21/f2/36fbc14b3542e3a1c20fb98bd60c4732c55a44e374a4eb68f91f28f14aab/grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0", size = 5588770, upload-time = "2024-10-29T06:24:38.145Z" }, - { url = "https://files.pythonhosted.org/packages/0d/af/bbc1305df60c4e65de8c12820a942b5e37f9cf684ef5e49a63fbb1476a73/grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af", size = 6214476, upload-time = "2024-10-29T06:24:41.006Z" }, - { url = "https://files.pythonhosted.org/packages/92/cf/1d4c3e93efa93223e06a5c83ac27e32935f998bc368e276ef858b8883154/grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e", size = 5850129, upload-time = "2024-10-29T06:24:43.553Z" }, - { url = "https://files.pythonhosted.org/packages/ae/ca/26195b66cb253ac4d5ef59846e354d335c9581dba891624011da0e95d67b/grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75", size = 6568489, upload-time = "2024-10-29T06:24:46.453Z" }, - { url = "https://files.pythonhosted.org/packages/d1/94/16550ad6b3f13b96f0856ee5dfc2554efac28539ee84a51d7b14526da985/grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38", size = 6149369, upload-time = "2024-10-29T06:24:49.112Z" }, - { url = "https://files.pythonhosted.org/packages/33/0d/4c3b2587e8ad7f121b597329e6c2620374fccbc2e4e1aa3c73ccc670fde4/grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78", size = 3599176, upload-time = "2024-10-29T06:24:51.443Z" }, - { url = "https://files.pythonhosted.org/packages/7d/36/0c03e2d80db69e2472cf81c6123aa7d14741de7cf790117291a703ae6ae1/grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc", size = 4346574, upload-time = "2024-10-29T06:24:54.587Z" }, + { url = "https://files.pythonhosted.org/packages/59/2c/b60d6ea1f63a20a8d09c6db95c4f9a16497913fb3048ce0990ed81aeeca0/grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb", size = 5119075 }, + { url = "https://files.pythonhosted.org/packages/b3/9a/e1956f7ca582a22dd1f17b9e26fcb8229051b0ce6d33b47227824772feec/grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e", size = 11009159 }, + { url = "https://files.pythonhosted.org/packages/43/a8/35fbbba580c4adb1d40d12e244cf9f7c74a379073c0a0ca9d1b5338675a1/grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = 
"sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f", size = 5629476 }, + { url = "https://files.pythonhosted.org/packages/77/c9/864d336e167263d14dfccb4dbfa7fce634d45775609895287189a03f1fc3/grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc", size = 6239901 }, + { url = "https://files.pythonhosted.org/packages/f7/1e/0011408ebabf9bd69f4f87cc1515cbfe2094e5a32316f8714a75fd8ddfcb/grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96", size = 5881010 }, + { url = "https://files.pythonhosted.org/packages/b4/7d/fbca85ee9123fb296d4eff8df566f458d738186d0067dec6f0aa2fd79d71/grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f", size = 6580706 }, + { url = "https://files.pythonhosted.org/packages/75/7a/766149dcfa2dfa81835bf7df623944c1f636a15fcb9b6138ebe29baf0bc6/grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970", size = 6161799 }, + { url = "https://files.pythonhosted.org/packages/09/13/5b75ae88810aaea19e846f5380611837de411181df51fd7a7d10cb178dcb/grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744", size = 3616330 }, + { url = "https://files.pythonhosted.org/packages/aa/39/38117259613f68f072778c9638a61579c0cfa5678c2558706b10dd1d11d3/grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5", size = 4354535 }, + { url = "https://files.pythonhosted.org/packages/6e/25/6f95bd18d5f506364379eabc0d5874873cc7dbdaf0757df8d1e82bc07a88/grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953", size = 5089809 }, + { url = "https://files.pythonhosted.org/packages/10/3f/d79e32e5d0354be33a12db2267c66d3cfeff700dd5ccdd09fd44a3ff4fb6/grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb", size = 10981985 }, + { url = "https://files.pythonhosted.org/packages/21/f2/36fbc14b3542e3a1c20fb98bd60c4732c55a44e374a4eb68f91f28f14aab/grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0", size = 5588770 }, + { url = "https://files.pythonhosted.org/packages/0d/af/bbc1305df60c4e65de8c12820a942b5e37f9cf684ef5e49a63fbb1476a73/grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af", size = 6214476 }, + { url = "https://files.pythonhosted.org/packages/92/cf/1d4c3e93efa93223e06a5c83ac27e32935f998bc368e276ef858b8883154/grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e", size = 5850129 }, + { url = "https://files.pythonhosted.org/packages/ae/ca/26195b66cb253ac4d5ef59846e354d335c9581dba891624011da0e95d67b/grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75", size = 6568489 }, + { url = 
"https://files.pythonhosted.org/packages/d1/94/16550ad6b3f13b96f0856ee5dfc2554efac28539ee84a51d7b14526da985/grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38", size = 6149369 }, + { url = "https://files.pythonhosted.org/packages/33/0d/4c3b2587e8ad7f121b597329e6c2620374fccbc2e4e1aa3c73ccc670fde4/grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78", size = 3599176 }, + { url = "https://files.pythonhosted.org/packages/7d/36/0c03e2d80db69e2472cf81c6123aa7d14741de7cf790117291a703ae6ae1/grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc", size = 4346574 }, ] [[package]] @@ -2381,9 +2383,9 @@ dependencies = [ { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063, upload-time = "2024-08-06T00:37:08.003Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/d7/013ef01c5a1c2fd0932c27c904934162f69f41ca0f28396d3ffe4d386123/grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485", size = 13063 } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448, upload-time = "2024-08-06T00:30:15.702Z" }, + { url = "https://files.pythonhosted.org/packages/90/40/972271de05f9315c0d69f9f7ebbcadd83bc85322f538637d11bb8c67803d/grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8", size = 14448 }, ] [[package]] @@ -2395,24 +2397,24 @@ dependencies = [ { name = "protobuf" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520, upload-time = "2024-08-06T00:37:11.035Z" } +sdist = { url = "https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520 } wheels = [ - { url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623, upload-time = "2024-08-06T00:30:54.894Z" }, - { url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538, upload-time = "2024-08-06T00:30:57.928Z" }, - { url = "https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964, upload-time = "2024-08-06T00:31:00.267Z" }, - { url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003, upload-time = "2024-08-06T00:31:02.565Z" }, - { url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154, upload-time = "2024-08-06T00:31:05.339Z" }, - { url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942, upload-time = "2024-08-06T00:31:08.456Z" }, - { url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231, upload-time = "2024-08-06T00:31:11.464Z" }, - { url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496, upload-time = "2024-08-06T00:31:13.665Z" }, - { url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690, upload-time = "2024-08-06T00:31:16.436Z" }, - { url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538, upload-time = "2024-08-06T00:31:18.905Z" }, - { url = "https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571, upload-time = "2024-08-06T00:31:21.684Z" }, - { url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207, upload-time = "2024-08-06T00:31:24.208Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815, upload-time = "2024-08-06T00:31:26.917Z" }, - { url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378, upload-time = "2024-08-06T00:31:30.401Z" }, - { url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416, upload-time = "2024-08-06T00:31:33.118Z" }, - { url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856, upload-time = "2024-08-06T00:31:36.519Z" }, + { url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623 }, + { url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538 }, + { url = "https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964 }, + { url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003 }, + { url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154 }, + { url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942 }, + { url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231 }, + { url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496 }, + { url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690 }, + { url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538 }, + { url = 
"https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571 }, + { url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207 }, + { url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815 }, + { url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378 }, + { url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416 }, + { url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856 }, ] [[package]] @@ -2422,93 +2424,93 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031, upload-time = "2024-08-10T20:25:27.378Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029, upload-time = "2024-08-10T20:25:24.996Z" }, + { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029 }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, ] [[package]] name = "h2" -version = "4.2.0" +version = "4.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "hpack" }, { name = "hyperframe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/38/d7f80fd13e6582fb8e0df8c9a653dcc02b03ca34f4d72f34869298c5baf8/h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f", size = 2150682, upload-time = "2025-02-02T07:43:51.815Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = "sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779 }, ] [[package]] name = "hf-xet" version = "1.1.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694", size = 495969, upload-time = "2025-06-20T21:48:38.007Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/d4/7685999e85945ed0d7f0762b686ae7015035390de1161dcea9d5276c134c/hf_xet-1.1.5.tar.gz", hash = "sha256:69ebbcfd9ec44fdc2af73441619eeb06b94ee34511bbcf57cd423820090f5694", size = 495969 } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23", size = 2687929, upload-time = "2025-06-20T21:48:32.284Z" }, - { url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8", size = 2556338, upload-time = "2025-06-20T21:48:30.079Z" }, - { url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1", size = 3102894, upload-time = "2025-06-20T21:48:28.114Z" }, - { url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18", size = 3002134, upload-time = "2025-06-20T21:48:25.906Z" }, - { url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14", size = 3171009, upload-time = "2025-06-20T21:48:33.987Z" }, - { url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a", size = 3279245, upload-time = "2025-06-20T21:48:36.051Z" }, - { url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245", size = 2738931, upload-time = "2025-06-20T21:48:39.482Z" }, + { url = "https://files.pythonhosted.org/packages/00/89/a1119eebe2836cb25758e7661d6410d3eae982e2b5e974bcc4d250be9012/hf_xet-1.1.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f52c2fa3635b8c37c7764d8796dfa72706cc4eded19d638331161e82b0792e23", size = 2687929 }, + { url = "https://files.pythonhosted.org/packages/de/5f/2c78e28f309396e71ec8e4e9304a6483dcbc36172b5cea8f291994163425/hf_xet-1.1.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:9fa6e3ee5d61912c4a113e0708eaaef987047616465ac7aa30f7121a48fc1af8", size = 2556338 }, + { url = "https://files.pythonhosted.org/packages/6d/2f/6cad7b5fe86b7652579346cb7f85156c11761df26435651cbba89376cd2c/hf_xet-1.1.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc874b5c843e642f45fd85cda1ce599e123308ad2901ead23d3510a47ff506d1", size = 3102894 }, + { url = "https://files.pythonhosted.org/packages/d0/54/0fcf2b619720a26fbb6cc941e89f2472a522cd963a776c089b189559447f/hf_xet-1.1.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dbba1660e5d810bd0ea77c511a99e9242d920790d0e63c0e4673ed36c4022d18", size = 3002134 }, + { url = "https://files.pythonhosted.org/packages/f3/92/1d351ac6cef7c4ba8c85744d37ffbfac2d53d0a6c04d2cabeba614640a78/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ab34c4c3104133c495785d5d8bba3b1efc99de52c02e759cf711a91fd39d3a14", size = 3171009 }, + { url = "https://files.pythonhosted.org/packages/c9/65/4b2ddb0e3e983f2508528eb4501288ae2f84963586fbdfae596836d5e57a/hf_xet-1.1.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:83088ecea236d5113de478acb2339f92c95b4fb0462acaa30621fac02f5a534a", size = 3279245 }, + { url = "https://files.pythonhosted.org/packages/f0/55/ef77a85ee443ae05a9e9cba1c9f0dd9241eb42da2aeba1dc50f51154c81a/hf_xet-1.1.5-cp37-abi3-win_amd64.whl", hash = "sha256:73e167d9807d166596b4b2f0b585c6d5bd84a26dea32843665a8b58f6edba245", size = 2738931 }, ] [[package]] name = "hiredis" version = "3.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/08/24b72f425b75e1de7442fb1740f69ca66d5820b9f9c0e2511ff9aadab3b7/hiredis-3.2.1.tar.gz", hash = "sha256:5a5f64479bf04dd829fe7029fad0ea043eac4023abc6e946668cbbec3493a78d", size = 89096, upload-time = "2025-05-23T11:41:57.227Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/24b72f425b75e1de7442fb1740f69ca66d5820b9f9c0e2511ff9aadab3b7/hiredis-3.2.1.tar.gz", hash = 
"sha256:5a5f64479bf04dd829fe7029fad0ea043eac4023abc6e946668cbbec3493a78d", size = 89096 } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/84/2ea9636f2ba0811d9eb3bebbbfa84f488238180ddab70c9cb7fa13419d78/hiredis-3.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4ae0be44cab5e74e6e4c4a93d04784629a45e781ff483b136cc9e1b9c23975c", size = 82425, upload-time = "2025-05-23T11:39:54.135Z" }, - { url = "https://files.pythonhosted.org/packages/fc/24/b9ebf766a99998fda3975937afa4912e98de9d7f8d0b83f48096bdd961c1/hiredis-3.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:24647e84c9f552934eb60b7f3d2116f8b64a7020361da9369e558935ca45914d", size = 45231, upload-time = "2025-05-23T11:39:55.455Z" }, - { url = "https://files.pythonhosted.org/packages/68/4c/c009b4d9abeb964d607f0987561892d1589907f770b9e5617552b34a4a4d/hiredis-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fb3e92d1172da8decc5f836bf8b528c0fc9b6d449f1353e79ceeb9dc1801132", size = 43240, upload-time = "2025-05-23T11:39:57.8Z" }, - { url = "https://files.pythonhosted.org/packages/e9/83/d53f3ae9e4ac51b8a35afb7ccd68db871396ed1d7c8ba02ce2c30de0cf17/hiredis-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38ba7a32e51e518b6b3e470142e52ed2674558e04d7d73d86eb19ebcb37d7d40", size = 169624, upload-time = "2025-05-23T11:40:00.055Z" }, - { url = "https://files.pythonhosted.org/packages/91/2f/f9f091526e22a45385d45f3870204dc78aee365b6fe32e679e65674da6a7/hiredis-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fc632be73174891d6bb71480247e57b2fd8f572059f0a1153e4d0339e919779", size = 165799, upload-time = "2025-05-23T11:40:01.194Z" }, - { url = "https://files.pythonhosted.org/packages/1c/cc/e561274438cdb19794f0638136a5a99a9ca19affcb42679b12a78016b8ad/hiredis-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03e6839ff21379ad3c195e0700fc9c209e7f344946dea0f8a6d7b5137a2a141", size = 180612, upload-time = "2025-05-23T11:40:02.385Z" }, - { url = "https://files.pythonhosted.org/packages/83/ba/a8a989f465191d55672e57aea2a331bfa3a74b5cbc6f590031c9e11f7491/hiredis-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99983873e37c71bb71deb544670ff4f9d6920dab272aaf52365606d87a4d6c73", size = 169934, upload-time = "2025-05-23T11:40:03.524Z" }, - { url = "https://files.pythonhosted.org/packages/52/5f/1148e965df1c67b17bdcaef199f54aec3def0955d19660a39c6ee10a6f55/hiredis-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd982c419f48e3a57f592678c72474429465bb4bfc96472ec805f5d836523f0", size = 170074, upload-time = "2025-05-23T11:40:04.618Z" }, - { url = "https://files.pythonhosted.org/packages/43/5e/e6846ad159a938b539fb8d472e2e68cb6758d7c9454ea0520211f335ea72/hiredis-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc993f4aa4abc029347f309e722f122e05a3b8a0c279ae612849b5cc9dc69f2d", size = 164158, upload-time = "2025-05-23T11:40:05.653Z" }, - { url = "https://files.pythonhosted.org/packages/0a/a1/5891e0615f0993f194c1b51a65aaac063b0db318a70df001b28e49f0579d/hiredis-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dde790d420081f18b5949227649ccb3ed991459df33279419a25fcae7f97cd92", size = 162591, upload-time = "2025-05-23T11:40:07.041Z" }, - { url = "https://files.pythonhosted.org/packages/d4/da/8bce52ca81716f53c1014f689aea4c170ba6411e6848f81a1bed1fc375eb/hiredis-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:b0c8cae7edbef860afcf3177b705aef43e10b5628f14d5baf0ec69668247d08d", size = 174808, upload-time = "2025-05-23T11:40:09.146Z" }, - { url = "https://files.pythonhosted.org/packages/84/91/fc1ef444ed4dc432b5da9b48e9bd23266c703528db7be19e2b608d67ba06/hiredis-3.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e8a90eaca7e1ce7f175584f07a2cdbbcab13f4863f9f355d7895c4d28805f65b", size = 167060, upload-time = "2025-05-23T11:40:10.757Z" }, - { url = "https://files.pythonhosted.org/packages/66/ad/beebf73a5455f232b97e00564d1e8ad095d4c6e18858c60c6cfdd893ac1e/hiredis-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:476031958fa44e245e803827e0787d49740daa4de708fe514370293ce519893a", size = 164833, upload-time = "2025-05-23T11:40:12.001Z" }, - { url = "https://files.pythonhosted.org/packages/75/79/a9591bdc0148c0fbdf54cf6f3d449932d3b3b8779e87f33fa100a5a8088f/hiredis-3.2.1-cp311-cp311-win32.whl", hash = "sha256:eb3f5df2a9593b4b4b676dce3cea53b9c6969fc372875188589ddf2bafc7f624", size = 20402, upload-time = "2025-05-23T11:40:13.216Z" }, - { url = "https://files.pythonhosted.org/packages/9f/05/c93cc6fab31e3c01b671126c82f44372fb211facb8bd4571fd372f50898d/hiredis-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1402e763d8a9fdfcc103bbf8b2913971c0a3f7b8a73deacbda3dfe5f3a9d1e0b", size = 22085, upload-time = "2025-05-23T11:40:14.19Z" }, - { url = "https://files.pythonhosted.org/packages/60/a1/6da1578a22df1926497f7a3f6a3d2408fe1d1559f762c1640af5762a8eb6/hiredis-3.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3742d8b17e73c198cabeab11da35f2e2a81999d406f52c6275234592256bf8e8", size = 82627, upload-time = "2025-05-23T11:40:15.362Z" }, - { url = "https://files.pythonhosted.org/packages/6c/b1/1056558ca8dc330be5bb25162fe5f268fee71571c9a535153df9f871a073/hiredis-3.2.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c2f3176fb617a79f6cccf22cb7d2715e590acb534af6a82b41f8196ad59375d", size = 45404, upload-time = "2025-05-23T11:40:16.72Z" }, - { url = "https://files.pythonhosted.org/packages/58/4f/13d1fa1a6b02a99e9fed8f546396f2d598c3613c98e6c399a3284fa65361/hiredis-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a8bd46189c7fa46174e02670dc44dfecb60f5bd4b67ed88cb050d8f1fd842f09", size = 43299, upload-time = "2025-05-23T11:40:17.697Z" }, - { url = "https://files.pythonhosted.org/packages/c0/25/ddfac123ba5a32eb1f0b40ba1b2ec98a599287f7439def8856c3c7e5dd0d/hiredis-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f86ee4488c8575b58139cdfdddeae17f91e9a893ffee20260822add443592e2f", size = 172194, upload-time = "2025-05-23T11:40:19.143Z" }, - { url = "https://files.pythonhosted.org/packages/2c/1e/443a3703ce570b631ca43494094fbaeb051578a0ebe4bfcefde351e1ba25/hiredis-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3717832f4a557b2fe7060b9d4a7900e5de287a15595e398c3f04df69019ca69d", size = 168429, upload-time = "2025-05-23T11:40:20.329Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d6/0d8c6c706ed79b2298c001b5458c055615e3166533dcee3900e821a18a3e/hiredis-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5cb12c21fb9e2403d28c4e6a38120164973342d34d08120f2d7009b66785644", size = 182967, upload-time = "2025-05-23T11:40:21.921Z" }, - { url = "https://files.pythonhosted.org/packages/da/68/da8dd231fbce858b5a20ab7d7bf558912cd125f08bac4c778865ef5fe2c2/hiredis-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:080fda1510bbd389af91f919c11a4f2aa4d92f0684afa4709236faa084a42cac", size = 172495, upload-time = "2025-05-23T11:40:23.105Z" }, - { url = "https://files.pythonhosted.org/packages/65/25/83a31420535e2778662caa95533d5c997011fa6a88331f0cdb22afea9ec3/hiredis-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1252e10a1f3273d1c6bf2021e461652c2e11b05b83e0915d6eb540ec7539afe2", size = 173142, upload-time = "2025-05-23T11:40:24.24Z" }, - { url = "https://files.pythonhosted.org/packages/41/d7/cb907348889eb75e2aa2e6b63e065b611459e0f21fe1e371a968e13f0d55/hiredis-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d9e320e99ab7d2a30dc91ff6f745ba38d39b23f43d345cdee9881329d7b511d6", size = 166433, upload-time = "2025-05-23T11:40:25.287Z" }, - { url = "https://files.pythonhosted.org/packages/01/5d/7cbc69d82af7b29a95723d50f5261555ba3d024bfbdc414bdc3d23c0defb/hiredis-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:641668f385f16550fdd6fdc109b0af6988b94ba2acc06770a5e06a16e88f320c", size = 164883, upload-time = "2025-05-23T11:40:26.454Z" }, - { url = "https://files.pythonhosted.org/packages/f9/00/f995b1296b1d7e0247651347aa230f3225a9800e504fdf553cf7cd001cf7/hiredis-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1e1f44208c39d6c345ff451f82f21e9eeda6fe9af4ac65972cc3eeb58d41f7cb", size = 177262, upload-time = "2025-05-23T11:40:27.576Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f3/723a67d729e94764ce9e0d73fa5f72a0f87d3ce3c98c9a0b27cbf001cc79/hiredis-3.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f882a0d6415fffe1ffcb09e6281d0ba8b1ece470e866612bbb24425bf76cf397", size = 169619, upload-time = "2025-05-23T11:40:29.671Z" }, - { url = "https://files.pythonhosted.org/packages/45/58/f69028df00fb1b223e221403f3be2059ae86031e7885f955d26236bdfc17/hiredis-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4e78719a0730ebffe335528531d154bc8867a246418f74ecd88adbc4d938c49", size = 167303, upload-time = "2025-05-23T11:40:30.902Z" }, - { url = "https://files.pythonhosted.org/packages/2b/7d/567411e65cce76cf265a9a4f837fd2ebc564bef6368dd42ac03f7a517c0a/hiredis-3.2.1-cp312-cp312-win32.whl", hash = "sha256:33c4604d9f79a13b84da79950a8255433fca7edaf292bbd3364fd620864ed7b2", size = 20551, upload-time = "2025-05-23T11:40:32.69Z" }, - { url = "https://files.pythonhosted.org/packages/90/74/b4c291eb4a4a874b3690ff9fc311a65d5292072556421b11b1d786e3e1d0/hiredis-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7b9749375bf9d171aab8813694f379f2cff0330d7424000f5e92890ad4932dc9", size = 22128, upload-time = "2025-05-23T11:40:33.686Z" }, + { url = "https://files.pythonhosted.org/packages/48/84/2ea9636f2ba0811d9eb3bebbbfa84f488238180ddab70c9cb7fa13419d78/hiredis-3.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4ae0be44cab5e74e6e4c4a93d04784629a45e781ff483b136cc9e1b9c23975c", size = 82425 }, + { url = "https://files.pythonhosted.org/packages/fc/24/b9ebf766a99998fda3975937afa4912e98de9d7f8d0b83f48096bdd961c1/hiredis-3.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:24647e84c9f552934eb60b7f3d2116f8b64a7020361da9369e558935ca45914d", size = 45231 }, + { url = "https://files.pythonhosted.org/packages/68/4c/c009b4d9abeb964d607f0987561892d1589907f770b9e5617552b34a4a4d/hiredis-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fb3e92d1172da8decc5f836bf8b528c0fc9b6d449f1353e79ceeb9dc1801132", size = 43240 }, + { url = 
"https://files.pythonhosted.org/packages/e9/83/d53f3ae9e4ac51b8a35afb7ccd68db871396ed1d7c8ba02ce2c30de0cf17/hiredis-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38ba7a32e51e518b6b3e470142e52ed2674558e04d7d73d86eb19ebcb37d7d40", size = 169624 }, + { url = "https://files.pythonhosted.org/packages/91/2f/f9f091526e22a45385d45f3870204dc78aee365b6fe32e679e65674da6a7/hiredis-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fc632be73174891d6bb71480247e57b2fd8f572059f0a1153e4d0339e919779", size = 165799 }, + { url = "https://files.pythonhosted.org/packages/1c/cc/e561274438cdb19794f0638136a5a99a9ca19affcb42679b12a78016b8ad/hiredis-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03e6839ff21379ad3c195e0700fc9c209e7f344946dea0f8a6d7b5137a2a141", size = 180612 }, + { url = "https://files.pythonhosted.org/packages/83/ba/a8a989f465191d55672e57aea2a331bfa3a74b5cbc6f590031c9e11f7491/hiredis-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99983873e37c71bb71deb544670ff4f9d6920dab272aaf52365606d87a4d6c73", size = 169934 }, + { url = "https://files.pythonhosted.org/packages/52/5f/1148e965df1c67b17bdcaef199f54aec3def0955d19660a39c6ee10a6f55/hiredis-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd982c419f48e3a57f592678c72474429465bb4bfc96472ec805f5d836523f0", size = 170074 }, + { url = "https://files.pythonhosted.org/packages/43/5e/e6846ad159a938b539fb8d472e2e68cb6758d7c9454ea0520211f335ea72/hiredis-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc993f4aa4abc029347f309e722f122e05a3b8a0c279ae612849b5cc9dc69f2d", size = 164158 }, + { url = "https://files.pythonhosted.org/packages/0a/a1/5891e0615f0993f194c1b51a65aaac063b0db318a70df001b28e49f0579d/hiredis-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dde790d420081f18b5949227649ccb3ed991459df33279419a25fcae7f97cd92", size = 162591 }, + { url = "https://files.pythonhosted.org/packages/d4/da/8bce52ca81716f53c1014f689aea4c170ba6411e6848f81a1bed1fc375eb/hiredis-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b0c8cae7edbef860afcf3177b705aef43e10b5628f14d5baf0ec69668247d08d", size = 174808 }, + { url = "https://files.pythonhosted.org/packages/84/91/fc1ef444ed4dc432b5da9b48e9bd23266c703528db7be19e2b608d67ba06/hiredis-3.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e8a90eaca7e1ce7f175584f07a2cdbbcab13f4863f9f355d7895c4d28805f65b", size = 167060 }, + { url = "https://files.pythonhosted.org/packages/66/ad/beebf73a5455f232b97e00564d1e8ad095d4c6e18858c60c6cfdd893ac1e/hiredis-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:476031958fa44e245e803827e0787d49740daa4de708fe514370293ce519893a", size = 164833 }, + { url = "https://files.pythonhosted.org/packages/75/79/a9591bdc0148c0fbdf54cf6f3d449932d3b3b8779e87f33fa100a5a8088f/hiredis-3.2.1-cp311-cp311-win32.whl", hash = "sha256:eb3f5df2a9593b4b4b676dce3cea53b9c6969fc372875188589ddf2bafc7f624", size = 20402 }, + { url = "https://files.pythonhosted.org/packages/9f/05/c93cc6fab31e3c01b671126c82f44372fb211facb8bd4571fd372f50898d/hiredis-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1402e763d8a9fdfcc103bbf8b2913971c0a3f7b8a73deacbda3dfe5f3a9d1e0b", size = 22085 }, + { url = "https://files.pythonhosted.org/packages/60/a1/6da1578a22df1926497f7a3f6a3d2408fe1d1559f762c1640af5762a8eb6/hiredis-3.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = 
"sha256:3742d8b17e73c198cabeab11da35f2e2a81999d406f52c6275234592256bf8e8", size = 82627 }, + { url = "https://files.pythonhosted.org/packages/6c/b1/1056558ca8dc330be5bb25162fe5f268fee71571c9a535153df9f871a073/hiredis-3.2.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c2f3176fb617a79f6cccf22cb7d2715e590acb534af6a82b41f8196ad59375d", size = 45404 }, + { url = "https://files.pythonhosted.org/packages/58/4f/13d1fa1a6b02a99e9fed8f546396f2d598c3613c98e6c399a3284fa65361/hiredis-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a8bd46189c7fa46174e02670dc44dfecb60f5bd4b67ed88cb050d8f1fd842f09", size = 43299 }, + { url = "https://files.pythonhosted.org/packages/c0/25/ddfac123ba5a32eb1f0b40ba1b2ec98a599287f7439def8856c3c7e5dd0d/hiredis-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f86ee4488c8575b58139cdfdddeae17f91e9a893ffee20260822add443592e2f", size = 172194 }, + { url = "https://files.pythonhosted.org/packages/2c/1e/443a3703ce570b631ca43494094fbaeb051578a0ebe4bfcefde351e1ba25/hiredis-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3717832f4a557b2fe7060b9d4a7900e5de287a15595e398c3f04df69019ca69d", size = 168429 }, + { url = "https://files.pythonhosted.org/packages/3b/d6/0d8c6c706ed79b2298c001b5458c055615e3166533dcee3900e821a18a3e/hiredis-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5cb12c21fb9e2403d28c4e6a38120164973342d34d08120f2d7009b66785644", size = 182967 }, + { url = "https://files.pythonhosted.org/packages/da/68/da8dd231fbce858b5a20ab7d7bf558912cd125f08bac4c778865ef5fe2c2/hiredis-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:080fda1510bbd389af91f919c11a4f2aa4d92f0684afa4709236faa084a42cac", size = 172495 }, + { url = "https://files.pythonhosted.org/packages/65/25/83a31420535e2778662caa95533d5c997011fa6a88331f0cdb22afea9ec3/hiredis-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1252e10a1f3273d1c6bf2021e461652c2e11b05b83e0915d6eb540ec7539afe2", size = 173142 }, + { url = "https://files.pythonhosted.org/packages/41/d7/cb907348889eb75e2aa2e6b63e065b611459e0f21fe1e371a968e13f0d55/hiredis-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d9e320e99ab7d2a30dc91ff6f745ba38d39b23f43d345cdee9881329d7b511d6", size = 166433 }, + { url = "https://files.pythonhosted.org/packages/01/5d/7cbc69d82af7b29a95723d50f5261555ba3d024bfbdc414bdc3d23c0defb/hiredis-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:641668f385f16550fdd6fdc109b0af6988b94ba2acc06770a5e06a16e88f320c", size = 164883 }, + { url = "https://files.pythonhosted.org/packages/f9/00/f995b1296b1d7e0247651347aa230f3225a9800e504fdf553cf7cd001cf7/hiredis-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1e1f44208c39d6c345ff451f82f21e9eeda6fe9af4ac65972cc3eeb58d41f7cb", size = 177262 }, + { url = "https://files.pythonhosted.org/packages/c5/f3/723a67d729e94764ce9e0d73fa5f72a0f87d3ce3c98c9a0b27cbf001cc79/hiredis-3.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f882a0d6415fffe1ffcb09e6281d0ba8b1ece470e866612bbb24425bf76cf397", size = 169619 }, + { url = "https://files.pythonhosted.org/packages/45/58/f69028df00fb1b223e221403f3be2059ae86031e7885f955d26236bdfc17/hiredis-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4e78719a0730ebffe335528531d154bc8867a246418f74ecd88adbc4d938c49", size = 167303 }, + { url = 
"https://files.pythonhosted.org/packages/2b/7d/567411e65cce76cf265a9a4f837fd2ebc564bef6368dd42ac03f7a517c0a/hiredis-3.2.1-cp312-cp312-win32.whl", hash = "sha256:33c4604d9f79a13b84da79950a8255433fca7edaf292bbd3364fd620864ed7b2", size = 20551 }, + { url = "https://files.pythonhosted.org/packages/90/74/b4c291eb4a4a874b3690ff9fc311a65d5292072556421b11b1d786e3e1d0/hiredis-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7b9749375bf9d171aab8813694f379f2cff0330d7424000f5e92890ad4932dc9", size = 22128 }, ] [[package]] name = "hpack" version = "4.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357 }, ] [[package]] @@ -2519,9 +2521,9 @@ dependencies = [ { name = "six" }, { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215, upload-time = "2020-06-22T23:32:38.834Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" }, + { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173 }, ] [[package]] @@ -2532,9 +2534,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, ] [[package]] @@ -2544,31 +2546,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyparsing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116, upload-time = "2023-03-21T22:29:37.214Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/ad/2371116b22d616c194aa25ec410c9c6c37f23599dcd590502b74db197584/httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81", size = 351116 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854, upload-time = "2023-03-21T22:29:35.683Z" }, + { url = "https://files.pythonhosted.org/packages/a8/6c/d2fbdaaa5959339d53ba38e94c123e4e84b8fbc4b84beb0e70d7c1608486/httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", size = 96854 }, ] [[package]] name = "httptools" version = "0.6.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639, upload-time = "2024-10-16T19:45:08.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029, upload-time = "2024-10-16T19:44:18.427Z" }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492, upload-time = "2024-10-16T19:44:19.515Z" }, - { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891, upload-time = "2024-10-16T19:44:21.067Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788, upload-time = "2024-10-16T19:44:22.958Z" }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214, upload-time = "2024-10-16T19:44:24.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120, upload-time = "2024-10-16T19:44:26.295Z" }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565, upload-time = "2024-10-16T19:44:29.188Z" }, - { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683, upload-time = "2024-10-16T19:44:30.175Z" }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337, upload-time = "2024-10-16T19:44:31.786Z" }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796, upload-time = "2024-10-16T19:44:32.825Z" }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837, upload-time = "2024-10-16T19:44:33.974Z" }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289, upload-time = "2024-10-16T19:44:35.111Z" }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779, upload-time = "2024-10-16T19:44:36.253Z" }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634, upload-time = 
"2024-10-16T19:44:37.357Z" }, + { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029 }, + { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492 }, + { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891 }, + { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788 }, + { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214 }, + { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120 }, + { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565 }, + { url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 }, + { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 }, + { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 }, + { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 }, + { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 }, + { url = 
"https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 }, + { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 }, ] [[package]] @@ -2582,9 +2584,9 @@ dependencies = [ { name = "idna" }, { name = "sniffio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189, upload-time = "2024-08-27T12:54:01.334Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395, upload-time = "2024-08-27T12:53:59.653Z" }, + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, ] [package.optional-dependencies] @@ -2599,9 +2601,9 @@ socks = [ name = "httpx-sse" version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998 } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054 }, ] [[package]] @@ -2618,9 +2620,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/42/8a95c5632080ae312c0498744b2b852195e10b05a20b1be11c5141092f4c/huggingface_hub-0.33.2.tar.gz", hash = "sha256:84221defaec8fa09c090390cd68c78b88e3c4c2b7befba68d3dc5aacbc3c2c5f", size = 426637, upload-time = "2025-07-02T06:26:05.156Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/42/8a95c5632080ae312c0498744b2b852195e10b05a20b1be11c5141092f4c/huggingface_hub-0.33.2.tar.gz", hash = 
"sha256:84221defaec8fa09c090390cd68c78b88e3c4c2b7befba68d3dc5aacbc3c2c5f", size = 426637 } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/f4/5f3f22e762ad1965f01122b42dae5bf0e009286e2dba601ce1d0dba72424/huggingface_hub-0.33.2-py3-none-any.whl", hash = "sha256:3749498bfa91e8cde2ddc2c1db92c79981f40e66434c20133b39e5928ac9bcc5", size = 515373, upload-time = "2025-07-02T06:26:03.072Z" }, + { url = "https://files.pythonhosted.org/packages/44/f4/5f3f22e762ad1965f01122b42dae5bf0e009286e2dba601ce1d0dba72424/huggingface_hub-0.33.2-py3-none-any.whl", hash = "sha256:3749498bfa91e8cde2ddc2c1db92c79981f40e66434c20133b39e5928ac9bcc5", size = 515373 }, ] [[package]] @@ -2630,18 +2632,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyreadline3", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, ] [[package]] name = "hyperframe" version = "6.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566 } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007 }, ] [[package]] @@ -2652,18 +2654,18 @@ dependencies = [ { name = "attrs" }, { name = "sortedcontainers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/83/15c4e30561a0d8c8d076c88cb159187823d877118f34c851ada3b9b02a7b/hypothesis-6.135.26.tar.gz", hash = "sha256:73af0e46cd5039c6806f514fed6a3c185d91ef88b5a1577477099ddbd1a2e300", size = 454523, upload-time = "2025-07-05T04:59:45.443Z" } +sdist = { 
url = "https://files.pythonhosted.org/packages/da/83/15c4e30561a0d8c8d076c88cb159187823d877118f34c851ada3b9b02a7b/hypothesis-6.135.26.tar.gz", hash = "sha256:73af0e46cd5039c6806f514fed6a3c185d91ef88b5a1577477099ddbd1a2e300", size = 454523 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/78/db4fdc464219455f8dde90074660c3faf8429101b2d1299cac7d219e3176/hypothesis-6.135.26-py3-none-any.whl", hash = "sha256:fa237cbe2ae2c31d65f7230dcb866139ace635dcfec6c30dddf25974dd8ff4b9", size = 521517, upload-time = "2025-07-05T04:59:42.061Z" }, + { url = "https://files.pythonhosted.org/packages/3c/78/db4fdc464219455f8dde90074660c3faf8429101b2d1299cac7d219e3176/hypothesis-6.135.26-py3-none-any.whl", hash = "sha256:fa237cbe2ae2c31d65f7230dcb866139ace635dcfec6c30dddf25974dd8ff4b9", size = 521517 }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, ] [[package]] @@ -2673,52 +2675,52 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/bd/fa8ce65b0a7d4b6d143ec23b0f5fd3f7ab80121078c465bc02baeaab22dc/importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5", size = 54320, upload-time = "2024-08-20T17:11:42.348Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/bd/fa8ce65b0a7d4b6d143ec23b0f5fd3f7ab80121078c465bc02baeaab22dc/importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5", size = 54320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/14/362d31bf1076b21e1bcdcb0dc61944822ff263937b804a79231df2774d28/importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", size = 26269, upload-time = "2024-08-20T17:11:41.102Z" }, + { url = "https://files.pythonhosted.org/packages/c0/14/362d31bf1076b21e1bcdcb0dc61944822ff263937b804a79231df2774d28/importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", size = 26269 }, ] [[package]] name = "importlib-resources" version = "6.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = 
"sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461 }, ] [[package]] name = "iniconfig" version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, ] [[package]] name = "isodate" version = "0.7.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 }, ] [[package]] name = "itsdangerous" version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, + { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, ] [[package]] name = "jieba" version = "0.42.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2", size = 19214172, upload-time = "2020-01-20T14:27:23.5Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/cb/18eeb235f833b726522d7ebed54f2278ce28ba9438e3135ab0278d9792a2/jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2", size = 19214172 } [[package]] name = "jinja2" @@ -2727,68 +2729,68 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, ] [[package]] name = "jiter" version = "0.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = 
"sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473, upload-time = "2025-05-18T19:03:25.942Z" }, - { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971, upload-time = "2025-05-18T19:03:27.255Z" }, - { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574, upload-time = "2025-05-18T19:03:28.63Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028, upload-time = "2025-05-18T19:03:30.292Z" }, - { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083, upload-time = "2025-05-18T19:03:31.654Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821, upload-time = "2025-05-18T19:03:33.184Z" }, - { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174, upload-time = "2025-05-18T19:03:34.965Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869, upload-time = "2025-05-18T19:03:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741, upload-time = "2025-05-18T19:03:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527, upload-time = "2025-05-18T19:03:39.577Z" }, - { url = "https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = 
"sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765, upload-time = "2025-05-18T19:03:41.271Z" }, - { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234, upload-time = "2025-05-18T19:03:42.918Z" }, - { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262, upload-time = "2025-05-18T19:03:44.637Z" }, - { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124, upload-time = "2025-05-18T19:03:46.341Z" }, - { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330, upload-time = "2025-05-18T19:03:47.596Z" }, - { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670, upload-time = "2025-05-18T19:03:49.334Z" }, - { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057, upload-time = "2025-05-18T19:03:50.66Z" }, - { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372, upload-time = "2025-05-18T19:03:51.98Z" }, - { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038, upload-time = "2025-05-18T19:03:53.703Z" }, - { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538, upload-time = "2025-05-18T19:03:55.046Z" }, - { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557, upload-time = "2025-05-18T19:03:56.386Z" }, - { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202, upload-time = "2025-05-18T19:03:57.675Z" }, - { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", size = 211781, upload-time = "2025-05-18T19:03:59.025Z" }, - { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176, upload-time = "2025-05-18T19:04:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/1b/dd/6cefc6bd68b1c3c979cecfa7029ab582b57690a31cd2f346c4d0ce7951b6/jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978", size = 317473 }, + { url = "https://files.pythonhosted.org/packages/be/cf/fc33f5159ce132be1d8dd57251a1ec7a631c7df4bd11e1cd198308c6ae32/jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc", size = 321971 }, + { url = "https://files.pythonhosted.org/packages/68/a4/da3f150cf1d51f6c472616fb7650429c7ce053e0c962b41b68557fdf6379/jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d", size = 345574 }, + { url = "https://files.pythonhosted.org/packages/84/34/6e8d412e60ff06b186040e77da5f83bc158e9735759fcae65b37d681f28b/jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2", size = 371028 }, + { url = "https://files.pythonhosted.org/packages/fb/d9/9ee86173aae4576c35a2f50ae930d2ccb4c4c236f6cb9353267aa1d626b7/jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61", size = 491083 }, + { url = "https://files.pythonhosted.org/packages/d9/2c/f955de55e74771493ac9e188b0f731524c6a995dffdcb8c255b89c6fb74b/jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db", size = 388821 }, + { url = "https://files.pythonhosted.org/packages/81/5a/0e73541b6edd3f4aada586c24e50626c7815c561a7ba337d6a7eb0a915b4/jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5", size = 352174 }, + { url = "https://files.pythonhosted.org/packages/1c/c0/61eeec33b8c75b31cae42be14d44f9e6fe3ac15a4e58010256ac3abf3638/jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606", size = 391869 }, + { url = "https://files.pythonhosted.org/packages/41/22/5beb5ee4ad4ef7d86f5ea5b4509f680a20706c4a7659e74344777efb7739/jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605", size = 523741 }, + { url = "https://files.pythonhosted.org/packages/ea/10/768e8818538e5817c637b0df52e54366ec4cebc3346108a4457ea7a98f32/jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5", size = 514527 }, + { url = 
"https://files.pythonhosted.org/packages/73/6d/29b7c2dc76ce93cbedabfd842fc9096d01a0550c52692dfc33d3cc889815/jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7", size = 210765 }, + { url = "https://files.pythonhosted.org/packages/c2/c9/d394706deb4c660137caf13e33d05a031d734eb99c051142e039d8ceb794/jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812", size = 209234 }, + { url = "https://files.pythonhosted.org/packages/6d/b5/348b3313c58f5fbfb2194eb4d07e46a35748ba6e5b3b3046143f3040bafa/jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b", size = 312262 }, + { url = "https://files.pythonhosted.org/packages/9c/4a/6a2397096162b21645162825f058d1709a02965606e537e3304b02742e9b/jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744", size = 320124 }, + { url = "https://files.pythonhosted.org/packages/2a/85/1ce02cade7516b726dd88f59a4ee46914bf79d1676d1228ef2002ed2f1c9/jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2", size = 345330 }, + { url = "https://files.pythonhosted.org/packages/75/d0/bb6b4f209a77190ce10ea8d7e50bf3725fc16d3372d0a9f11985a2b23eff/jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026", size = 369670 }, + { url = "https://files.pythonhosted.org/packages/a0/f5/a61787da9b8847a601e6827fbc42ecb12be2c925ced3252c8ffcb56afcaf/jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c", size = 489057 }, + { url = "https://files.pythonhosted.org/packages/12/e4/6f906272810a7b21406c760a53aadbe52e99ee070fc5c0cb191e316de30b/jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959", size = 389372 }, + { url = "https://files.pythonhosted.org/packages/e2/ba/77013b0b8ba904bf3762f11e0129b8928bff7f978a81838dfcc958ad5728/jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a", size = 352038 }, + { url = "https://files.pythonhosted.org/packages/67/27/c62568e3ccb03368dbcc44a1ef3a423cb86778a4389e995125d3d1aaa0a4/jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95", size = 391538 }, + { url = "https://files.pythonhosted.org/packages/c0/72/0d6b7e31fc17a8fdce76164884edef0698ba556b8eb0af9546ae1a06b91d/jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea", size = 523557 }, + { url = "https://files.pythonhosted.org/packages/2f/09/bc1661fbbcbeb6244bd2904ff3a06f340aa77a2b94e5a7373fd165960ea3/jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b", size = 514202 }, + { url = "https://files.pythonhosted.org/packages/1b/84/5a5d5400e9d4d54b8004c9673bbe4403928a00d28529ff35b19e9d176b19/jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01", 
size = 211781 }, + { url = "https://files.pythonhosted.org/packages/9b/52/7ec47455e26f2d6e5f2ea4951a0652c06e5b995c291f723973ae9e724a65/jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49", size = 206176 }, ] [[package]] name = "jmespath" version = "0.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", size = 21607, upload-time = "2020-05-12T22:03:47.267Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/56/3f325b1eef9791759784aa5046a8f6a1aff8f7c898a2e34506771d3b99d8/jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", size = 21607 } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/cb/5f001272b6faeb23c1c9e0acc04d48eaaf5c862c17709d20e3469c6e0139/jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f", size = 24489, upload-time = "2020-05-12T22:03:45.643Z" }, + { url = "https://files.pythonhosted.org/packages/07/cb/5f001272b6faeb23c1c9e0acc04d48eaaf5c862c17709d20e3469c6e0139/jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f", size = 24489 }, ] [[package]] name = "joblib" version = "1.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/fe/0f5a938c54105553436dbff7a61dc4fed4b1b2c98852f8833beaf4d5968f/joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444", size = 330475, upload-time = "2025-05-23T12:04:37.097Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/fe/0f5a938c54105553436dbff7a61dc4fed4b1b2c98852f8833beaf4d5968f/joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444", size = 330475 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7d/4f/1195bbac8e0c2acc5f740661631d8d750dc38d4a32b23ee5df3cde6f4e0d/joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a", size = 307746, upload-time = "2025-05-23T12:04:35.124Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4f/1195bbac8e0c2acc5f740661631d8d750dc38d4a32b23ee5df3cde6f4e0d/joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a", size = 307746 }, ] [[package]] name = "json-repair" version = "0.47.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/9e/e8bcda4fd47b16fcd4f545af258d56ba337fa43b847beb213818d7641515/json_repair-0.47.6.tar.gz", hash = "sha256:4af5a14b9291d4d005a11537bae5a6b7912376d7584795f0ac1b23724b999620", size = 34400, upload-time = "2025-07-01T15:42:07.458Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/9e/e8bcda4fd47b16fcd4f545af258d56ba337fa43b847beb213818d7641515/json_repair-0.47.6.tar.gz", hash = "sha256:4af5a14b9291d4d005a11537bae5a6b7912376d7584795f0ac1b23724b999620", size = 34400 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/f8/f464ce2afc4be5decf53d0171c2d399d9ee6cd70d2273b8e85e7c6d00324/json_repair-0.47.6-py3-none-any.whl", hash = "sha256:1c9da58fb6240f99b8405f63534e08f8402793f09074dea25800a0b232d4fb19", 
size = 25754, upload-time = "2025-07-01T15:42:06.418Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f8/f464ce2afc4be5decf53d0171c2d399d9ee6cd70d2273b8e85e7c6d00324/json_repair-0.47.6-py3-none-any.whl", hash = "sha256:1c9da58fb6240f99b8405f63534e08f8402793f09074dea25800a0b232d4fb19", size = 25754 }, ] [[package]] @@ -2801,9 +2803,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, + { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709 }, ] [[package]] @@ -2813,9 +2815,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513, upload-time = "2025-04-23T12:34:07.418Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/ce/46fbd9c8119cfc3581ee5643ea49464d168028cfb5caff5fc0596d0cf914/jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608", size = 15513 } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437, upload-time = "2025-04-23T12:34:05.422Z" }, + { url = "https://files.pythonhosted.org/packages/01/0e/b27cdbaccf30b890c40ed1da9fd4a3593a5cf94dae54fb34f8a4b74fcd3f/jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af", size = 18437 }, ] [[package]] @@ -2828,9 +2830,9 @@ dependencies = [ { name = "tzdata" }, { name = "vine" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992, upload-time = "2025-06-01T10:19:22.281Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" }, + { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034 }, ] [[package]] @@ -2850,9 +2852,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779 } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, + { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335 }, ] [[package]] @@ -2862,7 +2864,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474, upload-time = "2021-05-07T07:54:13.562Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474 } [[package]] name = "langfuse" @@ -2877,9 +2879,9 @@ dependencies = [ { name = "pydantic" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/e9/22c9c05d877ab85da6d9008aaa7360f2a9ad58787a8e36e00b1b5be9a990/langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b", size = 117574, upload-time = "2024-10-09T00:59:15.016Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/e9/22c9c05d877ab85da6d9008aaa7360f2a9ad58787a8e36e00b1b5be9a990/langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b", size = 117574 } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/f7/242a13ca094c78464b7d4df77dfe7d4c44ed77b15fed3d2e3486afa5d2e1/langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb", size = 214281, upload-time = "2024-10-09T00:59:12.596Z" }, + { url = "https://files.pythonhosted.org/packages/03/f7/242a13ca094c78464b7d4df77dfe7d4c44ed77b15fed3d2e3486afa5d2e1/langfuse-2.51.5-py3-none-any.whl", hash = 
"sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb", size = 214281 }, ] [[package]] @@ -2893,9 +2895,9 @@ dependencies = [ { name = "requests" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/56/201dd94d492ae47c1bf9b50cacc1985113dc2288d8f15857e1f4a6818376/langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a", size = 300453, upload-time = "2024-11-27T17:32:41.297Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/56/201dd94d492ae47c1bf9b50cacc1985113dc2288d8f15857e1f4a6818376/langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a", size = 300453 } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812, upload-time = "2024-11-27T17:32:39.569Z" }, + { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812 }, ] [[package]] @@ -2915,98 +2917,98 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/7a/6c1994a239abd1b335001a46ae47fa055a24c493b6de19a9fa1872187fe9/litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1", size = 6598034, upload-time = "2025-03-12T19:26:40.915Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/7a/6c1994a239abd1b335001a46ae47fa055a24c493b6de19a9fa1872187fe9/litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1", size = 6598034 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/44/255c7ecb8b6f3f730a37422736509c21cb1bf4da66cc060d872005bda9f5/litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7", size = 6909807, upload-time = "2025-03-12T19:26:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/1e/44/255c7ecb8b6f3f730a37422736509c21cb1bf4da66cc060d872005bda9f5/litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7", size = 6909807 }, ] [[package]] name = "llvmlite" version = "0.44.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880, upload-time = "2025-01-20T11:14:41.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/6a/95a3d3610d5c75293d5dbbb2a76480d5d4eeba641557b69fe90af6c5b84e/llvmlite-0.44.0.tar.gz", hash = "sha256:07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4", size = 171880 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305, upload-time = "2025-01-20T11:12:53.936Z" }, - { url = 
"https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090, upload-time = "2025-01-20T11:12:59.847Z" }, - { url = "https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858, upload-time = "2025-01-20T11:13:07.623Z" }, - { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200, upload-time = "2025-01-20T11:13:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193, upload-time = "2025-01-20T11:13:26.976Z" }, - { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297, upload-time = "2025-01-20T11:13:32.57Z" }, - { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105, upload-time = "2025-01-20T11:13:38.744Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901, upload-time = "2025-01-20T11:13:46.711Z" }, - { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247, upload-time = "2025-01-20T11:13:56.159Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380, upload-time = "2025-01-20T11:14:02.442Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e2/86b245397052386595ad726f9742e5223d7aea999b18c518a50e96c3aca4/llvmlite-0.44.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:eed7d5f29136bda63b6d7804c279e2b72e08c952b7c5df61f45db408e0ee52f3", size = 28132305 }, + { url = "https://files.pythonhosted.org/packages/ff/ec/506902dc6870249fbe2466d9cf66d531265d0f3a1157213c8f986250c033/llvmlite-0.44.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ace564d9fa44bb91eb6e6d8e7754977783c68e90a471ea7ce913bff30bd62427", size = 26201090 }, + { url = 
"https://files.pythonhosted.org/packages/99/fe/d030f1849ebb1f394bb3f7adad5e729b634fb100515594aca25c354ffc62/llvmlite-0.44.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5d22c3bfc842668168a786af4205ec8e3ad29fb1bc03fd11fd48460d0df64c1", size = 42361858 }, + { url = "https://files.pythonhosted.org/packages/d7/7a/ce6174664b9077fc673d172e4c888cb0b128e707e306bc33fff8c2035f0d/llvmlite-0.44.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f01a394e9c9b7b1d4e63c327b096d10f6f0ed149ef53d38a09b3749dcf8c9610", size = 41184200 }, + { url = "https://files.pythonhosted.org/packages/5f/c6/258801143975a6d09a373f2641237992496e15567b907a4d401839d671b8/llvmlite-0.44.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8489634d43c20cd0ad71330dde1d5bc7b9966937a263ff1ec1cebb90dc50955", size = 30331193 }, + { url = "https://files.pythonhosted.org/packages/15/86/e3c3195b92e6e492458f16d233e58a1a812aa2bfbef9bdd0fbafcec85c60/llvmlite-0.44.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:1d671a56acf725bf1b531d5ef76b86660a5ab8ef19bb6a46064a705c6ca80aad", size = 28132297 }, + { url = "https://files.pythonhosted.org/packages/d6/53/373b6b8be67b9221d12b24125fd0ec56b1078b660eeae266ec388a6ac9a0/llvmlite-0.44.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f79a728e0435493611c9f405168682bb75ffd1fbe6fc360733b850c80a026db", size = 26201105 }, + { url = "https://files.pythonhosted.org/packages/cb/da/8341fd3056419441286c8e26bf436923021005ece0bff5f41906476ae514/llvmlite-0.44.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0143a5ef336da14deaa8ec26c5449ad5b6a2b564df82fcef4be040b9cacfea9", size = 42361901 }, + { url = "https://files.pythonhosted.org/packages/53/ad/d79349dc07b8a395a99153d7ce8b01d6fcdc9f8231355a5df55ded649b61/llvmlite-0.44.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d752f89e31b66db6f8da06df8b39f9b91e78c5feea1bf9e8c1fba1d1c24c065d", size = 41184247 }, + { url = "https://files.pythonhosted.org/packages/e2/3b/a9a17366af80127bd09decbe2a54d8974b6d8b274b39bf47fbaedeec6307/llvmlite-0.44.0-cp312-cp312-win_amd64.whl", hash = "sha256:eae7e2d4ca8f88f89d315b48c6b741dcb925d6a1042da694aa16ab3dd4cbd3a1", size = 30332380 }, ] [[package]] name = "lxml" version = "6.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c5/ed/60eb6fa2923602fba988d9ca7c5cdbd7cf25faa795162ed538b527a35411/lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72", size = 4096938, upload-time = "2025-06-26T16:28:19.373Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/ed/60eb6fa2923602fba988d9ca7c5cdbd7cf25faa795162ed538b527a35411/lxml-6.0.0.tar.gz", hash = "sha256:032e65120339d44cdc3efc326c9f660f5f7205f3a535c1fdbf898b29ea01fb72", size = 4096938 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/23/828d4cc7da96c611ec0ce6147bbcea2fdbde023dc995a165afa512399bbf/lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36", size = 8438217, upload-time = "2025-06-26T16:25:34.349Z" }, - { url = "https://files.pythonhosted.org/packages/f1/33/5ac521212c5bcb097d573145d54b2b4a3c9766cda88af5a0e91f66037c6e/lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25", size = 4590317, upload-time = "2025-06-26T16:25:38.103Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/2e/45b7ca8bee304c07f54933c37afe7dd4d39ff61ba2757f519dcc71bc5d44/lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3", size = 5221628, upload-time = "2025-06-26T16:25:40.878Z" }, - { url = "https://files.pythonhosted.org/packages/32/23/526d19f7eb2b85da1f62cffb2556f647b049ebe2a5aa8d4d41b1fb2c7d36/lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6", size = 4949429, upload-time = "2025-06-28T18:47:20.046Z" }, - { url = "https://files.pythonhosted.org/packages/ac/cc/f6be27a5c656a43a5344e064d9ae004d4dcb1d3c9d4f323c8189ddfe4d13/lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b", size = 5087909, upload-time = "2025-06-28T18:47:22.834Z" }, - { url = "https://files.pythonhosted.org/packages/3b/e6/8ec91b5bfbe6972458bc105aeb42088e50e4b23777170404aab5dfb0c62d/lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967", size = 5031713, upload-time = "2025-06-26T16:25:43.226Z" }, - { url = "https://files.pythonhosted.org/packages/33/cf/05e78e613840a40e5be3e40d892c48ad3e475804db23d4bad751b8cadb9b/lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e", size = 5232417, upload-time = "2025-06-26T16:25:46.111Z" }, - { url = "https://files.pythonhosted.org/packages/ac/8c/6b306b3e35c59d5f0b32e3b9b6b3b0739b32c0dc42a295415ba111e76495/lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58", size = 4681443, upload-time = "2025-06-26T16:25:48.837Z" }, - { url = "https://files.pythonhosted.org/packages/59/43/0bd96bece5f7eea14b7220476835a60d2b27f8e9ca99c175f37c085cb154/lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2", size = 5074542, upload-time = "2025-06-26T16:25:51.65Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3d/32103036287a8ca012d8518071f8852c68f2b3bfe048cef2a0202eb05910/lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851", size = 4729471, upload-time = "2025-06-26T16:25:54.571Z" }, - { url = "https://files.pythonhosted.org/packages/ca/a8/7be5d17df12d637d81854bd8648cd329f29640a61e9a72a3f77add4a311b/lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f", size = 5256285, upload-time = "2025-06-26T16:25:56.997Z" }, - { url = "https://files.pythonhosted.org/packages/cd/d0/6cb96174c25e0d749932557c8d51d60c6e292c877b46fae616afa23ed31a/lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c", size = 3612004, upload-time = "2025-06-26T16:25:59.11Z" }, - { url = "https://files.pythonhosted.org/packages/ca/77/6ad43b165dfc6dead001410adeb45e88597b25185f4479b7ca3b16a5808f/lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816", size = 4003470, upload-time = "2025-06-26T16:26:01.655Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/bc/4c50ec0eb14f932a18efc34fc86ee936a66c0eb5f2fe065744a2da8a68b2/lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab", size = 3682477, upload-time = "2025-06-26T16:26:03.808Z" }, - { url = "https://files.pythonhosted.org/packages/89/c3/d01d735c298d7e0ddcedf6f028bf556577e5ab4f4da45175ecd909c79378/lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108", size = 8429515, upload-time = "2025-06-26T16:26:06.776Z" }, - { url = "https://files.pythonhosted.org/packages/06/37/0e3eae3043d366b73da55a86274a590bae76dc45aa004b7042e6f97803b1/lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be", size = 4601387, upload-time = "2025-06-26T16:26:09.511Z" }, - { url = "https://files.pythonhosted.org/packages/a3/28/e1a9a881e6d6e29dda13d633885d13acb0058f65e95da67841c8dd02b4a8/lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab", size = 5228928, upload-time = "2025-06-26T16:26:12.337Z" }, - { url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289, upload-time = "2025-06-28T18:47:25.602Z" }, - { url = "https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310, upload-time = "2025-06-28T18:47:28.136Z" }, - { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457, upload-time = "2025-06-26T16:26:15.068Z" }, - { url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016, upload-time = "2025-07-03T19:19:06.008Z" }, - { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565, upload-time = "2025-06-26T16:26:17.906Z" }, - { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390, upload-time = "2025-06-26T16:26:20.292Z" }, - { url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103, upload-time = 
"2025-06-26T16:26:22.765Z" }, - { url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428, upload-time = "2025-06-26T16:26:26.461Z" }, - { url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523, upload-time = "2025-07-03T19:19:09.837Z" }, - { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290, upload-time = "2025-06-26T16:26:29.406Z" }, - { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495, upload-time = "2025-06-26T16:26:31.588Z" }, - { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711, upload-time = "2025-06-26T16:26:33.723Z" }, - { url = "https://files.pythonhosted.org/packages/55/10/dc8e5290ae4c94bdc1a4c55865be7e1f31dfd857a88b21cbba68b5fea61b/lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb", size = 3674431, upload-time = "2025-06-26T16:26:35.959Z" }, + { url = "https://files.pythonhosted.org/packages/7c/23/828d4cc7da96c611ec0ce6147bbcea2fdbde023dc995a165afa512399bbf/lxml-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ee56288d0df919e4aac43b539dd0e34bb55d6a12a6562038e8d6f3ed07f9e36", size = 8438217 }, + { url = "https://files.pythonhosted.org/packages/f1/33/5ac521212c5bcb097d573145d54b2b4a3c9766cda88af5a0e91f66037c6e/lxml-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8dd6dd0e9c1992613ccda2bcb74fc9d49159dbe0f0ca4753f37527749885c25", size = 4590317 }, + { url = "https://files.pythonhosted.org/packages/2b/2e/45b7ca8bee304c07f54933c37afe7dd4d39ff61ba2757f519dcc71bc5d44/lxml-6.0.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:d7ae472f74afcc47320238b5dbfd363aba111a525943c8a34a1b657c6be934c3", size = 5221628 }, + { url = "https://files.pythonhosted.org/packages/32/23/526d19f7eb2b85da1f62cffb2556f647b049ebe2a5aa8d4d41b1fb2c7d36/lxml-6.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5592401cdf3dc682194727c1ddaa8aa0f3ddc57ca64fd03226a430b955eab6f6", size = 4949429 }, + { url = "https://files.pythonhosted.org/packages/ac/cc/f6be27a5c656a43a5344e064d9ae004d4dcb1d3c9d4f323c8189ddfe4d13/lxml-6.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58ffd35bd5425c3c3b9692d078bf7ab851441434531a7e517c4984d5634cd65b", size = 5087909 }, + { url = "https://files.pythonhosted.org/packages/3b/e6/8ec91b5bfbe6972458bc105aeb42088e50e4b23777170404aab5dfb0c62d/lxml-6.0.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f720a14aa102a38907c6d5030e3d66b3b680c3e6f6bc95473931ea3c00c59967", size = 5031713 }, + { url = "https://files.pythonhosted.org/packages/33/cf/05e78e613840a40e5be3e40d892c48ad3e475804db23d4bad751b8cadb9b/lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2a5e8d207311a0170aca0eb6b160af91adc29ec121832e4ac151a57743a1e1e", size = 5232417 }, + { url = "https://files.pythonhosted.org/packages/ac/8c/6b306b3e35c59d5f0b32e3b9b6b3b0739b32c0dc42a295415ba111e76495/lxml-6.0.0-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:2dd1cc3ea7e60bfb31ff32cafe07e24839df573a5e7c2d33304082a5019bcd58", size = 4681443 }, + { url = "https://files.pythonhosted.org/packages/59/43/0bd96bece5f7eea14b7220476835a60d2b27f8e9ca99c175f37c085cb154/lxml-6.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cfcf84f1defed7e5798ef4f88aa25fcc52d279be731ce904789aa7ccfb7e8d2", size = 5074542 }, + { url = "https://files.pythonhosted.org/packages/e2/3d/32103036287a8ca012d8518071f8852c68f2b3bfe048cef2a0202eb05910/lxml-6.0.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a52a4704811e2623b0324a18d41ad4b9fabf43ce5ff99b14e40a520e2190c851", size = 4729471 }, + { url = "https://files.pythonhosted.org/packages/ca/a8/7be5d17df12d637d81854bd8648cd329f29640a61e9a72a3f77add4a311b/lxml-6.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c16304bba98f48a28ae10e32a8e75c349dd742c45156f297e16eeb1ba9287a1f", size = 5256285 }, + { url = "https://files.pythonhosted.org/packages/cd/d0/6cb96174c25e0d749932557c8d51d60c6e292c877b46fae616afa23ed31a/lxml-6.0.0-cp311-cp311-win32.whl", hash = "sha256:f8d19565ae3eb956d84da3ef367aa7def14a2735d05bd275cd54c0301f0d0d6c", size = 3612004 }, + { url = "https://files.pythonhosted.org/packages/ca/77/6ad43b165dfc6dead001410adeb45e88597b25185f4479b7ca3b16a5808f/lxml-6.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:b2d71cdefda9424adff9a3607ba5bbfc60ee972d73c21c7e3c19e71037574816", size = 4003470 }, + { url = "https://files.pythonhosted.org/packages/a0/bc/4c50ec0eb14f932a18efc34fc86ee936a66c0eb5f2fe065744a2da8a68b2/lxml-6.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:8a2e76efbf8772add72d002d67a4c3d0958638696f541734304c7f28217a9cab", size = 3682477 }, + { url = "https://files.pythonhosted.org/packages/89/c3/d01d735c298d7e0ddcedf6f028bf556577e5ab4f4da45175ecd909c79378/lxml-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78718d8454a6e928470d511bf8ac93f469283a45c354995f7d19e77292f26108", size = 8429515 }, + { url = "https://files.pythonhosted.org/packages/06/37/0e3eae3043d366b73da55a86274a590bae76dc45aa004b7042e6f97803b1/lxml-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:84ef591495ffd3f9dcabffd6391db7bb70d7230b5c35ef5148354a134f56f2be", size = 4601387 }, + { url = "https://files.pythonhosted.org/packages/a3/28/e1a9a881e6d6e29dda13d633885d13acb0058f65e95da67841c8dd02b4a8/lxml-6.0.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:2930aa001a3776c3e2601cb8e0a15d21b8270528d89cc308be4843ade546b9ab", size = 5228928 }, + { url = "https://files.pythonhosted.org/packages/9a/55/2cb24ea48aa30c99f805921c1c7860c1f45c0e811e44ee4e6a155668de06/lxml-6.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:219e0431ea8006e15005767f0351e3f7f9143e793e58519dc97fe9e07fae5563", size = 4952289 }, + { url = 
"https://files.pythonhosted.org/packages/31/c0/b25d9528df296b9a3306ba21ff982fc5b698c45ab78b94d18c2d6ae71fd9/lxml-6.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bd5913b4972681ffc9718bc2d4c53cde39ef81415e1671ff93e9aa30b46595e7", size = 5111310 }, + { url = "https://files.pythonhosted.org/packages/e9/af/681a8b3e4f668bea6e6514cbcb297beb6de2b641e70f09d3d78655f4f44c/lxml-6.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:390240baeb9f415a82eefc2e13285016f9c8b5ad71ec80574ae8fa9605093cd7", size = 5025457 }, + { url = "https://files.pythonhosted.org/packages/99/b6/3a7971aa05b7be7dfebc7ab57262ec527775c2c3c5b2f43675cac0458cad/lxml-6.0.0-cp312-cp312-manylinux_2_27_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d6e200909a119626744dd81bae409fc44134389e03fbf1d68ed2a55a2fb10991", size = 5657016 }, + { url = "https://files.pythonhosted.org/packages/69/f8/693b1a10a891197143c0673fcce5b75fc69132afa81a36e4568c12c8faba/lxml-6.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ca50bd612438258a91b5b3788c6621c1f05c8c478e7951899f492be42defc0da", size = 5257565 }, + { url = "https://files.pythonhosted.org/packages/a8/96/e08ff98f2c6426c98c8964513c5dab8d6eb81dadcd0af6f0c538ada78d33/lxml-6.0.0-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:c24b8efd9c0f62bad0439283c2c795ef916c5a6b75f03c17799775c7ae3c0c9e", size = 4713390 }, + { url = "https://files.pythonhosted.org/packages/a8/83/6184aba6cc94d7413959f6f8f54807dc318fdcd4985c347fe3ea6937f772/lxml-6.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:afd27d8629ae94c5d863e32ab0e1d5590371d296b87dae0a751fb22bf3685741", size = 5066103 }, + { url = "https://files.pythonhosted.org/packages/ee/01/8bf1f4035852d0ff2e36a4d9aacdbcc57e93a6cd35a54e05fa984cdf73ab/lxml-6.0.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:54c4855eabd9fc29707d30141be99e5cd1102e7d2258d2892314cf4c110726c3", size = 4791428 }, + { url = "https://files.pythonhosted.org/packages/29/31/c0267d03b16954a85ed6b065116b621d37f559553d9339c7dcc4943a76f1/lxml-6.0.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c907516d49f77f6cd8ead1322198bdfd902003c3c330c77a1c5f3cc32a0e4d16", size = 5678523 }, + { url = "https://files.pythonhosted.org/packages/5c/f7/5495829a864bc5f8b0798d2b52a807c89966523140f3d6fa3a58ab6720ea/lxml-6.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36531f81c8214e293097cd2b7873f178997dae33d3667caaae8bdfb9666b76c0", size = 5281290 }, + { url = "https://files.pythonhosted.org/packages/79/56/6b8edb79d9ed294ccc4e881f4db1023af56ba451909b9ce79f2a2cd7c532/lxml-6.0.0-cp312-cp312-win32.whl", hash = "sha256:690b20e3388a7ec98e899fd54c924e50ba6693874aa65ef9cb53de7f7de9d64a", size = 3613495 }, + { url = "https://files.pythonhosted.org/packages/0b/1e/cc32034b40ad6af80b6fd9b66301fc0f180f300002e5c3eb5a6110a93317/lxml-6.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:310b719b695b3dd442cdfbbe64936b2f2e231bb91d998e99e6f0daf991a3eba3", size = 4014711 }, + { url = "https://files.pythonhosted.org/packages/55/10/dc8e5290ae4c94bdc1a4c55865be7e1f31dfd857a88b21cbba68b5fea61b/lxml-6.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:8cb26f51c82d77483cdcd2b4a53cda55bbee29b3c2f3ddeb47182a2a9064e4eb", size = 3674431 }, ] [[package]] name = "lxml-stubs" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/99/da/1a3a3e5d159b249fc2970d73437496b908de8e4716a089c69591b4ffa6fd/lxml-stubs-0.5.1.tar.gz", hash = 
"sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d", size = 14778, upload-time = "2024-01-10T09:37:46.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/da/1a3a3e5d159b249fc2970d73437496b908de8e4716a089c69591b4ffa6fd/lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d", size = 14778 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/c9/e0f8e4e6e8a69e5959b06499582dca6349db6769cc7fdfb8a02a7c75a9ae/lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272", size = 13584, upload-time = "2024-01-10T09:37:44.931Z" }, + { url = "https://files.pythonhosted.org/packages/1f/c9/e0f8e4e6e8a69e5959b06499582dca6349db6769cc7fdfb8a02a7c75a9ae/lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272", size = 13584 }, ] [[package]] name = "lz4" version = "4.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c6/5a/945f5086326d569f14c84ac6f7fcc3229f0b9b1e8cc536b951fd53dfb9e1/lz4-4.4.4.tar.gz", hash = "sha256:070fd0627ec4393011251a094e08ed9fdcc78cb4e7ab28f507638eee4e39abda", size = 171884, upload-time = "2025-04-01T22:55:58.62Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/5a/945f5086326d569f14c84ac6f7fcc3229f0b9b1e8cc536b951fd53dfb9e1/lz4-4.4.4.tar.gz", hash = "sha256:070fd0627ec4393011251a094e08ed9fdcc78cb4e7ab28f507638eee4e39abda", size = 171884 } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/e8/63843dc5ecb1529eb38e1761ceed04a0ad52a9ad8929ab8b7930ea2e4976/lz4-4.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ddfc7194cd206496c445e9e5b0c47f970ce982c725c87bd22de028884125b68f", size = 220898, upload-time = "2025-04-01T22:55:23.085Z" }, - { url = "https://files.pythonhosted.org/packages/e4/94/c53de5f07c7dc11cf459aab2a1d754f5df5f693bfacbbe1e4914bfd02f1e/lz4-4.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:714f9298c86f8e7278f1c6af23e509044782fa8220eb0260f8f8f1632f820550", size = 189685, upload-time = "2025-04-01T22:55:24.413Z" }, - { url = "https://files.pythonhosted.org/packages/fe/59/c22d516dd0352f2a3415d1f665ccef2f3e74ecec3ca6a8f061a38f97d50d/lz4-4.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8474c91de47733856c6686df3c4aca33753741da7e757979369c2c0d32918ba", size = 1239225, upload-time = "2025-04-01T22:55:25.737Z" }, - { url = "https://files.pythonhosted.org/packages/81/af/665685072e71f3f0e626221b7922867ec249cd8376aca761078c8f11f5da/lz4-4.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80dd27d7d680ea02c261c226acf1d41de2fd77af4fb2da62b278a9376e380de0", size = 1265881, upload-time = "2025-04-01T22:55:26.817Z" }, - { url = "https://files.pythonhosted.org/packages/90/04/b4557ae381d3aa451388a29755cc410066f5e2f78c847f66f154f4520a68/lz4-4.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b7d6dddfd01b49aedb940fdcaf32f41dc58c926ba35f4e31866aeec2f32f4f4", size = 1185593, upload-time = "2025-04-01T22:55:27.896Z" }, - { url = "https://files.pythonhosted.org/packages/7b/e4/03636979f4e8bf92c557f998ca98ee4e6ef92e92eaf0ed6d3c7f2524e790/lz4-4.4.4-cp311-cp311-win32.whl", hash = "sha256:4134b9fd70ac41954c080b772816bb1afe0c8354ee993015a83430031d686a4c", size = 88259, upload-time = "2025-04-01T22:55:29.03Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/f0/9efe53b4945441a5d2790d455134843ad86739855b7e6199977bf6dc8898/lz4-4.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:f5024d3ca2383470f7c4ef4d0ed8eabad0b22b23eeefde1c192cf1a38d5e9f78", size = 99916, upload-time = "2025-04-01T22:55:29.933Z" }, - { url = "https://files.pythonhosted.org/packages/87/c8/1675527549ee174b9e1db089f7ddfbb962a97314657269b1e0344a5eaf56/lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7", size = 89741, upload-time = "2025-04-01T22:55:31.184Z" }, - { url = "https://files.pythonhosted.org/packages/f7/2d/5523b4fabe11cd98f040f715728d1932eb7e696bfe94391872a823332b94/lz4-4.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:23ae267494fdd80f0d2a131beff890cf857f1b812ee72dbb96c3204aab725553", size = 220669, upload-time = "2025-04-01T22:55:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/91/06/1a5bbcacbfb48d8ee5b6eb3fca6aa84143a81d92946bdb5cd6b005f1863e/lz4-4.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff9f3a1ed63d45cb6514bfb8293005dc4141341ce3500abdfeb76124c0b9b2e", size = 189661, upload-time = "2025-04-01T22:55:33.413Z" }, - { url = "https://files.pythonhosted.org/packages/fa/08/39eb7ac907f73e11a69a11576a75a9e36406b3241c0ba41453a7eb842abb/lz4-4.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ea7f07329f85a8eda4d8cf937b87f27f0ac392c6400f18bea2c667c8b7f8ecc", size = 1238775, upload-time = "2025-04-01T22:55:34.835Z" }, - { url = "https://files.pythonhosted.org/packages/e9/26/05840fbd4233e8d23e88411a066ab19f1e9de332edddb8df2b6a95c7fddc/lz4-4.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ccab8f7f7b82f9fa9fc3b0ba584d353bd5aa818d5821d77d5b9447faad2aaad", size = 1265143, upload-time = "2025-04-01T22:55:35.933Z" }, - { url = "https://files.pythonhosted.org/packages/b7/5d/5f2db18c298a419932f3ab2023deb689863cf8fd7ed875b1c43492479af2/lz4-4.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43e9d48b2daf80e486213128b0763deed35bbb7a59b66d1681e205e1702d735", size = 1185032, upload-time = "2025-04-01T22:55:37.454Z" }, - { url = "https://files.pythonhosted.org/packages/c4/e6/736ab5f128694b0f6aac58343bcf37163437ac95997276cd0be3ea4c3342/lz4-4.4.4-cp312-cp312-win32.whl", hash = "sha256:33e01e18e4561b0381b2c33d58e77ceee850a5067f0ece945064cbaac2176962", size = 88284, upload-time = "2025-04-01T22:55:38.536Z" }, - { url = "https://files.pythonhosted.org/packages/40/b8/243430cb62319175070e06e3a94c4c7bd186a812e474e22148ae1290d47d/lz4-4.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d21d1a2892a2dcc193163dd13eaadabb2c1b803807a5117d8f8588b22eaf9f12", size = 99918, upload-time = "2025-04-01T22:55:39.628Z" }, - { url = "https://files.pythonhosted.org/packages/6c/e1/0686c91738f3e6c2e1a243e0fdd4371667c4d2e5009b0a3605806c2aa020/lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62", size = 89736, upload-time = "2025-04-01T22:55:40.5Z" }, + { url = "https://files.pythonhosted.org/packages/28/e8/63843dc5ecb1529eb38e1761ceed04a0ad52a9ad8929ab8b7930ea2e4976/lz4-4.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ddfc7194cd206496c445e9e5b0c47f970ce982c725c87bd22de028884125b68f", size = 220898 }, + { url = "https://files.pythonhosted.org/packages/e4/94/c53de5f07c7dc11cf459aab2a1d754f5df5f693bfacbbe1e4914bfd02f1e/lz4-4.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:714f9298c86f8e7278f1c6af23e509044782fa8220eb0260f8f8f1632f820550", size = 189685 }, + { url = "https://files.pythonhosted.org/packages/fe/59/c22d516dd0352f2a3415d1f665ccef2f3e74ecec3ca6a8f061a38f97d50d/lz4-4.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8474c91de47733856c6686df3c4aca33753741da7e757979369c2c0d32918ba", size = 1239225 }, + { url = "https://files.pythonhosted.org/packages/81/af/665685072e71f3f0e626221b7922867ec249cd8376aca761078c8f11f5da/lz4-4.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80dd27d7d680ea02c261c226acf1d41de2fd77af4fb2da62b278a9376e380de0", size = 1265881 }, + { url = "https://files.pythonhosted.org/packages/90/04/b4557ae381d3aa451388a29755cc410066f5e2f78c847f66f154f4520a68/lz4-4.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b7d6dddfd01b49aedb940fdcaf32f41dc58c926ba35f4e31866aeec2f32f4f4", size = 1185593 }, + { url = "https://files.pythonhosted.org/packages/7b/e4/03636979f4e8bf92c557f998ca98ee4e6ef92e92eaf0ed6d3c7f2524e790/lz4-4.4.4-cp311-cp311-win32.whl", hash = "sha256:4134b9fd70ac41954c080b772816bb1afe0c8354ee993015a83430031d686a4c", size = 88259 }, + { url = "https://files.pythonhosted.org/packages/07/f0/9efe53b4945441a5d2790d455134843ad86739855b7e6199977bf6dc8898/lz4-4.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:f5024d3ca2383470f7c4ef4d0ed8eabad0b22b23eeefde1c192cf1a38d5e9f78", size = 99916 }, + { url = "https://files.pythonhosted.org/packages/87/c8/1675527549ee174b9e1db089f7ddfbb962a97314657269b1e0344a5eaf56/lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7", size = 89741 }, + { url = "https://files.pythonhosted.org/packages/f7/2d/5523b4fabe11cd98f040f715728d1932eb7e696bfe94391872a823332b94/lz4-4.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:23ae267494fdd80f0d2a131beff890cf857f1b812ee72dbb96c3204aab725553", size = 220669 }, + { url = "https://files.pythonhosted.org/packages/91/06/1a5bbcacbfb48d8ee5b6eb3fca6aa84143a81d92946bdb5cd6b005f1863e/lz4-4.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff9f3a1ed63d45cb6514bfb8293005dc4141341ce3500abdfeb76124c0b9b2e", size = 189661 }, + { url = "https://files.pythonhosted.org/packages/fa/08/39eb7ac907f73e11a69a11576a75a9e36406b3241c0ba41453a7eb842abb/lz4-4.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ea7f07329f85a8eda4d8cf937b87f27f0ac392c6400f18bea2c667c8b7f8ecc", size = 1238775 }, + { url = "https://files.pythonhosted.org/packages/e9/26/05840fbd4233e8d23e88411a066ab19f1e9de332edddb8df2b6a95c7fddc/lz4-4.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ccab8f7f7b82f9fa9fc3b0ba584d353bd5aa818d5821d77d5b9447faad2aaad", size = 1265143 }, + { url = "https://files.pythonhosted.org/packages/b7/5d/5f2db18c298a419932f3ab2023deb689863cf8fd7ed875b1c43492479af2/lz4-4.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43e9d48b2daf80e486213128b0763deed35bbb7a59b66d1681e205e1702d735", size = 1185032 }, + { url = "https://files.pythonhosted.org/packages/c4/e6/736ab5f128694b0f6aac58343bcf37163437ac95997276cd0be3ea4c3342/lz4-4.4.4-cp312-cp312-win32.whl", hash = "sha256:33e01e18e4561b0381b2c33d58e77ceee850a5067f0ece945064cbaac2176962", size = 88284 }, + { url = 
"https://files.pythonhosted.org/packages/40/b8/243430cb62319175070e06e3a94c4c7bd186a812e474e22148ae1290d47d/lz4-4.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d21d1a2892a2dcc193163dd13eaadabb2c1b803807a5117d8f8588b22eaf9f12", size = 99918 }, + { url = "https://files.pythonhosted.org/packages/6c/e1/0686c91738f3e6c2e1a243e0fdd4371667c4d2e5009b0a3605806c2aa020/lz4-4.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:2f4f2965c98ab254feddf6b5072854a6935adab7bc81412ec4fe238f07b85f62", size = 89736 }, ] [[package]] @@ -3021,7 +3023,7 @@ dependencies = [ { name = "urllib3" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/bc/cb60d02c00996839bbd87444a97d0ba5ac271b1a324001562afb8f685251/mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb", size = 31660, upload-time = "2024-02-01T18:39:19.717Z" }, + { url = "https://files.pythonhosted.org/packages/5f/bc/cb60d02c00996839bbd87444a97d0ba5ac271b1a324001562afb8f685251/mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb", size = 31660 }, ] [[package]] @@ -3031,18 +3033,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509 }, ] [[package]] name = "markdown" version = "3.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/28/c5441a6642681d92de56063fa7984df56f783d3f1eba518dc3e7a253b606/Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8", size = 349398, upload-time = "2024-01-10T15:19:38.261Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/28/c5441a6642681d92de56063fa7984df56f783d3f1eba518dc3e7a253b606/Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8", size = 349398 } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/f4/f0031854de10a0bc7821ef9fca0b92ca0d7aa6fbfbf504c5473ba825e49c/Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd", size = 103870, upload-time = "2024-01-10T15:19:36.071Z" }, + { url = "https://files.pythonhosted.org/packages/42/f4/f0031854de10a0bc7821ef9fca0b92ca0d7aa6fbfbf504c5473ba825e49c/Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd", size = 
103870 }, ] [[package]] @@ -3052,37 +3054,37 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, ] [[package]] name = "markupsafe" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = 
"2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" 
}, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = 
"https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, ] [[package]] @@ -3092,18 +3094,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825 } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, + { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878 }, ] [[package]] name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] [[package]] @@ -3114,50 +3116,50 @@ dependencies = [ { name = "tqdm" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/b2/acc5024c8e8b6a0b034670b8e8af306ebd633ede777dcbf557eac4785937/milvus_lite-2.5.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6b014453200ba977be37ba660cb2d021030375fa6a35bc53c2e1d92980a0c512", size = 27934713, upload-time = "2025-06-30T04:23:37.028Z" }, - { url = "https://files.pythonhosted.org/packages/9b/2e/746f5bb1d6facd1e73eb4af6dd5efda11125b0f29d7908a097485ca6cad9/milvus_lite-2.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a2e031088bf308afe5f8567850412d618cfb05a65238ed1a6117f60decccc95a", size = 24421451, upload-time = "2025-06-30T04:23:51.747Z" }, - { url = "https://files.pythonhosted.org/packages/2e/cf/3d1fee5c16c7661cf53977067a34820f7269ed8ba99fe9cf35efc1700866/milvus_lite-2.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:a13277e9bacc6933dea172e42231f7e6135bd3bdb073dd2688ee180418abd8d9", size = 45337093, upload-time = "2025-06-30T04:24:06.706Z" }, - { url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911, 
upload-time = "2025-06-30T04:24:19.434Z" }, + { url = "https://files.pythonhosted.org/packages/a9/b2/acc5024c8e8b6a0b034670b8e8af306ebd633ede777dcbf557eac4785937/milvus_lite-2.5.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6b014453200ba977be37ba660cb2d021030375fa6a35bc53c2e1d92980a0c512", size = 27934713 }, + { url = "https://files.pythonhosted.org/packages/9b/2e/746f5bb1d6facd1e73eb4af6dd5efda11125b0f29d7908a097485ca6cad9/milvus_lite-2.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a2e031088bf308afe5f8567850412d618cfb05a65238ed1a6117f60decccc95a", size = 24421451 }, + { url = "https://files.pythonhosted.org/packages/2e/cf/3d1fee5c16c7661cf53977067a34820f7269ed8ba99fe9cf35efc1700866/milvus_lite-2.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:a13277e9bacc6933dea172e42231f7e6135bd3bdb073dd2688ee180418abd8d9", size = 45337093 }, + { url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911 }, ] [[package]] name = "mmh3" version = "5.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1fc6888c74cbd8abad1292dde2ddfcf8fc059e114c97dd6bf16d12f36293/mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c", size = 33728, upload-time = "2025-01-25T08:39:43.386Z" } +sdist = { url = "https://files.pythonhosted.org/packages/47/1b/1fc6888c74cbd8abad1292dde2ddfcf8fc059e114c97dd6bf16d12f36293/mmh3-5.1.0.tar.gz", hash = "sha256:136e1e670500f177f49ec106a4ebf0adf20d18d96990cc36ea492c651d2b406c", size = 33728 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/09/fda7af7fe65928262098382e3bf55950cfbf67d30bf9e47731bf862161e9/mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d", size = 56098, upload-time = "2025-01-25T08:38:22.917Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ab/84c7bc3f366d6f3bd8b5d9325a10c367685bc17c26dac4c068e2001a4671/mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7", size = 40513, upload-time = "2025-01-25T08:38:25.079Z" }, - { url = "https://files.pythonhosted.org/packages/4f/21/25ea58ca4a652bdc83d1528bec31745cce35802381fb4fe3c097905462d2/mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1", size = 40112, upload-time = "2025-01-25T08:38:25.947Z" }, - { url = "https://files.pythonhosted.org/packages/bd/78/4f12f16ae074ddda6f06745254fdb50f8cf3c85b0bbf7eaca58bed84bf58/mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894", size = 102632, upload-time = "2025-01-25T08:38:26.939Z" }, - { url = "https://files.pythonhosted.org/packages/48/11/8f09dc999cf2a09b6138d8d7fc734efb7b7bfdd9adb9383380941caadff0/mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a", size = 108884, upload-time = "2025-01-25T08:38:29.159Z" }, - { url = "https://files.pythonhosted.org/packages/bd/91/e59a66538a3364176f6c3f7620eee0ab195bfe26f89a95cbcc7a1fb04b28/mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769", size = 106835, upload-time = "2025-01-25T08:38:33.04Z" }, - { url = "https://files.pythonhosted.org/packages/25/14/b85836e21ab90e5cddb85fe79c494ebd8f81d96a87a664c488cc9277668b/mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2", size = 93688, upload-time = "2025-01-25T08:38:34.987Z" }, - { url = "https://files.pythonhosted.org/packages/ac/aa/8bc964067df9262740c95e4cde2d19f149f2224f426654e14199a9e47df6/mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a", size = 101569, upload-time = "2025-01-25T08:38:35.983Z" }, - { url = "https://files.pythonhosted.org/packages/70/b6/1fb163cbf919046a64717466c00edabebece3f95c013853fec76dbf2df92/mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3", size = 98483, upload-time = "2025-01-25T08:38:38.198Z" }, - { url = "https://files.pythonhosted.org/packages/70/49/ba64c050dd646060f835f1db6b2cd60a6485f3b0ea04976e7a29ace7312e/mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33", size = 96496, upload-time = "2025-01-25T08:38:39.257Z" }, - { url = "https://files.pythonhosted.org/packages/9e/07/f2751d6a0b535bb865e1066e9c6b80852571ef8d61bce7eb44c18720fbfc/mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7", size = 105109, upload-time = "2025-01-25T08:38:40.395Z" }, - { url = "https://files.pythonhosted.org/packages/b7/02/30360a5a66f7abba44596d747cc1e6fb53136b168eaa335f63454ab7bb79/mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a", size = 98231, upload-time = "2025-01-25T08:38:42.141Z" }, - { url = "https://files.pythonhosted.org/packages/8c/60/8526b0c750ff4d7ae1266e68b795f14b97758a1d9fcc19f6ecabf9c55656/mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258", size = 97548, upload-time = "2025-01-25T08:38:43.402Z" }, - { url = "https://files.pythonhosted.org/packages/6d/4c/26e1222aca65769280d5427a1ce5875ef4213449718c8f03958d0bf91070/mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372", size = 40810, upload-time = "2025-01-25T08:38:45.143Z" }, - { url = "https://files.pythonhosted.org/packages/98/d5/424ba95062d1212ea615dc8debc8d57983f2242d5e6b82e458b89a117a1e/mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759", size = 41476, upload-time = "2025-01-25T08:38:46.029Z" }, - { url = "https://files.pythonhosted.org/packages/bd/08/0315ccaf087ba55bb19a6dd3b1e8acd491e74ce7f5f9c4aaa06a90d66441/mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1", size = 38880, upload-time = "2025-01-25T08:38:47.035Z" }, - { url = "https://files.pythonhosted.org/packages/f4/47/e5f452bdf16028bfd2edb4e2e35d0441e4a4740f30e68ccd4cfd2fb2c57e/mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d", size = 
56152, upload-time = "2025-01-25T08:38:47.902Z" }, - { url = "https://files.pythonhosted.org/packages/60/38/2132d537dc7a7fdd8d2e98df90186c7fcdbd3f14f95502a24ba443c92245/mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae", size = 40564, upload-time = "2025-01-25T08:38:48.839Z" }, - { url = "https://files.pythonhosted.org/packages/c0/2a/c52cf000581bfb8d94794f58865658e7accf2fa2e90789269d4ae9560b16/mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322", size = 40104, upload-time = "2025-01-25T08:38:49.773Z" }, - { url = "https://files.pythonhosted.org/packages/83/33/30d163ce538c54fc98258db5621447e3ab208d133cece5d2577cf913e708/mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00", size = 102634, upload-time = "2025-01-25T08:38:51.5Z" }, - { url = "https://files.pythonhosted.org/packages/94/5c/5a18acb6ecc6852be2d215c3d811aa61d7e425ab6596be940877355d7f3e/mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06", size = 108888, upload-time = "2025-01-25T08:38:52.542Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f6/11c556324c64a92aa12f28e221a727b6e082e426dc502e81f77056f6fc98/mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968", size = 106968, upload-time = "2025-01-25T08:38:54.286Z" }, - { url = "https://files.pythonhosted.org/packages/5d/61/ca0c196a685aba7808a5c00246f17b988a9c4f55c594ee0a02c273e404f3/mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83", size = 93771, upload-time = "2025-01-25T08:38:55.576Z" }, - { url = "https://files.pythonhosted.org/packages/b4/55/0927c33528710085ee77b808d85bbbafdb91a1db7c8eaa89cac16d6c513e/mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd", size = 101726, upload-time = "2025-01-25T08:38:56.654Z" }, - { url = "https://files.pythonhosted.org/packages/49/39/a92c60329fa470f41c18614a93c6cd88821412a12ee78c71c3f77e1cfc2d/mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559", size = 98523, upload-time = "2025-01-25T08:38:57.662Z" }, - { url = "https://files.pythonhosted.org/packages/81/90/26adb15345af8d9cf433ae1b6adcf12e0a4cad1e692de4fa9f8e8536c5ae/mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63", size = 96628, upload-time = "2025-01-25T08:38:59.505Z" }, - { url = "https://files.pythonhosted.org/packages/8a/4d/340d1e340df972a13fd4ec84c787367f425371720a1044220869c82364e9/mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3", size = 105190, upload-time = "2025-01-25T08:39:00.483Z" }, - { url = "https://files.pythonhosted.org/packages/d3/7c/65047d1cccd3782d809936db446430fc7758bda9def5b0979887e08302a2/mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b", size = 98439, upload-time = "2025-01-25T08:39:01.484Z" }, - { url = "https://files.pythonhosted.org/packages/72/d2/3c259d43097c30f062050f7e861075099404e8886b5d4dd3cebf180d6e02/mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df", size = 97780, upload-time = "2025-01-25T08:39:02.444Z" }, - { url = "https://files.pythonhosted.org/packages/29/29/831ea8d4abe96cdb3e28b79eab49cac7f04f9c6b6e36bfc686197ddba09d/mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76", size = 40835, upload-time = "2025-01-25T08:39:03.369Z" }, - { url = "https://files.pythonhosted.org/packages/12/dd/7cbc30153b73f08eeac43804c1dbc770538a01979b4094edbe1a4b8eb551/mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776", size = 41509, upload-time = "2025-01-25T08:39:04.284Z" }, - { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888, upload-time = "2025-01-25T08:39:05.174Z" }, + { url = "https://files.pythonhosted.org/packages/56/09/fda7af7fe65928262098382e3bf55950cfbf67d30bf9e47731bf862161e9/mmh3-5.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b529dcda3f951ff363a51d5866bc6d63cf57f1e73e8961f864ae5010647079d", size = 56098 }, + { url = "https://files.pythonhosted.org/packages/0c/ab/84c7bc3f366d6f3bd8b5d9325a10c367685bc17c26dac4c068e2001a4671/mmh3-5.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db1079b3ace965e562cdfc95847312f9273eb2ad3ebea983435c8423e06acd7", size = 40513 }, + { url = "https://files.pythonhosted.org/packages/4f/21/25ea58ca4a652bdc83d1528bec31745cce35802381fb4fe3c097905462d2/mmh3-5.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:22d31e3a0ff89b8eb3b826d6fc8e19532998b2aa6b9143698043a1268da413e1", size = 40112 }, + { url = "https://files.pythonhosted.org/packages/bd/78/4f12f16ae074ddda6f06745254fdb50f8cf3c85b0bbf7eaca58bed84bf58/mmh3-5.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2139bfbd354cd6cb0afed51c4b504f29bcd687a3b1460b7e89498329cc28a894", size = 102632 }, + { url = "https://files.pythonhosted.org/packages/48/11/8f09dc999cf2a09b6138d8d7fc734efb7b7bfdd9adb9383380941caadff0/mmh3-5.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c8105c6a435bc2cd6ea2ef59558ab1a2976fd4a4437026f562856d08996673a", size = 108884 }, + { url = "https://files.pythonhosted.org/packages/bd/91/e59a66538a3364176f6c3f7620eee0ab195bfe26f89a95cbcc7a1fb04b28/mmh3-5.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57730067174a7f36fcd6ce012fe359bd5510fdaa5fe067bc94ed03e65dafb769", size = 106835 }, + { url = "https://files.pythonhosted.org/packages/25/14/b85836e21ab90e5cddb85fe79c494ebd8f81d96a87a664c488cc9277668b/mmh3-5.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde80eb196d7fdc765a318604ded74a4378f02c5b46c17aa48a27d742edaded2", size = 93688 }, + { url = "https://files.pythonhosted.org/packages/ac/aa/8bc964067df9262740c95e4cde2d19f149f2224f426654e14199a9e47df6/mmh3-5.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e9c8eddcb441abddeb419c16c56fd74b3e2df9e57f7aa2903221996718435c7a", size = 101569 }, + { url = "https://files.pythonhosted.org/packages/70/b6/1fb163cbf919046a64717466c00edabebece3f95c013853fec76dbf2df92/mmh3-5.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:99e07e4acafbccc7a28c076a847fb060ffc1406036bc2005acb1b2af620e53c3", size = 98483 }, + { url = "https://files.pythonhosted.org/packages/70/49/ba64c050dd646060f835f1db6b2cd60a6485f3b0ea04976e7a29ace7312e/mmh3-5.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e25ba5b530e9a7d65f41a08d48f4b3fedc1e89c26486361166a5544aa4cad33", size = 96496 }, + { url = "https://files.pythonhosted.org/packages/9e/07/f2751d6a0b535bb865e1066e9c6b80852571ef8d61bce7eb44c18720fbfc/mmh3-5.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bb9bf7475b4d99156ce2f0cf277c061a17560c8c10199c910a680869a278ddc7", size = 105109 }, + { url = "https://files.pythonhosted.org/packages/b7/02/30360a5a66f7abba44596d747cc1e6fb53136b168eaa335f63454ab7bb79/mmh3-5.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a1b0878dd281ea3003368ab53ff6f568e175f1b39f281df1da319e58a19c23a", size = 98231 }, + { url = "https://files.pythonhosted.org/packages/8c/60/8526b0c750ff4d7ae1266e68b795f14b97758a1d9fcc19f6ecabf9c55656/mmh3-5.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:25f565093ac8b8aefe0f61f8f95c9a9d11dd69e6a9e9832ff0d293511bc36258", size = 97548 }, + { url = "https://files.pythonhosted.org/packages/6d/4c/26e1222aca65769280d5427a1ce5875ef4213449718c8f03958d0bf91070/mmh3-5.1.0-cp311-cp311-win32.whl", hash = "sha256:1e3554d8792387eac73c99c6eaea0b3f884e7130eb67986e11c403e4f9b6d372", size = 40810 }, + { url = "https://files.pythonhosted.org/packages/98/d5/424ba95062d1212ea615dc8debc8d57983f2242d5e6b82e458b89a117a1e/mmh3-5.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8ad777a48197882492af50bf3098085424993ce850bdda406a358b6ab74be759", size = 41476 }, + { url = "https://files.pythonhosted.org/packages/bd/08/0315ccaf087ba55bb19a6dd3b1e8acd491e74ce7f5f9c4aaa06a90d66441/mmh3-5.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f29dc4efd99bdd29fe85ed6c81915b17b2ef2cf853abf7213a48ac6fb3eaabe1", size = 38880 }, + { url = "https://files.pythonhosted.org/packages/f4/47/e5f452bdf16028bfd2edb4e2e35d0441e4a4740f30e68ccd4cfd2fb2c57e/mmh3-5.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:45712987367cb9235026e3cbf4334670522a97751abfd00b5bc8bfa022c3311d", size = 56152 }, + { url = "https://files.pythonhosted.org/packages/60/38/2132d537dc7a7fdd8d2e98df90186c7fcdbd3f14f95502a24ba443c92245/mmh3-5.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b1020735eb35086ab24affbea59bb9082f7f6a0ad517cb89f0fc14f16cea4dae", size = 40564 }, + { url = "https://files.pythonhosted.org/packages/c0/2a/c52cf000581bfb8d94794f58865658e7accf2fa2e90789269d4ae9560b16/mmh3-5.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:babf2a78ce5513d120c358722a2e3aa7762d6071cd10cede026f8b32452be322", size = 40104 }, + { url = "https://files.pythonhosted.org/packages/83/33/30d163ce538c54fc98258db5621447e3ab208d133cece5d2577cf913e708/mmh3-5.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4f47f58cd5cbef968c84a7c1ddc192fef0a36b48b0b8a3cb67354531aa33b00", size = 102634 }, + { url = "https://files.pythonhosted.org/packages/94/5c/5a18acb6ecc6852be2d215c3d811aa61d7e425ab6596be940877355d7f3e/mmh3-5.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2044a601c113c981f2c1e14fa33adc9b826c9017034fe193e9eb49a6882dbb06", size = 
108888 }, + { url = "https://files.pythonhosted.org/packages/1f/f6/11c556324c64a92aa12f28e221a727b6e082e426dc502e81f77056f6fc98/mmh3-5.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c94d999c9f2eb2da44d7c2826d3fbffdbbbbcde8488d353fee7c848ecc42b968", size = 106968 }, + { url = "https://files.pythonhosted.org/packages/5d/61/ca0c196a685aba7808a5c00246f17b988a9c4f55c594ee0a02c273e404f3/mmh3-5.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a015dcb24fa0c7a78f88e9419ac74f5001c1ed6a92e70fd1803f74afb26a4c83", size = 93771 }, + { url = "https://files.pythonhosted.org/packages/b4/55/0927c33528710085ee77b808d85bbbafdb91a1db7c8eaa89cac16d6c513e/mmh3-5.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457da019c491a2d20e2022c7d4ce723675e4c081d9efc3b4d8b9f28a5ea789bd", size = 101726 }, + { url = "https://files.pythonhosted.org/packages/49/39/a92c60329fa470f41c18614a93c6cd88821412a12ee78c71c3f77e1cfc2d/mmh3-5.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71408579a570193a4ac9c77344d68ddefa440b00468a0b566dcc2ba282a9c559", size = 98523 }, + { url = "https://files.pythonhosted.org/packages/81/90/26adb15345af8d9cf433ae1b6adcf12e0a4cad1e692de4fa9f8e8536c5ae/mmh3-5.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8b3a04bc214a6e16c81f02f855e285c6df274a2084787eeafaa45f2fbdef1b63", size = 96628 }, + { url = "https://files.pythonhosted.org/packages/8a/4d/340d1e340df972a13fd4ec84c787367f425371720a1044220869c82364e9/mmh3-5.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:832dae26a35514f6d3c1e267fa48e8de3c7b978afdafa0529c808ad72e13ada3", size = 105190 }, + { url = "https://files.pythonhosted.org/packages/d3/7c/65047d1cccd3782d809936db446430fc7758bda9def5b0979887e08302a2/mmh3-5.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bf658a61fc92ef8a48945ebb1076ef4ad74269e353fffcb642dfa0890b13673b", size = 98439 }, + { url = "https://files.pythonhosted.org/packages/72/d2/3c259d43097c30f062050f7e861075099404e8886b5d4dd3cebf180d6e02/mmh3-5.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3313577453582b03383731b66447cdcdd28a68f78df28f10d275d7d19010c1df", size = 97780 }, + { url = "https://files.pythonhosted.org/packages/29/29/831ea8d4abe96cdb3e28b79eab49cac7f04f9c6b6e36bfc686197ddba09d/mmh3-5.1.0-cp312-cp312-win32.whl", hash = "sha256:1d6508504c531ab86c4424b5a5ff07c1132d063863339cf92f6657ff7a580f76", size = 40835 }, + { url = "https://files.pythonhosted.org/packages/12/dd/7cbc30153b73f08eeac43804c1dbc770538a01979b4094edbe1a4b8eb551/mmh3-5.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:aa75981fcdf3f21759d94f2c81b6a6e04a49dfbcdad88b152ba49b8e20544776", size = 41509 }, + { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888 }, ] [[package]] @@ -3169,18 +3171,18 @@ dependencies = [ { name = "pymysql" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/03/2ef4de1c8d970288f018b6b63439563336c51f26f57706dc51e4c395fdbe/mo_vector-0.1.13.tar.gz", hash = "sha256:8526c37e99157a0c9866bf3868600e877980464eccb212f8ea71971c0630eb69", size = 16926, upload-time = "2025-06-18T09:27:27.906Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/01/03/2ef4de1c8d970288f018b6b63439563336c51f26f57706dc51e4c395fdbe/mo_vector-0.1.13.tar.gz", hash = "sha256:8526c37e99157a0c9866bf3868600e877980464eccb212f8ea71971c0630eb69", size = 16926 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/e7/514f5cf5909f96adf09b78146a9e5c92f82abcc212bc3f88456bf2640c23/mo_vector-0.1.13-py3-none-any.whl", hash = "sha256:f7d619acc3e92ed59631e6b3a12508240e22cf428c87daf022c0d87fbd5da459", size = 20091, upload-time = "2025-06-18T09:27:26.899Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e7/514f5cf5909f96adf09b78146a9e5c92f82abcc212bc3f88456bf2640c23/mo_vector-0.1.13-py3-none-any.whl", hash = "sha256:f7d619acc3e92ed59631e6b3a12508240e22cf428c87daf022c0d87fbd5da459", size = 20091 }, ] [[package]] name = "mpmath" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, ] [[package]] @@ -3192,9 +3194,9 @@ dependencies = [ { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3f/90/81dcc50f0be11a8c4dcbae1a9f761a26e5f905231330a7cacc9f04ec4c61/msal-1.32.3.tar.gz", hash = "sha256:5eea038689c78a5a70ca8ecbe1245458b55a857bd096efb6989c69ba15985d35", size = 151449, upload-time = "2025-04-25T13:12:34.204Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/90/81dcc50f0be11a8c4dcbae1a9f761a26e5f905231330a7cacc9f04ec4c61/msal-1.32.3.tar.gz", hash = "sha256:5eea038689c78a5a70ca8ecbe1245458b55a857bd096efb6989c69ba15985d35", size = 151449 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/bf/81516b9aac7fd867709984d08eb4db1d2e3fe1df795c8e442cde9b568962/msal-1.32.3-py3-none-any.whl", hash = "sha256:b2798db57760b1961b142f027ffb7c8169536bf77316e99a0df5c4aaebb11569", size = 115358, upload-time = "2025-04-25T13:12:33.034Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/81516b9aac7fd867709984d08eb4db1d2e3fe1df795c8e442cde9b568962/msal-1.32.3-py3-none-any.whl", hash = "sha256:b2798db57760b1961b142f027ffb7c8169536bf77316e99a0df5c4aaebb11569", size = 115358 }, ] [[package]] @@ -3204,9 +3206,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "msal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time 
= "2025-03-14T23:51:03.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583 }, ] [[package]] @@ -3220,80 +3222,80 @@ dependencies = [ { name = "requests" }, { name = "requests-oauthlib" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332, upload-time = "2022-06-13T22:41:25.111Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/77/8397c8fb8fc257d8ea0fa66f8068e073278c65f05acb17dcb22a02bfdc42/msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9", size = 175332 } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384, upload-time = "2022-06-13T22:41:22.42Z" }, + { url = "https://files.pythonhosted.org/packages/15/cf/f2966a2638144491f8696c27320d5219f48a072715075d168b31d3237720/msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32", size = 85384 }, ] [[package]] name = "multidict" version = "6.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006 } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", size = 76445, upload-time = "2025-06-30T15:51:24.01Z" }, - { url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", size = 44610, upload-time = "2025-06-30T15:51:25.158Z" }, - { url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", size = 44267, upload-time = "2025-06-30T15:51:26.326Z" }, - { url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", size = 230004, upload-time = "2025-06-30T15:51:27.491Z" }, - { url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", size = 247196, upload-time = "2025-06-30T15:51:28.762Z" }, - { url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", size = 225337, upload-time = "2025-06-30T15:51:30.025Z" }, - { url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", size = 257079, upload-time = "2025-06-30T15:51:31.716Z" }, - { url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", size = 255461, upload-time = "2025-06-30T15:51:33.029Z" }, - { url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", size = 246611, upload-time = "2025-06-30T15:51:34.47Z" }, - { url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", size = 243102, upload-time = "2025-06-30T15:51:36.525Z" }, - { url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", size = 238693, upload-time = "2025-06-30T15:51:38.278Z" }, - { url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", size = 246582, upload-time = "2025-06-30T15:51:39.709Z" }, - { url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", size = 253355, upload-time = "2025-06-30T15:51:41.013Z" }, - { 
url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", size = 247774, upload-time = "2025-06-30T15:51:42.291Z" }, - { url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", size = 242275, upload-time = "2025-06-30T15:51:43.642Z" }, - { url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", size = 41290, upload-time = "2025-06-30T15:51:45.264Z" }, - { url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", size = 45942, upload-time = "2025-06-30T15:51:46.377Z" }, - { url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", size = 42880, upload-time = "2025-06-30T15:51:47.561Z" }, - { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, - { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, - { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, - { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, - { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, - { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, - { 
url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, - { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, - { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, - { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, - { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, - { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, - { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, - { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, - { 
url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, - { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, + { url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", size = 76445 }, + { url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", size = 44610 }, + { url = "https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", size = 44267 }, + { url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", size = 230004 }, + { url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", size = 247196 }, + { url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", size = 225337 }, + { url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", size = 257079 }, + { url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", size = 255461 }, + { url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", size = 246611 }, + { url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", size = 243102 }, + { url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", size = 238693 }, + { url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", size = 246582 }, + { url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", size = 253355 }, + { url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", size = 247774 }, + { url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", size = 242275 }, + { url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", size = 41290 }, + { url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", size = 45942 }, + { url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", size = 42880 }, + { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514 }, + { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394 }, + { url = "https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590 }, + { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292 }, + { url = 
"https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385 }, + { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328 }, + { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057 }, + { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341 }, + { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081 }, + { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581 }, + { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750 }, + { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548 }, + { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718 }, + { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", size = 259603 }, + { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351 }, + { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860 }, + { url = 
"https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982 }, + { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210 }, + { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313 }, ] [[package]] name = "mypy" -version = "1.16.1" +version = "1.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, { name = "pathspec" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/69/92c7fa98112e4d9eb075a239caa4ef4649ad7d441545ccffbd5e34607cbb/mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab", size = 3324747, upload-time = "2025-06-16T16:51:35.145Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/61/ec1245aa1c325cb7a6c0f8570a2eee3bfc40fa90d19b1267f8e50b5c8645/mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc", size = 10890557, upload-time = "2025-06-16T16:37:21.421Z" }, - { url = "https://files.pythonhosted.org/packages/6b/bb/6eccc0ba0aa0c7a87df24e73f0ad34170514abd8162eb0c75fd7128171fb/mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782", size = 10012921, upload-time = "2025-06-16T16:51:28.659Z" }, - { url = "https://files.pythonhosted.org/packages/5f/80/b337a12e2006715f99f529e732c5f6a8c143bb58c92bb142d5ab380963a5/mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507", size = 11802887, upload-time = "2025-06-16T16:50:53.627Z" }, - { url = "https://files.pythonhosted.org/packages/d9/59/f7af072d09793d581a745a25737c7c0a945760036b16aeb620f658a017af/mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca", size = 12531658, upload-time = "2025-06-16T16:33:55.002Z" }, - { url = "https://files.pythonhosted.org/packages/82/c4/607672f2d6c0254b94a646cfc45ad589dd71b04aa1f3d642b840f7cce06c/mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4", size = 12732486, upload-time = "2025-06-16T16:37:03.301Z" }, - { url = "https://files.pythonhosted.org/packages/b6/5e/136555ec1d80df877a707cebf9081bd3a9f397dedc1ab9750518d87489ec/mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6", size = 9479482, upload-time = "2025-06-16T16:47:37.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/d6/39482e5fcc724c15bf6280ff5806548c7185e0c090712a3736ed4d07e8b7/mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d", size = 11066493, upload-time = "2025-06-16T16:47:01.683Z" }, - { url = "https://files.pythonhosted.org/packages/e6/e5/26c347890efc6b757f4d5bb83f4a0cf5958b8cf49c938ac99b8b72b420a6/mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9", size = 10081687, upload-time = "2025-06-16T16:48:19.367Z" }, - { url = "https://files.pythonhosted.org/packages/44/c7/b5cb264c97b86914487d6a24bd8688c0172e37ec0f43e93b9691cae9468b/mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79", size = 11839723, upload-time = "2025-06-16T16:49:20.912Z" }, - { url = "https://files.pythonhosted.org/packages/15/f8/491997a9b8a554204f834ed4816bda813aefda31cf873bb099deee3c9a99/mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15", size = 12722980, upload-time = "2025-06-16T16:37:40.929Z" }, - { url = "https://files.pythonhosted.org/packages/df/f0/2bd41e174b5fd93bc9de9a28e4fb673113633b8a7f3a607fa4a73595e468/mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd", size = 12903328, upload-time = "2025-06-16T16:34:35.099Z" }, - { url = "https://files.pythonhosted.org/packages/61/81/5572108a7bec2c46b8aff7e9b524f371fe6ab5efb534d38d6b37b5490da8/mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b", size = 9562321, upload-time = "2025-06-16T16:48:58.823Z" }, - { url = "https://files.pythonhosted.org/packages/cf/d3/53e684e78e07c1a2bf7105715e5edd09ce951fc3f47cf9ed095ec1b7a037/mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37", size = 2265923, upload-time = "2025-06-16T16:48:02.366Z" }, + { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009 }, + { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482 }, + { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883 }, + { url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215 }, + { url = 
"https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956 }, + { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307 }, + { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295 }, + { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355 }, + { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285 }, + { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895 }, + { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025 }, + { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664 }, + { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411 }, ] [[package]] @@ -3303,27 +3305,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/6d/65c684441a91cd16f00e442a7ebb34bba5ee335ba8bb9ec5ad8f08e71e27/mypy_boto3_bedrock_runtime-1.39.0.tar.gz", hash = "sha256:f3eb0972bd3801013470cffd9dd094ff93ddcd6fae7ca17ec5bad1e357ab8117", size = 26901, upload-time = "2025-06-30T19:34:15.089Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/6d/65c684441a91cd16f00e442a7ebb34bba5ee335ba8bb9ec5ad8f08e71e27/mypy_boto3_bedrock_runtime-1.39.0.tar.gz", hash = "sha256:f3eb0972bd3801013470cffd9dd094ff93ddcd6fae7ca17ec5bad1e357ab8117", size = 26901 } wheels = [ - { url = "https://files.pythonhosted.org/packages/05/92/ed01279bf155a1afe78a57d8e34f22604be66f59cb2b7c2f26e73715ced5/mypy_boto3_bedrock_runtime-1.39.0-py3-none-any.whl", hash = "sha256:2925d76b72ec77a7dc2169a0483c36567078de74cf2fcfff084e87b0e2c5ca8b", size = 32623, upload-time = "2025-06-30T19:34:13.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/92/ed01279bf155a1afe78a57d8e34f22604be66f59cb2b7c2f26e73715ced5/mypy_boto3_bedrock_runtime-1.39.0-py3-none-any.whl", hash = "sha256:2925d76b72ec77a7dc2169a0483c36567078de74cf2fcfff084e87b0e2c5ca8b", size = 32623 }, ] [[package]] name = "mypy-extensions" version = "1.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343 } wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 }, ] [[package]] name = "nest-asyncio" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, ] [[package]] @@ -3336,9 +3338,9 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 
1505442, upload-time = "2024-08-18T19:48:21.909Z" }, + { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442 }, ] [[package]] @@ -3349,18 +3351,18 @@ dependencies = [ { name = "llvmlite" }, { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615, upload-time = "2025-04-09T02:58:07.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/a0/e21f57604304aa03ebb8e098429222722ad99176a4f979d34af1d1ee80da/numba-0.61.2.tar.gz", hash = "sha256:8750ee147940a6637b80ecf7f95062185ad8726c8c28a2295b8ec1160a196f7d", size = 2820615 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825, upload-time = "2025-04-09T02:57:43.442Z" }, - { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695, upload-time = "2025-04-09T02:57:44.968Z" }, - { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227, upload-time = "2025-04-09T02:57:46.63Z" }, - { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422, upload-time = "2025-04-09T02:57:48.222Z" }, - { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505, upload-time = "2025-04-09T02:57:50.108Z" }, - { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626, upload-time = "2025-04-09T02:57:51.857Z" }, - { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287, upload-time = "2025-04-09T02:57:53.658Z" }, - { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928, upload-time = "2025-04-09T02:57:55.206Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115, upload-time = "2025-04-09T02:57:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929, upload-time = "2025-04-09T02:57:58.45Z" }, + { url = "https://files.pythonhosted.org/packages/3f/97/c99d1056aed767503c228f7099dc11c402906b42a4757fec2819329abb98/numba-0.61.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:efd3db391df53aaa5cfbee189b6c910a5b471488749fd6606c3f33fc984c2ae2", size = 2775825 }, + { url = "https://files.pythonhosted.org/packages/95/9e/63c549f37136e892f006260c3e2613d09d5120672378191f2dc387ba65a2/numba-0.61.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49c980e4171948ffebf6b9a2520ea81feed113c1f4890747ba7f59e74be84b1b", size = 2778695 }, + { url = "https://files.pythonhosted.org/packages/97/c8/8740616c8436c86c1b9a62e72cb891177d2c34c2d24ddcde4c390371bf4c/numba-0.61.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3945615cd73c2c7eba2a85ccc9c1730c21cd3958bfcf5a44302abae0fb07bb60", size = 3829227 }, + { url = "https://files.pythonhosted.org/packages/fc/06/66e99ae06507c31d15ff3ecd1f108f2f59e18b6e08662cd5f8a5853fbd18/numba-0.61.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbfdf4eca202cebade0b7d43896978e146f39398909a42941c9303f82f403a18", size = 3523422 }, + { url = "https://files.pythonhosted.org/packages/0f/a4/2b309a6a9f6d4d8cfba583401c7c2f9ff887adb5d54d8e2e130274c0973f/numba-0.61.2-cp311-cp311-win_amd64.whl", hash = "sha256:76bcec9f46259cedf888041b9886e257ae101c6268261b19fda8cfbc52bec9d1", size = 2831505 }, + { url = "https://files.pythonhosted.org/packages/b4/a0/c6b7b9c615cfa3b98c4c63f4316e3f6b3bbe2387740277006551784218cd/numba-0.61.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:34fba9406078bac7ab052efbf0d13939426c753ad72946baaa5bf9ae0ebb8dd2", size = 2776626 }, + { url = "https://files.pythonhosted.org/packages/92/4a/fe4e3c2ecad72d88f5f8cd04e7f7cff49e718398a2fac02d2947480a00ca/numba-0.61.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ddce10009bc097b080fc96876d14c051cc0c7679e99de3e0af59014dab7dfe8", size = 2779287 }, + { url = "https://files.pythonhosted.org/packages/9a/2d/e518df036feab381c23a624dac47f8445ac55686ec7f11083655eb707da3/numba-0.61.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b1bb509d01f23d70325d3a5a0e237cbc9544dd50e50588bc581ba860c213546", size = 3885928 }, + { url = "https://files.pythonhosted.org/packages/10/0f/23cced68ead67b75d77cfcca3df4991d1855c897ee0ff3fe25a56ed82108/numba-0.61.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48a53a3de8f8793526cbe330f2a39fe9a6638efcbf11bd63f3d2f9757ae345cd", size = 3577115 }, + { url = "https://files.pythonhosted.org/packages/68/1d/ddb3e704c5a8fb90142bf9dc195c27db02a08a99f037395503bfbc1d14b3/numba-0.61.2-cp312-cp312-win_amd64.whl", hash = "sha256:97cf4f12c728cf77c9c1d7c23707e4d8fb4632b46275f8f3397de33e5877af18", size = 2831929 }, ] [[package]] @@ -3370,53 +3372,53 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/d2/8f/2cc977e91adbfbcdb6b49fdb9147e1d1c7566eb2c0c1e737e9a47020b5ca/numexpr-2.11.0.tar.gz", hash = "sha256:75b2c01a4eda2e7c357bc67a3f5c3dd76506c15b5fd4dc42845ef2e182181bad", size = 108960, upload-time = "2025-06-09T11:05:56.79Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/8f/2cc977e91adbfbcdb6b49fdb9147e1d1c7566eb2c0c1e737e9a47020b5ca/numexpr-2.11.0.tar.gz", hash = "sha256:75b2c01a4eda2e7c357bc67a3f5c3dd76506c15b5fd4dc42845ef2e182181bad", size = 108960 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/d1/1cf8137990b3f3d445556ed63b9bc347aec39bde8c41146b02d3b35c1adc/numexpr-2.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:450eba3c93c3e3e8070566ad8d70590949d6e574b1c960bf68edd789811e7da8", size = 147535, upload-time = "2025-06-09T11:05:08.929Z" }, - { url = "https://files.pythonhosted.org/packages/b6/5e/bac7649d043f47c7c14c797efe60dbd19476468a149399cd706fe2e47f8c/numexpr-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0eb88dbac8a7e61ee433006d0ddfd6eb921f5c6c224d1b50855bc98fb304c44", size = 136710, upload-time = "2025-06-09T11:05:10.366Z" }, - { url = "https://files.pythonhosted.org/packages/1b/9f/c88fc34d82d23c66ea0b78b00a1fb3b64048e0f7ac7791b2cd0d2a4ce14d/numexpr-2.11.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a194e3684b3553ea199c3f4837f422a521c7e2f0cce13527adc3a6b4049f9e7c", size = 411169, upload-time = "2025-06-09T11:05:11.797Z" }, - { url = "https://files.pythonhosted.org/packages/e4/8d/4d78dad430b41d836146f9e6f545f5c4f7d1972a6aa427d8570ab232bf16/numexpr-2.11.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f677668ab2bb2452fee955af3702fbb3b71919e61e4520762b1e5f54af59c0d8", size = 401671, upload-time = "2025-06-09T11:05:13.127Z" }, - { url = "https://files.pythonhosted.org/packages/83/1c/414670eb41a82b78bd09769a4f5fb49a934f9b3990957f02c833637a511e/numexpr-2.11.0-cp311-cp311-win32.whl", hash = "sha256:7d9e76a77c9644fbd60da3984e516ead5b84817748c2da92515cd36f1941a04d", size = 153159, upload-time = "2025-06-09T11:05:14.452Z" }, - { url = "https://files.pythonhosted.org/packages/0c/97/8d00ca9b36f3ac68a8fd85e930ab0c9448d8c9ca7ce195ee75c188dabd45/numexpr-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7163b488bfdcd13c300a8407c309e4cee195ef95d07facf5ac2678d66c988805", size = 146224, upload-time = "2025-06-09T11:05:15.877Z" }, - { url = "https://files.pythonhosted.org/packages/38/45/7a0e5a0b800d92e73825494ac695fa05a52c7fc7088d69a336880136b437/numexpr-2.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4229060be866813122385c608bbd3ea48fe0b33e91f2756810d28c1cdbfc98f1", size = 147494, upload-time = "2025-06-09T11:05:17.015Z" }, - { url = "https://files.pythonhosted.org/packages/74/46/3a26b84e44f4739ec98de0ede4b95b4b8096f721e22d0e97517eeb02017e/numexpr-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:097aa8835d32d6ac52f2be543384019b4b134d1fb67998cbfc4271155edfe54a", size = 136832, upload-time = "2025-06-09T11:05:18.55Z" }, - { url = "https://files.pythonhosted.org/packages/75/05/e3076ff25d4a108b47640c169c0a64811748c43b63d9cc052ea56de1631e/numexpr-2.11.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f082321c244ff5d0e252071fb2c4fe02063a45934144a1456a5370ca139bec2", size = 412618, upload-time = "2025-06-09T11:05:20.093Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/e8/15e0e077a004db0edd530da96c60c948689c888c464ee5d14b82405ebd86/numexpr-2.11.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7a19435ca3d7dd502b8d8dce643555eb1b6013989e3f7577857289f6db6be16", size = 403363, upload-time = "2025-06-09T11:05:21.217Z" }, - { url = "https://files.pythonhosted.org/packages/10/14/f22afb3a7ae41d03ba87f62d00fbcfb76389f9cc91b7a82593c39c509318/numexpr-2.11.0-cp312-cp312-win32.whl", hash = "sha256:f326218262c8d8537887cc4bbd613c8409d62f2cac799835c0360e0d9cefaa5c", size = 153307, upload-time = "2025-06-09T11:05:22.855Z" }, - { url = "https://files.pythonhosted.org/packages/18/70/abc585269424582b3cd6db261e33b2ec96b5d4971da3edb29fc9b62a8926/numexpr-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a184e5930c77ab91dd9beee4df403b825cd9dfc4e9ba4670d31c9fcb4e2c08e", size = 146337, upload-time = "2025-06-09T11:05:23.976Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d1/1cf8137990b3f3d445556ed63b9bc347aec39bde8c41146b02d3b35c1adc/numexpr-2.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:450eba3c93c3e3e8070566ad8d70590949d6e574b1c960bf68edd789811e7da8", size = 147535 }, + { url = "https://files.pythonhosted.org/packages/b6/5e/bac7649d043f47c7c14c797efe60dbd19476468a149399cd706fe2e47f8c/numexpr-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0eb88dbac8a7e61ee433006d0ddfd6eb921f5c6c224d1b50855bc98fb304c44", size = 136710 }, + { url = "https://files.pythonhosted.org/packages/1b/9f/c88fc34d82d23c66ea0b78b00a1fb3b64048e0f7ac7791b2cd0d2a4ce14d/numexpr-2.11.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a194e3684b3553ea199c3f4837f422a521c7e2f0cce13527adc3a6b4049f9e7c", size = 411169 }, + { url = "https://files.pythonhosted.org/packages/e4/8d/4d78dad430b41d836146f9e6f545f5c4f7d1972a6aa427d8570ab232bf16/numexpr-2.11.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f677668ab2bb2452fee955af3702fbb3b71919e61e4520762b1e5f54af59c0d8", size = 401671 }, + { url = "https://files.pythonhosted.org/packages/83/1c/414670eb41a82b78bd09769a4f5fb49a934f9b3990957f02c833637a511e/numexpr-2.11.0-cp311-cp311-win32.whl", hash = "sha256:7d9e76a77c9644fbd60da3984e516ead5b84817748c2da92515cd36f1941a04d", size = 153159 }, + { url = "https://files.pythonhosted.org/packages/0c/97/8d00ca9b36f3ac68a8fd85e930ab0c9448d8c9ca7ce195ee75c188dabd45/numexpr-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7163b488bfdcd13c300a8407c309e4cee195ef95d07facf5ac2678d66c988805", size = 146224 }, + { url = "https://files.pythonhosted.org/packages/38/45/7a0e5a0b800d92e73825494ac695fa05a52c7fc7088d69a336880136b437/numexpr-2.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4229060be866813122385c608bbd3ea48fe0b33e91f2756810d28c1cdbfc98f1", size = 147494 }, + { url = "https://files.pythonhosted.org/packages/74/46/3a26b84e44f4739ec98de0ede4b95b4b8096f721e22d0e97517eeb02017e/numexpr-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:097aa8835d32d6ac52f2be543384019b4b134d1fb67998cbfc4271155edfe54a", size = 136832 }, + { url = "https://files.pythonhosted.org/packages/75/05/e3076ff25d4a108b47640c169c0a64811748c43b63d9cc052ea56de1631e/numexpr-2.11.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f082321c244ff5d0e252071fb2c4fe02063a45934144a1456a5370ca139bec2", size = 412618 }, + { url = 
"https://files.pythonhosted.org/packages/70/e8/15e0e077a004db0edd530da96c60c948689c888c464ee5d14b82405ebd86/numexpr-2.11.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7a19435ca3d7dd502b8d8dce643555eb1b6013989e3f7577857289f6db6be16", size = 403363 }, + { url = "https://files.pythonhosted.org/packages/10/14/f22afb3a7ae41d03ba87f62d00fbcfb76389f9cc91b7a82593c39c509318/numexpr-2.11.0-cp312-cp312-win32.whl", hash = "sha256:f326218262c8d8537887cc4bbd613c8409d62f2cac799835c0360e0d9cefaa5c", size = 153307 }, + { url = "https://files.pythonhosted.org/packages/18/70/abc585269424582b3cd6db261e33b2ec96b5d4971da3edb29fc9b62a8926/numexpr-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a184e5930c77ab91dd9beee4df403b825cd9dfc4e9ba4670d31c9fcb4e2c08e", size = 146337 }, ] [[package]] name = "numpy" version = "1.26.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, - { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, - { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, - { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, - { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, - { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = 
"sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, - { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, - { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901, upload-time = "2024-02-05T23:55:32.801Z" }, - { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868, upload-time = "2024-02-05T23:55:56.28Z" }, - { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109, upload-time = "2024-02-05T23:56:20.368Z" }, - { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613, upload-time = "2024-02-05T23:56:56.054Z" }, - { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172, upload-time = "2024-02-05T23:57:21.56Z" }, - { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643, upload-time = "2024-02-05T23:57:56.585Z" }, - { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803, upload-time = "2024-02-05T23:58:08.963Z" }, - { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754, upload-time = "2024-02-05T23:58:36.364Z" }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, + { url = 
"https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 
5677803 }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, ] [[package]] name = "oauthlib" version = "3.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918, upload-time = "2025-06-19T22:48:08.269Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/5f/19930f824ffeb0ad4372da4812c50edbd1434f678c90c2733e1188edfc63/oauthlib-3.3.1.tar.gz", hash = "sha256:0f0f8aa759826a193cf66c12ea1af1637f87b9b4622d46e866952bb022e538c9", size = 185918 } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065, upload-time = "2025-06-19T22:48:06.508Z" }, + { url = "https://files.pythonhosted.org/packages/be/9c/92789c596b8df838baa98fa71844d84283302f7604ed565dafe5a6b5041a/oauthlib-3.3.1-py3-none-any.whl", hash = "sha256:88119c938d2b8fb88561af5f6ee0eec8cc8d552b7bb1f712743136eb7523b7a1", size = 160065 }, ] [[package]] @@ -3426,15 +3428,15 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "defusedxml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/73/8ade73f6749177003f7ce3304f524774adda96e6aaab30ea79fd8fda7934/odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec", size = 717045, upload-time = "2020-01-18T16:55:48.852Z" } +sdist = { url = "https://files.pythonhosted.org/packages/97/73/8ade73f6749177003f7ce3304f524774adda96e6aaab30ea79fd8fda7934/odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec", size = 717045 } [[package]] name = "olefile" version = "0.47" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240, upload-time = "2023-12-01T16:22:53.025Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/1b/077b508e3e500e1629d366249c3ccb32f95e50258b231705c09e3c7a4366/olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c", size = 112240 } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565, upload-time = "2023-12-01T16:22:51.518Z" }, + { url = "https://files.pythonhosted.org/packages/17/d3/b64c356a907242d719fc668b71befd73324e47ab46c8ebbbede252c154b2/olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f", size = 114565 }, ] [[package]] @@ -3450,14 +3452,14 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7a/08/c008711d1b92ff1272f4fea0fbee57723171f161d42e5c680625535280af/onnxruntime-1.22.0-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8d6725c5b9a681d8fe72f2960c191a96c256367887d076b08466f52b4e0991df", size = 34282151, upload-time = "2025-05-09T20:25:59.246Z" }, - { url = "https://files.pythonhosted.org/packages/3e/8b/22989f6b59bc4ad1324f07a945c80b9ab825f0a581ad7a6064b93716d9b7/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fef17d665a917866d1f68f09edc98223b9a27e6cb167dec69da4c66484ad12fd", size = 14446302, upload-time = "2025-05-09T20:25:44.299Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d5/aa83d084d05bc8f6cf8b74b499c77431ffd6b7075c761ec48ec0c161a47f/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b978aa63a9a22095479c38371a9b359d4c15173cbb164eaad5f2cd27d666aa65", size = 16393496, upload-time = "2025-05-09T20:26:11.588Z" }, - { url = "https://files.pythonhosted.org/packages/89/a5/1c6c10322201566015183b52ef011dfa932f5dd1b278de8d75c3b948411d/onnxruntime-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:03d3ef7fb11adf154149d6e767e21057e0e577b947dd3f66190b212528e1db31", size = 12691517, upload-time = "2025-05-12T21:26:13.354Z" }, - { url = "https://files.pythonhosted.org/packages/4d/de/9162872c6e502e9ac8c99a98a8738b2fab408123d11de55022ac4f92562a/onnxruntime-1.22.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f3c0380f53c1e72a41b3f4d6af2ccc01df2c17844072233442c3a7e74851ab97", size = 34298046, upload-time = "2025-05-09T20:26:02.399Z" }, - { url = "https://files.pythonhosted.org/packages/03/79/36f910cd9fc96b444b0e728bba14607016079786adf032dae61f7c63b4aa/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c", size = 14443220, upload-time = "2025-05-09T20:25:47.078Z" }, - { url = "https://files.pythonhosted.org/packages/8c/60/16d219b8868cc8e8e51a68519873bdb9f5f24af080b62e917a13fff9989b/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6964a975731afc19dc3418fad8d4e08c48920144ff590149429a5ebe0d15fb3c", size = 16406377, upload-time = "2025-05-09T20:26:14.478Z" }, - { url = "https://files.pythonhosted.org/packages/36/b4/3f1c71ce1d3d21078a6a74c5483bfa2b07e41a8d2b8fb1e9993e6a26d8d3/onnxruntime-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0d534a43d1264d1273c2d4f00a5a588fa98d21117a3345b7104fa0bbcaadb9a", size = 12692233, upload-time = "2025-05-12T21:26:16.963Z" }, + { url = "https://files.pythonhosted.org/packages/7a/08/c008711d1b92ff1272f4fea0fbee57723171f161d42e5c680625535280af/onnxruntime-1.22.0-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:8d6725c5b9a681d8fe72f2960c191a96c256367887d076b08466f52b4e0991df", size = 34282151 }, + { url = "https://files.pythonhosted.org/packages/3e/8b/22989f6b59bc4ad1324f07a945c80b9ab825f0a581ad7a6064b93716d9b7/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fef17d665a917866d1f68f09edc98223b9a27e6cb167dec69da4c66484ad12fd", size = 14446302 }, + { url = "https://files.pythonhosted.org/packages/7a/d5/aa83d084d05bc8f6cf8b74b499c77431ffd6b7075c761ec48ec0c161a47f/onnxruntime-1.22.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b978aa63a9a22095479c38371a9b359d4c15173cbb164eaad5f2cd27d666aa65", size = 16393496 }, + { url = 
"https://files.pythonhosted.org/packages/89/a5/1c6c10322201566015183b52ef011dfa932f5dd1b278de8d75c3b948411d/onnxruntime-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:03d3ef7fb11adf154149d6e767e21057e0e577b947dd3f66190b212528e1db31", size = 12691517 }, + { url = "https://files.pythonhosted.org/packages/4d/de/9162872c6e502e9ac8c99a98a8738b2fab408123d11de55022ac4f92562a/onnxruntime-1.22.0-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:f3c0380f53c1e72a41b3f4d6af2ccc01df2c17844072233442c3a7e74851ab97", size = 34298046 }, + { url = "https://files.pythonhosted.org/packages/03/79/36f910cd9fc96b444b0e728bba14607016079786adf032dae61f7c63b4aa/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8601128eaef79b636152aea76ae6981b7c9fc81a618f584c15d78d42b310f1c", size = 14443220 }, + { url = "https://files.pythonhosted.org/packages/8c/60/16d219b8868cc8e8e51a68519873bdb9f5f24af080b62e917a13fff9989b/onnxruntime-1.22.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6964a975731afc19dc3418fad8d4e08c48920144ff590149429a5ebe0d15fb3c", size = 16406377 }, + { url = "https://files.pythonhosted.org/packages/36/b4/3f1c71ce1d3d21078a6a74c5483bfa2b07e41a8d2b8fb1e9993e6a26d8d3/onnxruntime-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0d534a43d1264d1273c2d4f00a5a588fa98d21117a3345b7104fa0bbcaadb9a", size = 12692233 }, ] [[package]] @@ -3474,25 +3476,25 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/cf/61e71ce64cf0a38f029da0f9a5f10c9fa0e69a7a977b537126dac50adfea/openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e", size = 350784, upload-time = "2025-02-05T14:34:15.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/cf/61e71ce64cf0a38f029da0f9a5f10c9fa0e69a7a977b537126dac50adfea/openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e", size = 350784 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/b6/2e2a011b2dc27a6711376808b4cd8c922c476ea0f1420b39892117fa8563/openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e", size = 463126, upload-time = "2025-02-05T14:34:13.643Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b6/2e2a011b2dc27a6711376808b4cd8c922c476ea0f1420b39892117fa8563/openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e", size = 463126 }, ] [[package]] name = "opendal" version = "0.45.20" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/3f/927dfe1349ae58b9238b8eafba747af648d660a9425f486dda01a10f0b78/opendal-0.45.20.tar.gz", hash = "sha256:9f6f90d9e9f9d6e9e5a34aa7729169ef34d2f1869ad1e01ddc39b1c0ce0c9405", size = 990267, upload-time = "2025-05-26T07:02:11.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/3f/927dfe1349ae58b9238b8eafba747af648d660a9425f486dda01a10f0b78/opendal-0.45.20.tar.gz", hash = "sha256:9f6f90d9e9f9d6e9e5a34aa7729169ef34d2f1869ad1e01ddc39b1c0ce0c9405", size = 990267 } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/77/6427e16b8630f0cc71f4a1b01648ed3264f1e04f1f6d9b5d09e5c6a4dd2f/opendal-0.45.20-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35acdd8001e4a741532834fdbff3020ffb10b40028bb49fbe93c4f8197d66d8c", size = 
26910966, upload-time = "2025-05-26T07:01:24.987Z" }, - { url = "https://files.pythonhosted.org/packages/12/1f/83e415334739f1ab4dba55cdd349abf0b66612249055afb422a354b96ac8/opendal-0.45.20-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:629bfe8d384364bced6cbeb01f49b99779fa5151c68048a1869ff645ddcfcb25", size = 13002770, upload-time = "2025-05-26T07:01:30.385Z" }, - { url = "https://files.pythonhosted.org/packages/49/94/c5de6ed54a02d7413636c2ccefa71d8dd09c2ada1cd6ecab202feb1fdeda/opendal-0.45.20-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cc5ac7e441fb93d86d1673112d9fb08580fc3226f864434f4a56a72efec53", size = 14387218, upload-time = "2025-05-26T07:01:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/c6/83/713a1e1de8cbbd69af50e26644bbdeef3c1068b89f442417376fa3c0f591/opendal-0.45.20-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:45a3adae1f473052234fc4054a6f210df3ded9aff10db8d545d0a37eff3b13cc", size = 13424302, upload-time = "2025-05-26T07:01:36.417Z" }, - { url = "https://files.pythonhosted.org/packages/c7/78/c9651e753aaf6eb61887ca372a3f9c2ae57dae03c3159d24deaf018c26dc/opendal-0.45.20-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8947857052c85a4b0e251d50e23f5f68f0cdd9e509e32e614a5e4b2fc7424c4", size = 13622483, upload-time = "2025-05-26T07:01:38.886Z" }, - { url = "https://files.pythonhosted.org/packages/3c/9d/5d8c20c0fc93df5e349e5694167de30afdc54c5755704cc64764a6cbb309/opendal-0.45.20-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:891d2f9114efeef648973049ed15e56477e8feb9e48b540bd8d6105ea22a253c", size = 13320229, upload-time = "2025-05-26T07:01:41.965Z" }, - { url = "https://files.pythonhosted.org/packages/21/39/05262f748a2085522e0c85f03eab945589313dc9caedc002872c39162776/opendal-0.45.20-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:539de9b825f6783d6289d88c0c9ac5415daa4d892d761e3540c565bda51e8997", size = 14574280, upload-time = "2025-05-26T07:01:44.413Z" }, - { url = "https://files.pythonhosted.org/packages/74/83/cc7c6de29b0a7585cd445258d174ca204d37729c3874ad08e515b0bf331c/opendal-0.45.20-cp311-abi3-win_amd64.whl", hash = "sha256:145efd56aa33b493d5b652c3e4f5ae5097ab69d38c132d80f108e9f5c1e4d863", size = 14929888, upload-time = "2025-05-26T07:01:46.929Z" }, + { url = "https://files.pythonhosted.org/packages/84/77/6427e16b8630f0cc71f4a1b01648ed3264f1e04f1f6d9b5d09e5c6a4dd2f/opendal-0.45.20-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35acdd8001e4a741532834fdbff3020ffb10b40028bb49fbe93c4f8197d66d8c", size = 26910966 }, + { url = "https://files.pythonhosted.org/packages/12/1f/83e415334739f1ab4dba55cdd349abf0b66612249055afb422a354b96ac8/opendal-0.45.20-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:629bfe8d384364bced6cbeb01f49b99779fa5151c68048a1869ff645ddcfcb25", size = 13002770 }, + { url = "https://files.pythonhosted.org/packages/49/94/c5de6ed54a02d7413636c2ccefa71d8dd09c2ada1cd6ecab202feb1fdeda/opendal-0.45.20-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cc5ac7e441fb93d86d1673112d9fb08580fc3226f864434f4a56a72efec53", size = 14387218 }, + { url = "https://files.pythonhosted.org/packages/c6/83/713a1e1de8cbbd69af50e26644bbdeef3c1068b89f442417376fa3c0f591/opendal-0.45.20-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:45a3adae1f473052234fc4054a6f210df3ded9aff10db8d545d0a37eff3b13cc", size = 13424302 }, + { url = 
"https://files.pythonhosted.org/packages/c7/78/c9651e753aaf6eb61887ca372a3f9c2ae57dae03c3159d24deaf018c26dc/opendal-0.45.20-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8947857052c85a4b0e251d50e23f5f68f0cdd9e509e32e614a5e4b2fc7424c4", size = 13622483 }, + { url = "https://files.pythonhosted.org/packages/3c/9d/5d8c20c0fc93df5e349e5694167de30afdc54c5755704cc64764a6cbb309/opendal-0.45.20-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:891d2f9114efeef648973049ed15e56477e8feb9e48b540bd8d6105ea22a253c", size = 13320229 }, + { url = "https://files.pythonhosted.org/packages/21/39/05262f748a2085522e0c85f03eab945589313dc9caedc002872c39162776/opendal-0.45.20-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:539de9b825f6783d6289d88c0c9ac5415daa4d892d761e3540c565bda51e8997", size = 14574280 }, + { url = "https://files.pythonhosted.org/packages/74/83/cc7c6de29b0a7585cd445258d174ca204d37729c3874ad08e515b0bf331c/opendal-0.45.20-cp311-abi3-win_amd64.whl", hash = "sha256:145efd56aa33b493d5b652c3e4f5ae5097ab69d38c132d80f108e9f5c1e4d863", size = 14929888 }, ] [[package]] @@ -3504,18 +3506,18 @@ dependencies = [ { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/18/d074b45b04ba69bd03260d2dc0a034e5d586d8854e957695f40569278136/openinference_instrumentation-0.1.34.tar.gz", hash = "sha256:fa0328e8b92fc3e22e150c46f108794946ce39fe13670aed15f23ba0105f72ab", size = 22373, upload-time = "2025-06-17T16:47:22.641Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/18/d074b45b04ba69bd03260d2dc0a034e5d586d8854e957695f40569278136/openinference_instrumentation-0.1.34.tar.gz", hash = "sha256:fa0328e8b92fc3e22e150c46f108794946ce39fe13670aed15f23ba0105f72ab", size = 22373 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ad/1a0a5c0a755918269f71fbca225fd70759dd79dd5bffc4723e44f0d87240/openinference_instrumentation-0.1.34-py3-none-any.whl", hash = "sha256:0fff1cc6d9b86f3450fc1c88347c51c5467855992b75e7addb85bf09fd048d2d", size = 28137, upload-time = "2025-06-17T16:47:21.658Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ad/1a0a5c0a755918269f71fbca225fd70759dd79dd5bffc4723e44f0d87240/openinference_instrumentation-0.1.34-py3-none-any.whl", hash = "sha256:0fff1cc6d9b86f3450fc1c88347c51c5467855992b75e7addb85bf09fd048d2d", size = 28137 }, ] [[package]] name = "openinference-semantic-conventions" version = "0.1.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/0f/b794eb009846d4b10af50e205a323ca359f284563ef4d1778f35a80522ac/openinference_semantic_conventions-0.1.21.tar.gz", hash = "sha256:328405b9f79ff72a659c7712b8429c0d7ea68c6a4a1679e3eb44372aa228119b", size = 12534, upload-time = "2025-06-13T05:22:18.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/75/0f/b794eb009846d4b10af50e205a323ca359f284563ef4d1778f35a80522ac/openinference_semantic_conventions-0.1.21.tar.gz", hash = "sha256:328405b9f79ff72a659c7712b8429c0d7ea68c6a4a1679e3eb44372aa228119b", size = 12534 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/4d/092766f8e610f2c513e483c4adc892eea1634945022a73371fe01f621165/openinference_semantic_conventions-0.1.21-py3-none-any.whl", hash = "sha256:acde8282c20da1de900cdc0d6258a793ec3eb8031bfc496bd823dae17d32e326", size = 10167, upload-time = "2025-06-13T05:22:18.118Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/4d/092766f8e610f2c513e483c4adc892eea1634945022a73371fe01f621165/openinference_semantic_conventions-0.1.21-py3-none-any.whl", hash = "sha256:acde8282c20da1de900cdc0d6258a793ec3eb8031bfc496bd823dae17d32e326", size = 10167 }, ] [[package]] @@ -3525,9 +3527,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "et-xmlfile" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 }, ] [[package]] @@ -3541,9 +3543,9 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/dc/acb182db6bb0c71f1e6e41c49260e01d68e52a03efb64e44aed3cc7f483f/opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b", size = 182924, upload-time = "2023-11-15T21:41:37.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/dc/acb182db6bb0c71f1e6e41c49260e01d68e52a03efb64e44aed3cc7f483f/opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b", size = 182924 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/98/178aacf07ece7f95d1948352778702898d57c286053813deb20ebb409923/opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9", size = 258405, upload-time = "2023-11-15T21:41:35.59Z" }, + { url = "https://files.pythonhosted.org/packages/c1/98/178aacf07ece7f95d1948352778702898d57c286053813deb20ebb409923/opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9", size = 258405 }, ] [[package]] @@ -3554,9 +3556,9 @@ dependencies = [ { name = "deprecated" }, { name = "importlib-metadata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693, upload-time = "2024-08-28T21:35:31.445Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash 
= "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970, upload-time = "2024-08-28T21:35:00.598Z" }, + { url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970 }, ] [[package]] @@ -3568,9 +3570,9 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556, upload-time = "2024-08-28T21:27:40.455Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556 } wheels = [ - { url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321, upload-time = "2024-08-28T21:26:26.584Z" }, + { url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321 }, ] [[package]] @@ -3581,9 +3583,9 @@ dependencies = [ { name = "opentelemetry-exporter-otlp-proto-grpc" }, { name = "opentelemetry-exporter-otlp-proto-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/8156cc14e8f4573a3572ee7f30badc7aabd02961a09acc72ab5f2c789ef1/opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1", size = 6166, upload-time = "2024-08-28T21:35:33.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/8156cc14e8f4573a3572ee7f30badc7aabd02961a09acc72ab5f2c789ef1/opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1", size = 6166 } wheels = [ - { url = "https://files.pythonhosted.org/packages/59/6d/95e1fc2c8d945a734db32e87a5aa7a804f847c1657a21351df9338bd1c9c/opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145", size = 7001, upload-time = "2024-08-28T21:35:04.02Z" }, + { url = "https://files.pythonhosted.org/packages/59/6d/95e1fc2c8d945a734db32e87a5aa7a804f847c1657a21351df9338bd1c9c/opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145", size = 7001 }, ] [[package]] @@ -3593,9 +3595,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-proto" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/2e/7eaf4ba595fb5213cf639c9158dfb64aacb2e4c7d74bfa664af89fa111f4/opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", size = 17860, upload-time = "2024-08-28T21:35:34.896Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/cd/2e/7eaf4ba595fb5213cf639c9158dfb64aacb2e4c7d74bfa664af89fa111f4/opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", size = 17860 } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/27/4610ab3d9bb3cde4309b6505f98b3aabca04a26aa480aa18cede23149837/opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a", size = 17848, upload-time = "2024-08-28T21:35:05.412Z" }, + { url = "https://files.pythonhosted.org/packages/41/27/4610ab3d9bb3cde4309b6505f98b3aabca04a26aa480aa18cede23149837/opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a", size = 17848 }, ] [[package]] @@ -3611,9 +3613,9 @@ dependencies = [ { name = "opentelemetry-proto" }, { name = "opentelemetry-sdk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/d0/c1e375b292df26e0ffebf194e82cd197e4c26cc298582bda626ce3ce74c5/opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f", size = 26244, upload-time = "2024-08-28T21:35:36.314Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d0/c1e375b292df26e0ffebf194e82cd197e4c26cc298582bda626ce3ce74c5/opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f", size = 26244 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/80/32217460c2c64c0568cea38410124ff680a9b65f6732867bbf857c4d8626/opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", size = 18541, upload-time = "2024-08-28T21:35:06.493Z" }, + { url = "https://files.pythonhosted.org/packages/8d/80/32217460c2c64c0568cea38410124ff680a9b65f6732867bbf857c4d8626/opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", size = 18541 }, ] [[package]] @@ -3629,9 +3631,9 @@ dependencies = [ { name = "opentelemetry-sdk" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/31/0a/f05c55e8913bf58a033583f2580a0ec31a5f4cf2beacc9e286dcb74d6979/opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5", size = 15059, upload-time = "2024-08-28T21:35:37.079Z" } +sdist = { url = "https://files.pythonhosted.org/packages/31/0a/f05c55e8913bf58a033583f2580a0ec31a5f4cf2beacc9e286dcb74d6979/opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5", size = 15059 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/8d/4755884afc0b1db6000527cac0ca17273063b6142c773ce4ecd307a82e72/opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75", size = 17203, upload-time = "2024-08-28T21:35:08.141Z" }, + { url = "https://files.pythonhosted.org/packages/2d/8d/4755884afc0b1db6000527cac0ca17273063b6142c773ce4ecd307a82e72/opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75", size = 17203 }, ] [[package]] @@ 
-3643,9 +3645,9 @@ dependencies = [ { name = "setuptools" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724, upload-time = "2024-08-28T21:27:42.82Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449, upload-time = "2024-08-28T21:26:31.288Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449 }, ] [[package]] @@ -3659,9 +3661,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435, upload-time = "2024-08-28T21:27:47.276Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435 } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958, upload-time = "2024-08-28T21:26:38.139Z" }, + { url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958 }, ] [[package]] @@ -3673,9 +3675,9 @@ dependencies = [ { name = "opentelemetry-instrumentation" }, { name = "opentelemetry-semantic-conventions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/68/72975eff50cc22d8f65f96c425a2e8844f91488e78ffcfb603ac7cee0e5a/opentelemetry_instrumentation_celery-0.48b0.tar.gz", hash = "sha256:1d33aa6c4a1e6c5d17a64215245208a96e56c9d07611685dbae09a557704af26", size = 14445, upload-time = "2024-08-28T21:27:56.392Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/68/72975eff50cc22d8f65f96c425a2e8844f91488e78ffcfb603ac7cee0e5a/opentelemetry_instrumentation_celery-0.48b0.tar.gz", hash = "sha256:1d33aa6c4a1e6c5d17a64215245208a96e56c9d07611685dbae09a557704af26", size = 14445 } wheels = [ - { url = "https://files.pythonhosted.org/packages/28/59/f09e8f9f596d375fd86b7677751525bbc485c8cc8c5388e39786a3d3b968/opentelemetry_instrumentation_celery-0.48b0-py3-none-any.whl", hash = 
"sha256:c1904e38cc58fb2a33cd657d6e296285c5ffb0dca3f164762f94b905e5abc88e", size = 13697, upload-time = "2024-08-28T21:26:50.01Z" }, + { url = "https://files.pythonhosted.org/packages/28/59/f09e8f9f596d375fd86b7677751525bbc485c8cc8c5388e39786a3d3b968/opentelemetry_instrumentation_celery-0.48b0-py3-none-any.whl", hash = "sha256:c1904e38cc58fb2a33cd657d6e296285c5ffb0dca3f164762f94b905e5abc88e", size = 13697 }, ] [[package]] @@ -3689,9 +3691,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497, upload-time = "2024-08-28T21:28:01.14Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777, upload-time = "2024-08-28T21:26:57.457Z" }, + { url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777 }, ] [[package]] @@ -3707,9 +3709,9 @@ dependencies = [ { name = "opentelemetry-util-http" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/2f/5c3af780a69f9ba78445fe0e5035c41f67281a31b08f3c3e7ec460bda726/opentelemetry_instrumentation_flask-0.48b0.tar.gz", hash = "sha256:e03a34428071aebf4864ea6c6a564acef64f88c13eb3818e64ea90da61266c3d", size = 19196, upload-time = "2024-08-28T21:28:01.986Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/2f/5c3af780a69f9ba78445fe0e5035c41f67281a31b08f3c3e7ec460bda726/opentelemetry_instrumentation_flask-0.48b0.tar.gz", hash = "sha256:e03a34428071aebf4864ea6c6a564acef64f88c13eb3818e64ea90da61266c3d", size = 19196 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" }, + { url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588 }, ] [[package]] @@ -3722,9 +3724,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511, upload-time = "2024-08-28T21:28:15.061Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" }, + { url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610 }, ] [[package]] @@ -3737,9 +3739,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120, upload-time = "2024-08-28T21:28:16.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ac/5eb78efde21ff21d0ad5dc8c6cc6a0f8ae482ce8a46293c2f45a628b6166/opentelemetry_instrumentation_requests-0.48b0.tar.gz", hash = "sha256:67ab9bd877a0352ee0db4616c8b4ae59736ddd700c598ed907482d44f4c9a2b3", size = 14120 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366, upload-time = "2024-08-28T21:27:20.771Z" }, + { url = "https://files.pythonhosted.org/packages/43/df/0df9226d1b14f29d23c07e6194b9fd5ad50e7d987b7fd13df7dcf718aeb1/opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl", hash = "sha256:d4f01852121d0bd4c22f14f429654a735611d4f7bf3cf93f244bdf1489b2233d", size = 12366 }, ] [[package]] @@ -3753,9 +3755,9 @@ dependencies = [ { name = "packaging" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/77/3fcebbca8bd729da50dc2130d8ca869a235aa5483a85ef06c5dc8643476b/opentelemetry_instrumentation_sqlalchemy-0.48b0.tar.gz", hash = "sha256:dbf2d5a755b470e64e5e2762b56f8d56313787e4c7d71a87fe25c33f48eb3493", size = 13194, upload-time = "2024-08-28T21:28:18.122Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/77/3fcebbca8bd729da50dc2130d8ca869a235aa5483a85ef06c5dc8643476b/opentelemetry_instrumentation_sqlalchemy-0.48b0.tar.gz", hash = "sha256:dbf2d5a755b470e64e5e2762b56f8d56313787e4c7d71a87fe25c33f48eb3493", size = 13194 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/84/4b6f1e9e9f83a52d966e91963f5a8424edc4a3d5ea32854c96c2d1618284/opentelemetry_instrumentation_sqlalchemy-0.48b0-py3-none-any.whl", hash = "sha256:625848a34aa5770cb4b1dcdbd95afce4307a0230338711101325261d739f391f", size = 13360, upload-time = "2024-08-28T21:27:22.102Z" }, + { url = "https://files.pythonhosted.org/packages/e1/84/4b6f1e9e9f83a52d966e91963f5a8424edc4a3d5ea32854c96c2d1618284/opentelemetry_instrumentation_sqlalchemy-0.48b0-py3-none-any.whl", hash = "sha256:625848a34aa5770cb4b1dcdbd95afce4307a0230338711101325261d739f391f", size = 13360 }, ] [[package]] @@ 
-3768,9 +3770,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "opentelemetry-util-http" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/a5/f45cdfba18f22aefd2378eac8c07c1f8c9656d6bf7ce315ced48c67f3437/opentelemetry_instrumentation_wsgi-0.48b0.tar.gz", hash = "sha256:1a1e752367b0df4397e0b835839225ef5c2c3c053743a261551af13434fc4d51", size = 17974, upload-time = "2024-08-28T21:28:24.902Z" } +sdist = { url = "https://files.pythonhosted.org/packages/de/a5/f45cdfba18f22aefd2378eac8c07c1f8c9656d6bf7ce315ced48c67f3437/opentelemetry_instrumentation_wsgi-0.48b0.tar.gz", hash = "sha256:1a1e752367b0df4397e0b835839225ef5c2c3c053743a261551af13434fc4d51", size = 17974 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/87/fa420007e0ba7e8cd43799ab204717ab515f000236fa2726a6be3299efdd/opentelemetry_instrumentation_wsgi-0.48b0-py3-none-any.whl", hash = "sha256:c6051124d741972090fe94b2fa302555e1e2a22e9cdda32dd39ed49a5b34e0c6", size = 13691, upload-time = "2024-08-28T21:27:33.257Z" }, + { url = "https://files.pythonhosted.org/packages/fb/87/fa420007e0ba7e8cd43799ab204717ab515f000236fa2726a6be3299efdd/opentelemetry_instrumentation_wsgi-0.48b0-py3-none-any.whl", hash = "sha256:c6051124d741972090fe94b2fa302555e1e2a22e9cdda32dd39ed49a5b34e0c6", size = 13691 }, ] [[package]] @@ -3781,9 +3783,9 @@ dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/a3/3ceeb5ff5a1906371834d5c594e24e5b84f35528d219054833deca4ac44c/opentelemetry_propagator_b3-1.27.0.tar.gz", hash = "sha256:39377b6aa619234e08fbc6db79bf880aff36d7e2761efa9afa28b78d5937308f", size = 9590, upload-time = "2024-08-28T21:35:43.971Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/a3/3ceeb5ff5a1906371834d5c594e24e5b84f35528d219054833deca4ac44c/opentelemetry_propagator_b3-1.27.0.tar.gz", hash = "sha256:39377b6aa619234e08fbc6db79bf880aff36d7e2761efa9afa28b78d5937308f", size = 9590 } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/3f/75ba77b8d9938bae575bc457a5c56ca2246ff5367b54c7d4252a31d1c91f/opentelemetry_propagator_b3-1.27.0-py3-none-any.whl", hash = "sha256:1dd75e9801ba02e870df3830097d35771a64c123127c984d9b05c352a35aa9cc", size = 8899, upload-time = "2024-08-28T21:35:18.317Z" }, + { url = "https://files.pythonhosted.org/packages/03/3f/75ba77b8d9938bae575bc457a5c56ca2246ff5367b54c7d4252a31d1c91f/opentelemetry_propagator_b3-1.27.0-py3-none-any.whl", hash = "sha256:1dd75e9801ba02e870df3830097d35771a64c123127c984d9b05c352a35aa9cc", size = 8899 }, ] [[package]] @@ -3793,9 +3795,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9a/59/959f0beea798ae0ee9c979b90f220736fbec924eedbefc60ca581232e659/opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", size = 34749, upload-time = "2024-08-28T21:35:45.839Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/59/959f0beea798ae0ee9c979b90f220736fbec924eedbefc60ca581232e659/opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", size = 34749 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/56/3d2d826834209b19a5141eed717f7922150224d1a982385d19a9444cbf8d/opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace", size = 52464, 
upload-time = "2024-08-28T21:35:21.434Z" }, + { url = "https://files.pythonhosted.org/packages/94/56/3d2d826834209b19a5141eed717f7922150224d1a982385d19a9444cbf8d/opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace", size = 52464 }, ] [[package]] @@ -3807,9 +3809,9 @@ dependencies = [ { name = "opentelemetry-semantic-conventions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019, upload-time = "2024-08-28T21:35:46.708Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505, upload-time = "2024-08-28T21:35:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505 }, ] [[package]] @@ -3820,18 +3822,18 @@ dependencies = [ { name = "deprecated" }, { name = "opentelemetry-api" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445, upload-time = "2024-08-28T21:35:47.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685, upload-time = "2024-08-28T21:35:25.983Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685 }, ] [[package]] name = "opentelemetry-util-http" version = "0.48b0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size = 7863, upload-time = "2024-08-28T21:28:27.266Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size 
= 7863 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946, upload-time = "2024-08-28T21:27:37.975Z" }, + { url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946 }, ] [[package]] @@ -3855,9 +3857,9 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886, upload-time = "2025-07-07T10:30:07.715Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/52/cea0317bc3207bc967b48932781995d9cdb2c490e7e05caa00ff660f7205/opik-1.7.43.tar.gz", hash = "sha256:0b02522b0b74d0a67b141939deda01f8bb69690eda6b04a7cecb1c7f0649ccd0", size = 326886 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356, upload-time = "2025-07-07T10:30:06.389Z" }, + { url = "https://files.pythonhosted.org/packages/76/ae/f3566bdc3c49a1a8f795b1b6e726ef211c87e31f92d870ca6d63999c9bbf/opik-1.7.43-py3-none-any.whl", hash = "sha256:a66395c8b5ea7c24846f72dafc70c74d5b8f24ffbc4c8a1b3a7f9456e550568d", size = 625356 }, ] [[package]] @@ -3867,9 +3869,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/11/5bc1ad8e4dd339783daec5299c9162eaa80ad072aaa1256561b336152981/optype-0.10.0.tar.gz", hash = "sha256:2b89a1b8b48f9d6dd8c4dd4f59e22557185c81823c6e2bfc43c4819776d5a7ca", size = 95630, upload-time = "2025-05-28T22:43:18.799Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/11/5bc1ad8e4dd339783daec5299c9162eaa80ad072aaa1256561b336152981/optype-0.10.0.tar.gz", hash = "sha256:2b89a1b8b48f9d6dd8c4dd4f59e22557185c81823c6e2bfc43c4819776d5a7ca", size = 95630 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/98/7f97864d5b6801bc63c24e72c45a58417c344c563ca58134a43249ce8afa/optype-0.10.0-py3-none-any.whl", hash = "sha256:7e9ccc329fb65c326c6bd62c30c2ba03b694c28c378a96c2bcdd18a084f2c96b", size = 83825, upload-time = "2025-05-28T22:43:16.772Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/7f97864d5b6801bc63c24e72c45a58417c344c563ca58134a43249ce8afa/optype-0.10.0-py3-none-any.whl", hash = "sha256:7e9ccc329fb65c326c6bd62c30c2ba03b694c28c378a96c2bcdd18a084f2c96b", size = 83825 }, ] [[package]] @@ -3879,56 +3881,56 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431, upload-time = "2025-03-03T19:36:12.223Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/bf/39/712f797b75705c21148fa1d98651f63c2e5cc6876e509a0a9e2f5b406572/oracledb-3.0.0.tar.gz", hash = "sha256:64dc86ee5c032febc556798b06e7b000ef6828bb0252084f6addacad3363db85", size = 840431 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963, upload-time = "2025-03-03T19:36:32.576Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536, upload-time = "2025-03-03T19:36:34.904Z" }, - { url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461, upload-time = "2025-03-03T19:36:36.508Z" }, - { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046, upload-time = "2025-03-03T19:36:38.313Z" }, - { url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210, upload-time = "2025-03-03T19:36:40.669Z" }, - { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993, upload-time = "2025-03-03T19:36:42.577Z" }, - { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640, upload-time = "2025-03-03T19:36:45.066Z" }, - { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949, upload-time = "2025-03-03T19:36:47.47Z" }, - { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373, upload-time = "2025-03-03T19:36:49.67Z" }, - { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452, upload-time = "2025-03-03T19:36:51.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/bf/d872c4b3fc15cd3261fe0ea72b21d181700c92dbc050160e161654987062/oracledb-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:52daa9141c63dfa75c07d445e9bb7f69f43bfb3c5a173ecc48c798fe50288d26", size = 4312963 }, + { url = "https://files.pythonhosted.org/packages/b1/ea/01ee29e76a610a53bb34fdc1030f04b7669c3f80b25f661e07850fc6160e/oracledb-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af98941789df4c6aaaf4338f5b5f6b7f2c8c3fe6f8d6a9382f177f350868747a", size = 2661536 }, + { url = "https://files.pythonhosted.org/packages/3d/8e/ad380e34a46819224423b4773e58c350bc6269643c8969604097ced8c3bc/oracledb-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9812bb48865aaec35d73af54cd1746679f2a8a13cbd1412ab371aba2e39b3943", size = 2867461 }, + { url = "https://files.pythonhosted.org/packages/96/09/ecc4384a27fd6e1e4de824ae9c160e4ad3aaebdaade5b4bdcf56a4d1ff63/oracledb-3.0.0-cp311-cp311-win32.whl", hash = "sha256:6c27fe0de64f2652e949eb05b3baa94df9b981a4a45fa7f8a991e1afb450c8e2", size = 1752046 }, + { url = "https://files.pythonhosted.org/packages/62/e8/f34bde24050c6e55eeba46b23b2291f2dd7fd272fa8b322dcbe71be55778/oracledb-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:f922709672002f0b40997456f03a95f03e5712a86c61159951c5ce09334325e0", size = 2101210 }, + { url = "https://files.pythonhosted.org/packages/6f/fc/24590c3a3d41e58494bd3c3b447a62835138e5f9b243d9f8da0cfb5da8dc/oracledb-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:acd0e747227dea01bebe627b07e958bf36588a337539f24db629dc3431d3f7eb", size = 4351993 }, + { url = "https://files.pythonhosted.org/packages/b7/b6/1f3b0b7bb94d53e8857d77b2e8dbdf6da091dd7e377523e24b79dac4fd71/oracledb-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8b402f77c22af031cd0051aea2472ecd0635c1b452998f511aa08b7350c90a4", size = 2532640 }, + { url = "https://files.pythonhosted.org/packages/72/1a/1815f6c086ab49c00921cf155ff5eede5267fb29fcec37cb246339a5ce4d/oracledb-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:378a27782e9a37918bd07a5a1427a77cb6f777d0a5a8eac9c070d786f50120ef", size = 2765949 }, + { url = "https://files.pythonhosted.org/packages/33/8d/208900f8d372909792ee70b2daad3f7361181e55f2217c45ed9dff658b54/oracledb-3.0.0-cp312-cp312-win32.whl", hash = "sha256:54a28c2cb08316a527cd1467740a63771cc1c1164697c932aa834c0967dc4efc", size = 1709373 }, + { url = "https://files.pythonhosted.org/packages/0c/5e/c21754f19c896102793c3afec2277e2180aa7d505e4d7fcca24b52d14e4f/oracledb-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8289bad6d103ce42b140e40576cf0c81633e344d56e2d738b539341eacf65624", size = 2056452 }, ] [[package]] name = "orjson" version = "3.10.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/81/0b/fea456a3ffe74e70ba30e01ec183a9b26bec4d497f61dcfce1b601059c60/orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53", size = 5422810, upload-time = "2025-04-29T23:30:08.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/0b/fea456a3ffe74e70ba30e01ec183a9b26bec4d497f61dcfce1b601059c60/orjson-3.10.18.tar.gz", hash = "sha256:e8da3947d92123eda795b68228cafe2724815621fe35e8e320a9e9593a4bcd53", size = 5422810 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/97/c7/c54a948ce9a4278794f669a353551ce7db4ffb656c69a6e1f2264d563e50/orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8", size = 248929, upload-time = "2025-04-29T23:28:30.716Z" }, - { url = "https://files.pythonhosted.org/packages/9e/60/a9c674ef1dd8ab22b5b10f9300e7e70444d4e3cda4b8258d6c2488c32143/orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d", size = 133364, upload-time = "2025-04-29T23:28:32.392Z" }, - { url = "https://files.pythonhosted.org/packages/c1/4e/f7d1bdd983082216e414e6d7ef897b0c2957f99c545826c06f371d52337e/orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7", size = 136995, upload-time = "2025-04-29T23:28:34.024Z" }, - { url = "https://files.pythonhosted.org/packages/17/89/46b9181ba0ea251c9243b0c8ce29ff7c9796fa943806a9c8b02592fce8ea/orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a", size = 132894, upload-time = "2025-04-29T23:28:35.318Z" }, - { url = "https://files.pythonhosted.org/packages/ca/dd/7bce6fcc5b8c21aef59ba3c67f2166f0a1a9b0317dcca4a9d5bd7934ecfd/orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679", size = 137016, upload-time = "2025-04-29T23:28:36.674Z" }, - { url = "https://files.pythonhosted.org/packages/1c/4a/b8aea1c83af805dcd31c1f03c95aabb3e19a016b2a4645dd822c5686e94d/orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947", size = 138290, upload-time = "2025-04-29T23:28:38.3Z" }, - { url = "https://files.pythonhosted.org/packages/36/d6/7eb05c85d987b688707f45dcf83c91abc2251e0dd9fb4f7be96514f838b1/orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4", size = 142829, upload-time = "2025-04-29T23:28:39.657Z" }, - { url = "https://files.pythonhosted.org/packages/d2/78/ddd3ee7873f2b5f90f016bc04062713d567435c53ecc8783aab3a4d34915/orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334", size = 132805, upload-time = "2025-04-29T23:28:40.969Z" }, - { url = "https://files.pythonhosted.org/packages/8c/09/c8e047f73d2c5d21ead9c180203e111cddeffc0848d5f0f974e346e21c8e/orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17", size = 135008, upload-time = "2025-04-29T23:28:42.284Z" }, - { url = "https://files.pythonhosted.org/packages/0c/4b/dccbf5055ef8fb6eda542ab271955fc1f9bf0b941a058490293f8811122b/orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e", size = 413419, upload-time = "2025-04-29T23:28:43.673Z" }, - { url = "https://files.pythonhosted.org/packages/8a/f3/1eac0c5e2d6d6790bd2025ebfbefcbd37f0d097103d76f9b3f9302af5a17/orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b", size = 153292, 
upload-time = "2025-04-29T23:28:45.573Z" }, - { url = "https://files.pythonhosted.org/packages/1f/b4/ef0abf64c8f1fabf98791819ab502c2c8c1dc48b786646533a93637d8999/orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7", size = 137182, upload-time = "2025-04-29T23:28:47.229Z" }, - { url = "https://files.pythonhosted.org/packages/a9/a3/6ea878e7b4a0dc5c888d0370d7752dcb23f402747d10e2257478d69b5e63/orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1", size = 142695, upload-time = "2025-04-29T23:28:48.564Z" }, - { url = "https://files.pythonhosted.org/packages/79/2a/4048700a3233d562f0e90d5572a849baa18ae4e5ce4c3ba6247e4ece57b0/orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a", size = 134603, upload-time = "2025-04-29T23:28:50.442Z" }, - { url = "https://files.pythonhosted.org/packages/03/45/10d934535a4993d27e1c84f1810e79ccf8b1b7418cef12151a22fe9bb1e1/orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = "sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5", size = 131400, upload-time = "2025-04-29T23:28:51.838Z" }, - { url = "https://files.pythonhosted.org/packages/21/1a/67236da0916c1a192d5f4ccbe10ec495367a726996ceb7614eaa687112f2/orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753", size = 249184, upload-time = "2025-04-29T23:28:53.612Z" }, - { url = "https://files.pythonhosted.org/packages/b3/bc/c7f1db3b1d094dc0c6c83ed16b161a16c214aaa77f311118a93f647b32dc/orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17", size = 133279, upload-time = "2025-04-29T23:28:55.055Z" }, - { url = "https://files.pythonhosted.org/packages/af/84/664657cd14cc11f0d81e80e64766c7ba5c9b7fc1ec304117878cc1b4659c/orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d", size = 136799, upload-time = "2025-04-29T23:28:56.828Z" }, - { url = "https://files.pythonhosted.org/packages/9a/bb/f50039c5bb05a7ab024ed43ba25d0319e8722a0ac3babb0807e543349978/orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae", size = 132791, upload-time = "2025-04-29T23:28:58.751Z" }, - { url = "https://files.pythonhosted.org/packages/93/8c/ee74709fc072c3ee219784173ddfe46f699598a1723d9d49cbc78d66df65/orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f", size = 137059, upload-time = "2025-04-29T23:29:00.129Z" }, - { url = "https://files.pythonhosted.org/packages/6a/37/e6d3109ee004296c80426b5a62b47bcadd96a3deab7443e56507823588c5/orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c", size = 138359, upload-time = "2025-04-29T23:29:01.704Z" }, - { url = "https://files.pythonhosted.org/packages/4f/5d/387dafae0e4691857c62bd02839a3bf3fa648eebd26185adfac58d09f207/orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad", size = 142853, 
upload-time = "2025-04-29T23:29:03.576Z" }, - { url = "https://files.pythonhosted.org/packages/27/6f/875e8e282105350b9a5341c0222a13419758545ae32ad6e0fcf5f64d76aa/orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c", size = 133131, upload-time = "2025-04-29T23:29:05.753Z" }, - { url = "https://files.pythonhosted.org/packages/48/b2/73a1f0b4790dcb1e5a45f058f4f5dcadc8a85d90137b50d6bbc6afd0ae50/orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406", size = 134834, upload-time = "2025-04-29T23:29:07.35Z" }, - { url = "https://files.pythonhosted.org/packages/56/f5/7ed133a5525add9c14dbdf17d011dd82206ca6840811d32ac52a35935d19/orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6", size = 413368, upload-time = "2025-04-29T23:29:09.301Z" }, - { url = "https://files.pythonhosted.org/packages/11/7c/439654221ed9c3324bbac7bdf94cf06a971206b7b62327f11a52544e4982/orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06", size = 153359, upload-time = "2025-04-29T23:29:10.813Z" }, - { url = "https://files.pythonhosted.org/packages/48/e7/d58074fa0cc9dd29a8fa2a6c8d5deebdfd82c6cfef72b0e4277c4017563a/orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5", size = 137466, upload-time = "2025-04-29T23:29:12.26Z" }, - { url = "https://files.pythonhosted.org/packages/57/4d/fe17581cf81fb70dfcef44e966aa4003360e4194d15a3f38cbffe873333a/orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e", size = 142683, upload-time = "2025-04-29T23:29:13.865Z" }, - { url = "https://files.pythonhosted.org/packages/e6/22/469f62d25ab5f0f3aee256ea732e72dc3aab6d73bac777bd6277955bceef/orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc", size = 134754, upload-time = "2025-04-29T23:29:15.338Z" }, - { url = "https://files.pythonhosted.org/packages/10/b0/1040c447fac5b91bc1e9c004b69ee50abb0c1ffd0d24406e1350c58a7fcb/orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a", size = 131218, upload-time = "2025-04-29T23:29:17.324Z" }, + { url = "https://files.pythonhosted.org/packages/97/c7/c54a948ce9a4278794f669a353551ce7db4ffb656c69a6e1f2264d563e50/orjson-3.10.18-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e0a183ac3b8e40471e8d843105da6fbe7c070faab023be3b08188ee3f85719b8", size = 248929 }, + { url = "https://files.pythonhosted.org/packages/9e/60/a9c674ef1dd8ab22b5b10f9300e7e70444d4e3cda4b8258d6c2488c32143/orjson-3.10.18-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5ef7c164d9174362f85238d0cd4afdeeb89d9e523e4651add6a5d458d6f7d42d", size = 133364 }, + { url = "https://files.pythonhosted.org/packages/c1/4e/f7d1bdd983082216e414e6d7ef897b0c2957f99c545826c06f371d52337e/orjson-3.10.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd14c5d99cdc7bf93f22b12ec3b294931518aa019e2a147e8aa2f31fd3240f7", size = 136995 }, + { url = 
"https://files.pythonhosted.org/packages/17/89/46b9181ba0ea251c9243b0c8ce29ff7c9796fa943806a9c8b02592fce8ea/orjson-3.10.18-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b672502323b6cd133c4af6b79e3bea36bad2d16bca6c1f645903fce83909a7a", size = 132894 }, + { url = "https://files.pythonhosted.org/packages/ca/dd/7bce6fcc5b8c21aef59ba3c67f2166f0a1a9b0317dcca4a9d5bd7934ecfd/orjson-3.10.18-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51f8c63be6e070ec894c629186b1c0fe798662b8687f3d9fdfa5e401c6bd7679", size = 137016 }, + { url = "https://files.pythonhosted.org/packages/1c/4a/b8aea1c83af805dcd31c1f03c95aabb3e19a016b2a4645dd822c5686e94d/orjson-3.10.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f9478ade5313d724e0495d167083c6f3be0dd2f1c9c8a38db9a9e912cdaf947", size = 138290 }, + { url = "https://files.pythonhosted.org/packages/36/d6/7eb05c85d987b688707f45dcf83c91abc2251e0dd9fb4f7be96514f838b1/orjson-3.10.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:187aefa562300a9d382b4b4eb9694806e5848b0cedf52037bb5c228c61bb66d4", size = 142829 }, + { url = "https://files.pythonhosted.org/packages/d2/78/ddd3ee7873f2b5f90f016bc04062713d567435c53ecc8783aab3a4d34915/orjson-3.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da552683bc9da222379c7a01779bddd0ad39dd699dd6300abaf43eadee38334", size = 132805 }, + { url = "https://files.pythonhosted.org/packages/8c/09/c8e047f73d2c5d21ead9c180203e111cddeffc0848d5f0f974e346e21c8e/orjson-3.10.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e450885f7b47a0231979d9c49b567ed1c4e9f69240804621be87c40bc9d3cf17", size = 135008 }, + { url = "https://files.pythonhosted.org/packages/0c/4b/dccbf5055ef8fb6eda542ab271955fc1f9bf0b941a058490293f8811122b/orjson-3.10.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5e3c9cc2ba324187cd06287ca24f65528f16dfc80add48dc99fa6c836bb3137e", size = 413419 }, + { url = "https://files.pythonhosted.org/packages/8a/f3/1eac0c5e2d6d6790bd2025ebfbefcbd37f0d097103d76f9b3f9302af5a17/orjson-3.10.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:50ce016233ac4bfd843ac5471e232b865271d7d9d44cf9d33773bcd883ce442b", size = 153292 }, + { url = "https://files.pythonhosted.org/packages/1f/b4/ef0abf64c8f1fabf98791819ab502c2c8c1dc48b786646533a93637d8999/orjson-3.10.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b3ceff74a8f7ffde0b2785ca749fc4e80e4315c0fd887561144059fb1c138aa7", size = 137182 }, + { url = "https://files.pythonhosted.org/packages/a9/a3/6ea878e7b4a0dc5c888d0370d7752dcb23f402747d10e2257478d69b5e63/orjson-3.10.18-cp311-cp311-win32.whl", hash = "sha256:fdba703c722bd868c04702cac4cb8c6b8ff137af2623bc0ddb3b3e6a2c8996c1", size = 142695 }, + { url = "https://files.pythonhosted.org/packages/79/2a/4048700a3233d562f0e90d5572a849baa18ae4e5ce4c3ba6247e4ece57b0/orjson-3.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:c28082933c71ff4bc6ccc82a454a2bffcef6e1d7379756ca567c772e4fb3278a", size = 134603 }, + { url = "https://files.pythonhosted.org/packages/03/45/10d934535a4993d27e1c84f1810e79ccf8b1b7418cef12151a22fe9bb1e1/orjson-3.10.18-cp311-cp311-win_arm64.whl", hash = "sha256:a6c7c391beaedd3fa63206e5c2b7b554196f14debf1ec9deb54b5d279b1b46f5", size = 131400 }, + { url = "https://files.pythonhosted.org/packages/21/1a/67236da0916c1a192d5f4ccbe10ec495367a726996ceb7614eaa687112f2/orjson-3.10.18-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:50c15557afb7f6d63bc6d6348e0337a880a04eaa9cd7c9d569bcb4e760a24753", size = 249184 }, + { url = "https://files.pythonhosted.org/packages/b3/bc/c7f1db3b1d094dc0c6c83ed16b161a16c214aaa77f311118a93f647b32dc/orjson-3.10.18-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:356b076f1662c9813d5fa56db7d63ccceef4c271b1fb3dd522aca291375fcf17", size = 133279 }, + { url = "https://files.pythonhosted.org/packages/af/84/664657cd14cc11f0d81e80e64766c7ba5c9b7fc1ec304117878cc1b4659c/orjson-3.10.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:559eb40a70a7494cd5beab2d73657262a74a2c59aff2068fdba8f0424ec5b39d", size = 136799 }, + { url = "https://files.pythonhosted.org/packages/9a/bb/f50039c5bb05a7ab024ed43ba25d0319e8722a0ac3babb0807e543349978/orjson-3.10.18-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f3c29eb9a81e2fbc6fd7ddcfba3e101ba92eaff455b8d602bf7511088bbc0eae", size = 132791 }, + { url = "https://files.pythonhosted.org/packages/93/8c/ee74709fc072c3ee219784173ddfe46f699598a1723d9d49cbc78d66df65/orjson-3.10.18-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6612787e5b0756a171c7d81ba245ef63a3533a637c335aa7fcb8e665f4a0966f", size = 137059 }, + { url = "https://files.pythonhosted.org/packages/6a/37/e6d3109ee004296c80426b5a62b47bcadd96a3deab7443e56507823588c5/orjson-3.10.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ac6bd7be0dcab5b702c9d43d25e70eb456dfd2e119d512447468f6405b4a69c", size = 138359 }, + { url = "https://files.pythonhosted.org/packages/4f/5d/387dafae0e4691857c62bd02839a3bf3fa648eebd26185adfac58d09f207/orjson-3.10.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f72f100cee8dde70100406d5c1abba515a7df926d4ed81e20a9730c062fe9ad", size = 142853 }, + { url = "https://files.pythonhosted.org/packages/27/6f/875e8e282105350b9a5341c0222a13419758545ae32ad6e0fcf5f64d76aa/orjson-3.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dca85398d6d093dd41dc0983cbf54ab8e6afd1c547b6b8a311643917fbf4e0c", size = 133131 }, + { url = "https://files.pythonhosted.org/packages/48/b2/73a1f0b4790dcb1e5a45f058f4f5dcadc8a85d90137b50d6bbc6afd0ae50/orjson-3.10.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22748de2a07fcc8781a70edb887abf801bb6142e6236123ff93d12d92db3d406", size = 134834 }, + { url = "https://files.pythonhosted.org/packages/56/f5/7ed133a5525add9c14dbdf17d011dd82206ca6840811d32ac52a35935d19/orjson-3.10.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3a83c9954a4107b9acd10291b7f12a6b29e35e8d43a414799906ea10e75438e6", size = 413368 }, + { url = "https://files.pythonhosted.org/packages/11/7c/439654221ed9c3324bbac7bdf94cf06a971206b7b62327f11a52544e4982/orjson-3.10.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:303565c67a6c7b1f194c94632a4a39918e067bd6176a48bec697393865ce4f06", size = 153359 }, + { url = "https://files.pythonhosted.org/packages/48/e7/d58074fa0cc9dd29a8fa2a6c8d5deebdfd82c6cfef72b0e4277c4017563a/orjson-3.10.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:86314fdb5053a2f5a5d881f03fca0219bfdf832912aa88d18676a5175c6916b5", size = 137466 }, + { url = "https://files.pythonhosted.org/packages/57/4d/fe17581cf81fb70dfcef44e966aa4003360e4194d15a3f38cbffe873333a/orjson-3.10.18-cp312-cp312-win32.whl", hash = "sha256:187ec33bbec58c76dbd4066340067d9ece6e10067bb0cc074a21ae3300caa84e", size = 142683 }, + { url = 
"https://files.pythonhosted.org/packages/e6/22/469f62d25ab5f0f3aee256ea732e72dc3aab6d73bac777bd6277955bceef/orjson-3.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:f9f94cf6d3f9cd720d641f8399e390e7411487e493962213390d1ae45c7814fc", size = 134754 }, + { url = "https://files.pythonhosted.org/packages/10/b0/1040c447fac5b91bc1e9c004b69ee50abb0c1ffd0d24406e1350c58a7fcb/orjson-3.10.18-cp312-cp312-win_arm64.whl", hash = "sha256:3d600be83fe4514944500fa8c2a0a77099025ec6482e8087d7659e891f23058a", size = 131218 }, ] [[package]] @@ -3943,24 +3945,24 @@ dependencies = [ { name = "requests" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/ce/d23a9d44268dc992ae1a878d24341dddaea4de4ae374c261209bb6e9554b/oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011", size = 283388, upload-time = "2024-04-29T12:49:07.686Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/ce/d23a9d44268dc992ae1a878d24341dddaea4de4ae374c261209bb6e9554b/oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011", size = 283388 } [[package]] name = "overrides" version = "7.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } +sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 }, ] [[package]] name = "packaging" version = "23.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714, upload-time = "2023-10-01T13:50:05.279Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011, upload-time = "2023-10-01T13:50:03.745Z" }, + { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011 }, ] [[package]] @@ -3973,22 +3975,22 @@ 
dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222, upload-time = "2024-09-20T13:08:56.254Z" }, - { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274, upload-time = "2024-09-20T13:08:58.645Z" }, - { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836, upload-time = "2024-09-20T19:01:57.571Z" }, - { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505, upload-time = "2024-09-20T13:09:01.501Z" }, - { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420, upload-time = "2024-09-20T19:02:00.678Z" }, - { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457, upload-time = "2024-09-20T13:09:04.105Z" }, - { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166, upload-time = "2024-09-20T13:09:06.917Z" }, - { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893, upload-time = "2024-09-20T13:09:09.655Z" }, - { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475, upload-time = "2024-09-20T13:09:14.718Z" }, - { url = 
"https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645, upload-time = "2024-09-20T19:02:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445, upload-time = "2024-09-20T13:09:17.621Z" }, - { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235, upload-time = "2024-09-20T19:02:07.094Z" }, - { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756, upload-time = "2024-09-20T13:09:20.474Z" }, - { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248, upload-time = "2024-09-20T13:09:23.137Z" }, + { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222 }, + { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274 }, + { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836 }, + { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505 }, + { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420 }, + { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457 }, + { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 }, + { url = 
"https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, ] [package.optional-dependencies] @@ -4018,9 +4020,9 @@ dependencies = [ { name = "numpy" }, { name = "types-pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312, upload-time = "2025-05-27T15:24:29.716Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload-time = "2025-05-27T15:24:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683 }, ] [[package]] @@ -4031,15 +4033,15 @@ dependencies = [ { name = "plumbum" }, { name = "ply" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635, upload-time = "2024-08-07T14:33:58.016Z" } 
+sdist = { url = "https://files.pythonhosted.org/packages/10/9a/e3186e760c57ee5f1c27ea5cea577a0ff9abfca51eefcb4d9a4cd39aff2e/pandoc-2.4.tar.gz", hash = "sha256:ecd1f8cbb7f4180c6b5db4a17a7c1a74df519995f5f186ef81ce72a9cbd0dd9a", size = 34635 } [[package]] name = "pathspec" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, ] [[package]] @@ -4050,9 +4052,9 @@ dependencies = [ { name = "numpy" }, { name = "toml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/09/c0be8f54386367159fd22495635fba65ac6bbc436a34502bc2849d89f6ab/pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b", size = 28561, upload-time = "2024-10-08T02:01:15.678Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/09/c0be8f54386367159fd22495635fba65ac6bbc436a34502bc2849d89f6ab/pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b", size = 28561 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/dc/a39ceb4fe4b72f889228119b91e0ef7fcaaf9ec662ab19acdacb74cd5eaf/pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5", size = 30779, upload-time = "2024-10-08T02:01:14.669Z" }, + { url = "https://files.pythonhosted.org/packages/ba/dc/a39ceb4fe4b72f889228119b91e0ef7fcaaf9ec662ab19acdacb74cd5eaf/pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5", size = 30779 }, ] [package.optional-dependencies] @@ -4068,62 +4070,62 @@ dependencies = [ { name = "numpy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638, upload-time = "2024-02-07T19:35:03.8Z" }, + { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638 }, ] [[package]] name = "pillow" version = "11.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = 
"sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069, upload-time = "2025-07-01T09:16:30.666Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/d0d6dea55cd152ce3d6767bb38a8fc10e33796ba4ba210cbab9354b6d238/pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523", size = 47113069 } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531, upload-time = "2025-07-01T09:13:59.203Z" }, - { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560, upload-time = "2025-07-01T09:14:01.101Z" }, - { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978, upload-time = "2025-07-03T13:09:55.638Z" }, - { url = "https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168, upload-time = "2025-07-03T13:10:00.37Z" }, - { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053, upload-time = "2025-07-01T09:14:04.491Z" }, - { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273, upload-time = "2025-07-01T09:14:06.235Z" }, - { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043, upload-time = "2025-07-01T09:14:07.978Z" }, - { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516, upload-time = "2025-07-01T09:14:10.233Z" }, - { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768, upload-time = "2025-07-01T09:14:11.921Z" }, - { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055, 
upload-time = "2025-07-01T09:14:13.623Z" }, - { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079, upload-time = "2025-07-01T09:14:15.268Z" }, - { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800, upload-time = "2025-07-01T09:14:17.648Z" }, - { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296, upload-time = "2025-07-01T09:14:19.828Z" }, - { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726, upload-time = "2025-07-03T13:10:04.448Z" }, - { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652, upload-time = "2025-07-03T13:10:10.391Z" }, - { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787, upload-time = "2025-07-01T09:14:21.63Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236, upload-time = "2025-07-01T09:14:23.321Z" }, - { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950, upload-time = "2025-07-01T09:14:25.237Z" }, - { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358, upload-time = "2025-07-01T09:14:27.053Z" }, - { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079, upload-time = "2025-07-01T09:14:30.104Z" }, - { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324, upload-time = "2025-07-01T09:14:31.899Z" }, - { url = 
"https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067, upload-time = "2025-07-01T09:14:33.709Z" }, - { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566, upload-time = "2025-07-01T09:16:19.801Z" }, - { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618, upload-time = "2025-07-01T09:16:21.818Z" }, - { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248, upload-time = "2025-07-03T13:11:20.738Z" }, - { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963, upload-time = "2025-07-03T13:11:26.283Z" }, - { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170, upload-time = "2025-07-01T09:16:23.762Z" }, - { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505, upload-time = "2025-07-01T09:16:25.593Z" }, - { url = "https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598, upload-time = "2025-07-01T09:16:27.732Z" }, + { url = "https://files.pythonhosted.org/packages/db/26/77f8ed17ca4ffd60e1dcd220a6ec6d71210ba398cfa33a13a1cd614c5613/pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722", size = 5316531 }, + { url = "https://files.pythonhosted.org/packages/cb/39/ee475903197ce709322a17a866892efb560f57900d9af2e55f86db51b0a5/pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288", size = 4686560 }, + { url = "https://files.pythonhosted.org/packages/d5/90/442068a160fd179938ba55ec8c97050a612426fae5ec0a764e345839f76d/pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d", size = 5870978 }, + { url = 
"https://files.pythonhosted.org/packages/13/92/dcdd147ab02daf405387f0218dcf792dc6dd5b14d2573d40b4caeef01059/pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494", size = 7641168 }, + { url = "https://files.pythonhosted.org/packages/6e/db/839d6ba7fd38b51af641aa904e2960e7a5644d60ec754c046b7d2aee00e5/pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58", size = 5973053 }, + { url = "https://files.pythonhosted.org/packages/f2/2f/d7675ecae6c43e9f12aa8d58b6012683b20b6edfbdac7abcb4e6af7a3784/pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f", size = 6640273 }, + { url = "https://files.pythonhosted.org/packages/45/ad/931694675ede172e15b2ff03c8144a0ddaea1d87adb72bb07655eaffb654/pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e", size = 6082043 }, + { url = "https://files.pythonhosted.org/packages/3a/04/ba8f2b11fc80d2dd462d7abec16351b45ec99cbbaea4387648a44190351a/pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94", size = 6715516 }, + { url = "https://files.pythonhosted.org/packages/48/59/8cd06d7f3944cc7d892e8533c56b0acb68399f640786313275faec1e3b6f/pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0", size = 6274768 }, + { url = "https://files.pythonhosted.org/packages/f1/cc/29c0f5d64ab8eae20f3232da8f8571660aa0ab4b8f1331da5c2f5f9a938e/pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac", size = 6986055 }, + { url = "https://files.pythonhosted.org/packages/c6/df/90bd886fabd544c25addd63e5ca6932c86f2b701d5da6c7839387a076b4a/pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd", size = 2423079 }, + { url = "https://files.pythonhosted.org/packages/40/fe/1bc9b3ee13f68487a99ac9529968035cca2f0a51ec36892060edcc51d06a/pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4", size = 5278800 }, + { url = "https://files.pythonhosted.org/packages/2c/32/7e2ac19b5713657384cec55f89065fb306b06af008cfd87e572035b27119/pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69", size = 4686296 }, + { url = "https://files.pythonhosted.org/packages/8e/1e/b9e12bbe6e4c2220effebc09ea0923a07a6da1e1f1bfbc8d7d29a01ce32b/pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d", size = 5871726 }, + { url = "https://files.pythonhosted.org/packages/8d/33/e9200d2bd7ba00dc3ddb78df1198a6e80d7669cce6c2bdbeb2530a74ec58/pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6", size = 7644652 }, + { url = "https://files.pythonhosted.org/packages/41/f1/6f2427a26fc683e00d985bc391bdd76d8dd4e92fac33d841127eb8fb2313/pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7", size = 5977787 }, + { url = "https://files.pythonhosted.org/packages/e4/c9/06dd4a38974e24f932ff5f98ea3c546ce3f8c995d3f0985f8e5ba48bba19/pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024", size = 6645236 }, + { url = "https://files.pythonhosted.org/packages/40/e7/848f69fb79843b3d91241bad658e9c14f39a32f71a301bcd1d139416d1be/pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809", size = 6086950 }, + { url = "https://files.pythonhosted.org/packages/0b/1a/7cff92e695a2a29ac1958c2a0fe4c0b2393b60aac13b04a4fe2735cad52d/pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d", size = 6723358 }, + { url = "https://files.pythonhosted.org/packages/26/7d/73699ad77895f69edff76b0f332acc3d497f22f5d75e5360f78cbcaff248/pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149", size = 6275079 }, + { url = "https://files.pythonhosted.org/packages/8c/ce/e7dfc873bdd9828f3b6e5c2bbb74e47a98ec23cc5c74fc4e54462f0d9204/pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d", size = 6986324 }, + { url = "https://files.pythonhosted.org/packages/16/8f/b13447d1bf0b1f7467ce7d86f6e6edf66c0ad7cf44cf5c87a37f9bed9936/pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542", size = 2423067 }, + { url = "https://files.pythonhosted.org/packages/9e/e3/6fa84033758276fb31da12e5fb66ad747ae83b93c67af17f8c6ff4cc8f34/pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6", size = 5270566 }, + { url = "https://files.pythonhosted.org/packages/5b/ee/e8d2e1ab4892970b561e1ba96cbd59c0d28cf66737fc44abb2aec3795a4e/pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438", size = 4654618 }, + { url = "https://files.pythonhosted.org/packages/f2/6d/17f80f4e1f0761f02160fc433abd4109fa1548dcfdca46cfdadaf9efa565/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3", size = 4874248 }, + { url = "https://files.pythonhosted.org/packages/de/5f/c22340acd61cef960130585bbe2120e2fd8434c214802f07e8c03596b17e/pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c", size = 6583963 }, + { url = "https://files.pythonhosted.org/packages/31/5e/03966aedfbfcbb4d5f8aa042452d3361f325b963ebbadddac05b122e47dd/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361", size = 4957170 }, + { url = "https://files.pythonhosted.org/packages/cc/2d/e082982aacc927fc2cab48e1e731bdb1643a1406acace8bed0900a61464e/pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7", size = 5581505 }, + { url = 
"https://files.pythonhosted.org/packages/34/e7/ae39f538fd6844e982063c3a5e4598b8ced43b9633baa3a85ef33af8c05c/pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8", size = 6984598 }, ] [[package]] name = "platformdirs" version = "4.3.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567 }, ] [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, ] [[package]] @@ -4133,18 +4135,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083, upload-time = "2024-10-05T05:59:27.059Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/5d/49ba324ad4ae5b1a4caefafbce7a1648540129344481f2ed4ef6bb68d451/plumbum-1.9.0.tar.gz", hash = "sha256:e640062b72642c3873bd5bdc3effed75ba4d3c70ef6b6a7b907357a84d909219", size = 319083 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = 
"sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970, upload-time = "2024-10-05T05:59:25.102Z" }, + { url = "https://files.pythonhosted.org/packages/4f/9d/d03542c93bb3d448406731b80f39c3d5601282f778328c22c77d270f4ed4/plumbum-1.9.0-py3-none-any.whl", hash = "sha256:9fd0d3b0e8d86e4b581af36edf3f3bbe9d1ae15b45b8caab28de1bcb27aaa7f5", size = 127970 }, ] [[package]] name = "ply" version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" }, + { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567 }, ] [[package]] @@ -4154,23 +4156,23 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pywin32", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891, upload-time = "2024-07-13T23:15:34.86Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423, upload-time = "2024-07-13T23:15:32.602Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fb/a70a4214956182e0d7a9099ab17d50bfcba1056188e9b14f35b9e2b62a0d/portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf", size = 18423 }, ] [[package]] name = "postgrest" -version = "0.17.2" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "deprecation" }, { name = "httpx", extra = ["http2"] }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d4/4c/1053e2e2571e7f39eef8506db94dbe0a37630db97055228f8bdc2e53651c/postgrest-0.17.2.tar.gz", hash = "sha256:445cd4e4a191e279492549df0c4e827d32f9d01d0852599bb8a6efb0f07fcf78", size = 14604, upload-time = "2024-10-18T08:58:39.856Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/3e/1b50568e1f5db0bdced4a82c7887e37326585faef7ca43ead86849cb4861/postgrest-1.1.1.tar.gz", hash = 
"sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322", size = 15431 } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/21/3bdf4c51707f50f4a34839bf4431bad53aa603d303ada961dd9e3d943ecc/postgrest-0.17.2-py3-none-any.whl", hash = "sha256:f7c4f448e5a5e2d4c1dcf192edae9d1007c4261e9a6fb5116783a0046846ece2", size = 21669, upload-time = "2024-10-18T08:58:38.13Z" }, + { url = "https://files.pythonhosted.org/packages/a4/71/188a50ea64c17f73ff4df5196ec1553a8f1723421eb2d1069c73bab47d78/postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a", size = 22366 }, ] [[package]] @@ -4185,9 +4187,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/a2/1b68562124b0d0e615fa8431cc88c84b3db6526275c2c19a419579a49277/posthog-6.0.3.tar.gz", hash = "sha256:9005abb341af8fedd9d82ca0359b3d35a9537555cdc9881bfb469f7c0b4b0ec5", size = 91861, upload-time = "2025-07-07T07:14:08.21Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/a2/1b68562124b0d0e615fa8431cc88c84b3db6526275c2c19a419579a49277/posthog-6.0.3.tar.gz", hash = "sha256:9005abb341af8fedd9d82ca0359b3d35a9537555cdc9881bfb469f7c0b4b0ec5", size = 91861 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/f1/a8d86245d41c8686f7d828a4959bdf483e8ac331b249b48b8c61fc884a1c/posthog-6.0.3-py3-none-any.whl", hash = "sha256:4b808c907f3623216a9362d91fdafce8e2f57a8387fb3020475c62ec809be56d", size = 108978, upload-time = "2025-07-07T07:14:06.451Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f1/a8d86245d41c8686f7d828a4959bdf483e8ac331b249b48b8c61fc884a1c/posthog-6.0.3-py3-none-any.whl", hash = "sha256:4b808c907f3623216a9362d91fdafce8e2f57a8387fb3020475c62ec809be56d", size = 108978 }, ] [[package]] @@ -4197,50 +4199,50 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940, upload-time = "2025-04-15T09:18:47.731Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/6e/9d084c929dfe9e3bfe0c6a47e31f78a25c54627d64a66e884a8bf5474f1c/prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed", size = 428940 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810 }, ] [[package]] name = "propcache" version = "0.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139 } wheels = [ - { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, - { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, - { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, - { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, - { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, - { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, - { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, - { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, - { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, - { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, - { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, - { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, - { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, - { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, - { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, - { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, - { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, - { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, - { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, - { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, - { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, - { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, - { url = 
"https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207 }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648 }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496 }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288 }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456 }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429 }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472 }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480 }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530 }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230 }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754 }, + { url = 
"https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430 }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884 }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480 }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757 }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500 }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674 }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570 }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094 }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958 }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894 }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672 }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395 }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510 }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949 }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258 }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036 }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684 }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562 }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142 }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711 }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479 }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663 }, ] [[package]] @@ -4250,94 +4252,94 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163 }, ] [[package]] name = "protobuf" version = "4.25.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920, upload-time = "2025-05-28T14:22:25.153Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920 } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745, upload-time = "2025-05-28T14:22:10.524Z" }, - { url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = "sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736, upload-time = "2025-05-28T14:22:13.156Z" }, - { url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537, upload-time = "2025-05-28T14:22:14.768Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005, upload-time = "2025-05-28T14:22:16.052Z" }, - { url = "https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924, upload-time = "2025-05-28T14:22:17.105Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757, upload-time = "2025-05-28T14:22:24.135Z" }, + { url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745 }, + { url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = "sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736 }, + { url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537 }, + { url = 
"https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005 }, + { url = "https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924 }, + { url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757 }, ] [[package]] name = "psutil" version = "7.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003, upload-time = "2025-02-13T21:54:07.946Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051, upload-time = "2025-02-13T21:54:12.36Z" }, - { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535, upload-time = "2025-02-13T21:54:16.07Z" }, - { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004, upload-time = "2025-02-13T21:54:18.662Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986, upload-time = "2025-02-13T21:54:21.811Z" }, - { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544, upload-time = "2025-02-13T21:54:24.68Z" }, - { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053, upload-time = "2025-02-13T21:54:34.31Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, ] [[package]] name = "psycogreen" version = "1.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006ad74cdc72cd6572c789bc4f4e3fadc78672f1fbcfbd/psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d", size = 5411, upload-time = "2020-02-22T19:55:22.02Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/72/4a7965cf54e341006ad74cdc72cd6572c789bc4f4e3fadc78672f1fbcfbd/psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d", size = 5411 } [[package]] name = "psycopg2-binary" version = "2.9.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", 
size = 385764 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, - { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, - { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, - { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, - { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, - { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, - { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, - { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, - { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = 
"sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, - { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, - { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, - { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, - { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, - { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, - { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, - { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, - { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397 }, + { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806 }, + { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370 }, + { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780 }, + { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583 }, + { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831 }, + { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822 }, + { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975 }, + { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320 }, + { url = 
"https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617 }, + { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618 }, + { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816 }, + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 }, + { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 }, + { url = 
"https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 }, + { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 }, ] [[package]] name = "py" version = "1.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796, upload-time = "2021-11-04T17:17:01.377Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708, upload-time = "2021-11-04T17:17:00.152Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708 }, ] [[package]] name = "py-cpuinfo" version = "9.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335 }, ] [[package]] @@ -4347,31 +4349,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d7/8b/d18b7eb6fb22e5ed6ffcbc073c85dae635778dbd1270a6cf5d750b031e84/pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025", size = 1063645, 
upload-time = "2023-12-18T15:43:41.625Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/8b/d18b7eb6fb22e5ed6ffcbc073c85dae635778dbd1270a6cf5d750b031e84/pyarrow-14.0.2.tar.gz", hash = "sha256:36cef6ba12b499d864d1def3e990f97949e0b79400d08b7cf74504ffbd3eb025", size = 1063645 } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/8a/411ef0b05483076b7f548c74ccaa0f90c1e60d3875db71a821f6ffa8cf42/pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b", size = 26904455, upload-time = "2023-12-18T15:40:43.477Z" }, - { url = "https://files.pythonhosted.org/packages/6c/6c/882a57798877e3a49ba54d8e0540bea24aed78fb42e1d860f08c3449c75e/pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23", size = 23997116, upload-time = "2023-12-18T15:40:48.533Z" }, - { url = "https://files.pythonhosted.org/packages/ec/3f/ef47fe6192ce4d82803a073db449b5292135406c364a7fc49dfbcd34c987/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200", size = 35944575, upload-time = "2023-12-18T15:40:55.128Z" }, - { url = "https://files.pythonhosted.org/packages/1a/90/2021e529d7f234a3909f419d4341d53382541ef77d957fa274a99c533b18/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696", size = 38079719, upload-time = "2023-12-18T15:41:02.565Z" }, - { url = "https://files.pythonhosted.org/packages/30/a9/474caf5fd54a6d5315aaf9284c6e8f5d071ca825325ad64c53137b646e1f/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a", size = 35429706, upload-time = "2023-12-18T15:41:09.955Z" }, - { url = "https://files.pythonhosted.org/packages/d9/f8/cfba56f5353e51c19b0c240380ce39483f4c76e5c4aee5a000f3d75b72da/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02", size = 38001476, upload-time = "2023-12-18T15:41:16.372Z" }, - { url = "https://files.pythonhosted.org/packages/43/3f/7bdf7dc3b3b0cfdcc60760e7880954ba99ccd0bc1e0df806f3dd61bc01cd/pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b", size = 24576230, upload-time = "2023-12-18T15:41:22.561Z" }, - { url = "https://files.pythonhosted.org/packages/69/5b/d8ab6c20c43b598228710e4e4a6cba03a01f6faa3d08afff9ce76fd0fd47/pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944", size = 26819585, upload-time = "2023-12-18T15:41:27.59Z" }, - { url = "https://files.pythonhosted.org/packages/2d/29/bed2643d0dd5e9570405244a61f6db66c7f4704a6e9ce313f84fa5a3675a/pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5", size = 23965222, upload-time = "2023-12-18T15:41:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/2a/34/da464632e59a8cdd083370d69e6c14eae30221acb284f671c6bc9273fadd/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422", size = 35942036, upload-time = "2023-12-18T15:41:38.767Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/ff/cbed4836d543b29f00d2355af67575c934999ff1d43e3f438ab0b1b394f1/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07", size = 38089266, upload-time = "2023-12-18T15:41:47.617Z" }, - { url = "https://files.pythonhosted.org/packages/38/41/345011cb831d3dbb2dab762fc244c745a5df94b199223a99af52a5f7dff6/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591", size = 35404468, upload-time = "2023-12-18T15:41:54.49Z" }, - { url = "https://files.pythonhosted.org/packages/fd/af/2fc23ca2068ff02068d8dabf0fb85b6185df40ec825973470e613dbd8790/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379", size = 38003134, upload-time = "2023-12-18T15:42:01.593Z" }, - { url = "https://files.pythonhosted.org/packages/95/1f/9d912f66a87e3864f694e000977a6a70a644ea560289eac1d733983f215d/pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d", size = 25043754, upload-time = "2023-12-18T15:42:07.108Z" }, + { url = "https://files.pythonhosted.org/packages/94/8a/411ef0b05483076b7f548c74ccaa0f90c1e60d3875db71a821f6ffa8cf42/pyarrow-14.0.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:87482af32e5a0c0cce2d12eb3c039dd1d853bd905b04f3f953f147c7a196915b", size = 26904455 }, + { url = "https://files.pythonhosted.org/packages/6c/6c/882a57798877e3a49ba54d8e0540bea24aed78fb42e1d860f08c3449c75e/pyarrow-14.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:059bd8f12a70519e46cd64e1ba40e97eae55e0cbe1695edd95384653d7626b23", size = 23997116 }, + { url = "https://files.pythonhosted.org/packages/ec/3f/ef47fe6192ce4d82803a073db449b5292135406c364a7fc49dfbcd34c987/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f16111f9ab27e60b391c5f6d197510e3ad6654e73857b4e394861fc79c37200", size = 35944575 }, + { url = "https://files.pythonhosted.org/packages/1a/90/2021e529d7f234a3909f419d4341d53382541ef77d957fa274a99c533b18/pyarrow-14.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ff1264fe4448e8d02073f5ce45a9f934c0f3db0a04460d0b01ff28befc3696", size = 38079719 }, + { url = "https://files.pythonhosted.org/packages/30/a9/474caf5fd54a6d5315aaf9284c6e8f5d071ca825325ad64c53137b646e1f/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd4f4b472ccf4042f1eab77e6c8bce574543f54d2135c7e396f413046397d5a", size = 35429706 }, + { url = "https://files.pythonhosted.org/packages/d9/f8/cfba56f5353e51c19b0c240380ce39483f4c76e5c4aee5a000f3d75b72da/pyarrow-14.0.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:32356bfb58b36059773f49e4e214996888eeea3a08893e7dbde44753799b2a02", size = 38001476 }, + { url = "https://files.pythonhosted.org/packages/43/3f/7bdf7dc3b3b0cfdcc60760e7880954ba99ccd0bc1e0df806f3dd61bc01cd/pyarrow-14.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:52809ee69d4dbf2241c0e4366d949ba035cbcf48409bf404f071f624ed313a2b", size = 24576230 }, + { url = "https://files.pythonhosted.org/packages/69/5b/d8ab6c20c43b598228710e4e4a6cba03a01f6faa3d08afff9ce76fd0fd47/pyarrow-14.0.2-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:c87824a5ac52be210d32906c715f4ed7053d0180c1060ae3ff9b7e560f53f944", size = 26819585 }, + { url = 
"https://files.pythonhosted.org/packages/2d/29/bed2643d0dd5e9570405244a61f6db66c7f4704a6e9ce313f84fa5a3675a/pyarrow-14.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a25eb2421a58e861f6ca91f43339d215476f4fe159eca603c55950c14f378cc5", size = 23965222 }, + { url = "https://files.pythonhosted.org/packages/2a/34/da464632e59a8cdd083370d69e6c14eae30221acb284f671c6bc9273fadd/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c1da70d668af5620b8ba0a23f229030a4cd6c5f24a616a146f30d2386fec422", size = 35942036 }, + { url = "https://files.pythonhosted.org/packages/a8/ff/cbed4836d543b29f00d2355af67575c934999ff1d43e3f438ab0b1b394f1/pyarrow-14.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cc61593c8e66194c7cdfae594503e91b926a228fba40b5cf25cc593563bcd07", size = 38089266 }, + { url = "https://files.pythonhosted.org/packages/38/41/345011cb831d3dbb2dab762fc244c745a5df94b199223a99af52a5f7dff6/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:78ea56f62fb7c0ae8ecb9afdd7893e3a7dbeb0b04106f5c08dbb23f9c0157591", size = 35404468 }, + { url = "https://files.pythonhosted.org/packages/fd/af/2fc23ca2068ff02068d8dabf0fb85b6185df40ec825973470e613dbd8790/pyarrow-14.0.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:37c233ddbce0c67a76c0985612fef27c0c92aef9413cf5aa56952f359fcb7379", size = 38003134 }, + { url = "https://files.pythonhosted.org/packages/95/1f/9d912f66a87e3864f694e000977a6a70a644ea560289eac1d733983f215d/pyarrow-14.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:e4b123ad0f6add92de898214d404e488167b87b5dd86e9a434126bc2b7a5578d", size = 25043754 }, ] [[package]] name = "pyasn1" version = "0.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, ] [[package]] @@ -4381,36 +4383,38 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259 }, ] [[package]] name = "pycparser" version = "2.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, ] [[package]] name = "pycryptodome" version = "3.19.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/38/42a8855ff1bf568c61ca6557e2203f318fb7afeadaf2eb8ecfdbde107151/pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776", size = 4782144, upload-time = "2023-12-28T06:52:40.741Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/38/42a8855ff1bf568c61ca6557e2203f318fb7afeadaf2eb8ecfdbde107151/pycryptodome-3.19.1.tar.gz", hash = "sha256:8ae0dd1bcfada451c35f9e29a3e5db385caabc190f98e4a80ad02a61098fb776", size = 4782144 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/ef/4931bc30674f0de0ca0e827b58c8b0c17313a8eae2754976c610b866118b/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297", size = 2417027, upload-time = "2023-12-28T06:51:50.138Z" }, - { url = "https://files.pythonhosted.org/packages/67/e6/238c53267fd8d223029c0a0d3730cb1b6594d60f62e40c4184703dc490b1/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180", size = 1579728, upload-time = "2023-12-28T06:51:52.385Z" }, - { url = "https://files.pythonhosted.org/packages/7c/87/7181c42c8d5ba89822a4b824830506d0aeec02959bb893614767e3279846/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766", size = 2051440, upload-time = "2023-12-28T06:51:55.751Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/dd/332c4c0055527d17dac317ed9f9c864fc047b627d82f4b9a56c110afc6fc/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065", size = 2125379, upload-time = "2023-12-28T06:51:58.567Z" }, - { url = "https://files.pythonhosted.org/packages/24/9e/320b885ea336c218ff54ec2b276cd70ba6904e4f5a14a771ed39a2c47d59/pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700", size = 2153951, upload-time = "2023-12-28T06:52:01.699Z" }, - { url = "https://files.pythonhosted.org/packages/f4/54/8ae0c43d1257b41bc9d3277c3f875174fd8ad86b9567f0b8609b99c938ee/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96", size = 2044041, upload-time = "2023-12-28T06:52:03.737Z" }, - { url = "https://files.pythonhosted.org/packages/45/93/f8450a92cc38541c3ba1f4cb4e267e15ae6d6678ca617476d52c3a3764d4/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17", size = 2182446, upload-time = "2023-12-28T06:52:05.588Z" }, - { url = "https://files.pythonhosted.org/packages/af/cd/ed6e429fb0792ce368f66e83246264dd3a7a045b0b1e63043ed22a063ce5/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2", size = 2144914, upload-time = "2023-12-28T06:52:07.44Z" }, - { url = "https://files.pythonhosted.org/packages/f6/23/b064bd4cfbf2cc5f25afcde0e7c880df5b20798172793137ba4b62d82e72/pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03", size = 1713105, upload-time = "2023-12-28T06:52:09.585Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e0/ded1968a5257ab34216a0f8db7433897a2337d59e6d03be113713b346ea2/pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7", size = 1749222, upload-time = "2023-12-28T06:52:11.534Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/4931bc30674f0de0ca0e827b58c8b0c17313a8eae2754976c610b866118b/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:67939a3adbe637281c611596e44500ff309d547e932c449337649921b17b6297", size = 2417027 }, + { url = "https://files.pythonhosted.org/packages/67/e6/238c53267fd8d223029c0a0d3730cb1b6594d60f62e40c4184703dc490b1/pycryptodome-3.19.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:11ddf6c9b52116b62223b6a9f4741bc4f62bb265392a4463282f7f34bb287180", size = 1579728 }, + { url = "https://files.pythonhosted.org/packages/7c/87/7181c42c8d5ba89822a4b824830506d0aeec02959bb893614767e3279846/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3e6f89480616781d2a7f981472d0cdb09b9da9e8196f43c1234eff45c915766", size = 2051440 }, + { url = "https://files.pythonhosted.org/packages/34/dd/332c4c0055527d17dac317ed9f9c864fc047b627d82f4b9a56c110afc6fc/pycryptodome-3.19.1-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e1efcb68993b7ce5d1d047a46a601d41281bba9f1971e6be4aa27c69ab8065", size = 2125379 }, + { url = 
"https://files.pythonhosted.org/packages/24/9e/320b885ea336c218ff54ec2b276cd70ba6904e4f5a14a771ed39a2c47d59/pycryptodome-3.19.1-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c6273ca5a03b672e504995529b8bae56da0ebb691d8ef141c4aa68f60765700", size = 2153951 }, + { url = "https://files.pythonhosted.org/packages/f4/54/8ae0c43d1257b41bc9d3277c3f875174fd8ad86b9567f0b8609b99c938ee/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b0bfe61506795877ff974f994397f0c862d037f6f1c0bfc3572195fc00833b96", size = 2044041 }, + { url = "https://files.pythonhosted.org/packages/45/93/f8450a92cc38541c3ba1f4cb4e267e15ae6d6678ca617476d52c3a3764d4/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:f34976c5c8eb79e14c7d970fb097482835be8d410a4220f86260695ede4c3e17", size = 2182446 }, + { url = "https://files.pythonhosted.org/packages/af/cd/ed6e429fb0792ce368f66e83246264dd3a7a045b0b1e63043ed22a063ce5/pycryptodome-3.19.1-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7c9e222d0976f68d0cf6409cfea896676ddc1d98485d601e9508f90f60e2b0a2", size = 2144914 }, + { url = "https://files.pythonhosted.org/packages/f6/23/b064bd4cfbf2cc5f25afcde0e7c880df5b20798172793137ba4b62d82e72/pycryptodome-3.19.1-cp35-abi3-win32.whl", hash = "sha256:4805e053571140cb37cf153b5c72cd324bb1e3e837cbe590a19f69b6cf85fd03", size = 1713105 }, + { url = "https://files.pythonhosted.org/packages/7d/e0/ded1968a5257ab34216a0f8db7433897a2337d59e6d03be113713b346ea2/pycryptodome-3.19.1-cp35-abi3-win_amd64.whl", hash = "sha256:a470237ee71a1efd63f9becebc0ad84b88ec28e6784a2047684b693f458f41b7", size = 1749222 }, + { url = "https://files.pythonhosted.org/packages/1d/e3/0c9679cd66cf5604b1f070bdf4525a0c01a15187be287d8348b2eafb718e/pycryptodome-3.19.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:ed932eb6c2b1c4391e166e1a562c9d2f020bfff44a0e1b108f67af38b390ea89", size = 1629005 }, + { url = "https://files.pythonhosted.org/packages/13/75/0d63bf0daafd0580b17202d8a9dd57f28c8487f26146b3e2799b0c5a059c/pycryptodome-3.19.1-pp27-pypy_73-win32.whl", hash = "sha256:81e9d23c0316fc1b45d984a44881b220062336bbdc340aa9218e8d0656587934", size = 1697997 }, ] [[package]] @@ -4423,9 +4427,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782 }, ] [[package]] @@ -4435,45 +4439,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = 
"2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, 
upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 
1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584 }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071 }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823 }, + { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792 }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338 }, + { url = 
"https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998 }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200 }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890 }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359 }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883 }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074 }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538 }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909 }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786 }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, + { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, + { url = 
"https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, + { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894 }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 }, + { url = 
"https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 }, + { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 }, ] [[package]] @@ -4484,9 +4488,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/ba/4178111ec4116c54e1dc7ecd2a1ff8f54256cdbd250e576882911e8f710a/pydantic_extra_types-2.10.5.tar.gz", hash = "sha256:1dcfa2c0cf741a422f088e0dbb4690e7bfadaaf050da3d6f80d6c3cf58a2bad8", size = 138429, upload-time = "2025-06-02T09:31:52.713Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/ba/4178111ec4116c54e1dc7ecd2a1ff8f54256cdbd250e576882911e8f710a/pydantic_extra_types-2.10.5.tar.gz", hash = "sha256:1dcfa2c0cf741a422f088e0dbb4690e7bfadaaf050da3d6f80d6c3cf58a2bad8", size = 138429 } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/1a/5f4fd9e7285f10c44095a4f9fe17d0f358d1702a7c74a9278c794e8a7537/pydantic_extra_types-2.10.5-py3-none-any.whl", hash = "sha256:b60c4e23d573a69a4f1a16dd92888ecc0ef34fb0e655b4f305530377fa70e7a8", size = 38315, upload-time = "2025-06-02T09:31:51.229Z" }, + { url = "https://files.pythonhosted.org/packages/70/1a/5f4fd9e7285f10c44095a4f9fe17d0f358d1702a7c74a9278c794e8a7537/pydantic_extra_types-2.10.5-py3-none-any.whl", hash = "sha256:b60c4e23d573a69a4f1a16dd92888ecc0ef34fb0e655b4f305530377fa70e7a8", 
size = 38315 }, ] [[package]] @@ -4498,27 +4502,27 @@ dependencies = [ { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" } +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356 }, ] [[package]] name = "pygments" version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, ] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/72/8259b2bccfe4673330cea843ab23f86858a419d8f1493f66d413a76c7e3b/PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de", size = 78313, upload-time = "2023-07-18T20:02:22.594Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/4f/e04a8067c7c96c364cef7ef73906504e2f40d690811c021e1a1901473a19/PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320", size = 22591, upload-time = "2023-07-18T20:02:21.561Z" }, + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", 
hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, ] [package.optional-dependencies] @@ -4539,9 +4543,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fa/53/4af820a37163225a76656222ee43a0eb8f1bd2ceec063315680a585435da/pymilvus-2.5.12.tar.gz", hash = "sha256:79ec7dc0616c2484f77abe98bca8deafb613645b5703c492b51961afd4f985d8", size = 1265893, upload-time = "2025-07-02T15:34:00.385Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/53/4af820a37163225a76656222ee43a0eb8f1bd2ceec063315680a585435da/pymilvus-2.5.12.tar.gz", hash = "sha256:79ec7dc0616c2484f77abe98bca8deafb613645b5703c492b51961afd4f985d8", size = 1265893 } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/4f/80a4940f2772d10272c3292444af767a5aa1a5bbb631874568713ca01d54/pymilvus-2.5.12-py3-none-any.whl", hash = "sha256:ef77a4a0076469a30b05f0bb23b5a058acfbdca83d82af9574ca651764017f44", size = 231425, upload-time = "2025-07-02T15:33:58.938Z" }, + { url = "https://files.pythonhosted.org/packages/68/4f/80a4940f2772d10272c3292444af767a5aa1a5bbb631874568713ca01d54/pymilvus-2.5.12-py3-none-any.whl", hash = "sha256:ef77a4a0076469a30b05f0bb23b5a058acfbdca83d82af9574ca651764017f44", size = 231425 }, ] [[package]] @@ -4553,104 +4557,106 @@ dependencies = [ { name = "orjson" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/da/3027eeeaf7a7db9b0ca761079de4e676a002e1cc2c4260dab0ce812972b8/pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba", size = 30800, upload-time = "2024-09-11T12:06:37.88Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/da/3027eeeaf7a7db9b0ca761079de4e676a002e1cc2c4260dab0ce812972b8/pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba", size = 30800 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/74/4b6227717f6baa37e7288f53e0fd55764939abc4119342eed4924a98f477/pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327", size = 42697, upload-time = "2024-09-11T12:06:36.114Z" }, + { url = "https://files.pythonhosted.org/packages/6b/74/4b6227717f6baa37e7288f53e0fd55764939abc4119342eed4924a98f477/pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327", size = 42697 }, ] [[package]] name = "pymysql" version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/ce59b5e5ed4ce8512f879ff1fa5ab699d211ae2495f1adaa5fbba2a1eada/pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0", size = 47678, upload-time = "2024-05-21T11:03:43.722Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/ce59b5e5ed4ce8512f879ff1fa5ab699d211ae2495f1adaa5fbba2a1eada/pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0", size = 47678 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/94/e4181a1f6286f545507528c78016e00065ea913276888db2262507693ce5/PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", size = 44972, upload-time = "2024-05-21T11:03:41.216Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/94/e4181a1f6286f545507528c78016e00065ea913276888db2262507693ce5/PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", size = 44972 }, ] [[package]] name = "pyobvector" -version = "0.1.14" +version = "0.2.15" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiomysql" }, { name = "numpy" }, + { name = "pydantic" }, { name = "pymysql" }, { name = "sqlalchemy" }, + { name = "sqlglot" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/59/7d762061808948dd6aad165a000b34e22163dc83fb5014184eeacc0fabe5/pyobvector-0.1.14.tar.gz", hash = "sha256:4f85cdd63064d040e94c0a96099a0cd5cda18ce625865382e89429f28422fc02", size = 26780, upload-time = "2024-11-20T11:46:18.017Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/7d/3f3aac6acf1fdd1782042d6eecd48efaa2ee355af0dbb61e93292d629391/pyobvector-0.2.15.tar.gz", hash = "sha256:5de258c1e952c88b385b5661e130c1cf8262c498c1f8a4a348a35962d379fce4", size = 39611 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/68/ecb21b74c974e7be7f9034e205d08db62d614ff5c221581ae96d37ef853e/pyobvector-0.1.14-py3-none-any.whl", hash = "sha256:828e0bec49a177355b70c7a1270af3b0bf5239200ee0d096e4165b267eeff97c", size = 35526, upload-time = "2024-11-20T11:46:16.809Z" }, + { url = "https://files.pythonhosted.org/packages/5f/1f/a62754ba9b8a02c038d2a96cb641b71d3809f34d2ba4f921fecd7840d7fb/pyobvector-0.2.15-py3-none-any.whl", hash = "sha256:feeefe849ee5400e72a9a4d3844e425a58a99053dd02abe06884206923065ebb", size = 52680 }, ] [[package]] name = "pypandoc" version = "1.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/88/26e650d053df5f3874aa3c05901a14166ce3271f58bfe114fd776987efbd/pypandoc-1.15.tar.gz", hash = "sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13", size = 32940, upload-time = "2025-01-08T17:39:58.705Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/26e650d053df5f3874aa3c05901a14166ce3271f58bfe114fd776987efbd/pypandoc-1.15.tar.gz", hash = "sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13", size = 32940 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/06/0763e0ccc81754d3eadb21b2cb86cf21bdedc9b52698c2ad6785db7f0a4e/pypandoc-1.15-py3-none-any.whl", hash = "sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16", size = 21321, upload-time = "2025-01-08T17:39:09.928Z" }, + { url = "https://files.pythonhosted.org/packages/61/06/0763e0ccc81754d3eadb21b2cb86cf21bdedc9b52698c2ad6785db7f0a4e/pypandoc-1.15-py3-none-any.whl", hash = "sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16", size = 21321 }, ] [[package]] name = "pyparsing" version = "3.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, + { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120 }, ] [[package]] name = "pypdf" -version = "5.7.0" +version = "6.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/42/fbc37af367b20fa6c53da81b1780025f6046a0fac8cbf0663a17e743b033/pypdf-5.7.0.tar.gz", hash = "sha256:68c92f2e1aae878bab1150e74447f31ab3848b1c0a6f8becae9f0b1904460b6f", size = 5026120, upload-time = "2025-06-29T08:49:48.305Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/ac/a300a03c3b34967c050677ccb16e7a4b65607ee5df9d51e8b6d713de4098/pypdf-6.0.0.tar.gz", hash = "sha256:282a99d2cc94a84a3a3159f0d9358c0af53f85b4d28d76ea38b96e9e5ac2a08d", size = 5033827 } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/9f/78d096ef795a813fa0e1cb9b33fa574b205f2b563d9c1e9366c854cf0364/pypdf-5.7.0-py3-none-any.whl", hash = "sha256:203379453439f5b68b7a1cd43cdf4c5f7a02b84810cefa7f93a47b350aaaba48", size = 305524, upload-time = "2025-06-29T08:49:46.16Z" }, + { url = "https://files.pythonhosted.org/packages/2c/83/2cacc506eb322bb31b747bc06ccb82cc9aa03e19ee9c1245e538e49d52be/pypdf-6.0.0-py3-none-any.whl", hash = "sha256:56ea60100ce9f11fc3eec4f359da15e9aec3821b036c1f06d2b660d35683abb8", size = 310465 }, ] [[package]] name = "pypdfium2" version = "4.30.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239, upload-time = "2024-05-09T18:33:17.552Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/14/838b3ba247a0ba92e4df5d23f2bea9478edcfd72b78a39d6ca36ccd84ad2/pypdfium2-4.30.0.tar.gz", hash = "sha256:48b5b7e5566665bc1015b9d69c1ebabe21f6aee468b509531c3c8318eeee2e16", size = 140239 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254, upload-time = "2024-05-09T18:32:48.653Z" }, - { url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624, upload-time = "2024-05-09T18:32:51.458Z" }, - { url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126, upload-time = "2024-05-09T18:32:53.581Z" }, - { url = "https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077, upload-time = "2024-05-09T18:32:55.99Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431, upload-time = "2024-05-09T18:32:57.911Z" }, - { url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008, upload-time = "2024-05-09T18:32:59.886Z" }, - { url = "https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543, upload-time = "2024-05-09T18:33:02.597Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911, upload-time = "2024-05-09T18:33:05.376Z" }, - { url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430, upload-time = "2024-05-09T18:33:08.067Z" }, - { url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951, upload-time = "2024-05-09T18:33:10.567Z" }, - { url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = "sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098, upload-time = "2024-05-09T18:33:13.107Z" }, - { url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118, upload-time = "2024-05-09T18:33:15.489Z" }, + { url = "https://files.pythonhosted.org/packages/c7/9a/c8ff5cc352c1b60b0b97642ae734f51edbab6e28b45b4fcdfe5306ee3c83/pypdfium2-4.30.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:b33ceded0b6ff5b2b93bc1fe0ad4b71aa6b7e7bd5875f1ca0cdfb6ba6ac01aab", size = 2837254 }, + { url = "https://files.pythonhosted.org/packages/21/8b/27d4d5409f3c76b985f4ee4afe147b606594411e15ac4dc1c3363c9a9810/pypdfium2-4.30.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4e55689f4b06e2d2406203e771f78789bd4f190731b5d57383d05cf611d829de", size = 2707624 }, + { url = "https://files.pythonhosted.org/packages/11/63/28a73ca17c24b41a205d658e177d68e198d7dde65a8c99c821d231b6ee3d/pypdfium2-4.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6e50f5ce7f65a40a33d7c9edc39f23140c57e37144c2d6d9e9262a2a854854", size = 2793126 }, + { url = 
"https://files.pythonhosted.org/packages/d1/96/53b3ebf0955edbd02ac6da16a818ecc65c939e98fdeb4e0958362bd385c8/pypdfium2-4.30.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3d0dd3ecaffd0b6dbda3da663220e705cb563918249bda26058c6036752ba3a2", size = 2591077 }, + { url = "https://files.pythonhosted.org/packages/ec/ee/0394e56e7cab8b5b21f744d988400948ef71a9a892cbeb0b200d324ab2c7/pypdfium2-4.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc3bf29b0db8c76cdfaac1ec1cde8edf211a7de7390fbf8934ad2aa9b4d6dfad", size = 2864431 }, + { url = "https://files.pythonhosted.org/packages/65/cd/3f1edf20a0ef4a212a5e20a5900e64942c5a374473671ac0780eaa08ea80/pypdfium2-4.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1f78d2189e0ddf9ac2b7a9b9bd4f0c66f54d1389ff6c17e9fd9dc034d06eb3f", size = 2812008 }, + { url = "https://files.pythonhosted.org/packages/c8/91/2d517db61845698f41a2a974de90762e50faeb529201c6b3574935969045/pypdfium2-4.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:5eda3641a2da7a7a0b2f4dbd71d706401a656fea521b6b6faa0675b15d31a163", size = 6181543 }, + { url = "https://files.pythonhosted.org/packages/ba/c4/ed1315143a7a84b2c7616569dfb472473968d628f17c231c39e29ae9d780/pypdfium2-4.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:0dfa61421b5eb68e1188b0b2231e7ba35735aef2d867d86e48ee6cab6975195e", size = 6175911 }, + { url = "https://files.pythonhosted.org/packages/7a/c4/9e62d03f414e0e3051c56d5943c3bf42aa9608ede4e19dc96438364e9e03/pypdfium2-4.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f33bd79e7a09d5f7acca3b0b69ff6c8a488869a7fab48fdf400fec6e20b9c8be", size = 6267430 }, + { url = "https://files.pythonhosted.org/packages/90/47/eda4904f715fb98561e34012826e883816945934a851745570521ec89520/pypdfium2-4.30.0-py3-none-win32.whl", hash = "sha256:ee2410f15d576d976c2ab2558c93d392a25fb9f6635e8dd0a8a3a5241b275e0e", size = 2775951 }, + { url = "https://files.pythonhosted.org/packages/25/bd/56d9ec6b9f0fc4e0d95288759f3179f0fcd34b1a1526b75673d2f6d5196f/pypdfium2-4.30.0-py3-none-win_amd64.whl", hash = "sha256:90dbb2ac07be53219f56be09961eb95cf2473f834d01a42d901d13ccfad64b4c", size = 2892098 }, + { url = "https://files.pythonhosted.org/packages/be/7a/097801205b991bc3115e8af1edb850d30aeaf0118520b016354cf5ccd3f6/pypdfium2-4.30.0-py3-none-win_arm64.whl", hash = "sha256:119b2969a6d6b1e8d55e99caaf05290294f2d0fe49c12a3f17102d01c441bd29", size = 2752118 }, ] [[package]] name = "pypika" version = "0.48.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259, upload-time = "2022-03-15T11:22:57.066Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/2c/94ed7b91db81d61d7096ac8f2d325ec562fc75e35f3baea8749c85b28784/PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378", size = 67259 } [[package]] name = "pyproject-hooks" version = "1.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228, upload-time = "2024-09-29T09:24:13.293Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e7/82/28175b2414effca1cdac8dc99f76d660e7a4fb0ceefa4b4ab8f5f6742925/pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8", size = 19228 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216, upload-time = "2024-09-29T09:24:11.978Z" }, + { url = "https://files.pythonhosted.org/packages/bd/24/12818598c362d7f300f18e74db45963dbcb85150324092410c8b49405e42/pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913", size = 10216 }, ] [[package]] name = "pyreadline3" version = "3.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, ] [[package]] @@ -4663,9 +4669,9 @@ dependencies = [ { name = "packaging" }, { name = "pluggy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, ] [[package]] @@ -4676,9 +4682,9 @@ dependencies = [ { name = "py-cpuinfo" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/08/e6b0067efa9a1f2a1eb3043ecd8a0c48bfeb60d3255006dcc829d72d5da2/pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1", size = 334641, 
upload-time = "2022-10-25T21:21:55.686Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/08/e6b0067efa9a1f2a1eb3043ecd8a0c48bfeb60d3255006dcc829d72d5da2/pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1", size = 334641 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/a1/3b70862b5b3f830f0422844f25a823d0470739d994466be9dbbbb414d85a/pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6", size = 43951, upload-time = "2022-10-25T21:21:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/3b70862b5b3f830f0422844f25a823d0470739d994466be9dbbbb414d85a/pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6", size = 43951 }, ] [[package]] @@ -4689,9 +4695,9 @@ dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", size = 63245, upload-time = "2023-05-24T18:44:56.845Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/15/da3df99fd551507694a9b01f512a2f6cf1254f33601605843c3775f39460/pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6", size = 63245 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949, upload-time = "2023-05-24T18:44:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/a7/4b/8b78d126e275efa2379b1c2e09dc52cf70df16fc3b90613ef82531499d73/pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a", size = 21949 }, ] [[package]] @@ -4701,9 +4707,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/31/27f28431a16b83cab7a636dce59cf397517807d247caa38ee67d65e71ef8/pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf", size = 8911, upload-time = "2024-09-17T22:39:18.566Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/31/27f28431a16b83cab7a636dce59cf397517807d247caa38ee67d65e71ef8/pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf", size = 8911 } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/b8/87cfb16045c9d4092cfcf526135d73b88101aac83bc1adcf82dfb5fd3833/pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30", size = 6141, upload-time = "2024-09-17T22:39:16.942Z" }, + { url = "https://files.pythonhosted.org/packages/de/b8/87cfb16045c9d4092cfcf526135d73b88101aac83bc1adcf82dfb5fd3833/pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30", size = 6141 }, ] [[package]] @@ -4713,9 +4719,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, + { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923 }, ] [[package]] @@ -4725,34 +4731,34 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/03/269f96535705b2f18c8977fa58e76763b4e4727a9b3ae277a9468c8ffe05/python_calamine-0.4.0.tar.gz", hash = "sha256:94afcbae3fec36d2d7475095a59d4dc6fae45829968c743cb799ebae269d7bbf", size = 127737, upload-time = "2025-07-04T06:05:28.626Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/03/269f96535705b2f18c8977fa58e76763b4e4727a9b3ae277a9468c8ffe05/python_calamine-0.4.0.tar.gz", hash = "sha256:94afcbae3fec36d2d7475095a59d4dc6fae45829968c743cb799ebae269d7bbf", size = 127737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/a5/bcd82326d0ff1ab5889e7a5e13c868b483fc56398e143aae8e93149ba43b/python_calamine-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d1687f8c4d7852920c7b4e398072f183f88dd273baf5153391edc88b7454b8c0", size = 833019, upload-time = "2025-07-04T06:03:32.214Z" }, - { url = "https://files.pythonhosted.org/packages/f6/1a/a681f1d2f28164552e91ef47bcde6708098aa64a5f5fe3952f22362d340a/python_calamine-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:258d04230bebbbafa370a15838049d912d6a0a2c4da128943d8160ca4b6db58e", size = 812268, upload-time = "2025-07-04T06:03:33.855Z" }, - { url = "https://files.pythonhosted.org/packages/3d/92/2fc911431733739d4e7a633cefa903fa49a6b7a61e8765bad29a4a7c47b1/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686e491634934f059553d55f77ac67ca4c235452d5b444f98fe79b3579f1ea5", size = 875733, upload-time = "2025-07-04T06:03:35.154Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f0/48bfae6802eb360028ca6c15e9edf42243aadd0006b6ac3e9edb41a57119/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4480af7babcc2f919c638a554b06b7b145d9ab3da47fd696d68c2fc6f67f9541", size = 878325, upload-time = "2025-07-04T06:03:36.638Z" }, - { url = "https://files.pythonhosted.org/packages/a4/dc/f8c956e15bac9d5d1e05cd1b907ae780e40522d2fd103c8c6e2f21dff4ed/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e405b87a8cd1e90a994e570705898634f105442029f25bab7da658ee9cbaa771", size = 1015038, upload-time = "2025-07-04T06:03:37.971Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/3f/e69ab97c7734fb850fba2f506b775912fd59f04e17488582c8fbf52dbc72/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a831345ee42615f0dfcb0ed60a3b1601d2f946d4166edae64fd9a6f9bbd57fc1", size = 924969, upload-time = "2025-07-04T06:03:39.253Z" }, - { url = "https://files.pythonhosted.org/packages/79/03/b4c056b468908d87a3de94389166e0f4dba725a70bc39e03bc039ba96f6b/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951b8e4cafb3e1623bb5dfc31a18d38ef43589275f9657e99dfcbe4c8c4b33e", size = 888020, upload-time = "2025-07-04T06:03:41.099Z" }, - { url = "https://files.pythonhosted.org/packages/86/4f/b9092f7c970894054083656953184e44cb2dadff8852425e950d4ca419af/python_calamine-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6619fe3b5c9633ed8b178684605f8076c9d8d85b29ade15f7a7713fcfdee2d0", size = 930337, upload-time = "2025-07-04T06:03:42.89Z" }, - { url = "https://files.pythonhosted.org/packages/64/da/137239027bf253aabe7063450950085ec9abd827d0cbc5170f585f38f464/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cc45b8e76ee331f6ea88ca23677be0b7a05b502cd4423ba2c2bc8dad53af1be", size = 1054568, upload-time = "2025-07-04T06:03:44.153Z" }, - { url = "https://files.pythonhosted.org/packages/80/96/74c38bcf6b6825d5180c0e147b85be8c52dbfba11848b1e98ba358e32a64/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b2cfb7ced1a7c80befa0cfddfe4aae65663eb4d63c4ae484b9b7a80ebe1b528", size = 1058317, upload-time = "2025-07-04T06:03:45.873Z" }, - { url = "https://files.pythonhosted.org/packages/33/95/9d7b8fe8b32d99a6c79534df3132cfe40e9df4a0f5204048bf5e66ddbd93/python_calamine-0.4.0-cp311-cp311-win32.whl", hash = "sha256:04f4e32ee16814fc1fafc49300be8eeb280d94878461634768b51497e1444bd6", size = 663934, upload-time = "2025-07-04T06:03:47.407Z" }, - { url = "https://files.pythonhosted.org/packages/7c/e3/1c6cd9fd499083bea6ff1c30033ee8215b9f64e862babf5be170cacae190/python_calamine-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:a8543f69afac2213c0257bb56215b03dadd11763064a9d6b19786f27d1bef586", size = 692535, upload-time = "2025-07-04T06:03:48.699Z" }, - { url = "https://files.pythonhosted.org/packages/94/1c/3105d19fbab6b66874ce8831652caedd73b23b72e88ce18addf8ceca8c12/python_calamine-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:54622e35ec7c3b6f07d119da49aa821731c185e951918f152c2dbf3bec1e15d6", size = 671751, upload-time = "2025-07-04T06:03:49.979Z" }, - { url = "https://files.pythonhosted.org/packages/63/60/f951513aaaa470b3a38a87d65eca45e0a02bc329b47864f5a17db563f746/python_calamine-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:74bca5d44a73acf3dcfa5370820797fcfd225c8c71abcddea987c5b4f5077e98", size = 826603, upload-time = "2025-07-04T06:03:51.245Z" }, - { url = "https://files.pythonhosted.org/packages/76/3f/789955bbc77831c639890758f945eb2b25d6358065edf00da6751226cf31/python_calamine-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf80178f5d1b0ee2ccfffb8549c50855f6249e930664adc5807f4d0d6c2b269c", size = 805826, upload-time = "2025-07-04T06:03:52.482Z" }, - { url = "https://files.pythonhosted.org/packages/00/4c/f87d17d996f647030a40bfd124fe45fe893c002bee35ae6aca9910a923ae/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfef345386ae86f7720f1be93495a40fd7e7feabb8caa1df5025d7fbc58a1f", size = 874989, upload-time = "2025-07-04T06:03:53.794Z" }, - { 
url = "https://files.pythonhosted.org/packages/47/d2/3269367303f6c0488cf1bfebded3f9fe968d118a988222e04c9b2636bf2e/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f23e6214dbf9b29065a5dcfd6a6c674dd0e251407298c9138611c907d53423ff", size = 877504, upload-time = "2025-07-04T06:03:55.095Z" }, - { url = "https://files.pythonhosted.org/packages/f9/6d/c7ac35f5c7125e8bd07eb36773f300fda20dd2da635eae78a8cebb0b6ab7/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d792d304ee232ab01598e1d3ab22e074a32c2511476b5fb4f16f4222d9c2a265", size = 1014171, upload-time = "2025-07-04T06:03:56.777Z" }, - { url = "https://files.pythonhosted.org/packages/f0/81/5ea8792a2e9ab5e2a05872db3a4d3ed3538ad5af1861282c789e2f13a8cf/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf813425918fd68f3e991ef7c4b5015be0a1a95fc4a8ab7e73c016ef1b881bb4", size = 926737, upload-time = "2025-07-04T06:03:58.024Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6e/989e56e6f073fc0981a74ba7a393881eb351bb143e5486aa629b5e5d6a8b/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbe2a0ccb4d003635888eea83a995ff56b0748c8c76fc71923544f5a4a7d4cd7", size = 887032, upload-time = "2025-07-04T06:03:59.298Z" }, - { url = "https://files.pythonhosted.org/packages/5d/92/2c9bd64277c6fe4be695d7d5a803b38d953ec8565037486be7506642c27c/python_calamine-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7b3bb5f0d910b9b03c240987560f843256626fd443279759df4e91b717826d2", size = 929700, upload-time = "2025-07-04T06:04:01.388Z" }, - { url = "https://files.pythonhosted.org/packages/64/fa/fc758ca37701d354a6bc7d63118699f1c73788a1f2e1b44d720824992764/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bd2c0fc2b5eabd08ceac8a2935bffa88dbc6116db971aa8c3f244bad3fd0f644", size = 1053971, upload-time = "2025-07-04T06:04:02.704Z" }, - { url = "https://files.pythonhosted.org/packages/65/52/40d7e08ae0ddba331cdc9f7fb3e92972f8f38d7afbd00228158ff6d1fceb/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85b547cb1c5b692a0c2406678d666dbc1cec65a714046104683fe4f504a1721d", size = 1057057, upload-time = "2025-07-04T06:04:04.014Z" }, - { url = "https://files.pythonhosted.org/packages/16/de/e8a071c0adfda73285d891898a24f6e99338328c404f497ff5b0e6bc3d45/python_calamine-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c2a1e3a0db4d6de4587999a21cc35845648c84fba81c03dd6f3072c690888e4", size = 665540, upload-time = "2025-07-04T06:04:05.679Z" }, - { url = "https://files.pythonhosted.org/packages/5e/f2/7fdfada13f80db12356853cf08697ff4e38800a1809c2bdd26ee60962e7a/python_calamine-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b193c89ffcc146019475cd121c552b23348411e19c04dedf5c766a20db64399a", size = 695366, upload-time = "2025-07-04T06:04:06.977Z" }, - { url = "https://files.pythonhosted.org/packages/20/66/d37412ad854480ce32f50d9f74f2a2f88b1b8a6fbc32f70aabf3211ae89e/python_calamine-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:43a0f15e0b60c75a71b21a012b911d5d6f5fa052afad2a8edbc728af43af0fcf", size = 670740, upload-time = "2025-07-04T06:04:08.656Z" }, + { url = "https://files.pythonhosted.org/packages/d4/a5/bcd82326d0ff1ab5889e7a5e13c868b483fc56398e143aae8e93149ba43b/python_calamine-0.4.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d1687f8c4d7852920c7b4e398072f183f88dd273baf5153391edc88b7454b8c0", size = 833019 }, + { url = 
"https://files.pythonhosted.org/packages/f6/1a/a681f1d2f28164552e91ef47bcde6708098aa64a5f5fe3952f22362d340a/python_calamine-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:258d04230bebbbafa370a15838049d912d6a0a2c4da128943d8160ca4b6db58e", size = 812268 }, + { url = "https://files.pythonhosted.org/packages/3d/92/2fc911431733739d4e7a633cefa903fa49a6b7a61e8765bad29a4a7c47b1/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686e491634934f059553d55f77ac67ca4c235452d5b444f98fe79b3579f1ea5", size = 875733 }, + { url = "https://files.pythonhosted.org/packages/f4/f0/48bfae6802eb360028ca6c15e9edf42243aadd0006b6ac3e9edb41a57119/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4480af7babcc2f919c638a554b06b7b145d9ab3da47fd696d68c2fc6f67f9541", size = 878325 }, + { url = "https://files.pythonhosted.org/packages/a4/dc/f8c956e15bac9d5d1e05cd1b907ae780e40522d2fd103c8c6e2f21dff4ed/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e405b87a8cd1e90a994e570705898634f105442029f25bab7da658ee9cbaa771", size = 1015038 }, + { url = "https://files.pythonhosted.org/packages/54/3f/e69ab97c7734fb850fba2f506b775912fd59f04e17488582c8fbf52dbc72/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a831345ee42615f0dfcb0ed60a3b1601d2f946d4166edae64fd9a6f9bbd57fc1", size = 924969 }, + { url = "https://files.pythonhosted.org/packages/79/03/b4c056b468908d87a3de94389166e0f4dba725a70bc39e03bc039ba96f6b/python_calamine-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9951b8e4cafb3e1623bb5dfc31a18d38ef43589275f9657e99dfcbe4c8c4b33e", size = 888020 }, + { url = "https://files.pythonhosted.org/packages/86/4f/b9092f7c970894054083656953184e44cb2dadff8852425e950d4ca419af/python_calamine-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6619fe3b5c9633ed8b178684605f8076c9d8d85b29ade15f7a7713fcfdee2d0", size = 930337 }, + { url = "https://files.pythonhosted.org/packages/64/da/137239027bf253aabe7063450950085ec9abd827d0cbc5170f585f38f464/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cc45b8e76ee331f6ea88ca23677be0b7a05b502cd4423ba2c2bc8dad53af1be", size = 1054568 }, + { url = "https://files.pythonhosted.org/packages/80/96/74c38bcf6b6825d5180c0e147b85be8c52dbfba11848b1e98ba358e32a64/python_calamine-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1b2cfb7ced1a7c80befa0cfddfe4aae65663eb4d63c4ae484b9b7a80ebe1b528", size = 1058317 }, + { url = "https://files.pythonhosted.org/packages/33/95/9d7b8fe8b32d99a6c79534df3132cfe40e9df4a0f5204048bf5e66ddbd93/python_calamine-0.4.0-cp311-cp311-win32.whl", hash = "sha256:04f4e32ee16814fc1fafc49300be8eeb280d94878461634768b51497e1444bd6", size = 663934 }, + { url = "https://files.pythonhosted.org/packages/7c/e3/1c6cd9fd499083bea6ff1c30033ee8215b9f64e862babf5be170cacae190/python_calamine-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:a8543f69afac2213c0257bb56215b03dadd11763064a9d6b19786f27d1bef586", size = 692535 }, + { url = "https://files.pythonhosted.org/packages/94/1c/3105d19fbab6b66874ce8831652caedd73b23b72e88ce18addf8ceca8c12/python_calamine-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:54622e35ec7c3b6f07d119da49aa821731c185e951918f152c2dbf3bec1e15d6", size = 671751 }, + { url = 
"https://files.pythonhosted.org/packages/63/60/f951513aaaa470b3a38a87d65eca45e0a02bc329b47864f5a17db563f746/python_calamine-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:74bca5d44a73acf3dcfa5370820797fcfd225c8c71abcddea987c5b4f5077e98", size = 826603 }, + { url = "https://files.pythonhosted.org/packages/76/3f/789955bbc77831c639890758f945eb2b25d6358065edf00da6751226cf31/python_calamine-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf80178f5d1b0ee2ccfffb8549c50855f6249e930664adc5807f4d0d6c2b269c", size = 805826 }, + { url = "https://files.pythonhosted.org/packages/00/4c/f87d17d996f647030a40bfd124fe45fe893c002bee35ae6aca9910a923ae/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65cfef345386ae86f7720f1be93495a40fd7e7feabb8caa1df5025d7fbc58a1f", size = 874989 }, + { url = "https://files.pythonhosted.org/packages/47/d2/3269367303f6c0488cf1bfebded3f9fe968d118a988222e04c9b2636bf2e/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f23e6214dbf9b29065a5dcfd6a6c674dd0e251407298c9138611c907d53423ff", size = 877504 }, + { url = "https://files.pythonhosted.org/packages/f9/6d/c7ac35f5c7125e8bd07eb36773f300fda20dd2da635eae78a8cebb0b6ab7/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d792d304ee232ab01598e1d3ab22e074a32c2511476b5fb4f16f4222d9c2a265", size = 1014171 }, + { url = "https://files.pythonhosted.org/packages/f0/81/5ea8792a2e9ab5e2a05872db3a4d3ed3538ad5af1861282c789e2f13a8cf/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf813425918fd68f3e991ef7c4b5015be0a1a95fc4a8ab7e73c016ef1b881bb4", size = 926737 }, + { url = "https://files.pythonhosted.org/packages/cc/6e/989e56e6f073fc0981a74ba7a393881eb351bb143e5486aa629b5e5d6a8b/python_calamine-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbe2a0ccb4d003635888eea83a995ff56b0748c8c76fc71923544f5a4a7d4cd7", size = 887032 }, + { url = "https://files.pythonhosted.org/packages/5d/92/2c9bd64277c6fe4be695d7d5a803b38d953ec8565037486be7506642c27c/python_calamine-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7b3bb5f0d910b9b03c240987560f843256626fd443279759df4e91b717826d2", size = 929700 }, + { url = "https://files.pythonhosted.org/packages/64/fa/fc758ca37701d354a6bc7d63118699f1c73788a1f2e1b44d720824992764/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bd2c0fc2b5eabd08ceac8a2935bffa88dbc6116db971aa8c3f244bad3fd0f644", size = 1053971 }, + { url = "https://files.pythonhosted.org/packages/65/52/40d7e08ae0ddba331cdc9f7fb3e92972f8f38d7afbd00228158ff6d1fceb/python_calamine-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85b547cb1c5b692a0c2406678d666dbc1cec65a714046104683fe4f504a1721d", size = 1057057 }, + { url = "https://files.pythonhosted.org/packages/16/de/e8a071c0adfda73285d891898a24f6e99338328c404f497ff5b0e6bc3d45/python_calamine-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c2a1e3a0db4d6de4587999a21cc35845648c84fba81c03dd6f3072c690888e4", size = 665540 }, + { url = "https://files.pythonhosted.org/packages/5e/f2/7fdfada13f80db12356853cf08697ff4e38800a1809c2bdd26ee60962e7a/python_calamine-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b193c89ffcc146019475cd121c552b23348411e19c04dedf5c766a20db64399a", size = 695366 }, + { url = 
"https://files.pythonhosted.org/packages/20/66/d37412ad854480ce32f50d9f74f2a2f88b1b8a6fbc32f70aabf3211ae89e/python_calamine-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:43a0f15e0b60c75a71b21a012b911d5d6f5fa052afad2a8edbc728af43af0fcf", size = 670740 }, ] [[package]] @@ -4762,9 +4768,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] [[package]] @@ -4775,45 +4781,45 @@ dependencies = [ { name = "lxml" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/35/e4/386c514c53684772885009c12b67a7edd526c15157778ac1b138bc75063e/python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd", size = 5656581, upload-time = "2024-05-01T19:41:57.772Z" } +sdist = { url = "https://files.pythonhosted.org/packages/35/e4/386c514c53684772885009c12b67a7edd526c15157778ac1b138bc75063e/python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd", size = 5656581 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/3d/330d9efbdb816d3f60bf2ad92f05e1708e4a1b9abe80461ac3444c83f749/python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe", size = 244315, upload-time = "2024-05-01T19:41:47.006Z" }, + { url = "https://files.pythonhosted.org/packages/3e/3d/330d9efbdb816d3f60bf2ad92f05e1708e4a1b9abe80461ac3444c83f749/python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe", size = 244315 }, ] [[package]] name = "python-dotenv" version = "1.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, ] [[package]] name = "python-http-client" version = "3.3.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/56/fa/284e52a8c6dcbe25671f02d217bf2f85660db940088faf18ae7a05e97313/python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0", size = 9377, upload-time = "2022-03-09T20:23:56.386Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/fa/284e52a8c6dcbe25671f02d217bf2f85660db940088faf18ae7a05e97313/python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0", size = 9377 } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/31/9b360138f4e4035ee9dac4fe1132b6437bd05751aaf1db2a2d83dc45db5f/python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36", size = 8352, upload-time = "2022-03-09T20:23:54.862Z" }, + { url = "https://files.pythonhosted.org/packages/29/31/9b360138f4e4035ee9dac4fe1132b6437bd05751aaf1db2a2d83dc45db5f/python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36", size = 8352 }, ] [[package]] name = "python-iso639" version = "2025.2.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d5/19/45aa1917c7b1f4eb71104795b9b0cbf97169b99ec46cd303445883536549/python_iso639-2025.2.18.tar.gz", hash = "sha256:34e31e8e76eb3fc839629e257b12bcfd957c6edcbd486bbf66ba5185d1f566e8", size = 173552, upload-time = "2025-02-18T13:48:08.607Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/19/45aa1917c7b1f4eb71104795b9b0cbf97169b99ec46cd303445883536549/python_iso639-2025.2.18.tar.gz", hash = "sha256:34e31e8e76eb3fc839629e257b12bcfd957c6edcbd486bbf66ba5185d1f566e8", size = 173552 } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/a3/3ceaf89a17a1e1d5e7bbdfe5514aa3055d91285b37a5c8fed662969e3d56/python_iso639-2025.2.18-py3-none-any.whl", hash = "sha256:b2d471c37483a26f19248458b20e7bd96492e15368b01053b540126bcc23152f", size = 167631, upload-time = "2025-02-18T13:48:06.602Z" }, + { url = "https://files.pythonhosted.org/packages/54/a3/3ceaf89a17a1e1d5e7bbdfe5514aa3055d91285b37a5c8fed662969e3d56/python_iso639-2025.2.18-py3-none-any.whl", hash = "sha256:b2d471c37483a26f19248458b20e7bd96492e15368b01053b540126bcc23152f", size = 167631 }, ] [[package]] name = "python-magic" version = "0.4.27" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b", size = 14677, upload-time = "2022-06-07T20:16:59.508Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/da/db/0b3e28ac047452d079d375ec6798bf76a036a08182dbb39ed38116a49130/python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b", size = 14677 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840, upload-time = "2022-06-07T20:16:57.763Z" }, + { url = "https://files.pythonhosted.org/packages/6c/73/9f872cb81fc5c3bb48f7227872c28975f998f3e7c2b1c16e95e6432bbb90/python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3", size = 13840 }, ] [[package]] @@ -4825,9 +4831,9 @@ dependencies = [ { name = "olefile" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a2/4e/869f34faedbc968796d2c7e9837dede079c9cb9750917356b1f1eda926e9/python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad", size = 34713, upload-time = "2025-02-03T17:13:47.415Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/4e/869f34faedbc968796d2c7e9837dede079c9cb9750917356b1f1eda926e9/python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad", size = 34713 } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/67/f56c69a98c7eb244025845506387d0f961681657c9fcd8b2d2edd148f9d2/python_oxmsg-0.0.2-py3-none-any.whl", hash = "sha256:22be29b14c46016bcd05e34abddfd8e05ee82082f53b82753d115da3fc7d0355", size = 31455, upload-time = "2025-02-03T17:13:46.061Z" }, + { url = "https://files.pythonhosted.org/packages/53/67/f56c69a98c7eb244025845506387d0f961681657c9fcd8b2d2edd148f9d2/python_oxmsg-0.0.2-py3-none-any.whl", hash = "sha256:22be29b14c46016bcd05e34abddfd8e05ee82082f53b82753d115da3fc7d0355", size = 31455 }, ] [[package]] @@ -4840,18 +4846,18 @@ dependencies = [ { name = "typing-extensions" }, { name = "xlsxwriter" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297, upload-time = "2024-08-07T17:33:37.772Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/a9/0c0db8d37b2b8a645666f7fd8accea4c6224e013c42b1d5c17c93590cd06/python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095", size = 10109297 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788 }, ] [[package]] name = "pytz" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = 
"sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, ] [[package]] @@ -4859,47 +4865,47 @@ name = "pywin32" version = "310" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284, upload-time = "2025-03-17T00:55:53.124Z" }, - { url = "https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748, upload-time = "2025-03-17T00:55:55.203Z" }, - { url = "https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941, upload-time = "2025-03-17T00:55:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239, upload-time = "2025-03-17T00:55:58.807Z" }, - { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839, upload-time = "2025-03-17T00:56:00.8Z" }, - { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470, upload-time = "2025-03-17T00:56:02.601Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b1/68aa2986129fb1011dabbe95f0136f44509afaf072b12b8f815905a39f33/pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd", size = 8784284 }, + { url = "https://files.pythonhosted.org/packages/b3/bd/d1592635992dd8db5bb8ace0551bc3a769de1ac8850200cfa517e72739fb/pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c", size = 9520748 }, + { url = "https://files.pythonhosted.org/packages/90/b1/ac8b1ffce6603849eb45a91cf126c0fa5431f186c2e768bf56889c46f51c/pywin32-310-cp311-cp311-win_arm64.whl", hash = 
"sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582", size = 8455941 }, + { url = "https://files.pythonhosted.org/packages/6b/ec/4fdbe47932f671d6e348474ea35ed94227fb5df56a7c30cbbb42cd396ed0/pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d", size = 8796239 }, + { url = "https://files.pythonhosted.org/packages/e3/e5/b0627f8bb84e06991bea89ad8153a9e50ace40b2e1195d68e9dff6b03d0f/pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060", size = 9503839 }, + { url = "https://files.pythonhosted.org/packages/1f/32/9ccf53748df72301a89713936645a664ec001abd35ecc8578beda593d37d/pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966", size = 8459470 }, ] [[package]] name = "pyxlsb" version = "1.0.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/13/eebaeb7a40b062d1c6f7f91d09e73d30a69e33e4baa7cbe4b7658548b1cd/pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685", size = 22424, upload-time = "2022-10-14T19:17:47.308Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/13/eebaeb7a40b062d1c6f7f91d09e73d30a69e33e4baa7cbe4b7658548b1cd/pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685", size = 22424 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/92/345823838ae367c59b63e03aef9c331f485370f9df6d049256a61a28f06d/pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4", size = 23849, upload-time = "2022-10-14T19:17:46.079Z" }, + { url = "https://files.pythonhosted.org/packages/7e/92/345823838ae367c59b63e03aef9c331f485370f9df6d049256a61a28f06d/pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4", size = 23849 }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { 
url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, ] [[package]] @@ -4915,53 +4921,53 @@ dependencies = [ { name = "pydantic" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/cf/db06a74694bf8f126ed4a869c70ef576f01ee691ef20799fba3d561d3565/qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981", size = 199999, upload-time = "2024-04-22T13:35:49.444Z" } +sdist = { url = "https://files.pythonhosted.org/packages/86/cf/db06a74694bf8f126ed4a869c70ef576f01ee691ef20799fba3d561d3565/qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981", size = 199999 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/fa/5abd82cde353f1009c068cca820195efd94e403d261b787e78ea7a9c8318/qdrant_client-1.9.0-py3-none-any.whl", hash = 
"sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e", size = 229258, upload-time = "2024-04-22T13:35:46.81Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fa/5abd82cde353f1009c068cca820195efd94e403d261b787e78ea7a9c8318/qdrant_client-1.9.0-py3-none-any.whl", hash = "sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e", size = 229258 }, ] [[package]] name = "rapidfuzz" version = "3.13.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/6895abc3a3d056b9698da3199b04c0e56226d530ae44a470edabf8b664f0/rapidfuzz-3.13.0.tar.gz", hash = "sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8", size = 57904226, upload-time = "2025-04-03T20:38:51.226Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/6895abc3a3d056b9698da3199b04c0e56226d530ae44a470edabf8b664f0/rapidfuzz-3.13.0.tar.gz", hash = "sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8", size = 57904226 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/17/9be9eff5a3c7dfc831c2511262082c6786dca2ce21aa8194eef1cb71d67a/rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a", size = 1999453, upload-time = "2025-04-03T20:35:40.804Z" }, - { url = "https://files.pythonhosted.org/packages/75/67/62e57896ecbabe363f027d24cc769d55dd49019e576533ec10e492fcd8a2/rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805", size = 1450881, upload-time = "2025-04-03T20:35:42.734Z" }, - { url = "https://files.pythonhosted.org/packages/96/5c/691c5304857f3476a7b3df99e91efc32428cbe7d25d234e967cc08346c13/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70", size = 1422990, upload-time = "2025-04-03T20:35:45.158Z" }, - { url = "https://files.pythonhosted.org/packages/46/81/7a7e78f977496ee2d613154b86b203d373376bcaae5de7bde92f3ad5a192/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624", size = 5342309, upload-time = "2025-04-03T20:35:46.952Z" }, - { url = "https://files.pythonhosted.org/packages/51/44/12fdd12a76b190fe94bf38d252bb28ddf0ab7a366b943e792803502901a2/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969", size = 1656881, upload-time = "2025-04-03T20:35:49.954Z" }, - { url = "https://files.pythonhosted.org/packages/27/ae/0d933e660c06fcfb087a0d2492f98322f9348a28b2cc3791a5dbadf6e6fb/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e", size = 1608494, upload-time = "2025-04-03T20:35:51.646Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2c/4b2f8aafdf9400e5599b6ed2f14bc26ca75f5a923571926ccbc998d4246a/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2", size = 3072160, upload-time = "2025-04-03T20:35:53.472Z" }, - { url = "https://files.pythonhosted.org/packages/60/7d/030d68d9a653c301114101c3003b31ce01cf2c3224034cd26105224cd249/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301", size = 2491549, upload-time = "2025-04-03T20:35:55.391Z" }, - { url = "https://files.pythonhosted.org/packages/8e/cd/7040ba538fc6a8ddc8816a05ecf46af9988b46c148ddd7f74fb0fb73d012/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc", size = 7584142, upload-time = "2025-04-03T20:35:57.71Z" }, - { url = "https://files.pythonhosted.org/packages/c1/96/85f7536fbceb0aa92c04a1c37a3fc4fcd4e80649e9ed0fb585382df82edc/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd", size = 2896234, upload-time = "2025-04-03T20:35:59.969Z" }, - { url = "https://files.pythonhosted.org/packages/55/fd/460e78438e7019f2462fe9d4ecc880577ba340df7974c8a4cfe8d8d029df/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c", size = 3437420, upload-time = "2025-04-03T20:36:01.91Z" }, - { url = "https://files.pythonhosted.org/packages/cc/df/c3c308a106a0993befd140a414c5ea78789d201cf1dfffb8fd9749718d4f/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75", size = 4410860, upload-time = "2025-04-03T20:36:04.352Z" }, - { url = "https://files.pythonhosted.org/packages/75/ee/9d4ece247f9b26936cdeaae600e494af587ce9bf8ddc47d88435f05cfd05/rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87", size = 1843161, upload-time = "2025-04-03T20:36:06.802Z" }, - { url = "https://files.pythonhosted.org/packages/c9/5a/d00e1f63564050a20279015acb29ecaf41646adfacc6ce2e1e450f7f2633/rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f", size = 1629962, upload-time = "2025-04-03T20:36:09.133Z" }, - { url = "https://files.pythonhosted.org/packages/3b/74/0a3de18bc2576b794f41ccd07720b623e840fda219ab57091897f2320fdd/rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203", size = 866631, upload-time = "2025-04-03T20:36:11.022Z" }, - { url = "https://files.pythonhosted.org/packages/13/4b/a326f57a4efed8f5505b25102797a58e37ee11d94afd9d9422cb7c76117e/rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7", size = 1989501, upload-time = "2025-04-03T20:36:13.43Z" }, - { url = "https://files.pythonhosted.org/packages/b7/53/1f7eb7ee83a06c400089ec7cb841cbd581c2edd7a4b21eb2f31030b88daa/rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26", size = 1445379, upload-time = "2025-04-03T20:36:16.439Z" }, - { url = "https://files.pythonhosted.org/packages/07/09/de8069a4599cc8e6d194e5fa1782c561151dea7d5e2741767137e2a8c1f0/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69", size = 1405986, upload-time = "2025-04-03T20:36:18.447Z" }, - { url = "https://files.pythonhosted.org/packages/5d/77/d9a90b39c16eca20d70fec4ca377fbe9ea4c0d358c6e4736ab0e0e78aaf6/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97", size = 
5310809, upload-time = "2025-04-03T20:36:20.324Z" }, - { url = "https://files.pythonhosted.org/packages/1e/7d/14da291b0d0f22262d19522afaf63bccf39fc027c981233fb2137a57b71f/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981", size = 1629394, upload-time = "2025-04-03T20:36:22.256Z" }, - { url = "https://files.pythonhosted.org/packages/b7/e4/79ed7e4fa58f37c0f8b7c0a62361f7089b221fe85738ae2dbcfb815e985a/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f", size = 1600544, upload-time = "2025-04-03T20:36:24.207Z" }, - { url = "https://files.pythonhosted.org/packages/4e/20/e62b4d13ba851b0f36370060025de50a264d625f6b4c32899085ed51f980/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f", size = 3052796, upload-time = "2025-04-03T20:36:26.279Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8d/55fdf4387dec10aa177fe3df8dbb0d5022224d95f48664a21d6b62a5299d/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87", size = 2464016, upload-time = "2025-04-03T20:36:28.525Z" }, - { url = "https://files.pythonhosted.org/packages/9b/be/0872f6a56c0f473165d3b47d4170fa75263dc5f46985755aa9bf2bbcdea1/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3", size = 7556725, upload-time = "2025-04-03T20:36:30.629Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f3/6c0750e484d885a14840c7a150926f425d524982aca989cdda0bb3bdfa57/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db", size = 2859052, upload-time = "2025-04-03T20:36:32.836Z" }, - { url = "https://files.pythonhosted.org/packages/6f/98/5a3a14701b5eb330f444f7883c9840b43fb29c575e292e09c90a270a6e07/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73", size = 3390219, upload-time = "2025-04-03T20:36:35.062Z" }, - { url = "https://files.pythonhosted.org/packages/e9/7d/f4642eaaeb474b19974332f2a58471803448be843033e5740965775760a5/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a", size = 4377924, upload-time = "2025-04-03T20:36:37.363Z" }, - { url = "https://files.pythonhosted.org/packages/8e/83/fa33f61796731891c3e045d0cbca4436a5c436a170e7f04d42c2423652c3/rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514", size = 1823915, upload-time = "2025-04-03T20:36:39.451Z" }, - { url = "https://files.pythonhosted.org/packages/03/25/5ee7ab6841ca668567d0897905eebc79c76f6297b73bf05957be887e9c74/rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e", size = 1616985, upload-time = "2025-04-03T20:36:41.631Z" }, - { url = "https://files.pythonhosted.org/packages/76/5e/3f0fb88db396cb692aefd631e4805854e02120a2382723b90dcae720bcc6/rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7", size = 860116, upload-time = "2025-04-03T20:36:43.915Z" 
}, - { url = "https://files.pythonhosted.org/packages/88/df/6060c5a9c879b302bd47a73fc012d0db37abf6544c57591bcbc3459673bd/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27", size = 1905935, upload-time = "2025-04-03T20:38:18.07Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6c/a0b819b829e20525ef1bd58fc776fb8d07a0c38d819e63ba2b7c311a2ed4/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f", size = 1383714, upload-time = "2025-04-03T20:38:20.628Z" }, - { url = "https://files.pythonhosted.org/packages/6a/c1/3da3466cc8a9bfb9cd345ad221fac311143b6a9664b5af4adb95b5e6ce01/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095", size = 1367329, upload-time = "2025-04-03T20:38:23.01Z" }, - { url = "https://files.pythonhosted.org/packages/da/f0/9f2a9043bfc4e66da256b15d728c5fc2d865edf0028824337f5edac36783/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c", size = 5251057, upload-time = "2025-04-03T20:38:25.52Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ff/af2cb1d8acf9777d52487af5c6b34ce9d13381a753f991d95ecaca813407/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4", size = 2992401, upload-time = "2025-04-03T20:38:28.196Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c5/c243b05a15a27b946180db0d1e4c999bef3f4221505dff9748f1f6c917be/rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86", size = 1553782, upload-time = "2025-04-03T20:38:30.778Z" }, + { url = "https://files.pythonhosted.org/packages/87/17/9be9eff5a3c7dfc831c2511262082c6786dca2ce21aa8194eef1cb71d67a/rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a", size = 1999453 }, + { url = "https://files.pythonhosted.org/packages/75/67/62e57896ecbabe363f027d24cc769d55dd49019e576533ec10e492fcd8a2/rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805", size = 1450881 }, + { url = "https://files.pythonhosted.org/packages/96/5c/691c5304857f3476a7b3df99e91efc32428cbe7d25d234e967cc08346c13/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70", size = 1422990 }, + { url = "https://files.pythonhosted.org/packages/46/81/7a7e78f977496ee2d613154b86b203d373376bcaae5de7bde92f3ad5a192/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624", size = 5342309 }, + { url = "https://files.pythonhosted.org/packages/51/44/12fdd12a76b190fe94bf38d252bb28ddf0ab7a366b943e792803502901a2/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969", size = 1656881 }, + { url = 
"https://files.pythonhosted.org/packages/27/ae/0d933e660c06fcfb087a0d2492f98322f9348a28b2cc3791a5dbadf6e6fb/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e", size = 1608494 }, + { url = "https://files.pythonhosted.org/packages/3d/2c/4b2f8aafdf9400e5599b6ed2f14bc26ca75f5a923571926ccbc998d4246a/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2", size = 3072160 }, + { url = "https://files.pythonhosted.org/packages/60/7d/030d68d9a653c301114101c3003b31ce01cf2c3224034cd26105224cd249/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301", size = 2491549 }, + { url = "https://files.pythonhosted.org/packages/8e/cd/7040ba538fc6a8ddc8816a05ecf46af9988b46c148ddd7f74fb0fb73d012/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc", size = 7584142 }, + { url = "https://files.pythonhosted.org/packages/c1/96/85f7536fbceb0aa92c04a1c37a3fc4fcd4e80649e9ed0fb585382df82edc/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd", size = 2896234 }, + { url = "https://files.pythonhosted.org/packages/55/fd/460e78438e7019f2462fe9d4ecc880577ba340df7974c8a4cfe8d8d029df/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c", size = 3437420 }, + { url = "https://files.pythonhosted.org/packages/cc/df/c3c308a106a0993befd140a414c5ea78789d201cf1dfffb8fd9749718d4f/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75", size = 4410860 }, + { url = "https://files.pythonhosted.org/packages/75/ee/9d4ece247f9b26936cdeaae600e494af587ce9bf8ddc47d88435f05cfd05/rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87", size = 1843161 }, + { url = "https://files.pythonhosted.org/packages/c9/5a/d00e1f63564050a20279015acb29ecaf41646adfacc6ce2e1e450f7f2633/rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f", size = 1629962 }, + { url = "https://files.pythonhosted.org/packages/3b/74/0a3de18bc2576b794f41ccd07720b623e840fda219ab57091897f2320fdd/rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203", size = 866631 }, + { url = "https://files.pythonhosted.org/packages/13/4b/a326f57a4efed8f5505b25102797a58e37ee11d94afd9d9422cb7c76117e/rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7", size = 1989501 }, + { url = "https://files.pythonhosted.org/packages/b7/53/1f7eb7ee83a06c400089ec7cb841cbd581c2edd7a4b21eb2f31030b88daa/rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26", size = 1445379 }, + { url = "https://files.pythonhosted.org/packages/07/09/de8069a4599cc8e6d194e5fa1782c561151dea7d5e2741767137e2a8c1f0/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69", size = 1405986 }, + { url = "https://files.pythonhosted.org/packages/5d/77/d9a90b39c16eca20d70fec4ca377fbe9ea4c0d358c6e4736ab0e0e78aaf6/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97", size = 5310809 }, + { url = "https://files.pythonhosted.org/packages/1e/7d/14da291b0d0f22262d19522afaf63bccf39fc027c981233fb2137a57b71f/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981", size = 1629394 }, + { url = "https://files.pythonhosted.org/packages/b7/e4/79ed7e4fa58f37c0f8b7c0a62361f7089b221fe85738ae2dbcfb815e985a/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f", size = 1600544 }, + { url = "https://files.pythonhosted.org/packages/4e/20/e62b4d13ba851b0f36370060025de50a264d625f6b4c32899085ed51f980/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f", size = 3052796 }, + { url = "https://files.pythonhosted.org/packages/cd/8d/55fdf4387dec10aa177fe3df8dbb0d5022224d95f48664a21d6b62a5299d/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87", size = 2464016 }, + { url = "https://files.pythonhosted.org/packages/9b/be/0872f6a56c0f473165d3b47d4170fa75263dc5f46985755aa9bf2bbcdea1/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3", size = 7556725 }, + { url = "https://files.pythonhosted.org/packages/5d/f3/6c0750e484d885a14840c7a150926f425d524982aca989cdda0bb3bdfa57/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db", size = 2859052 }, + { url = "https://files.pythonhosted.org/packages/6f/98/5a3a14701b5eb330f444f7883c9840b43fb29c575e292e09c90a270a6e07/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73", size = 3390219 }, + { url = "https://files.pythonhosted.org/packages/e9/7d/f4642eaaeb474b19974332f2a58471803448be843033e5740965775760a5/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a", size = 4377924 }, + { url = "https://files.pythonhosted.org/packages/8e/83/fa33f61796731891c3e045d0cbca4436a5c436a170e7f04d42c2423652c3/rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514", size = 1823915 }, + { url = "https://files.pythonhosted.org/packages/03/25/5ee7ab6841ca668567d0897905eebc79c76f6297b73bf05957be887e9c74/rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e", size = 1616985 }, + { url = "https://files.pythonhosted.org/packages/76/5e/3f0fb88db396cb692aefd631e4805854e02120a2382723b90dcae720bcc6/rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7", size = 860116 }, + { url = 
"https://files.pythonhosted.org/packages/88/df/6060c5a9c879b302bd47a73fc012d0db37abf6544c57591bcbc3459673bd/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27", size = 1905935 }, + { url = "https://files.pythonhosted.org/packages/a2/6c/a0b819b829e20525ef1bd58fc776fb8d07a0c38d819e63ba2b7c311a2ed4/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f", size = 1383714 }, + { url = "https://files.pythonhosted.org/packages/6a/c1/3da3466cc8a9bfb9cd345ad221fac311143b6a9664b5af4adb95b5e6ce01/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095", size = 1367329 }, + { url = "https://files.pythonhosted.org/packages/da/f0/9f2a9043bfc4e66da256b15d728c5fc2d865edf0028824337f5edac36783/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c", size = 5251057 }, + { url = "https://files.pythonhosted.org/packages/6a/ff/af2cb1d8acf9777d52487af5c6b34ce9d13381a753f991d95ecaca813407/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4", size = 2992401 }, + { url = "https://files.pythonhosted.org/packages/c1/c5/c243b05a15a27b946180db0d1e4c999bef3f4221505dff9748f1f6c917be/rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86", size = 1553782 }, ] [[package]] @@ -4974,22 +4980,23 @@ dependencies = [ { name = "lxml" }, { name = "regex" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b8/e4/260a202516886c2e0cc6e6ae96d1f491792d829098886d9529a2439fbe8e/readabilipy-0.3.0.tar.gz", hash = "sha256:e13313771216953935ac031db4234bdb9725413534bfb3c19dbd6caab0887ae0", size = 35491, upload-time = "2024-12-02T23:03:02.311Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b8/e4/260a202516886c2e0cc6e6ae96d1f491792d829098886d9529a2439fbe8e/readabilipy-0.3.0.tar.gz", hash = "sha256:e13313771216953935ac031db4234bdb9725413534bfb3c19dbd6caab0887ae0", size = 35491 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dd/46/8a640c6de1a6c6af971f858b2fb178ca5e1db91f223d8ba5f40efe1491e5/readabilipy-0.3.0-py3-none-any.whl", hash = "sha256:d106da0fad11d5fdfcde21f5c5385556bfa8ff0258483037d39ea6b1d6db3943", size = 22158, upload-time = "2024-12-02T23:03:00.438Z" }, + { url = "https://files.pythonhosted.org/packages/dd/46/8a640c6de1a6c6af971f858b2fb178ca5e1db91f223d8ba5f40efe1491e5/readabilipy-0.3.0-py3-none-any.whl", hash = "sha256:d106da0fad11d5fdfcde21f5c5385556bfa8ff0258483037d39ea6b1d6db3943", size = 22158 }, ] [[package]] name = "realtime" -version = "2.5.3" +version = "2.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "pydantic" }, { name = "typing-extensions" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/94/3cf962b814303a1688eece56a94b25a7bd423d60705f1124cba0896c9c07/realtime-2.5.3.tar.gz", hash = "sha256:0587594f3bc1c84bf007ff625075b86db6528843e03250dc84f4f2808be3d99a", size = 18527, upload-time = "2025-06-26T22:39:01.59Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d3/ca/e408fbdb6b344bf529c7e8bf020372d21114fe538392c72089462edd26e5/realtime-2.7.0.tar.gz", hash = "sha256:6b9434eeba8d756c8faf94fc0a32081d09f250d14d82b90341170602adbb019f", size = 18860 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/2a/f69c156a58d44b7b9ca22dab181b91e4d93d074f99923c75907bf3953d40/realtime-2.5.3-py3-none-any.whl", hash = "sha256:eb0994636946eff04c4c7f044f980c8c633c7eb632994f549f61053a474ac970", size = 21784, upload-time = "2025-06-26T22:38:59.98Z" }, + { url = "https://files.pythonhosted.org/packages/d2/07/a5c7aef12f9a3497f5ad77157a37915645861e8b23b89b2ad4b0f11b48ad/realtime-2.7.0-py3-none-any.whl", hash = "sha256:d55a278803529a69d61c7174f16563a9cfa5bacc1664f656959694481903d99c", size = 22409 }, ] [[package]] @@ -4999,9 +5006,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/07/8b/14ef373ffe71c0d2fde93c204eab78472ea13c021d9aee63b0e11bd65896/redis-6.1.1.tar.gz", hash = "sha256:88c689325b5b41cedcbdbdfd4d937ea86cf6dab2222a83e86d8a466e4b3d2600", size = 4629515, upload-time = "2025-06-02T11:44:04.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/8b/14ef373ffe71c0d2fde93c204eab78472ea13c021d9aee63b0e11bd65896/redis-6.1.1.tar.gz", hash = "sha256:88c689325b5b41cedcbdbdfd4d937ea86cf6dab2222a83e86d8a466e4b3d2600", size = 4629515 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/cd/29503c609186104c363ef1f38d6e752e7d91ef387fc90aa165e96d69f446/redis-6.1.1-py3-none-any.whl", hash = "sha256:ed44d53d065bbe04ac6d76864e331cfe5c5353f86f6deccc095f8794fd15bb2e", size = 273930, upload-time = "2025-06-02T11:44:02.705Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cd/29503c609186104c363ef1f38d6e752e7d91ef387fc90aa165e96d69f446/redis-6.1.1-py3-none-any.whl", hash = "sha256:ed44d53d065bbe04ac6d76864e331cfe5c5353f86f6deccc095f8794fd15bb2e", size = 273930 }, ] [package.optional-dependencies] @@ -5018,47 +5025,47 @@ dependencies = [ { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, ] [[package]] name = "regex" version = "2024.11.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = 
"sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494, upload-time = "2024-11-06T20:12:31.635Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/5f/bd69653fbfb76cf8604468d3b4ec4c403197144c7bfe0e6a5fc9e02a07cb/regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519", size = 399494 } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638", size = 482669, upload-time = "2024-11-06T20:09:31.064Z" }, - { url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7", size = 287684, upload-time = "2024-11-06T20:09:32.915Z" }, - { url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20", size = 284589, upload-time = "2024-11-06T20:09:35.504Z" }, - { url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121, upload-time = "2024-11-06T20:09:37.701Z" }, - { url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275, upload-time = "2024-11-06T20:09:40.371Z" }, - { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257, upload-time = "2024-11-06T20:09:43.059Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727, upload-time = "2024-11-06T20:09:48.19Z" }, - { url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667, upload-time = "2024-11-06T20:09:49.828Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963, upload-time = "2024-11-06T20:09:51.819Z" }, - { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700, upload-time = "2024-11-06T20:09:53.982Z" }, - { url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592, upload-time = "2024-11-06T20:09:56.222Z" }, - { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929, upload-time = "2024-11-06T20:09:58.642Z" }, - { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213, upload-time = "2024-11-06T20:10:00.867Z" }, - { url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9", size = 261734, upload-time = "2024-11-06T20:10:03.361Z" }, - { url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60", size = 274052, upload-time = "2024-11-06T20:10:05.179Z" }, - { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781, upload-time = "2024-11-06T20:10:07.07Z" }, - { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455, upload-time = "2024-11-06T20:10:09.117Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759, upload-time = "2024-11-06T20:10:11.155Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976, upload-time = "2024-11-06T20:10:13.24Z" }, - { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077, upload-time = "2024-11-06T20:10:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160, upload-time = "2024-11-06T20:10:19.027Z" }, - { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896, upload-time = "2024-11-06T20:10:21.85Z" }, - { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997, upload-time = "2024-11-06T20:10:24.329Z" }, - { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725, upload-time = "2024-11-06T20:10:28.067Z" }, - { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481, upload-time = "2024-11-06T20:10:31.612Z" }, - { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896, upload-time = "2024-11-06T20:10:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138, upload-time = "2024-11-06T20:10:36.142Z" }, - { url = "https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692, upload-time = "2024-11-06T20:10:38.394Z" }, - { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135, upload-time = "2024-11-06T20:10:40.367Z" }, - { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567, upload-time = "2024-11-06T20:10:43.467Z" }, + { url = "https://files.pythonhosted.org/packages/58/58/7e4d9493a66c88a7da6d205768119f51af0f684fe7be7bac8328e217a52c/regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638", size = 482669 }, + { url = "https://files.pythonhosted.org/packages/34/4c/8f8e631fcdc2ff978609eaeef1d6994bf2f028b59d9ac67640ed051f1218/regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7", size = 287684 }, + { 
url = "https://files.pythonhosted.org/packages/c5/1b/f0e4d13e6adf866ce9b069e191f303a30ab1277e037037a365c3aad5cc9c/regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20", size = 284589 }, + { url = "https://files.pythonhosted.org/packages/25/4d/ab21047f446693887f25510887e6820b93f791992994f6498b0318904d4a/regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114", size = 792121 }, + { url = "https://files.pythonhosted.org/packages/45/ee/c867e15cd894985cb32b731d89576c41a4642a57850c162490ea34b78c3b/regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3", size = 831275 }, + { url = "https://files.pythonhosted.org/packages/b3/12/b0f480726cf1c60f6536fa5e1c95275a77624f3ac8fdccf79e6727499e28/regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f", size = 818257 }, + { url = "https://files.pythonhosted.org/packages/bf/ce/0d0e61429f603bac433910d99ef1a02ce45a8967ffbe3cbee48599e62d88/regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0", size = 792727 }, + { url = "https://files.pythonhosted.org/packages/e4/c1/243c83c53d4a419c1556f43777ccb552bccdf79d08fda3980e4e77dd9137/regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55", size = 780667 }, + { url = "https://files.pythonhosted.org/packages/c5/f4/75eb0dd4ce4b37f04928987f1d22547ddaf6c4bae697623c1b05da67a8aa/regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89", size = 776963 }, + { url = "https://files.pythonhosted.org/packages/16/5d/95c568574e630e141a69ff8a254c2f188b4398e813c40d49228c9bbd9875/regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d", size = 784700 }, + { url = "https://files.pythonhosted.org/packages/8e/b5/f8495c7917f15cc6fee1e7f395e324ec3e00ab3c665a7dc9d27562fd5290/regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34", size = 848592 }, + { url = "https://files.pythonhosted.org/packages/1c/80/6dd7118e8cb212c3c60b191b932dc57db93fb2e36fb9e0e92f72a5909af9/regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d", size = 852929 }, + { url = "https://files.pythonhosted.org/packages/11/9b/5a05d2040297d2d254baf95eeeb6df83554e5e1df03bc1a6687fc4ba1f66/regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45", size = 781213 }, + { url = "https://files.pythonhosted.org/packages/26/b7/b14e2440156ab39e0177506c08c18accaf2b8932e39fb092074de733d868/regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9", size = 261734 }, + { url = "https://files.pythonhosted.org/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = 
"sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60", size = 274052 }, + { url = "https://files.pythonhosted.org/packages/ba/30/9a87ce8336b172cc232a0db89a3af97929d06c11ceaa19d97d84fa90a8f8/regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a", size = 483781 }, + { url = "https://files.pythonhosted.org/packages/01/e8/00008ad4ff4be8b1844786ba6636035f7ef926db5686e4c0f98093612add/regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9", size = 288455 }, + { url = "https://files.pythonhosted.org/packages/60/85/cebcc0aff603ea0a201667b203f13ba75d9fc8668fab917ac5b2de3967bc/regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2", size = 284759 }, + { url = "https://files.pythonhosted.org/packages/94/2b/701a4b0585cb05472a4da28ee28fdfe155f3638f5e1ec92306d924e5faf0/regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4", size = 794976 }, + { url = "https://files.pythonhosted.org/packages/4b/bf/fa87e563bf5fee75db8915f7352e1887b1249126a1be4813837f5dbec965/regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577", size = 833077 }, + { url = "https://files.pythonhosted.org/packages/a1/56/7295e6bad94b047f4d0834e4779491b81216583c00c288252ef625c01d23/regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3", size = 823160 }, + { url = "https://files.pythonhosted.org/packages/fb/13/e3b075031a738c9598c51cfbc4c7879e26729c53aa9cca59211c44235314/regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e", size = 796896 }, + { url = "https://files.pythonhosted.org/packages/24/56/0b3f1b66d592be6efec23a795b37732682520b47c53da5a32c33ed7d84e3/regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe", size = 783997 }, + { url = "https://files.pythonhosted.org/packages/f9/a1/eb378dada8b91c0e4c5f08ffb56f25fcae47bf52ad18f9b2f33b83e6d498/regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e", size = 781725 }, + { url = "https://files.pythonhosted.org/packages/83/f2/033e7dec0cfd6dda93390089864732a3409246ffe8b042e9554afa9bff4e/regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29", size = 789481 }, + { url = "https://files.pythonhosted.org/packages/83/23/15d4552ea28990a74e7696780c438aadd73a20318c47e527b47a4a5a596d/regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39", size = 852896 }, + { url = "https://files.pythonhosted.org/packages/e3/39/ed4416bc90deedbfdada2568b2cb0bc1fdb98efe11f5378d9892b2a88f8f/regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51", size = 860138 }, + { url = 
"https://files.pythonhosted.org/packages/93/2d/dd56bb76bd8e95bbce684326302f287455b56242a4f9c61f1bc76e28360e/regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad", size = 787692 }, + { url = "https://files.pythonhosted.org/packages/0b/55/31877a249ab7a5156758246b9c59539abbeba22461b7d8adc9e8475ff73e/regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54", size = 262135 }, + { url = "https://files.pythonhosted.org/packages/38/ec/ad2d7de49a600cdb8dd78434a1aeffe28b9d6fc42eb36afab4a27ad23384/regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b", size = 273567 }, ] [[package]] @@ -5071,9 +5078,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847 }, ] [[package]] @@ -5084,9 +5091,9 @@ dependencies = [ { name = "oauthlib" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650, upload-time = "2024-03-22T20:32:29.939Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/f2/05f29bc3913aea15eb670be136045bf5c5bbf4b99ecb839da9b422bb2c85/requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9", size = 55650 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" }, + { url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179 }, ] [[package]] @@ -5096,9 +5103,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = 
"sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481 }, ] [[package]] @@ -5109,9 +5116,9 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/2a/535a794e5b64f6ef4abc1342ef1a43465af2111c5185e98b4cca2a6b6b7a/resend-2.9.0.tar.gz", hash = "sha256:e8d4c909a7fe7701119789f848a6befb0a4a668e2182d7bbfe764742f1952bd3", size = 13600, upload-time = "2025-05-06T00:35:20.363Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/2a/535a794e5b64f6ef4abc1342ef1a43465af2111c5185e98b4cca2a6b6b7a/resend-2.9.0.tar.gz", hash = "sha256:e8d4c909a7fe7701119789f848a6befb0a4a668e2182d7bbfe764742f1952bd3", size = 13600 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/81/ba1feb9959bafbcde6466b78d4628405d69cd14613f6eba12b928a77b86a/resend-2.9.0-py2.py3-none-any.whl", hash = "sha256:6607f75e3a9257a219c0640f935b8d1211338190d553eb043c25732affb92949", size = 20173, upload-time = "2025-05-06T00:35:18.963Z" }, + { url = "https://files.pythonhosted.org/packages/96/81/ba1feb9959bafbcde6466b78d4628405d69cd14613f6eba12b928a77b86a/resend-2.9.0-py2.py3-none-any.whl", hash = "sha256:6607f75e3a9257a219c0640f935b8d1211338190d553eb043c25732affb92949", size = 20173 }, ] [[package]] @@ -5122,9 +5129,9 @@ dependencies = [ { name = "decorator" }, { name = "py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/72/75d0b85443fbc8d9f38d08d2b1b67cc184ce35280e4a3813cda2f445f3a4/retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4", size = 6448, upload-time = "2016-05-11T13:58:51.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/72/75d0b85443fbc8d9f38d08d2b1b67cc184ce35280e4a3813cda2f445f3a4/retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4", size = 6448 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4b/0d/53aea75710af4528a25ed6837d71d117602b01946b307a3912cb3cfcbcba/retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", size = 7986, upload-time = "2016-05-11T13:58:39.925Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0d/53aea75710af4528a25ed6837d71d117602b01946b307a3912cb3cfcbcba/retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", size = 7986 }, ] [[package]] @@ -5135,56 +5142,56 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = "2025-03-30T14:15:14.23Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, ] [[package]] name = "rpds-py" version = "0.26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385, upload-time = "2025-07-01T15:57:13.958Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/aa/4456d84bbb54adc6a916fb10c9b374f78ac840337644e4a5eda229c81275/rpds_py-0.26.0.tar.gz", hash = "sha256:20dae58a859b0906f0685642e591056f1e787f3a8b39c8e8749a45dc7d26bdb0", size = 27385 } wheels = [ - { url = "https://files.pythonhosted.org/packages/09/4c/4ee8f7e512030ff79fda1df3243c88d70fc874634e2dbe5df13ba4210078/rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed", size = 372610, upload-time = "2025-07-01T15:53:58.844Z" }, - { url = "https://files.pythonhosted.org/packages/fa/9d/3dc16be00f14fc1f03c71b1d67c8df98263ab2710a2fbd65a6193214a527/rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0", size = 358032, upload-time = "2025-07-01T15:53:59.985Z" }, - { url = "https://files.pythonhosted.org/packages/e7/5a/7f1bf8f045da2866324a08ae80af63e64e7bfaf83bd31f865a7b91a58601/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1", size = 381525, upload-time = "2025-07-01T15:54:01.162Z" }, - { url = "https://files.pythonhosted.org/packages/45/8a/04479398c755a066ace10e3d158866beb600867cacae194c50ffa783abd0/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7", size = 397089, upload-time = "2025-07-01T15:54:02.319Z" }, - { url = "https://files.pythonhosted.org/packages/72/88/9203f47268db488a1b6d469d69c12201ede776bb728b9d9f29dbfd7df406/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6", size = 514255, upload-time = "2025-07-01T15:54:03.38Z" }, - { url = "https://files.pythonhosted.org/packages/f5/b4/01ce5d1e853ddf81fbbd4311ab1eff0b3cf162d559288d10fd127e2588b5/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e", size = 402283, upload-time = "2025-07-01T15:54:04.923Z" }, - { url = "https://files.pythonhosted.org/packages/34/a2/004c99936997bfc644d590a9defd9e9c93f8286568f9c16cdaf3e14429a7/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d", size = 383881, upload-time = "2025-07-01T15:54:06.482Z" }, - { url = "https://files.pythonhosted.org/packages/05/1b/ef5fba4a8f81ce04c427bfd96223f92f05e6cd72291ce9d7523db3b03a6c/rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3", size = 415822, upload-time = "2025-07-01T15:54:07.605Z" }, - { url = "https://files.pythonhosted.org/packages/16/80/5c54195aec456b292f7bd8aa61741c8232964063fd8a75fdde9c1e982328/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107", size = 558347, upload-time = "2025-07-01T15:54:08.591Z" }, - { url = "https://files.pythonhosted.org/packages/f2/1c/1845c1b1fd6d827187c43afe1841d91678d7241cbdb5420a4c6de180a538/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a", size = 587956, upload-time = "2025-07-01T15:54:09.963Z" }, - { url = "https://files.pythonhosted.org/packages/2e/ff/9e979329dd131aa73a438c077252ddabd7df6d1a7ad7b9aacf6261f10faa/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318", size = 554363, upload-time = "2025-07-01T15:54:11.073Z" }, - { url = "https://files.pythonhosted.org/packages/00/8b/d78cfe034b71ffbe72873a136e71acc7a831a03e37771cfe59f33f6de8a2/rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a", size = 220123, upload-time = "2025-07-01T15:54:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/94/c1/3c8c94c7dd3905dbfde768381ce98778500a80db9924731d87ddcdb117e9/rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03", size = 231732, upload-time = "2025-07-01T15:54:13.434Z" }, - { url = "https://files.pythonhosted.org/packages/67/93/e936fbed1b734eabf36ccb5d93c6a2e9246fbb13c1da011624b7286fae3e/rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41", size = 221917, upload-time = "2025-07-01T15:54:14.559Z" }, - { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933, upload-time = "2025-07-01T15:54:15.734Z" }, - { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447, upload-time = "2025-07-01T15:54:16.922Z" }, - { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711, 
upload-time = "2025-07-01T15:54:18.101Z" }, - { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865, upload-time = "2025-07-01T15:54:19.295Z" }, - { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763, upload-time = "2025-07-01T15:54:20.858Z" }, - { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651, upload-time = "2025-07-01T15:54:22.508Z" }, - { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079, upload-time = "2025-07-01T15:54:23.987Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379, upload-time = "2025-07-01T15:54:25.073Z" }, - { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033, upload-time = "2025-07-01T15:54:26.225Z" }, - { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639, upload-time = "2025-07-01T15:54:27.424Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105, upload-time = "2025-07-01T15:54:29.93Z" }, - { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272, upload-time = "2025-07-01T15:54:31.128Z" }, - { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995, upload-time = "2025-07-01T15:54:32.195Z" }, - { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198, upload-time = "2025-07-01T15:54:33.271Z" }, - { 
url = "https://files.pythonhosted.org/packages/51/f2/b5c85b758a00c513bb0389f8fc8e61eb5423050c91c958cdd21843faa3e6/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674", size = 373505, upload-time = "2025-07-01T15:56:34.716Z" }, - { url = "https://files.pythonhosted.org/packages/23/e0/25db45e391251118e915e541995bb5f5ac5691a3b98fb233020ba53afc9b/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696", size = 359468, upload-time = "2025-07-01T15:56:36.219Z" }, - { url = "https://files.pythonhosted.org/packages/0b/73/dd5ee6075bb6491be3a646b301dfd814f9486d924137a5098e61f0487e16/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb", size = 382680, upload-time = "2025-07-01T15:56:37.644Z" }, - { url = "https://files.pythonhosted.org/packages/2f/10/84b522ff58763a5c443f5bcedc1820240e454ce4e620e88520f04589e2ea/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88", size = 397035, upload-time = "2025-07-01T15:56:39.241Z" }, - { url = "https://files.pythonhosted.org/packages/06/ea/8667604229a10a520fcbf78b30ccc278977dcc0627beb7ea2c96b3becef0/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8", size = 514922, upload-time = "2025-07-01T15:56:40.645Z" }, - { url = "https://files.pythonhosted.org/packages/24/e6/9ed5b625c0661c4882fc8cdf302bf8e96c73c40de99c31e0b95ed37d508c/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5", size = 402822, upload-time = "2025-07-01T15:56:42.137Z" }, - { url = "https://files.pythonhosted.org/packages/8a/58/212c7b6fd51946047fb45d3733da27e2fa8f7384a13457c874186af691b1/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7", size = 384336, upload-time = "2025-07-01T15:56:44.239Z" }, - { url = "https://files.pythonhosted.org/packages/aa/f5/a40ba78748ae8ebf4934d4b88e77b98497378bc2c24ba55ebe87a4e87057/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b", size = 416871, upload-time = "2025-07-01T15:56:46.284Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a6/33b1fc0c9f7dcfcfc4a4353daa6308b3ece22496ceece348b3e7a7559a09/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb", size = 559439, upload-time = "2025-07-01T15:56:48.549Z" }, - { url = "https://files.pythonhosted.org/packages/71/2d/ceb3f9c12f8cfa56d34995097f6cd99da1325642c60d1b6680dd9df03ed8/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0", size = 588380, upload-time = "2025-07-01T15:56:50.086Z" }, - { url = "https://files.pythonhosted.org/packages/c8/ed/9de62c2150ca8e2e5858acf3f4f4d0d180a38feef9fdab4078bea63d8dba/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c", size = 555334, upload-time = "2025-07-01T15:56:51.703Z" }, + { url = "https://files.pythonhosted.org/packages/09/4c/4ee8f7e512030ff79fda1df3243c88d70fc874634e2dbe5df13ba4210078/rpds_py-0.26.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9e8cb77286025bdb21be2941d64ac6ca016130bfdcd228739e8ab137eb4406ed", size = 372610 }, + { url = "https://files.pythonhosted.org/packages/fa/9d/3dc16be00f14fc1f03c71b1d67c8df98263ab2710a2fbd65a6193214a527/rpds_py-0.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e09330b21d98adc8ccb2dbb9fc6cb434e8908d4c119aeaa772cb1caab5440a0", size = 358032 }, + { url = "https://files.pythonhosted.org/packages/e7/5a/7f1bf8f045da2866324a08ae80af63e64e7bfaf83bd31f865a7b91a58601/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9c1b92b774b2e68d11193dc39620d62fd8ab33f0a3c77ecdabe19c179cdbc1", size = 381525 }, + { url = "https://files.pythonhosted.org/packages/45/8a/04479398c755a066ace10e3d158866beb600867cacae194c50ffa783abd0/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:824e6d3503ab990d7090768e4dfd9e840837bae057f212ff9f4f05ec6d1975e7", size = 397089 }, + { url = "https://files.pythonhosted.org/packages/72/88/9203f47268db488a1b6d469d69c12201ede776bb728b9d9f29dbfd7df406/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ad7fd2258228bf288f2331f0a6148ad0186b2e3643055ed0db30990e59817a6", size = 514255 }, + { url = "https://files.pythonhosted.org/packages/f5/b4/01ce5d1e853ddf81fbbd4311ab1eff0b3cf162d559288d10fd127e2588b5/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0dc23bbb3e06ec1ea72d515fb572c1fea59695aefbffb106501138762e1e915e", size = 402283 }, + { url = "https://files.pythonhosted.org/packages/34/a2/004c99936997bfc644d590a9defd9e9c93f8286568f9c16cdaf3e14429a7/rpds_py-0.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80bf832ac7b1920ee29a426cdca335f96a2b5caa839811803e999b41ba9030d", size = 383881 }, + { url = "https://files.pythonhosted.org/packages/05/1b/ef5fba4a8f81ce04c427bfd96223f92f05e6cd72291ce9d7523db3b03a6c/rpds_py-0.26.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0919f38f5542c0a87e7b4afcafab6fd2c15386632d249e9a087498571250abe3", size = 415822 }, + { url = "https://files.pythonhosted.org/packages/16/80/5c54195aec456b292f7bd8aa61741c8232964063fd8a75fdde9c1e982328/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d422b945683e409000c888e384546dbab9009bb92f7c0b456e217988cf316107", size = 558347 }, + { url = "https://files.pythonhosted.org/packages/f2/1c/1845c1b1fd6d827187c43afe1841d91678d7241cbdb5420a4c6de180a538/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a7711fa562ba2da1aa757e11024ad6d93bad6ad7ede5afb9af144623e5f76a", size = 587956 }, + { url = "https://files.pythonhosted.org/packages/2e/ff/9e979329dd131aa73a438c077252ddabd7df6d1a7ad7b9aacf6261f10faa/rpds_py-0.26.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238e8c8610cb7c29460e37184f6799547f7e09e6a9bdbdab4e8edb90986a2318", size = 554363 }, + { url = "https://files.pythonhosted.org/packages/00/8b/d78cfe034b71ffbe72873a136e71acc7a831a03e37771cfe59f33f6de8a2/rpds_py-0.26.0-cp311-cp311-win32.whl", hash = "sha256:893b022bfbdf26d7bedb083efeea624e8550ca6eb98bf7fea30211ce95b9201a", size = 220123 }, + { url = 
"https://files.pythonhosted.org/packages/94/c1/3c8c94c7dd3905dbfde768381ce98778500a80db9924731d87ddcdb117e9/rpds_py-0.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:87a5531de9f71aceb8af041d72fc4cab4943648d91875ed56d2e629bef6d4c03", size = 231732 }, + { url = "https://files.pythonhosted.org/packages/67/93/e936fbed1b734eabf36ccb5d93c6a2e9246fbb13c1da011624b7286fae3e/rpds_py-0.26.0-cp311-cp311-win_arm64.whl", hash = "sha256:de2713f48c1ad57f89ac25b3cb7daed2156d8e822cf0eca9b96a6f990718cc41", size = 221917 }, + { url = "https://files.pythonhosted.org/packages/ea/86/90eb87c6f87085868bd077c7a9938006eb1ce19ed4d06944a90d3560fce2/rpds_py-0.26.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:894514d47e012e794f1350f076c427d2347ebf82f9b958d554d12819849a369d", size = 363933 }, + { url = "https://files.pythonhosted.org/packages/63/78/4469f24d34636242c924626082b9586f064ada0b5dbb1e9d096ee7a8e0c6/rpds_py-0.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc921b96fa95a097add244da36a1d9e4f3039160d1d30f1b35837bf108c21136", size = 350447 }, + { url = "https://files.pythonhosted.org/packages/ad/91/c448ed45efdfdade82348d5e7995e15612754826ea640afc20915119734f/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1157659470aa42a75448b6e943c895be8c70531c43cb78b9ba990778955582", size = 384711 }, + { url = "https://files.pythonhosted.org/packages/ec/43/e5c86fef4be7f49828bdd4ecc8931f0287b1152c0bb0163049b3218740e7/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:521ccf56f45bb3a791182dc6b88ae5f8fa079dd705ee42138c76deb1238e554e", size = 400865 }, + { url = "https://files.pythonhosted.org/packages/55/34/e00f726a4d44f22d5c5fe2e5ddd3ac3d7fd3f74a175607781fbdd06fe375/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9def736773fd56b305c0eef698be5192c77bfa30d55a0e5885f80126c4831a15", size = 517763 }, + { url = "https://files.pythonhosted.org/packages/52/1c/52dc20c31b147af724b16104500fba13e60123ea0334beba7b40e33354b4/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdad4ea3b4513b475e027be79e5a0ceac8ee1c113a1a11e5edc3c30c29f964d8", size = 406651 }, + { url = "https://files.pythonhosted.org/packages/2e/77/87d7bfabfc4e821caa35481a2ff6ae0b73e6a391bb6b343db2c91c2b9844/rpds_py-0.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b165b07f416bdccf5c84546a484cc8f15137ca38325403864bfdf2b5b72f6a", size = 386079 }, + { url = "https://files.pythonhosted.org/packages/e3/d4/7f2200c2d3ee145b65b3cddc4310d51f7da6a26634f3ac87125fd789152a/rpds_py-0.26.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d04cab0a54b9dba4d278fe955a1390da3cf71f57feb78ddc7cb67cbe0bd30323", size = 421379 }, + { url = "https://files.pythonhosted.org/packages/ae/13/9fdd428b9c820869924ab62236b8688b122baa22d23efdd1c566938a39ba/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:79061ba1a11b6a12743a2b0f72a46aa2758613d454aa6ba4f5a265cc48850158", size = 562033 }, + { url = "https://files.pythonhosted.org/packages/f3/e1/b69686c3bcbe775abac3a4c1c30a164a2076d28df7926041f6c0eb5e8d28/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f405c93675d8d4c5ac87364bb38d06c988e11028a64b52a47158a355079661f3", size = 591639 }, + { url = "https://files.pythonhosted.org/packages/5c/c9/1e3d8c8863c84a90197ac577bbc3d796a92502124c27092413426f670990/rpds_py-0.26.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dafd4c44b74aa4bed4b250f1aed165b8ef5de743bcca3b88fc9619b6087093d2", size = 557105 }, + { url = "https://files.pythonhosted.org/packages/9f/c5/90c569649057622959f6dcc40f7b516539608a414dfd54b8d77e3b201ac0/rpds_py-0.26.0-cp312-cp312-win32.whl", hash = "sha256:3da5852aad63fa0c6f836f3359647870e21ea96cf433eb393ffa45263a170d44", size = 223272 }, + { url = "https://files.pythonhosted.org/packages/7d/16/19f5d9f2a556cfed454eebe4d354c38d51c20f3db69e7b4ce6cff904905d/rpds_py-0.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf47cfdabc2194a669dcf7a8dbba62e37a04c5041d2125fae0233b720da6f05c", size = 234995 }, + { url = "https://files.pythonhosted.org/packages/83/f0/7935e40b529c0e752dfaa7880224771b51175fce08b41ab4a92eb2fbdc7f/rpds_py-0.26.0-cp312-cp312-win_arm64.whl", hash = "sha256:20ab1ae4fa534f73647aad289003f1104092890849e0266271351922ed5574f8", size = 223198 }, + { url = "https://files.pythonhosted.org/packages/51/f2/b5c85b758a00c513bb0389f8fc8e61eb5423050c91c958cdd21843faa3e6/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f61a9326f80ca59214d1cceb0a09bb2ece5b2563d4e0cd37bfd5515c28510674", size = 373505 }, + { url = "https://files.pythonhosted.org/packages/23/e0/25db45e391251118e915e541995bb5f5ac5691a3b98fb233020ba53afc9b/rpds_py-0.26.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:183f857a53bcf4b1b42ef0f57ca553ab56bdd170e49d8091e96c51c3d69ca696", size = 359468 }, + { url = "https://files.pythonhosted.org/packages/0b/73/dd5ee6075bb6491be3a646b301dfd814f9486d924137a5098e61f0487e16/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:941c1cfdf4799d623cf3aa1d326a6b4fdb7a5799ee2687f3516738216d2262fb", size = 382680 }, + { url = "https://files.pythonhosted.org/packages/2f/10/84b522ff58763a5c443f5bcedc1820240e454ce4e620e88520f04589e2ea/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72a8d9564a717ee291f554eeb4bfeafe2309d5ec0aa6c475170bdab0f9ee8e88", size = 397035 }, + { url = "https://files.pythonhosted.org/packages/06/ea/8667604229a10a520fcbf78b30ccc278977dcc0627beb7ea2c96b3becef0/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:511d15193cbe013619dd05414c35a7dedf2088fcee93c6bbb7c77859765bd4e8", size = 514922 }, + { url = "https://files.pythonhosted.org/packages/24/e6/9ed5b625c0661c4882fc8cdf302bf8e96c73c40de99c31e0b95ed37d508c/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aea1f9741b603a8d8fedb0ed5502c2bc0accbc51f43e2ad1337fe7259c2b77a5", size = 402822 }, + { url = "https://files.pythonhosted.org/packages/8a/58/212c7b6fd51946047fb45d3733da27e2fa8f7384a13457c874186af691b1/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4019a9d473c708cf2f16415688ef0b4639e07abaa569d72f74745bbeffafa2c7", size = 384336 }, + { url = "https://files.pythonhosted.org/packages/aa/f5/a40ba78748ae8ebf4934d4b88e77b98497378bc2c24ba55ebe87a4e87057/rpds_py-0.26.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:093d63b4b0f52d98ebae33b8c50900d3d67e0666094b1be7a12fffd7f65de74b", size = 416871 }, + { url = "https://files.pythonhosted.org/packages/d5/a6/33b1fc0c9f7dcfcfc4a4353daa6308b3ece22496ceece348b3e7a7559a09/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2abe21d8ba64cded53a2a677e149ceb76dcf44284202d737178afe7ba540c1eb", size = 559439 }, + { url = 
"https://files.pythonhosted.org/packages/71/2d/ceb3f9c12f8cfa56d34995097f6cd99da1325642c60d1b6680dd9df03ed8/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:4feb7511c29f8442cbbc28149a92093d32e815a28aa2c50d333826ad2a20fdf0", size = 588380 }, + { url = "https://files.pythonhosted.org/packages/c8/ed/9de62c2150ca8e2e5858acf3f4f4d0d180a38feef9fdab4078bea63d8dba/rpds_py-0.26.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e99685fc95d386da368013e7fb4269dd39c30d99f812a8372d62f244f662709c", size = 555334 }, ] [[package]] @@ -5194,34 +5201,34 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyasn1" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034 } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696 }, ] [[package]] name = "ruff" version = "0.12.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/2a/43955b530c49684d3c38fcda18c43caf91e99204c2a065552528e0552d4f/ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77", size = 4459341, upload-time = "2025-07-11T13:21:16.086Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/2a/43955b530c49684d3c38fcda18c43caf91e99204c2a065552528e0552d4f/ruff-0.12.3.tar.gz", hash = "sha256:f1b5a4b6668fd7b7ea3697d8d98857390b40c1320a63a178eee6be0899ea2d77", size = 4459341 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fd/b44c5115539de0d598d75232a1cc7201430b6891808df111b8b0506aae43/ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2", size = 10430499, upload-time = "2025-07-11T13:20:26.321Z" }, - { url = "https://files.pythonhosted.org/packages/43/c5/9eba4f337970d7f639a37077be067e4ec80a2ad359e4cc6c5b56805cbc66/ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041", size = 11213413, upload-time = "2025-07-11T13:20:30.017Z" }, - { url = "https://files.pythonhosted.org/packages/e2/2c/fac3016236cf1fe0bdc8e5de4f24c76ce53c6dd9b5f350d902549b7719b2/ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882", size = 10586941, upload-time = "2025-07-11T13:20:33.046Z" }, - { url = "https://files.pythonhosted.org/packages/c5/0f/41fec224e9dfa49a139f0b402ad6f5d53696ba1800e0f77b279d55210ca9/ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901", size = 10783001, upload-time = "2025-07-11T13:20:35.534Z" }, - { url = "https://files.pythonhosted.org/packages/0d/ca/dd64a9ce56d9ed6cad109606ac014860b1c217c883e93bf61536400ba107/ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0", size = 10269641, upload-time = "2025-07-11T13:20:38.459Z" }, - { url = "https://files.pythonhosted.org/packages/63/5c/2be545034c6bd5ce5bb740ced3e7014d7916f4c445974be11d2a406d5088/ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6", size = 11875059, upload-time = "2025-07-11T13:20:41.517Z" }, - { url = "https://files.pythonhosted.org/packages/8e/d4/a74ef1e801ceb5855e9527dae105eaff136afcb9cc4d2056d44feb0e4792/ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc", size = 12658890, upload-time = "2025-07-11T13:20:44.442Z" }, - { url = "https://files.pythonhosted.org/packages/13/c8/1057916416de02e6d7c9bcd550868a49b72df94e3cca0aeb77457dcd9644/ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687", size = 12232008, upload-time = "2025-07-11T13:20:47.374Z" }, - { url = "https://files.pythonhosted.org/packages/f5/59/4f7c130cc25220392051fadfe15f63ed70001487eca21d1796db46cbcc04/ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e", size = 11499096, upload-time = "2025-07-11T13:20:50.348Z" }, - { url = "https://files.pythonhosted.org/packages/d4/01/a0ad24a5d2ed6be03a312e30d32d4e3904bfdbc1cdbe63c47be9d0e82c79/ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311", size = 11688307, upload-time = "2025-07-11T13:20:52.945Z" }, - { url = "https://files.pythonhosted.org/packages/93/72/08f9e826085b1f57c9a0226e48acb27643ff19b61516a34c6cab9d6ff3fa/ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07", size = 10661020, upload-time = "2025-07-11T13:20:55.799Z" }, - { url = "https://files.pythonhosted.org/packages/80/a0/68da1250d12893466c78e54b4a0ff381370a33d848804bb51279367fc688/ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12", size = 10246300, upload-time = "2025-07-11T13:20:58.222Z" }, - { url = "https://files.pythonhosted.org/packages/6a/22/5f0093d556403e04b6fd0984fc0fb32fbb6f6ce116828fd54306a946f444/ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b", size = 11263119, upload-time = "2025-07-11T13:21:01.503Z" }, - { url = "https://files.pythonhosted.org/packages/92/c9/f4c0b69bdaffb9968ba40dd5fa7df354ae0c73d01f988601d8fac0c639b1/ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f", size = 11746990, upload-time = "2025-07-11T13:21:04.524Z" }, - { url = "https://files.pythonhosted.org/packages/fe/84/7cc7bd73924ee6be4724be0db5414a4a2ed82d06b30827342315a1be9e9c/ruff-0.12.3-py3-none-win32.whl", hash = 
"sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d", size = 10589263, upload-time = "2025-07-11T13:21:07.148Z" }, - { url = "https://files.pythonhosted.org/packages/07/87/c070f5f027bd81f3efee7d14cb4d84067ecf67a3a8efb43aadfc72aa79a6/ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7", size = 11695072, upload-time = "2025-07-11T13:21:11.004Z" }, - { url = "https://files.pythonhosted.org/packages/e0/30/f3eaf6563c637b6e66238ed6535f6775480db973c836336e4122161986fc/ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1", size = 10805855, upload-time = "2025-07-11T13:21:13.547Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fd/b44c5115539de0d598d75232a1cc7201430b6891808df111b8b0506aae43/ruff-0.12.3-py3-none-linux_armv6l.whl", hash = "sha256:47552138f7206454eaf0c4fe827e546e9ddac62c2a3d2585ca54d29a890137a2", size = 10430499 }, + { url = "https://files.pythonhosted.org/packages/43/c5/9eba4f337970d7f639a37077be067e4ec80a2ad359e4cc6c5b56805cbc66/ruff-0.12.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0a9153b000c6fe169bb307f5bd1b691221c4286c133407b8827c406a55282041", size = 11213413 }, + { url = "https://files.pythonhosted.org/packages/e2/2c/fac3016236cf1fe0bdc8e5de4f24c76ce53c6dd9b5f350d902549b7719b2/ruff-0.12.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fa6b24600cf3b750e48ddb6057e901dd5b9aa426e316addb2a1af185a7509882", size = 10586941 }, + { url = "https://files.pythonhosted.org/packages/c5/0f/41fec224e9dfa49a139f0b402ad6f5d53696ba1800e0f77b279d55210ca9/ruff-0.12.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2506961bf6ead54887ba3562604d69cb430f59b42133d36976421bc8bd45901", size = 10783001 }, + { url = "https://files.pythonhosted.org/packages/0d/ca/dd64a9ce56d9ed6cad109606ac014860b1c217c883e93bf61536400ba107/ruff-0.12.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4faaff1f90cea9d3033cbbcdf1acf5d7fb11d8180758feb31337391691f3df0", size = 10269641 }, + { url = "https://files.pythonhosted.org/packages/63/5c/2be545034c6bd5ce5bb740ced3e7014d7916f4c445974be11d2a406d5088/ruff-0.12.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40dced4a79d7c264389de1c59467d5d5cefd79e7e06d1dfa2c75497b5269a5a6", size = 11875059 }, + { url = "https://files.pythonhosted.org/packages/8e/d4/a74ef1e801ceb5855e9527dae105eaff136afcb9cc4d2056d44feb0e4792/ruff-0.12.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0262d50ba2767ed0fe212aa7e62112a1dcbfd46b858c5bf7bbd11f326998bafc", size = 12658890 }, + { url = "https://files.pythonhosted.org/packages/13/c8/1057916416de02e6d7c9bcd550868a49b72df94e3cca0aeb77457dcd9644/ruff-0.12.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12371aec33e1a3758597c5c631bae9a5286f3c963bdfb4d17acdd2d395406687", size = 12232008 }, + { url = "https://files.pythonhosted.org/packages/f5/59/4f7c130cc25220392051fadfe15f63ed70001487eca21d1796db46cbcc04/ruff-0.12.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:560f13b6baa49785665276c963edc363f8ad4b4fc910a883e2625bdb14a83a9e", size = 11499096 }, + { url = "https://files.pythonhosted.org/packages/d4/01/a0ad24a5d2ed6be03a312e30d32d4e3904bfdbc1cdbe63c47be9d0e82c79/ruff-0.12.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023040a3499f6f974ae9091bcdd0385dd9e9eb4942f231c23c57708147b06311", size = 11688307 }, + { url = 
"https://files.pythonhosted.org/packages/93/72/08f9e826085b1f57c9a0226e48acb27643ff19b61516a34c6cab9d6ff3fa/ruff-0.12.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:883d844967bffff5ab28bba1a4d246c1a1b2933f48cb9840f3fdc5111c603b07", size = 10661020 }, + { url = "https://files.pythonhosted.org/packages/80/a0/68da1250d12893466c78e54b4a0ff381370a33d848804bb51279367fc688/ruff-0.12.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2120d3aa855ff385e0e562fdee14d564c9675edbe41625c87eeab744a7830d12", size = 10246300 }, + { url = "https://files.pythonhosted.org/packages/6a/22/5f0093d556403e04b6fd0984fc0fb32fbb6f6ce116828fd54306a946f444/ruff-0.12.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6b16647cbb470eaf4750d27dddc6ebf7758b918887b56d39e9c22cce2049082b", size = 11263119 }, + { url = "https://files.pythonhosted.org/packages/92/c9/f4c0b69bdaffb9968ba40dd5fa7df354ae0c73d01f988601d8fac0c639b1/ruff-0.12.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e1417051edb436230023575b149e8ff843a324557fe0a265863b7602df86722f", size = 11746990 }, + { url = "https://files.pythonhosted.org/packages/fe/84/7cc7bd73924ee6be4724be0db5414a4a2ed82d06b30827342315a1be9e9c/ruff-0.12.3-py3-none-win32.whl", hash = "sha256:dfd45e6e926deb6409d0616078a666ebce93e55e07f0fb0228d4b2608b2c248d", size = 10589263 }, + { url = "https://files.pythonhosted.org/packages/07/87/c070f5f027bd81f3efee7d14cb4d84067ecf67a3a8efb43aadfc72aa79a6/ruff-0.12.3-py3-none-win_amd64.whl", hash = "sha256:a946cf1e7ba3209bdef039eb97647f1c77f6f540e5845ec9c114d3af8df873e7", size = 11695072 }, + { url = "https://files.pythonhosted.org/packages/e0/30/f3eaf6563c637b6e66238ed6535f6775480db973c836336e4122161986fc/ruff-0.12.3-py3-none-win_arm64.whl", hash = "sha256:5f9c7c9c8f84c2d7f27e93674d27136fbf489720251544c4da7fb3d742e011b1", size = 10805855 }, ] [[package]] @@ -5231,31 +5238,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287, upload-time = "2024-11-20T21:06:05.981Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/0a/1cdbabf9edd0ea7747efdf6c9ab4e7061b085aa7f9bfc36bb1601563b069/s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7", size = 145287 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175, upload-time = "2024-11-20T21:06:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/66/05/7957af15543b8c9799209506df4660cba7afc4cf94bfb60513827e96bed6/s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e", size = 83175 }, ] [[package]] name = "safetensors" version = "0.5.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210, upload-time = "2025-02-26T09:15:13.155Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/71/7e/2d5d6ee7b40c0682315367ec7475693d110f512922d582fef1bd4a63adc3/safetensors-0.5.3.tar.gz", hash = "sha256:b6b0d6ecacec39a4fdd99cc19f4576f5219ce858e6fd8dbe7609df0b8dc56965", size = 67210 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917, upload-time = "2025-02-26T09:15:03.702Z" }, - { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419, upload-time = "2025-02-26T09:15:01.765Z" }, - { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493, upload-time = "2025-02-26T09:14:51.812Z" }, - { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 472400, upload-time = "2025-02-26T09:14:53.549Z" }, - { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891, upload-time = "2025-02-26T09:14:55.717Z" }, - { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694, upload-time = "2025-02-26T09:14:57.036Z" }, - { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642, upload-time = "2025-02-26T09:15:00.544Z" }, - { url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241, upload-time = "2025-02-26T09:14:58.303Z" }, - { url = "https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001, upload-time = "2025-02-26T09:15:05.79Z" }, - { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013, upload-time = "2025-02-26T09:15:07.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687, upload-time = "2025-02-26T09:15:09.979Z" }, - { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147, upload-time = "2025-02-26T09:15:11.185Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677, upload-time = "2025-02-26T09:15:16.554Z" }, - { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878, upload-time = "2025-02-26T09:15:14.99Z" }, + { url = "https://files.pythonhosted.org/packages/18/ae/88f6c49dbd0cc4da0e08610019a3c78a7d390879a919411a410a1876d03a/safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073", size = 436917 }, + { url = "https://files.pythonhosted.org/packages/b8/3b/11f1b4a2f5d2ab7da34ecc062b0bc301f2be024d110a6466726bec8c055c/safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7", size = 418419 }, + { url = "https://files.pythonhosted.org/packages/5d/9a/add3e6fef267658075c5a41573c26d42d80c935cdc992384dfae435feaef/safetensors-0.5.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11bce6164887cd491ca75c2326a113ba934be596e22b28b1742ce27b1d076467", size = 459493 }, + { url = "https://files.pythonhosted.org/packages/df/5c/bf2cae92222513cc23b3ff85c4a1bb2811a2c3583ac0f8e8d502751de934/safetensors-0.5.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4a243be3590bc3301c821da7a18d87224ef35cbd3e5f5727e4e0728b8172411e", size = 472400 }, + { url = "https://files.pythonhosted.org/packages/58/11/7456afb740bd45782d0f4c8e8e1bb9e572f1bf82899fb6ace58af47b4282/safetensors-0.5.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bd84b12b1670a6f8e50f01e28156422a2bc07fb16fc4e98bded13039d688a0d", size = 522891 }, + { url = "https://files.pythonhosted.org/packages/57/3d/fe73a9d2ace487e7285f6e157afee2383bd1ddb911b7cb44a55cf812eae3/safetensors-0.5.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:391ac8cab7c829452175f871fcaf414aa1e292b5448bd02620f675a7f3e7abb9", size = 537694 }, + { url = "https://files.pythonhosted.org/packages/a6/f8/dae3421624fcc87a89d42e1898a798bc7ff72c61f38973a65d60df8f124c/safetensors-0.5.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cead1fa41fc54b1e61089fa57452e8834f798cb1dc7a09ba3524f1eb08e0317a", size = 471642 }, + { url = "https://files.pythonhosted.org/packages/ce/20/1fbe16f9b815f6c5a672f5b760951e20e17e43f67f231428f871909a37f6/safetensors-0.5.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1077f3e94182d72618357b04b5ced540ceb71c8a813d3319f1aba448e68a770d", size = 502241 }, + { url = 
"https://files.pythonhosted.org/packages/5f/18/8e108846b506487aa4629fe4116b27db65c3dde922de2c8e0cc1133f3f29/safetensors-0.5.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:799021e78287bac619c7b3f3606730a22da4cda27759ddf55d37c8db7511c74b", size = 638001 }, + { url = "https://files.pythonhosted.org/packages/82/5a/c116111d8291af6c8c8a8b40628fe833b9db97d8141c2a82359d14d9e078/safetensors-0.5.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:df26da01aaac504334644e1b7642fa000bfec820e7cef83aeac4e355e03195ff", size = 734013 }, + { url = "https://files.pythonhosted.org/packages/7d/ff/41fcc4d3b7de837963622e8610d998710705bbde9a8a17221d85e5d0baad/safetensors-0.5.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:32c3ef2d7af8b9f52ff685ed0bc43913cdcde135089ae322ee576de93eae5135", size = 670687 }, + { url = "https://files.pythonhosted.org/packages/40/ad/2b113098e69c985a3d8fbda4b902778eae4a35b7d5188859b4a63d30c161/safetensors-0.5.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:37f1521be045e56fc2b54c606d4455573e717b2d887c579ee1dbba5f868ece04", size = 643147 }, + { url = "https://files.pythonhosted.org/packages/0a/0c/95aeb51d4246bd9a3242d3d8349c1112b4ee7611a4b40f0c5c93b05f001d/safetensors-0.5.3-cp38-abi3-win32.whl", hash = "sha256:cfc0ec0846dcf6763b0ed3d1846ff36008c6e7290683b61616c4b040f6a54ace", size = 296677 }, + { url = "https://files.pythonhosted.org/packages/69/e2/b011c38e5394c4c18fb5500778a55ec43ad6106126e74723ffaee246f56e/safetensors-0.5.3-cp38-abi3-win_amd64.whl", hash = "sha256:836cbbc320b47e80acd40e44c8682db0e8ad7123209f69b093def21ec7cafd11", size = 308878 }, ] [[package]] @@ -5265,9 +5272,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/19/a8461383f7328300e83c34f58bf38ccc05f57c2289c0e54e2bea757de83c/scipy_stubs-1.16.0.2.tar.gz", hash = "sha256:f83aacaf2e899d044de6483e6112bf7a1942d683304077bc9e78cf6f21353acd", size = 306747, upload-time = "2025-07-01T23:19:04.513Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/19/a8461383f7328300e83c34f58bf38ccc05f57c2289c0e54e2bea757de83c/scipy_stubs-1.16.0.2.tar.gz", hash = "sha256:f83aacaf2e899d044de6483e6112bf7a1942d683304077bc9e78cf6f21353acd", size = 306747 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/30/b73418e6d3d8209fef684841d9a0e5b439d3528fa341a23b632fe47918dd/scipy_stubs-1.16.0.2-py3-none-any.whl", hash = "sha256:dc364d24a3accd1663e7576480bdb720533f94de8a05590354ff6d4a83d765c7", size = 491346, upload-time = "2025-07-01T23:19:03.222Z" }, + { url = "https://files.pythonhosted.org/packages/8f/30/b73418e6d3d8209fef684841d9a0e5b439d3528fa341a23b632fe47918dd/scipy_stubs-1.16.0.2-py3-none-any.whl", hash = "sha256:dc364d24a3accd1663e7576480bdb720533f94de8a05590354ff6d4a83d765c7", size = 491346 }, ] [[package]] @@ -5279,9 +5286,9 @@ dependencies = [ { name = "python-http-client" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, upload-time = "2025-06-12T10:29:35.457Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122 }, ] [[package]] @@ -5292,9 +5299,9 @@ dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/bb/6a41b2e0e9121bed4d2ec68d50568ab95c49f4744156a9bbb789c866c66d/sentry_sdk-2.28.0.tar.gz", hash = "sha256:14d2b73bc93afaf2a9412490329099e6217761cbab13b6ee8bc0e82927e1504e", size = 325052, upload-time = "2025-05-12T07:53:12.785Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/bb/6a41b2e0e9121bed4d2ec68d50568ab95c49f4744156a9bbb789c866c66d/sentry_sdk-2.28.0.tar.gz", hash = "sha256:14d2b73bc93afaf2a9412490329099e6217761cbab13b6ee8bc0e82927e1504e", size = 325052 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/4e/b1575833094c088dfdef63fbca794518860fcbc8002aadf51ebe8b6a387f/sentry_sdk-2.28.0-py2.py3-none-any.whl", hash = "sha256:51496e6cb3cb625b99c8e08907c67a9112360259b0ef08470e532c3ab184a232", size = 341693, upload-time = "2025-05-12T07:53:10.882Z" }, + { url = "https://files.pythonhosted.org/packages/9b/4e/b1575833094c088dfdef63fbca794518860fcbc8002aadf51ebe8b6a387f/sentry_sdk-2.28.0-py2.py3-none-any.whl", hash = "sha256:51496e6cb3cb625b99c8e08907c67a9112360259b0ef08470e532c3ab184a232", size = 341693 }, ] [package.optional-dependencies] @@ -5308,9 +5315,9 @@ flask = [ name = "setuptools" version = "80.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, ] [[package]] @@ -5320,87 +5327,87 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422 } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368, upload-time = "2025-05-19T11:03:55.937Z" }, - { url = "https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362, upload-time = "2025-05-19T11:03:57.06Z" }, - { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005, upload-time = "2025-05-19T11:03:58.692Z" }, - { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489, upload-time = "2025-05-19T11:04:00.059Z" }, - { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727, upload-time = "2025-05-19T11:04:01.786Z" }, - { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311, upload-time = "2025-05-19T11:04:03.134Z" }, - { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982, upload-time = "2025-05-19T11:04:05.217Z" }, - { url = "https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872, upload-time = "2025-05-19T11:04:06.791Z" }, - { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021, upload-time = "2025-05-19T11:04:08.022Z" }, - { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018, upload-time = "2025-05-19T11:04:09.343Z" }, - { url = 
"https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417, upload-time = "2025-05-19T11:04:10.56Z" }, - { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224, upload-time = "2025-05-19T11:04:11.903Z" }, - { url = "https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982, upload-time = "2025-05-19T11:04:13.224Z" }, - { url = "https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122, upload-time = "2025-05-19T11:04:14.477Z" }, - { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437, upload-time = "2025-05-19T11:04:16.203Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479, upload-time = "2025-05-19T11:04:18.497Z" }, + { url = "https://files.pythonhosted.org/packages/19/97/2df985b1e03f90c503796ad5ecd3d9ed305123b64d4ccb54616b30295b29/shapely-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587a1aa72bc858fab9b8c20427b5f6027b7cbc92743b8e2c73b9de55aa71c7a7", size = 1819368 }, + { url = "https://files.pythonhosted.org/packages/56/17/504518860370f0a28908b18864f43d72f03581e2b6680540ca668f07aa42/shapely-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9fa5c53b0791a4b998f9ad84aad456c988600757a96b0a05e14bba10cebaaaea", size = 1625362 }, + { url = "https://files.pythonhosted.org/packages/36/a1/9677337d729b79fce1ef3296aac6b8ef4743419086f669e8a8070eff8f40/shapely-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aabecd038841ab5310d23495253f01c2a82a3aedae5ab9ca489be214aa458aa7", size = 2999005 }, + { url = "https://files.pythonhosted.org/packages/a2/17/e09357274699c6e012bbb5a8ea14765a4d5860bb658df1931c9f90d53bd3/shapely-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586f6aee1edec04e16227517a866df3e9a2e43c1f635efc32978bb3dc9c63753", size = 3108489 }, + { url = "https://files.pythonhosted.org/packages/17/5d/93a6c37c4b4e9955ad40834f42b17260ca74ecf36df2e81bb14d12221b90/shapely-2.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b9878b9e37ad26c72aada8de0c9cfe418d9e2ff36992a1693b7f65a075b28647", size = 3945727 }, + { url = "https://files.pythonhosted.org/packages/a3/1a/ad696648f16fd82dd6bfcca0b3b8fbafa7aacc13431c7fc4c9b49e481681/shapely-2.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9a531c48f289ba355e37b134e98e28c557ff13965d4653a5228d0f42a09aed0", size = 4109311 
}, + { url = "https://files.pythonhosted.org/packages/d4/38/150dd245beab179ec0d4472bf6799bf18f21b1efbef59ac87de3377dbf1c/shapely-2.1.1-cp311-cp311-win32.whl", hash = "sha256:4866de2673a971820c75c0167b1f1cd8fb76f2d641101c23d3ca021ad0449bab", size = 1522982 }, + { url = "https://files.pythonhosted.org/packages/93/5b/842022c00fbb051083c1c85430f3bb55565b7fd2d775f4f398c0ba8052ce/shapely-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:20a9d79958b3d6c70d8a886b250047ea32ff40489d7abb47d01498c704557a93", size = 1703872 }, + { url = "https://files.pythonhosted.org/packages/fb/64/9544dc07dfe80a2d489060791300827c941c451e2910f7364b19607ea352/shapely-2.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2827365b58bf98efb60affc94a8e01c56dd1995a80aabe4b701465d86dcbba43", size = 1833021 }, + { url = "https://files.pythonhosted.org/packages/07/aa/fb5f545e72e89b6a0f04a0effda144f5be956c9c312c7d4e00dfddbddbcf/shapely-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9c551f7fa7f1e917af2347fe983f21f212863f1d04f08eece01e9c275903fad", size = 1643018 }, + { url = "https://files.pythonhosted.org/packages/03/46/61e03edba81de729f09d880ce7ae5c1af873a0814206bbfb4402ab5c3388/shapely-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78dec4d4fbe7b1db8dc36de3031767e7ece5911fb7782bc9e95c5cdec58fb1e9", size = 2986417 }, + { url = "https://files.pythonhosted.org/packages/1f/1e/83ec268ab8254a446b4178b45616ab5822d7b9d2b7eb6e27cf0b82f45601/shapely-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:872d3c0a7b8b37da0e23d80496ec5973c4692920b90de9f502b5beb994bbaaef", size = 3098224 }, + { url = "https://files.pythonhosted.org/packages/f1/44/0c21e7717c243e067c9ef8fa9126de24239f8345a5bba9280f7bb9935959/shapely-2.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e2b9125ebfbc28ecf5353511de62f75a8515ae9470521c9a693e4bb9fbe0cf1", size = 3925982 }, + { url = "https://files.pythonhosted.org/packages/15/50/d3b4e15fefc103a0eb13d83bad5f65cd6e07a5d8b2ae920e767932a247d1/shapely-2.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4b96cea171b3d7f6786976a0520f178c42792897653ecca0c5422fb1e6946e6d", size = 4089122 }, + { url = "https://files.pythonhosted.org/packages/bd/05/9a68f27fc6110baeedeeebc14fd86e73fa38738c5b741302408fb6355577/shapely-2.1.1-cp312-cp312-win32.whl", hash = "sha256:39dca52201e02996df02e447f729da97cfb6ff41a03cb50f5547f19d02905af8", size = 1522437 }, + { url = "https://files.pythonhosted.org/packages/bc/e9/a4560e12b9338842a1f82c9016d2543eaa084fce30a1ca11991143086b57/shapely-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:13d643256f81d55a50013eff6321142781cf777eb6a9e207c2c9e6315ba6044a", size = 1703479 }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = 
"sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, ] [[package]] name = "smmap" version = "5.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329, upload-time = "2025-01-02T07:14:40.909Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/cd/a040c4b3119bbe532e5b0732286f805445375489fceaec1f48306068ee3b/smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5", size = 22329 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303, upload-time = "2025-01-02T07:14:38.724Z" }, + { url = "https://files.pythonhosted.org/packages/04/be/d09147ad1ec7934636ad912901c5fd7667e1c858e19d355237db0d0cd5e4/smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e", size = 24303 }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] [[package]] name = "socksio" version = "1.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055, upload-time = "2020-04-17T15:50:34.664Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/48a7d9495be3d1c651198fd99dbb6ce190e2274d0f28b9051307bdec6b85/socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac", size = 19055 } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763, upload-time = "2020-04-17T15:50:31.878Z" }, + { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763 }, ] [[package]] name = "sortedcontainers" version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594 } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575 }, ] [[package]] name = "soupsieve" version = "2.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418, upload-time = "2025-04-20T18:50:08.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/f4/4a80cd6ef364b2e8b65b15816a843c0980f7a5a2b4dc701fc574952aa19f/soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a", size = 103418 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677, upload-time = "2025-04-20T18:50:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9c/0e6afc12c269578be5c0c1c9f4b49a8d32770a080260c333ac04cc1c832d/soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4", size = 36677 }, ] [[package]] @@ -5411,90 +5418,123 @@ dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424 } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" }, - { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" }, - { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" }, - { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = "2025-05-14T17:55:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" }, - { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" }, - { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = 
"sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" }, - { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" }, - { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" }, - { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" }, - { url = "https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" }, - { url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" }, - { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" }, - { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" }, - { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" }, - { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, + { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 
2121232 }, + { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897 }, + { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313 }, + { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807 }, + { url = "https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632 }, + { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642 }, + { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475 }, + { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903 }, + { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645 }, + { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399 }, + { url = "https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269 }, + { url = "https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364 }, + { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072 }, + { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074 }, + { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514 }, + { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557 }, + { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224 }, +] + +[[package]] +name = "sqlglot" +version = "26.33.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/9d/fcd59b4612d5ad1e2257c67c478107f073b19e1097d3bfde2fb517884416/sqlglot-26.33.0.tar.gz", hash = "sha256:2817278779fa51d6def43aa0d70690b93a25c83eb18ec97130fdaf707abc0d73", size = 5353340 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/8d/f1d9cb5b18e06aa45689fbeaaea6ebab66d5f01d1e65029a8f7657c06be5/sqlglot-26.33.0-py3-none-any.whl", hash = "sha256:031cee20c0c796a83d26d079a47fdce667604df430598c7eabfa4e4dfd147033", size = 477610 }, ] [[package]] name = "sseclient-py" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791, upload-time = "2023-09-01T19:39:20.45Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/ed/3df5ab8bb0c12f86c28d0cadb11ed1de44a92ed35ce7ff4fd5518a809325/sseclient-py-1.8.0.tar.gz", hash = "sha256:c547c5c1a7633230a38dc599a21a2dc638f9b5c297286b48b46b935c71fac3e8", size = 7791 } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828, upload-time = "2023-09-01T19:39:17.627Z" }, + { url = "https://files.pythonhosted.org/packages/49/58/97655efdfeb5b4eeab85b1fc5d3fa1023661246c2ab2a26ea8e47402d4f2/sseclient_py-1.8.0-py2.py3-none-any.whl", hash = "sha256:4ecca6dc0b9f963f8384e9d7fd529bf93dd7d708144c4fb5da0e0a1a926fee83", size = 8828 }, ] [[package]] name = "starlette" -version = "0.41.0" +version = "0.47.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/53/c3a36690a923706e7ac841f649c64f5108889ab1ec44218dac45771f252a/starlette-0.41.0.tar.gz", hash = "sha256:39cbd8768b107d68bfe1ff1672b38a2c38b49777de46d2a592841d58e3bf7c2a", size = 2573755, upload-time = "2024-10-15T17:32:04.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/35/c6/a4443bfabf5629129512ca0e07866c4c3c094079ba4e9b2551006927253c/starlette-0.41.0-py3-none-any.whl", hash = "sha256:a0193a3c413ebc9c78bff1c3546a45bb8c8bcb4a84cae8747d650a65bd37210a", size = 73216, upload-time = "2024-10-15T17:32:02.931Z" }, + { url = "https://files.pythonhosted.org/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984 }, ] [[package]] name = "storage3" -version = "0.8.2" +version = "0.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "deprecation" }, { name = "httpx", extra = ["http2"] }, { name = "python-dateutil" }, - { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/af/94cd4925c8a80b4c06bdef60226c04566973f6e2982957d2eabeecb2d5ca/storage3-0.8.2.tar.gz", hash = "sha256:db05d3fe8fb73bd30c814c4c4749664f37a5dfc78b629e8c058ef558c2b89f5a", size = 9041, upload-time = "2024-10-18T07:05:40.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/e2/280fe75f65e7a3ca680b7843acfc572a63aa41230e3d3c54c66568809c85/storage3-0.12.1.tar.gz", hash = "sha256:32ea8f5eb2f7185c2114a4f6ae66d577722e32503f0a30b56e7ed5c7f13e6b48", size = 10198 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/67/7d281ba69b3ba3359f528bb0a1cac9d87896938d80119451123e829b3820/storage3-0.8.2-py3-none-any.whl", hash = "sha256:f2e995b18c77a2a9265d1a33047d43e4d6abb11eb3ca5067959f68281c305de3", size = 16230, upload-time = "2024-10-18T07:05:38.408Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/c5f8709fc5349928e591fee47592eeff78d29a7d75b097f96a4e01de028d/storage3-0.12.1-py3-none-any.whl", hash = "sha256:9da77fd4f406b019fdcba201e9916aefbf615ef87f551253ce427d8136459a34", size = 18420 }, +] + +[[package]] +name = "strenum" +version = "0.4.15" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/85/ad/430fb60d90e1d112a62ff57bdd1f286ec73a2a0331272febfddd21f330e1/StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff", size = 23384 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/69/297302c5f5f59c862faa31e6cb9a4cd74721cd1e052b38e464c5b402df8b/StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659", size = 8851 }, ] [[package]] name = "supabase" -version = "2.8.1" +version = "2.18.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "gotrue" }, { name = "httpx" }, { name = "postgrest" }, { name = "realtime" }, { name = "storage3" }, - { name = "supafunc" }, - { name = "typing-extensions" }, + { name = "supabase-auth" }, + { name = "supabase-functions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/46/0846eae977d7e067e73960d880a3457e2a87b1ec7467ff3bc5365b318df7/supabase-2.8.1.tar.gz", hash = "sha256:711c70e6acd9e2ff48ca0dc0b1bb70c01c25378cc5189ec9f5ed9655b30bc41d", size = 13955, upload-time = "2024-09-30T16:03:53.548Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/d2/3b135af55dd5788bd47875bb81f99c870054b990c030e51fd641a61b10b5/supabase-2.18.1.tar.gz", hash = "sha256:205787b1fbb43d6bc997c06fe3a56137336d885a1b56ec10f0012f2a2905285d", size = 11549 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/15/ca/7f1dfcd9dfff2cb56ce063b3c8e4c29ae43e50102f039d5196cbed8d51b8/supabase-2.8.1-py3-none-any.whl", hash = "sha256:dfa8bef89b54129093521d5bba2136ff765baf67cd76d8ad0aa4984d61a7815c", size = 16589, upload-time = "2024-09-30T16:03:51.737Z" }, + { url = "https://files.pythonhosted.org/packages/a8/33/0e0062fea22cfe01d466dee83f56b3ed40c89bdcbca671bafeba3fe86b92/supabase-2.18.1-py3-none-any.whl", hash = "sha256:4fdd7b7247178a847f97ecd34f018dcb4775e487c8ff46b1208a01c933691fe9", size = 18683 }, ] [[package]] -name = "supafunc" -version = "0.6.2" +name = "supabase-auth" +version = "2.12.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx", extra = ["http2"] }, + { name = "pydantic" }, + { name = "pyjwt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/28/c808bfd80c996cbf0ba5de6714edf2e2f68637f50058f6b9373f49b82a70/supafunc-0.6.2.tar.gz", hash = "sha256:c7dfa20db7182f7fe4ae436e94e05c06cd7ed98d697fed75d68c7b9792822adc", size = 3902, upload-time = "2024-10-18T07:06:39.038Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/e9/3d6f696a604752803b9e389b04d454f4b26a29b5d155b257fea4af8dc543/supabase_auth-2.12.3.tar.gz", hash = "sha256:8d3b67543f3b27f5adbfe46b66990424c8504c6b08c1141ec572a9802761edc2", size = 38430 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/91/cb7a31cf250ee66dfd40cca2c7c36eede7e1d8e3183f99865d14438c66a7/supafunc-0.6.2-py3-none-any.whl", hash = "sha256:101b30616b0a1ce8cf938eca1df362fa4cf1deacb0271f53ebbd674190fb0da5", size = 6622, upload-time = "2024-10-18T07:06:37.782Z" }, + { url = "https://files.pythonhosted.org/packages/96/a6/4102d5fa08a8521d9432b4d10bb58fedbd1f92b211d1b45d5394f5cb9021/supabase_auth-2.12.3-py3-none-any.whl", hash = "sha256:15c7580e1313d30ffddeb3221cb3cdb87c2a80fd220bf85d67db19cd1668435b", size = 44417 }, +] + +[[package]] +name = "supabase-functions" +version = "0.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx", extra = ["http2"] }, + { name = "strenum" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/e4/6df7cd4366396553449e9907c745862ebf010305835b2bac99933dd7db9d/supabase_functions-0.10.1.tar.gz", hash = "sha256:4779d33a1cc3d4aea567f586b16d8efdb7cddcd6b40ce367c5fb24288af3a4f1", size = 5025 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/06/060118a1e602c9bda8e4bf950bd1c8b5e1542349f2940ec57541266fabe1/supabase_functions-0.10.1-py3-none-any.whl", hash = "sha256:1db85e20210b465075aacee4e171332424f7305f9903c5918096be1423d6fcc5", size = 8275 }, ] [[package]] @@ -5504,9 +5544,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mpmath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = 
"2025-04-27T18:04:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, ] [[package]] @@ -5523,18 +5563,18 @@ dependencies = [ { name = "six" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/58/48d65d181a69f7db19f7cdee01d252168fbfbad2d1bb25abed03e6df3b05/tablestore-6.2.0.tar.gz", hash = "sha256:0773e77c00542be1bfebbc3c7a85f72a881c63e4e7df7c5a9793a54144590e68", size = 85942, upload-time = "2025-04-15T12:11:20.655Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/58/48d65d181a69f7db19f7cdee01d252168fbfbad2d1bb25abed03e6df3b05/tablestore-6.2.0.tar.gz", hash = "sha256:0773e77c00542be1bfebbc3c7a85f72a881c63e4e7df7c5a9793a54144590e68", size = 85942 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/da/30451712a769bcf417add8e81163d478a4d668b0e8d489a9d667260d55df/tablestore-6.2.0-py3-none-any.whl", hash = "sha256:6af496d841ab1ff3f78b46abbd87b95a08d89605c51664d2b30933b1d1c5583a", size = 106297, upload-time = "2025-04-15T12:11:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/9c/da/30451712a769bcf417add8e81163d478a4d668b0e8d489a9d667260d55df/tablestore-6.2.0-py3-none-any.whl", hash = "sha256:6af496d841ab1ff3f78b46abbd87b95a08d89605c51664d2b30933b1d1c5583a", size = 106297 }, ] [[package]] name = "tabulate" version = "0.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090 } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, + { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252 }, ] [[package]] @@ -5547,9 +5587,9 @@ dependencies = [ { name = "numpy" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b6/3f/9487f703edb5b8be51ada52b675b4b2fcd507399946aeab8c10028f75265/tcvdb_text-1.1.1.tar.gz", hash = "sha256:db36b5d7b640b194ae72c0c429718c9613b8ef9de5fffb9d510aba5be75ff1cb", size = 57859792, upload-time = "2025-02-07T11:08:17.586Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/3f/9487f703edb5b8be51ada52b675b4b2fcd507399946aeab8c10028f75265/tcvdb_text-1.1.1.tar.gz", hash = "sha256:db36b5d7b640b194ae72c0c429718c9613b8ef9de5fffb9d510aba5be75ff1cb", size = 57859792 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/d3/8c8799802676bc6c4696bed7ca7b01a3a5b6ab080ed959e5a4925640e01b/tcvdb_text-1.1.1-py3-none-any.whl", hash = "sha256:981eb2323c0668129942c066de05e8f0d2165be36f567877906646dea07d17a9", size = 
59535083, upload-time = "2025-02-07T11:07:59.66Z" }, + { url = "https://files.pythonhosted.org/packages/76/d3/8c8799802676bc6c4696bed7ca7b01a3a5b6ab080ed959e5a4925640e01b/tcvdb_text-1.1.1-py3-none-any.whl", hash = "sha256:981eb2323c0668129942c066de05e8f0d2165be36f567877906646dea07d17a9", size = 59535083 }, ] [[package]] @@ -5567,18 +5607,18 @@ dependencies = [ { name = "ujson" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/ec/c80579aff1539257aafcf8dc3f3c13630171f299d65b33b68440e166f27c/tcvectordb-1.6.4.tar.gz", hash = "sha256:6fb18e15ccc6744d5147e9bbd781f84df3d66112de7d9cc615878b3f72d3a29a", size = 75188, upload-time = "2025-03-05T09:14:19.925Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ec/c80579aff1539257aafcf8dc3f3c13630171f299d65b33b68440e166f27c/tcvectordb-1.6.4.tar.gz", hash = "sha256:6fb18e15ccc6744d5147e9bbd781f84df3d66112de7d9cc615878b3f72d3a29a", size = 75188 } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/bf/f38d9f629324ecffca8fe934e8df47e1233a9021b0739447e59e9fb248f9/tcvectordb-1.6.4-py3-none-any.whl", hash = "sha256:06ef13e7edb4575b04615065fc90e1a28374e318ada305f3786629aec5c9318a", size = 88917, upload-time = "2025-03-05T09:14:17.494Z" }, + { url = "https://files.pythonhosted.org/packages/68/bf/f38d9f629324ecffca8fe934e8df47e1233a9021b0739447e59e9fb248f9/tcvectordb-1.6.4-py3-none-any.whl", hash = "sha256:06ef13e7edb4575b04615065fc90e1a28374e318ada305f3786629aec5c9318a", size = 88917 }, ] [[package]] name = "tenacity" version = "9.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248 }, ] [[package]] @@ -5592,9 +5632,9 @@ dependencies = [ { name = "urllib3" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/49/9c618aff1c50121d183cdfbc3a4a5cf2727a2cde1893efe6ca55c7009196/testcontainers-4.10.0.tar.gz", hash = "sha256:03f85c3e505d8b4edeb192c72a961cebbcba0dd94344ae778b4a159cb6dcf8d3", size = 63327, upload-time = "2025-04-02T16:13:27.582Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/49/9c618aff1c50121d183cdfbc3a4a5cf2727a2cde1893efe6ca55c7009196/testcontainers-4.10.0.tar.gz", hash = "sha256:03f85c3e505d8b4edeb192c72a961cebbcba0dd94344ae778b4a159cb6dcf8d3", size = 63327 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/0a/824b0c1ecf224802125279c3effff2e25ed785ed046e67da6e53d928de4c/testcontainers-4.10.0-py3-none-any.whl", hash = 
"sha256:31ed1a81238c7e131a2a29df6db8f23717d892b592fa5a1977fd0dcd0c23fc23", size = 107414, upload-time = "2025-04-02T16:13:25.785Z" }, + { url = "https://files.pythonhosted.org/packages/1c/0a/824b0c1ecf224802125279c3effff2e25ed785ed046e67da6e53d928de4c/testcontainers-4.10.0-py3-none-any.whl", hash = "sha256:31ed1a81238c7e131a2a29df6db8f23717d892b592fa5a1977fd0dcd0c23fc23", size = 107414 }, ] [[package]] @@ -5604,9 +5644,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/98/ab324fdfbbf064186ca621e21aa3871ddf886ecb78358a9864509241e802/tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709", size = 16948, upload-time = "2024-05-08T07:54:36.955Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/98/ab324fdfbbf064186ca621e21aa3871ddf886ecb78358a9864509241e802/tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709", size = 16948 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/bb/0f3b7b4d31537e90f4dd01f50fa58daef48807c789c1c1bdd610204ff103/tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2", size = 17026, upload-time = "2024-05-08T07:54:34.849Z" }, + { url = "https://files.pythonhosted.org/packages/5d/bb/0f3b7b4d31537e90f4dd01f50fa58daef48807c789c1c1bdd610204ff103/tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2", size = 17026 }, ] [[package]] @@ -5617,20 +5657,20 @@ dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, - { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, - { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, - { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = 
"2025-02-14T06:02:18.595Z" }, - { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, - { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073, upload-time = "2025-02-14T06:02:24.768Z" }, - { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075, upload-time = "2025-02-14T06:02:26.92Z" }, - { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754, upload-time = "2025-02-14T06:02:28.124Z" }, - { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678, upload-time = "2025-02-14T06:02:29.845Z" }, - { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283, upload-time = "2025-02-14T06:02:33.838Z" }, - { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897, upload-time = "2025-02-14T06:02:36.265Z" }, + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987 }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155 }, + { url = "https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898 }, + { url = 
"https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535 }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548 }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895 }, + { url = "https://files.pythonhosted.org/packages/cf/e5/21ff33ecfa2101c1bb0f9b6df750553bd873b7fb532ce2cb276ff40b197f/tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03", size = 1065073 }, + { url = "https://files.pythonhosted.org/packages/8e/03/a95e7b4863ee9ceec1c55983e4cc9558bcfd8f4f80e19c4f8a99642f697d/tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210", size = 1008075 }, + { url = "https://files.pythonhosted.org/packages/40/10/1305bb02a561595088235a513ec73e50b32e74364fef4de519da69bc8010/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794", size = 1140754 }, + { url = "https://files.pythonhosted.org/packages/1b/40/da42522018ca496432ffd02793c3a72a739ac04c3794a4914570c9bb2925/tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22", size = 1196678 }, + { url = "https://files.pythonhosted.org/packages/5c/41/1e59dddaae270ba20187ceb8aa52c75b24ffc09f547233991d5fd822838b/tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2", size = 1259283 }, + { url = "https://files.pythonhosted.org/packages/5b/64/b16003419a1d7728d0d8c0d56a4c24325e7b10a21a9dd1fc0f7115c02f0a/tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16", size = 894897 }, ] [[package]] @@ -5640,60 +5680,60 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hash = "sha256:fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77", size = 351545, upload-time = "2025-06-24T10:24:52.449Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/2d/b0fce2b8201635f60e8c95990080f58461cc9ca3d5026de2e900f38a7f21/tokenizers-0.21.2.tar.gz", hash = "sha256:fdc7cffde3e2113ba0e6cc7318c40e3438a4d74bbc62bf04bcc63bdfb082ac77", size = 351545 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec", size = 2875206, upload-time = "2025-06-24T10:24:42.755Z" }, - { url = 
"https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f", size = 2732655, upload-time = "2025-06-24T10:24:41.56Z" }, - { url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12", size = 3019202, upload-time = "2025-06-24T10:24:31.791Z" }, - { url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91", size = 2934539, upload-time = "2025-06-24T10:24:34.567Z" }, - { url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb", size = 3248665, upload-time = "2025-06-24T10:24:39.024Z" }, - { url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab", size = 3451305, upload-time = "2025-06-24T10:24:36.133Z" }, - { url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae", size = 3214757, upload-time = "2025-06-24T10:24:37.784Z" }, - { url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020", size = 3121887, upload-time = "2025-06-24T10:24:40.293Z" }, - { url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19", size = 9091965, upload-time = "2025-06-24T10:24:44.431Z" }, - { url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d", size = 9053372, upload-time = "2025-06-24T10:24:46.455Z" }, - { url = "https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365", size = 9298632, upload-time = "2025-06-24T10:24:48.446Z" }, - { url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958", size = 9470074, upload-time = 
"2025-06-24T10:24:50.378Z" }, - { url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl", hash = "sha256:cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962", size = 2330115, upload-time = "2025-06-24T10:24:55.069Z" }, - { url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl", hash = "sha256:58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98", size = 2509918, upload-time = "2025-06-24T10:24:53.71Z" }, + { url = "https://files.pythonhosted.org/packages/1d/cc/2936e2d45ceb130a21d929743f1e9897514691bec123203e10837972296f/tokenizers-0.21.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:342b5dfb75009f2255ab8dec0041287260fed5ce00c323eb6bab639066fef8ec", size = 2875206 }, + { url = "https://files.pythonhosted.org/packages/6c/e6/33f41f2cc7861faeba8988e7a77601407bf1d9d28fc79c5903f8f77df587/tokenizers-0.21.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:126df3205d6f3a93fea80c7a8a266a78c1bd8dd2fe043386bafdd7736a23e45f", size = 2732655 }, + { url = "https://files.pythonhosted.org/packages/33/2b/1791eb329c07122a75b01035b1a3aa22ad139f3ce0ece1b059b506d9d9de/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a32cd81be21168bd0d6a0f0962d60177c447a1aa1b1e48fa6ec9fc728ee0b12", size = 3019202 }, + { url = "https://files.pythonhosted.org/packages/05/15/fd2d8104faa9f86ac68748e6f7ece0b5eb7983c7efc3a2c197cb98c99030/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8bd8999538c405133c2ab999b83b17c08b7fc1b48c1ada2469964605a709ef91", size = 2934539 }, + { url = "https://files.pythonhosted.org/packages/a5/2e/53e8fd053e1f3ffbe579ca5f9546f35ac67cf0039ed357ad7ec57f5f5af0/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e9944e61239b083a41cf8fc42802f855e1dca0f499196df37a8ce219abac6eb", size = 3248665 }, + { url = "https://files.pythonhosted.org/packages/00/15/79713359f4037aa8f4d1f06ffca35312ac83629da062670e8830917e2153/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:514cd43045c5d546f01142ff9c79a96ea69e4b5cda09e3027708cb2e6d5762ab", size = 3451305 }, + { url = "https://files.pythonhosted.org/packages/38/5f/959f3a8756fc9396aeb704292777b84f02a5c6f25c3fc3ba7530db5feb2c/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1b9405822527ec1e0f7d8d2fdb287a5730c3a6518189c968254a8441b21faae", size = 3214757 }, + { url = "https://files.pythonhosted.org/packages/c5/74/f41a432a0733f61f3d21b288de6dfa78f7acff309c6f0f323b2833e9189f/tokenizers-0.21.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed9a4d51c395103ad24f8e7eb976811c57fbec2af9f133df471afcd922e5020", size = 3121887 }, + { url = "https://files.pythonhosted.org/packages/3c/6a/bc220a11a17e5d07b0dfb3b5c628621d4dcc084bccd27cfaead659963016/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2c41862df3d873665ec78b6be36fcc30a26e3d4902e9dd8608ed61d49a48bc19", size = 9091965 }, + { url = "https://files.pythonhosted.org/packages/6c/bd/ac386d79c4ef20dc6f39c4706640c24823dca7ebb6f703bfe6b5f0292d88/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:ed21dc7e624e4220e21758b2e62893be7101453525e3d23264081c9ef9a6d00d", size = 9053372 }, + { url = 
"https://files.pythonhosted.org/packages/63/7b/5440bf203b2a5358f074408f7f9c42884849cd9972879e10ee6b7a8c3b3d/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:0e73770507e65a0e0e2a1affd6b03c36e3bc4377bd10c9ccf51a82c77c0fe365", size = 9298632 }, + { url = "https://files.pythonhosted.org/packages/a4/d2/faa1acac3f96a7427866e94ed4289949b2524f0c1878512516567d80563c/tokenizers-0.21.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:106746e8aa9014a12109e58d540ad5465b4c183768ea96c03cbc24c44d329958", size = 9470074 }, + { url = "https://files.pythonhosted.org/packages/d8/a5/896e1ef0707212745ae9f37e84c7d50269411aef2e9ccd0de63623feecdf/tokenizers-0.21.2-cp39-abi3-win32.whl", hash = "sha256:cabda5a6d15d620b6dfe711e1af52205266d05b379ea85a8a301b3593c60e962", size = 2330115 }, + { url = "https://files.pythonhosted.org/packages/13/c3/cc2755ee10be859c4338c962a35b9a663788c0c0b50c0bdd8078fb6870cf/tokenizers-0.21.2-cp39-abi3-win_amd64.whl", hash = "sha256:58747bb898acdb1007f37a7bbe614346e98dc28708ffb66a3fd50ce169ac6c98", size = 2509918 }, ] [[package]] name = "toml" version = "0.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, ] [[package]] name = "tomli" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = 
"https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, ] [[package]] @@ -5707,7 +5747,7 @@ dependencies = [ { name = "requests" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/01/f811af86f1f80d5f289be075c3b281e74bf3fe081cfbe5cfce44954d2c3a/tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed", size = 123407, upload-time = "2024-10-16T15:59:08.634Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0c/01/f811af86f1f80d5f289be075c3b281e74bf3fe081cfbe5cfce44954d2c3a/tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed", size = 123407 } [[package]] name = "tqdm" @@ -5716,14 +5756,14 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] [[package]] name = "transformers" -version = "4.51.3" +version = "4.53.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -5737,9 +5777,9 @@ dependencies = [ { name = "tokenizers" }, { name = "tqdm" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/f1/11/7414d5bc07690002ce4d7553602107bf969af85144bbd02830f9fb471236/transformers-4.51.3.tar.gz", hash = "sha256:e292fcab3990c6defe6328f0f7d2004283ca81a7a07b2de9a46d67fd81ea1409", size = 8941266, upload-time = "2025-04-14T08:15:00.485Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/5c/49182918b58eaa0b4c954fd0e37c79fc299e5643e69d70089d0b0eb0cd9b/transformers-4.53.3.tar.gz", hash = "sha256:b2eda1a261de79b78b97f7888fe2005fc0c3fabf5dad33d52cc02983f9f675d8", size = 9197478 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/b6/5257d04ae327b44db31f15cce39e6020cc986333c715660b1315a9724d82/transformers-4.51.3-py3-none-any.whl", hash = "sha256:fd3279633ceb2b777013234bbf0b4f5c2d23c4626b05497691f00cfda55e8a83", size = 10383940, upload-time = "2025-04-14T08:13:43.023Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/d7520cc5cb69c825599042eb3a7c986fa9baa8a8d2dea9acd78e152c81e2/transformers-4.53.3-py3-none-any.whl", hash = "sha256:5aba81c92095806b6baf12df35d756cf23b66c356975fb2a7fa9e536138d7c75", size = 10826382 }, ] [[package]] @@ -5752,27 +5792,27 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625, upload-time = "2025-05-26T14:30:31.824Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" }, + { url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317 }, ] [[package]] name = "types-aiofiles" version = "24.1.0.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4a/d6/5c44761bc11cb5c7505013a39f397a9016bfb3a5c932032b2db16c38b87b/types_aiofiles-24.1.0.20250708.tar.gz", hash = "sha256:c8207ed7385491ce5ba94da02658164ebd66b69a44e892288c9f20cbbf5284ff", size = 14322, upload-time = "2025-07-08T03:14:44.814Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/d6/5c44761bc11cb5c7505013a39f397a9016bfb3a5c932032b2db16c38b87b/types_aiofiles-24.1.0.20250708.tar.gz", hash = "sha256:c8207ed7385491ce5ba94da02658164ebd66b69a44e892288c9f20cbbf5284ff", size = 14322 } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/e9/4e0cc79c630040aae0634ac9393341dc2aff1a5be454be9741cc6cc8989f/types_aiofiles-24.1.0.20250708-py3-none-any.whl", hash = "sha256:07f8f06465fd415d9293467d1c66cd074b2c3b62b679e26e353e560a8cf63720", size = 14320, upload-time = "2025-07-08T03:14:44.009Z" }, + { url = "https://files.pythonhosted.org/packages/44/e9/4e0cc79c630040aae0634ac9393341dc2aff1a5be454be9741cc6cc8989f/types_aiofiles-24.1.0.20250708-py3-none-any.whl", hash = "sha256:07f8f06465fd415d9293467d1c66cd074b2c3b62b679e26e353e560a8cf63720", size = 
14320 }, ] [[package]] name = "types-awscrt" version = "0.27.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/95/02564024f8668feab6733a2c491005b5281b048b3d0573510622cbcd9fd4/types_awscrt-0.27.4.tar.gz", hash = "sha256:c019ba91a097e8a31d6948f6176ede1312963f41cdcacf82482ac877cbbcf390", size = 16941, upload-time = "2025-06-29T22:58:04.756Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/95/02564024f8668feab6733a2c491005b5281b048b3d0573510622cbcd9fd4/types_awscrt-0.27.4.tar.gz", hash = "sha256:c019ba91a097e8a31d6948f6176ede1312963f41cdcacf82482ac877cbbcf390", size = 16941 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/40/cb4d04df4ac3520858f5b397a4ab89f34be2601000002a26edd8ddc0cac5/types_awscrt-0.27.4-py3-none-any.whl", hash = "sha256:a8c4b9d9ae66d616755c322aba75ab9bd793c6fef448917e6de2e8b8cdf66fb4", size = 39626, upload-time = "2025-06-29T22:58:03.157Z" }, + { url = "https://files.pythonhosted.org/packages/d4/40/cb4d04df4ac3520858f5b397a4ab89f34be2601000002a26edd8ddc0cac5/types_awscrt-0.27.4-py3-none-any.whl", hash = "sha256:a8c4b9d9ae66d616755c322aba75ab9bd793c6fef448917e6de2e8b8cdf66fb4", size = 39626 }, ] [[package]] @@ -5782,18 +5822,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-html5lib" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628, upload-time = "2025-05-16T03:09:09.93Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879, upload-time = "2025-05-16T03:09:09.051Z" }, + { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879 }, ] [[package]] name = "types-cachetools" version = "5.5.0.20240820" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c2/7e/ad6ba4a56b2a994e0f0a04a61a50466b60ee88a13d10a18c83ac14a66c61/types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0", size = 4198, upload-time = "2024-08-20T02:30:07.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/7e/ad6ba4a56b2a994e0f0a04a61a50466b60ee88a13d10a18c83ac14a66c61/types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0", size = 4198 } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/4d/fd7cc050e2d236d5570c4d92531c0396573a1e14b31735870e849351c717/types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2", size = 4149, upload-time = "2024-08-20T02:30:06.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/4d/fd7cc050e2d236d5570c4d92531c0396573a1e14b31735870e849351c717/types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2", size = 4149 }, ] [[package]] @@ -5803,45 +5843,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/5f/ac80a2f55757019e5d4809d17544569c47a623565258ca1a836ba951d53f/types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22", size = 16858, upload-time = "2025-05-23T03:05:40.983Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/5f/ac80a2f55757019e5d4809d17544569c47a623565258ca1a836ba951d53f/types_cffi-1.17.0.20250523.tar.gz", hash = "sha256:e7110f314c65590533adae1b30763be08ca71ad856a1ae3fe9b9d8664d49ec22", size = 16858 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/86/e26e6ae4dfcbf6031b8422c22cf3a9eb2b6d127770406e7645b6248d8091/types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9", size = 20010, upload-time = "2025-05-23T03:05:39.136Z" }, + { url = "https://files.pythonhosted.org/packages/f1/86/e26e6ae4dfcbf6031b8422c22cf3a9eb2b6d127770406e7645b6248d8091/types_cffi-1.17.0.20250523-py3-none-any.whl", hash = "sha256:e98c549d8e191f6220e440f9f14315d6775a21a0e588c32c20476be885b2fad9", size = 20010 }, ] [[package]] name = "types-colorama" version = "0.4.15.20240311" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/59/73/0fb0b9fe4964b45b2a06ed41b60c352752626db46aa0fb70a49a9e283a75/types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a", size = 5608, upload-time = "2024-03-11T02:15:51.557Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/73/0fb0b9fe4964b45b2a06ed41b60c352752626db46aa0fb70a49a9e283a75/types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a", size = 5608 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/83/6944b4fa01efb2e63ac62b791a8ddf0fee358f93be9f64b8f152648ad9d3/types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e", size = 5840, upload-time = "2024-03-11T02:15:50.43Z" }, + { url = "https://files.pythonhosted.org/packages/b7/83/6944b4fa01efb2e63ac62b791a8ddf0fee358f93be9f64b8f152648ad9d3/types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e", size = 5840 }, ] [[package]] name = "types-defusedxml" version = "0.7.0.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b9/4b/79d046a7211e110afd885be04bb9423546df2a662ed28251512d60e51fb6/types_defusedxml-0.7.0.20250708.tar.gz", hash = "sha256:7b785780cc11c18a1af086308bf94bf53a0907943a1d145dbe00189bef323cb8", size = 10541, upload-time = "2025-07-08T03:14:33.325Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/4b/79d046a7211e110afd885be04bb9423546df2a662ed28251512d60e51fb6/types_defusedxml-0.7.0.20250708.tar.gz", hash = "sha256:7b785780cc11c18a1af086308bf94bf53a0907943a1d145dbe00189bef323cb8", size = 10541 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/24/f8/870de7fbd5fee5643f05061db948df6bd574a05a42aee91e37ad47c999ef/types_defusedxml-0.7.0.20250708-py3-none-any.whl", hash = "sha256:cc426cbc31c61a0f1b1c2ad9b9ef9ef846645f28fd708cd7727a6353b5c52e54", size = 13478, upload-time = "2025-07-08T03:14:32.633Z" }, + { url = "https://files.pythonhosted.org/packages/24/f8/870de7fbd5fee5643f05061db948df6bd574a05a42aee91e37ad47c999ef/types_defusedxml-0.7.0.20250708-py3-none-any.whl", hash = "sha256:cc426cbc31c61a0f1b1c2ad9b9ef9ef846645f28fd708cd7727a6353b5c52e54", size = 13478 }, ] [[package]] name = "types-deprecated" version = "1.2.15.20250304" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0e/67/eeefaaabb03b288aad85483d410452c8bbcbf8b2bd876b0e467ebd97415b/types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719", size = 8015, upload-time = "2025-03-04T02:48:17.894Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/67/eeefaaabb03b288aad85483d410452c8bbcbf8b2bd876b0e467ebd97415b/types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719", size = 8015 } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/e3/c18aa72ab84e0bc127a3a94e93be1a6ac2cb281371d3a45376ab7cfdd31c/types_deprecated-1.2.15.20250304-py3-none-any.whl", hash = "sha256:86a65aa550ea8acf49f27e226b8953288cd851de887970fbbdf2239c116c3107", size = 8553, upload-time = "2025-03-04T02:48:16.666Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e3/c18aa72ab84e0bc127a3a94e93be1a6ac2cb281371d3a45376ab7cfdd31c/types_deprecated-1.2.15.20250304-py3-none-any.whl", hash = "sha256:86a65aa550ea8acf49f27e226b8953288cd851de887970fbbdf2239c116c3107", size = 8553 }, ] [[package]] name = "types-docutils" version = "0.21.0.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/39/86/24394a71a04f416ca03df51863a3d3e2cd0542fdc40989188dca30ffb5bf/types_docutils-0.21.0.20250708.tar.gz", hash = "sha256:5625a82a9a2f26d8384545607c157e023a48ed60d940dfc738db125282864172", size = 42011, upload-time = "2025-07-08T03:14:24.214Z" } +sdist = { url = "https://files.pythonhosted.org/packages/39/86/24394a71a04f416ca03df51863a3d3e2cd0542fdc40989188dca30ffb5bf/types_docutils-0.21.0.20250708.tar.gz", hash = "sha256:5625a82a9a2f26d8384545607c157e023a48ed60d940dfc738db125282864172", size = 42011 } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/17/8c1153fc1576a0dcffdd157c69a12863c3f9485054256f6791ea17d95aed/types_docutils-0.21.0.20250708-py3-none-any.whl", hash = "sha256:166630d1aec18b9ca02547873210e04bf7674ba8f8da9cd9e6a5e77dc99372c2", size = 67953, upload-time = "2025-07-08T03:14:23.057Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/8c1153fc1576a0dcffdd157c69a12863c3f9485054256f6791ea17d95aed/types_docutils-0.21.0.20250708-py3-none-any.whl", hash = "sha256:166630d1aec18b9ca02547873210e04bf7674ba8f8da9cd9e6a5e77dc99372c2", size = 67953 }, ] [[package]] @@ -5851,9 +5891,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "flask" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/f3/dd2f0d274ecb77772d3ce83735f75ad14713461e8cf7e6d61a7c272037b1/types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f", size = 9921, upload-time = "2025-04-13T04:04:15.515Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/a4/f3/dd2f0d274ecb77772d3ce83735f75ad14713461e8cf7e6d61a7c272037b1/types_flask_cors-5.0.0.20250413.tar.gz", hash = "sha256:b346d052f4ef3b606b73faf13e868e458f1efdbfedcbe1aba739eb2f54a6cf5f", size = 9921 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/34/7d64eb72d80bfd5b9e6dd31e7fe351a1c9a735f5c01e85b1d3b903a9d656/types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64", size = 9982, upload-time = "2025-04-13T04:04:14.27Z" }, + { url = "https://files.pythonhosted.org/packages/66/34/7d64eb72d80bfd5b9e6dd31e7fe351a1c9a735f5c01e85b1d3b903a9d656/types_flask_cors-5.0.0.20250413-py3-none-any.whl", hash = "sha256:8183fdba764d45a5b40214468a1d5daa0e86c4ee6042d13f38cc428308f27a64", size = 9982 }, ] [[package]] @@ -5864,9 +5904,9 @@ dependencies = [ { name = "flask" }, { name = "flask-sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/2a/15d922ddd3fad1ec0e06dab338f20c508becacaf8193ff373aee6986a1cc/types_flask_migrate-4.1.0.20250112.tar.gz", hash = "sha256:f2d2c966378ae7bb0660ec810e9af0a56ca03108235364c2a7b5e90418b0ff67", size = 8650, upload-time = "2025-01-12T02:51:25.29Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/2a/15d922ddd3fad1ec0e06dab338f20c508becacaf8193ff373aee6986a1cc/types_flask_migrate-4.1.0.20250112.tar.gz", hash = "sha256:f2d2c966378ae7bb0660ec810e9af0a56ca03108235364c2a7b5e90418b0ff67", size = 8650 } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/01/56e26643c54c5101a7bc11d277d15cd871b05a8a3ddbcc9acd3634d7fff8/types_Flask_Migrate-4.1.0.20250112-py3-none-any.whl", hash = "sha256:1814fffc609c2ead784affd011de92f0beecd48044963a8c898dd107dc1b5969", size = 8727, upload-time = "2025-01-12T02:51:23.121Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/56e26643c54c5101a7bc11d277d15cd871b05a8a3ddbcc9acd3634d7fff8/types_Flask_Migrate-4.1.0.20250112-py3-none-any.whl", hash = "sha256:1814fffc609c2ead784affd011de92f0beecd48044963a8c898dd107dc1b5969", size = 8727 }, ] [[package]] @@ -5877,36 +5917,36 @@ dependencies = [ { name = "types-greenlet" }, { name = "types-psutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/db/bdade74c3ba3a266eafd625377eb7b9b37c9c724c7472192100baf0fe507/types_gevent-24.11.0.20250401.tar.gz", hash = "sha256:1443f796a442062698e67d818fca50aa88067dee4021d457a7c0c6bedd6f46ca", size = 36980, upload-time = "2025-04-01T03:07:30.365Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/db/bdade74c3ba3a266eafd625377eb7b9b37c9c724c7472192100baf0fe507/types_gevent-24.11.0.20250401.tar.gz", hash = "sha256:1443f796a442062698e67d818fca50aa88067dee4021d457a7c0c6bedd6f46ca", size = 36980 } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/3d/c8b12d048565ef12ae65d71a0e566f36c6e076b158d3f94d87edddbeea6b/types_gevent-24.11.0.20250401-py3-none-any.whl", hash = "sha256:6764faf861ea99250c38179c58076392c44019ac3393029f71b06c4a15e8c1d1", size = 54863, upload-time = "2025-04-01T03:07:29.147Z" }, + { url = "https://files.pythonhosted.org/packages/25/3d/c8b12d048565ef12ae65d71a0e566f36c6e076b158d3f94d87edddbeea6b/types_gevent-24.11.0.20250401-py3-none-any.whl", hash = "sha256:6764faf861ea99250c38179c58076392c44019ac3393029f71b06c4a15e8c1d1", size = 54863 }, ] [[package]] name = "types-greenlet" version = "3.1.0.20250401" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/c0/c9/50405ed194a02f02a418311311e6ee4dd73eed446608b679e6df8170d5b7/types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082", size = 8460, upload-time = "2025-04-01T03:06:44.216Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c0/c9/50405ed194a02f02a418311311e6ee4dd73eed446608b679e6df8170d5b7/types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082", size = 8460 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/f3/36c5a6db23761c810d91227146f20b6e501aa50a51a557bd14e021cd9aea/types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6", size = 8821, upload-time = "2025-04-01T03:06:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f3/36c5a6db23761c810d91227146f20b6e501aa50a51a557bd14e021cd9aea/types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6", size = 8821 }, ] [[package]] name = "types-html5lib" version = "1.1.11.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/3b/1f5ba4358cfc1421cced5cdb9d2b08b4b99e4f9a41da88ce079f6d1a7bf1/types_html5lib-1.1.11.20250708.tar.gz", hash = "sha256:24321720fdbac71cee50d5a4bec9b7448495b7217974cffe3fcf1ede4eef7afe", size = 16799, upload-time = "2025-07-08T03:13:53.14Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/3b/1f5ba4358cfc1421cced5cdb9d2b08b4b99e4f9a41da88ce079f6d1a7bf1/types_html5lib-1.1.11.20250708.tar.gz", hash = "sha256:24321720fdbac71cee50d5a4bec9b7448495b7217974cffe3fcf1ede4eef7afe", size = 16799 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/50/5fc23cf647eee23acdd337c8150861d39980cf11f33dd87f78e87d2a4bad/types_html5lib-1.1.11.20250708-py3-none-any.whl", hash = "sha256:bb898066b155de7081cb182179e2ded31b9e0e234605e2cb46536894e68a6954", size = 22913, upload-time = "2025-07-08T03:13:52.098Z" }, + { url = "https://files.pythonhosted.org/packages/a8/50/5fc23cf647eee23acdd337c8150861d39980cf11f33dd87f78e87d2a4bad/types_html5lib-1.1.11.20250708-py3-none-any.whl", hash = "sha256:bb898066b155de7081cb182179e2ded31b9e0e234605e2cb46536894e68a6954", size = 22913 }, ] [[package]] name = "types-jmespath" version = "1.0.2.20250529" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ab/ce/1083f6dcf5e7f25e9abcb67f870799d45f8b184cdb6fd23bbe541d17d9cc/types_jmespath-1.0.2.20250529.tar.gz", hash = "sha256:d3c08397f57fe0510e3b1b02c27f0a5e738729680fb0ea5f4b74f70fb032c129", size = 10138, upload-time = "2025-05-29T03:07:30.24Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/ce/1083f6dcf5e7f25e9abcb67f870799d45f8b184cdb6fd23bbe541d17d9cc/types_jmespath-1.0.2.20250529.tar.gz", hash = "sha256:d3c08397f57fe0510e3b1b02c27f0a5e738729680fb0ea5f4b74f70fb032c129", size = 10138 } wheels = [ - { url = "https://files.pythonhosted.org/packages/66/74/78c518aeb310cc809aaf1dd19e646f8d42c472344a720b39e1ba2a65c2e7/types_jmespath-1.0.2.20250529-py3-none-any.whl", hash = "sha256:6344c102233aae954d623d285618079d797884e35f6cd8d2a894ca02640eca07", size = 11409, upload-time = "2025-05-29T03:07:29.012Z" }, + { url = "https://files.pythonhosted.org/packages/66/74/78c518aeb310cc809aaf1dd19e646f8d42c472344a720b39e1ba2a65c2e7/types_jmespath-1.0.2.20250529-py3-none-any.whl", 
hash = "sha256:6344c102233aae954d623d285618079d797884e35f6cd8d2a894ca02640eca07", size = 11409 }, ] [[package]] @@ -5916,90 +5956,90 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a0/ec/27ea5bffdb306bf261f6677a98b6993d93893b2c2e30f7ecc1d2c99d32e7/types_jsonschema-4.23.0.20250516.tar.gz", hash = "sha256:9ace09d9d35c4390a7251ccd7d833b92ccc189d24d1b347f26212afce361117e", size = 14911, upload-time = "2025-05-16T03:09:33.728Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/ec/27ea5bffdb306bf261f6677a98b6993d93893b2c2e30f7ecc1d2c99d32e7/types_jsonschema-4.23.0.20250516.tar.gz", hash = "sha256:9ace09d9d35c4390a7251ccd7d833b92ccc189d24d1b347f26212afce361117e", size = 14911 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/48/73ae8b388e19fc4a2a8060d0876325ec7310cfd09b53a2185186fd35959f/types_jsonschema-4.23.0.20250516-py3-none-any.whl", hash = "sha256:e7d0dd7db7e59e63c26e3230e26ffc64c4704cc5170dc21270b366a35ead1618", size = 15027, upload-time = "2025-05-16T03:09:32.499Z" }, + { url = "https://files.pythonhosted.org/packages/e6/48/73ae8b388e19fc4a2a8060d0876325ec7310cfd09b53a2185186fd35959f/types_jsonschema-4.23.0.20250516-py3-none-any.whl", hash = "sha256:e7d0dd7db7e59e63c26e3230e26ffc64c4704cc5170dc21270b366a35ead1618", size = 15027 }, ] [[package]] name = "types-markdown" version = "3.7.0.20250322" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/fd/b4bd01b8c46f021c35a07aa31fe1dc45d21adc9fc8d53064bfa577aae73d/types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c", size = 18052, upload-time = "2025-03-22T02:48:46.193Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/fd/b4bd01b8c46f021c35a07aa31fe1dc45d21adc9fc8d53064bfa577aae73d/types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c", size = 18052 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/59/ee46617bc2b5e43bc06a000fdcd6358a013957e30ad545bed5e3456a4341/types_markdown-3.7.0.20250322-py3-none-any.whl", hash = "sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb", size = 23699, upload-time = "2025-03-22T02:48:45.001Z" }, + { url = "https://files.pythonhosted.org/packages/56/59/ee46617bc2b5e43bc06a000fdcd6358a013957e30ad545bed5e3456a4341/types_markdown-3.7.0.20250322-py3-none-any.whl", hash = "sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb", size = 23699 }, ] [[package]] name = "types-oauthlib" version = "3.2.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/2c/dba2c193ccff2d1e2835589d4075b230d5627b9db363e9c8de153261d6ec/types_oauthlib-3.2.0.20250516.tar.gz", hash = "sha256:56bf2cffdb8443ae718d4e83008e3fbd5f861230b4774e6d7799527758119d9a", size = 24683, upload-time = "2025-05-16T03:07:42.484Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/2c/dba2c193ccff2d1e2835589d4075b230d5627b9db363e9c8de153261d6ec/types_oauthlib-3.2.0.20250516.tar.gz", hash = "sha256:56bf2cffdb8443ae718d4e83008e3fbd5f861230b4774e6d7799527758119d9a", size = 24683 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/54/cdd62283338616fd2448f534b29110d79a42aaabffaf5f45e7aed365a366/types_oauthlib-3.2.0.20250516-py3-none-any.whl", hash = 
"sha256:5799235528bc9bd262827149a1633ff55ae6e5a5f5f151f4dae74359783a31b3", size = 45671, upload-time = "2025-05-16T03:07:41.268Z" }, + { url = "https://files.pythonhosted.org/packages/b8/54/cdd62283338616fd2448f534b29110d79a42aaabffaf5f45e7aed365a366/types_oauthlib-3.2.0.20250516-py3-none-any.whl", hash = "sha256:5799235528bc9bd262827149a1633ff55ae6e5a5f5f151f4dae74359783a31b3", size = 45671 }, ] [[package]] name = "types-objgraph" version = "3.6.0.20240907" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/22/48/ba0ec63d392904eee34ef1cbde2d8798f79a3663950e42fbbc25fd1bd6f7/types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634", size = 2928, upload-time = "2024-09-07T02:35:21.214Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/48/ba0ec63d392904eee34ef1cbde2d8798f79a3663950e42fbbc25fd1bd6f7/types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634", size = 2928 } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/c9/6d647a947f3937b19bcc6d52262921ddad60d90060ff66511a4bd7e990c5/types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = "sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5", size = 3314, upload-time = "2024-09-07T02:35:19.865Z" }, + { url = "https://files.pythonhosted.org/packages/16/c9/6d647a947f3937b19bcc6d52262921ddad60d90060ff66511a4bd7e990c5/types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = "sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5", size = 3314 }, ] [[package]] name = "types-olefile" version = "0.47.0.20240806" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/18/9d87a1bc394323ce22690308c751680c4301fc3fbe47cd58e16d760b563a/types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67", size = 4369, upload-time = "2024-08-06T02:30:01.966Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/18/9d87a1bc394323ce22690308c751680c4301fc3fbe47cd58e16d760b563a/types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67", size = 4369 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/4d/f8acae53dd95353f8a789a06ea27423ae41f2067eb6ce92946fdc6a1f7a7/types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118", size = 4758, upload-time = "2024-08-06T02:30:01.15Z" }, + { url = "https://files.pythonhosted.org/packages/a9/4d/f8acae53dd95353f8a789a06ea27423ae41f2067eb6ce92946fdc6a1f7a7/types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118", size = 4758 }, ] [[package]] name = "types-openpyxl" version = "3.1.5.20250602" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/d4/33cc2f331cde82206aa4ec7d8db408beca65964785f438c6d2505d828178/types_openpyxl-3.1.5.20250602.tar.gz", hash = "sha256:d19831482022fc933780d6e9d6990464c18c2ec5f14786fea862f72c876980b5", size = 100608, upload-time = "2025-06-02T03:14:40.625Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/d4/33cc2f331cde82206aa4ec7d8db408beca65964785f438c6d2505d828178/types_openpyxl-3.1.5.20250602.tar.gz", hash = 
"sha256:d19831482022fc933780d6e9d6990464c18c2ec5f14786fea862f72c876980b5", size = 100608 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/69/5b924a20a4d441ec2160e94085b9fa9358dc27edde10080d71209c59101d/types_openpyxl-3.1.5.20250602-py3-none-any.whl", hash = "sha256:1f82211e086902318f6a14b5d8d865102362fda7cb82f3d63ac4dff47a1f164b", size = 165922, upload-time = "2025-06-02T03:14:39.226Z" }, + { url = "https://files.pythonhosted.org/packages/2e/69/5b924a20a4d441ec2160e94085b9fa9358dc27edde10080d71209c59101d/types_openpyxl-3.1.5.20250602-py3-none-any.whl", hash = "sha256:1f82211e086902318f6a14b5d8d865102362fda7cb82f3d63ac4dff47a1f164b", size = 165922 }, ] [[package]] name = "types-pexpect" version = "4.9.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/a3/3943fcb94c12af29a88c346b588f1eda180b8b99aeb388a046b25072732c/types_pexpect-4.9.0.20250516.tar.gz", hash = "sha256:7baed9ee566fa24034a567cbec56a5cff189a021344e84383b14937b35d83881", size = 13285, upload-time = "2025-05-16T03:08:33.327Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/a3/3943fcb94c12af29a88c346b588f1eda180b8b99aeb388a046b25072732c/types_pexpect-4.9.0.20250516.tar.gz", hash = "sha256:7baed9ee566fa24034a567cbec56a5cff189a021344e84383b14937b35d83881", size = 13285 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/d4/3128ae3365b46b9c4a33202af79b0e0d9d4308a6348a3317ce2331fea6cb/types_pexpect-4.9.0.20250516-py3-none-any.whl", hash = "sha256:84cbd7ae9da577c0d2629d4e4fd53cf074cd012296e01fd4fa1031e01973c28a", size = 17081, upload-time = "2025-05-16T03:08:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/e1/d4/3128ae3365b46b9c4a33202af79b0e0d9d4308a6348a3317ce2331fea6cb/types_pexpect-4.9.0.20250516-py3-none-any.whl", hash = "sha256:84cbd7ae9da577c0d2629d4e4fd53cf074cd012296e01fd4fa1031e01973c28a", size = 17081 }, ] [[package]] name = "types-protobuf" version = "5.29.1.20250403" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/6d/62a2e73b966c77609560800004dd49a926920dd4976a9fdd86cf998e7048/types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2", size = 59413, upload-time = "2025-04-02T10:07:17.138Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/6d/62a2e73b966c77609560800004dd49a926920dd4976a9fdd86cf998e7048/types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2", size = 59413 } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/e3/b74dcc2797b21b39d5a4f08a8b08e20369b4ca250d718df7af41a60dd9f0/types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59", size = 73874, upload-time = "2025-04-02T10:07:15.755Z" }, + { url = "https://files.pythonhosted.org/packages/69/e3/b74dcc2797b21b39d5a4f08a8b08e20369b4ca250d718df7af41a60dd9f0/types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59", size = 73874 }, ] [[package]] name = "types-psutil" version = "7.0.0.20250601" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c8/af/767b92be7de4105f5e2e87a53aac817164527c4a802119ad5b4e23028f7c/types_psutil-7.0.0.20250601.tar.gz", hash = 
"sha256:71fe9c4477a7e3d4f1233862f0877af87bff057ff398f04f4e5c0ca60aded197", size = 20297, upload-time = "2025-06-01T03:25:16.698Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/af/767b92be7de4105f5e2e87a53aac817164527c4a802119ad5b4e23028f7c/types_psutil-7.0.0.20250601.tar.gz", hash = "sha256:71fe9c4477a7e3d4f1233862f0877af87bff057ff398f04f4e5c0ca60aded197", size = 20297 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/85/864c663a924a34e0d87bd10ead4134bb4ab6269fa02daaa5dd644ac478c5/types_psutil-7.0.0.20250601-py3-none-any.whl", hash = "sha256:0c372e2d1b6529938a080a6ba4a9358e3dfc8526d82fabf40c1ef9325e4ca52e", size = 23106, upload-time = "2025-06-01T03:25:15.386Z" }, + { url = "https://files.pythonhosted.org/packages/8d/85/864c663a924a34e0d87bd10ead4134bb4ab6269fa02daaa5dd644ac478c5/types_psutil-7.0.0.20250601-py3-none-any.whl", hash = "sha256:0c372e2d1b6529938a080a6ba4a9358e3dfc8526d82fabf40c1ef9325e4ca52e", size = 23106 }, ] [[package]] name = "types-psycopg2" version = "2.9.21.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/68/55/3f94eff9d1a1402f39e19523a90117fe6c97d7fc61957e7ee3e3052c75e1/types_psycopg2-2.9.21.20250516.tar.gz", hash = "sha256:6721018279175cce10b9582202e2a2b4a0da667857ccf82a97691bdb5ecd610f", size = 26514, upload-time = "2025-05-16T03:07:45.786Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/55/3f94eff9d1a1402f39e19523a90117fe6c97d7fc61957e7ee3e3052c75e1/types_psycopg2-2.9.21.20250516.tar.gz", hash = "sha256:6721018279175cce10b9582202e2a2b4a0da667857ccf82a97691bdb5ecd610f", size = 26514 } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/50/f5d74945ab09b9a3e966ad39027ac55998f917eca72ede7929eab962b5db/types_psycopg2-2.9.21.20250516-py3-none-any.whl", hash = "sha256:2a9212d1e5e507017b31486ce8147634d06b85d652769d7a2d91d53cb4edbd41", size = 24846, upload-time = "2025-05-16T03:07:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/39/50/f5d74945ab09b9a3e966ad39027ac55998f917eca72ede7929eab962b5db/types_psycopg2-2.9.21.20250516-py3-none-any.whl", hash = "sha256:2a9212d1e5e507017b31486ce8147634d06b85d652769d7a2d91d53cb4edbd41", size = 24846 }, ] [[package]] @@ -6009,18 +6049,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-docutils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/9a/c1ea3f59001e9d13b93ec8acf02c75b47832423f17471295b8ceebc48a65/types_pygments-2.19.0.20250516.tar.gz", hash = "sha256:b53fd07e197f0e7be38ee19598bd99c78be5ca5f9940849c843be74a2f81ab58", size = 18485, upload-time = "2025-05-16T03:09:30.05Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/9a/c1ea3f59001e9d13b93ec8acf02c75b47832423f17471295b8ceebc48a65/types_pygments-2.19.0.20250516.tar.gz", hash = "sha256:b53fd07e197f0e7be38ee19598bd99c78be5ca5f9940849c843be74a2f81ab58", size = 18485 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/0b/32ce3ad35983bf4f603c43cfb00559b37bb5ed90ac4ef9f1d5564b8e4034/types_pygments-2.19.0.20250516-py3-none-any.whl", hash = "sha256:db27de8b59591389cd7d14792483892c021c73b8389ef55fef40a48aa371fbcc", size = 25440, upload-time = "2025-05-16T03:09:29.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/0b/32ce3ad35983bf4f603c43cfb00559b37bb5ed90ac4ef9f1d5564b8e4034/types_pygments-2.19.0.20250516-py3-none-any.whl", hash = "sha256:db27de8b59591389cd7d14792483892c021c73b8389ef55fef40a48aa371fbcc", size = 25440 }, ] [[package]] name = 
"types-pymysql" version = "1.1.0.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/a3/db349a06c64b8c041c165fc470b81d37404ec342014625c7a6b7f7a4f680/types_pymysql-1.1.0.20250708.tar.gz", hash = "sha256:2cbd7cfcf9313eda784910578c4f1d06f8cc03a15cd30ce588aa92dd6255011d", size = 21715, upload-time = "2025-07-08T03:13:56.463Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/a3/db349a06c64b8c041c165fc470b81d37404ec342014625c7a6b7f7a4f680/types_pymysql-1.1.0.20250708.tar.gz", hash = "sha256:2cbd7cfcf9313eda784910578c4f1d06f8cc03a15cd30ce588aa92dd6255011d", size = 21715 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/e5/7f72c520f527175b6455e955426fd4f971128b4fa2f8ab2f505f254a1ddc/types_pymysql-1.1.0.20250708-py3-none-any.whl", hash = "sha256:9252966d2795945b2a7a53d5cdc49fe8e4e2f3dde4c104ed7fc782a83114e365", size = 22860, upload-time = "2025-07-08T03:13:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/88/e5/7f72c520f527175b6455e955426fd4f971128b4fa2f8ab2f505f254a1ddc/types_pymysql-1.1.0.20250708-py3-none-any.whl", hash = "sha256:9252966d2795945b2a7a53d5cdc49fe8e4e2f3dde4c104ed7fc782a83114e365", size = 22860 }, ] [[package]] @@ -6031,54 +6071,54 @@ dependencies = [ { name = "cryptography" }, { name = "types-cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458, upload-time = "2024-07-22T02:32:22.558Z" } +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458 } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499, upload-time = "2024-07-22T02:32:21.232Z" }, + { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499 }, ] [[package]] name = "types-python-dateutil" version = "2.9.0.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/95/6bdde7607da2e1e99ec1c1672a759d42f26644bbacf939916e086db34870/types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab", size = 15834, upload-time = "2025-07-08T03:14:03.382Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/95/6bdde7607da2e1e99ec1c1672a759d42f26644bbacf939916e086db34870/types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab", size = 15834 } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/52/43e70a8e57fefb172c22a21000b03ebcc15e47e97f5cb8495b9c2832efb4/types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f", size = 17724, upload-time = "2025-07-08T03:14:02.593Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/52/43e70a8e57fefb172c22a21000b03ebcc15e47e97f5cb8495b9c2832efb4/types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f", size = 17724 }, ] [[package]] name = "types-python-http-client" version = "3.3.7.20250708" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707, upload-time = "2025-07-08T03:14:36.197Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/a0/0ad93698a3ebc6846ca23aca20ff6f6f8ebe7b4f0c1de7f19e87c03dbe8f/types_python_http_client-3.3.7.20250708.tar.gz", hash = "sha256:5f85b32dc64671a4e5e016142169aa187c5abed0b196680944e4efd3d5ce3322", size = 7707 } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890, upload-time = "2025-07-08T03:14:35.506Z" }, + { url = "https://files.pythonhosted.org/packages/85/4f/b88274658cf489e35175be8571c970e9a1219713bafd8fc9e166d7351ecb/types_python_http_client-3.3.7.20250708-py3-none-any.whl", hash = "sha256:e2fc253859decab36713d82fc7f205868c3ddeaee79dbb55956ad9ca77abe12b", size = 8890 }, ] [[package]] name = "types-pytz" version = "2025.2.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/72/b0e711fd90409f5a76c75349055d3eb19992c110f0d2d6aabbd6cfbc14bf/types_pytz-2025.2.0.20250516.tar.gz", hash = "sha256:e1216306f8c0d5da6dafd6492e72eb080c9a166171fa80dd7a1990fd8be7a7b3", size = 10940, upload-time = "2025-05-16T03:07:01.91Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/72/b0e711fd90409f5a76c75349055d3eb19992c110f0d2d6aabbd6cfbc14bf/types_pytz-2025.2.0.20250516.tar.gz", hash = "sha256:e1216306f8c0d5da6dafd6492e72eb080c9a166171fa80dd7a1990fd8be7a7b3", size = 10940 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ba/e205cd11c1c7183b23c97e4bcd1de7bc0633e2e867601c32ecfc6ad42675/types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451", size = 10136, upload-time = "2025-05-16T03:07:01.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ba/e205cd11c1c7183b23c97e4bcd1de7bc0633e2e867601c32ecfc6ad42675/types_pytz-2025.2.0.20250516-py3-none-any.whl", hash = "sha256:e0e0c8a57e2791c19f718ed99ab2ba623856b11620cb6b637e5f62ce285a7451", size = 10136 }, ] [[package]] name = "types-pywin32" version = "310.0.0.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/bc/c7be2934a37cc8c645c945ca88450b541e482c4df3ac51e5556377d34811/types_pywin32-310.0.0.20250516.tar.gz", hash = "sha256:91e5bfc033f65c9efb443722eff8101e31d690dd9a540fa77525590d3da9cc9d", size = 328459, upload-time = "2025-05-16T03:07:57.411Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/bc/c7be2934a37cc8c645c945ca88450b541e482c4df3ac51e5556377d34811/types_pywin32-310.0.0.20250516.tar.gz", hash = "sha256:91e5bfc033f65c9efb443722eff8101e31d690dd9a540fa77525590d3da9cc9d", size = 328459 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/9b/72/469e4cc32399dbe6c843e38fdb6d04fee755e984e137c0da502f74d3ac59/types_pywin32-310.0.0.20250516-py3-none-any.whl", hash = "sha256:f9ef83a1ec3e5aae2b0e24c5f55ab41272b5dfeaabb9a0451d33684c9545e41a", size = 390411, upload-time = "2025-05-16T03:07:56.282Z" }, + { url = "https://files.pythonhosted.org/packages/9b/72/469e4cc32399dbe6c843e38fdb6d04fee755e984e137c0da502f74d3ac59/types_pywin32-310.0.0.20250516-py3-none-any.whl", hash = "sha256:f9ef83a1ec3e5aae2b0e24c5f55ab41272b5dfeaabb9a0451d33684c9545e41a", size = 390411 }, ] [[package]] name = "types-pyyaml" version = "6.0.12.20250516" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/22/59e2aeb48ceeee1f7cd4537db9568df80d62bdb44a7f9e743502ea8aab9c/types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba", size = 17378, upload-time = "2025-05-16T03:08:04.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/22/59e2aeb48ceeee1f7cd4537db9568df80d62bdb44a7f9e743502ea8aab9c/types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba", size = 17378 } wheels = [ - { url = "https://files.pythonhosted.org/packages/99/5f/e0af6f7f6a260d9af67e1db4f54d732abad514252a7a378a6c4d17dd1036/types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530", size = 20312, upload-time = "2025-05-16T03:08:04.019Z" }, + { url = "https://files.pythonhosted.org/packages/99/5f/e0af6f7f6a260d9af67e1db4f54d732abad514252a7a378a6c4d17dd1036/types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530", size = 20312 }, ] [[package]] @@ -6089,18 +6129,18 @@ dependencies = [ { name = "cryptography" }, { name = "types-pyopenssl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679, upload-time = "2024-10-04T02:43:59.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679 } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737, upload-time = "2024-10-04T02:43:57.968Z" }, + { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737 }, ] [[package]] name = "types-regex" version = "2024.11.6.20250403" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/75/012b90c8557d3abb3b58a9073a94d211c8f75c9b2e26bf0d8af7ecf7bc78/types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665", size = 12394, upload-time = "2025-04-03T02:54:35.379Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c7/75/012b90c8557d3abb3b58a9073a94d211c8f75c9b2e26bf0d8af7ecf7bc78/types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665", size = 12394 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/49/67200c4708f557be6aa4ecdb1fa212d67a10558c5240251efdc799cca22f/types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001", size = 10396, upload-time = "2025-04-03T02:54:34.555Z" }, + { url = "https://files.pythonhosted.org/packages/61/49/67200c4708f557be6aa4ecdb1fa212d67a10558c5240251efdc799cca22f/types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001", size = 10396 }, ] [[package]] @@ -6110,9 +6150,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118, upload-time = "2025-06-11T03:11:41.272Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643, upload-time = "2025-06-11T03:11:40.186Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643 }, ] [[package]] @@ -6123,27 +6163,27 @@ dependencies = [ { name = "types-oauthlib" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/7b/1803a83dbccf0698a9fb70a444d12f1dcb0f49a5d8a6327a1e53fac19e15/types_requests_oauthlib-2.0.0.20250516.tar.gz", hash = "sha256:2a384b6ca080bd1eb30a88e14836237dc43d217892fddf869f03aea65213e0d4", size = 11034, upload-time = "2025-05-16T03:09:45.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/7b/1803a83dbccf0698a9fb70a444d12f1dcb0f49a5d8a6327a1e53fac19e15/types_requests_oauthlib-2.0.0.20250516.tar.gz", hash = "sha256:2a384b6ca080bd1eb30a88e14836237dc43d217892fddf869f03aea65213e0d4", size = 11034 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/3c/1bc76f1097cc4978cc97df11524f47559f8927fb2a2807375947bd185189/types_requests_oauthlib-2.0.0.20250516-py3-none-any.whl", hash = "sha256:faf417c259a3ae54c1b72c77032c07af3025ed90164c905fb785d21e8580139c", size = 14343, upload-time = "2025-05-16T03:09:43.874Z" }, + { url = "https://files.pythonhosted.org/packages/e8/3c/1bc76f1097cc4978cc97df11524f47559f8927fb2a2807375947bd185189/types_requests_oauthlib-2.0.0.20250516-py3-none-any.whl", hash = "sha256:faf417c259a3ae54c1b72c77032c07af3025ed90164c905fb785d21e8580139c", size = 14343 }, ] [[package]] name = "types-s3transfer" version = "0.13.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/42/c1/45038f259d6741c252801044e184fec4dbaeff939a58f6160d7c32bf4975/types_s3transfer-0.13.0.tar.gz", hash = "sha256:203dadcb9865c2f68fb44bc0440e1dc05b79197ba4a641c0976c26c9af75ef52", size = 14175, upload-time = "2025-05-28T02:16:07.614Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/c1/45038f259d6741c252801044e184fec4dbaeff939a58f6160d7c32bf4975/types_s3transfer-0.13.0.tar.gz", hash = "sha256:203dadcb9865c2f68fb44bc0440e1dc05b79197ba4a641c0976c26c9af75ef52", size = 14175 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/5d/6bbe4bf6a79fb727945291aef88b5ecbdba857a603f1bbcf1a6be0d3f442/types_s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:79c8375cbf48a64bff7654c02df1ec4b20d74f8c5672fc13e382f593ca5565b3", size = 19588, upload-time = "2025-05-28T02:16:06.709Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5d/6bbe4bf6a79fb727945291aef88b5ecbdba857a603f1bbcf1a6be0d3f442/types_s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:79c8375cbf48a64bff7654c02df1ec4b20d74f8c5672fc13e382f593ca5565b3", size = 19588 }, ] [[package]] name = "types-setuptools" version = "80.9.0.20250529" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/66/1b276526aad4696a9519919e637801f2c103419d2c248a6feb2729e034d1/types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91", size = 41337, upload-time = "2025-05-29T03:07:34.487Z" } +sdist = { url = "https://files.pythonhosted.org/packages/79/66/1b276526aad4696a9519919e637801f2c103419d2c248a6feb2729e034d1/types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91", size = 41337 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/d8/83790d67ec771bf029a45ff1bd1aedbb738d8aa58c09dd0cc3033eea0e69/types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f", size = 63263, upload-time = "2025-05-29T03:07:33.064Z" }, + { url = "https://files.pythonhosted.org/packages/1b/d8/83790d67ec771bf029a45ff1bd1aedbb738d8aa58c09dd0cc3033eea0e69/types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f", size = 63263 }, ] [[package]] @@ -6153,27 +6193,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4e/55/c71a25fd3fc9200df4d0b5fd2f6d74712a82f9a8bbdd90cefb9e6aee39dd/types_shapely-2.0.0.20250404.tar.gz", hash = "sha256:863f540b47fa626c33ae64eae06df171f9ab0347025d4458d2df496537296b4f", size = 25066, upload-time = "2025-04-04T02:54:30.592Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/55/c71a25fd3fc9200df4d0b5fd2f6d74712a82f9a8bbdd90cefb9e6aee39dd/types_shapely-2.0.0.20250404.tar.gz", hash = "sha256:863f540b47fa626c33ae64eae06df171f9ab0347025d4458d2df496537296b4f", size = 25066 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/ff/7f4d414eb81534ba2476f3d54f06f1463c2ebf5d663fd10cff16ba607dd6/types_shapely-2.0.0.20250404-py3-none-any.whl", hash = "sha256:170fb92f5c168a120db39b3287697fdec5c93ef3e1ad15e52552c36b25318821", size = 36350, upload-time = "2025-04-04T02:54:29.506Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ff/7f4d414eb81534ba2476f3d54f06f1463c2ebf5d663fd10cff16ba607dd6/types_shapely-2.0.0.20250404-py3-none-any.whl", 
hash = "sha256:170fb92f5c168a120db39b3287697fdec5c93ef3e1ad15e52552c36b25318821", size = 36350 }, ] [[package]] name = "types-simplejson" version = "3.20.0.20250326" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/14/e26fc55e1ea56f9ea470917d3e2f8240e6d043ca914181021d04115ae0f7/types_simplejson-3.20.0.20250326.tar.gz", hash = "sha256:b2689bc91e0e672d7a5a947b4cb546b76ae7ddc2899c6678e72a10bf96cd97d2", size = 10489, upload-time = "2025-03-26T02:53:35.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/14/e26fc55e1ea56f9ea470917d3e2f8240e6d043ca914181021d04115ae0f7/types_simplejson-3.20.0.20250326.tar.gz", hash = "sha256:b2689bc91e0e672d7a5a947b4cb546b76ae7ddc2899c6678e72a10bf96cd97d2", size = 10489 } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/bf/d3f3a5ba47fd18115e8446d39f025b85905d2008677c29ee4d03b4cddd57/types_simplejson-3.20.0.20250326-py3-none-any.whl", hash = "sha256:db1ddea7b8f7623b27a137578f22fc6c618db8c83ccfb1828ca0d2f0ec11efa7", size = 10462, upload-time = "2025-03-26T02:53:35.036Z" }, + { url = "https://files.pythonhosted.org/packages/76/bf/d3f3a5ba47fd18115e8446d39f025b85905d2008677c29ee4d03b4cddd57/types_simplejson-3.20.0.20250326-py3-none-any.whl", hash = "sha256:db1ddea7b8f7623b27a137578f22fc6c618db8c83ccfb1828ca0d2f0ec11efa7", size = 10462 }, ] [[package]] name = "types-six" version = "1.17.0.20250515" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/78/344047eeced8d230140aa3d9503aa969acb61c6095e7308bbc1ff1de3865/types_six-1.17.0.20250515.tar.gz", hash = "sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18", size = 15598, upload-time = "2025-05-15T03:04:19.806Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/78/344047eeced8d230140aa3d9503aa969acb61c6095e7308bbc1ff1de3865/types_six-1.17.0.20250515.tar.gz", hash = "sha256:f4f7f0398cb79304e88397336e642b15e96fbeacf5b96d7625da366b069d2d18", size = 15598 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/85/5ee1c8e35b33b9c8ea1816d5a4e119c27f8bb1539b73b1f636f07aa64750/types_six-1.17.0.20250515-py3-none-any.whl", hash = "sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1", size = 19987, upload-time = "2025-05-15T03:04:18.556Z" }, + { url = "https://files.pythonhosted.org/packages/d1/85/5ee1c8e35b33b9c8ea1816d5a4e119c27f8bb1539b73b1f636f07aa64750/types_six-1.17.0.20250515-py3-none-any.whl", hash = "sha256:adfaa9568caf35e03d80ffa4ed765c33b282579c869b40bf4b6009c7d8db3fb1", size = 19987 }, ] [[package]] @@ -6185,9 +6225,9 @@ dependencies = [ { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/18/b726d886e7af565c4439d2c8d32e510651be40807e2a66aaea2ed75d7c82/types_tensorflow-2.18.0.20250516.tar.gz", hash = "sha256:5777e1848e52b1f4a87b44ce1ec738b7407a744669bab87ec0f5f1e0ce6bd1fe", size = 257705, upload-time = "2025-05-16T03:09:41.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/18/b726d886e7af565c4439d2c8d32e510651be40807e2a66aaea2ed75d7c82/types_tensorflow-2.18.0.20250516.tar.gz", hash = "sha256:5777e1848e52b1f4a87b44ce1ec738b7407a744669bab87ec0f5f1e0ce6bd1fe", size = 257705 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/fd/0d8fbc7172fa7cca345c61a949952df8906f6da161dfbb4305c670aeabad/types_tensorflow-2.18.0.20250516-py3-none-any.whl", hash = 
"sha256:e8681f8c2a60f87f562df1472790c1e930895e7e463c4c65d1be98d8d908e45e", size = 329211, upload-time = "2025-05-16T03:09:40.111Z" }, + { url = "https://files.pythonhosted.org/packages/96/fd/0d8fbc7172fa7cca345c61a949952df8906f6da161dfbb4305c670aeabad/types_tensorflow-2.18.0.20250516-py3-none-any.whl", hash = "sha256:e8681f8c2a60f87f562df1472790c1e930895e7e463c4c65d1be98d8d908e45e", size = 329211 }, ] [[package]] @@ -6197,27 +6237,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/07/eb40de2dc2ff2d1a53180330981b1bdb42313ab4e1b11195d8d64c878b3c/types_tqdm-4.67.0.20250516.tar.gz", hash = "sha256:230ccab8a332d34f193fc007eb132a6ef54b4512452e718bf21ae0a7caeb5a6b", size = 17232, upload-time = "2025-05-16T03:09:52.091Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/07/eb40de2dc2ff2d1a53180330981b1bdb42313ab4e1b11195d8d64c878b3c/types_tqdm-4.67.0.20250516.tar.gz", hash = "sha256:230ccab8a332d34f193fc007eb132a6ef54b4512452e718bf21ae0a7caeb5a6b", size = 17232 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/92/df621429f098fc573a63a8ba348e731c3051b397df0cff278f8887f28d24/types_tqdm-4.67.0.20250516-py3-none-any.whl", hash = "sha256:1dd9b2c65273f2342f37e5179bc6982df86b6669b3376efc12aef0a29e35d36d", size = 24032, upload-time = "2025-05-16T03:09:51.226Z" }, + { url = "https://files.pythonhosted.org/packages/3b/92/df621429f098fc573a63a8ba348e731c3051b397df0cff278f8887f28d24/types_tqdm-4.67.0.20250516-py3-none-any.whl", hash = "sha256:1dd9b2c65273f2342f37e5179bc6982df86b6669b3376efc12aef0a29e35d36d", size = 24032 }, ] [[package]] name = "types-ujson" version = "5.10.0.20250326" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/5c/c974451c4babdb4ae3588925487edde492d59a8403010b4642a554d09954/types_ujson-5.10.0.20250326.tar.gz", hash = "sha256:5469e05f2c31ecb3c4c0267cc8fe41bcd116826fbb4ded69801a645c687dd014", size = 8340, upload-time = "2025-03-26T02:53:39.197Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/5c/c974451c4babdb4ae3588925487edde492d59a8403010b4642a554d09954/types_ujson-5.10.0.20250326.tar.gz", hash = "sha256:5469e05f2c31ecb3c4c0267cc8fe41bcd116826fbb4ded69801a645c687dd014", size = 8340 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/c9/8a73a5f8fa6e70fc02eed506d5ac0ae9ceafbd2b8c9ad34a7de0f29900d6/types_ujson-5.10.0.20250326-py3-none-any.whl", hash = "sha256:acc0913f569def62ef6a892c8a47703f65d05669a3252391a97765cf207dca5b", size = 7644, upload-time = "2025-03-26T02:53:38.2Z" }, + { url = "https://files.pythonhosted.org/packages/3e/c9/8a73a5f8fa6e70fc02eed506d5ac0ae9ceafbd2b8c9ad34a7de0f29900d6/types_ujson-5.10.0.20250326-py3-none-any.whl", hash = "sha256:acc0913f569def62ef6a892c8a47703f65d05669a3252391a97765cf207dca5b", size = 7644 }, ] [[package]] name = "typing-extensions" version = "4.14.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = 
"sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906 }, ] [[package]] @@ -6228,9 +6268,9 @@ dependencies = [ { name = "mypy-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825 } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 }, ] [[package]] @@ -6240,18 +6280,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 }, ] [[package]] name = "tzdata" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +sdist = { url 
= "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, ] [[package]] @@ -6261,37 +6301,37 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tzdata", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026 }, ] [[package]] name = "ujson" version = "5.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/54/6f2bdac7117e89a47de4511c9f01732a283457ab1bf856e1e51aa861619e/ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532", size = 7154214, upload-time = "2023-12-10T22:50:34.812Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/54/6f2bdac7117e89a47de4511c9f01732a283457ab1bf856e1e51aa861619e/ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532", size = 7154214 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/ca/ae3a6ca5b4f82ce654d6ac3dde5e59520537e20939592061ba506f4e569a/ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b", size = 57753, upload-time = "2023-12-10T22:49:03.939Z" }, - { url = "https://files.pythonhosted.org/packages/34/5f/c27fa9a1562c96d978c39852b48063c3ca480758f3088dcfc0f3b09f8e93/ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0", size = 54092, upload-time = "2023-12-10T22:49:05.194Z" }, - { url = "https://files.pythonhosted.org/packages/19/f3/1431713de9e5992e5e33ba459b4de28f83904233958855d27da820a101f9/ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae", size = 51675, upload-time = "2023-12-10T22:49:06.449Z" }, - { url = "https://files.pythonhosted.org/packages/d3/93/de6fff3ae06351f3b1c372f675fe69bc180f93d237c9e496c05802173dd6/ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d", size = 53246, upload-time = "2023-12-10T22:49:07.691Z" }, - { url = "https://files.pythonhosted.org/packages/26/73/db509fe1d7da62a15c0769c398cec66bdfc61a8bdffaf7dfa9d973e3d65c/ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e", size = 58182, upload-time = "2023-12-10T22:49:08.89Z" }, - { url = "https://files.pythonhosted.org/packages/fc/a8/6be607fa3e1fa3e1c9b53f5de5acad33b073b6cc9145803e00bcafa729a8/ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908", size = 584493, upload-time = "2023-12-10T22:49:11.043Z" }, - { url = "https://files.pythonhosted.org/packages/c8/c7/33822c2f1a8175e841e2bc378ffb2c1109ce9280f14cedb1b2fa0caf3145/ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b", size = 656038, upload-time = "2023-12-10T22:49:12.651Z" }, - { url = "https://files.pythonhosted.org/packages/51/b8/5309fbb299d5fcac12bbf3db20896db5178392904abe6b992da233dc69d6/ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d", size = 597643, upload-time = "2023-12-10T22:49:14.883Z" }, - { url = "https://files.pythonhosted.org/packages/5f/64/7b63043b95dd78feed401b9973958af62645a6d19b72b6e83d1ea5af07e0/ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120", size = 38342, upload-time = "2023-12-10T22:49:16.854Z" }, - { url = "https://files.pythonhosted.org/packages/7a/13/a3cd1fc3a1126d30b558b6235c05e2d26eeaacba4979ee2fd2b5745c136d/ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99", size = 41923, upload-time = "2023-12-10T22:49:17.983Z" }, - { url = "https://files.pythonhosted.org/packages/16/7e/c37fca6cd924931fa62d615cdbf5921f34481085705271696eff38b38867/ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c", size = 57834, upload-time = "2023-12-10T22:49:19.799Z" }, - { url = "https://files.pythonhosted.org/packages/fb/44/2753e902ee19bf6ccaf0bda02f1f0037f92a9769a5d31319905e3de645b4/ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f", size = 54119, upload-time = "2023-12-10T22:49:21.039Z" }, - { url = "https://files.pythonhosted.org/packages/d2/06/2317433e394450bc44afe32b6c39d5a51014da4c6f6cfc2ae7bf7b4a2922/ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399", size = 51658, upload-time = "2023-12-10T22:49:22.494Z" }, - { url = "https://files.pythonhosted.org/packages/5b/3a/2acf0da085d96953580b46941504aa3c91a1dd38701b9e9bfa43e2803467/ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e", size = 53370, upload-time = "2023-12-10T22:49:24.045Z" }, - { url = "https://files.pythonhosted.org/packages/03/32/737e6c4b1841720f88ae88ec91f582dc21174bd40742739e1fa16a0c9ffa/ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320", size = 58278, upload-time = "2023-12-10T22:49:25.261Z" }, - { url = "https://files.pythonhosted.org/packages/8a/dc/3fda97f1ad070ccf2af597fb67dde358bc698ffecebe3bc77991d60e4fe5/ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164", size = 584418, upload-time = "2023-12-10T22:49:27.573Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/e4083d774fcd8ff3089c0ff19c424abe33f23e72c6578a8172bf65131992/ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01", size = 656126, upload-time = "2023-12-10T22:49:29.509Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c3/8c6d5f6506ca9fcedd5a211e30a7d5ee053dc05caf23dae650e1f897effb/ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c", size = 597795, upload-time = "2023-12-10T22:49:31.029Z" }, - { url = "https://files.pythonhosted.org/packages/34/5a/a231f0cd305a34cf2d16930304132db3a7a8c3997b367dd38fc8f8dfae36/ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437", size = 38495, upload-time = "2023-12-10T22:49:33.2Z" }, - { url = "https://files.pythonhosted.org/packages/30/b7/18b841b44760ed298acdb150608dccdc045c41655e0bae4441f29bcab872/ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c", size = 42088, upload-time = "2023-12-10T22:49:34.921Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ca/ae3a6ca5b4f82ce654d6ac3dde5e59520537e20939592061ba506f4e569a/ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b", size = 57753 }, + { url = "https://files.pythonhosted.org/packages/34/5f/c27fa9a1562c96d978c39852b48063c3ca480758f3088dcfc0f3b09f8e93/ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0", size = 54092 }, + { url = "https://files.pythonhosted.org/packages/19/f3/1431713de9e5992e5e33ba459b4de28f83904233958855d27da820a101f9/ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae", size = 51675 }, + { url = "https://files.pythonhosted.org/packages/d3/93/de6fff3ae06351f3b1c372f675fe69bc180f93d237c9e496c05802173dd6/ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d", size = 53246 }, + { url = "https://files.pythonhosted.org/packages/26/73/db509fe1d7da62a15c0769c398cec66bdfc61a8bdffaf7dfa9d973e3d65c/ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e", size = 58182 }, + { url = 
"https://files.pythonhosted.org/packages/fc/a8/6be607fa3e1fa3e1c9b53f5de5acad33b073b6cc9145803e00bcafa729a8/ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908", size = 584493 }, + { url = "https://files.pythonhosted.org/packages/c8/c7/33822c2f1a8175e841e2bc378ffb2c1109ce9280f14cedb1b2fa0caf3145/ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b", size = 656038 }, + { url = "https://files.pythonhosted.org/packages/51/b8/5309fbb299d5fcac12bbf3db20896db5178392904abe6b992da233dc69d6/ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d", size = 597643 }, + { url = "https://files.pythonhosted.org/packages/5f/64/7b63043b95dd78feed401b9973958af62645a6d19b72b6e83d1ea5af07e0/ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120", size = 38342 }, + { url = "https://files.pythonhosted.org/packages/7a/13/a3cd1fc3a1126d30b558b6235c05e2d26eeaacba4979ee2fd2b5745c136d/ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99", size = 41923 }, + { url = "https://files.pythonhosted.org/packages/16/7e/c37fca6cd924931fa62d615cdbf5921f34481085705271696eff38b38867/ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c", size = 57834 }, + { url = "https://files.pythonhosted.org/packages/fb/44/2753e902ee19bf6ccaf0bda02f1f0037f92a9769a5d31319905e3de645b4/ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f", size = 54119 }, + { url = "https://files.pythonhosted.org/packages/d2/06/2317433e394450bc44afe32b6c39d5a51014da4c6f6cfc2ae7bf7b4a2922/ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399", size = 51658 }, + { url = "https://files.pythonhosted.org/packages/5b/3a/2acf0da085d96953580b46941504aa3c91a1dd38701b9e9bfa43e2803467/ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e", size = 53370 }, + { url = "https://files.pythonhosted.org/packages/03/32/737e6c4b1841720f88ae88ec91f582dc21174bd40742739e1fa16a0c9ffa/ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320", size = 58278 }, + { url = "https://files.pythonhosted.org/packages/8a/dc/3fda97f1ad070ccf2af597fb67dde358bc698ffecebe3bc77991d60e4fe5/ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164", size = 584418 }, + { url = "https://files.pythonhosted.org/packages/d7/57/e4083d774fcd8ff3089c0ff19c424abe33f23e72c6578a8172bf65131992/ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01", size = 656126 }, + { url = "https://files.pythonhosted.org/packages/0d/c3/8c6d5f6506ca9fcedd5a211e30a7d5ee053dc05caf23dae650e1f897effb/ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c", size = 597795 }, + { url = 
"https://files.pythonhosted.org/packages/34/5a/a231f0cd305a34cf2d16930304132db3a7a8c3997b367dd38fc8f8dfae36/ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437", size = 38495 }, + { url = "https://files.pythonhosted.org/packages/30/b7/18b841b44760ed298acdb150608dccdc045c41655e0bae4441f29bcab872/ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c", size = 42088 }, ] [[package]] @@ -6321,9 +6361,9 @@ dependencies = [ { name = "unstructured-client" }, { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/31/98c4c78e305d1294888adf87fd5ee30577a4c393951341ca32b43f167f1e/unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6", size = 1683097, upload-time = "2025-03-07T11:19:39.507Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/31/98c4c78e305d1294888adf87fd5ee30577a4c393951341ca32b43f167f1e/unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6", size = 1683097 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4f/ad08585b5c8a33c82ea119494c4d3023f4796958c56e668b15cc282ec0a0/unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337", size = 1769286, upload-time = "2025-03-07T11:19:37.299Z" }, + { url = "https://files.pythonhosted.org/packages/12/4f/ad08585b5c8a33c82ea119494c4d3023f4796958c56e668b15cc282ec0a0/unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337", size = 1769286 }, ] [package.optional-dependencies] @@ -6356,9 +6396,9 @@ dependencies = [ { name = "pypdf" }, { name = "requests-toolbelt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/60/412092671bfc4952640739f2c0c9b2f4c8af26a3c921738fd12621b4ddd8/unstructured_client-0.38.1.tar.gz", hash = "sha256:43ab0670dd8ff53d71e74f9b6dfe490a84a5303dab80a4873e118a840c6d46ca", size = 91781, upload-time = "2025-07-03T15:46:35.054Z" } +sdist = { url = "https://files.pythonhosted.org/packages/85/60/412092671bfc4952640739f2c0c9b2f4c8af26a3c921738fd12621b4ddd8/unstructured_client-0.38.1.tar.gz", hash = "sha256:43ab0670dd8ff53d71e74f9b6dfe490a84a5303dab80a4873e118a840c6d46ca", size = 91781 } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/e0/8c249f00ba85fb4aba5c541463312befbfbf491105ff5c06e508089467be/unstructured_client-0.38.1-py3-none-any.whl", hash = "sha256:71e5467870d0a0119c788c29ec8baf5c0f7123f424affc9d6682eeeb7b8d45fa", size = 212626, upload-time = "2025-07-03T15:46:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/26/e0/8c249f00ba85fb4aba5c541463312befbfbf491105ff5c06e508089467be/unstructured_client-0.38.1-py3-none-any.whl", hash = "sha256:71e5467870d0a0119c788c29ec8baf5c0f7123f424affc9d6682eeeb7b8d45fa", size = 212626 }, ] [[package]] @@ -6368,36 +6408,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/a6/a9178fef247687917701a60eb66542eb5361c58af40c033ba8174ff7366d/upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b", size = 15075, upload-time = "2024-09-27T12:02:13.533Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/94/a6/a9178fef247687917701a60eb66542eb5361c58af40c033ba8174ff7366d/upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b", size = 15075 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/45/95073b83b7fd7b83f10ea314f197bae3989bfe022e736b90145fe9ea4362/upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c", size = 15061, upload-time = "2024-09-27T12:02:12.041Z" }, + { url = "https://files.pythonhosted.org/packages/5d/45/95073b83b7fd7b83f10ea314f197bae3989bfe022e736b90145fe9ea4362/upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c", size = 15061 }, ] [[package]] name = "uritemplate" version = "4.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267, upload-time = "2025-06-02T15:12:06.318Z" } +sdist = { url = "https://files.pythonhosted.org/packages/98/60/f174043244c5306c9988380d2cb10009f91563fc4b31293d27e17201af56/uritemplate-4.2.0.tar.gz", hash = "sha256:480c2ed180878955863323eea31b0ede668795de182617fef9c6ca09e6ec9d0e", size = 33267 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488, upload-time = "2025-06-02T15:12:03.405Z" }, + { url = "https://files.pythonhosted.org/packages/a9/99/3ae339466c9183ea5b8ae87b34c0b897eda475d2aec2307cae60e5cd4f29/uritemplate-4.2.0-py3-none-any.whl", hash = "sha256:962201ba1c4edcab02e60f9a0d3821e82dfc5d2d6662a21abd533879bdb8a686", size = 11488 }, ] [[package]] name = "urllib3" version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 }, ] [[package]] name = "uuid6" version = "2025.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932, upload-time = 
"2025-07-04T18:30:35.186Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/b7/4c0f736ca824b3a25b15e8213d1bcfc15f8ac2ae48d1b445b310892dc4da/uuid6-2025.0.1.tar.gz", hash = "sha256:cd0af94fa428675a44e32c5319ec5a3485225ba2179eefcf4c3f205ae30a81bd", size = 13932 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979, upload-time = "2025-07-04T18:30:34.001Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b2/93faaab7962e2aa8d6e174afb6f76be2ca0ce89fde14d3af835acebcaa59/uuid6-2025.0.1-py3-none-any.whl", hash = "sha256:80530ce4d02a93cdf82e7122ca0da3ebbbc269790ec1cb902481fa3e9cc9ff99", size = 6979 }, ] [[package]] @@ -6408,9 +6448,9 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406 }, ] [package.optional-dependencies] @@ -6428,38 +6468,38 @@ standard = [ name = "uvloop" version = "0.21.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741, upload-time = "2024-10-14T23:38:35.489Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } wheels = [ - { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410, upload-time = "2024-10-14T23:37:33.612Z" }, - { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476, upload-time = "2024-10-14T23:37:36.11Z" }, - { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855, upload-time = "2024-10-14T23:37:37.683Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185, upload-time = "2024-10-14T23:37:40.226Z" }, - { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256, upload-time = "2024-10-14T23:37:42.839Z" }, - { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323, upload-time = "2024-10-14T23:37:45.337Z" }, - { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284, upload-time = "2024-10-14T23:37:47.833Z" }, - { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349, upload-time = "2024-10-14T23:37:50.149Z" }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089, upload-time = "2024-10-14T23:37:51.703Z" }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770, upload-time = "2024-10-14T23:37:54.122Z" }, - { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321, upload-time = "2024-10-14T23:37:55.766Z" }, - { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022, upload-time = "2024-10-14T23:37:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 }, + { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 
805476 }, + { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 }, + { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 }, + { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 }, + { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 }, + { url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 }, + { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 }, + { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 }, + { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 }, + { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 }, + { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 }, ] [[package]] name = "validators" version = "0.35.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399, upload-time = "2025-05-01T05:42:06.7Z" } +sdist = { url = "https://files.pythonhosted.org/packages/53/66/a435d9ae49850b2f071f7ebd8119dd4e84872b01630d6736761e6e7fd847/validators-0.35.0.tar.gz", hash = "sha256:992d6c48a4e77c81f1b4daba10d16c3a9bb0dbb79b3a19ea847ff0928e70497a", size = 73399 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712, upload-time = "2025-05-01T05:42:04.203Z" }, + { url = "https://files.pythonhosted.org/packages/fa/6e/3e955517e22cbdd565f2f8b2e73d52528b14b8bcfdb04f62466b071de847/validators-0.35.0-py3-none-any.whl", hash = "sha256:e8c947097eae7892cb3d26868d637f79f47b4a0554bc6b80065dfe5aac3705dd", size = 44712 }, ] [[package]] name = "vine" version = "5.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980 } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = "2023-11-05T08:46:51.205Z" }, + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636 }, ] [[package]] @@ -6475,9 +6515,9 @@ dependencies = [ { name = "retry" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/c5/62f2fbf0359b31d4e8f766e9ee3096c23d08fc294df1ab6ac117c2d1440c/volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267", size = 329616, upload-time = "2024-10-13T09:19:09.149Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/c5/62f2fbf0359b31d4e8f766e9ee3096c23d08fc294df1ab6ac117c2d1440c/volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267", size = 329616 } wheels = [ - { url = "https://files.pythonhosted.org/packages/37/da/7ccbe82470dc27e1cfd0466dc637248be906eb8447c28a40c1c74cf617ee/volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5", size = 677272, upload-time = "2024-10-13T09:17:19.944Z" }, + { url = "https://files.pythonhosted.org/packages/37/da/7ccbe82470dc27e1cfd0466dc637248be906eb8447c28a40c1c74cf617ee/volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5", size = 677272 }, ] [[package]] @@ -6496,18 +6536,18 @@ dependencies = [ { name = "sentry-sdk" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/73/09/c84264a219e20efd615e4d5d150cc7d359d57d51328d3fa94ee02d70ed9c/wandb-0.21.0.tar.gz", hash = "sha256:473e01ef200b59d780416062991effa7349a34e51425d4be5ff482af2dc39e02", size = 40085784, upload-time = "2025-07-02T00:24:15.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/09/c84264a219e20efd615e4d5d150cc7d359d57d51328d3fa94ee02d70ed9c/wandb-0.21.0.tar.gz", hash = 
"sha256:473e01ef200b59d780416062991effa7349a34e51425d4be5ff482af2dc39e02", size = 40085784 } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/dd/65eac086e1bc337bb5f0eed65ba1fe4a6dbc62c97f094e8e9df1ef83ffed/wandb-0.21.0-py3-none-any.whl", hash = "sha256:316e8cd4329738f7562f7369e6eabeeb28ef9d473203f7ead0d03e5dba01c90d", size = 6504284, upload-time = "2025-07-02T00:23:46.671Z" }, - { url = "https://files.pythonhosted.org/packages/17/a7/80556ce9097f59e10807aa68f4a9b29d736a90dca60852a9e2af1641baf8/wandb-0.21.0-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:701d9cbdfcc8550a330c1b54a26f1585519180e0f19247867446593d34ace46b", size = 21717388, upload-time = "2025-07-02T00:23:49.348Z" }, - { url = "https://files.pythonhosted.org/packages/23/ae/660bc75aa37bd23409822ea5ed616177d94873172d34271693c80405c820/wandb-0.21.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:01689faa6b691df23ba2367e0a1ecf6e4d0be44474905840098eedd1fbcb8bdf", size = 21141465, upload-time = "2025-07-02T00:23:52.602Z" }, - { url = "https://files.pythonhosted.org/packages/23/ab/9861929530be56557c74002868c85d0d8ac57050cc21863afe909ae3d46f/wandb-0.21.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:55d3f42ddb7971d1699752dff2b85bcb5906ad098d18ab62846c82e9ce5a238d", size = 21793511, upload-time = "2025-07-02T00:23:55.447Z" }, - { url = "https://files.pythonhosted.org/packages/de/52/e5cad2eff6fbed1ac06f4a5b718457fa2fd437f84f5c8f0d31995a2ef046/wandb-0.21.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:893508f0c7da48917448daa5cd622c27ce7ce15119adaa861185034c2bd7b14c", size = 20704643, upload-time = "2025-07-02T00:23:58.255Z" }, - { url = "https://files.pythonhosted.org/packages/83/8f/6bed9358cc33767c877b221d4f565e1ddf00caf4bbbe54d2e3bbc932c6a7/wandb-0.21.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e8245a8912247ddf7654f7b5330f583a6c56ab88fee65589158490d583c57d", size = 22243012, upload-time = "2025-07-02T00:24:01.423Z" }, - { url = "https://files.pythonhosted.org/packages/be/61/9048015412ea5ca916844af55add4fed7c21fe1ad70bb137951e70b550c5/wandb-0.21.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c4f951e0d02755e315679bfdcb5bc38c1b02e2e5abc5432b91a91bb0cf246", size = 20716440, upload-time = "2025-07-02T00:24:04.198Z" }, - { url = "https://files.pythonhosted.org/packages/02/d9/fcd2273d8ec3f79323e40a031aba5d32d6fa9065702010eb428b5ffbab62/wandb-0.21.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:873749966eeac0069e0e742e6210641b6227d454fb1dae2cf5c437c6ed42d3ca", size = 22320652, upload-time = "2025-07-02T00:24:07.175Z" }, - { url = "https://files.pythonhosted.org/packages/80/68/b8308db6b9c3c96dcd03be17c019aee105e1d7dc1e74d70756cdfb9241c6/wandb-0.21.0-py3-none-win32.whl", hash = "sha256:9d3cccfba658fa011d6cab9045fa4f070a444885e8902ae863802549106a5dab", size = 21484296, upload-time = "2025-07-02T00:24:10.147Z" }, - { url = "https://files.pythonhosted.org/packages/cf/96/71cc033e8abd00e54465e68764709ed945e2da2d66d764f72f4660262b22/wandb-0.21.0-py3-none-win_amd64.whl", hash = "sha256:28a0b2dad09d7c7344ac62b0276be18a2492a5578e4d7c84937a3e1991edaac7", size = 21484301, upload-time = "2025-07-02T00:24:12.658Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/65eac086e1bc337bb5f0eed65ba1fe4a6dbc62c97f094e8e9df1ef83ffed/wandb-0.21.0-py3-none-any.whl", hash = "sha256:316e8cd4329738f7562f7369e6eabeeb28ef9d473203f7ead0d03e5dba01c90d", size = 6504284 }, + { url = 
"https://files.pythonhosted.org/packages/17/a7/80556ce9097f59e10807aa68f4a9b29d736a90dca60852a9e2af1641baf8/wandb-0.21.0-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:701d9cbdfcc8550a330c1b54a26f1585519180e0f19247867446593d34ace46b", size = 21717388 }, + { url = "https://files.pythonhosted.org/packages/23/ae/660bc75aa37bd23409822ea5ed616177d94873172d34271693c80405c820/wandb-0.21.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:01689faa6b691df23ba2367e0a1ecf6e4d0be44474905840098eedd1fbcb8bdf", size = 21141465 }, + { url = "https://files.pythonhosted.org/packages/23/ab/9861929530be56557c74002868c85d0d8ac57050cc21863afe909ae3d46f/wandb-0.21.0-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:55d3f42ddb7971d1699752dff2b85bcb5906ad098d18ab62846c82e9ce5a238d", size = 21793511 }, + { url = "https://files.pythonhosted.org/packages/de/52/e5cad2eff6fbed1ac06f4a5b718457fa2fd437f84f5c8f0d31995a2ef046/wandb-0.21.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:893508f0c7da48917448daa5cd622c27ce7ce15119adaa861185034c2bd7b14c", size = 20704643 }, + { url = "https://files.pythonhosted.org/packages/83/8f/6bed9358cc33767c877b221d4f565e1ddf00caf4bbbe54d2e3bbc932c6a7/wandb-0.21.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e8245a8912247ddf7654f7b5330f583a6c56ab88fee65589158490d583c57d", size = 22243012 }, + { url = "https://files.pythonhosted.org/packages/be/61/9048015412ea5ca916844af55add4fed7c21fe1ad70bb137951e70b550c5/wandb-0.21.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c4f951e0d02755e315679bfdcb5bc38c1b02e2e5abc5432b91a91bb0cf246", size = 20716440 }, + { url = "https://files.pythonhosted.org/packages/02/d9/fcd2273d8ec3f79323e40a031aba5d32d6fa9065702010eb428b5ffbab62/wandb-0.21.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:873749966eeac0069e0e742e6210641b6227d454fb1dae2cf5c437c6ed42d3ca", size = 22320652 }, + { url = "https://files.pythonhosted.org/packages/80/68/b8308db6b9c3c96dcd03be17c019aee105e1d7dc1e74d70756cdfb9241c6/wandb-0.21.0-py3-none-win32.whl", hash = "sha256:9d3cccfba658fa011d6cab9045fa4f070a444885e8902ae863802549106a5dab", size = 21484296 }, + { url = "https://files.pythonhosted.org/packages/cf/96/71cc033e8abd00e54465e68764709ed945e2da2d66d764f72f4660262b22/wandb-0.21.0-py3-none-win_amd64.whl", hash = "sha256:28a0b2dad09d7c7344ac62b0276be18a2492a5578e4d7c84937a3e1991edaac7", size = 21484301 }, ] [[package]] @@ -6517,47 +6557,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751, upload-time = "2025-06-15T19:05:07.679Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313, upload-time = "2025-06-15T19:05:08.764Z" }, - { url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792, upload-time = "2025-06-15T19:05:09.869Z" }, - { url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196, upload-time = "2025-06-15T19:05:11.91Z" }, - { url = "https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788, upload-time = "2025-06-15T19:05:13.373Z" }, - { url = "https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879, upload-time = "2025-06-15T19:05:14.725Z" }, - { url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447, upload-time = "2025-06-15T19:05:15.775Z" }, - { url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145, upload-time = "2025-06-15T19:05:17.17Z" }, - { url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539, upload-time = "2025-06-15T19:05:18.557Z" }, - { url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472, upload-time = "2025-06-15T19:05:19.588Z" }, - { url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348, upload-time = "2025-06-15T19:05:20.856Z" }, - { url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607, upload-time = "2025-06-15T19:05:21.937Z" }, 
- { url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056, upload-time = "2025-06-15T19:05:23.12Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" }, - { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" }, - { url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" }, - { url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" }, - { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" }, - { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" }, - { url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" }, - { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" }, - { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" }, - { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = 
"2025-06-15T19:05:34.534Z" }, - { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" }, - { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" }, - { url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" }, - { url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910, upload-time = "2025-06-15T19:06:49.335Z" }, - { url = "https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816, upload-time = "2025-06-15T19:06:50.433Z" }, - { url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584, upload-time = "2025-06-15T19:06:51.834Z" }, - { url = "https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009, upload-time = "2025-06-15T19:06:52.896Z" }, + { url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751 }, + { url = "https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313 }, + { url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792 }, + { url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196 }, + { url = 
"https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788 }, + { url = "https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879 }, + { url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447 }, + { url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145 }, + { url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539 }, + { url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472 }, + { url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348 }, + { url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607 }, + { url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056 }, + { url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339 }, + { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409 }, + { url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939 }, + { url = "https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270 }, + { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370 }, + { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654 }, + { url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667 }, + { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213 }, + { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718 }, + { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098 }, + { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209 }, + { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786 }, + { url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343 }, + { url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910 }, + { url = "https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816 }, + { url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584 }, + { url = 
"https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009 }, ] [[package]] name = "wcwidth" version = "0.2.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload-time = "2024-01-06T02:10:57.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" }, + { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 }, ] [[package]] @@ -6578,9 +6618,9 @@ dependencies = [ { name = "tenacity" }, { name = "wandb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/bdac08ae2fa7f660e3fb02e9f4acec5a5683509decd8fbd1ad5641160d3a/weave-0.51.54.tar.gz", hash = "sha256:41aaaa770c0ac2259325dd6035e1bf96f47fb92dbd4eec54d3ef4847587cc061", size = 425873, upload-time = "2025-06-16T21:57:47.582Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/bdac08ae2fa7f660e3fb02e9f4acec5a5683509decd8fbd1ad5641160d3a/weave-0.51.54.tar.gz", hash = "sha256:41aaaa770c0ac2259325dd6035e1bf96f47fb92dbd4eec54d3ef4847587cc061", size = 425873 } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/4d/7cee23e5bf5faab149aeb7cca367a434c4aec1fa0cb1f5a1d20149a2bf6f/weave-0.51.54-py3-none-any.whl", hash = "sha256:7de2c0da8061bc007de2f74fb3dd2496d24337dff3723f057be49fcf53e0a3a2", size = 542168, upload-time = "2025-06-16T21:57:44.929Z" }, + { url = "https://files.pythonhosted.org/packages/48/4d/7cee23e5bf5faab149aeb7cca367a434c4aec1fa0cb1f5a1d20149a2bf6f/weave-0.51.54-py3-none-any.whl", hash = "sha256:7de2c0da8061bc007de2f74fb3dd2496d24337dff3723f057be49fcf53e0a3a2", size = 542168 }, ] [[package]] @@ -6592,67 +6632,67 @@ dependencies = [ { name = "requests" }, { name = "validators" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332, upload-time = "2023-10-04T08:37:54.26Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1f/c1/3285a21d8885f2b09aabb65edb9a8e062a35c2d7175e1bb024fa096582ab/weaviate-client-3.24.2.tar.gz", hash = "sha256:6914c48c9a7e5ad0be9399271f9cb85d6f59ab77476c6d4e56a3925bf149edaa", size = 199332 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = 
"sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968, upload-time = "2023-10-04T08:37:52.511Z" }, + { url = "https://files.pythonhosted.org/packages/ab/98/3136d05f93e30cf29e1db280eaadf766df18d812dfe7994bcced653b2340/weaviate_client-3.24.2-py3-none-any.whl", hash = "sha256:bc50ca5fcebcd48de0d00f66700b0cf7c31a97c4cd3d29b4036d77c5d1d9479b", size = 107968 }, ] [[package]] name = "webencodings" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 }, ] [[package]] name = "websocket-client" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648, upload-time = "2024-04-23T22:16:16.976Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e6/30/fba0d96b4b5fbf5948ed3f4681f7da2f9f64512e1d303f94b4cc174c24a5/websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da", size = 54648 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826, upload-time = "2024-04-23T22:16:14.422Z" }, + { url = "https://files.pythonhosted.org/packages/5a/84/44687a29792a70e111c5c477230a72c4b957d88d16141199bf9acb7537a3/websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526", size = 58826 }, ] [[package]] name = "websockets" version = "15.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, - { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, - { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, - { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, - { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, - { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, - { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, - { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, - { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, - { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = 
"2025-03-05T20:02:14.585Z" }, - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 }, + { 
url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, ] [[package]] name = "webvtt-py" version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/5e/f6/7c9c964681fb148e0293e6860108d378e09ccab2218f9063fd3eb87f840a/webvtt-py-0.5.1.tar.gz", hash = "sha256:2040dd325277ddadc1e0c6cc66cbc4a1d9b6b49b24c57a0c3364374c3e8a3dc1", size = 55128, upload-time = "2024-05-30T13:40:17.189Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f6/7c9c964681fb148e0293e6860108d378e09ccab2218f9063fd3eb87f840a/webvtt-py-0.5.1.tar.gz", hash = "sha256:2040dd325277ddadc1e0c6cc66cbc4a1d9b6b49b24c57a0c3364374c3e8a3dc1", size = 55128 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/ed/aad7e0f5a462d679f7b4d2e0d8502c3096740c883b5bbed5103146480937/webvtt_py-0.5.1-py3-none-any.whl", hash = "sha256:9d517d286cfe7fc7825e9d4e2079647ce32f5678eb58e39ef544ffbb932610b7", size = 19802, upload-time = "2024-05-30T13:40:14.661Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ed/aad7e0f5a462d679f7b4d2e0d8502c3096740c883b5bbed5103146480937/webvtt_py-0.5.1-py3-none-any.whl", hash = "sha256:9d517d286cfe7fc7825e9d4e2079647ce32f5678eb58e39ef544ffbb932610b7", size = 19802 }, ] [[package]] @@ -6662,40 +6702,40 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925, upload-time = "2024-11-08T15:52:18.093Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/69/83029f1f6300c5fb2471d621ab06f6ec6b3324685a2ce0f9777fd4a8b71e/werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746", size = 806925 } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498, upload-time = "2024-11-08T15:52:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/52/24/ab44c871b0f07f491e5d2ad12c9bd7358e527510618cb1b803a88e986db1/werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e", size = 224498 }, ] [[package]] name = "wrapt" version = "1.17.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531, upload-time = "2025-01-14T10:35:45.465Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/fc/e91cc220803d7bc4db93fb02facd8461c37364151b8494762cc88b0fbcef/wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3", size = 55531 } wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308, upload-time = "2025-01-14T10:33:33.992Z" }, - { url = "https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488, upload-time = "2025-01-14T10:33:35.264Z" 
}, - { url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776, upload-time = "2025-01-14T10:33:38.28Z" }, - { url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776, upload-time = "2025-01-14T10:33:40.678Z" }, - { url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420, upload-time = "2025-01-14T10:33:41.868Z" }, - { url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199, upload-time = "2025-01-14T10:33:43.598Z" }, - { url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307, upload-time = "2025-01-14T10:33:48.499Z" }, - { url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025, upload-time = "2025-01-14T10:33:51.191Z" }, - { url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879, upload-time = "2025-01-14T10:33:52.328Z" }, - { url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419, upload-time = "2025-01-14T10:33:53.551Z" }, - { url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773, upload-time = "2025-01-14T10:33:56.323Z" }, - { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799, upload-time = "2025-01-14T10:33:57.4Z" }, - { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821, upload-time = "2025-01-14T10:33:59.334Z" }, - { url = 
"https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919, upload-time = "2025-01-14T10:34:04.093Z" }, - { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721, upload-time = "2025-01-14T10:34:07.163Z" }, - { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899, upload-time = "2025-01-14T10:34:09.82Z" }, - { url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222, upload-time = "2025-01-14T10:34:11.258Z" }, - { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707, upload-time = "2025-01-14T10:34:12.49Z" }, - { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685, upload-time = "2025-01-14T10:34:15.043Z" }, - { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567, upload-time = "2025-01-14T10:34:16.563Z" }, - { url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672, upload-time = "2025-01-14T10:34:17.727Z" }, - { url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865, upload-time = "2025-01-14T10:34:19.577Z" }, - { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594, upload-time = "2025-01-14T10:35:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/a2aab2cbc7a665efab072344a8949a71081eed1d2f451f7f7d2b966594a2/wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58", size = 53308 }, + { url = 
"https://files.pythonhosted.org/packages/50/ff/149aba8365fdacef52b31a258c4dc1c57c79759c335eff0b3316a2664a64/wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda", size = 38488 }, + { url = "https://files.pythonhosted.org/packages/65/46/5a917ce85b5c3b490d35c02bf71aedaa9f2f63f2d15d9949cc4ba56e8ba9/wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438", size = 38776 }, + { url = "https://files.pythonhosted.org/packages/ca/74/336c918d2915a4943501c77566db41d1bd6e9f4dbc317f356b9a244dfe83/wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a", size = 83776 }, + { url = "https://files.pythonhosted.org/packages/09/99/c0c844a5ccde0fe5761d4305485297f91d67cf2a1a824c5f282e661ec7ff/wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000", size = 75420 }, + { url = "https://files.pythonhosted.org/packages/b4/b0/9fc566b0fe08b282c850063591a756057c3247b2362b9286429ec5bf1721/wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6", size = 83199 }, + { url = "https://files.pythonhosted.org/packages/9d/4b/71996e62d543b0a0bd95dda485219856def3347e3e9380cc0d6cf10cfb2f/wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b", size = 82307 }, + { url = "https://files.pythonhosted.org/packages/39/35/0282c0d8789c0dc9bcc738911776c762a701f95cfe113fb8f0b40e45c2b9/wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662", size = 75025 }, + { url = "https://files.pythonhosted.org/packages/4f/6d/90c9fd2c3c6fee181feecb620d95105370198b6b98a0770cba090441a828/wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72", size = 81879 }, + { url = "https://files.pythonhosted.org/packages/8f/fa/9fb6e594f2ce03ef03eddbdb5f4f90acb1452221a5351116c7c4708ac865/wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317", size = 36419 }, + { url = "https://files.pythonhosted.org/packages/47/f8/fb1773491a253cbc123c5d5dc15c86041f746ed30416535f2a8df1f4a392/wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3", size = 38773 }, + { url = "https://files.pythonhosted.org/packages/a1/bd/ab55f849fd1f9a58ed7ea47f5559ff09741b25f00c191231f9f059c83949/wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925", size = 53799 }, + { url = "https://files.pythonhosted.org/packages/53/18/75ddc64c3f63988f5a1d7e10fb204ffe5762bc663f8023f18ecaf31a332e/wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392", size = 38821 }, + { url = "https://files.pythonhosted.org/packages/48/2a/97928387d6ed1c1ebbfd4efc4133a0633546bec8481a2dd5ec961313a1c7/wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40", size = 38919 }, + { url = "https://files.pythonhosted.org/packages/73/54/3bfe5a1febbbccb7a2f77de47b989c0b85ed3a6a41614b104204a788c20e/wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d", size = 88721 }, + { url = "https://files.pythonhosted.org/packages/25/cb/7262bc1b0300b4b64af50c2720ef958c2c1917525238d661c3e9a2b71b7b/wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b", size = 80899 }, + { url = "https://files.pythonhosted.org/packages/2a/5a/04cde32b07a7431d4ed0553a76fdb7a61270e78c5fd5a603e190ac389f14/wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98", size = 89222 }, + { url = "https://files.pythonhosted.org/packages/09/28/2e45a4f4771fcfb109e244d5dbe54259e970362a311b67a965555ba65026/wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82", size = 86707 }, + { url = "https://files.pythonhosted.org/packages/c6/d2/dcb56bf5f32fcd4bd9aacc77b50a539abdd5b6536872413fd3f428b21bed/wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae", size = 79685 }, + { url = "https://files.pythonhosted.org/packages/80/4e/eb8b353e36711347893f502ce91c770b0b0929f8f0bed2670a6856e667a9/wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9", size = 87567 }, + { url = "https://files.pythonhosted.org/packages/17/27/4fe749a54e7fae6e7146f1c7d914d28ef599dacd4416566c055564080fe2/wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9", size = 36672 }, + { url = "https://files.pythonhosted.org/packages/15/06/1dbf478ea45c03e78a6a8c4be4fdc3c3bddea5c8de8a93bc971415e47f0f/wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991", size = 38865 }, + { url = "https://files.pythonhosted.org/packages/2d/82/f56956041adef78f849db6b289b282e72b55ab8045a75abad81898c28d19/wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8", size = 23594 }, ] [[package]] @@ -6707,36 +6747,36 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4b/cf/7f825a311b11d1e0f7947a94f88adcf1d31e707c54a6d76d61a5d98604ed/xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0", size = 44824, upload-time = "2025-02-08T09:28:56.692Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4b/cf/7f825a311b11d1e0f7947a94f88adcf1d31e707c54a6d76d61a5d98604ed/xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0", size = 44824 } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/0f/fc58e062cf2f7506a33d2fe5446a1e88eb7f64914addffd7ed8b12749712/xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f", size = 25723, upload-time = 
"2025-02-08T09:28:54.046Z" }, + { url = "https://files.pythonhosted.org/packages/77/0f/fc58e062cf2f7506a33d2fe5446a1e88eb7f64914addffd7ed8b12749712/xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f", size = 25723 }, ] [[package]] name = "xlrd" version = "2.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167, upload-time = "2025-06-14T08:46:39.039Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555, upload-time = "2025-06-14T08:46:37.766Z" }, + { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555 }, ] [[package]] name = "xlsxwriter" version = "3.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306, upload-time = "2025-06-17T08:59:14.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/47/7704bac42ac6fe1710ae099b70e6a1e68ed173ef14792b647808c357da43/xlsxwriter-3.2.5.tar.gz", hash = "sha256:7e88469d607cdc920151c0ab3ce9cf1a83992d4b7bc730c5ffdd1a12115a7dbe", size = 213306 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347, upload-time = "2025-06-17T08:59:13.453Z" }, + { url = "https://files.pythonhosted.org/packages/fa/34/a22e6664211f0c8879521328000bdcae9bf6dbafa94a923e531f6d5b3f73/xlsxwriter-3.2.5-py3-none-any.whl", hash = "sha256:4f4824234e1eaf9d95df9a8fe974585ff91d0f5e3d3f12ace5b71e443c1c6abd", size = 172347 }, ] [[package]] name = "xmltodict" version = "0.14.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942, upload-time = "2024-10-16T06:10:29.683Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/05/51dcca9a9bf5e1bce52582683ce50980bcadbc4fa5143b9f2b19ab99958f/xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553", size = 51942 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = 
"sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981, upload-time = "2024-10-16T06:10:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/d6/45/fc303eb433e8a2a271739c98e953728422fa61a3c1f36077a49e395c972e/xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac", size = 9981 }, ] [[package]] @@ -6748,50 +6788,50 @@ dependencies = [ { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062, upload-time = "2024-12-01T20:35:23.292Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/9d/4b94a8e6d2b51b599516a5cb88e5bc99b4d8d4583e468057eaa29d5f0918/yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1", size = 181062 } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/93/282b5f4898d8e8efaf0790ba6d10e2245d2c9f30e199d1a85cae9356098c/yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", size = 141555, upload-time = "2024-12-01T20:33:08.819Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9c/0a49af78df099c283ca3444560f10718fadb8a18dc8b3edf8c7bd9fd7d89/yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", size = 94351, upload-time = "2024-12-01T20:33:10.609Z" }, - { url = "https://files.pythonhosted.org/packages/5a/a1/205ab51e148fdcedad189ca8dd587794c6f119882437d04c33c01a75dece/yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", size = 92286, upload-time = "2024-12-01T20:33:12.322Z" }, - { url = "https://files.pythonhosted.org/packages/ed/fe/88b690b30f3f59275fb674f5f93ddd4a3ae796c2b62e5bb9ece8a4914b83/yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", size = 340649, upload-time = "2024-12-01T20:33:13.842Z" }, - { url = "https://files.pythonhosted.org/packages/07/eb/3b65499b568e01f36e847cebdc8d7ccb51fff716dbda1ae83c3cbb8ca1c9/yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", size = 356623, upload-time = "2024-12-01T20:33:15.535Z" }, - { url = "https://files.pythonhosted.org/packages/33/46/f559dc184280b745fc76ec6b1954de2c55595f0ec0a7614238b9ebf69618/yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", size = 354007, upload-time = "2024-12-01T20:33:17.518Z" }, - { url = "https://files.pythonhosted.org/packages/af/ba/1865d85212351ad160f19fb99808acf23aab9a0f8ff31c8c9f1b4d671fc9/yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", size = 344145, upload-time = "2024-12-01T20:33:20.071Z" }, - { url = "https://files.pythonhosted.org/packages/94/cb/5c3e975d77755d7b3d5193e92056b19d83752ea2da7ab394e22260a7b824/yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", size = 336133, upload-time = "2024-12-01T20:33:22.515Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/b77d3fd249ab52a5c40859815765d35c91425b6bb82e7427ab2f78f5ff55/yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", size = 347967, upload-time = "2024-12-01T20:33:24.139Z" }, - { url = "https://files.pythonhosted.org/packages/35/bd/f6b7630ba2cc06c319c3235634c582a6ab014d52311e7d7c22f9518189b5/yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", size = 346397, upload-time = "2024-12-01T20:33:26.205Z" }, - { url = "https://files.pythonhosted.org/packages/18/1a/0b4e367d5a72d1f095318344848e93ea70da728118221f84f1bf6c1e39e7/yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", size = 350206, upload-time = "2024-12-01T20:33:27.83Z" }, - { url = "https://files.pythonhosted.org/packages/b5/cf/320fff4367341fb77809a2d8d7fe75b5d323a8e1b35710aafe41fdbf327b/yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", size = 362089, upload-time = "2024-12-01T20:33:29.565Z" }, - { url = "https://files.pythonhosted.org/packages/57/cf/aadba261d8b920253204085268bad5e8cdd86b50162fcb1b10c10834885a/yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", size = 366267, upload-time = "2024-12-01T20:33:31.449Z" }, - { url = "https://files.pythonhosted.org/packages/54/58/fb4cadd81acdee6dafe14abeb258f876e4dd410518099ae9a35c88d8097c/yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", size = 359141, upload-time = "2024-12-01T20:33:33.79Z" }, - { url = "https://files.pythonhosted.org/packages/9a/7a/4c571597589da4cd5c14ed2a0b17ac56ec9ee7ee615013f74653169e702d/yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1", size = 84402, upload-time = "2024-12-01T20:33:35.689Z" }, - { url = "https://files.pythonhosted.org/packages/ae/7b/8600250b3d89b625f1121d897062f629883c2f45339623b69b1747ec65fa/yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5", size = 91030, upload-time = "2024-12-01T20:33:37.511Z" }, - { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644, upload-time = "2024-12-01T20:33:39.204Z" }, - { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962, upload-time = "2024-12-01T20:33:40.808Z" }, - { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795, upload-time = "2024-12-01T20:33:42.322Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368, upload-time = "2024-12-01T20:33:43.956Z" }, - { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314, upload-time = "2024-12-01T20:33:46.046Z" }, - { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987, upload-time = "2024-12-01T20:33:48.352Z" }, - { url = "https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914, upload-time = "2024-12-01T20:33:50.875Z" }, - { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765, upload-time = "2024-12-01T20:33:52.641Z" }, - { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444, upload-time = "2024-12-01T20:33:54.395Z" }, - { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760, upload-time = "2024-12-01T20:33:56.286Z" }, - { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484, upload-time = "2024-12-01T20:33:58.375Z" }, - { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864, upload-time = "2024-12-01T20:34:00.22Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537, upload-time = "2024-12-01T20:34:03.54Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861, upload-time = "2024-12-01T20:34:05.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097, upload-time = "2024-12-01T20:34:07.664Z" }, - { url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399, upload-time = "2024-12-01T20:34:09.61Z" }, - { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109, upload-time = "2024-12-01T20:35:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/40/93/282b5f4898d8e8efaf0790ba6d10e2245d2c9f30e199d1a85cae9356098c/yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069", size = 141555 }, + { url = "https://files.pythonhosted.org/packages/6d/9c/0a49af78df099c283ca3444560f10718fadb8a18dc8b3edf8c7bd9fd7d89/yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193", size = 94351 }, + { url = "https://files.pythonhosted.org/packages/5a/a1/205ab51e148fdcedad189ca8dd587794c6f119882437d04c33c01a75dece/yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889", size = 92286 }, + { url = "https://files.pythonhosted.org/packages/ed/fe/88b690b30f3f59275fb674f5f93ddd4a3ae796c2b62e5bb9ece8a4914b83/yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8", size = 340649 }, + { url = "https://files.pythonhosted.org/packages/07/eb/3b65499b568e01f36e847cebdc8d7ccb51fff716dbda1ae83c3cbb8ca1c9/yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca", size = 356623 }, + { url = "https://files.pythonhosted.org/packages/33/46/f559dc184280b745fc76ec6b1954de2c55595f0ec0a7614238b9ebf69618/yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8", size = 354007 }, + { url = "https://files.pythonhosted.org/packages/af/ba/1865d85212351ad160f19fb99808acf23aab9a0f8ff31c8c9f1b4d671fc9/yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae", size = 344145 }, + { url = "https://files.pythonhosted.org/packages/94/cb/5c3e975d77755d7b3d5193e92056b19d83752ea2da7ab394e22260a7b824/yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3", size = 336133 }, + { url = "https://files.pythonhosted.org/packages/19/89/b77d3fd249ab52a5c40859815765d35c91425b6bb82e7427ab2f78f5ff55/yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb", size = 347967 }, + { url = 
"https://files.pythonhosted.org/packages/35/bd/f6b7630ba2cc06c319c3235634c582a6ab014d52311e7d7c22f9518189b5/yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e", size = 346397 }, + { url = "https://files.pythonhosted.org/packages/18/1a/0b4e367d5a72d1f095318344848e93ea70da728118221f84f1bf6c1e39e7/yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59", size = 350206 }, + { url = "https://files.pythonhosted.org/packages/b5/cf/320fff4367341fb77809a2d8d7fe75b5d323a8e1b35710aafe41fdbf327b/yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d", size = 362089 }, + { url = "https://files.pythonhosted.org/packages/57/cf/aadba261d8b920253204085268bad5e8cdd86b50162fcb1b10c10834885a/yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e", size = 366267 }, + { url = "https://files.pythonhosted.org/packages/54/58/fb4cadd81acdee6dafe14abeb258f876e4dd410518099ae9a35c88d8097c/yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a", size = 359141 }, + { url = "https://files.pythonhosted.org/packages/9a/7a/4c571597589da4cd5c14ed2a0b17ac56ec9ee7ee615013f74653169e702d/yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1", size = 84402 }, + { url = "https://files.pythonhosted.org/packages/ae/7b/8600250b3d89b625f1121d897062f629883c2f45339623b69b1747ec65fa/yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5", size = 91030 }, + { url = "https://files.pythonhosted.org/packages/33/85/bd2e2729752ff4c77338e0102914897512e92496375e079ce0150a6dc306/yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50", size = 142644 }, + { url = "https://files.pythonhosted.org/packages/ff/74/1178322cc0f10288d7eefa6e4a85d8d2e28187ccab13d5b844e8b5d7c88d/yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576", size = 94962 }, + { url = "https://files.pythonhosted.org/packages/be/75/79c6acc0261e2c2ae8a1c41cf12265e91628c8c58ae91f5ff59e29c0787f/yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640", size = 92795 }, + { url = "https://files.pythonhosted.org/packages/6b/32/927b2d67a412c31199e83fefdce6e645247b4fb164aa1ecb35a0f9eb2058/yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2", size = 332368 }, + { url = "https://files.pythonhosted.org/packages/19/e5/859fca07169d6eceeaa4fde1997c91d8abde4e9a7c018e371640c2da2b71/yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75", size = 342314 }, + { url = "https://files.pythonhosted.org/packages/08/75/76b63ccd91c9e03ab213ef27ae6add2e3400e77e5cdddf8ed2dbc36e3f21/yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512", size = 341987 }, + { url = 
"https://files.pythonhosted.org/packages/1a/e1/a097d5755d3ea8479a42856f51d97eeff7a3a7160593332d98f2709b3580/yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba", size = 336914 }, + { url = "https://files.pythonhosted.org/packages/0b/42/e1b4d0e396b7987feceebe565286c27bc085bf07d61a59508cdaf2d45e63/yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb", size = 325765 }, + { url = "https://files.pythonhosted.org/packages/7e/18/03a5834ccc9177f97ca1bbb245b93c13e58e8225276f01eedc4cc98ab820/yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272", size = 344444 }, + { url = "https://files.pythonhosted.org/packages/c8/03/a713633bdde0640b0472aa197b5b86e90fbc4c5bc05b727b714cd8a40e6d/yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6", size = 340760 }, + { url = "https://files.pythonhosted.org/packages/eb/99/f6567e3f3bbad8fd101886ea0276c68ecb86a2b58be0f64077396cd4b95e/yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e", size = 346484 }, + { url = "https://files.pythonhosted.org/packages/8e/a9/84717c896b2fc6cb15bd4eecd64e34a2f0a9fd6669e69170c73a8b46795a/yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb", size = 359864 }, + { url = "https://files.pythonhosted.org/packages/1e/2e/d0f5f1bef7ee93ed17e739ec8dbcb47794af891f7d165fa6014517b48169/yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393", size = 364537 }, + { url = "https://files.pythonhosted.org/packages/97/8a/568d07c5d4964da5b02621a517532adb8ec5ba181ad1687191fffeda0ab6/yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285", size = 357861 }, + { url = "https://files.pythonhosted.org/packages/7d/e3/924c3f64b6b3077889df9a1ece1ed8947e7b61b0a933f2ec93041990a677/yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2", size = 84097 }, + { url = "https://files.pythonhosted.org/packages/34/45/0e055320daaabfc169b21ff6174567b2c910c45617b0d79c68d7ab349b02/yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477", size = 90399 }, + { url = "https://files.pythonhosted.org/packages/f5/4b/a06e0ec3d155924f77835ed2d167ebd3b211a7b0853da1cf8d8414d784ef/yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b", size = 45109 }, ] [[package]] name = "zipp" version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = 
"sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, ] [[package]] @@ -6801,9 +6841,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/c7/31e6f40282a2c548602c177826df281177caf79efaa101dd14314fb4ee73/zope_event-5.1.tar.gz", hash = "sha256:a153660e0c228124655748e990396b9d8295d6e4f546fa1b34f3319e1c666e7f", size = 18632, upload-time = "2025-06-26T07:14:22.72Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/c7/31e6f40282a2c548602c177826df281177caf79efaa101dd14314fb4ee73/zope_event-5.1.tar.gz", hash = "sha256:a153660e0c228124655748e990396b9d8295d6e4f546fa1b34f3319e1c666e7f", size = 18632 } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/ed/d8c3f56c1edb0ee9b51461dd08580382e9589850f769b69f0dedccff5215/zope_event-5.1-py3-none-any.whl", hash = "sha256:53de8f0e9f61dc0598141ac591f49b042b6d74784dab49971b9cc91d0f73a7df", size = 6905, upload-time = "2025-06-26T07:14:21.779Z" }, + { url = "https://files.pythonhosted.org/packages/00/ed/d8c3f56c1edb0ee9b51461dd08580382e9589850f769b69f0dedccff5215/zope_event-5.1-py3-none-any.whl", hash = "sha256:53de8f0e9f61dc0598141ac591f49b042b6d74784dab49971b9cc91d0f73a7df", size = 6905 }, ] [[package]] @@ -6813,20 +6853,20 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/30/93/9210e7606be57a2dfc6277ac97dcc864fd8d39f142ca194fdc186d596fda/zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe", size = 252960, upload-time = "2024-11-28T08:45:39.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/93/9210e7606be57a2dfc6277ac97dcc864fd8d39f142ca194fdc186d596fda/zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe", size = 252960 } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/7d/2e8daf0abea7798d16a58f2f3a2bf7588872eee54ac119f99393fdd47b65/zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2", size = 208776, upload-time = "2024-11-28T08:47:53.009Z" }, - { url = "https://files.pythonhosted.org/packages/a0/2a/0c03c7170fe61d0d371e4c7ea5b62b8cb79b095b3d630ca16719bf8b7b18/zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22", size = 209296, upload-time = "2024-11-28T08:47:57.993Z" }, - { url = "https://files.pythonhosted.org/packages/49/b4/451f19448772b4a1159519033a5f72672221e623b0a1bd2b896b653943d8/zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7", size = 260997, upload-time = "2024-11-28T09:18:13.935Z" }, - { url = 
"https://files.pythonhosted.org/packages/65/94/5aa4461c10718062c8f8711161faf3249d6d3679c24a0b81dd6fc8ba1dd3/zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c", size = 255038, upload-time = "2024-11-28T08:48:26.381Z" }, - { url = "https://files.pythonhosted.org/packages/9f/aa/1a28c02815fe1ca282b54f6705b9ddba20328fabdc37b8cf73fc06b172f0/zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a", size = 259806, upload-time = "2024-11-28T08:48:30.78Z" }, - { url = "https://files.pythonhosted.org/packages/a7/2c/82028f121d27c7e68632347fe04f4a6e0466e77bb36e104c8b074f3d7d7b/zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1", size = 212305, upload-time = "2024-11-28T08:49:14.525Z" }, - { url = "https://files.pythonhosted.org/packages/68/0b/c7516bc3bad144c2496f355e35bd699443b82e9437aa02d9867653203b4a/zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7", size = 208959, upload-time = "2024-11-28T08:47:47.788Z" }, - { url = "https://files.pythonhosted.org/packages/a2/e9/1463036df1f78ff8c45a02642a7bf6931ae4a38a4acd6a8e07c128e387a7/zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465", size = 209357, upload-time = "2024-11-28T08:47:50.897Z" }, - { url = "https://files.pythonhosted.org/packages/07/a8/106ca4c2add440728e382f1b16c7d886563602487bdd90004788d45eb310/zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89", size = 264235, upload-time = "2024-11-28T09:18:15.56Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ca/57286866285f4b8a4634c12ca1957c24bdac06eae28fd4a3a578e30cf906/zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54", size = 259253, upload-time = "2024-11-28T08:48:29.025Z" }, - { url = "https://files.pythonhosted.org/packages/96/08/2103587ebc989b455cf05e858e7fbdfeedfc3373358320e9c513428290b1/zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d", size = 264702, upload-time = "2024-11-28T08:48:37.363Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c7/3c67562e03b3752ba4ab6b23355f15a58ac2d023a6ef763caaca430f91f2/zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5", size = 212466, upload-time = "2024-11-28T08:49:14.397Z" }, + { url = "https://files.pythonhosted.org/packages/98/7d/2e8daf0abea7798d16a58f2f3a2bf7588872eee54ac119f99393fdd47b65/zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2", size = 208776 }, + { url = "https://files.pythonhosted.org/packages/a0/2a/0c03c7170fe61d0d371e4c7ea5b62b8cb79b095b3d630ca16719bf8b7b18/zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22", size = 209296 }, + { url = "https://files.pythonhosted.org/packages/49/b4/451f19448772b4a1159519033a5f72672221e623b0a1bd2b896b653943d8/zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7", size = 260997 }, + { url = "https://files.pythonhosted.org/packages/65/94/5aa4461c10718062c8f8711161faf3249d6d3679c24a0b81dd6fc8ba1dd3/zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c", size = 255038 }, + { url = "https://files.pythonhosted.org/packages/9f/aa/1a28c02815fe1ca282b54f6705b9ddba20328fabdc37b8cf73fc06b172f0/zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a", size = 259806 }, + { url = "https://files.pythonhosted.org/packages/a7/2c/82028f121d27c7e68632347fe04f4a6e0466e77bb36e104c8b074f3d7d7b/zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1", size = 212305 }, + { url = "https://files.pythonhosted.org/packages/68/0b/c7516bc3bad144c2496f355e35bd699443b82e9437aa02d9867653203b4a/zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7", size = 208959 }, + { url = "https://files.pythonhosted.org/packages/a2/e9/1463036df1f78ff8c45a02642a7bf6931ae4a38a4acd6a8e07c128e387a7/zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465", size = 209357 }, + { url = "https://files.pythonhosted.org/packages/07/a8/106ca4c2add440728e382f1b16c7d886563602487bdd90004788d45eb310/zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89", size = 264235 }, + { url = "https://files.pythonhosted.org/packages/fc/ca/57286866285f4b8a4634c12ca1957c24bdac06eae28fd4a3a578e30cf906/zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54", size = 259253 }, + { url = "https://files.pythonhosted.org/packages/96/08/2103587ebc989b455cf05e858e7fbdfeedfc3373358320e9c513428290b1/zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d", size = 264702 }, + { url = "https://files.pythonhosted.org/packages/5f/c7/3c67562e03b3752ba4ab6b23355f15a58ac2d023a6ef763caaca430f91f2/zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5", size = 212466 }, ] [[package]] @@ -6836,40 +6876,40 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701, upload-time = 
"2024-07-15T00:18:06.141Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699, upload-time = "2024-07-15T00:14:04.909Z" }, - { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681, upload-time = "2024-07-15T00:14:13.99Z" }, - { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328, upload-time = "2024-07-15T00:14:16.588Z" }, - { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955, upload-time = "2024-07-15T00:14:19.389Z" }, - { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944, upload-time = "2024-07-15T00:14:22.173Z" }, - { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927, upload-time = "2024-07-15T00:14:24.825Z" }, - { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910, upload-time = "2024-07-15T00:14:26.982Z" }, - { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544, upload-time = "2024-07-15T00:14:29.582Z" }, - { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094, upload-time = "2024-07-15T00:14:40.126Z" }, - { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440, upload-time 
= "2024-07-15T00:14:42.786Z" }, - { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091, upload-time = "2024-07-15T00:14:45.184Z" }, - { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682, upload-time = "2024-07-15T00:14:47.407Z" }, - { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707, upload-time = "2024-07-15T00:15:03.529Z" }, - { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792, upload-time = "2024-07-15T00:15:28.372Z" }, - { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586, upload-time = "2024-07-15T00:15:32.26Z" }, - { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420, upload-time = "2024-07-15T00:15:34.004Z" }, - { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713, upload-time = "2024-07-15T00:15:35.815Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459, upload-time = "2024-07-15T00:15:37.995Z" }, - { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707, upload-time = "2024-07-15T00:15:39.872Z" }, - { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545, upload-time = "2024-07-15T00:15:41.75Z" }, - { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533, upload-time = "2024-07-15T00:15:44.114Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510, upload-time = "2024-07-15T00:15:46.509Z" }, - { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973, upload-time = "2024-07-15T00:15:49.939Z" }, - { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968, upload-time = "2024-07-15T00:15:52.025Z" }, - { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179, upload-time = "2024-07-15T00:15:54.971Z" }, - { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577, upload-time = "2024-07-15T00:15:57.634Z" }, - { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899, upload-time = "2024-07-15T00:16:00.811Z" }, - { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964, upload-time = "2024-07-15T00:16:03.669Z" }, - { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398, upload-time = "2024-07-15T00:16:06.694Z" }, - { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313, upload-time = "2024-07-15T00:16:09.758Z" }, - { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877, upload-time = "2024-07-15T00:16:11.758Z" }, - { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595, upload-time = "2024-07-15T00:16:13.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699 }, + { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681 }, + { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328 }, + { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955 }, + { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944 }, + { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927 }, + { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910 }, + { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544 }, + { url = "https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094 }, + { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440 }, + { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091 }, + { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682 }, + { url = 
"https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707 }, + { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792 }, + { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586 }, + { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420 }, + { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459 }, + { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707 }, + { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545 }, + { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533 }, + { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510 }, + { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973 }, + { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968 }, + { url = 
"https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179 }, + { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577 }, + { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899 }, + { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964 }, + { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398 }, + { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313 }, + { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877 }, + { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595 }, ] [package.optional-dependencies] diff --git a/dev/start-worker b/dev/start-worker index 66e446c831..a2af04c01c 100755 --- a/dev/start-worker +++ b/dev/start-worker @@ -8,4 +8,4 @@ cd "$SCRIPT_DIR/.." uv --directory api run \ celery -A app.celery worker \ - -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage + -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation diff --git a/docker/.env.example b/docker/.env.example index 743a1e8bba..96ad09ab99 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -215,6 +215,8 @@ DB_DATABASE=dify # The size of the database connection pool. # The default is 30 connections, which can be appropriately increased. SQLALCHEMY_POOL_SIZE=30 +# The default is 10 connections, which allows temporary overflow beyond the pool size. +SQLALCHEMY_MAX_OVERFLOW=10 # Database connection pool recycling time, the default is 3600 seconds. SQLALCHEMY_POOL_RECYCLE=3600 # Whether to print SQL, default is false. @@ -777,6 +779,12 @@ API_SENTRY_PROFILES_SAMPLE_RATE=1.0 # If not set, Sentry error reporting will be disabled. WEB_SENTRY_DSN= +# Plugin_daemon Service Sentry DSN address, default is empty, when empty, +# all monitoring information is not reported to Sentry. +# If not set, Sentry error reporting will be disabled. 
+PLUGIN_SENTRY_ENABLED=false +PLUGIN_SENTRY_DSN= + # ------------------------------ # Notion Integration Configuration # Variables can be obtained by applying for Notion integration: https://www.notion.so/my-integrations @@ -887,6 +895,14 @@ API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository. # API workflow node execution repository implementation API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository +# Workflow log cleanup configuration +# Enable automatic cleanup of workflow run logs to manage database size +WORKFLOW_LOG_CLEANUP_ENABLED=false +# Number of days to retain workflow run logs (default: 30 days) +WORKFLOW_LOG_RETENTION_DAYS=30 +# Batch size for workflow log cleanup operations (default: 100) +WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100 + # HTTP request node in workflow configuration HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 @@ -1240,6 +1256,10 @@ QUEUE_MONITOR_ALERT_EMAILS= # Monitor interval in minutes, default is 30 minutes QUEUE_MONITOR_INTERVAL=30 +# Swagger UI configuration +SWAGGER_UI_ENABLED=true +SWAGGER_UI_PATH=/swagger-ui.html + # Celery schedule tasks configuration ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false ENABLE_CLEAN_UNUSED_DATASETS_TASK=false diff --git a/docker/README.md b/docker/README.md index 22dfe2c91c..b5c46eb9fc 100644 --- a/docker/README.md +++ b/docker/README.md @@ -4,7 +4,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T ### What's Updated -- **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections. +- **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections.\ For more information, refer to `docker/certbot/README.md`. - **Persistent Environment Variables**: Environment variables are now managed through a `.env` file, ensuring that your configurations persist across deployments. @@ -13,43 +13,44 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T > The `.env` file is a crucial component in Docker and Docker Compose environments, serving as a centralized configuration file where you can define environment variables that are accessible to the containers at runtime. This file simplifies the management of environment settings across different stages of development, testing, and production, providing consistency and ease of configuration to deployments. - **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file. + - **Mandatory .env File**: A `.env` file is now required to run `docker compose up`. This file is crucial for configuring your deployment and for any custom settings to persist through upgrades. ### How to Deploy Dify with `docker-compose.yaml` 1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system. -2. **Environment Setup**: - - Navigate to the `docker` directory. - - Copy the `.env.example` file to a new file named `.env` by running `cp .env.example .env`. - - Customize the `.env` file as needed. 
Refer to the `.env.example` file for detailed configuration options. -3. **Running the Services**: - - Execute `docker compose up` from the `docker` directory to start the services. - - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`. -4. **SSL Certificate Setup**: - - Refer `docker/certbot/README.md` to set up SSL certificates using Certbot. -5. **OpenTelemetry Collector Setup**: +1. **Environment Setup**: + - Navigate to the `docker` directory. + - Copy the `.env.example` file to a new file named `.env` by running `cp .env.example .env`. + - Customize the `.env` file as needed. Refer to the `.env.example` file for detailed configuration options. +1. **Running the Services**: + - Execute `docker compose up` from the `docker` directory to start the services. + - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`. +1. **SSL Certificate Setup**: + - Refer to `docker/certbot/README.md` to set up SSL certificates using Certbot. +1. **OpenTelemetry Collector Setup**: - Change `ENABLE_OTEL` to `true` in `.env`. - Configure `OTLP_BASE_ENDPOINT` properly. ### How to Deploy Middleware for Developing Dify 1. **Middleware Setup**: - - Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches. - - Navigate to the `docker` directory. - - Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file). -2. **Running Middleware Services**: - - Navigate to the `docker` directory. - - Execute `docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d` to start the middleware services. (Change the profile to other vector database if you are not using weaviate) + - Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches. + - Navigate to the `docker` directory. + - Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file). +1. **Running Middleware Services**: + - Navigate to the `docker` directory. + - Execute `docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d` to start the middleware services, as shown in the sketch after this list. (Change the profile to another vector database if you are not using Weaviate) ### Migration for Existing Users For users migrating from the `docker-legacy` setup: 1. **Review Changes**: Familiarize yourself with the new `.env` configuration and Docker Compose setup. -2. **Transfer Customizations**: - - If you have customized configurations such as `docker-compose.yaml`, `ssrf_proxy/squid.conf`, or `nginx/conf.d/default.conf`, you will need to reflect these changes in the `.env` file you create. -3. **Data Migration**: - - Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary. +1. **Transfer Customizations**: - - If you have customized configurations such as `docker-compose.yaml`, `ssrf_proxy/squid.conf`, or `nginx/conf.d/default.conf`, you will need to reflect these changes in the `.env` file you create. +1. **Data Migration**: + - Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary. 
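The deployment and middleware steps above condense to a handful of commands. A minimal sketch, assuming everything is run from the `docker` directory and that `weaviate` is the chosen vector store (substitute `milvus`, `opensearch`, etc. in both `.env` and the compose profile otherwise):

```bash
# Full deployment: create the mandatory .env, customize it, then start the stack
cp .env.example .env
# Pick a vector database in .env before starting, e.g.:
#   VECTOR_STORE=weaviate
docker compose up -d

# Middleware only, for developing Dify against local databases and caches
cp middleware.env.example middleware.env
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
```
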
### Overview of `.env` @@ -64,39 +65,49 @@ For users migrating from the `docker-legacy` setup: The `.env.example` file provided in the Docker setup is extensive and covers a wide range of configuration options. It is structured into several sections, each pertaining to different aspects of the application and its services. Here are some of the key sections and variables: 1. **Common Variables**: - - `CONSOLE_API_URL`, `SERVICE_API_URL`: URLs for different API services. - - `APP_WEB_URL`: Frontend application URL. - - `FILES_URL`: Base URL for file downloads and previews. -2. **Server Configuration**: - - `LOG_LEVEL`, `DEBUG`, `FLASK_DEBUG`: Logging and debug settings. - - `SECRET_KEY`: A key for encrypting session cookies and other sensitive data. + - `CONSOLE_API_URL`, `SERVICE_API_URL`: URLs for different API services. + - `APP_WEB_URL`: Frontend application URL. + - `FILES_URL`: Base URL for file downloads and previews. -3. **Database Configuration**: - - `DB_USERNAME`, `DB_PASSWORD`, `DB_HOST`, `DB_PORT`, `DB_DATABASE`: PostgreSQL database credentials and connection details. +1. **Server Configuration**: -4. **Redis Configuration**: - - `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`: Redis server connection settings. + - `LOG_LEVEL`, `DEBUG`, `FLASK_DEBUG`: Logging and debug settings. + - `SECRET_KEY`: A key for encrypting session cookies and other sensitive data. -5. **Celery Configuration**: - - `CELERY_BROKER_URL`: Configuration for Celery message broker. +1. **Database Configuration**: -6. **Storage Configuration**: - - `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc. + - `DB_USERNAME`, `DB_PASSWORD`, `DB_HOST`, `DB_PORT`, `DB_DATABASE`: PostgreSQL database credentials and connection details. -7. **Vector Database Configuration**: - - `VECTOR_STORE`: Type of vector database (e.g., `weaviate`, `milvus`). - - Specific settings for each vector store like `WEAVIATE_ENDPOINT`, `MILVUS_URI`. +1. **Redis Configuration**: -8. **CORS Configuration**: - - `WEB_API_CORS_ALLOW_ORIGINS`, `CONSOLE_CORS_ALLOW_ORIGINS`: Settings for cross-origin resource sharing. + - `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`: Redis server connection settings. -9. **OpenTelemetry Configuration**: - - `ENABLE_OTEL`: Enable OpenTelemetry collector in api. - - `OTLP_BASE_ENDPOINT`: Endpoint for your OTLP exporter. - -10. **Other Service-Specific Environment Variables**: - - Each service like `nginx`, `redis`, `db`, and vector databases have specific environment variables that are directly referenced in the `docker-compose.yaml`. +1. **Celery Configuration**: + + - `CELERY_BROKER_URL`: Configuration for Celery message broker. + +1. **Storage Configuration**: + + - `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc. + +1. **Vector Database Configuration**: + + - `VECTOR_STORE`: Type of vector database (e.g., `weaviate`, `milvus`). + - Specific settings for each vector store like `WEAVIATE_ENDPOINT`, `MILVUS_URI`. + +1. **CORS Configuration**: + + - `WEB_API_CORS_ALLOW_ORIGINS`, `CONSOLE_CORS_ALLOW_ORIGINS`: Settings for cross-origin resource sharing. + +1. **OpenTelemetry Configuration**: + + - `ENABLE_OTEL`: Enable OpenTelemetry collector in api. + - `OTLP_BASE_ENDPOINT`: Endpoint for your OTLP exporter. + +1. 
**Other Service-Specific Environment Variables**: + + - Each service like `nginx`, `redis`, `db`, and the vector databases has specific environment variables that are directly referenced in the `docker-compose.yaml`. ### Additional Information diff --git a/docker/certbot/README.md b/docker/certbot/README.md index 21be34b33a..62b1eee395 100644 --- a/docker/certbot/README.md +++ b/docker/certbot/README.md @@ -2,12 +2,12 @@ ## Short description -docker compose certbot configurations with Backward compatibility (without certbot container). +Docker Compose Certbot configuration with backward compatibility (without the certbot container).\ Use `docker compose --profile certbot up` to use this features. ## The simplest way for launching new servers with SSL certificates -1. Get letsencrypt certs +1. Get letsencrypt certs\ set `.env` values ```properties NGINX_SSL_CERT_FILENAME=fullchain.pem @@ -25,7 +25,7 @@ Use `docker compose --profile certbot up` to use this features. ```shell docker compose exec -it certbot /bin/sh /update-cert.sh ``` -2. Edit `.env` file and `docker compose --profile certbot up` again. +1. Edit the `.env` file and run `docker compose --profile certbot up` again.\ set `.env` value additionally ```properties NGINX_HTTPS_ENABLED=true ``` ```shell docker compose --profile certbot up -d --no-deps --force-recreate nginx ``` - Then you can access your serve with HTTPS. + Then you can access your server with HTTPS.\ [https://your_domain.com](https://your_domain.com) ## SSL certificates renewal diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 04981f6b7f..0e695e4fca 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -76,7 +76,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:1.7.2 + image: langgenius/dify-web:1.8.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -212,6 +212,8 @@ services: VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-} VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-} VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-} + SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false} + SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index bcf9588dff..44f7439062 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -57,6 +57,7 @@ x-shared-env: &shared-api-worker-env DB_PORT: ${DB_PORT:-5432} DB_DATABASE: ${DB_DATABASE:-dify} SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE:-30} + SQLALCHEMY_MAX_OVERFLOW: ${SQLALCHEMY_MAX_OVERFLOW:-10} SQLALCHEMY_POOL_RECYCLE: ${SQLALCHEMY_POOL_RECYCLE:-3600} SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO:-false} SQLALCHEMY_POOL_PRE_PING: ${SQLALCHEMY_POOL_PRE_PING:-false} @@ -351,6 +352,8 @@ x-shared-env: &shared-api-worker-env API_SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} API_SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0} WEB_SENTRY_DSN: ${WEB_SENTRY_DSN:-} + PLUGIN_SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false} + PLUGIN_SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-} NOTION_INTEGRATION_TYPE: ${NOTION_INTEGRATION_TYPE:-public} NOTION_CLIENT_SECRET: ${NOTION_CLIENT_SECRET:-} NOTION_CLIENT_ID: ${NOTION_CLIENT_ID:-} @@ -396,6 +399,9 @@ x-shared-env: &shared-api-worker-env CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository} API_WORKFLOW_RUN_REPOSITORY: ${API_WORKFLOW_RUN_REPOSITORY:-repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository} API_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${API_WORKFLOW_NODE_EXECUTION_REPOSITORY:-repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository} + WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false} + WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30} + WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100} HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} @@ -562,6 +568,8 @@ x-shared-env: &shared-api-worker-env QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200} QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-} QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30} + SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-true} + SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html} ENABLE_CLEAN_EMBEDDING_CACHE_TASK: ${ENABLE_CLEAN_EMBEDDING_CACHE_TASK:-false} ENABLE_CLEAN_UNUSED_DATASETS_TASK: ${ENABLE_CLEAN_UNUSED_DATASETS_TASK:-false} ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false} @@ -574,7 +582,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -603,7 +611,7 @@ services: # worker service # The Celery worker for processing the queue. 
worker: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -630,7 +638,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.7.2 + image: langgenius/dify-api:1.8.0 restart: always environment: # Use the shared environment variables. @@ -648,7 +656,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.7.2 + image: langgenius/dify-web:1.8.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -784,6 +792,8 @@ services: VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-} VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-} VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-} + SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false} + SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/sdks/nodejs-client/README.md b/sdks/nodejs-client/README.md index 37b5ca2d0a..3a5688bcbe 100644 --- a/sdks/nodejs-client/README.md +++ b/sdks/nodejs-client/README.md @@ -1,12 +1,15 @@ # Dify Node.js SDK + This is the Node.js SDK for the Dify API, which allows you to easily integrate Dify into your Node.js applications. ## Install + ```bash npm install dify-client ``` ## Usage + After installing the SDK, you can use it in your project like this: ```js @@ -60,4 +63,5 @@ client.messageFeedback(messageId, rating, user) Replace 'your-api-key-here' with your actual Dify API key.Replace 'your-app-id-here' with your actual Dify APP ID. ## License + This SDK is released under the MIT License. diff --git a/sdks/nodejs-client/index.d.ts b/sdks/nodejs-client/index.d.ts index a8b7497f4f..3ea4b9d153 100644 --- a/sdks/nodejs-client/index.d.ts +++ b/sdks/nodejs-client/index.d.ts @@ -14,6 +14,22 @@ interface HeaderParams { interface User { } +interface DifyFileBase { + type: "image" +} + +export interface DifyRemoteFile extends DifyFileBase { + transfer_method: "remote_url" + url: string +} + +export interface DifyLocalFile extends DifyFileBase { + transfer_method: "local_file" + upload_file_id: string +} + +export type DifyFile = DifyRemoteFile | DifyLocalFile; + export declare class DifyClient { constructor(apiKey: string, baseUrl?: string); @@ -44,7 +60,7 @@ export declare class CompletionClient extends DifyClient { inputs: any, user: User, stream?: boolean, - files?: File[] | null + files?: DifyFile[] | null ): Promise; } @@ -55,7 +71,7 @@ export declare class ChatClient extends DifyClient { user: User, stream?: boolean, conversation_id?: string | null, - files?: File[] | null + files?: DifyFile[] | null ): Promise; getSuggested(message_id: string, user: User): Promise; diff --git a/sdks/php-client/README.md b/sdks/php-client/README.md index 91e77ad9ff..444b16a565 100644 --- a/sdks/php-client/README.md +++ b/sdks/php-client/README.md @@ -11,7 +11,7 @@ This is the PHP SDK for the Dify API, which allows you to easily integrate Dify If you want to try the example, you can run `composer install` in this directory. -In exist project, copy the `dify-client.php` to you project, and merge the following to your `composer.json` file, then run `composer install && composer dump-autoload` to install. Guzzle does not require 7.9, other versions have not been tested, but you can try. 
+In an existing project, copy `dify-client.php` into your project and merge the following into your `composer.json` file, then run `composer install && composer dump-autoload` to install. Only Guzzle 7.9 has been tested; you can try other versions at your own risk. ```json { diff --git a/sdks/python-client/README.md b/sdks/python-client/README.md index 7401fd2fd4..34b14b3a94 100644 --- a/sdks/python-client/README.md +++ b/sdks/python-client/README.md @@ -141,8 +141,6 @@ with open(file_path, "rb") as file: result = response.json() print(f'upload_file_id: {result.get("id")}') ``` - - - Others @@ -184,7 +182,8 @@ print('[rename result]') print(rename_conversation_response.json()) ``` -* Using the Workflow Client +- Using the Workflow Client + ```python import json import requests diff --git a/web/Dockerfile b/web/Dockerfile index d284efca87..2ea8402cd6 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -5,6 +5,9 @@ LABEL maintainer="takatost@gmail.com" # if you located in China, you can use aliyun mirror to speed up # RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories +# if you located in China, you can use taobao registry to speed up +# RUN npm config set registry https://registry.npmmirror.com + RUN apk add --no-cache tzdata RUN corepack enable ENV PNPM_HOME="/pnpm" @@ -22,9 +25,6 @@ COPY pnpm-lock.yaml . # Use packageManager from package.json RUN corepack install -# if you located in China, you can use taobao registry to speed up -# RUN pnpm install --frozen-lockfile --registry https://registry.npmmirror.com/ - RUN pnpm install --frozen-lockfile # build resources @@ -34,7 +34,7 @@ COPY --from=packages /app/web/ . COPY . . ENV NODE_OPTIONS="--max-old-space-size=4096" -RUN pnpm build +RUN pnpm build:docker # production stage diff --git a/web/README.md b/web/README.md index 3d9fd2de87..a47cfab041 100644 --- a/web/README.md +++ b/web/README.md @@ -7,6 +7,7 @@ This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next ### Run by source code Before starting the web frontend service, please make sure the following environment is ready. 
+ - [Node.js](https://nodejs.org) >= v22.11.x - [pnpm](https://pnpm.io) v10.x @@ -103,11 +104,9 @@ pnpm run test ``` If you are not familiar with writing tests, here is some code to refer to: -* [classnames.spec.ts](./utils/classnames.spec.ts) -* [index.spec.tsx](./app/components/base/button/index.spec.tsx) - - +- [classnames.spec.ts](./utils/classnames.spec.ts) +- [index.spec.tsx](./app/components/base/button/index.spec.tsx) ## Documentation diff --git a/web/app/(commonLayout)/datasets/template/template.en.mdx b/web/app/(commonLayout)/datasets/template/template.en.mdx index f1bb5d9156..0d41691dfd 100644 --- a/web/app/(commonLayout)/datasets/template/template.en.mdx +++ b/web/app/(commonLayout)/datasets/template/template.en.mdx @@ -1858,10 +1858,10 @@ ___ title="Request" tag="DELETE" label="/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}" - targetCode={`curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \\\n--header 'Authorization: Bearer {api_key}'`} + targetCode={`curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \\\n--header 'Authorization: Bearer {api_key}'`} > ```bash {{ title: 'cURL' }} - curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \ + curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \ --header 'Authorization: Bearer {api_key}' ``` diff --git a/web/app/(commonLayout)/datasets/template/template.ja.mdx b/web/app/(commonLayout)/datasets/template/template.ja.mdx index 3011cecbc1..5c7a752c11 100644 --- a/web/app/(commonLayout)/datasets/template/template.ja.mdx +++ b/web/app/(commonLayout)/datasets/template/template.ja.mdx @@ -1614,10 +1614,10 @@ ___ title="リクエスト" tag="DELETE" label="/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}" - targetCode={`curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \\\n--header 'Authorization: Bearer {api_key}'`} + targetCode={`curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \\\n--header 'Authorization: Bearer {api_key}'`} > ```bash {{ title: 'cURL' }} - curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \ + curl --location --request DELETE '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}/segments/{segment_id}/child_chunks/{child_chunk_id}' \ --header 'Authorization: Bearer {api_key}' ``` diff --git a/web/app/(commonLayout)/education-apply/page.tsx b/web/app/(commonLayout)/education-apply/page.tsx index 873034452e..5dd3c35519 100644 --- a/web/app/(commonLayout)/education-apply/page.tsx +++ b/web/app/(commonLayout)/education-apply/page.tsx @@ -13,12 +13,12 @@ import { useProviderContext } from '@/context/provider-context' export default function EducationApply() { const router = useRouter() - const { enableEducationPlan, isEducationAccount } = useProviderContext() + const { enableEducationPlan } = useProviderContext() const searchParams = useSearchParams() const token = searchParams.get('token') const 
showEducationApplyPage = useMemo(() => { - return enableEducationPlan && !isEducationAccount && token - }, [enableEducationPlan, isEducationAccount, token]) + return enableEducationPlan && token + }, [enableEducationPlan, token]) useEffect(() => { if (!showEducationApplyPage) diff --git a/web/app/account/account-page/AvatarWithEdit.tsx b/web/app/account/account-page/AvatarWithEdit.tsx index 41a6971bf5..0408d2ee34 100644 --- a/web/app/account/account-page/AvatarWithEdit.tsx +++ b/web/app/account/account-page/AvatarWithEdit.tsx @@ -4,7 +4,7 @@ import type { Area } from 'react-easy-crop' import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' -import { RiPencilLine } from '@remixicon/react' +import { RiDeleteBin5Line, RiPencilLine } from '@remixicon/react' import { updateUserProfile } from '@/service/common' import { ToastContext } from '@/app/components/base/toast' import ImageInput, { type OnImageInput } from '@/app/components/base/app-icon-picker/ImageInput' @@ -27,6 +27,10 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => { const [inputImageInfo, setInputImageInfo] = useState() const [isShowAvatarPicker, setIsShowAvatarPicker] = useState(false) const [uploading, setUploading] = useState(false) + const [isShowDeleteConfirm, setIsShowDeleteConfirm] = useState(false) + const [hoverArea, setHoverArea] = useState('left') + + const [onAvatarError, setOnAvatarError] = useState(false) const handleImageInput: OnImageInput = useCallback(async (isCropped: boolean, fileOrTempUrl: string | File, croppedAreaPixels?: Area, fileName?: string) => { setInputImageInfo( @@ -48,6 +52,18 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => { } }, [notify, onSave, t]) + const handleDeleteAvatar = useCallback(async () => { + try { + await updateUserProfile({ url: 'account/avatar', body: { avatar: '' } }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + setIsShowDeleteConfirm(false) + onSave?.() + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + } + }, [notify, onSave, t]) + const { handleLocalFileUpload } = useLocalFileUploader({ limit: 3, disabled: false, @@ -84,14 +100,31 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => { <>
- + setOnAvatarError(x)} />
{ setIsShowAvatarPicker(true) }} className="absolute inset-0 flex cursor-pointer items-center justify-center rounded-full bg-black/50 opacity-0 transition-opacity group-hover:opacity-100" + onClick={() => { + if (hoverArea === 'right' && !onAvatarError) + setIsShowDeleteConfirm(true) + else + setIsShowAvatarPicker(true) + }} + onMouseMove={(e) => { + const rect = e.currentTarget.getBoundingClientRect() + const x = e.clientX - rect.left + const isRight = x > rect.width / 2 + setHoverArea(isRight ? 'right' : 'left') + }} > - - - + {hoverArea === 'right' && !onAvatarError ? ( + + + + ) : ( + + + + )}
@@ -115,6 +148,26 @@ const AvatarWithEdit = ({ onSave, ...props }: AvatarWithEditProps) => {
+ + setIsShowDeleteConfirm(false)} + > +
{t('common.avatar.deleteTitle')}
+

{t('common.avatar.deleteDescription')}

+ +
+ + + +
+
) } diff --git a/web/app/components/app-sidebar/index.tsx b/web/app/components/app-sidebar/index.tsx index cf32339b8a..c3ff45d6a6 100644 --- a/web/app/components/app-sidebar/index.tsx +++ b/web/app/components/app-sidebar/index.tsx @@ -107,7 +107,7 @@ const AppDetailNav = ({ title, desc, isExternal, icon, icon_background, navigati )}
-
+