diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index f08befefb8..76e5c04deb 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -83,9 +83,15 @@ jobs: compose-file: | docker/docker-compose.middleware.yaml services: | + db + redis sandbox ssrf_proxy + - name: setup test config + run: | + cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env + - name: Run Workflow run: uv run --project api bash dev/pytest/pytest_workflow.sh diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml index 7d0a873ebd..912267094b 100644 --- a/.github/workflows/vdb-tests.yml +++ b/.github/workflows/vdb-tests.yml @@ -84,6 +84,12 @@ jobs: elasticsearch oceanbase + - name: setup test config + run: | + echo $(pwd) + ls -lah . + cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env + - name: Check VDB Ready (TiDB) run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py diff --git a/.gitignore b/.gitignore index 4c938b7682..8f82bea00d 100644 --- a/.gitignore +++ b/.gitignore @@ -179,6 +179,7 @@ docker/volumes/pgvecto_rs/data/* docker/volumes/couchbase/* docker/volumes/oceanbase/* docker/volumes/plugin_daemon/* +docker/volumes/matrixone/* !docker/volumes/oceanbase/init.d docker/nginx/conf.d/default.conf diff --git a/README.md b/README.md index ca09adec08..1dc7e2dd98 100644 --- a/README.md +++ b/README.md @@ -226,6 +226,15 @@ Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/) - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Using Alibaba Cloud Computing Nest + +Quickly deploy Dify to Alibaba Cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Using Alibaba Cloud Data Management + +Deploy Dify to Alibaba Cloud with one click using [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contributing For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). diff --git a/README_AR.md b/README_AR.md index df288fd33c..d93bca8646 100644 --- a/README_AR.md +++ b/README_AR.md @@ -209,6 +209,14 @@ docker compose up -d - [AWS CDK بواسطة @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### استخدام Alibaba Cloud للنشر + [بسرعة نشر Dify إلى سحابة علي بابا مع عش الحوسبة السحابية علي بابا](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### استخدام Alibaba Cloud Data Management للنشر + +انشر Dify على علي بابا كلاود بنقرة واحدة باستخدام [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## المساهمة لأولئك الذين يرغبون في المساهمة، انظر إلى [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) لدينا.
diff --git a/README_BN.md b/README_BN.md index 4a5b5f3928..3efee3684d 100644 --- a/README_BN.md +++ b/README_BN.md @@ -225,6 +225,15 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud ব্যবহার করে ডিপ্লয় + + [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management ব্যবহার করে ডিপ্লয় + + [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contributing যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)। diff --git a/README_CN.md b/README_CN.md index ba7ee0006d..21e27429ec 100644 --- a/README_CN.md +++ b/README_CN.md @@ -221,6 +221,15 @@ docker compose up -d ##### AWS - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### 使用 阿里云计算巢 部署 + +使用 [阿里云计算巢](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) 将 Dify 一键部署到 阿里云 + +#### 使用 阿里云数据管理DMS 部署 + +使用 [阿里云数据管理DMS](https://help.aliyun.com/zh/dms/dify-in-invitational-preview) 将 Dify 一键部署到 阿里云 + + ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) diff --git a/README_DE.md b/README_DE.md index f6023a3935..20c313035e 100644 --- a/README_DE.md +++ b/README_DE.md @@ -221,6 +221,15 @@ Bereitstellung von Dify auf AWS mit [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Ein-Klick-Bereitstellung von Dify in der Alibaba Cloud mit [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contributing Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. diff --git a/README_ES.md b/README_ES.md index 12f2ce8c11..e4b7df6686 100644 --- a/README_ES.md +++ b/README_ES.md @@ -221,6 +221,15 @@ Despliegue Dify en AWS usando [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Despliega Dify en Alibaba Cloud con un solo clic con [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contribuir Para aquellos que deseen contribuir con código, consulten nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 
diff --git a/README_FR.md b/README_FR.md index b106615b31..8fd17fb7c3 100644 --- a/README_FR.md +++ b/README_FR.md @@ -219,6 +219,15 @@ Déployez Dify sur AWS en utilisant [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK par @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Déployez Dify en un clic sur Alibaba Cloud avec [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contribuer Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). diff --git a/README_JA.md b/README_JA.md index 26703f3958..a3ee81e1f2 100644 --- a/README_JA.md +++ b/README_JA.md @@ -155,7 +155,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ [こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回のGPT-4呼び出しが無料で含まれています。 - **Dify Community Editionのセルフホスティング
** -この[スタートガイド](#quick-start)を使用して、ローカル環境でDifyを簡単に実行できます。 +この[スタートガイド](#クイックスタート)を使用して、ローカル環境でDifyを簡単に実行できます。 詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。 - **企業/組織向けのDify
** @@ -220,6 +220,13 @@ docker compose up -d ##### AWS - [@KevinZhaoによるAWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management +[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) を利用して、DifyをAlibaba Cloudへワンクリックでデプロイできます + + ## 貢献 コードに貢献したい方は、[Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)を参照してください。 diff --git a/README_KL.md b/README_KL.md index ea91baa5aa..3e5ab1a74f 100644 --- a/README_KL.md +++ b/README_KL.md @@ -219,6 +219,15 @@ wa'logh nIqHom neH ghun deployment toy'wI' [CDK](https://aws.amazon.com/cdk/) lo ##### AWS - [AWS CDK qachlot @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contributing For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). diff --git a/README_KR.md b/README_KR.md index 89301e8b2c..3c504900e1 100644 --- a/README_KR.md +++ b/README_KR.md @@ -213,6 +213,15 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 ##### AWS - [KevinZhao의 AWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)를 통해 원클릭으로 Dify를 Alibaba Cloud에 배포할 수 있습니다 + + ## 기여 코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. diff --git a/README_PT.md b/README_PT.md index 157772d528..fb5f3662ae 100644 --- a/README_PT.md +++ b/README_PT.md @@ -218,6 +218,15 @@ Implante o Dify na AWS usando [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Implante o Dify na Alibaba Cloud com um clique usando o [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Contribuindo Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). 
diff --git a/README_SI.md b/README_SI.md index 14de1ea792..647069a220 100644 --- a/README_SI.md +++ b/README_SI.md @@ -219,6 +219,15 @@ Uvedite Dify v AWS z uporabo [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Z enim klikom namestite Dify na Alibaba Cloud z [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Prispevam Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkrati vas prosimo, da podprete Dify tako, da ga delite na družbenih medijih ter na dogodkih in konferencah. diff --git a/README_TR.md b/README_TR.md index 563a05af3c..f52335646a 100644 --- a/README_TR.md +++ b/README_TR.md @@ -212,6 +212,15 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter ##### AWS - [AWS CDK tarafından @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) kullanarak Dify'ı tek tıkla Alibaba Cloud'a dağıtın + + ## Katkıda Bulunma Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz. diff --git a/README_TW.md b/README_TW.md index f4a76ac109..71082ff893 100644 --- a/README_TW.md +++ b/README_TW.md @@ -224,6 +224,15 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify - [由 @KevinZhao 提供的 AWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) +#### 使用 阿里云计算巢進行部署 + +[阿里云](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### 使用 阿里雲數據管理DMS 進行部署 + +透過 [阿里雲數據管理DMS](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/),一鍵將 Dify 部署至阿里雲 + + ## 貢獻 對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 diff --git a/README_VI.md b/README_VI.md index 4e1e05cbf3..58d8434fff 100644 --- a/README_VI.md +++ b/README_VI.md @@ -214,6 +214,16 @@ Triển khai Dify trên AWS bằng [CDK](https://aws.amazon.com/cdk/) ##### AWS - [AWS CDK bởi @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws) + +#### Alibaba Cloud + +[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) + +#### Alibaba Cloud Data Management + +Triển khai Dify lên Alibaba Cloud chỉ với một cú nhấp chuột bằng [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) + + ## Đóng góp Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. 
diff --git a/api/.env.example b/api/.env.example index 7878308588..baa9c382c8 100644 --- a/api/.env.example +++ b/api/.env.example @@ -137,7 +137,7 @@ WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* # Vector database configuration -# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore +# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore, matrixone VECTOR_STORE=weaviate # Weaviate configuration @@ -294,6 +294,13 @@ VIKINGDB_SCHEMA=http VIKINGDB_CONNECTION_TIMEOUT=30 VIKINGDB_SOCKET_TIMEOUT=30 +# Matrixone configuration +MATRIXONE_HOST=127.0.0.1 +MATRIXONE_PORT=6001 +MATRIXONE_USER=dump +MATRIXONE_PASSWORD=111 +MATRIXONE_DATABASE=dify + # Lindorm configuration LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070 LINDORM_USERNAME=admin @@ -332,9 +339,11 @@ PROMPT_GENERATION_MAX_TOKENS=512 CODE_GENERATION_MAX_TOKENS=1024 PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false -# Mail configuration, support: resend, smtp +# Mail configuration, support: resend, smtp, sendgrid MAIL_TYPE= +# If using SendGrid, MAIL_DEFAULT_SEND_FROM may need to be a verified sender address. MAIL_DEFAULT_SEND_FROM=no-reply +# resend configuration RESEND_API_KEY= RESEND_API_URL=https://api.resend.com # smtp configuration @@ -344,7 +353,8 @@ SMTP_USERNAME=123 SMTP_PASSWORD=abc SMTP_USE_TLS=true SMTP_OPPORTUNISTIC_TLS=false - +# SendGrid configuration +SENDGRID_API_KEY= # Sentry configuration SENTRY_DSN= diff --git a/api/.ruff.toml b/api/.ruff.toml index facb0d5419..0169613bf8 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -1,6 +1,4 @@ -exclude = [ - "migrations/*", -] +exclude = ["migrations/*"] line-length = 120 [format] @@ -9,14 +7,14 @@ quote-style = "double" [lint] preview = false select = [ - "B", # flake8-bugbear rules - "C4", # flake8-comprehensions - "E", # pycodestyle E rules - "F", # pyflakes rules - "FURB", # refurb rules - "I", # isort rules - "N", # pep8-naming - "PT", # flake8-pytest-style rules + "B", # flake8-bugbear rules + "C4", # flake8-comprehensions + "E", # pycodestyle E rules + "F", # pyflakes rules + "FURB", # refurb rules + "I", # isort rules + "N", # pep8-naming + "PT", # flake8-pytest-style rules "PLC0208", # iteration-over-set "PLC0414", # useless-import-alias "PLE0604", # invalid-all-object @@ -24,19 +22,19 @@ select = [ "PLR0402", # manual-from-import "PLR1711", # useless-return "PLR1714", # repeated-equality-comparison - "RUF013", # implicit-optional - "RUF019", # unnecessary-key-check - "RUF100", # unused-noqa - "RUF101", # redirected-noqa - "RUF200", # invalid-pyproject-toml - "RUF022", # unsorted-dunder-all - "S506", # unsafe-yaml-load - "SIM", # flake8-simplify rules - "TRY400", # error-instead-of-exception - "TRY401", # verbose-log-message - "UP", # pyupgrade rules - "W191", # tab-indentation - "W605", # invalid-escape-sequence + "RUF013", # implicit-optional + "RUF019", # unnecessary-key-check + "RUF100", # unused-noqa + "RUF101", # redirected-noqa + "RUF200", # invalid-pyproject-toml + "RUF022", # unsorted-dunder-all + "S506", # unsafe-yaml-load + "SIM", # flake8-simplify rules + "TRY400", # error-instead-of-exception + "TRY401", # verbose-log-message + "UP", # pyupgrade rules + "W191", # tab-indentation + "W605", # invalid-escape-sequence #
security related linting rules # RCE proctection (sort of) "S102", # exec-builtin, disallow use of `exec` @@ -47,36 +45,37 @@ select = [ ] ignore = [ - "E402", # module-import-not-at-top-of-file - "E711", # none-comparison - "E712", # true-false-comparison - "E721", # type-comparison - "E722", # bare-except - "F821", # undefined-name - "F841", # unused-variable + "E402", # module-import-not-at-top-of-file + "E711", # none-comparison + "E712", # true-false-comparison + "E721", # type-comparison + "E722", # bare-except + "F821", # undefined-name + "F841", # unused-variable "FURB113", # repeated-append "FURB152", # math-constant - "UP007", # non-pep604-annotation - "UP032", # f-string - "UP045", # non-pep604-annotation-optional - "B005", # strip-with-multi-characters - "B006", # mutable-argument-default - "B007", # unused-loop-control-variable - "B026", # star-arg-unpacking-after-keyword-arg - "B903", # class-as-data-structure - "B904", # raise-without-from-inside-except - "B905", # zip-without-explicit-strict - "N806", # non-lowercase-variable-in-function - "N815", # mixed-case-variable-in-class-scope - "PT011", # pytest-raises-too-broad - "SIM102", # collapsible-if - "SIM103", # needless-bool - "SIM105", # suppressible-exception - "SIM107", # return-in-try-except-finally - "SIM108", # if-else-block-instead-of-if-exp - "SIM113", # enumerate-for-loop - "SIM117", # multiple-with-statements - "SIM210", # if-expr-with-true-false + "UP007", # non-pep604-annotation + "UP032", # f-string + "UP045", # non-pep604-annotation-optional + "B005", # strip-with-multi-characters + "B006", # mutable-argument-default + "B007", # unused-loop-control-variable + "B026", # star-arg-unpacking-after-keyword-arg + "B903", # class-as-data-structure + "B904", # raise-without-from-inside-except + "B905", # zip-without-explicit-strict + "N806", # non-lowercase-variable-in-function + "N815", # mixed-case-variable-in-class-scope + "PT011", # pytest-raises-too-broad + "SIM102", # collapsible-if + "SIM103", # needless-bool + "SIM105", # suppressible-exception + "SIM107", # return-in-try-except-finally + "SIM108", # if-else-block-instead-of-if-exp + "SIM113", # enumerate-for-loop + "SIM117", # multiple-with-statements + "SIM210", # if-expr-with-true-false + "UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/ ] [lint.per-file-ignores] diff --git a/api/commands.py b/api/commands.py index 0a6cc61a68..86769847c1 100644 --- a/api/commands.py +++ b/api/commands.py @@ -281,6 +281,7 @@ def migrate_knowledge_vector_database(): VectorType.ELASTICSEARCH, VectorType.OPENGAUSS, VectorType.TABLESTORE, + VectorType.MATRIXONE, } lower_collection_vector_types = { VectorType.ANALYTICDB, diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index a3da5c1b49..df15b92c35 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -609,7 +609,7 @@ class MailConfig(BaseSettings): """ MAIL_TYPE: Optional[str] = Field( - description="Email service provider type ('smtp' or 'resend'), default to None.", + description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.", default=None, ) @@ -663,6 +663,11 @@ class MailConfig(BaseSettings): default=50, ) + SENDGRID_API_KEY: Optional[str] = Field( + description="API key for SendGrid service", + default=None, + ) + class RagEtlConfig(BaseSettings): """ diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 2dcf1710b0..60ba272ec9 100644 
--- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -24,6 +24,7 @@ from .vdb.couchbase_config import CouchbaseConfig from .vdb.elasticsearch_config import ElasticsearchConfig from .vdb.huawei_cloud_config import HuaweiCloudConfig from .vdb.lindorm_config import LindormConfig +from .vdb.matrixone_config import MatrixoneConfig from .vdb.milvus_config import MilvusConfig from .vdb.myscale_config import MyScaleConfig from .vdb.oceanbase_config import OceanBaseVectorConfig @@ -323,5 +324,6 @@ class MiddlewareConfig( OpenGaussConfig, TableStoreConfig, DatasetQueueMonitorConfig, + MatrixoneConfig, ): pass diff --git a/api/configs/middleware/vdb/matrixone_config.py b/api/configs/middleware/vdb/matrixone_config.py new file mode 100644 index 0000000000..9400612d8e --- /dev/null +++ b/api/configs/middleware/vdb/matrixone_config.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel, Field + + +class MatrixoneConfig(BaseModel): + """Matrixone vector database configuration.""" + + MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server") + MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server") + MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone") + MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone") + MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to") + MATRIXONE_METRIC: str = Field( + default="l2", description="Distance metric type for vector similarity search (cosine or l2)" + ) diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 0107df22c5..dddf71c094 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="1.4.3", + default="1.5.0", ) COMMIT_SHA: str = Field( diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index a974c63e35..dbdcdc46ce 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -63,6 +63,7 @@ from .app import ( statistic, workflow, workflow_app_log, + workflow_draft_variable, workflow_run, workflow_statistic, ) diff --git a/api/controllers/console/admin.py b/api/controllers/console/admin.py index 8cb7ad9f5b..f5257fae79 100644 --- a/api/controllers/console/admin.py +++ b/api/controllers/console/admin.py @@ -56,8 +56,7 @@ class InsertExploreAppListApi(Resource): parser.add_argument("position", type=int, required=True, nullable=False, location="json") args = parser.parse_args() - with Session(db.engine) as session: - app = session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none() + app = db.session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none() if not app: raise NotFound(f"App '{args['app_id']}' is not found") @@ -78,38 +77,38 @@ class InsertExploreAppListApi(Resource): select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]) ).scalar_one_or_none() - if not recommended_app: - recommended_app = RecommendedApp( - app_id=app.id, - description=desc, - copyright=copy_right, - privacy_policy=privacy_policy, - custom_disclaimer=custom_disclaimer, - language=args["language"], - category=args["category"], - position=args["position"], - ) + if not recommended_app: + recommended_app = RecommendedApp( + 
app_id=app.id, + description=desc, + copyright=copy_right, + privacy_policy=privacy_policy, + custom_disclaimer=custom_disclaimer, + language=args["language"], + category=args["category"], + position=args["position"], + ) - db.session.add(recommended_app) + db.session.add(recommended_app) - app.is_public = True - db.session.commit() + app.is_public = True + db.session.commit() - return {"result": "success"}, 201 - else: - recommended_app.description = desc - recommended_app.copyright = copy_right - recommended_app.privacy_policy = privacy_policy - recommended_app.custom_disclaimer = custom_disclaimer - recommended_app.language = args["language"] - recommended_app.category = args["category"] - recommended_app.position = args["position"] + return {"result": "success"}, 201 + else: + recommended_app.description = desc + recommended_app.copyright = copy_right + recommended_app.privacy_policy = privacy_policy + recommended_app.custom_disclaimer = custom_disclaimer + recommended_app.language = args["language"] + recommended_app.category = args["category"] + recommended_app.position = args["position"] - app.is_public = True + app.is_public = True - db.session.commit() + db.session.commit() - return {"result": "success"}, 200 + return {"result": "success"}, 200 class InsertExploreAppApi(Resource): diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index 14fd4679a1..2b48afd550 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -208,7 +208,7 @@ class AnnotationBatchImportApi(Resource): if len(request.files) > 1: raise TooManyFilesError() # check file type - if not file.filename or not file.filename.endswith(".csv"): + if not file.filename or not file.filename.lower().endswith(".csv"): raise ValueError("Invalid file type. 
Only CSV files are allowed") return AppAnnotationService.batch_import_app_annotations(app_id, file) diff --git a/api/controllers/console/app/app_import.py b/api/controllers/console/app/app_import.py index 5dc6515ce0..9ffb94e9f9 100644 --- a/api/controllers/console/app/app_import.py +++ b/api/controllers/console/app/app_import.py @@ -17,6 +17,8 @@ from libs.login import login_required from models import Account from models.model import App from services.app_dsl_service import AppDslService, ImportStatus +from services.enterprise.enterprise_service import EnterpriseService +from services.feature_service import FeatureService class AppImportApi(Resource): @@ -60,7 +62,9 @@ class AppImportApi(Resource): app_id=args.get("app_id"), ) session.commit() - + if result.app_id and FeatureService.get_system_features().webapp_auth.enabled: + # update web app setting as private + EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private") # Return appropriate status code based on result status = result.status if status == ImportStatus.FAILED.value: diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index cbbdd324ba..a9f088a276 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -1,5 +1,6 @@ import json import logging +from collections.abc import Sequence from typing import cast from flask import abort, request @@ -18,10 +19,12 @@ from controllers.console.app.error import ( from controllers.console.app.wraps import get_app_model from controllers.console.wraps import account_initialization_required, setup_required from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError +from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.apps.base_app_queue_manager import AppQueueManager from core.app.entities.app_invoke_entities import InvokeFrom +from core.file.models import File from extensions.ext_database import db -from factories import variable_factory +from factories import file_factory, variable_factory from fields.workflow_fields import workflow_fields, workflow_pagination_fields from fields.workflow_run_fields import workflow_run_node_execution_fields from libs import helper @@ -30,6 +33,7 @@ from libs.login import current_user, login_required from models import App from models.account import Account from models.model import AppMode +from models.workflow import Workflow from services.app_generate_service import AppGenerateService from services.errors.app import WorkflowHashNotEqualError from services.errors.llm import InvokeRateLimitError @@ -38,6 +42,24 @@ from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseE logger = logging.getLogger(__name__) +# TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing +# at the controller level rather than in the workflow logic. This would improve separation +# of concerns and make the code more maintainable. 
+def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence[File]: + files = files or [] + + file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False) + file_objs: Sequence[File] = [] + if file_extra_config is None: + return file_objs + file_objs = file_factory.build_from_mappings( + mappings=files, + tenant_id=workflow.tenant_id, + config=file_extra_config, + ) + return file_objs + + class DraftWorkflowApi(Resource): @setup_required @login_required @@ -402,15 +424,30 @@ class DraftWorkflowNodeRunApi(Resource): parser = reqparse.RequestParser() parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json") + parser.add_argument("query", type=str, required=False, location="json", default="") + parser.add_argument("files", type=list, location="json", default=[]) args = parser.parse_args() - inputs = args.get("inputs") - if inputs == None: + user_inputs = args.get("inputs") + if user_inputs is None: raise ValueError("missing inputs") + workflow_srv = WorkflowService() + # fetch draft workflow by app_model + draft_workflow = workflow_srv.get_draft_workflow(app_model=app_model) + if not draft_workflow: + raise ValueError("Workflow not initialized") + files = _parse_file(draft_workflow, args.get("files")) workflow_service = WorkflowService() + workflow_node_execution = workflow_service.run_draft_workflow_node( - app_model=app_model, node_id=node_id, user_inputs=inputs, account=current_user + app_model=app_model, + draft_workflow=draft_workflow, + node_id=node_id, + user_inputs=user_inputs, + account=current_user, + query=args.get("query", ""), + files=files, ) return workflow_node_execution @@ -731,6 +768,27 @@ class WorkflowByIdApi(Resource): return None, 204 +class DraftWorkflowNodeLastRunApi(Resource): + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + @marshal_with(workflow_run_node_execution_fields) + def get(self, app_model: App, node_id: str): + srv = WorkflowService() + workflow = srv.get_draft_workflow(app_model) + if not workflow: + raise NotFound("Workflow not found") + node_exec = srv.get_node_last_run( + app_model=app_model, + workflow=workflow, + node_id=node_id, + ) + if node_exec is None: + raise NotFound("last run not found") + return node_exec + + api.add_resource( DraftWorkflowApi, "/apps//workflows/draft", @@ -795,3 +853,7 @@ api.add_resource( WorkflowByIdApi, "/apps//workflows/", ) +api.add_resource( + DraftWorkflowNodeLastRunApi, + "/apps//workflows/draft/nodes//last-run", +) diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index b9579e2120..310146a5e7 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -34,6 +34,20 @@ class WorkflowAppLogApi(Resource): parser.add_argument( "created_at__after", type=str, location="args", help="Filter logs created after this timestamp" ) + parser.add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + parser.add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() @@ -57,6 +71,8 @@ class WorkflowAppLogApi(Resource): 
created_at_after=args.created_at__after, page=args.page, limit=args.limit, + created_by_end_user_session_id=args.created_by_end_user_session_id, + created_by_account=args.created_by_account, ) return workflow_app_log_pagination diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py new file mode 100644 index 0000000000..00d6fa3cbf --- /dev/null +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -0,0 +1,421 @@ +import logging +from typing import Any, NoReturn + +from flask import Response +from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse +from sqlalchemy.orm import Session +from werkzeug.exceptions import Forbidden + +from controllers.console import api +from controllers.console.app.error import ( + DraftWorkflowNotExist, +) +from controllers.console.app.wraps import get_app_model +from controllers.console.wraps import account_initialization_required, setup_required +from controllers.web.error import InvalidArgumentError, NotFoundError +from core.variables.segment_group import SegmentGroup +from core.variables.segments import ArrayFileSegment, FileSegment, Segment +from core.variables.types import SegmentType +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from factories.file_factory import build_from_mapping, build_from_mappings +from factories.variable_factory import build_segment_with_type +from libs.login import current_user, login_required +from models import App, AppMode, db +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService +from services.workflow_service import WorkflowService + +logger = logging.getLogger(__name__) + + +def _convert_values_to_json_serializable_object(value: Segment) -> Any: + if isinstance(value, FileSegment): + return value.value.model_dump() + elif isinstance(value, ArrayFileSegment): + return [i.model_dump() for i in value.value] + elif isinstance(value, SegmentGroup): + return [_convert_values_to_json_serializable_object(i) for i in value.value] + else: + return value.value + + +def _serialize_var_value(variable: WorkflowDraftVariable) -> Any: + value = variable.get_value() + # create a copy of the value to avoid affecting the model cache. + value = value.model_copy(deep=True) + # Refresh the url signature before returning it to client. 
+ if isinstance(value, FileSegment): + file = value.value + file.remote_url = file.generate_url() + elif isinstance(value, ArrayFileSegment): + files = value.value + for file in files: + file.remote_url = file.generate_url() + return _convert_values_to_json_serializable_object(value) + + +def _create_pagination_parser(): + parser = reqparse.RequestParser() + parser.add_argument( + "page", + type=inputs.int_range(1, 100_000), + required=False, + default=1, + location="args", + help="the page of data requested", + ) + parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + return parser + + +_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = { + "id": fields.String, + "type": fields.String(attribute=lambda model: model.get_variable_type()), + "name": fields.String, + "description": fields.String, + "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()), + "value_type": fields.String, + "edited": fields.Boolean(attribute=lambda model: model.edited), + "visible": fields.Boolean, +} + +_WORKFLOW_DRAFT_VARIABLE_FIELDS = dict( + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, + value=fields.Raw(attribute=_serialize_var_value), +) + +_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = { + "id": fields.String, + "type": fields.String(attribute=lambda _: "env"), + "name": fields.String, + "description": fields.String, + "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()), + "value_type": fields.String, + "edited": fields.Boolean(attribute=lambda model: model.edited), + "visible": fields.Boolean, +} + +_WORKFLOW_DRAFT_ENV_VARIABLE_LIST_FIELDS = { + "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS)), +} + + +def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]: + return var_list.variables + + +_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = { + "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items), + "total": fields.Raw(), +} + +_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = { + "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items), +} + + +def _api_prerequisite(f): + """Common prerequisites for all draft workflow variable APIs. + + It ensures the following conditions are satisfied: + + - Dify has been properly set up. + - The request user has logged in and initialized. + - The requested app is a workflow or a chat flow. + - The request user has the edit permission for the app.
+ """ + + @setup_required + @login_required + @account_initialization_required + @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW]) + def wrapper(*args, **kwargs): + if not current_user.is_editor: + raise Forbidden() + return f(*args, **kwargs) + + return wrapper + + +class WorkflowVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) + def get(self, app_model: App): + """ + Get draft workflow + """ + parser = _create_pagination_parser() + args = parser.parse_args() + + # fetch draft workflow by app_model + workflow_service = WorkflowService() + workflow_exist = workflow_service.is_workflow_exist(app_model=app_model) + if not workflow_exist: + raise DraftWorkflowNotExist() + + # fetch draft workflow by app_model + with Session(bind=db.engine, expire_on_commit=False) as session: + draft_var_srv = WorkflowDraftVariableService( + session=session, + ) + workflow_vars = draft_var_srv.list_variables_without_values( + app_id=app_model.id, + page=args.page, + limit=args.limit, + ) + + return workflow_vars + + @_api_prerequisite + def delete(self, app_model: App): + draft_var_srv = WorkflowDraftVariableService( + session=db.session(), + ) + draft_var_srv.delete_workflow_variables(app_model.id) + db.session.commit() + return Response("", 204) + + +def validate_node_id(node_id: str) -> NoReturn | None: + if node_id in [ + CONVERSATION_VARIABLE_NODE_ID, + SYSTEM_VARIABLE_NODE_ID, + ]: + # NOTE(QuantumGhost): While we store the system and conversation variables as node variables + # with specific `node_id` in database, we still want to make the API separated. By disallowing + # accessing system and conversation variables in `WorkflowDraftNodeVariableListApi`, + # we mitigate the risk that user of the API depending on the implementation detail of the API. 
+ # + # ref: [Hyrum's Law](https://www.hyrumslaw.com/) + + raise InvalidArgumentError( + f"invalid node_id, please use correspond api for conversation and system variables, node_id={node_id}", + ) + return None + + +class NodeVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App, node_id: str): + validate_node_id(node_id) + with Session(bind=db.engine, expire_on_commit=False) as session: + draft_var_srv = WorkflowDraftVariableService( + session=session, + ) + node_vars = draft_var_srv.list_node_variables(app_model.id, node_id) + + return node_vars + + @_api_prerequisite + def delete(self, app_model: App, node_id: str): + validate_node_id(node_id) + srv = WorkflowDraftVariableService(db.session()) + srv.delete_node_variables(app_model.id, node_id) + db.session.commit() + return Response("", 204) + + +class VariableApi(Resource): + _PATCH_NAME_FIELD = "name" + _PATCH_VALUE_FIELD = "value" + + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + def get(self, app_model: App, variable_id: str): + draft_var_srv = WorkflowDraftVariableService( + session=db.session(), + ) + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + return variable + + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + def patch(self, app_model: App, variable_id: str): + # Request payload for file types: + # + # Local File: + # + # { + # "type": "image", + # "transfer_method": "local_file", + # "url": "", + # "upload_file_id": "daded54f-72c7-4f8e-9d18-9b0abdd9f190" + # } + # + # Remote File: + # + # + # { + # "type": "image", + # "transfer_method": "remote_url", + # "url": "http://127.0.0.1:5001/files/1602650a-4fe4-423c-85a2-af76c083e3c4/file-preview?timestamp=1750041099&nonce=...&sign=...=", + # "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4" + # } + + parser = reqparse.RequestParser() + parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") + # Parse 'value' field as-is to maintain its original data structure + parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json") + + draft_var_srv = WorkflowDraftVariableService( + session=db.session(), + ) + args = parser.parse_args(strict=True) + + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + + new_name = args.get(self._PATCH_NAME_FIELD, None) + raw_value = args.get(self._PATCH_VALUE_FIELD, None) + if new_name is None and raw_value is None: + return variable + + new_value = None + if raw_value is not None: + if variable.value_type == SegmentType.FILE: + if not isinstance(raw_value, dict): + raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}") + raw_value = build_from_mapping(mapping=raw_value, tenant_id=app_model.tenant_id) + elif variable.value_type == SegmentType.ARRAY_FILE: + if not isinstance(raw_value, list): + raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}") + if len(raw_value) > 0 and not isinstance(raw_value[0], dict): + 
raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}") + raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id) + new_value = build_segment_with_type(variable.value_type, raw_value) + draft_var_srv.update_variable(variable, name=new_name, value=new_value) + db.session.commit() + return variable + + @_api_prerequisite + def delete(self, app_model: App, variable_id: str): + draft_var_srv = WorkflowDraftVariableService( + session=db.session(), + ) + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + draft_var_srv.delete_variable(variable) + db.session.commit() + return Response("", 204) + + +class VariableResetApi(Resource): + @_api_prerequisite + def put(self, app_model: App, variable_id: str): + draft_var_srv = WorkflowDraftVariableService( + session=db.session(), + ) + + workflow_srv = WorkflowService() + draft_workflow = workflow_srv.get_draft_workflow(app_model) + if draft_workflow is None: + raise NotFoundError( + f"Draft workflow not found, app_id={app_model.id}", + ) + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + + resetted = draft_var_srv.reset_variable(draft_workflow, variable) + db.session.commit() + if resetted is None: + return Response("", 204) + else: + return marshal(resetted, _WORKFLOW_DRAFT_VARIABLE_FIELDS) + + +def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList: + with Session(bind=db.engine, expire_on_commit=False) as session: + draft_var_srv = WorkflowDraftVariableService( + session=session, + ) + if node_id == CONVERSATION_VARIABLE_NODE_ID: + draft_vars = draft_var_srv.list_conversation_variables(app_model.id) + elif node_id == SYSTEM_VARIABLE_NODE_ID: + draft_vars = draft_var_srv.list_system_variables(app_model.id) + else: + draft_vars = draft_var_srv.list_node_variables(app_id=app_model.id, node_id=node_id) + return draft_vars + + +class ConversationVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App): + # NOTE(QuantumGhost): Prefill conversation variables into the draft variables table + # so their IDs can be returned to the caller. 
+ workflow_srv = WorkflowService() + draft_workflow = workflow_srv.get_draft_workflow(app_model) + if draft_workflow is None: + raise NotFoundError(description=f"draft workflow not found, id={app_model.id}") + draft_var_srv = WorkflowDraftVariableService(db.session()) + draft_var_srv.prefill_conversation_variable_default_values(draft_workflow) + db.session.commit() + return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID) + + +class SystemVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App): + return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID) + + +class EnvironmentVariableCollectionApi(Resource): + @_api_prerequisite + def get(self, app_model: App): + """ + Get draft workflow + """ + # fetch draft workflow by app_model + workflow_service = WorkflowService() + workflow = workflow_service.get_draft_workflow(app_model=app_model) + if workflow is None: + raise DraftWorkflowNotExist() + + env_vars = workflow.environment_variables + env_vars_list = [] + for v in env_vars: + env_vars_list.append( + { + "id": v.id, + "type": "env", + "name": v.name, + "description": v.description, + "selector": v.selector, + "value_type": v.value_type.value, + "value": v.value, + # Do not track edited for env vars. + "edited": False, + "visible": True, + "editable": True, + } + ) + + return {"items": env_vars_list} + + +api.add_resource( + WorkflowVariableCollectionApi, + "/apps//workflows/draft/variables", +) +api.add_resource(NodeVariableCollectionApi, "/apps//workflows/draft/nodes//variables") +api.add_resource(VariableApi, "/apps//workflows/draft/variables/") +api.add_resource(VariableResetApi, "/apps//workflows/draft/variables//reset") + +api.add_resource(ConversationVariableCollectionApi, "/apps//workflows/draft/conversation-variables") +api.add_resource(SystemVariableCollectionApi, "/apps//workflows/draft/system-variables") +api.add_resource(EnvironmentVariableCollectionApi, "/apps//workflows/draft/environment-variables") diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 9ad8c15847..03b60610aa 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -8,6 +8,15 @@ from libs.login import current_user from models import App, AppMode +def _load_app_model(app_id: str) -> Optional[App]: + app_model = ( + db.session.query(App) + .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .first() + ) + return app_model + + def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None): def decorator(view_func): @wraps(view_func) @@ -20,11 +29,7 @@ def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[ del kwargs["app_id"] - app_model = ( - db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") - .first() - ) + app_model = _load_app_model(app_id) if not app_model: raise AppNotFoundError() diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index e68273afa6..1611214cb3 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -686,6 +686,7 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.TABLESTORE | VectorType.HUAWEI_CLOUD | VectorType.TENCENT + | VectorType.MATRIXONE ): return { "retrieval_method": [ @@ -733,6 +734,7 @@ class 
DatasetRetrievalSettingMockApi(Resource): | VectorType.TABLESTORE | VectorType.TENCENT | VectorType.HUAWEI_CLOUD + | VectorType.MATRIXONE ): return { "retrieval_method": [ diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index f7c04102a9..b2fcf3ce7b 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -5,7 +5,7 @@ from typing import cast from flask import request from flask_login import current_user -from flask_restful import Resource, fields, marshal, marshal_with, reqparse +from flask_restful import Resource, marshal, marshal_with, reqparse from sqlalchemy import asc, desc, select from werkzeug.exceptions import Forbidden, NotFound @@ -43,7 +43,6 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.plugin.impl.exc import PluginDaemonClientSideError from core.rag.extractor.entity.extract_setting import ExtractSetting from extensions.ext_database import db -from extensions.ext_redis import redis_client from fields.document_fields import ( dataset_and_document_fields, document_fields, @@ -54,8 +53,6 @@ from libs.login import login_required from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile from services.dataset_service import DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig -from tasks.add_document_to_index_task import add_document_to_index_task -from tasks.remove_document_from_index_task import remove_document_from_index_task class DocumentResource(Resource): @@ -242,12 +239,10 @@ class DatasetDocumentListApi(Resource): return response - documents_and_batch_fields = {"documents": fields.List(fields.Nested(document_fields)), "batch": fields.String} - @setup_required @login_required @account_initialization_required - @marshal_with(documents_and_batch_fields) + @marshal_with(dataset_and_document_fields) @cloud_edition_billing_resource_check("vector_space") @cloud_edition_billing_rate_limit_check("knowledge") def post(self, dataset_id): @@ -293,6 +288,8 @@ class DatasetDocumentListApi(Resource): try: documents, batch = DocumentService.save_document_with_dataset_id(dataset, knowledge_config, current_user) + dataset = DatasetService.get_dataset(dataset_id) + except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) except QuotaExceededError: @@ -300,7 +297,7 @@ class DatasetDocumentListApi(Resource): except ModelCurrentlyNotSupportError: raise ProviderModelCurrentlyNotSupportError() - return {"documents": documents, "batch": batch} + return {"dataset": dataset, "documents": documents, "batch": batch} @setup_required @login_required @@ -862,77 +859,16 @@ class DocumentStatusApi(DocumentResource): DatasetService.check_dataset_permission(dataset, current_user) document_ids = request.args.getlist("document_id") - for document_id in document_ids: - document = self.get_document(dataset_id, document_id) - indexing_cache_key = "document_{}_indexing".format(document.id) - cache_result = redis_client.get(indexing_cache_key) - if cache_result is not None: - raise InvalidActionError(f"Document:{document.name} is being indexed, please try again later") + try: + DocumentService.batch_update_document_status(dataset, document_ids, action, current_user) + except services.errors.document.DocumentIndexingError as e: + raise InvalidActionError(str(e)) + except ValueError as e: + raise 
InvalidActionError(str(e)) + except NotFound as e: + raise NotFound(str(e)) - if action == "enable": - if document.enabled: - continue - document.enabled = True - document.disabled_at = None - document.disabled_by = None - document.updated_at = datetime.now(UTC).replace(tzinfo=None) - db.session.commit() - - # Set cache to prevent indexing the same document multiple times - redis_client.setex(indexing_cache_key, 600, 1) - - add_document_to_index_task.delay(document_id) - - elif action == "disable": - if not document.completed_at or document.indexing_status != "completed": - raise InvalidActionError(f"Document: {document.name} is not completed.") - if not document.enabled: - continue - - document.enabled = False - document.disabled_at = datetime.now(UTC).replace(tzinfo=None) - document.disabled_by = current_user.id - document.updated_at = datetime.now(UTC).replace(tzinfo=None) - db.session.commit() - - # Set cache to prevent indexing the same document multiple times - redis_client.setex(indexing_cache_key, 600, 1) - - remove_document_from_index_task.delay(document_id) - - elif action == "archive": - if document.archived: - continue - - document.archived = True - document.archived_at = datetime.now(UTC).replace(tzinfo=None) - document.archived_by = current_user.id - document.updated_at = datetime.now(UTC).replace(tzinfo=None) - db.session.commit() - - if document.enabled: - # Set cache to prevent indexing the same document multiple times - redis_client.setex(indexing_cache_key, 600, 1) - - remove_document_from_index_task.delay(document_id) - - elif action == "un_archive": - if not document.archived: - continue - document.archived = False - document.archived_at = None - document.archived_by = None - document.updated_at = datetime.now(UTC).replace(tzinfo=None) - db.session.commit() - - # Set cache to prevent indexing the same document multiple times - redis_client.setex(indexing_cache_key, 600, 1) - - add_document_to_index_task.delay(document_id) - - else: - raise InvalidActionError() return {"result": "success"}, 200 diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index bc37907a30..48142dbe73 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -374,7 +374,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource): if len(request.files) > 1: raise TooManyFilesError() # check file type - if not file.filename or not file.filename.endswith(".csv"): + if not file.filename or not file.filename.lower().endswith(".csv"): raise ValueError("Invalid file type. 
Only CSV files are allowed") try: diff --git a/api/controllers/console/workspace/load_balancing_config.py b/api/controllers/console/workspace/load_balancing_config.py index ba74e2c074..b4eb5e246b 100644 --- a/api/controllers/console/workspace/load_balancing_config.py +++ b/api/controllers/console/workspace/load_balancing_config.py @@ -15,7 +15,7 @@ class LoadBalancingCredentialsValidateApi(Resource): @login_required @account_initialization_required def post(self, provider: str): - if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role): + if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() tenant_id = current_user.current_tenant_id @@ -64,7 +64,7 @@ class LoadBalancingConfigCredentialsValidateApi(Resource): @login_required @account_initialization_required def post(self, provider: str, config_id: str): - if not TenantAccountRole.is_privileged_role(current_user.current_tenant.current_role): + if not TenantAccountRole.is_privileged_role(current_user.current_role): raise Forbidden() tenant_id = current_user.current_tenant_id diff --git a/api/controllers/console/workspace/members.py b/api/controllers/console/workspace/members.py index db49da7840..48225ac90d 100644 --- a/api/controllers/console/workspace/members.py +++ b/api/controllers/console/workspace/members.py @@ -85,6 +85,7 @@ class MemberInviteEmailApi(Resource): return { "result": "success", "invitation_results": invitation_results, + "tenant_id": str(current_user.current_tenant.id), }, 201 @@ -110,7 +111,7 @@ class MemberCancelInviteApi(Resource): except Exception as e: raise ValueError(str(e)) - return {"result": "success"}, 204 + return {"result": "success", "tenant_id": str(current_user.current_tenant.id)}, 200 class MemberUpdateRoleApi(Resource): diff --git a/api/controllers/service_api/app/workflow.py b/api/controllers/service_api/app/workflow.py index df52b49424..efb4acc5fb 100644 --- a/api/controllers/service_api/app/workflow.py +++ b/api/controllers/service_api/app/workflow.py @@ -135,6 +135,20 @@ class WorkflowAppLogApi(Resource): parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args") parser.add_argument("created_at__before", type=str, location="args") parser.add_argument("created_at__after", type=str, location="args") + parser.add_argument( + "created_by_end_user_session_id", + type=str, + location="args", + required=False, + default=None, + ) + parser.add_argument( + "created_by_account", + type=str, + location="args", + required=False, + default=None, + ) parser.add_argument("page", type=int_range(1, 99999), default=1, location="args") parser.add_argument("limit", type=int_range(1, 100), default=20, location="args") args = parser.parse_args() @@ -158,6 +172,8 @@ class WorkflowAppLogApi(Resource): created_at_after=args.created_at__after, page=args.page, limit=args.limit, + created_by_end_user_session_id=args.created_by_end_user_session_id, + created_by_account=args.created_by_account, ) return workflow_app_log_pagination diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index 27e8dd3fa6..839afdb9fd 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -4,8 +4,12 @@ from werkzeug.exceptions import Forbidden, NotFound import services.dataset_service from controllers.service_api import api -from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError -from 
controllers.service_api.wraps import DatasetApiResource, validate_dataset_token +from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError +from controllers.service_api.wraps import ( + DatasetApiResource, + cloud_edition_billing_rate_limit_check, + validate_dataset_token, +) from core.model_runtime.entities.model_entities import ModelType from core.plugin.entities.plugin import ModelProviderID from core.provider_manager import ProviderManager @@ -13,7 +17,7 @@ from fields.dataset_fields import dataset_detail_fields from fields.tag_fields import tag_fields from libs.login import current_user from models.dataset import Dataset, DatasetPermissionEnum -from services.dataset_service import DatasetPermissionService, DatasetService +from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService from services.entities.knowledge_entities.knowledge_entities import RetrievalModel from services.tag_service import TagService @@ -70,6 +74,7 @@ class DatasetListApi(DatasetApiResource): response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page} return response, 200 + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id): """Resource for creating datasets.""" parser = reqparse.RequestParser() @@ -193,6 +198,7 @@ class DatasetApi(DatasetApiResource): return data, 200 + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, _, dataset_id): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -293,6 +299,7 @@ class DatasetApi(DatasetApiResource): return result_data, 200 + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, _, dataset_id): """ Deletes a dataset given its ID. @@ -322,6 +329,56 @@ class DatasetApi(DatasetApiResource): raise DatasetInUseError() +class DocumentStatusApi(DatasetApiResource): + """Resource for batch document status operations.""" + + def patch(self, tenant_id, dataset_id, action): + """ + Batch update document status. + + Args: + tenant_id: tenant id + dataset_id: dataset id + action: action to perform (enable, disable, archive, un_archive) + + Returns: + dict: A dictionary with a key 'result' and a value 'success' + int: HTTP status code 200 indicating that the operation was successful. + + Raises: + NotFound: If the dataset with the given ID does not exist. + Forbidden: If the user does not have permission. + InvalidActionError: If the action is invalid or cannot be performed. 
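            Example (illustrative sketch, not part of this patch): a call to the
            endpoint registered below as /datasets/<dataset_id>/documents/status/<action>.
            The host, the /v1 prefix, the API key, and the document IDs are placeholders.

                import requests

                resp = requests.patch(
                    "https://api.dify.example/v1/datasets/<dataset-id>/documents/status/enable",
                    headers={"Authorization": "Bearer <dataset-api-key>"},
                    json={"document_ids": ["<document-id-1>", "<document-id-2>"]},
                )
                assert resp.status_code == 200 and resp.json() == {"result": "success"}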
+ """ + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + + if dataset is None: + raise NotFound("Dataset not found.") + + # Check user's permission + try: + DatasetService.check_dataset_permission(dataset, current_user) + except services.errors.account.NoPermissionError as e: + raise Forbidden(str(e)) + + # Check dataset model setting + DatasetService.check_dataset_model_setting(dataset) + + # Get document IDs from request body + data = request.get_json() + document_ids = data.get("document_ids", []) + + try: + DocumentService.batch_update_document_status(dataset, document_ids, action, current_user) + except services.errors.document.DocumentIndexingError as e: + raise InvalidActionError(str(e)) + except ValueError as e: + raise InvalidActionError(str(e)) + + return {"result": "success"}, 200 + + class DatasetTagsApi(DatasetApiResource): @validate_dataset_token @marshal_with(tag_fields) @@ -450,6 +507,7 @@ class DatasetTagsBindingStatusApi(DatasetApiResource): api.add_resource(DatasetListApi, "/datasets") api.add_resource(DatasetApi, "/datasets/") +api.add_resource(DocumentStatusApi, "/datasets//documents/status/") api.add_resource(DatasetTagsApi, "/datasets/tags") api.add_resource(DatasetTagBindingApi, "/datasets/tags/binding") api.add_resource(DatasetTagUnbindingApi, "/datasets/tags/unbinding") diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index ab7ab4dcf0..e4779f3bdf 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -19,7 +19,11 @@ from controllers.service_api.dataset.error import ( ArchivedDocumentImmutableError, DocumentIndexingError, ) -from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_resource_check +from controllers.service_api.wraps import ( + DatasetApiResource, + cloud_edition_billing_rate_limit_check, + cloud_edition_billing_resource_check, +) from core.errors.error import ProviderTokenNotInitError from extensions.ext_database import db from fields.document_fields import document_fields, document_status_fields @@ -35,6 +39,7 @@ class DocumentAddByTextApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_resource_check("documents", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): """Create document by text.""" parser = reqparse.RequestParser() @@ -99,6 +104,7 @@ class DocumentUpdateByTextApi(DatasetApiResource): """Resource for update documents.""" @cloud_edition_billing_resource_check("vector_space", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id): """Update document by text.""" parser = reqparse.RequestParser() @@ -158,6 +164,7 @@ class DocumentAddByFileApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_resource_check("documents", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): """Create document by upload file.""" args = {} @@ -232,6 +239,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): """Resource for update documents.""" @cloud_edition_billing_resource_check("vector_space", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id): """Update document by upload 
file.""" args = {} @@ -302,6 +310,7 @@ class DocumentUpdateByFileApi(DatasetApiResource): class DocumentDeleteApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, document_id): """Delete document.""" document_id = str(document_id) diff --git a/api/controllers/service_api/dataset/hit_testing.py b/api/controllers/service_api/dataset/hit_testing.py index 465f71bf03..52e9bca5da 100644 --- a/api/controllers/service_api/dataset/hit_testing.py +++ b/api/controllers/service_api/dataset/hit_testing.py @@ -1,9 +1,10 @@ from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase from controllers.service_api import api -from controllers.service_api.wraps import DatasetApiResource +from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): dataset_id_str = str(dataset_id) diff --git a/api/controllers/service_api/dataset/metadata.py b/api/controllers/service_api/dataset/metadata.py index 35582feea0..1968696ee5 100644 --- a/api/controllers/service_api/dataset/metadata.py +++ b/api/controllers/service_api/dataset/metadata.py @@ -3,7 +3,7 @@ from flask_restful import marshal, reqparse from werkzeug.exceptions import NotFound from controllers.service_api import api -from controllers.service_api.wraps import DatasetApiResource +from controllers.service_api.wraps import DatasetApiResource, cloud_edition_billing_rate_limit_check from fields.dataset_fields import dataset_metadata_fields from services.dataset_service import DatasetService from services.entities.knowledge_entities.knowledge_entities import ( @@ -14,6 +14,7 @@ from services.metadata_service import MetadataService class DatasetMetadataCreateServiceApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id): parser = reqparse.RequestParser() parser.add_argument("type", type=str, required=True, nullable=True, location="json") @@ -39,6 +40,7 @@ class DatasetMetadataCreateServiceApi(DatasetApiResource): class DatasetMetadataServiceApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id, dataset_id, metadata_id): parser = reqparse.RequestParser() parser.add_argument("name", type=str, required=True, nullable=True, location="json") @@ -54,6 +56,7 @@ class DatasetMetadataServiceApi(DatasetApiResource): metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) return marshal(metadata, dataset_metadata_fields), 200 + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, metadata_id): dataset_id_str = str(dataset_id) metadata_id_str = str(metadata_id) @@ -73,6 +76,7 @@ class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource): class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, action): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) @@ -88,6 +92,7 @@ class DatasetMetadataBuiltInFieldActionServiceApi(DatasetApiResource): class DocumentMetadataEditServiceApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, 
dataset_id): dataset_id_str = str(dataset_id) dataset = DatasetService.get_dataset(dataset_id_str) diff --git a/api/controllers/service_api/dataset/segment.py b/api/controllers/service_api/dataset/segment.py index 337752275a..403b7f0a0c 100644 --- a/api/controllers/service_api/dataset/segment.py +++ b/api/controllers/service_api/dataset/segment.py @@ -8,6 +8,7 @@ from controllers.service_api.app.error import ProviderNotInitializeError from controllers.service_api.wraps import ( DatasetApiResource, cloud_edition_billing_knowledge_limit_check, + cloud_edition_billing_rate_limit_check, cloud_edition_billing_resource_check, ) from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError @@ -35,6 +36,7 @@ class SegmentApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id): """Create single segment.""" # check dataset @@ -139,6 +141,7 @@ class SegmentApi(DatasetApiResource): class DatasetSegmentApi(DatasetApiResource): + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -162,6 +165,7 @@ class DatasetSegmentApi(DatasetApiResource): return 204 @cloud_edition_billing_resource_check("vector_space", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id, segment_id): # check dataset dataset_id = str(dataset_id) @@ -236,6 +240,7 @@ class ChildChunkApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def post(self, tenant_id, dataset_id, document_id, segment_id): """Create child chunk.""" # check dataset @@ -332,6 +337,7 @@ class DatasetChildChunkApi(DatasetApiResource): """Resource for updating child chunks.""" @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def delete(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id): """Delete child chunk.""" # check dataset @@ -370,6 +376,7 @@ class DatasetChildChunkApi(DatasetApiResource): @cloud_edition_billing_resource_check("vector_space", "dataset") @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset") + @cloud_edition_billing_rate_limit_check("knowledge", "dataset") def patch(self, tenant_id, dataset_id, document_id, segment_id, child_chunk_id): """Update child chunk.""" # check dataset diff --git a/api/controllers/web/error.py b/api/controllers/web/error.py index 4371e679db..036e11d5c5 100644 --- a/api/controllers/web/error.py +++ b/api/controllers/web/error.py @@ -139,3 +139,13 @@ class InvokeRateLimitError(BaseHTTPException): error_code = "rate_limit_error" description = "Rate Limit Error" code = 429 + + +class NotFoundError(BaseHTTPException): + error_code = "not_found" + code = 404 + + +class InvalidArgumentError(BaseHTTPException): + error_code = "invalid_param" + code = 400 diff --git a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py index 20189053f4..a5492d70bd 100644 --- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py +++ 
b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py @@ -138,15 +138,12 @@ class DatasetConfigManager: if not config.get("dataset_configs"): config["dataset_configs"] = {"retrieval_model": "single"} + if not isinstance(config["dataset_configs"], dict): + raise ValueError("dataset_configs must be of object type") + if not config["dataset_configs"].get("datasets"): config["dataset_configs"]["datasets"] = {"strategy": "router", "datasets": []} - if not isinstance(config["dataset_configs"], dict): - raise ValueError("dataset_configs must be of object type") - - if not isinstance(config["dataset_configs"], dict): - raise ValueError("dataset_configs must be of object type") - need_manual_query_datasets = config.get("dataset_configs") and config["dataset_configs"].get( "datasets", {} ).get("datasets") diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 3f31b1c3d5..75bd2f677a 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -104,6 +104,7 @@ class VariableEntity(BaseModel): Variable Entity. """ + # `variable` records the name of the variable in user inputs. variable: str label: str description: str = "" diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 9e6adc4b08..61de9ec670 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -29,13 +29,14 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader from extensions.ext_database import db from factories import file_factory from libs.flask_utils import preserve_flask_contexts from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom from models.enums import WorkflowRunTriggeredFrom from services.conversation_service import ConversationService -from services.errors.message import MessageNotExistsError +from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService logger = logging.getLogger(__name__) @@ -116,6 +117,11 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): ) # parse files + # TODO(QuantumGhost): Move file parsing logic to the API controller layer + # for better separation of concerns. + # + # For implementation reference, see the `_parse_file` function and + # `DraftWorkflowNodeRunApi` class which handle this properly. 
files = args["files"] if args.get("files") else [] file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False) if file_extra_config: @@ -261,6 +267,13 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): app_id=application_generate_entity.app_config.app_id, triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, ) + var_loader = DraftVarLoader( + engine=db.engine, + app_id=application_generate_entity.app_config.app_id, + tenant_id=application_generate_entity.app_config.tenant_id, + ) + draft_var_srv = WorkflowDraftVariableService(db.session()) + draft_var_srv.prefill_conversation_variable_default_values(workflow) return self._generate( workflow=workflow, @@ -271,6 +284,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow_node_execution_repository=workflow_node_execution_repository, conversation=None, stream=streaming, + variable_loader=var_loader, ) def single_loop_generate( @@ -336,6 +350,13 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): app_id=application_generate_entity.app_config.app_id, triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, ) + var_loader = DraftVarLoader( + engine=db.engine, + app_id=application_generate_entity.app_config.app_id, + tenant_id=application_generate_entity.app_config.tenant_id, + ) + draft_var_srv = WorkflowDraftVariableService(db.session()) + draft_var_srv.prefill_conversation_variable_default_values(workflow) return self._generate( workflow=workflow, @@ -346,6 +367,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow_node_execution_repository=workflow_node_execution_repository, conversation=None, stream=streaming, + variable_loader=var_loader, ) def _generate( @@ -359,6 +381,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): workflow_node_execution_repository: WorkflowNodeExecutionRepository, conversation: Optional[Conversation] = None, stream: bool = True, + variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]: """ Generate App response. @@ -367,6 +390,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): :param user: account or end user :param invoke_from: invoke from source :param application_generate_entity: application generate entity + :param workflow_execution_repository: repository for workflow execution :param workflow_node_execution_repository: repository for workflow node execution :param conversation: conversation :param stream: is stream @@ -409,6 +433,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): "conversation_id": conversation.id, "message_id": message.id, "context": context, + "variable_loader": variable_loader, }, ) @@ -437,6 +462,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation_id: str, message_id: str, context: contextvars.Context, + variable_loader: VariableLoader, ) -> None: """ Generate worker in a new thread. 
@@ -453,8 +479,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): # get conversation and message conversation = self._get_conversation(conversation_id) message = self._get_message(message_id) - if message is None: - raise MessageNotExistsError("Message not exists") # chatbot app runner = AdvancedChatAppRunner( @@ -463,6 +487,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation=conversation, message=message, dialogue_count=self._dialogue_count, + variable_loader=variable_loader, ) runner.run() diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index d9b3833862..840a3c9d3b 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -19,6 +19,7 @@ from core.moderation.base import ModerationError from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey +from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from models.enums import UserFrom @@ -40,14 +41,17 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): conversation: Conversation, message: Message, dialogue_count: int, + variable_loader: VariableLoader, ) -> None: - super().__init__(queue_manager) - + super().__init__(queue_manager, variable_loader) self.application_generate_entity = application_generate_entity self.conversation = conversation self.message = message self._dialogue_count = dialogue_count + def _get_app_id(self) -> str: + return self.application_generate_entity.app_config.app_id + def run(self) -> None: app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index a448bf8a94..edea6199d3 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -26,7 +26,6 @@ from factories import file_factory from libs.flask_utils import preserve_flask_contexts from models import Account, App, EndUser from services.conversation_service import ConversationService -from services.errors.message import MessageNotExistsError logger = logging.getLogger(__name__) @@ -124,6 +123,11 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): override_model_config_dict["retriever_resource"] = {"enabled": True} # parse files + # TODO(QuantumGhost): Move file parsing logic to the API controller layer + # for better separation of concerns. + # + # For implementation reference, see the `_parse_file` function and + # `DraftWorkflowNodeRunApi` class which handle this properly. 
files = args.get("files") or [] file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict()) if file_extra_config: @@ -233,8 +237,6 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): # get conversation and message conversation = self._get_conversation(conversation_id) message = self._get_message(message_id) - if message is None: - raise MessageNotExistsError("Message not exists") # chatbot app runner = AgentChatAppRunner() diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index a1329cb938..a28c106ce9 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -25,7 +25,6 @@ from factories import file_factory from models.account import Account from models.model import App, EndUser from services.conversation_service import ConversationService -from services.errors.message import MessageNotExistsError logger = logging.getLogger(__name__) @@ -115,6 +114,11 @@ class ChatAppGenerator(MessageBasedAppGenerator): override_model_config_dict["retriever_resource"] = {"enabled": True} # parse files + # TODO(QuantumGhost): Move file parsing logic to the API controller layer + # for better separation of concerns. + # + # For implementation reference, see the `_parse_file` function and + # `DraftWorkflowNodeRunApi` class which handle this properly. files = args["files"] if args.get("files") else [] file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict()) if file_extra_config: @@ -219,8 +223,6 @@ class ChatAppGenerator(MessageBasedAppGenerator): # get conversation and message conversation = self._get_conversation(conversation_id) message = self._get_message(message_id) - if message is None: - raise MessageNotExistsError("Message not exists") # chatbot app runner = ChatAppRunner() diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py index 6f524a5872..cd1d298ca2 100644 --- a/api/core/app/apps/common/workflow_response_converter.py +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -48,6 +48,7 @@ from core.workflow.entities.workflow_execution import WorkflowExecution from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus from core.workflow.nodes import NodeType from core.workflow.nodes.tool.entities import ToolNodeData +from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from models import ( Account, CreatorUserRole, @@ -125,7 +126,7 @@ class WorkflowResponseConverter: id=workflow_execution.id_, workflow_id=workflow_execution.workflow_id, status=workflow_execution.status, - outputs=workflow_execution.outputs, + outputs=WorkflowRuntimeTypeConverter().to_json_encodable(workflow_execution.outputs), error=workflow_execution.error_message, elapsed_time=workflow_execution.elapsed_time, total_tokens=workflow_execution.total_tokens, @@ -202,6 +203,8 @@ class WorkflowResponseConverter: if not workflow_node_execution.finished_at: return None + json_converter = WorkflowRuntimeTypeConverter() + return NodeFinishStreamResponse( task_id=task_id, workflow_run_id=workflow_node_execution.workflow_execution_id, @@ -214,7 +217,7 @@ class WorkflowResponseConverter: predecessor_node_id=workflow_node_execution.predecessor_node_id, inputs=workflow_node_execution.inputs, process_data=workflow_node_execution.process_data, - outputs=workflow_node_execution.outputs, + 
outputs=json_converter.to_json_encodable(workflow_node_execution.outputs), status=workflow_node_execution.status, error=workflow_node_execution.error, elapsed_time=workflow_node_execution.elapsed_time, @@ -245,6 +248,8 @@ class WorkflowResponseConverter: if not workflow_node_execution.finished_at: return None + json_converter = WorkflowRuntimeTypeConverter() + return NodeRetryStreamResponse( task_id=task_id, workflow_run_id=workflow_node_execution.workflow_execution_id, @@ -257,7 +262,7 @@ class WorkflowResponseConverter: predecessor_node_id=workflow_node_execution.predecessor_node_id, inputs=workflow_node_execution.inputs, process_data=workflow_node_execution.process_data, - outputs=workflow_node_execution.outputs, + outputs=json_converter.to_json_encodable(workflow_node_execution.outputs), status=workflow_node_execution.status, error=workflow_node_execution.error, elapsed_time=workflow_node_execution.elapsed_time, @@ -376,6 +381,7 @@ class WorkflowResponseConverter: workflow_execution_id: str, event: QueueIterationCompletedEvent, ) -> IterationNodeCompletedStreamResponse: + json_converter = WorkflowRuntimeTypeConverter() return IterationNodeCompletedStreamResponse( task_id=task_id, workflow_run_id=workflow_execution_id, @@ -384,7 +390,7 @@ class WorkflowResponseConverter: node_id=event.node_id, node_type=event.node_type.value, title=event.node_data.title, - outputs=event.outputs, + outputs=json_converter.to_json_encodable(event.outputs), created_at=int(time.time()), extras={}, inputs=event.inputs or {}, @@ -463,7 +469,7 @@ class WorkflowResponseConverter: node_id=event.node_id, node_type=event.node_type.value, title=event.node_data.title, - outputs=event.outputs, + outputs=WorkflowRuntimeTypeConverter().to_json_encodable(event.outputs), created_at=int(time.time()), extras={}, inputs=event.inputs or {}, diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index adcbaad3ec..966a6f1d66 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -101,6 +101,11 @@ class CompletionAppGenerator(MessageBasedAppGenerator): ) # parse files + # TODO(QuantumGhost): Move file parsing logic to the API controller layer + # for better separation of concerns. + # + # For implementation reference, see the `_parse_file` function and + # `DraftWorkflowNodeRunApi` class which handle this properly. 
files = args["files"] if args.get("files") else [] file_extra_config = FileUploadConfigManager.convert(override_model_config_dict or app_model_config.to_dict()) if file_extra_config: @@ -196,8 +201,6 @@ class CompletionAppGenerator(MessageBasedAppGenerator): try: # get message message = self._get_message(message_id) - if message is None: - raise MessageNotExistsError() # chatbot app runner = CompletionAppRunner() diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 58b94f4d43..e84d59209d 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -29,6 +29,7 @@ from models.enums import CreatorUserRole from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile from services.errors.app_model_config import AppModelConfigBrokenError from services.errors.conversation import ConversationNotExistsError +from services.errors.message import MessageNotExistsError logger = logging.getLogger(__name__) @@ -251,7 +252,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): return introduction or "" - def _get_conversation(self, conversation_id: str): + def _get_conversation(self, conversation_id: str) -> Conversation: """ Get conversation by conversation id :param conversation_id: conversation id @@ -260,11 +261,11 @@ class MessageBasedAppGenerator(BaseAppGenerator): conversation = db.session.query(Conversation).filter(Conversation.id == conversation_id).first() if not conversation: - raise ConversationNotExistsError() + raise ConversationNotExistsError("Conversation not exists") return conversation - def _get_message(self, message_id: str) -> Optional[Message]: + def _get_message(self, message_id: str) -> Message: """ Get message by message id :param message_id: message id @@ -272,4 +273,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): """ message = db.session.query(Message).filter(Message.id == message_id).first() + if message is None: + raise MessageNotExistsError("Message not exists") + return message diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 7f4770fc97..369fa0e48c 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -27,11 +27,13 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository +from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader from extensions.ext_database import db from factories import file_factory from libs.flask_utils import preserve_flask_contexts from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom from models.enums import WorkflowRunTriggeredFrom +from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService logger = logging.getLogger(__name__) @@ -94,6 +96,11 @@ class WorkflowAppGenerator(BaseAppGenerator): files: Sequence[Mapping[str, Any]] = args.get("files") or [] # parse files + # TODO(QuantumGhost): Move file parsing logic to the API controller layer + # for better separation of concerns. 
+ # + # For implementation reference, see the `_parse_file` function and + # `DraftWorkflowNodeRunApi` class which handle this properly. file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False) system_files = file_factory.build_from_mappings( mappings=files, @@ -186,6 +193,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow_node_execution_repository: WorkflowNodeExecutionRepository, streaming: bool = True, workflow_thread_pool_id: Optional[str] = None, + variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, ) -> Union[Mapping[str, Any], Generator[str | Mapping[str, Any], None, None]]: """ Generate App response. @@ -195,6 +203,7 @@ class WorkflowAppGenerator(BaseAppGenerator): :param user: account or end user :param application_generate_entity: application generate entity :param invoke_from: invoke from source + :param workflow_execution_repository: repository for workflow execution :param workflow_node_execution_repository: repository for workflow node execution :param streaming: is stream :param workflow_thread_pool_id: workflow thread pool id @@ -218,6 +227,7 @@ class WorkflowAppGenerator(BaseAppGenerator): "queue_manager": queue_manager, "context": context, "workflow_thread_pool_id": workflow_thread_pool_id, + "variable_loader": variable_loader, }, ) @@ -302,6 +312,13 @@ class WorkflowAppGenerator(BaseAppGenerator): app_id=application_generate_entity.app_config.app_id, triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, ) + draft_var_srv = WorkflowDraftVariableService(db.session()) + draft_var_srv.prefill_conversation_variable_default_values(workflow) + var_loader = DraftVarLoader( + engine=db.engine, + app_id=application_generate_entity.app_config.app_id, + tenant_id=application_generate_entity.app_config.tenant_id, + ) return self._generate( app_model=app_model, @@ -312,6 +329,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, + variable_loader=var_loader, ) def single_loop_generate( @@ -378,7 +396,13 @@ class WorkflowAppGenerator(BaseAppGenerator): app_id=application_generate_entity.app_config.app_id, triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP, ) - + draft_var_srv = WorkflowDraftVariableService(db.session()) + draft_var_srv.prefill_conversation_variable_default_values(workflow) + var_loader = DraftVarLoader( + engine=db.engine, + app_id=application_generate_entity.app_config.app_id, + tenant_id=application_generate_entity.app_config.tenant_id, + ) return self._generate( app_model=app_model, workflow=workflow, @@ -388,6 +412,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, + variable_loader=var_loader, ) def _generate_worker( @@ -396,6 +421,7 @@ class WorkflowAppGenerator(BaseAppGenerator): application_generate_entity: WorkflowAppGenerateEntity, queue_manager: AppQueueManager, context: contextvars.Context, + variable_loader: VariableLoader, workflow_thread_pool_id: Optional[str] = None, ) -> None: """ @@ -414,6 +440,7 @@ class WorkflowAppGenerator(BaseAppGenerator): application_generate_entity=application_generate_entity, queue_manager=queue_manager, workflow_thread_pool_id=workflow_thread_pool_id, + variable_loader=variable_loader, ) runner.run() diff --git a/api/core/app/apps/workflow/app_runner.py 
b/api/core/app/apps/workflow/app_runner.py index b59e34e222..07aeb57fa3 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -12,6 +12,7 @@ from core.app.entities.app_invoke_entities import ( from core.workflow.callbacks import WorkflowCallback, WorkflowLoggingCallback from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey +from core.workflow.variable_loader import VariableLoader from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from models.enums import UserFrom @@ -30,6 +31,7 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): self, application_generate_entity: WorkflowAppGenerateEntity, queue_manager: AppQueueManager, + variable_loader: VariableLoader, workflow_thread_pool_id: Optional[str] = None, ) -> None: """ @@ -37,10 +39,13 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): :param queue_manager: application queue manager :param workflow_thread_pool_id: workflow thread pool id """ + super().__init__(queue_manager, variable_loader) self.application_generate_entity = application_generate_entity - self.queue_manager = queue_manager self.workflow_thread_pool_id = workflow_thread_pool_id + def _get_app_id(self) -> str: + return self.application_generate_entity.app_config.app_id + def run(self) -> None: """ Run application diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index facc24b4ca..dc6c381e86 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,6 +1,8 @@ from collections.abc import Mapping from typing import Any, Optional, cast +from sqlalchemy.orm import Session + from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from core.app.apps.base_app_runner import AppRunner from core.app.entities.queue_entities import ( @@ -33,6 +35,7 @@ from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey from core.workflow.graph_engine.entities.event import ( AgentLogEvent, + BaseNodeEvent, GraphEngineEvent, GraphRunFailedEvent, GraphRunPartialSucceededEvent, @@ -62,15 +65,23 @@ from core.workflow.graph_engine.entities.event import ( from core.workflow.graph_engine.entities.graph import Graph from core.workflow.nodes import NodeType from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from models.model import App from models.workflow import Workflow +from services.workflow_draft_variable_service import ( + DraftVariableSaver, +) class WorkflowBasedAppRunner(AppRunner): - def __init__(self, queue_manager: AppQueueManager): + def __init__(self, queue_manager: AppQueueManager, variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER) -> None: self.queue_manager = queue_manager + self._variable_loader = variable_loader + + def _get_app_id(self) -> str: + raise NotImplementedError("not implemented") def _init_graph(self, graph_config: Mapping[str, Any]) -> Graph: """ @@ -173,6 +184,13 @@ class WorkflowBasedAppRunner(AppRunner): except NotImplementedError: variable_mapping = {} + load_into_variable_pool( + variable_loader=self._variable_loader, + variable_pool=variable_pool, + variable_mapping=variable_mapping, + 
user_inputs=user_inputs, + ) + WorkflowEntry.mapping_user_inputs_to_variable_pool( variable_mapping=variable_mapping, user_inputs=user_inputs, @@ -262,6 +280,12 @@ class WorkflowBasedAppRunner(AppRunner): ) except NotImplementedError: variable_mapping = {} + load_into_variable_pool( + self._variable_loader, + variable_pool=variable_pool, + variable_mapping=variable_mapping, + user_inputs=user_inputs, + ) WorkflowEntry.mapping_user_inputs_to_variable_pool( variable_mapping=variable_mapping, @@ -376,6 +400,8 @@ class WorkflowBasedAppRunner(AppRunner): in_loop_id=event.in_loop_id, ) ) + self._save_draft_var_for_event(event) + elif isinstance(event, NodeRunFailedEvent): self._publish_event( QueueNodeFailedEvent( @@ -438,6 +464,8 @@ class WorkflowBasedAppRunner(AppRunner): in_loop_id=event.in_loop_id, ) ) + self._save_draft_var_for_event(event) + elif isinstance(event, NodeInIterationFailedEvent): self._publish_event( QueueNodeInIterationFailedEvent( @@ -690,3 +718,30 @@ class WorkflowBasedAppRunner(AppRunner): def _publish_event(self, event: AppQueueEvent) -> None: self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER) + + def _save_draft_var_for_event(self, event: BaseNodeEvent): + run_result = event.route_node_state.node_run_result + if run_result is None: + return + process_data = run_result.process_data + outputs = run_result.outputs + with Session(bind=db.engine) as session, session.begin(): + draft_var_saver = DraftVariableSaver( + session=session, + app_id=self._get_app_id(), + node_id=event.node_id, + node_type=event.node_type, + # FIXME(QuantumGhost): rely on private state of queue_manager is not ideal. + invoke_from=self.queue_manager._invoke_from, + node_execution_id=event.id, + enclosing_node_id=event.in_loop_id or event.in_iteration_id or None, + ) + draft_var_saver.save(process_data=process_data, outputs=outputs) + + +def _remove_first_element_from_variable_string(key: str) -> str: + """ + Remove the first element from the prefix. + """ + prefix, remaining = key.split(".", maxsplit=1) + return remaining diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index c0d99693b0..65ed267959 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -17,9 +17,24 @@ class InvokeFrom(Enum): Invoke From. """ + # SERVICE_API indicates that this invocation is from an API call to Dify app. + # + # Description of service api in Dify docs: + # https://docs.dify.ai/en/guides/application-publishing/developing-with-apis SERVICE_API = "service-api" + + # WEB_APP indicates that this invocation is from + # the web app of the workflow (or chatflow). + # + # Description of web app in Dify docs: + # https://docs.dify.ai/en/guides/application-publishing/launch-your-webapp-quickly/README WEB_APP = "web-app" + + # EXPLORE indicates that this invocation is from + # the workflow (or chatflow) explore page. EXPLORE = "explore" + # DEBUGGER indicates that this invocation is from + # the workflow (or chatflow) edit page. DEBUGGER = "debugger" @classmethod diff --git a/api/core/file/constants.py b/api/core/file/constants.py index ce1d238e93..0665ed7e0d 100644 --- a/api/core/file/constants.py +++ b/api/core/file/constants.py @@ -1 +1,11 @@ +from typing import Any + +# TODO(QuantumGhost): Refactor variable type identification. Instead of directly +# comparing `dify_model_identity` with constants throughout the codebase, extract +# this logic into a dedicated function. 
This would encapsulate the implementation +# details of how different variable types are identified. FILE_MODEL_IDENTITY = "__dify__file__" + + +def maybe_file_object(o: Any) -> bool: + return isinstance(o, dict) and o.get("dify_model_identity") == FILE_MODEL_IDENTITY diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index 848d897779..f2fe306179 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -534,7 +534,7 @@ class IndexingRunner: # chunk nodes by chunk size indexing_start_at = time.perf_counter() tokens = 0 - if dataset_document.doc_form != IndexType.PARENT_CHILD_INDEX: + if dataset_document.doc_form != IndexType.PARENT_CHILD_INDEX and dataset.indexing_technique == "economy": # create keyword index create_keyword_thread = threading.Thread( target=self._process_keyword_index, @@ -572,7 +572,7 @@ class IndexingRunner: for future in futures: tokens += future.result() - if dataset_document.doc_form != IndexType.PARENT_CHILD_INDEX: + if dataset_document.doc_form != IndexType.PARENT_CHILD_INDEX and dataset.indexing_technique == "economy": create_keyword_thread.join() indexing_end_at = time.perf_counter() diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 995a30d44c..4886ffe244 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -542,8 +542,6 @@ class LBModelManager: return config - return None - def cooldown(self, config: ModelLoadBalancingConfiguration, expire: int = 60) -> None: """ Cooldown model load balancing config diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index e0dfe0c312..a98904102c 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -251,7 +251,7 @@ class OpsTraceManager: provider_config_map[tracing_provider]["trace_instance"], provider_config_map[tracing_provider]["config_class"], ) - decrypt_trace_config_key = str(decrypt_trace_config) + decrypt_trace_config_key = json.dumps(decrypt_trace_config, sort_keys=True) tracing_instance = cls.ops_trace_instances_cache.get(decrypt_trace_config_key) if tracing_instance is None: # create new tracing_instance and update the cache if it absent diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index e9275c31cc..e0d2857e97 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -156,9 +156,23 @@ class PluginInstallTaskStartResponse(BaseModel): task_id: str = Field(description="The ID of the install task.") -class PluginUploadResponse(BaseModel): +class PluginVerification(BaseModel): + """ + Verification of the plugin. 
+ """ + + class AuthorizedCategory(StrEnum): + Langgenius = "langgenius" + Partner = "partner" + Community = "community" + + authorized_category: AuthorizedCategory = Field(description="The authorized category of the plugin.") + + +class PluginDecodeResponse(BaseModel): unique_identifier: str = Field(description="The unique identifier of the plugin.") manifest: PluginDeclaration + verification: Optional[PluginVerification] = Field(default=None, description="Basic verification information") class PluginOAuthAuthorizationUrlResponse(BaseModel): diff --git a/api/core/plugin/impl/oauth.py b/api/core/plugin/impl/oauth.py index 91774984c8..b006bf1d4b 100644 --- a/api/core/plugin/impl/oauth.py +++ b/api/core/plugin/impl/oauth.py @@ -1,3 +1,4 @@ +import binascii from collections.abc import Mapping from typing import Any @@ -16,7 +17,7 @@ class OAuthHandler(BasePluginClient): provider: str, system_credentials: Mapping[str, Any], ) -> PluginOAuthAuthorizationUrlResponse: - return self._request_with_plugin_daemon_response( + response = self._request_with_plugin_daemon_response_stream( "POST", f"plugin/{tenant_id}/dispatch/oauth/get_authorization_url", PluginOAuthAuthorizationUrlResponse, @@ -32,6 +33,9 @@ class OAuthHandler(BasePluginClient): "Content-Type": "application/json", }, ) + for resp in response: + return resp + raise ValueError("No response received from plugin daemon for authorization URL request.") def get_credentials( self, @@ -49,7 +53,7 @@ class OAuthHandler(BasePluginClient): # encode request to raw http request raw_request_bytes = self._convert_request_to_raw_data(request) - return self._request_with_plugin_daemon_response( + response = self._request_with_plugin_daemon_response_stream( "POST", f"plugin/{tenant_id}/dispatch/oauth/get_credentials", PluginOAuthCredentialsResponse, @@ -58,7 +62,8 @@ class OAuthHandler(BasePluginClient): "data": { "provider": provider, "system_credentials": system_credentials, - "raw_request_bytes": raw_request_bytes, + # for json serialization + "raw_http_request": binascii.hexlify(raw_request_bytes).decode(), }, }, headers={ @@ -66,6 +71,9 @@ class OAuthHandler(BasePluginClient): "Content-Type": "application/json", }, ) + for resp in response: + return resp + raise ValueError("No response received from plugin daemon for authorization URL request.") def _convert_request_to_raw_data(self, request: Request) -> bytes: """ @@ -79,7 +87,7 @@ class OAuthHandler(BasePluginClient): """ # Start with the request line method = request.method - path = request.path + path = request.full_path protocol = request.headers.get("HTTP_VERSION", "HTTP/1.1") raw_data = f"{method} {path} {protocol}\r\n".encode() diff --git a/api/core/plugin/impl/plugin.py b/api/core/plugin/impl/plugin.py index 1cd2dc1be7..b7f7b31655 100644 --- a/api/core/plugin/impl/plugin.py +++ b/api/core/plugin/impl/plugin.py @@ -10,10 +10,10 @@ from core.plugin.entities.plugin import ( PluginInstallationSource, ) from core.plugin.entities.plugin_daemon import ( + PluginDecodeResponse, PluginInstallTask, PluginInstallTaskStartResponse, PluginListResponse, - PluginUploadResponse, ) from core.plugin.impl.base import BasePluginClient @@ -53,7 +53,7 @@ class PluginInstaller(BasePluginClient): tenant_id: str, pkg: bytes, verify_signature: bool = False, - ) -> PluginUploadResponse: + ) -> PluginDecodeResponse: """ Upload a plugin package and return the plugin unique identifier. 
""" @@ -68,7 +68,7 @@ class PluginInstaller(BasePluginClient): return self._request_with_plugin_daemon_response( "POST", f"plugin/{tenant_id}/management/install/upload/package", - PluginUploadResponse, + PluginDecodeResponse, files=body, data=data, ) @@ -176,6 +176,18 @@ class PluginInstaller(BasePluginClient): params={"plugin_unique_identifier": plugin_unique_identifier}, ) + def decode_plugin_from_identifier(self, tenant_id: str, plugin_unique_identifier: str) -> PluginDecodeResponse: + """ + Decode a plugin from an identifier. + """ + return self._request_with_plugin_daemon_response( + "GET", + f"plugin/{tenant_id}/management/decode/from_identifier", + PluginDecodeResponse, + data={"plugin_unique_identifier": plugin_unique_identifier}, + headers={"Content-Type": "application/json"}, + ) + def fetch_plugin_installation_by_ids( self, tenant_id: str, plugin_ids: Sequence[str] ) -> Sequence[PluginInstallation]: diff --git a/api/core/rag/datasource/vdb/matrixone/__init__.py b/api/core/rag/datasource/vdb/matrixone/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py new file mode 100644 index 0000000000..4894957382 --- /dev/null +++ b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py @@ -0,0 +1,233 @@ +import json +import logging +import uuid +from functools import wraps +from typing import Any, Optional + +from mo_vector.client import MoVectorClient # type: ignore +from pydantic import BaseModel, model_validator + +from configs import dify_config +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.embedding.embedding_base import Embeddings +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + +logger = logging.getLogger(__name__) + + +class MatrixoneConfig(BaseModel): + host: str = "localhost" + port: int = 6001 + user: str = "dump" + password: str = "111" + database: str = "dify" + metric: str = "l2" + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict) -> dict: + if not values["host"]: + raise ValueError("config host is required") + if not values["port"]: + raise ValueError("config port is required") + if not values["user"]: + raise ValueError("config user is required") + if not values["password"]: + raise ValueError("config password is required") + if not values["database"]: + raise ValueError("config database is required") + return values + + +def ensure_client(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + if self.client is None: + self.client = self._get_client(None, False) + return func(self, *args, **kwargs) + + return wrapper + + +class MatrixoneVector(BaseVector): + """ + Matrixone vector storage implementation. 
+ """ + + def __init__(self, collection_name: str, config: MatrixoneConfig): + super().__init__(collection_name) + self.config = config + self.collection_name = collection_name.lower() + self.client = None + + @property + def collection_name(self): + return self._collection_name + + @collection_name.setter + def collection_name(self, value): + self._collection_name = value + + def get_type(self) -> str: + return VectorType.MATRIXONE + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + if self.client is None: + self.client = self._get_client(len(embeddings[0]), True) + return self.add_texts(texts, embeddings) + + def _get_client(self, dimension: Optional[int] = None, create_table: bool = False) -> MoVectorClient: + """ + Create a new client for the collection. + + The collection will be created if it doesn't exist. + """ + lock_name = f"vector_indexing_lock_{self._collection_name}" + with redis_client.lock(lock_name, timeout=20): + client = MoVectorClient( + connection_string=f"mysql+pymysql://{self.config.user}:{self.config.password}@{self.config.host}:{self.config.port}/{self.config.database}", + table_name=self.collection_name, + vector_dimension=dimension, + create_table=create_table, + ) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(collection_exist_cache_key): + return client + try: + client.create_full_text_index() + except Exception as e: + logger.exception("Failed to create full text index") + redis_client.set(collection_exist_cache_key, 1, ex=3600) + return client + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + if self.client is None: + self.client = self._get_client(len(embeddings[0]), True) + assert self.client is not None + ids = [] + for _, doc in enumerate(documents): + if doc.metadata is not None: + doc_id = doc.metadata.get("doc_id", str(uuid.uuid4())) + ids.append(doc_id) + self.client.insert( + texts=[doc.page_content for doc in documents], + embeddings=embeddings, + metadatas=[doc.metadata for doc in documents], + ids=ids, + ) + return ids + + @ensure_client + def text_exists(self, id: str) -> bool: + assert self.client is not None + result = self.client.get(ids=[id]) + return len(result) > 0 + + @ensure_client + def delete_by_ids(self, ids: list[str]) -> None: + assert self.client is not None + if not ids: + return + self.client.delete(ids=ids) + + @ensure_client + def get_ids_by_metadata_field(self, key: str, value: str): + assert self.client is not None + results = self.client.query_by_metadata(filter={key: value}) + return [result.id for result in results] + + @ensure_client + def delete_by_metadata_field(self, key: str, value: str) -> None: + assert self.client is not None + self.client.delete(filter={key: value}) + + @ensure_client + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + assert self.client is not None + top_k = kwargs.get("top_k", 5) + document_ids_filter = kwargs.get("document_ids_filter") + filter = None + if document_ids_filter: + filter = {"document_id": {"$in": document_ids_filter}} + + results = self.client.query( + query_vector=query_vector, + k=top_k, + filter=filter, + ) + + docs = [] + # TODO: add the score threshold to the query + for result in results: + metadata = result.metadata + docs.append( + Document( + page_content=result.document, + metadata=metadata, + ) + ) + return docs + + @ensure_client + def search_by_full_text(self, query: str, **kwargs: Any) -> 
list[Document]: + assert self.client is not None + top_k = kwargs.get("top_k", 5) + document_ids_filter = kwargs.get("document_ids_filter") + filter = None + if document_ids_filter: + filter = {"document_id": {"$in": document_ids_filter}} + score_threshold = float(kwargs.get("score_threshold", 0.0)) + + results = self.client.full_text_query( + keywords=[query], + k=top_k, + filter=filter, + ) + + docs = [] + for result in results: + metadata = result.metadata + if isinstance(metadata, str): + import json + + metadata = json.loads(metadata) + score = 1 - result.distance + if score >= score_threshold: + metadata["score"] = score + docs.append( + Document( + page_content=result.document, + metadata=metadata, + ) + ) + return docs + + @ensure_client + def delete(self) -> None: + assert self.client is not None + self.client.delete() + + +class MatrixoneVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> MatrixoneVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.MATRIXONE, collection_name)) + + config = MatrixoneConfig( + host=dify_config.MATRIXONE_HOST or "localhost", + port=dify_config.MATRIXONE_PORT or 6001, + user=dify_config.MATRIXONE_USER or "dump", + password=dify_config.MATRIXONE_PASSWORD or "111", + database=dify_config.MATRIXONE_DATABASE or "dify", + metric=dify_config.MATRIXONE_METRIC or "l2", + ) + return MatrixoneVector(collection_name=collection_name, config=config) diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index 66e002312a..67a4a515b1 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -164,6 +164,10 @@ class Vector: from core.rag.datasource.vdb.huawei.huawei_cloud_vector import HuaweiCloudVectorFactory return HuaweiCloudVectorFactory + case VectorType.MATRIXONE: + from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneVectorFactory + + return MatrixoneVectorFactory case _: raise ValueError(f"Vector store {vector_type} is not supported.") diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index 7a81565e37..0d70947b72 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -29,3 +29,4 @@ class VectorType(StrEnum): OPENGAUSS = "opengauss" TABLESTORE = "tablestore" HUAWEI_CLOUD = "huawei_cloud" + MATRIXONE = "matrixone" diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index c6cf0d2b27..7a8efb4068 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -45,7 +45,8 @@ class WeaviateVector(BaseVector): # by changing the connection timeout to pypi.org from 1 second to 0.001 seconds. # TODO: This can be removed once weaviate-client is updated to 3.26.7 or higher, # which does not contain the deprecation check. 
- weaviate.connect.connection.PYPI_TIMEOUT = 0.001 + if hasattr(weaviate.connect.connection, "PYPI_TIMEOUT"): + weaviate.connect.connection.PYPI_TIMEOUT = 0.001 try: client = weaviate.Client( diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index 836a1398bf..83a4ac651f 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -22,6 +22,7 @@ class FirecrawlApp: "formats": ["markdown"], "onlyMainContent": True, "timeout": 30000, + "integration": "dify", } if params: json_data.update(params) @@ -39,7 +40,7 @@ class FirecrawlApp: def crawl_url(self, url, params=None) -> str: # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/crawl-post headers = self._prepare_headers() - json_data = {"url": url} + json_data = {"url": url, "integration": "dify"} if params: json_data.update(params) response = self._post_request(f"{self.base_url}/v1/crawl", json_data, headers) @@ -49,7 +50,6 @@ class FirecrawlApp: return cast(str, job_id) else: self._handle_error(response, "start crawl job") - # FIXME: unreachable code for mypy return "" # unreachable def check_crawl_status(self, job_id) -> dict[str, Any]: @@ -82,7 +82,6 @@ class FirecrawlApp: ) else: self._handle_error(response, "check crawl status") - # FIXME: unreachable code for mypy return {} # unreachable def _format_crawl_status_response( @@ -126,4 +125,31 @@ class FirecrawlApp: def _handle_error(self, response, action) -> None: error_message = response.json().get("error", "Unknown error occurred") - raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}") + raise Exception(f"Failed to {action}. Status code: {response.status_code}. Error: {error_message}") # type: ignore[return] + + def search(self, query: str, params: dict[str, Any] | None = None) -> dict[str, Any]: + # Documentation: https://docs.firecrawl.dev/api-reference/endpoint/search + headers = self._prepare_headers() + json_data = { + "query": query, + "limit": 5, + "lang": "en", + "country": "us", + "timeout": 60000, + "ignoreInvalidURLs": False, + "scrapeOptions": {}, + "integration": "dify", + } + if params: + json_data.update(params) + response = self._post_request(f"{self.base_url}/v1/search", json_data, headers) + if response.status_code == 200: + response_data = response.json() + if not response_data.get("success"): + raise Exception(f"Search failed. Error: {response_data.get('warning', 'Unknown error')}") + return cast(dict[str, Any], response_data) + elif response.status_code in {402, 409, 500, 429, 408}: + self._handle_error(response, "perform search") + return {} # Avoid additional exception after handling error + else: + raise Exception(f"Failed to perform search. 
Status code: {response.status_code}") diff --git a/api/core/rag/extractor/markdown_extractor.py b/api/core/rag/extractor/markdown_extractor.py index 849852ac23..c97765b1dc 100644 --- a/api/core/rag/extractor/markdown_extractor.py +++ b/api/core/rag/extractor/markdown_extractor.py @@ -68,22 +68,17 @@ class MarkdownExtractor(BaseExtractor): continue header_match = re.match(r"^#+\s", line) if header_match: - if current_header is not None: - markdown_tups.append((current_header, current_text)) - + markdown_tups.append((current_header, current_text)) current_header = line current_text = "" else: current_text += line + "\n" markdown_tups.append((current_header, current_text)) - if current_header is not None: - # pass linting, assert keys are defined - markdown_tups = [ - (re.sub(r"#", "", cast(str, key)).strip(), re.sub(r"<.*?>", "", value)) for key, value in markdown_tups - ] - else: - markdown_tups = [(key, re.sub("\n", "", value)) for key, value in markdown_tups] + markdown_tups = [ + (re.sub(r"#", "", cast(str, key)).strip() if key else None, re.sub(r"<.*?>", "", value)) + for key, value in markdown_tups + ] return markdown_tups diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py index dca84b9041..9b90bd2bb3 100644 --- a/api/core/rag/index_processor/processor/paragraph_index_processor.py +++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py @@ -76,6 +76,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): if dataset.indexing_technique == "high_quality": vector = Vector(dataset) vector.create(documents) + with_keywords = False if with_keywords: keywords_list = kwargs.get("keywords_list") keyword = Keyword(dataset) @@ -91,6 +92,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): vector.delete_by_ids(node_ids) else: vector.delete() + with_keywords = False if with_keywords: keyword = Keyword(dataset) if node_ids: diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index e778b2cec4..75f3153697 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -104,7 +104,7 @@ class QAIndexProcessor(BaseIndexProcessor): def format_by_template(self, file: FileStorage, **kwargs) -> list[Document]: # check file type - if not file.filename or not file.filename.endswith(".csv"): + if not file.filename or not file.filename.lower().endswith(".csv"): raise ValueError("Invalid file type. 
Only CSV files are allowed") try: diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 6978860529..38c0b540d5 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -496,6 +496,8 @@ class DatasetRetrieval: all_documents = self.calculate_keyword_score(query, all_documents, top_k) elif index_type == "high_quality": all_documents = self.calculate_vector_score(all_documents, top_k, score_threshold) + else: + all_documents = all_documents[:top_k] if top_k else all_documents self._on_query(query, dataset_ids, app_id, user_from, user_id) diff --git a/api/core/repositories/sqlalchemy_workflow_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_execution_repository.py index e5ead9dc56..cdec92aee7 100644 --- a/api/core/repositories/sqlalchemy_workflow_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_execution_repository.py @@ -6,7 +6,7 @@ import json import logging from typing import Optional, Union -from sqlalchemy import func, select +from sqlalchemy import select from sqlalchemy.engine import Engine from sqlalchemy.orm import sessionmaker @@ -16,6 +16,7 @@ from core.workflow.entities.workflow_execution import ( WorkflowType, ) from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository +from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from models import ( Account, CreatorUserRole, @@ -146,26 +147,17 @@ class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository): db_model.workflow_id = domain_model.workflow_id db_model.triggered_from = self._triggered_from - # Check if this is a new record - with self._session_factory() as session: - existing = session.scalar(select(WorkflowRun).where(WorkflowRun.id == domain_model.id_)) - if not existing: - # For new records, get the next sequence number - stmt = select(func.max(WorkflowRun.sequence_number)).where( - WorkflowRun.app_id == self._app_id, - WorkflowRun.tenant_id == self._tenant_id, - ) - max_sequence = session.scalar(stmt) - db_model.sequence_number = (max_sequence or 0) + 1 - else: - # For updates, keep the existing sequence number - db_model.sequence_number = existing.sequence_number + # No sequence number generation needed anymore db_model.type = domain_model.workflow_type db_model.version = domain_model.workflow_version db_model.graph = json.dumps(domain_model.graph) if domain_model.graph else None db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None - db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None + db_model.outputs = ( + json.dumps(WorkflowRuntimeTypeConverter().to_json_encodable(domain_model.outputs)) + if domain_model.outputs + else None + ) db_model.status = domain_model.status db_model.error = domain_model.error_message if domain_model.error_message else None db_model.total_tokens = domain_model.total_tokens diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index 2f27442616..797cce9354 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -19,6 +19,7 @@ from core.workflow.entities.workflow_node_execution import ( ) from core.workflow.nodes.enums import NodeType from core.workflow.repositories.workflow_node_execution_repository import 
OrderConfig, WorkflowNodeExecutionRepository +from core.workflow.workflow_type_encoder import WorkflowRuntimeTypeConverter from models import ( Account, CreatorUserRole, @@ -146,6 +147,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) if not self._creator_user_role: raise ValueError("created_by_role is required in repository constructor") + json_converter = WorkflowRuntimeTypeConverter() db_model = WorkflowNodeExecutionModel() db_model.id = domain_model.id db_model.tenant_id = self._tenant_id @@ -160,9 +162,17 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) db_model.node_id = domain_model.node_id db_model.node_type = domain_model.node_type db_model.title = domain_model.title - db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None - db_model.process_data = json.dumps(domain_model.process_data) if domain_model.process_data else None - db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None + db_model.inputs = ( + json.dumps(json_converter.to_json_encodable(domain_model.inputs)) if domain_model.inputs else None + ) + db_model.process_data = ( + json.dumps(json_converter.to_json_encodable(domain_model.process_data)) + if domain_model.process_data + else None + ) + db_model.outputs = ( + json.dumps(json_converter.to_json_encodable(domain_model.outputs)) if domain_model.outputs else None + ) db_model.status = domain_model.status db_model.error = domain_model.error db_model.elapsed_time = domain_model.elapsed_time diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py index 64ba16c367..6cf09e0372 100644 --- a/api/core/variables/segments.py +++ b/api/core/variables/segments.py @@ -75,6 +75,20 @@ class StringSegment(Segment): class FloatSegment(Segment): value_type: SegmentType = SegmentType.NUMBER value: float + # NOTE(QuantumGhost): seems that the equality for FloatSegment with `NaN` value has some problems. + # The following tests cannot pass. 
+ # + # def test_float_segment_and_nan(): + # nan = float("nan") + # assert nan != nan + # + # f1 = FloatSegment(value=float("nan")) + # f2 = FloatSegment(value=float("nan")) + # assert f1 != f2 + # + # f3 = FloatSegment(value=nan) + # f4 = FloatSegment(value=nan) + # assert f3 != f4 class IntegerSegment(Segment): diff --git a/api/core/variables/types.py b/api/core/variables/types.py index 4387e9693e..68d3d82883 100644 --- a/api/core/variables/types.py +++ b/api/core/variables/types.py @@ -18,3 +18,17 @@ class SegmentType(StrEnum): NONE = "none" GROUP = "group" + + def is_array_type(self): + return self in _ARRAY_TYPES + + +_ARRAY_TYPES = frozenset( + [ + SegmentType.ARRAY_ANY, + SegmentType.ARRAY_STRING, + SegmentType.ARRAY_NUMBER, + SegmentType.ARRAY_OBJECT, + SegmentType.ARRAY_FILE, + ] +) diff --git a/api/core/variables/utils.py b/api/core/variables/utils.py index e5d222af7d..692db3502e 100644 --- a/api/core/variables/utils.py +++ b/api/core/variables/utils.py @@ -1,8 +1,26 @@ +import json from collections.abc import Iterable, Sequence +from .segment_group import SegmentGroup +from .segments import ArrayFileSegment, FileSegment, Segment + def to_selector(node_id: str, name: str, paths: Iterable[str] = ()) -> Sequence[str]: selectors = [node_id, name] if paths: selectors.extend(paths) return selectors + + +class SegmentJSONEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, ArrayFileSegment): + return [v.model_dump() for v in o.value] + elif isinstance(o, FileSegment): + return o.value.model_dump() + elif isinstance(o, SegmentGroup): + return [self.default(seg) for seg in o.value] + elif isinstance(o, Segment): + return o.value + else: + super().default(o) diff --git a/api/core/workflow/conversation_variable_updater.py b/api/core/workflow/conversation_variable_updater.py new file mode 100644 index 0000000000..84e99bb582 --- /dev/null +++ b/api/core/workflow/conversation_variable_updater.py @@ -0,0 +1,39 @@ +import abc +from typing import Protocol + +from core.variables import Variable + + +class ConversationVariableUpdater(Protocol): + """ + ConversationVariableUpdater defines an abstraction for updating conversation variable values. + + It is intended for use by `v1.VariableAssignerNode` and `v2.VariableAssignerNode` when updating + conversation variables. + + Implementations may choose to batch updates. If batching is used, the `flush` method + should be implemented to persist buffered changes, and `update` + should handle buffering accordingly. + + Note: Since implementations may buffer updates, instances of ConversationVariableUpdater + are not thread-safe. Each VariableAssignerNode should create its own instance during execution. + """ + + @abc.abstractmethod + def update(self, conversation_id: str, variable: "Variable") -> None: + """ + Updates the value of the specified conversation variable in the underlying storage. + + :param conversation_id: The ID of the conversation to update. Typically references `ConversationVariable.id`. + :param variable: The `Variable` instance containing the updated value. + """ + pass + + @abc.abstractmethod + def flush(self): + """ + Flushes all pending updates to the underlying storage system. + + If the implementation does not buffer updates, this method can be a no-op. 
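For concreteness, a minimal sketch of a buffering implementation of this protocol; the persist callback is hypothetical and stands in for a real storage backend such as the SQLAlchemy-based updater added later in this patch:

from collections.abc import Callable

from core.variables import Variable


class BufferedConversationVariableUpdater:
    """Illustrative only: buffers updates in memory and persists them on flush()."""

    def __init__(self, persist: Callable[[str, Variable], None]) -> None:
        # `persist` is a hypothetical callback that writes one variable to storage.
        self._persist = persist
        self._pending: list[tuple[str, Variable]] = []

    def update(self, conversation_id: str, variable: Variable) -> None:
        # Buffer the change; nothing touches storage yet.
        self._pending.append((conversation_id, variable))

    def flush(self) -> None:
        # Persist everything buffered so far, then clear the buffer.
        for conversation_id, variable in self._pending:
            self._persist(conversation_id, variable)
        self._pending.clear()

Because updates may be buffered per instance, each VariableAssignerNode would construct its own updater, as the protocol's note requires.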
+ """ + pass diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index af26864c01..80dda2632d 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -7,12 +7,12 @@ from pydantic import BaseModel, Field from core.file import File, FileAttribute, file_manager from core.variables import Segment, SegmentGroup, Variable +from core.variables.consts import MIN_SELECTORS_LENGTH from core.variables.segments import FileSegment, NoneSegment +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from core.workflow.enums import SystemVariableKey from factories import variable_factory -from ..constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID -from ..enums import SystemVariableKey - VariableValue = Union[str, int, float, dict, list, File] VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}") @@ -30,9 +30,11 @@ class VariablePool(BaseModel): # TODO: This user inputs is not used for pool. user_inputs: Mapping[str, Any] = Field( description="User inputs", + default_factory=dict, ) system_variables: Mapping[SystemVariableKey, Any] = Field( description="System variables", + default_factory=dict, ) environment_variables: Sequence[Variable] = Field( description="Environment variables.", @@ -43,28 +45,7 @@ class VariablePool(BaseModel): default_factory=list, ) - def __init__( - self, - *, - system_variables: Mapping[SystemVariableKey, Any] | None = None, - user_inputs: Mapping[str, Any] | None = None, - environment_variables: Sequence[Variable] | None = None, - conversation_variables: Sequence[Variable] | None = None, - **kwargs, - ): - environment_variables = environment_variables or [] - conversation_variables = conversation_variables or [] - user_inputs = user_inputs or {} - system_variables = system_variables or {} - - super().__init__( - system_variables=system_variables, - user_inputs=user_inputs, - environment_variables=environment_variables, - conversation_variables=conversation_variables, - **kwargs, - ) - + def model_post_init(self, context: Any, /) -> None: for key, value in self.system_variables.items(): self.add((SYSTEM_VARIABLE_NODE_ID, key.value), value) # Add environment variables to the variable pool @@ -91,12 +72,12 @@ class VariablePool(BaseModel): Returns: None """ - if len(selector) < 2: + if len(selector) < MIN_SELECTORS_LENGTH: raise ValueError("Invalid selector") if isinstance(value, Variable): variable = value - if isinstance(value, Segment): + elif isinstance(value, Segment): variable = variable_factory.segment_to_variable(segment=value, selector=selector) else: segment = variable_factory.build_segment(value) @@ -118,7 +99,7 @@ class VariablePool(BaseModel): Raises: ValueError: If the selector is invalid. """ - if len(selector) < 2: + if len(selector) < MIN_SELECTORS_LENGTH: return None hash_key = hash(tuple(selector[1:])) diff --git a/api/core/workflow/graph_engine/entities/event.py b/api/core/workflow/graph_engine/entities/event.py index 9a4939502e..e57e9e4d64 100644 --- a/api/core/workflow/graph_engine/entities/event.py +++ b/api/core/workflow/graph_engine/entities/event.py @@ -66,6 +66,8 @@ class BaseNodeEvent(GraphEngineEvent): """iteration id if node is in iteration""" in_loop_id: Optional[str] = None """loop id if node is in loop""" + # The version of the node, or "1" if not specified. 
+ node_version: str = "1" class NodeRunStartedEvent(BaseNodeEvent): diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 875cee17e6..61a7a26652 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -53,6 +53,7 @@ from core.workflow.nodes.end.end_stream_processor import EndStreamProcessor from core.workflow.nodes.enums import ErrorStrategy, FailBranchSourceHandle from core.workflow.nodes.event import RunCompletedEvent, RunRetrieverResourceEvent, RunStreamChunkEvent from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.utils import variable_utils from libs.flask_utils import preserve_flask_contexts from models.enums import UserFrom from models.workflow import WorkflowType @@ -314,6 +315,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) raise e @@ -627,6 +629,7 @@ class GraphEngine: parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, agent_strategy=agent_strategy, + node_version=node_instance.version(), ) max_retries = node_instance.node_data.retry_config.max_retries @@ -639,26 +642,19 @@ class GraphEngine: retry_start_at = datetime.now(UTC).replace(tzinfo=None) # yield control to other threads time.sleep(0.001) - generator = node_instance.run() - for item in generator: - if isinstance(item, GraphEngineEvent): - if isinstance(item, BaseIterationEvent): - # add parallel info to iteration event - item.parallel_id = parallel_id - item.parallel_start_node_id = parallel_start_node_id - item.parent_parallel_id = parent_parallel_id - item.parent_parallel_start_node_id = parent_parallel_start_node_id - elif isinstance(item, BaseLoopEvent): - # add parallel info to loop event - item.parallel_id = parallel_id - item.parallel_start_node_id = parallel_start_node_id - item.parent_parallel_id = parent_parallel_id - item.parent_parallel_start_node_id = parent_parallel_start_node_id - - yield item + event_stream = node_instance.run() + for event in event_stream: + if isinstance(event, GraphEngineEvent): + # add parallel info to iteration event + if isinstance(event, BaseIterationEvent | BaseLoopEvent): + event.parallel_id = parallel_id + event.parallel_start_node_id = parallel_start_node_id + event.parent_parallel_id = parent_parallel_id + event.parent_parallel_start_node_id = parent_parallel_start_node_id + yield event else: - if isinstance(item, RunCompletedEvent): - run_result = item.run_result + if isinstance(event, RunCompletedEvent): + run_result = event.run_result if run_result.status == WorkflowNodeExecutionStatus.FAILED: if ( retries == max_retries @@ -684,6 +680,7 @@ class GraphEngine: error=run_result.error or "Unknown error", retry_index=retries, start_at=retry_start_at, + node_version=node_instance.version(), ) time.sleep(retry_interval) break @@ -694,7 +691,7 @@ class GraphEngine: # if run failed, handle error run_result = self._handle_continue_on_error( node_instance, - item.run_result, + event.run_result, self.graph_runtime_state.variable_pool, handle_exceptions=handle_exceptions, ) @@ -719,6 +716,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry 
= False else: @@ -733,6 +731,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry = False elif run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED: @@ -793,37 +792,40 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry = False break - elif isinstance(item, RunStreamChunkEvent): + elif isinstance(event, RunStreamChunkEvent): yield NodeRunStreamChunkEvent( id=node_instance.id, node_id=node_instance.node_id, node_type=node_instance.node_type, node_data=node_instance.node_data, - chunk_content=item.chunk_content, - from_variable_selector=item.from_variable_selector, + chunk_content=event.chunk_content, + from_variable_selector=event.from_variable_selector, route_node_state=route_node_state, parallel_id=parallel_id, parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) - elif isinstance(item, RunRetrieverResourceEvent): + elif isinstance(event, RunRetrieverResourceEvent): yield NodeRunRetrieverResourceEvent( id=node_instance.id, node_id=node_instance.node_id, node_type=node_instance.node_type, node_data=node_instance.node_data, - retriever_resources=item.retriever_resources, - context=item.context, + retriever_resources=event.retriever_resources, + context=event.context, route_node_state=route_node_state, parallel_id=parallel_id, parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) except GenerateTaskStoppedError: # trigger node run failed event @@ -840,6 +842,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) return except Exception as e: @@ -854,16 +857,12 @@ class GraphEngine: :param variable_value: variable value :return: """ - self.graph_runtime_state.variable_pool.add([node_id] + variable_key_list, variable_value) - - # if variable_value is a dict, then recursively append variables - if isinstance(variable_value, dict): - for key, value in variable_value.items(): - # construct new key list - new_key_list = variable_key_list + [key] - self._append_variables_recursively( - node_id=node_id, variable_key_list=new_key_list, variable_value=value - ) + variable_utils.append_variables_recursively( + self.graph_runtime_state.variable_pool, + node_id, + variable_key_list, + variable_value, + ) def _is_timed_out(self, start_at: float, max_execution_time: int) -> bool: """ diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 22c564c1fc..2f28363955 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -39,6 +39,10 @@ class AgentNode(ToolNode): _node_data_cls = AgentNodeData # type: ignore _node_type = NodeType.AGENT + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator: """ Run the agent node diff --git a/api/core/workflow/nodes/answer/answer_node.py 
b/api/core/workflow/nodes/answer/answer_node.py index aa030870e2..38c2bcbdf5 100644 --- a/api/core/workflow/nodes/answer/answer_node.py +++ b/api/core/workflow/nodes/answer/answer_node.py @@ -18,7 +18,11 @@ from core.workflow.utils.variable_template_parser import VariableTemplateParser class AnswerNode(BaseNode[AnswerNodeData]): _node_data_cls = AnswerNodeData - _node_type: NodeType = NodeType.ANSWER + _node_type = NodeType.ANSWER + + @classmethod + def version(cls) -> str: + return "1" def _run(self) -> NodeRunResult: """ @@ -45,7 +49,10 @@ class AnswerNode(BaseNode[AnswerNodeData]): part = cast(TextGenerateRouteChunk, part) answer += part.text - return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={"answer": answer, "files": files}) + return NodeRunResult( + status=WorkflowNodeExecutionStatus.SUCCEEDED, + outputs={"answer": answer, "files": ArrayFileSegment(value=files)}, + ) @classmethod def _extract_variable_selector_to_variable_mapping( diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index ba6ba16e36..f3e4a62ade 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -109,6 +109,7 @@ class AnswerStreamProcessor(StreamProcessor): parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, from_variable_selector=[answer_node_id, "answer"], + node_version=event.node_version, ) else: route_chunk = cast(VarGenerateRouteChunk, route_chunk) @@ -134,6 +135,7 @@ class AnswerStreamProcessor(StreamProcessor): route_node_state=event.route_node_state, parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, + node_version=event.node_version, ) self.route_position[answer_node_id] += 1 diff --git a/api/core/workflow/nodes/answer/base_stream_processor.py b/api/core/workflow/nodes/answer/base_stream_processor.py index 6671ff0746..09d5464d7a 100644 --- a/api/core/workflow/nodes/answer/base_stream_processor.py +++ b/api/core/workflow/nodes/answer/base_stream_processor.py @@ -57,7 +57,6 @@ class StreamProcessor(ABC): # The branch_identify parameter is added to ensure that # only nodes in the correct logical branch are included. 
- reachable_node_ids.append(edge.target_node_id) ids = self._fetch_node_ids_in_reachable_branch(edge.target_node_id, run_result.edge_source_handle) reachable_node_ids.extend(ids) else: @@ -74,6 +73,8 @@ class StreamProcessor(ABC): self._remove_node_ids_in_unreachable_branch(node_id, reachable_node_ids) def _fetch_node_ids_in_reachable_branch(self, node_id: str, branch_identify: Optional[str] = None) -> list[str]: + if node_id not in self.rest_node_ids: + self.rest_node_ids.append(node_id) node_ids = [] for edge in self.graph.edge_mapping.get(node_id, []): if edge.target_node_id == self.graph.root_node_id: diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index 7da0c19740..6973401429 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -1,7 +1,7 @@ import logging from abc import abstractmethod from collections.abc import Generator, Mapping, Sequence -from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Optional, TypeVar, Union, cast from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus @@ -23,7 +23,7 @@ GenericNodeData = TypeVar("GenericNodeData", bound=BaseNodeData) class BaseNode(Generic[GenericNodeData]): _node_data_cls: type[GenericNodeData] - _node_type: NodeType + _node_type: ClassVar[NodeType] def __init__( self, @@ -90,8 +90,38 @@ class BaseNode(Generic[GenericNodeData]): graph_config: Mapping[str, Any], config: Mapping[str, Any], ) -> Mapping[str, Sequence[str]]: - """ - Extract variable selector to variable mapping + """Extracts references variable selectors from node configuration. + + The `config` parameter represents the configuration for a specific node type and corresponds + to the `data` field in the node definition object. + + The returned mapping has the following structure: + + {'1747829548239.#1747829667553.result#': ['1747829667553', 'result']} + + For loop and iteration nodes, the mapping may look like this: + + { + "1748332301644.input_selector": ["1748332363630", "result"], + "1748332325079.1748332325079.#sys.workflow_id#": ["sys", "workflow_id"], + } + + where `1748332301644` is the ID of the loop / iteration node, + and `1748332325079` is the ID of the node inside the loop or iteration node. + + Here, the key consists of two parts: the current node ID (provided as the `node_id` + parameter to `_extract_variable_selector_to_variable_mapping`) and the variable selector, + enclosed in `#` symbols. These two parts are separated by a dot (`.`). + + The value is a list of string representing the variable selector, where the first element is the node ID + of the referenced variable, and the second element is the variable name within that node. + + The meaning of the above response is: + + The node with ID `1747829548239` references the variable `result` from the node with + ID `1747829667553`. For example, if `1747829548239` is a LLM node, its prompt may contain a + reference to the `result` output variable of node `1747829667553`. 
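For reference, the key format described above is typically assembled as in the following sketch; the helper name is illustrative, and condition-based nodes later in this patch build their keys the same way:

def build_mapping_key(node_id: str, selector: list[str]) -> str:
    # e.g. build_mapping_key("1747829548239", ["1747829667553", "result"])
    # -> "1747829548239.#1747829667553.result#"
    return "{}.#{}#".format(node_id, ".".join(selector))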
+ :param graph_config: graph config :param config: node config :return: @@ -101,9 +131,10 @@ class BaseNode(Generic[GenericNodeData]): raise ValueError("Node ID is required when extracting variable selector to variable mapping.") node_data = cls._node_data_cls(**config.get("data", {})) - return cls._extract_variable_selector_to_variable_mapping( + data = cls._extract_variable_selector_to_variable_mapping( graph_config=graph_config, node_id=node_id, node_data=cast(GenericNodeData, node_data) ) + return data @classmethod def _extract_variable_selector_to_variable_mapping( @@ -139,6 +170,16 @@ class BaseNode(Generic[GenericNodeData]): """ return self._node_type + @classmethod + @abstractmethod + def version(cls) -> str: + """`node_version` returns the version of current node type.""" + # NOTE(QuantumGhost): This should be in sync with `NODE_TYPE_CLASSES_MAPPING`. + # + # If you have introduced a new node type, please add it to `NODE_TYPE_CLASSES_MAPPING` + # in `api/core/workflow/nodes/__init__.py`. + raise NotImplementedError("subclasses of BaseNode must implement `version` method.") + @property def should_continue_on_error(self) -> bool: """judge if should continue on error diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index 61c08a7d71..22ed9e2651 100644 --- a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -40,6 +40,10 @@ class CodeNode(BaseNode[CodeNodeData]): return code_provider.get_default_config() + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get code language code_language = self.node_data.code_language @@ -126,6 +130,9 @@ class CodeNode(BaseNode[CodeNodeData]): prefix: str = "", depth: int = 1, ): + # TODO(QuantumGhost): Replace native Python lists with `Array*Segment` classes. + # Note that `_transform_result` may produce lists containing `None` values, + # which don't conform to the type requirements of `Array*Segment` classes. 
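To make the note above concrete, a hedged illustration of why such result lists cannot simply be wrapped, assuming the segment classes validate element types as the note describes:

from core.variables import ArrayStringSegment
from core.variables.segments import ArrayAnySegment

ArrayStringSegment(value=["a", "b"])     # passes: every element is a str
ArrayAnySegment(value=["a", None, 3])    # passes: element type is unconstrained
# ArrayStringSegment(value=["a", None])  # per the note above, would be rejected: None is not a str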
if depth > dify_config.CODE_MAX_DEPTH: raise DepthLimitError(f"Depth limit {dify_config.CODE_MAX_DEPTH} reached, object too deep.") diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index 429fed2d04..8e6150f9cc 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -24,7 +24,7 @@ from configs import dify_config from core.file import File, FileTransferMethod, file_manager from core.helper import ssrf_proxy from core.variables import ArrayFileSegment -from core.variables.segments import FileSegment +from core.variables.segments import ArrayStringSegment, FileSegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode @@ -45,6 +45,10 @@ class DocumentExtractorNode(BaseNode[DocumentExtractorNodeData]): _node_data_cls = DocumentExtractorNodeData _node_type = NodeType.DOCUMENT_EXTRACTOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self): variable_selector = self.node_data.variable_selector variable = self.graph_runtime_state.variable_pool.get(variable_selector) @@ -67,7 +71,7 @@ class DocumentExtractorNode(BaseNode[DocumentExtractorNodeData]): status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=inputs, process_data=process_data, - outputs={"text": extracted_text_list}, + outputs={"text": ArrayStringSegment(value=extracted_text_list)}, ) elif isinstance(value, File): extracted_text = _extract_text_from_file(value) @@ -447,7 +451,7 @@ def _extract_text_from_excel(file_content: bytes) -> str: df = df.applymap(lambda x: " ".join(str(x).splitlines()) if isinstance(x, str) else x) # type: ignore # Combine multi-line text in column names into a single line - df.columns = pd.Index([" ".join(col.splitlines()) for col in df.columns]) + df.columns = pd.Index([" ".join(str(col).splitlines()) for col in df.columns]) # Manually construct the Markdown table markdown_table += _construct_markdown_table(df) + "\n\n" diff --git a/api/core/workflow/nodes/end/end_node.py b/api/core/workflow/nodes/end/end_node.py index 0e9756b243..17a0b3adeb 100644 --- a/api/core/workflow/nodes/end/end_node.py +++ b/api/core/workflow/nodes/end/end_node.py @@ -9,6 +9,10 @@ class EndNode(BaseNode[EndNodeData]): _node_data_cls = EndNodeData _node_type = NodeType.END + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run node diff --git a/api/core/workflow/nodes/end/end_stream_processor.py b/api/core/workflow/nodes/end/end_stream_processor.py index 3ae5af7137..a6fb2ffc18 100644 --- a/api/core/workflow/nodes/end/end_stream_processor.py +++ b/api/core/workflow/nodes/end/end_stream_processor.py @@ -139,6 +139,7 @@ class EndStreamProcessor(StreamProcessor): route_node_state=event.route_node_state, parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, + node_version=event.node_version, ) self.route_position[end_node_id] += 1 diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 6b1ac57c06..971e0f73e7 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -6,6 +6,7 @@ from typing import Any, Optional from configs import dify_config from core.file import File, FileTransferMethod from core.tools.tool_file_manager import ToolFileManager +from core.variables.segments 
import ArrayFileSegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.variable_entities import VariableSelector from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus @@ -60,6 +61,10 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): }, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: process_data = {} try: @@ -92,7 +97,7 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs={ "status_code": response.status_code, - "body": response.text if not files else "", + "body": response.text if not files.value else "", "headers": response.headers, "files": files, }, @@ -166,7 +171,7 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): return mapping - def extract_files(self, url: str, response: Response) -> list[File]: + def extract_files(self, url: str, response: Response) -> ArrayFileSegment: """ Extract files from response by checking both Content-Type header and URL """ @@ -178,7 +183,7 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): content_disposition_type = None if not is_file: - return files + return ArrayFileSegment(value=[]) if parsed_content_disposition: content_disposition_filename = parsed_content_disposition.get_filename() @@ -211,4 +216,4 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): ) files.append(file) - return files + return ArrayFileSegment(value=files) diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 976922f75d..22b748030c 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -1,4 +1,5 @@ -from typing import Literal +from collections.abc import Mapping, Sequence +from typing import Any, Literal from typing_extensions import deprecated @@ -16,6 +17,10 @@ class IfElseNode(BaseNode[IfElseNodeData]): _node_data_cls = IfElseNodeData _node_type = NodeType.IF_ELSE + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run node @@ -87,6 +92,22 @@ class IfElseNode(BaseNode[IfElseNodeData]): return data + @classmethod + def _extract_variable_selector_to_variable_mapping( + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: IfElseNodeData, + ) -> Mapping[str, Sequence[str]]: + var_mapping: dict[str, list[str]] = {} + for case in node_data.cases or []: + for condition in case.conditions: + key = "{}.#{}#".format(node_id, ".".join(condition.variable_selector)) + var_mapping[key] = condition.variable_selector + + return var_mapping + @deprecated("This function is deprecated. 
You should use the new cases structure.") def _should_not_use_old_function( diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 42b6795fb0..151efc28ec 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -11,6 +11,7 @@ from flask import Flask, current_app from configs import dify_config from core.variables import ArrayVariable, IntegerVariable, NoneVariable +from core.variables.segments import ArrayAnySegment, ArraySegment from core.workflow.entities.node_entities import ( NodeRunResult, ) @@ -37,6 +38,7 @@ from core.workflow.nodes.base import BaseNode from core.workflow.nodes.enums import NodeType from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from core.workflow.nodes.iteration.entities import ErrorHandleMode, IterationNodeData +from factories.variable_factory import build_segment from libs.flask_utils import preserve_flask_contexts from .exc import ( @@ -72,6 +74,10 @@ class IterationNode(BaseNode[IterationNodeData]): }, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: """ Run the node. @@ -85,10 +91,17 @@ class IterationNode(BaseNode[IterationNodeData]): raise InvalidIteratorValueError(f"invalid iterator value: {variable}, please provide a list.") if isinstance(variable, NoneVariable) or len(variable.value) == 0: + # Try our best to preserve the type informat. + if isinstance(variable, ArraySegment): + output = variable.model_copy(update={"value": []}) + else: + output = ArrayAnySegment(value=[]) yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"output": []}, + # TODO(QuantumGhost): is it possible to compute the type of `output` + # from graph definition? + outputs={"output": output}, ) ) return @@ -231,6 +244,7 @@ class IterationNode(BaseNode[IterationNodeData]): # Flatten the list of lists if isinstance(outputs, list) and all(isinstance(output, list) for output in outputs): outputs = [item for sublist in outputs for item in sublist] + output_segment = build_segment(outputs) yield IterationRunSucceededEvent( iteration_id=self.id, @@ -247,7 +261,7 @@ class IterationNode(BaseNode[IterationNodeData]): yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"output": outputs}, + outputs={"output": output_segment}, metadata={ WorkflowNodeExecutionMetadataKey.ITERATION_DURATION_MAP: iter_run_map, WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: graph_engine.graph_runtime_state.total_tokens, diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py index bee481ebdb..9900aa225d 100644 --- a/api/core/workflow/nodes/iteration/iteration_start_node.py +++ b/api/core/workflow/nodes/iteration/iteration_start_node.py @@ -13,6 +13,10 @@ class IterationStartNode(BaseNode[IterationStartNodeData]): _node_data_cls = IterationStartNodeData _node_type = NodeType.ITERATION_START + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. 
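The empty-input branch above tries to preserve the element type of the incoming array; the same fallback appears in the list-operator node later in this patch. A condensed sketch of that pattern, using the segment classes from this codebase:

from core.variables.segments import ArrayAnySegment, ArraySegment, Segment


def empty_output_like(variable: Segment) -> Segment:
    # Keep the concrete Array*Segment subclass (and thus the element type)
    # when the input was already a typed array; otherwise fall back to "any".
    if isinstance(variable, ArraySegment):
        return variable.model_copy(update={"value": []})
    return ArrayAnySegment(value=[])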
diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 5cf5848d54..0b9e98f28a 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -24,6 +24,7 @@ from core.rag.entities.metadata_entities import Condition, MetadataCondition from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.variables import StringSegment +from core.variables.segments import ArrayObjectSegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.enums import NodeType @@ -70,6 +71,10 @@ class KnowledgeRetrievalNode(LLMNode): _node_data_cls = KnowledgeRetrievalNodeData # type: ignore _node_type = NodeType.KNOWLEDGE_RETRIEVAL + @classmethod + def version(cls): + return "1" + def _run(self) -> NodeRunResult: # type: ignore node_data = cast(KnowledgeRetrievalNodeData, self.node_data) # extract variables @@ -115,9 +120,12 @@ class KnowledgeRetrievalNode(LLMNode): # retrieve knowledge try: results = self._fetch_dataset_retriever(node_data=node_data, query=query) - outputs = {"result": results} + outputs = {"result": ArrayObjectSegment(value=results)} return NodeRunResult( - status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, process_data=None, outputs=outputs + status=WorkflowNodeExecutionStatus.SUCCEEDED, + inputs=variables, + process_data=None, + outputs=outputs, # type: ignore ) except KnowledgeRetrievalNodeError as e: diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index e698d3f5d8..3c9ba44cf1 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -3,6 +3,7 @@ from typing import Any, Literal, Union from core.file import File from core.variables import ArrayFileSegment, ArrayNumberSegment, ArrayStringSegment +from core.variables.segments import ArrayAnySegment, ArraySegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode @@ -16,6 +17,10 @@ class ListOperatorNode(BaseNode[ListOperatorNodeData]): _node_data_cls = ListOperatorNodeData _node_type = NodeType.LIST_OPERATOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self): inputs: dict[str, list] = {} process_data: dict[str, list] = {} @@ -30,7 +35,11 @@ class ListOperatorNode(BaseNode[ListOperatorNodeData]): if not variable.value: inputs = {"variable": []} process_data = {"variable": []} - outputs = {"result": [], "first_record": None, "last_record": None} + if isinstance(variable, ArraySegment): + result = variable.model_copy(update={"value": []}) + else: + result = ArrayAnySegment(value=[]) + outputs = {"result": result, "first_record": None, "last_record": None} return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=inputs, @@ -71,7 +80,7 @@ class ListOperatorNode(BaseNode[ListOperatorNodeData]): variable = self._apply_slice(variable) outputs = { - "result": variable.value, + "result": variable, "first_record": variable.value[0] if variable.value else None, "last_record": variable.value[-1] if variable.value else None, } diff --git 
a/api/core/workflow/nodes/llm/file_saver.py b/api/core/workflow/nodes/llm/file_saver.py index c85baade03..a4b45ce652 100644 --- a/api/core/workflow/nodes/llm/file_saver.py +++ b/api/core/workflow/nodes/llm/file_saver.py @@ -119,9 +119,6 @@ class FileSaverImpl(LLMFileSaver): size=len(data), related_id=tool_file.id, url=url, - # TODO(QuantumGhost): how should I set the following key? - # What's the difference between `remote_url` and `url`? - # What's the purpose of `storage_key` and `dify_model_identity`? storage_key=tool_file.file_key, ) diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index d27124d62c..124ae6d75d 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -138,6 +138,10 @@ class LLMNode(BaseNode[LLMNodeData]): ) self._llm_file_saver = llm_file_saver + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: def process_structured_output(text: str) -> Optional[dict[str, Any]]: """Process structured output if enabled""" @@ -255,7 +259,7 @@ class LLMNode(BaseNode[LLMNodeData]): if structured_output: outputs["structured_output"] = structured_output if self._file_outputs is not None: - outputs["files"] = self._file_outputs + outputs["files"] = ArrayFileSegment(value=self._file_outputs) yield RunCompletedEvent( run_result=NodeRunResult( diff --git a/api/core/workflow/nodes/loop/loop_end_node.py b/api/core/workflow/nodes/loop/loop_end_node.py index 327b9e234b..b144021bab 100644 --- a/api/core/workflow/nodes/loop/loop_end_node.py +++ b/api/core/workflow/nodes/loop/loop_end_node.py @@ -13,6 +13,10 @@ class LoopEndNode(BaseNode[LoopEndNodeData]): _node_data_cls = LoopEndNodeData _node_type = NodeType.LOOP_END + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index fafa205386..368d662a75 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -54,6 +54,10 @@ class LoopNode(BaseNode[LoopNodeData]): _node_data_cls = LoopNodeData _node_type = NodeType.LOOP + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: """Run the node.""" # Get inputs @@ -482,6 +486,13 @@ class LoopNode(BaseNode[LoopNodeData]): variable_mapping.update(sub_node_variable_mapping) + for loop_variable in node_data.loop_variables or []: + if loop_variable.value_type == "variable": + assert loop_variable.value is not None, "Loop variable value must be provided for variable type" + # add loop variable to variable mapping + selector = loop_variable.value + variable_mapping[f"{node_id}.{loop_variable.label}"] = selector + # remove variable out from loop variable_mapping = { key: value for key, value in variable_mapping.items() if value[0] not in loop_graph.node_ids diff --git a/api/core/workflow/nodes/loop/loop_start_node.py b/api/core/workflow/nodes/loop/loop_start_node.py index 5a15f36044..f5e38b7516 100644 --- a/api/core/workflow/nodes/loop/loop_start_node.py +++ b/api/core/workflow/nodes/loop/loop_start_node.py @@ -13,6 +13,10 @@ class LoopStartNode(BaseNode[LoopStartNodeData]): _node_data_cls = LoopStartNodeData _node_type = NodeType.LOOP_START + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. 
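A pattern recurring across these node changes is that outputs are wrapped in typed segments (ArrayFileSegment, ArrayStringSegment, ArrayObjectSegment) rather than returned as plain Python lists, so the declared value_type travels with the data. A minimal sketch of the resulting shape; the helper function is illustrative, not part of the patch:

from core.variables import ArrayFileSegment
from core.variables.segments import ArrayStringSegment


def build_outputs(extracted_text_list: list[str]) -> dict:
    # Raw lists lose their element type once persisted; wrapping them in the
    # matching Array*Segment keeps value_type alongside the data.
    return {
        "text": ArrayStringSegment(value=extracted_text_list),
        "files": ArrayFileSegment(value=[]),
    }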
diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py index 1f1be59542..67cc884f20 100644 --- a/api/core/workflow/nodes/node_mapping.py +++ b/api/core/workflow/nodes/node_mapping.py @@ -25,6 +25,11 @@ from core.workflow.nodes.variable_assigner.v2 import VariableAssignerNode as Var LATEST_VERSION = "latest" +# NOTE(QuantumGhost): This should be in sync with subclasses of BaseNode. +# Specifically, if you have introduced new node types, you should add them here. +# +# TODO(QuantumGhost): This could be automated with either metaclass or `__init_subclass__` +# hook. Try to avoid duplication of node information. NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = { NodeType.START: { LATEST_VERSION: StartNode, diff --git a/api/core/workflow/nodes/parameter_extractor/entities.py b/api/core/workflow/nodes/parameter_extractor/entities.py index 369eb13b04..916778d167 100644 --- a/api/core/workflow/nodes/parameter_extractor/entities.py +++ b/api/core/workflow/nodes/parameter_extractor/entities.py @@ -7,6 +7,10 @@ from core.workflow.nodes.base import BaseNodeData from core.workflow.nodes.llm import ModelConfig, VisionConfig +class _ParameterConfigError(Exception): + pass + + class ParameterConfig(BaseModel): """ Parameter Config. @@ -27,6 +31,19 @@ class ParameterConfig(BaseModel): raise ValueError("Invalid parameter name, __reason and __is_success are reserved") return str(value) + def is_array_type(self) -> bool: + return self.type in ("array[string]", "array[number]", "array[object]") + + def element_type(self) -> Literal["string", "number", "object"]: + if self.type == "array[number]": + return "number" + elif self.type == "array[string]": + return "string" + elif self.type == "array[object]": + return "object" + else: + raise _ParameterConfigError(f"{self.type} is not array type.") + class ParameterExtractorNodeData(BaseNodeData): """ diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index 2552784762..8d6c2d0a5c 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -25,6 +25,7 @@ from core.prompt.advanced_prompt_transform import AdvancedPromptTransform from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate from core.prompt.simple_prompt_transform import ModelMode from core.prompt.utils.prompt_message_util import PromptMessageUtil +from core.variables.types import SegmentType from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey, WorkflowNodeExecutionStatus @@ -32,6 +33,7 @@ from core.workflow.nodes.base.node import BaseNode from core.workflow.nodes.enums import NodeType from core.workflow.nodes.llm import ModelConfig, llm_utils from core.workflow.utils import variable_template_parser +from factories.variable_factory import build_segment_with_type from .entities import ParameterExtractorNodeData from .exc import ( @@ -109,6 +111,10 @@ class ParameterExtractorNode(BaseNode): } } + @classmethod + def version(cls) -> str: + return "1" + def _run(self): """ Run the node. 
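The is_array_type and element_type helpers added to ParameterConfig above feed the transform in the next hunk. A short hedged sketch of the intended flow, assuming the SegmentType enum accepts the same "array[...]" strings the node passes to it:

from core.variables.types import SegmentType
from factories.variable_factory import build_segment_with_type

# Start from an empty, correctly typed segment and append coerced elements,
# as the transform in the next hunk does for array[number] parameters.
seg = build_segment_with_type(segment_type=SegmentType("array[number]"), value=[])
for item in ("1", "2.5", 3):
    if isinstance(item, int | float):
        seg.value.append(item)
    elif isinstance(item, str):
        seg.value.append(float(item) if "." in item else int(item))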
@@ -584,28 +590,30 @@ class ParameterExtractorNode(BaseNode): elif parameter.type in {"string", "select"}: if isinstance(result[parameter.name], str): transformed_result[parameter.name] = result[parameter.name] - elif parameter.type.startswith("array"): + elif parameter.is_array_type(): if isinstance(result[parameter.name], list): - nested_type = parameter.type[6:-1] - transformed_result[parameter.name] = [] + nested_type = parameter.element_type() + assert nested_type is not None + segment_value = build_segment_with_type(segment_type=SegmentType(parameter.type), value=[]) + transformed_result[parameter.name] = segment_value for item in result[parameter.name]: if nested_type == "number": if isinstance(item, int | float): - transformed_result[parameter.name].append(item) + segment_value.value.append(item) elif isinstance(item, str): try: if "." in item: - transformed_result[parameter.name].append(float(item)) + segment_value.value.append(float(item)) else: - transformed_result[parameter.name].append(int(item)) + segment_value.value.append(int(item)) except ValueError: pass elif nested_type == "string": if isinstance(item, str): - transformed_result[parameter.name].append(item) + segment_value.value.append(item) elif nested_type == "object": if isinstance(item, dict): - transformed_result[parameter.name].append(item) + segment_value.value.append(item) if parameter.name not in transformed_result: if parameter.type == "number": @@ -615,7 +623,9 @@ class ParameterExtractorNode(BaseNode): elif parameter.type in {"string", "select"}: transformed_result[parameter.name] = "" elif parameter.type.startswith("array"): - transformed_result[parameter.name] = [] + transformed_result[parameter.name] = build_segment_with_type( + segment_type=SegmentType(parameter.type), value=[] + ) return transformed_result diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 1f50700c7e..a518167cc6 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -40,6 +40,10 @@ class QuestionClassifierNode(LLMNode): _node_data_cls = QuestionClassifierNodeData # type: ignore _node_type = NodeType.QUESTION_CLASSIFIER + @classmethod + def version(cls): + return "1" + def _run(self): node_data = cast(QuestionClassifierNodeData, self.node_data) variable_pool = self.graph_runtime_state.variable_pool diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py index 8839aec9d6..5ee9bc331f 100644 --- a/api/core/workflow/nodes/start/start_node.py +++ b/api/core/workflow/nodes/start/start_node.py @@ -10,6 +10,10 @@ class StartNode(BaseNode[StartNodeData]): _node_data_cls = StartNodeData _node_type = NodeType.START + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: node_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs) system_inputs = self.graph_runtime_state.variable_pool.system_variables @@ -18,5 +22,6 @@ class StartNode(BaseNode[StartNodeData]): # Set system variables as node outputs. for var in system_inputs: node_inputs[SYSTEM_VARIABLE_NODE_ID + "." 
+ var] = system_inputs[var] + outputs = dict(node_inputs) - return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=node_inputs, outputs=node_inputs) + return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=node_inputs, outputs=outputs) diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index 476cf7eee4..ba573074c3 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -28,6 +28,10 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]): "config": {"variables": [{"variable": "arg1", "value_selector": []}], "template": "{{ arg1 }}"}, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get variables variables = {} diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index aaecc7b989..aa15d69931 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -12,7 +12,7 @@ from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter from core.tools.errors import ToolInvokeError from core.tools.tool_engine import ToolEngine from core.tools.utils.message_transformer import ToolFileMessageTransformer -from core.variables.segments import ArrayAnySegment +from core.variables.segments import ArrayAnySegment, ArrayFileSegment from core.variables.variables import ArrayAnyVariable from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.variable_pool import VariablePool @@ -44,6 +44,10 @@ class ToolNode(BaseNode[ToolNodeData]): _node_data_cls = ToolNodeData _node_type = NodeType.TOOL + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator: """ Run the tool node @@ -300,6 +304,7 @@ class ToolNode(BaseNode[ToolNodeData]): variables[variable_name] = variable_value elif message.type == ToolInvokeMessage.MessageType.FILE: assert message.meta is not None + assert isinstance(message.meta, File) files.append(message.meta["file"]) elif message.type == ToolInvokeMessage.MessageType.LOG: assert isinstance(message.message, ToolInvokeMessage.LogMessage) @@ -363,7 +368,7 @@ class ToolNode(BaseNode[ToolNodeData]): yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, - outputs={"text": text, "files": files, "json": json, **variables}, + outputs={"text": text, "files": ArrayFileSegment(value=files), "json": json, **variables}, metadata={ **agent_execution_metadata, WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info, diff --git a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py index db3e25b015..96bb3e793a 100644 --- a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py +++ b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py @@ -1,3 +1,6 @@ +from collections.abc import Mapping + +from core.variables.segments import Segment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode @@ -9,16 +12,20 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]): _node_data_cls = VariableAssignerNodeData _node_type = 
NodeType.VARIABLE_AGGREGATOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get variables - outputs = {} + outputs: dict[str, Segment | Mapping[str, Segment]] = {} inputs = {} if not self.node_data.advanced_settings or not self.node_data.advanced_settings.group_enabled: for selector in self.node_data.variables: variable = self.graph_runtime_state.variable_pool.get(selector) if variable is not None: - outputs = {"output": variable.to_object()} + outputs = {"output": variable} inputs = {".".join(selector[1:]): variable.to_object()} break @@ -28,7 +35,7 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]): variable = self.graph_runtime_state.variable_pool.get(selector) if variable is not None: - outputs[group.group_name] = {"output": variable.to_object()} + outputs[group.group_name] = {"output": variable} inputs[".".join(selector[1:])] = variable.to_object() break diff --git a/api/core/workflow/nodes/variable_assigner/common/helpers.py b/api/core/workflow/nodes/variable_assigner/common/helpers.py index 8031b57fa8..0d2822233e 100644 --- a/api/core/workflow/nodes/variable_assigner/common/helpers.py +++ b/api/core/workflow/nodes/variable_assigner/common/helpers.py @@ -1,19 +1,55 @@ -from sqlalchemy import select -from sqlalchemy.orm import Session +from collections.abc import Mapping, MutableMapping, Sequence +from typing import Any, TypeVar -from core.variables import Variable -from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError -from extensions.ext_database import db -from models import ConversationVariable +from pydantic import BaseModel + +from core.variables import Segment +from core.variables.consts import MIN_SELECTORS_LENGTH +from core.variables.types import SegmentType + +# Use double underscore (`__`) prefix for internal variables +# to minimize risk of collision with user-defined variable names. 
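+# The key is written into a node's `process_data` by `set_updated_variables`
+# below and read back with `get_updated_variables`; callers should not touch
+# it directly.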
+_UPDATED_VARIABLES_KEY = "__updated_variables" -def update_conversation_variable(conversation_id: str, variable: Variable): - stmt = select(ConversationVariable).where( - ConversationVariable.id == variable.id, ConversationVariable.conversation_id == conversation_id +class UpdatedVariable(BaseModel): + name: str + selector: Sequence[str] + value_type: SegmentType + new_value: Any + + +_T = TypeVar("_T", bound=MutableMapping[str, Any]) + + +def variable_to_processed_data(selector: Sequence[str], seg: Segment) -> UpdatedVariable: + if len(selector) < MIN_SELECTORS_LENGTH: + raise Exception("selector too short") + node_id, var_name = selector[:2] + return UpdatedVariable( + name=var_name, + selector=list(selector[:2]), + value_type=seg.value_type, + new_value=seg.value, ) - with Session(db.engine) as session: - row = session.scalar(stmt) - if not row: - raise VariableOperatorNodeError("conversation variable not found in the database") - row.data = variable.model_dump_json() - session.commit() + + +def set_updated_variables(m: _T, updates: Sequence[UpdatedVariable]) -> _T: + m[_UPDATED_VARIABLES_KEY] = updates + return m + + +def get_updated_variables(m: Mapping[str, Any]) -> Sequence[UpdatedVariable] | None: + updated_values = m.get(_UPDATED_VARIABLES_KEY, None) + if updated_values is None: + return None + result = [] + for items in updated_values: + if isinstance(items, UpdatedVariable): + result.append(items) + elif isinstance(items, dict): + items = UpdatedVariable.model_validate(items) + result.append(items) + else: + raise TypeError(f"Invalid updated variable: {items}, type={type(items)}") + return result diff --git a/api/core/workflow/nodes/variable_assigner/common/impl.py b/api/core/workflow/nodes/variable_assigner/common/impl.py new file mode 100644 index 0000000000..8f7a44bb62 --- /dev/null +++ b/api/core/workflow/nodes/variable_assigner/common/impl.py @@ -0,0 +1,38 @@ +from sqlalchemy import Engine, select +from sqlalchemy.orm import Session + +from core.variables.variables import Variable +from models.engine import db +from models.workflow import ConversationVariable + +from .exc import VariableOperatorNodeError + + +class ConversationVariableUpdaterImpl: + _engine: Engine | None + + def __init__(self, engine: Engine | None = None) -> None: + self._engine = engine + + def _get_engine(self) -> Engine: + if self._engine: + return self._engine + return db.engine + + def update(self, conversation_id: str, variable: Variable): + stmt = select(ConversationVariable).where( + ConversationVariable.id == variable.id, ConversationVariable.conversation_id == conversation_id + ) + with Session(self._get_engine()) as session: + row = session.scalar(stmt) + if not row: + raise VariableOperatorNodeError("conversation variable not found in the database") + row.data = variable.model_dump_json() + session.commit() + + def flush(self): + pass + + +def conversation_variable_updater_factory() -> ConversationVariableUpdaterImpl: + return ConversationVariableUpdaterImpl() diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py index 835e1d77b5..be5083c9c1 100644 --- a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -1,4 +1,9 @@ +from collections.abc import Callable, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Optional, TypeAlias + from core.variables import SegmentType, Variable +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID +from 
core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode @@ -7,16 +12,71 @@ from core.workflow.nodes.variable_assigner.common import helpers as common_helpe from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError from factories import variable_factory +from ..common.impl import conversation_variable_updater_factory from .node_data import VariableAssignerData, WriteMode +if TYPE_CHECKING: + from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState + + +_CONV_VAR_UPDATER_FACTORY: TypeAlias = Callable[[], ConversationVariableUpdater] + class VariableAssignerNode(BaseNode[VariableAssignerData]): _node_data_cls = VariableAssignerData _node_type = NodeType.VARIABLE_ASSIGNER + _conv_var_updater_factory: _CONV_VAR_UPDATER_FACTORY + + def __init__( + self, + id: str, + config: Mapping[str, Any], + graph_init_params: "GraphInitParams", + graph: "Graph", + graph_runtime_state: "GraphRuntimeState", + previous_node_id: Optional[str] = None, + thread_pool_id: Optional[str] = None, + conv_var_updater_factory: _CONV_VAR_UPDATER_FACTORY = conversation_variable_updater_factory, + ) -> None: + super().__init__( + id=id, + config=config, + graph_init_params=graph_init_params, + graph=graph, + graph_runtime_state=graph_runtime_state, + previous_node_id=previous_node_id, + thread_pool_id=thread_pool_id, + ) + self._conv_var_updater_factory = conv_var_updater_factory + + @classmethod + def version(cls) -> str: + return "1" + + @classmethod + def _extract_variable_selector_to_variable_mapping( + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: VariableAssignerData, + ) -> Mapping[str, Sequence[str]]: + mapping = {} + assigned_variable_node_id = node_data.assigned_variable_selector[0] + if assigned_variable_node_id == CONVERSATION_VARIABLE_NODE_ID: + selector_key = ".".join(node_data.assigned_variable_selector) + key = f"{node_id}.#{selector_key}#" + mapping[key] = node_data.assigned_variable_selector + + selector_key = ".".join(node_data.input_variable_selector) + key = f"{node_id}.#{selector_key}#" + mapping[key] = node_data.input_variable_selector + return mapping def _run(self) -> NodeRunResult: + assigned_variable_selector = self.node_data.assigned_variable_selector # Should be String, Number, Object, ArrayString, ArrayNumber, ArrayObject - original_variable = self.graph_runtime_state.variable_pool.get(self.node_data.assigned_variable_selector) + original_variable = self.graph_runtime_state.variable_pool.get(assigned_variable_selector) if not isinstance(original_variable, Variable): raise VariableOperatorNodeError("assigned variable not found") @@ -44,20 +104,28 @@ class VariableAssignerNode(BaseNode[VariableAssignerData]): raise VariableOperatorNodeError(f"unsupported write mode: {self.node_data.write_mode}") # Over write the variable. - self.graph_runtime_state.variable_pool.add(self.node_data.assigned_variable_selector, updated_variable) + self.graph_runtime_state.variable_pool.add(assigned_variable_selector, updated_variable) # TODO: Move database operation to the pipeline. # Update conversation variable. 
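+        # The updater is obtained from `conv_var_updater_factory` (see `__init__`),
+        # so tests can inject a fake updater instead of the database-backed
+        # `ConversationVariableUpdaterImpl`.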
conversation_id = self.graph_runtime_state.variable_pool.get(["sys", "conversation_id"]) if not conversation_id: raise VariableOperatorNodeError("conversation_id not found") - common_helpers.update_conversation_variable(conversation_id=conversation_id.text, variable=updated_variable) + conv_var_updater = self._conv_var_updater_factory() + conv_var_updater.update(conversation_id=conversation_id.text, variable=updated_variable) + conv_var_updater.flush() + updated_variables = [common_helpers.variable_to_processed_data(assigned_variable_selector, updated_variable)] return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs={ "value": income_value.to_object(), }, + # NOTE(QuantumGhost): although only one variable is updated in `v1.VariableAssignerNode`, + # we still set `output_variables` as a list to ensure the schema of output is + # compatible with `v2.VariableAssignerNode`. + process_data=common_helpers.set_updated_variables({}, updated_variables), + outputs={}, ) diff --git a/api/core/workflow/nodes/variable_assigner/v2/entities.py b/api/core/workflow/nodes/variable_assigner/v2/entities.py index 01df33b6d4..d93affcd15 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/entities.py +++ b/api/core/workflow/nodes/variable_assigner/v2/entities.py @@ -12,6 +12,12 @@ class VariableOperationItem(BaseModel): variable_selector: Sequence[str] input_type: InputType operation: Operation + # NOTE(QuantumGhost): The `value` field serves multiple purposes depending on context: + # + # 1. For CONSTANT input_type: Contains the literal value to be used in the operation. + # 2. For VARIABLE input_type: Initially contains the selector of the source variable. + # 3. During the variable updating procedure: The `value` field is reassigned to hold + # the resolved actual value that will be applied to the target variable. 
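+    # For example, a CONSTANT item may start with value=42, while a VARIABLE item
+    # starts with a selector such as ["llm", "text"] and is later overwritten with
+    # the resolved value.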
value: Any | None = None diff --git a/api/core/workflow/nodes/variable_assigner/v2/exc.py b/api/core/workflow/nodes/variable_assigner/v2/exc.py index b67af6d73c..fd6c304a9a 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/exc.py +++ b/api/core/workflow/nodes/variable_assigner/v2/exc.py @@ -29,3 +29,8 @@ class InvalidInputValueError(VariableOperatorNodeError): class ConversationIDNotFoundError(VariableOperatorNodeError): def __init__(self): super().__init__("conversation_id not found") + + +class InvalidDataError(VariableOperatorNodeError): + def __init__(self, message: str) -> None: + super().__init__(message) diff --git a/api/core/workflow/nodes/variable_assigner/v2/node.py b/api/core/workflow/nodes/variable_assigner/v2/node.py index 8759a55b34..9292da6f1c 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/node.py +++ b/api/core/workflow/nodes/variable_assigner/v2/node.py @@ -1,34 +1,84 @@ import json -from collections.abc import Sequence -from typing import Any, cast +from collections.abc import Callable, Mapping, MutableMapping, Sequence +from typing import Any, TypeAlias, cast from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import SegmentType, Variable +from core.variables.consts import MIN_SELECTORS_LENGTH from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID +from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus from core.workflow.nodes.base import BaseNode from core.workflow.nodes.enums import NodeType from core.workflow.nodes.variable_assigner.common import helpers as common_helpers from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError +from core.workflow.nodes.variable_assigner.common.impl import conversation_variable_updater_factory from . import helpers from .constants import EMPTY_VALUE_MAPPING -from .entities import VariableAssignerNodeData +from .entities import VariableAssignerNodeData, VariableOperationItem from .enums import InputType, Operation from .exc import ( ConversationIDNotFoundError, InputTypeNotSupportedError, + InvalidDataError, InvalidInputValueError, OperationNotSupportedError, VariableNotFoundError, ) +_CONV_VAR_UPDATER_FACTORY: TypeAlias = Callable[[], ConversationVariableUpdater] + + +def _target_mapping_from_item(mapping: MutableMapping[str, Sequence[str]], node_id: str, item: VariableOperationItem): + selector_node_id = item.variable_selector[0] + if selector_node_id != CONVERSATION_VARIABLE_NODE_ID: + return + selector_str = ".".join(item.variable_selector) + key = f"{node_id}.#{selector_str}#" + mapping[key] = item.variable_selector + + +def _source_mapping_from_item(mapping: MutableMapping[str, Sequence[str]], node_id: str, item: VariableOperationItem): + # Keep this in sync with the logic in _run methods... 
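+    # Only VARIABLE inputs carry a selector that must be resolved from the pool;
+    # CONSTANT inputs need no extra mapping entry, hence the early return below.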
+ if item.input_type != InputType.VARIABLE: + return + selector = item.value + if not isinstance(selector, list): + raise InvalidDataError(f"selector is not a list, {node_id=}, {item=}") + if len(selector) < MIN_SELECTORS_LENGTH: + raise InvalidDataError(f"selector too short, {node_id=}, {item=}") + selector_str = ".".join(selector) + key = f"{node_id}.#{selector_str}#" + mapping[key] = selector + class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): _node_data_cls = VariableAssignerNodeData _node_type = NodeType.VARIABLE_ASSIGNER + def _conv_var_updater_factory(self) -> ConversationVariableUpdater: + return conversation_variable_updater_factory() + + @classmethod + def version(cls) -> str: + return "2" + + @classmethod + def _extract_variable_selector_to_variable_mapping( + cls, + *, + graph_config: Mapping[str, Any], + node_id: str, + node_data: VariableAssignerNodeData, + ) -> Mapping[str, Sequence[str]]: + var_mapping: dict[str, Sequence[str]] = {} + for item in node_data.items: + _target_mapping_from_item(var_mapping, node_id, item) + _source_mapping_from_item(var_mapping, node_id, item) + return var_mapping + def _run(self) -> NodeRunResult: inputs = self.node_data.model_dump() process_data: dict[str, Any] = {} @@ -114,6 +164,7 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): # remove the duplicated items first. updated_variable_selectors = list(set(map(tuple, updated_variable_selectors))) + conv_var_updater = self._conv_var_updater_factory() # Update variables for selector in updated_variable_selectors: variable = self.graph_runtime_state.variable_pool.get(selector) @@ -128,15 +179,23 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): raise ConversationIDNotFoundError else: conversation_id = conversation_id.value - common_helpers.update_conversation_variable( + conv_var_updater.update( conversation_id=cast(str, conversation_id), variable=variable, ) + conv_var_updater.flush() + updated_variables = [ + common_helpers.variable_to_processed_data(selector, seg) + for selector in updated_variable_selectors + if (seg := self.graph_runtime_state.variable_pool.get(selector)) is not None + ] + process_data = common_helpers.set_updated_variables(process_data, updated_variables) return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=inputs, process_data=process_data, + outputs={}, ) def _handle_item( diff --git a/api/core/workflow/utils/variable_utils.py b/api/core/workflow/utils/variable_utils.py new file mode 100644 index 0000000000..868868315b --- /dev/null +++ b/api/core/workflow/utils/variable_utils.py @@ -0,0 +1,29 @@ +from core.variables.segments import ObjectSegment, Segment +from core.workflow.entities.variable_pool import VariablePool, VariableValue + + +def append_variables_recursively( + pool: VariablePool, node_id: str, variable_key_list: list[str], variable_value: VariableValue | Segment +): + """ + Append variables recursively + :param pool: variable pool to append variables to + :param node_id: node id + :param variable_key_list: variable key list + :param variable_value: variable value + :return: + """ + pool.add([node_id] + variable_key_list, variable_value) + + # if variable_value is a dict, then recursively append variables + if isinstance(variable_value, ObjectSegment): + variable_dict = variable_value.value + elif isinstance(variable_value, dict): + variable_dict = variable_value + else: + return + + for key, value in variable_dict.items(): + # construct new key list + new_key_list = variable_key_list + [key] 
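+        # Recurse so nested mappings are also addressable, e.g. an ObjectSegment
+        # {"a": {"b": 1}} stored under ["node", "obj"] also yields the selectors
+        # ["node", "obj", "a"] and ["node", "obj", "a", "b"].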
+ append_variables_recursively(pool, node_id=node_id, variable_key_list=new_key_list, variable_value=value) diff --git a/api/core/workflow/variable_loader.py b/api/core/workflow/variable_loader.py new file mode 100644 index 0000000000..1e13871d0a --- /dev/null +++ b/api/core/workflow/variable_loader.py @@ -0,0 +1,84 @@ +import abc +from collections.abc import Mapping, Sequence +from typing import Any, Protocol + +from core.variables import Variable +from core.variables.consts import MIN_SELECTORS_LENGTH +from core.workflow.entities.variable_pool import VariablePool +from core.workflow.utils import variable_utils + + +class VariableLoader(Protocol): + """Interface for loading variables based on selectors. + + A `VariableLoader` is responsible for retrieving additional variables required during the execution + of a single node, which are not provided as user inputs. + + NOTE(QuantumGhost): Typically, all variables loaded by a `VariableLoader` should belong to the same + application and share the same `app_id`. However, this interface does not enforce that constraint, + and the `app_id` parameter is intentionally omitted from `load_variables` to achieve separation of + concern and allow for flexible implementations. + + Implementations of `VariableLoader` should almost always have an `app_id` parameter in + their constructor. + + TODO(QuantumGhost): this is a temporally workaround. If we can move the creation of node instance into + `WorkflowService.single_step_run`, we may get rid of this interface. + """ + + @abc.abstractmethod + def load_variables(self, selectors: list[list[str]]) -> list[Variable]: + """Load variables based on the provided selectors. If the selectors are empty, + this method should return an empty list. + + The order of the returned variables is not guaranteed. If the caller wants to ensure + a specific order, they should sort the returned list themselves. + + :param: selectors: a list of string list, each inner list should have at least two elements: + - the first element is the node ID, + - the second element is the variable name. + :return: a list of Variable objects that match the provided selectors. + """ + pass + + +class _DummyVariableLoader(VariableLoader): + """A dummy implementation of VariableLoader that does not load any variables. + Serves as a placeholder when no variable loading is needed. + """ + + def load_variables(self, selectors: list[list[str]]) -> list[Variable]: + return [] + + +DUMMY_VARIABLE_LOADER = _DummyVariableLoader() + + +def load_into_variable_pool( + variable_loader: VariableLoader, + variable_pool: VariablePool, + variable_mapping: Mapping[str, Sequence[str]], + user_inputs: Mapping[str, Any], +): + # Loading missing variable from draft var here, and set it into + # variable_pool. + variables_to_load: list[list[str]] = [] + for key, selector in variable_mapping.items(): + # NOTE(QuantumGhost): this logic needs to be in sync with + # `WorkflowEntry.mapping_user_inputs_to_variable_pool`. + node_variable_list = key.split(".") + if len(node_variable_list) < 1: + raise ValueError(f"Invalid variable key: {key}. 
It should have at least one element.") + if key in user_inputs: + continue + node_variable_key = ".".join(node_variable_list[1:]) + if node_variable_key in user_inputs: + continue + if variable_pool.get(selector) is None: + variables_to_load.append(list(selector)) + loaded = variable_loader.load_variables(variables_to_load) + for var in loaded: + assert len(var.selector) >= MIN_SELECTORS_LENGTH, f"Invalid variable {var}" + variable_utils.append_variables_recursively( + variable_pool, node_id=var.selector[0], variable_key_list=list(var.selector[1:]), variable_value=var + ) diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py index b88f9edd03..6ee562fc8d 100644 --- a/api/core/workflow/workflow_cycle_manager.py +++ b/api/core/workflow/workflow_cycle_manager.py @@ -92,7 +92,7 @@ class WorkflowCycleManager: ) -> WorkflowExecution: workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - outputs = WorkflowEntry.handle_special_values(outputs) + # outputs = WorkflowEntry.handle_special_values(outputs) workflow_execution.status = WorkflowExecutionStatus.SUCCEEDED workflow_execution.outputs = outputs or {} @@ -125,7 +125,7 @@ class WorkflowCycleManager: trace_manager: Optional[TraceQueueManager] = None, ) -> WorkflowExecution: execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None) + # outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None) execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED execution.outputs = outputs or {} @@ -242,9 +242,9 @@ class WorkflowCycleManager: raise ValueError(f"Domain node execution not found: {event.node_execution_id}") # Process data - inputs = WorkflowEntry.handle_special_values(event.inputs) - process_data = WorkflowEntry.handle_special_values(event.process_data) - outputs = WorkflowEntry.handle_special_values(event.outputs) + inputs = event.inputs + process_data = event.process_data + outputs = event.outputs # Convert metadata keys to strings execution_metadata_dict = {} @@ -289,7 +289,7 @@ class WorkflowCycleManager: # Process data inputs = WorkflowEntry.handle_special_values(event.inputs) process_data = WorkflowEntry.handle_special_values(event.process_data) - outputs = WorkflowEntry.handle_special_values(event.outputs) + outputs = event.outputs # Convert metadata keys to strings execution_metadata_dict = {} @@ -326,7 +326,7 @@ class WorkflowCycleManager: finished_at = datetime.now(UTC).replace(tzinfo=None) elapsed_time = (finished_at - created_at).total_seconds() inputs = WorkflowEntry.handle_special_values(event.inputs) - outputs = WorkflowEntry.handle_special_values(event.outputs) + outputs = event.outputs # Convert metadata keys to strings origin_metadata = { diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 7648947fca..c0e98db3db 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -21,6 +21,7 @@ from core.workflow.nodes import NodeType from core.workflow.nodes.base import BaseNode from core.workflow.nodes.event import NodeEvent from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING +from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader, load_into_variable_pool from factories import file_factory from models.enums import UserFrom from models.workflow import ( @@ -119,7 +120,9 @@ class WorkflowEntry: workflow: Workflow, 
node_id: str, user_id: str, - user_inputs: dict, + user_inputs: Mapping[str, Any], + variable_pool: VariablePool, + variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER, ) -> tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]: """ Single step run workflow node @@ -129,29 +132,14 @@ class WorkflowEntry: :param user_inputs: user inputs :return: """ - # fetch node info from workflow graph - workflow_graph = workflow.graph_dict - if not workflow_graph: - raise ValueError("workflow graph not found") - - nodes = workflow_graph.get("nodes") - if not nodes: - raise ValueError("nodes not found in workflow graph") - - # fetch node config from node id - try: - node_config = next(filter(lambda node: node["id"] == node_id, nodes)) - except StopIteration: - raise ValueError("node id not found in workflow graph") + node_config = workflow.get_node_config_by_id(node_id) + node_config_data = node_config.get("data", {}) # Get node class - node_type = NodeType(node_config.get("data", {}).get("type")) - node_version = node_config.get("data", {}).get("version", "1") + node_type = NodeType(node_config_data.get("type")) + node_version = node_config_data.get("version", "1") node_cls = NODE_TYPE_CLASSES_MAPPING[node_type][node_version] - # init variable pool - variable_pool = VariablePool(environment_variables=workflow.environment_variables) - # init graph graph = Graph.init(graph_config=workflow.graph_dict) @@ -182,16 +170,33 @@ class WorkflowEntry: except NotImplementedError: variable_mapping = {} + # Loading missing variable from draft var here, and set it into + # variable_pool. + load_into_variable_pool( + variable_loader=variable_loader, + variable_pool=variable_pool, + variable_mapping=variable_mapping, + user_inputs=user_inputs, + ) + cls.mapping_user_inputs_to_variable_pool( variable_mapping=variable_mapping, user_inputs=user_inputs, variable_pool=variable_pool, tenant_id=workflow.tenant_id, ) + try: # run node generator = node_instance.run() except Exception as e: + logger.exception( + "error while running node_instance, workflow_id=%s, node_id=%s, type=%s, version=%s", + workflow.id, + node_instance.id, + node_instance.node_type, + node_instance.version(), + ) raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) return node_instance, generator @@ -294,10 +299,20 @@ class WorkflowEntry: return node_instance, generator except Exception as e: + logger.exception( + "error while running node_instance, node_id=%s, type=%s, version=%s", + node_instance.id, + node_instance.node_type, + node_instance.version(), + ) raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) @staticmethod def handle_special_values(value: Optional[Mapping[str, Any]]) -> Mapping[str, Any] | None: + # NOTE(QuantumGhost): Avoid using this function in new code. + # Keep values structured as long as possible and only convert to dict + # immediately before serialization (e.g., JSON serialization) to maintain + # data integrity and type information. result = WorkflowEntry._handle_special_values(value) return result if isinstance(result, Mapping) or result is None else dict(result) @@ -324,10 +339,17 @@ class WorkflowEntry: cls, *, variable_mapping: Mapping[str, Sequence[str]], - user_inputs: dict, + user_inputs: Mapping[str, Any], variable_pool: VariablePool, tenant_id: str, ) -> None: + # NOTE(QuantumGhost): This logic should remain synchronized with + # the implementation of `load_into_variable_pool`, specifically the logic about + # variable existence checking. 
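A `VariableLoader` implementation resolves selectors that are missing from the variable pool during single-step runs. A rough sketch of an in-memory loader and how it might be passed to `single_step_run` follows; the `workflow`, `node_id`, `user_id`, and `draft_variables` names are assumptions for the example, not part of this change:

from core.variables import Variable
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.variable_loader import VariableLoader
from core.workflow.workflow_entry import WorkflowEntry


class InMemoryVariableLoader(VariableLoader):
    """Illustrative loader that resolves selectors from a pre-built list."""

    def __init__(self, variables: list[Variable]) -> None:
        # Index variables by their (node_id, name, ...) selector for O(1) lookup.
        self._by_selector = {tuple(v.selector): v for v in variables}

    def load_variables(self, selectors: list[list[str]]) -> list[Variable]:
        found: list[Variable] = []
        for selector in selectors:
            variable = self._by_selector.get(tuple(selector))
            if variable is not None:
                found.append(variable)
        return found


node_instance, generator = WorkflowEntry.single_step_run(
    workflow=workflow,
    node_id=node_id,
    user_id=user_id,
    user_inputs={},
    variable_pool=VariablePool(environment_variables=workflow.environment_variables),
    variable_loader=InMemoryVariableLoader(draft_variables),
)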
+ + # WARNING(QuantumGhost): The semantics of this method are not clearly defined, + # and multiple parts of the codebase depend on its current behavior. + # Modify with caution. for node_variable, variable_selector in variable_mapping.items(): # fetch node id and variable key from node_variable node_variable_list = node_variable.split(".") diff --git a/api/core/workflow/workflow_type_encoder.py b/api/core/workflow/workflow_type_encoder.py new file mode 100644 index 0000000000..0123fdac18 --- /dev/null +++ b/api/core/workflow/workflow_type_encoder.py @@ -0,0 +1,49 @@ +import json +from collections.abc import Mapping +from typing import Any + +from pydantic import BaseModel + +from core.file.models import File +from core.variables import Segment + + +class WorkflowRuntimeTypeEncoder(json.JSONEncoder): + def default(self, o: Any): + if isinstance(o, Segment): + return o.value + elif isinstance(o, File): + return o.to_dict() + elif isinstance(o, BaseModel): + return o.model_dump(mode="json") + else: + return super().default(o) + + +class WorkflowRuntimeTypeConverter: + def to_json_encodable(self, value: Mapping[str, Any] | None) -> Mapping[str, Any] | None: + result = self._to_json_encodable_recursive(value) + return result if isinstance(result, Mapping) or result is None else dict(result) + + def _to_json_encodable_recursive(self, value: Any) -> Any: + if value is None: + return value + if isinstance(value, (bool, int, str, float)): + return value + if isinstance(value, Segment): + return self._to_json_encodable_recursive(value.value) + if isinstance(value, File): + return value.to_dict() + if isinstance(value, BaseModel): + return value.model_dump(mode="json") + if isinstance(value, dict): + res = {} + for k, v in value.items(): + res[k] = self._to_json_encodable_recursive(v) + return res + if isinstance(value, list): + res_list = [] + for item in value: + res_list.append(self._to_json_encodable_recursive(item)) + return res_list + return value diff --git a/api/events/event_handlers/__init__.py b/api/events/event_handlers/__init__.py index 1d6ad35333..ebc55d5ef8 100644 --- a/api/events/event_handlers/__init__.py +++ b/api/events/event_handlers/__init__.py @@ -3,8 +3,10 @@ from .clean_when_document_deleted import handle from .create_document_index import handle from .create_installed_app_when_app_created import handle from .create_site_record_when_app_created import handle -from .deduct_quota_when_message_created import handle from .delete_tool_parameters_cache_when_sync_draft_workflow import handle from .update_app_dataset_join_when_app_model_config_updated import handle from .update_app_dataset_join_when_app_published_workflow_updated import handle -from .update_provider_last_used_at_when_message_created import handle + +# Consolidated handler replaces both deduct_quota_when_message_created and +# update_provider_last_used_at_when_message_created +from .update_provider_when_message_created import handle diff --git a/api/events/event_handlers/deduct_quota_when_message_created.py b/api/events/event_handlers/deduct_quota_when_message_created.py deleted file mode 100644 index b8e7019446..0000000000 --- a/api/events/event_handlers/deduct_quota_when_message_created.py +++ /dev/null @@ -1,65 +0,0 @@ -from datetime import UTC, datetime - -from configs import dify_config -from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity -from core.entities.provider_entities import QuotaUnit -from core.plugin.entities.plugin import ModelProviderID -from 
events.message_event import message_was_created -from extensions.ext_database import db -from models.provider import Provider, ProviderType - - -@message_was_created.connect -def handle(sender, **kwargs): - message = sender - application_generate_entity = kwargs.get("application_generate_entity") - - if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity): - return - - model_config = application_generate_entity.model_conf - provider_model_bundle = model_config.provider_model_bundle - provider_configuration = provider_model_bundle.configuration - - if provider_configuration.using_provider_type != ProviderType.SYSTEM: - return - - system_configuration = provider_configuration.system_configuration - - if not system_configuration.current_quota_type: - return - - quota_unit = None - for quota_configuration in system_configuration.quota_configurations: - if quota_configuration.quota_type == system_configuration.current_quota_type: - quota_unit = quota_configuration.quota_unit - - if quota_configuration.quota_limit == -1: - return - - break - - used_quota = None - if quota_unit: - if quota_unit == QuotaUnit.TOKENS: - used_quota = message.message_tokens + message.answer_tokens - elif quota_unit == QuotaUnit.CREDITS: - used_quota = dify_config.get_model_credits(model_config.model) - else: - used_quota = 1 - - if used_quota is not None and system_configuration.current_quota_type is not None: - db.session.query(Provider).filter( - Provider.tenant_id == application_generate_entity.app_config.tenant_id, - # TODO: Use provider name with prefix after the data migration. - Provider.provider_name == ModelProviderID(model_config.provider).provider_name, - Provider.provider_type == ProviderType.SYSTEM.value, - Provider.quota_type == system_configuration.current_quota_type.value, - Provider.quota_limit > Provider.quota_used, - ).update( - { - "quota_used": Provider.quota_used + used_quota, - "last_used": datetime.now(tz=UTC).replace(tzinfo=None), - } - ) - db.session.commit() diff --git a/api/events/event_handlers/update_provider_last_used_at_when_message_created.py b/api/events/event_handlers/update_provider_last_used_at_when_message_created.py deleted file mode 100644 index 59412cf87c..0000000000 --- a/api/events/event_handlers/update_provider_last_used_at_when_message_created.py +++ /dev/null @@ -1,20 +0,0 @@ -from datetime import UTC, datetime - -from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity -from events.message_event import message_was_created -from extensions.ext_database import db -from models.provider import Provider - - -@message_was_created.connect -def handle(sender, **kwargs): - application_generate_entity = kwargs.get("application_generate_entity") - - if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity): - return - - db.session.query(Provider).filter( - Provider.tenant_id == application_generate_entity.app_config.tenant_id, - Provider.provider_name == application_generate_entity.model_conf.provider, - ).update({"last_used": datetime.now(UTC).replace(tzinfo=None)}) - db.session.commit() diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py new file mode 100644 index 0000000000..d3943f2eda --- /dev/null +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -0,0 +1,234 @@ +import logging +import time as time_module +from datetime import datetime 
+from typing import Any, Optional + +from pydantic import BaseModel +from sqlalchemy import update +from sqlalchemy.orm import Session + +from configs import dify_config +from core.app.entities.app_invoke_entities import AgentChatAppGenerateEntity, ChatAppGenerateEntity +from core.entities.provider_entities import QuotaUnit, SystemConfiguration +from core.plugin.entities.plugin import ModelProviderID +from events.message_event import message_was_created +from extensions.ext_database import db +from libs import datetime_utils +from models.model import Message +from models.provider import Provider, ProviderType + +logger = logging.getLogger(__name__) + + +class _ProviderUpdateFilters(BaseModel): + """Filters for identifying Provider records to update.""" + + tenant_id: str + provider_name: str + provider_type: Optional[str] = None + quota_type: Optional[str] = None + + +class _ProviderUpdateAdditionalFilters(BaseModel): + """Additional filters for Provider updates.""" + + quota_limit_check: bool = False + + +class _ProviderUpdateValues(BaseModel): + """Values to update in Provider records.""" + + last_used: Optional[datetime] = None + quota_used: Optional[Any] = None # Can be Provider.quota_used + int expression + + +class _ProviderUpdateOperation(BaseModel): + """A single Provider update operation.""" + + filters: _ProviderUpdateFilters + values: _ProviderUpdateValues + additional_filters: _ProviderUpdateAdditionalFilters = _ProviderUpdateAdditionalFilters() + description: str = "unknown" + + +@message_was_created.connect +def handle(sender: Message, **kwargs): + """ + Consolidated handler for Provider updates when a message is created. + + This handler replaces both: + - update_provider_last_used_at_when_message_created + - deduct_quota_when_message_created + + By performing all Provider updates in a single transaction, we ensure + consistency and efficiency when updating Provider records. + """ + message = sender + application_generate_entity = kwargs.get("application_generate_entity") + + if not isinstance(application_generate_entity, ChatAppGenerateEntity | AgentChatAppGenerateEntity): + return + + tenant_id = application_generate_entity.app_config.tenant_id + provider_name = application_generate_entity.model_conf.provider + current_time = datetime_utils.naive_utc_now() + + # Prepare updates for both scenarios + updates_to_perform: list[_ProviderUpdateOperation] = [] + + # 1. Always update last_used for the provider + basic_update = _ProviderUpdateOperation( + filters=_ProviderUpdateFilters( + tenant_id=tenant_id, + provider_name=provider_name, + ), + values=_ProviderUpdateValues(last_used=current_time), + description="basic_last_used_update", + ) + updates_to_perform.append(basic_update) + + # 2. 
Check if we need to deduct quota (system provider only) + model_config = application_generate_entity.model_conf + provider_model_bundle = model_config.provider_model_bundle + provider_configuration = provider_model_bundle.configuration + + if ( + provider_configuration.using_provider_type == ProviderType.SYSTEM + and provider_configuration.system_configuration + and provider_configuration.system_configuration.current_quota_type is not None + ): + system_configuration = provider_configuration.system_configuration + + # Calculate quota usage + used_quota = _calculate_quota_usage( + message=message, + system_configuration=system_configuration, + model_name=model_config.model, + ) + + if used_quota is not None: + quota_update = _ProviderUpdateOperation( + filters=_ProviderUpdateFilters( + tenant_id=tenant_id, + provider_name=ModelProviderID(model_config.provider).provider_name, + provider_type=ProviderType.SYSTEM.value, + quota_type=provider_configuration.system_configuration.current_quota_type.value, + ), + values=_ProviderUpdateValues(quota_used=Provider.quota_used + used_quota, last_used=current_time), + additional_filters=_ProviderUpdateAdditionalFilters( + quota_limit_check=True # Provider.quota_limit > Provider.quota_used + ), + description="quota_deduction_update", + ) + updates_to_perform.append(quota_update) + + # Execute all updates + start_time = time_module.perf_counter() + try: + _execute_provider_updates(updates_to_perform) + + # Log successful completion with timing + duration = time_module.perf_counter() - start_time + + logger.info( + f"Provider updates completed successfully. " + f"Updates: {len(updates_to_perform)}, Duration: {duration:.3f}s, " + f"Tenant: {tenant_id}, Provider: {provider_name}" + ) + + except Exception as e: + # Log failure with timing and context + duration = time_module.perf_counter() - start_time + + logger.exception( + f"Provider updates failed after {duration:.3f}s. 
" + f"Updates: {len(updates_to_perform)}, Tenant: {tenant_id}, " + f"Provider: {provider_name}" + ) + raise + + +def _calculate_quota_usage( + *, message: Message, system_configuration: SystemConfiguration, model_name: str +) -> Optional[int]: + """Calculate quota usage based on message tokens and quota type.""" + quota_unit = None + for quota_configuration in system_configuration.quota_configurations: + if quota_configuration.quota_type == system_configuration.current_quota_type: + quota_unit = quota_configuration.quota_unit + if quota_configuration.quota_limit == -1: + return None + break + if quota_unit is None: + return None + + try: + if quota_unit == QuotaUnit.TOKENS: + tokens = message.message_tokens + message.answer_tokens + return tokens + if quota_unit == QuotaUnit.CREDITS: + tokens = dify_config.get_model_credits(model_name) + return tokens + elif quota_unit == QuotaUnit.TIMES: + return 1 + return None + except Exception as e: + logger.exception("Failed to calculate quota usage") + return None + + +def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation]): + """Execute all Provider updates in a single transaction.""" + if not updates_to_perform: + return + + # Use SQLAlchemy's context manager for transaction management + # This automatically handles commit/rollback + with Session(db.engine) as session: + # Use a single transaction for all updates + for update_operation in updates_to_perform: + filters = update_operation.filters + values = update_operation.values + additional_filters = update_operation.additional_filters + description = update_operation.description + + # Build the where conditions + where_conditions = [ + Provider.tenant_id == filters.tenant_id, + Provider.provider_name == filters.provider_name, + ] + + # Add additional filters if specified + if filters.provider_type is not None: + where_conditions.append(Provider.provider_type == filters.provider_type) + if filters.quota_type is not None: + where_conditions.append(Provider.quota_type == filters.quota_type) + if additional_filters.quota_limit_check: + where_conditions.append(Provider.quota_limit > Provider.quota_used) + + # Prepare values dict for SQLAlchemy update + update_values = {} + if values.last_used is not None: + update_values["last_used"] = values.last_used + if values.quota_used is not None: + update_values["quota_used"] = values.quota_used + + # Build and execute the update statement + stmt = update(Provider).where(*where_conditions).values(**update_values) + result = session.execute(stmt) + rows_affected = result.rowcount + + logger.debug( + f"Provider update ({description}): {rows_affected} rows affected. " + f"Filters: {filters.model_dump()}, Values: {update_values}" + ) + + # If no rows were affected for quota updates, log a warning + if rows_affected == 0 and description == "quota_deduction_update": + logger.warning( + f"No Provider rows updated for quota deduction. " + f"This may indicate quota limit exceeded or provider not found. 
" + f"Filters: {filters.model_dump()}" + ) + + logger.debug(f"Successfully processed {len(updates_to_perform)} Provider updates") diff --git a/api/extensions/ext_mail.py b/api/extensions/ext_mail.py index 84bc12eca0..df5d8a9c11 100644 --- a/api/extensions/ext_mail.py +++ b/api/extensions/ext_mail.py @@ -54,6 +54,15 @@ class Mail: use_tls=dify_config.SMTP_USE_TLS, opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS, ) + case "sendgrid": + from libs.sendgrid import SendGridClient + + if not dify_config.SENDGRID_API_KEY: + raise ValueError("SENDGRID_API_KEY is required for SendGrid mail type") + + self._client = SendGridClient( + sendgrid_api_key=dify_config.SENDGRID_API_KEY, _from=dify_config.MAIL_DEFAULT_SEND_FROM or "" + ) case _: raise ValueError("Unsupported mail type {}".format(mail_type)) diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py index 52f119936f..25d1390492 100644 --- a/api/factories/file_factory.py +++ b/api/factories/file_factory.py @@ -5,6 +5,7 @@ from typing import Any, cast import httpx from sqlalchemy import select +from sqlalchemy.orm import Session from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from core.file import File, FileBelongsTo, FileTransferMethod, FileType, FileUploadConfig, helpers @@ -91,6 +92,8 @@ def build_from_mappings( tenant_id: str, strict_type_validation: bool = False, ) -> Sequence[File]: + # TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query. + # Implement batch processing to reduce database load when handling multiple files. files = [ build_from_mapping( mapping=mapping, @@ -377,3 +380,75 @@ def _get_file_type_by_mimetype(mime_type: str) -> FileType | None: def get_file_type_by_mime_type(mime_type: str) -> FileType: return _get_file_type_by_mimetype(mime_type) or FileType.CUSTOM + + +class StorageKeyLoader: + """FileKeyLoader load the storage key from database for a list of files. + This loader is batched, the database query count is constant regardless of the input size. + """ + + def __init__(self, session: Session, tenant_id: str) -> None: + self._session = session + self._tenant_id = tenant_id + + def _load_upload_files(self, upload_file_ids: Sequence[uuid.UUID]) -> Mapping[uuid.UUID, UploadFile]: + stmt = select(UploadFile).where( + UploadFile.id.in_(upload_file_ids), + UploadFile.tenant_id == self._tenant_id, + ) + + return {uuid.UUID(i.id): i for i in self._session.scalars(stmt)} + + def _load_tool_files(self, tool_file_ids: Sequence[uuid.UUID]) -> Mapping[uuid.UUID, ToolFile]: + stmt = select(ToolFile).where( + ToolFile.id.in_(tool_file_ids), + ToolFile.tenant_id == self._tenant_id, + ) + return {uuid.UUID(i.id): i for i in self._session.scalars(stmt)} + + def load_storage_keys(self, files: Sequence[File]): + """Loads storage keys for a sequence of files by retrieving the corresponding + `UploadFile` or `ToolFile` records from the database based on their transfer method. + + This method doesn't modify the input sequence structure but updates the `_storage_key` + property of each file object by extracting the relevant key from its database record. + + Performance note: This is a batched operation where database query count remains constant + regardless of input size. However, for optimal performance, input sequences should contain + fewer than 1000 files. For larger collections, split into smaller batches and process each + batch separately. 
+ """ + + upload_file_ids: list[uuid.UUID] = [] + tool_file_ids: list[uuid.UUID] = [] + for file in files: + related_model_id = file.related_id + if file.related_id is None: + raise ValueError("file id should not be None.") + if file.tenant_id != self._tenant_id: + err_msg = ( + f"invalid file, expected tenant_id={self._tenant_id}, " + f"got tenant_id={file.tenant_id}, file_id={file.id}, related_model_id={related_model_id}" + ) + raise ValueError(err_msg) + model_id = uuid.UUID(related_model_id) + + if file.transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL): + upload_file_ids.append(model_id) + elif file.transfer_method == FileTransferMethod.TOOL_FILE: + tool_file_ids.append(model_id) + + tool_files = self._load_tool_files(tool_file_ids) + upload_files = self._load_upload_files(upload_file_ids) + for file in files: + model_id = uuid.UUID(file.related_id) + if file.transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.REMOTE_URL): + upload_file_row = upload_files.get(model_id) + if upload_file_row is None: + raise ValueError(f"Upload file not found for id: {model_id}") + file._storage_key = upload_file_row.key + elif file.transfer_method == FileTransferMethod.TOOL_FILE: + tool_file_row = tool_files.get(model_id) + if tool_file_row is None: + raise ValueError(f"Tool file not found for id: {model_id}") + file._storage_key = tool_file_row.file_key diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index a41ef4ae4e..250ee4695e 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -43,6 +43,10 @@ class UnsupportedSegmentTypeError(Exception): pass +class TypeMismatchError(Exception): + pass + + # Define the constant SEGMENT_TO_VARIABLE_MAP = { StringSegment: StringVariable, @@ -110,6 +114,10 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen return cast(Variable, result) +def infer_segment_type_from_value(value: Any, /) -> SegmentType: + return build_segment(value).value_type + + def build_segment(value: Any, /) -> Segment: if value is None: return NoneSegment() @@ -140,10 +148,80 @@ def build_segment(value: Any, /) -> Segment: case SegmentType.NONE: return ArrayAnySegment(value=value) case _: + # This should be unreachable. raise ValueError(f"not supported value {value}") raise ValueError(f"not supported value {value}") +def build_segment_with_type(segment_type: SegmentType, value: Any) -> Segment: + """ + Build a segment with explicit type checking. + + This function creates a segment from a value while enforcing type compatibility + with the specified segment_type. It provides stricter type validation compared + to the standard build_segment function. 
+ + Args: + segment_type: The expected SegmentType for the resulting segment + value: The value to be converted into a segment + + Returns: + Segment: A segment instance of the appropriate type + + Raises: + TypeMismatchError: If the value type doesn't match the expected segment_type + + Special Cases: + - For empty list [] values, if segment_type is array[*], returns the corresponding array type + - Type validation is performed before segment creation + + Examples: + >>> build_segment_with_type(SegmentType.STRING, "hello") + StringSegment(value="hello") + + >>> build_segment_with_type(SegmentType.ARRAY_STRING, []) + ArrayStringSegment(value=[]) + + >>> build_segment_with_type(SegmentType.STRING, 123) + # Raises TypeMismatchError + """ + # Handle None values + if value is None: + if segment_type == SegmentType.NONE: + return NoneSegment() + else: + raise TypeMismatchError(f"Expected {segment_type}, but got None") + + # Handle empty list special case for array types + if isinstance(value, list) and len(value) == 0: + if segment_type == SegmentType.ARRAY_ANY: + return ArrayAnySegment(value=value) + elif segment_type == SegmentType.ARRAY_STRING: + return ArrayStringSegment(value=value) + elif segment_type == SegmentType.ARRAY_NUMBER: + return ArrayNumberSegment(value=value) + elif segment_type == SegmentType.ARRAY_OBJECT: + return ArrayObjectSegment(value=value) + elif segment_type == SegmentType.ARRAY_FILE: + return ArrayFileSegment(value=value) + else: + raise TypeMismatchError(f"Expected {segment_type}, but got empty list") + + # Build segment using existing logic to infer actual type + inferred_segment = build_segment(value) + inferred_type = inferred_segment.value_type + + # Type compatibility checking + if inferred_type == segment_type: + return inferred_segment + + # Type mismatch - raise error with descriptive message + raise TypeMismatchError( + f"Type mismatch: expected {segment_type}, but value '{value}' " + f"(type: {type(value).__name__}) corresponds to {inferred_type}" + ) + + def segment_to_variable( *, segment: Segment, diff --git a/api/fields/workflow_run_fields.py b/api/fields/workflow_run_fields.py index 74fdf8bd97..a106728e9c 100644 --- a/api/fields/workflow_run_fields.py +++ b/api/fields/workflow_run_fields.py @@ -19,7 +19,6 @@ workflow_run_for_log_fields = { workflow_run_for_list_fields = { "id": fields.String, - "sequence_number": fields.Integer, "version": fields.String, "status": fields.String, "elapsed_time": fields.Float, @@ -36,7 +35,6 @@ advanced_chat_workflow_run_for_list_fields = { "id": fields.String, "conversation_id": fields.String, "message_id": fields.String, - "sequence_number": fields.Integer, "version": fields.String, "status": fields.String, "elapsed_time": fields.Float, @@ -63,7 +61,6 @@ workflow_run_pagination_fields = { workflow_run_detail_fields = { "id": fields.String, - "sequence_number": fields.Integer, "version": fields.String, "graph": fields.Raw(attribute="graph_dict"), "inputs": fields.Raw(attribute="inputs_dict"), diff --git a/api/libs/datetime_utils.py b/api/libs/datetime_utils.py new file mode 100644 index 0000000000..e576a34629 --- /dev/null +++ b/api/libs/datetime_utils.py @@ -0,0 +1,22 @@ +import abc +import datetime +from typing import Protocol + + +class _NowFunction(Protocol): + @abc.abstractmethod + def __call__(self, tz: datetime.timezone | None) -> datetime.datetime: + pass + + +# _now_func is a callable with the _NowFunction signature. 
+# Its sole purpose is to abstract time retrieval, enabling +# developers to mock this behavior in tests and time-dependent scenarios. +_now_func: _NowFunction = datetime.datetime.now + + +def naive_utc_now() -> datetime.datetime: + """Return a naive datetime object (without timezone information) + representing current UTC time. + """ + return _now_func(datetime.UTC).replace(tzinfo=None) diff --git a/api/libs/jsonutil.py b/api/libs/jsonutil.py new file mode 100644 index 0000000000..fa29671034 --- /dev/null +++ b/api/libs/jsonutil.py @@ -0,0 +1,11 @@ +import json + +from pydantic import BaseModel + + +class PydanticModelEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, BaseModel): + return o.model_dump() + else: + super().default(o) diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py new file mode 100644 index 0000000000..5409e3eeeb --- /dev/null +++ b/api/libs/sendgrid.py @@ -0,0 +1,45 @@ +import logging + +import sendgrid # type: ignore +from python_http_client.exceptions import ForbiddenError, UnauthorizedError +from sendgrid.helpers.mail import Content, Email, Mail, To # type: ignore + + +class SendGridClient: + def __init__(self, sendgrid_api_key: str, _from: str): + self.sendgrid_api_key = sendgrid_api_key + self._from = _from + + def send(self, mail: dict): + logging.debug("Sending email with SendGrid") + + try: + _to = mail["to"] + + if not _to: + raise ValueError("SendGridClient: Cannot send email: recipient address is missing.") + + sg = sendgrid.SendGridAPIClient(api_key=self.sendgrid_api_key) + from_email = Email(self._from) + to_email = To(_to) + subject = mail["subject"] + content = Content("text/html", mail["html"]) + mail = Mail(from_email, to_email, subject, content) + mail_json = mail.get() # type: ignore + response = sg.client.mail.send.post(request_body=mail_json) + logging.debug(response.status_code) + logging.debug(response.body) + logging.debug(response.headers) + + except TimeoutError as e: + logging.exception("SendGridClient Timeout occurred while sending email") + raise + except (UnauthorizedError, ForbiddenError) as e: + logging.exception( + "SendGridClient Authentication failed. 
" + "Verify that your credentials and the 'from' email address are correct" + ) + raise + except Exception as e: + logging.exception(f"SendGridClient Unexpected error occurred while sending email to {_to}") + raise diff --git a/api/libs/smtp.py b/api/libs/smtp.py index 35561f071c..b94386660e 100644 --- a/api/libs/smtp.py +++ b/api/libs/smtp.py @@ -22,7 +22,11 @@ class SMTPClient: if self.use_tls: if self.opportunistic_tls: smtp = smtplib.SMTP(self.server, self.port, timeout=10) + # Send EHLO command with the HELO domain name as the server address + smtp.ehlo(self.server) smtp.starttls() + # Resend EHLO command to identify the TLS session + smtp.ehlo(self.server) else: smtp = smtplib.SMTP_SSL(self.server, self.port, timeout=10) else: diff --git a/api/migrations/versions/2025_06_19_1633-0ab65e1cc7fa_remove_sequence_number_from_workflow_.py b/api/migrations/versions/2025_06_19_1633-0ab65e1cc7fa_remove_sequence_number_from_workflow_.py new file mode 100644 index 0000000000..29fef77798 --- /dev/null +++ b/api/migrations/versions/2025_06_19_1633-0ab65e1cc7fa_remove_sequence_number_from_workflow_.py @@ -0,0 +1,66 @@ +"""remove sequence_number from workflow_runs + +Revision ID: 0ab65e1cc7fa +Revises: 4474872b0ee6 +Create Date: 2025-06-19 16:33:13.377215 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '0ab65e1cc7fa' +down_revision = '4474872b0ee6' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('workflow_runs', schema=None) as batch_op: + batch_op.drop_index(batch_op.f('workflow_run_tenant_app_sequence_idx')) + batch_op.drop_column('sequence_number') + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + + # WARNING: This downgrade CANNOT recover the original sequence_number values! + # The original sequence numbers are permanently lost after the upgrade. + # This downgrade will regenerate sequence numbers based on created_at order, + # which may result in different values than the original sequence numbers. + # + # If you need to preserve original sequence numbers, use the alternative + # migration approach that creates a backup table before removal. 
+ + # Step 1: Add sequence_number column as nullable first + with op.batch_alter_table('workflow_runs', schema=None) as batch_op: + batch_op.add_column(sa.Column('sequence_number', sa.INTEGER(), autoincrement=False, nullable=True)) + + # Step 2: Populate sequence_number values based on created_at order within each app + # NOTE: This recreates sequence numbering logic but values will be different + # from the original sequence numbers that were removed in the upgrade + connection = op.get_bind() + connection.execute(sa.text(""" + UPDATE workflow_runs + SET sequence_number = subquery.row_num + FROM ( + SELECT id, ROW_NUMBER() OVER ( + PARTITION BY tenant_id, app_id + ORDER BY created_at, id + ) as row_num + FROM workflow_runs + ) subquery + WHERE workflow_runs.id = subquery.id + """)) + + # Step 3: Make the column NOT NULL and add the index + with op.batch_alter_table('workflow_runs', schema=None) as batch_op: + batch_op.alter_column('sequence_number', nullable=False) + batch_op.create_index(batch_op.f('workflow_run_tenant_app_sequence_idx'), ['tenant_id', 'app_id', 'sequence_number'], unique=False) + + # ### end Alembic commands ### diff --git a/api/models/_workflow_exc.py b/api/models/_workflow_exc.py new file mode 100644 index 0000000000..f6271bda47 --- /dev/null +++ b/api/models/_workflow_exc.py @@ -0,0 +1,20 @@ +"""All these exceptions are not meant to be caught by callers.""" + + +class WorkflowDataError(Exception): + """Base class for all workflow data related exceptions. + + This should be used to indicate issues with workflow data integrity, such as + no `graph` configuration, missing `nodes` field in `graph` configuration, or + similar issues. + """ + + pass + + +class NodeNotFoundError(WorkflowDataError): + """Raised when a node with the specified ID is not found in the workflow.""" + + def __init__(self, node_id: str): + super().__init__(f"Node with ID '{node_id}' not found in the workflow.") + self.node_id = node_id diff --git a/api/models/model.py b/api/models/model.py index 229e77134e..ce5f449f87 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -10,7 +10,6 @@ from core.plugin.entities.plugin import GenericProviderID from core.tools.entities.tool_entities import ToolProviderType from core.tools.signature import sign_tool_file from core.workflow.entities.workflow_execution import WorkflowExecutionStatus -from services.plugin.plugin_service import PluginService if TYPE_CHECKING: from models.workflow import Workflow @@ -169,6 +168,7 @@ class App(Base): @property def deleted_tools(self) -> list: from core.tools.tool_manager import ToolManager + from services.plugin.plugin_service import PluginService # get agent mode tools app_model_config = self.app_model_config @@ -611,6 +611,14 @@ class InstalledApp(Base): return tenant +class ConversationSource(StrEnum): + """This enumeration is designed for use with `Conversation.from_source`.""" + + # NOTE(QuantumGhost): The enumeration members may not cover all possible cases. + API = "api" + CONSOLE = "console" + + class Conversation(Base): __tablename__ = "conversations" __table_args__ = ( @@ -632,7 +640,14 @@ class Conversation(Base): system_instruction = db.Column(db.Text) system_instruction_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) status = db.Column(db.String(255), nullable=False) + + # The `invoke_from` records how the conversation is created. + # + # Its value corresponds to the members of `InvokeFrom`. 
+ # (api/core/app/entities/app_invoke_entities.py) invoke_from = db.Column(db.String(255), nullable=True) + + # ref: ConversationSource. from_source = db.Column(db.String(255), nullable=False) from_end_user_id = db.Column(StringUUID) from_account_id = db.Column(StringUUID) @@ -703,7 +718,6 @@ class Conversation(Base): if "model" in override_model_configs: app_model_config = AppModelConfig() app_model_config = app_model_config.from_model_config_dict(override_model_configs) - assert app_model_config is not None, "app model config not found" model_config = app_model_config.to_dict() else: model_config["configs"] = override_model_configs @@ -817,7 +831,12 @@ class Conversation(Base): @property def first_message(self): - return db.session.query(Message).filter(Message.conversation_id == self.id).first() + return ( + db.session.query(Message) + .filter(Message.conversation_id == self.id) + .order_by(Message.created_at.asc()) + .first() + ) @property def app(self): @@ -894,11 +913,11 @@ class Message(Base): _inputs: Mapped[dict] = mapped_column("inputs", db.JSON) query: Mapped[str] = db.Column(db.Text, nullable=False) message = db.Column(db.JSON, nullable=False) - message_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) + message_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, server_default=db.text("0")) message_unit_price = db.Column(db.Numeric(10, 4), nullable=False) message_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) answer: Mapped[str] = db.Column(db.Text, nullable=False) - answer_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) + answer_tokens: Mapped[int] = db.Column(db.Integer, nullable=False, server_default=db.text("0")) answer_unit_price = db.Column(db.Numeric(10, 4), nullable=False) answer_price_unit = db.Column(db.Numeric(10, 7), nullable=False, server_default=db.text("0.001")) parent_message_id = db.Column(StringUUID, nullable=True) diff --git a/api/models/workflow.py b/api/models/workflow.py index 2fff045543..7f01135af3 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -7,10 +7,16 @@ from typing import TYPE_CHECKING, Any, Optional, Union from uuid import uuid4 from flask_login import current_user +from sqlalchemy import orm +from core.file.constants import maybe_file_object +from core.file.models import File from core.variables import utils as variable_utils from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID -from factories.variable_factory import build_segment +from core.workflow.nodes.enums import NodeType +from factories.variable_factory import TypeMismatchError, build_segment_with_type + +from ._workflow_exc import NodeNotFoundError, WorkflowDataError if TYPE_CHECKING: from models.model import AppMode @@ -72,6 +78,10 @@ class WorkflowType(Enum): return cls.WORKFLOW if app_mode == AppMode.WORKFLOW else cls.CHAT +class _InvalidGraphDefinitionError(Exception): + pass + + class Workflow(Base): """ Workflow, for `Workflow App` and `Chat App workflow mode`. @@ -136,6 +146,8 @@ class Workflow(Base): "conversation_variables", db.Text, nullable=False, server_default="{}" ) + VERSION_DRAFT = "draft" + @classmethod def new( cls, @@ -179,8 +191,72 @@ class Workflow(Base): @property def graph_dict(self) -> Mapping[str, Any]: + # TODO(QuantumGhost): Consider caching `graph_dict` to avoid repeated JSON decoding. 
+ # + # Using `functools.cached_property` could help, but some code in the codebase may + # modify the returned dict, which can cause issues elsewhere. + # + # For example, changing this property to a cached property led to errors like the + # following when single stepping an `Iteration` node: + # + # Root node id 1748401971780start not found in the graph + # + # There is currently no standard way to make a dict deeply immutable in Python, + # and tracking modifications to the returned dict is difficult. For now, we leave + # the code as-is to avoid these issues. + # + # Currently, the following functions / methods would mutate the returned dict: + # + # - `_get_graph_and_variable_pool_of_single_iteration`. + # - `_get_graph_and_variable_pool_of_single_loop`. return json.loads(self.graph) if self.graph else {} + def get_node_config_by_id(self, node_id: str) -> Mapping[str, Any]: + """Extract a node configuration from the workflow graph by node ID. + A node configuration is a dictionary containing the node's properties, including + the node's id, title, and its data as a dict. + """ + workflow_graph = self.graph_dict + + if not workflow_graph: + raise WorkflowDataError(f"workflow graph not found, workflow_id={self.id}") + + nodes = workflow_graph.get("nodes") + if not nodes: + raise WorkflowDataError("nodes not found in workflow graph") + + try: + node_config = next(filter(lambda node: node["id"] == node_id, nodes)) + except StopIteration: + raise NodeNotFoundError(node_id) + assert isinstance(node_config, dict) + return node_config + + @staticmethod + def get_node_type_from_node_config(node_config: Mapping[str, Any]) -> NodeType: + """Extract type of a node from the node configuration returned by `get_node_config_by_id`.""" + node_config_data = node_config.get("data", {}) + # Get node class + node_type = NodeType(node_config_data.get("type")) + return node_type + + @staticmethod + def get_enclosing_node_type_and_id(node_config: Mapping[str, Any]) -> tuple[NodeType, str] | None: + in_loop = node_config.get("isInLoop", False) + in_iteration = node_config.get("isInIteration", False) + if in_loop: + loop_id = node_config.get("loop_id") + if loop_id is None: + raise _InvalidGraphDefinitionError("invalid graph") + return NodeType.LOOP, loop_id + elif in_iteration: + iteration_id = node_config.get("iteration_id") + if iteration_id is None: + raise _InvalidGraphDefinitionError("invalid graph") + return NodeType.ITERATION, iteration_id + else: + return None + @property def features(self) -> str: """ @@ -376,6 +452,10 @@ class Workflow(Base): ensure_ascii=False, ) + @staticmethod + def version_from_datetime(d: datetime) -> str: + return str(d) + class WorkflowRun(Base): """ @@ -386,7 +466,7 @@ class WorkflowRun(Base): - id (uuid) Run ID - tenant_id (uuid) Workspace ID - app_id (uuid) App ID - - sequence_number (int) Auto-increment sequence number, incremented within the App, starting from 1 + - workflow_id (uuid) Workflow ID - type (string) Workflow type - triggered_from (string) Trigger source @@ -419,13 +499,12 @@ class WorkflowRun(Base): __table_args__ = ( db.PrimaryKeyConstraint("id", name="workflow_run_pkey"), db.Index("workflow_run_triggerd_from_idx", "tenant_id", "app_id", "triggered_from"), - db.Index("workflow_run_tenant_app_sequence_idx", "tenant_id", "app_id", "sequence_number"), ) id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) tenant_id: Mapped[str] = mapped_column(StringUUID) app_id: Mapped[str] = mapped_column(StringUUID) - sequence_number: 
Mapped[int] = mapped_column() + workflow_id: Mapped[str] = mapped_column(StringUUID) type: Mapped[str] = mapped_column(db.String(255)) triggered_from: Mapped[str] = mapped_column(db.String(255)) @@ -485,7 +564,6 @@ class WorkflowRun(Base): "id": self.id, "tenant_id": self.tenant_id, "app_id": self.app_id, - "sequence_number": self.sequence_number, "workflow_id": self.workflow_id, "type": self.type, "triggered_from": self.triggered_from, @@ -511,7 +589,6 @@ class WorkflowRun(Base): id=data.get("id"), tenant_id=data.get("tenant_id"), app_id=data.get("app_id"), - sequence_number=data.get("sequence_number"), workflow_id=data.get("workflow_id"), type=data.get("type"), triggered_from=data.get("triggered_from"), @@ -838,8 +915,18 @@ def _naive_utc_datetime(): class WorkflowDraftVariable(Base): + """`WorkflowDraftVariable` records variables and outputs generated while + debugging a workflow or chatflow. + + IMPORTANT: This model maintains multiple invariant rules that must be preserved. + Do not instantiate this class directly with the constructor. + + Instead, use the factory methods (`new_conversation_variable`, `new_sys_variable`, + `new_node_variable`) defined below to ensure all invariants are properly maintained. + """ + @staticmethod - def unique_columns() -> list[str]: + def unique_app_id_node_id_name() -> list[str]: return [ "app_id", "node_id", @@ -847,7 +934,9 @@ class WorkflowDraftVariable(Base): ] __tablename__ = "workflow_draft_variables" - __table_args__ = (UniqueConstraint(*unique_columns()),) + __table_args__ = (UniqueConstraint(*unique_app_id_node_id_name()),) + # Required for instance variable annotation. + __allow_unmapped__ = True # id is the unique identifier of a draft variable. id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) @@ -928,6 +1017,36 @@ class WorkflowDraftVariable(Base): default=None, ) + + # Cache for deserialized value + # + # NOTE(QuantumGhost): This field serves two purposes: + # + # 1. Caches deserialized values to reduce repeated parsing costs + # 2. Allows modification of the deserialized value after retrieval, + # particularly important for `File` variables which require database + # lookups to obtain storage_key and other metadata + # + # Use a double underscore prefix for better encapsulation, + # making this attribute harder to access from outside the class. + __value: Segment | None + + def __init__(self, *args, **kwargs): + """ + The constructor of `WorkflowDraftVariable` is not intended for + direct use outside this file. Its sole purpose is to set up the private state + used by the model instance. + + Please use the factory methods + (`new_conversation_variable`, `new_sys_variable`, `new_node_variable`) + defined below to create instances of this class. + """ + super().__init__(*args, **kwargs) + self.__value = None + + @orm.reconstructor + def _init_on_load(self): + self.__value = None + def get_selector(self) -> list[str]: selector = json.loads(self.selector) if not isinstance(selector, list): @@ -942,15 +1061,92 @@ class WorkflowDraftVariable(Base): def _set_selector(self, value: list[str]): self.selector = json.dumps(value) - def get_value(self) -> Segment | None: - return build_segment(json.loads(self.value)) + def _loads_value(self) -> Segment: + value = json.loads(self.value) + return self.build_segment_with_type(self.value_type, value) + + @staticmethod + def rebuild_file_types(value: Any) -> Any: + # NOTE(QuantumGhost): Temporary workaround for structured data handling.
+ # By this point, `output` has been converted to dict by + # `WorkflowEntry.handle_special_values`, so we need to + # reconstruct File objects from their serialized form + # to maintain proper variable saving behavior. + # + # Ideally, we should work with structured data objects directly + # rather than their serialized forms. + # However, multiple components in the codebase depend on + # `WorkflowEntry.handle_special_values`, making a comprehensive migration challenging. + if isinstance(value, dict): + if not maybe_file_object(value): + return value + return File.model_validate(value) + elif isinstance(value, list) and value: + first = value[0] + if not maybe_file_object(first): + return value + return [File.model_validate(i) for i in value] + else: + return value + + @classmethod + def build_segment_with_type(cls, segment_type: SegmentType, value: Any) -> Segment: + # Extends `variable_factory.build_segment_with_type` functionality by + # reconstructing `FileSegment`` or `ArrayFileSegment`` objects from + # their serialized dictionary or list representations, respectively. + if segment_type == SegmentType.FILE: + if isinstance(value, File): + return build_segment_with_type(segment_type, value) + elif isinstance(value, dict): + file = cls.rebuild_file_types(value) + return build_segment_with_type(segment_type, file) + else: + raise TypeMismatchError(f"expected dict or File for FileSegment, got {type(value)}") + if segment_type == SegmentType.ARRAY_FILE: + if not isinstance(value, list): + raise TypeMismatchError(f"expected list for ArrayFileSegment, got {type(value)}") + file_list = cls.rebuild_file_types(value) + return build_segment_with_type(segment_type=segment_type, value=file_list) + + return build_segment_with_type(segment_type=segment_type, value=value) + + def get_value(self) -> Segment: + """Decode the serialized value into its corresponding `Segment` object. + + This method caches the result, so repeated calls will return the same + object instance without re-parsing the serialized data. + + If you need to modify the returned `Segment`, use `value.model_copy()` + to create a copy first to avoid affecting the cached instance. + + For more information about the caching mechanism, see the documentation + of the `__value` field. + + Returns: + Segment: The deserialized value as a Segment object. + """ + + if self.__value is not None: + return self.__value + value = self._loads_value() + self.__value = value + return value def set_name(self, name: str): self.name = name self._set_selector([self.node_id, name]) def set_value(self, value: Segment): - self.value = json.dumps(value.value) + """Updates the `value` and corresponding `value_type` fields in the database model. + + This method also stores the provided Segment object in the deserialized cache + without creating a copy, allowing for efficient value access. + + Args: + value: The Segment object to store as the variable's value. 
+ """ + self.__value = value + self.value = json.dumps(value, cls=variable_utils.SegmentJSONEncoder) self.value_type = value.value_type def get_node_id(self) -> str | None: @@ -976,6 +1172,7 @@ class WorkflowDraftVariable(Base): node_id: str, name: str, value: Segment, + node_execution_id: str | None, description: str = "", ) -> "WorkflowDraftVariable": variable = WorkflowDraftVariable() @@ -987,6 +1184,7 @@ class WorkflowDraftVariable(Base): variable.name = name variable.set_value(value) variable._set_selector(list(variable_utils.to_selector(node_id, name))) + variable.node_execution_id = node_execution_id return variable @classmethod @@ -996,13 +1194,17 @@ class WorkflowDraftVariable(Base): app_id: str, name: str, value: Segment, + description: str = "", ) -> "WorkflowDraftVariable": variable = cls._new( app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, name=name, value=value, + description=description, + node_execution_id=None, ) + variable.editable = True return variable @classmethod @@ -1012,9 +1214,16 @@ class WorkflowDraftVariable(Base): app_id: str, name: str, value: Segment, + node_execution_id: str, editable: bool = False, ) -> "WorkflowDraftVariable": - variable = cls._new(app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, name=name, value=value) + variable = cls._new( + app_id=app_id, + node_id=SYSTEM_VARIABLE_NODE_ID, + name=name, + node_execution_id=node_execution_id, + value=value, + ) variable.editable = editable return variable @@ -1026,11 +1235,19 @@ class WorkflowDraftVariable(Base): node_id: str, name: str, value: Segment, + node_execution_id: str, visible: bool = True, + editable: bool = True, ) -> "WorkflowDraftVariable": - variable = cls._new(app_id=app_id, node_id=node_id, name=name, value=value) + variable = cls._new( + app_id=app_id, + node_id=node_id, + name=name, + node_execution_id=node_execution_id, + value=value, + ) variable.visible = visible - variable.editable = True + variable.editable = editable return variable @property diff --git a/api/mypy.ini b/api/mypy.ini index 12fe529b08..6836b2602b 100644 --- a/api/mypy.ini +++ b/api/mypy.ini @@ -18,4 +18,3 @@ ignore_missing_imports=True [mypy-flask_restful.inputs] ignore_missing_imports=True - diff --git a/api/pyproject.toml b/api/pyproject.toml index 9631586ed4..6033b3a670 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -81,6 +81,7 @@ dependencies = [ "weave~=0.51.0", "yarl~=1.18.3", "webvtt-py~=0.5.1", + "sendgrid~=6.12.3", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. 
@@ -148,11 +149,13 @@ dev = [ "types-ujson>=5.10.0", "boto3-stubs>=1.38.20", "types-jmespath>=1.0.2.20240106", + "hypothesis>=6.131.15", "types_pyOpenSSL>=24.1.0", "types_cffi>=1.17.0", "types_setuptools>=80.9.0", "pandas-stubs~=2.2.3", "scipy-stubs>=1.15.3.0", + "types-python-http-client>=3.3.7.20240910", ] ############################################################ @@ -202,4 +205,5 @@ vdb = [ "volcengine-compat~=1.0.0", "weaviate-client~=3.24.0", "xinference-client~=1.2.2", + "mo-vector~=0.1.13", ] diff --git a/api/services/app_dsl_service.py b/api/services/app_dsl_service.py index d2875180d8..20257fa345 100644 --- a/api/services/app_dsl_service.py +++ b/api/services/app_dsl_service.py @@ -32,6 +32,7 @@ from models import Account, App, AppMode from models.model import AppModelConfig from models.workflow import Workflow from services.plugin.dependencies_analysis import DependenciesAnalysisService +from services.workflow_draft_variable_service import WorkflowDraftVariableService from services.workflow_service import WorkflowService logger = logging.getLogger(__name__) @@ -292,6 +293,8 @@ class AppDslService: dependencies=check_dependencies_pending_data, ) + draft_var_srv = WorkflowDraftVariableService(session=self._session) + draft_var_srv.delete_workflow_variables(app_id=app.id) return Import( id=import_id, status=status, @@ -421,7 +424,7 @@ class AppDslService: # Set icon type icon_type_value = icon_type or app_data.get("icon_type") - if icon_type_value in ["emoji", "link"]: + if icon_type_value in ["emoji", "link", "image"]: icon_type = icon_type_value else: icon_type = "emoji" diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index e98b47921f..a697c9ab7f 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -59,6 +59,7 @@ from services.external_knowledge_service import ExternalDatasetService from services.feature_service import FeatureModel, FeatureService from services.tag_service import TagService from services.vector_service import VectorService +from tasks.add_document_to_index_task import add_document_to_index_task from tasks.batch_clean_document_task import batch_clean_document_task from tasks.clean_notion_document_task import clean_notion_document_task from tasks.deal_dataset_vector_index_task import deal_dataset_vector_index_task @@ -70,6 +71,7 @@ from tasks.document_indexing_update_task import document_indexing_update_task from tasks.duplicate_document_indexing_task import duplicate_document_indexing_task from tasks.enable_segments_to_index_task import enable_segments_to_index_task from tasks.recover_document_indexing_task import recover_document_indexing_task +from tasks.remove_document_from_index_task import remove_document_from_index_task from tasks.retry_document_indexing_task import retry_document_indexing_task from tasks.sync_website_document_indexing_task import sync_website_document_indexing_task @@ -278,174 +280,332 @@ class DatasetService: @staticmethod def update_dataset(dataset_id, data, user): + """ + Update dataset configuration and settings. 
+ + Args: + dataset_id: The unique identifier of the dataset to update + data: Dictionary containing the update data + user: The user performing the update operation + + Returns: + Dataset: The updated dataset object + + Raises: + ValueError: If dataset not found or validation fails + NoPermissionError: If user lacks permission to update the dataset + """ + # Retrieve and validate dataset existence dataset = DatasetService.get_dataset(dataset_id) if not dataset: raise ValueError("Dataset not found") + # Verify user has permission to update this dataset DatasetService.check_dataset_permission(dataset, user) + + # Handle external dataset updates if dataset.provider == "external": - external_retrieval_model = data.get("external_retrieval_model", None) - if external_retrieval_model: - dataset.retrieval_model = external_retrieval_model - dataset.name = data.get("name", dataset.name) - dataset.description = data.get("description", "") - permission = data.get("permission") - if permission: - dataset.permission = permission - external_knowledge_id = data.get("external_knowledge_id", None) - db.session.add(dataset) - if not external_knowledge_id: - raise ValueError("External knowledge id is required.") - external_knowledge_api_id = data.get("external_knowledge_api_id", None) - if not external_knowledge_api_id: - raise ValueError("External knowledge api id is required.") - - with Session(db.engine) as session: - external_knowledge_binding = ( - session.query(ExternalKnowledgeBindings).filter_by(dataset_id=dataset_id).first() - ) - - if not external_knowledge_binding: - raise ValueError("External knowledge binding not found.") - - if ( - external_knowledge_binding.external_knowledge_id != external_knowledge_id - or external_knowledge_binding.external_knowledge_api_id != external_knowledge_api_id - ): - external_knowledge_binding.external_knowledge_id = external_knowledge_id - external_knowledge_binding.external_knowledge_api_id = external_knowledge_api_id - db.session.add(external_knowledge_binding) - db.session.commit() + return DatasetService._update_external_dataset(dataset, data, user) else: - data.pop("partial_member_list", None) - data.pop("external_knowledge_api_id", None) - data.pop("external_knowledge_id", None) - data.pop("external_retrieval_model", None) - filtered_data = {k: v for k, v in data.items() if v is not None or k == "description"} - action = None - if dataset.indexing_technique != data["indexing_technique"]: - # if update indexing_technique - if data["indexing_technique"] == "economy": - action = "remove" - filtered_data["embedding_model"] = None - filtered_data["embedding_model_provider"] = None - filtered_data["collection_binding_id"] = None - elif data["indexing_technique"] == "high_quality": - action = "add" - # get embedding model setting - try: - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, - provider=data["embedding_model_provider"], - model_type=ModelType.TEXT_EMBEDDING, - model=data["embedding_model"], - ) - filtered_data["embedding_model"] = embedding_model.model - filtered_data["embedding_model_provider"] = embedding_model.provider - dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( - embedding_model.provider, embedding_model.model - ) - filtered_data["collection_binding_id"] = dataset_collection_binding.id - except LLMBadRequestError: - raise ValueError( - "No Embedding Model available. 
Please configure a valid provider " - "in the Settings -> Model Provider." - ) - except ProviderTokenNotInitError as ex: - raise ValueError(ex.description) - else: - # add default plugin id to both setting sets, to make sure the plugin model provider is consistent - # Skip embedding model checks if not provided in the update request - if ( - "embedding_model_provider" not in data - or "embedding_model" not in data - or not data.get("embedding_model_provider") - or not data.get("embedding_model") - ): - # If the dataset already has embedding model settings, use those - if dataset.embedding_model_provider and dataset.embedding_model: - # Keep existing values - filtered_data["embedding_model_provider"] = dataset.embedding_model_provider - filtered_data["embedding_model"] = dataset.embedding_model - # If collection_binding_id exists, keep it too - if dataset.collection_binding_id: - filtered_data["collection_binding_id"] = dataset.collection_binding_id - # Otherwise, don't try to update embedding model settings at all - # Remove these fields from filtered_data if they exist but are None/empty - if "embedding_model_provider" in filtered_data and not filtered_data["embedding_model_provider"]: - del filtered_data["embedding_model_provider"] - if "embedding_model" in filtered_data and not filtered_data["embedding_model"]: - del filtered_data["embedding_model"] - else: - skip_embedding_update = False - try: - # Handle existing model provider - plugin_model_provider = dataset.embedding_model_provider - plugin_model_provider_str = None - if plugin_model_provider: - plugin_model_provider_str = str(ModelProviderID(plugin_model_provider)) + return DatasetService._update_internal_dataset(dataset, data, user) - # Handle new model provider from request - new_plugin_model_provider = data["embedding_model_provider"] - new_plugin_model_provider_str = None - if new_plugin_model_provider: - new_plugin_model_provider_str = str(ModelProviderID(new_plugin_model_provider)) + @staticmethod + def _update_external_dataset(dataset, data, user): + """ + Update external dataset configuration. 
- # Only update embedding model if both values are provided and different from current - if ( - plugin_model_provider_str != new_plugin_model_provider_str - or data["embedding_model"] != dataset.embedding_model - ): - action = "update" - model_manager = ModelManager() - try: - embedding_model = model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, - provider=data["embedding_model_provider"], - model_type=ModelType.TEXT_EMBEDDING, - model=data["embedding_model"], - ) - except ProviderTokenNotInitError: - # If we can't get the embedding model, skip updating it - # and keep the existing settings if available - if dataset.embedding_model_provider and dataset.embedding_model: - filtered_data["embedding_model_provider"] = dataset.embedding_model_provider - filtered_data["embedding_model"] = dataset.embedding_model - if dataset.collection_binding_id: - filtered_data["collection_binding_id"] = dataset.collection_binding_id - # Skip the rest of the embedding model update - skip_embedding_update = True - if not skip_embedding_update: - filtered_data["embedding_model"] = embedding_model.model - filtered_data["embedding_model_provider"] = embedding_model.provider - dataset_collection_binding = ( - DatasetCollectionBindingService.get_dataset_collection_binding( - embedding_model.provider, embedding_model.model - ) - ) - filtered_data["collection_binding_id"] = dataset_collection_binding.id - except LLMBadRequestError: - raise ValueError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." - ) - except ProviderTokenNotInitError as ex: - raise ValueError(ex.description) + Args: + dataset: The dataset object to update + data: Update data dictionary + user: User performing the update - filtered_data["updated_by"] = user.id - filtered_data["updated_at"] = datetime.datetime.now() + Returns: + Dataset: Updated dataset object + """ + # Update retrieval model if provided + external_retrieval_model = data.get("external_retrieval_model", None) + if external_retrieval_model: + dataset.retrieval_model = external_retrieval_model - # update Retrieval model - filtered_data["retrieval_model"] = data["retrieval_model"] + # Update basic dataset properties + dataset.name = data.get("name", dataset.name) + dataset.description = data.get("description", dataset.description) - db.session.query(Dataset).filter_by(id=dataset_id).update(filtered_data) + # Update permission if provided + permission = data.get("permission") + if permission: + dataset.permission = permission + + # Validate and update external knowledge configuration + external_knowledge_id = data.get("external_knowledge_id", None) + external_knowledge_api_id = data.get("external_knowledge_api_id", None) + + if not external_knowledge_id: + raise ValueError("External knowledge id is required.") + if not external_knowledge_api_id: + raise ValueError("External knowledge api id is required.") + # Update metadata fields + dataset.updated_by = user.id if user else None + dataset.updated_at = datetime.datetime.utcnow() + db.session.add(dataset) + + # Update external knowledge binding + DatasetService._update_external_knowledge_binding(dataset.id, external_knowledge_id, external_knowledge_api_id) + + # Commit changes to database + db.session.commit() - db.session.commit() - if action: - deal_dataset_vector_index_task.delay(dataset_id, action) return dataset + @staticmethod + def _update_external_knowledge_binding(dataset_id, external_knowledge_id, external_knowledge_api_id): + """ + Update 
external knowledge binding configuration. + + Args: + dataset_id: Dataset identifier + external_knowledge_id: External knowledge identifier + external_knowledge_api_id: External knowledge API identifier + """ + with Session(db.engine) as session: + external_knowledge_binding = ( + session.query(ExternalKnowledgeBindings).filter_by(dataset_id=dataset_id).first() + ) + + if not external_knowledge_binding: + raise ValueError("External knowledge binding not found.") + + # Update binding if values have changed + if ( + external_knowledge_binding.external_knowledge_id != external_knowledge_id + or external_knowledge_binding.external_knowledge_api_id != external_knowledge_api_id + ): + external_knowledge_binding.external_knowledge_id = external_knowledge_id + external_knowledge_binding.external_knowledge_api_id = external_knowledge_api_id + db.session.add(external_knowledge_binding) + + @staticmethod + def _update_internal_dataset(dataset, data, user): + """ + Update internal dataset configuration. + + Args: + dataset: The dataset object to update + data: Update data dictionary + user: User performing the update + + Returns: + Dataset: Updated dataset object + """ + # Remove external-specific fields from update data + data.pop("partial_member_list", None) + data.pop("external_knowledge_api_id", None) + data.pop("external_knowledge_id", None) + data.pop("external_retrieval_model", None) + + # Filter out None values except for description field + filtered_data = {k: v for k, v in data.items() if v is not None or k == "description"} + + # Handle indexing technique changes and embedding model updates + action = DatasetService._handle_indexing_technique_change(dataset, data, filtered_data) + + # Add metadata fields + filtered_data["updated_by"] = user.id + filtered_data["updated_at"] = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + # update Retrieval model + filtered_data["retrieval_model"] = data["retrieval_model"] + + # Update dataset in database + db.session.query(Dataset).filter_by(id=dataset.id).update(filtered_data) + db.session.commit() + + # Trigger vector index task if indexing technique changed + if action: + deal_dataset_vector_index_task.delay(dataset.id, action) + + return dataset + + @staticmethod + def _handle_indexing_technique_change(dataset, data, filtered_data): + """ + Handle changes in indexing technique and configure embedding models accordingly. + + Args: + dataset: Current dataset object + data: Update data dictionary + filtered_data: Filtered update data + + Returns: + str: Action to perform ('add', 'remove', 'update', or None) + """ + if dataset.indexing_technique != data["indexing_technique"]: + if data["indexing_technique"] == "economy": + # Remove embedding model configuration for economy mode + filtered_data["embedding_model"] = None + filtered_data["embedding_model_provider"] = None + filtered_data["collection_binding_id"] = None + return "remove" + elif data["indexing_technique"] == "high_quality": + # Configure embedding model for high quality mode + DatasetService._configure_embedding_model_for_high_quality(data, filtered_data) + return "add" + else: + # Handle embedding model updates when indexing technique remains the same + return DatasetService._handle_embedding_model_update_when_technique_unchanged(dataset, data, filtered_data) + return None + + @staticmethod + def _configure_embedding_model_for_high_quality(data, filtered_data): + """ + Configure embedding model settings for high quality indexing. 
+ + Args: + data: Update data dictionary + filtered_data: Filtered update data to modify + """ + try: + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=current_user.current_tenant_id, + provider=data["embedding_model_provider"], + model_type=ModelType.TEXT_EMBEDDING, + model=data["embedding_model"], + ) + filtered_data["embedding_model"] = embedding_model.model + filtered_data["embedding_model_provider"] = embedding_model.provider + dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( + embedding_model.provider, embedding_model.model + ) + filtered_data["collection_binding_id"] = dataset_collection_binding.id + except LLMBadRequestError: + raise ValueError( + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." + ) + except ProviderTokenNotInitError as ex: + raise ValueError(ex.description) + + @staticmethod + def _handle_embedding_model_update_when_technique_unchanged(dataset, data, filtered_data): + """ + Handle embedding model updates when indexing technique remains the same. + + Args: + dataset: Current dataset object + data: Update data dictionary + filtered_data: Filtered update data to modify + + Returns: + str: Action to perform ('update' or None) + """ + # Skip embedding model checks if not provided in the update request + if ( + "embedding_model_provider" not in data + or "embedding_model" not in data + or not data.get("embedding_model_provider") + or not data.get("embedding_model") + ): + DatasetService._preserve_existing_embedding_settings(dataset, filtered_data) + return None + else: + return DatasetService._update_embedding_model_settings(dataset, data, filtered_data) + + @staticmethod + def _preserve_existing_embedding_settings(dataset, filtered_data): + """ + Preserve existing embedding model settings when not provided in update. + + Args: + dataset: Current dataset object + filtered_data: Filtered update data to modify + """ + # If the dataset already has embedding model settings, use those + if dataset.embedding_model_provider and dataset.embedding_model: + filtered_data["embedding_model_provider"] = dataset.embedding_model_provider + filtered_data["embedding_model"] = dataset.embedding_model + # If collection_binding_id exists, keep it too + if dataset.collection_binding_id: + filtered_data["collection_binding_id"] = dataset.collection_binding_id + # Otherwise, don't try to update embedding model settings at all + # Remove these fields from filtered_data if they exist but are None/empty + if "embedding_model_provider" in filtered_data and not filtered_data["embedding_model_provider"]: + del filtered_data["embedding_model_provider"] + if "embedding_model" in filtered_data and not filtered_data["embedding_model"]: + del filtered_data["embedding_model"] + + @staticmethod + def _update_embedding_model_settings(dataset, data, filtered_data): + """ + Update embedding model settings with new values. 
+ + Args: + dataset: Current dataset object + data: Update data dictionary + filtered_data: Filtered update data to modify + + Returns: + str: Action to perform ('update' or None) + """ + try: + # Compare current and new model provider settings + current_provider_str = ( + str(ModelProviderID(dataset.embedding_model_provider)) if dataset.embedding_model_provider else None + ) + new_provider_str = ( + str(ModelProviderID(data["embedding_model_provider"])) if data["embedding_model_provider"] else None + ) + + # Only update if values are different + if current_provider_str != new_provider_str or data["embedding_model"] != dataset.embedding_model: + DatasetService._apply_new_embedding_settings(dataset, data, filtered_data) + return "update" + except LLMBadRequestError: + raise ValueError( + "No Embedding Model available. Please configure a valid provider in the Settings -> Model Provider." + ) + except ProviderTokenNotInitError as ex: + raise ValueError(ex.description) + return None + + @staticmethod + def _apply_new_embedding_settings(dataset, data, filtered_data): + """ + Apply new embedding model settings to the dataset. + + Args: + dataset: Current dataset object + data: Update data dictionary + filtered_data: Filtered update data to modify + """ + model_manager = ModelManager() + try: + embedding_model = model_manager.get_model_instance( + tenant_id=current_user.current_tenant_id, + provider=data["embedding_model_provider"], + model_type=ModelType.TEXT_EMBEDDING, + model=data["embedding_model"], + ) + except ProviderTokenNotInitError: + # If we can't get the embedding model, preserve existing settings + logging.warning( + f"Failed to initialize embedding model {data['embedding_model_provider']}/{data['embedding_model']}, " + f"preserving existing settings" + ) + if dataset.embedding_model_provider and dataset.embedding_model: + filtered_data["embedding_model_provider"] = dataset.embedding_model_provider + filtered_data["embedding_model"] = dataset.embedding_model + if dataset.collection_binding_id: + filtered_data["collection_binding_id"] = dataset.collection_binding_id + # Skip the rest of the embedding model update + return + + # Apply new embedding model settings + filtered_data["embedding_model"] = embedding_model.model + filtered_data["embedding_model_provider"] = embedding_model.provider + dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( + embedding_model.provider, embedding_model.model + ) + filtered_data["collection_binding_id"] = dataset_collection_binding.id + @staticmethod def delete_dataset(dataset_id, user): dataset = DatasetService.get_dataset(dataset_id) @@ -976,12 +1136,17 @@ class DocumentService: process_rule = knowledge_config.process_rule if process_rule: if process_rule.mode in ("custom", "hierarchical"): - dataset_process_rule = DatasetProcessRule( - dataset_id=dataset.id, - mode=process_rule.mode, - rules=process_rule.rules.model_dump_json() if process_rule.rules else None, - created_by=account.id, - ) + if process_rule.rules: + dataset_process_rule = DatasetProcessRule( + dataset_id=dataset.id, + mode=process_rule.mode, + rules=process_rule.rules.model_dump_json() if process_rule.rules else None, + created_by=account.id, + ) + else: + dataset_process_rule = dataset.latest_process_rule + if not dataset_process_rule: + raise ValueError("No process rule found.") elif process_rule.mode == "automatic": dataset_process_rule = DatasetProcessRule( dataset_id=dataset.id, @@ -1402,16 +1567,16 @@ class DocumentService: 
knowledge_config.embedding_model, # type: ignore ) dataset_collection_binding_id = dataset_collection_binding.id - if knowledge_config.retrieval_model: - retrieval_model = knowledge_config.retrieval_model - else: - retrieval_model = RetrievalModel( - search_method=RetrievalMethod.SEMANTIC_SEARCH.value, - reranking_enable=False, - reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""), - top_k=2, - score_threshold_enabled=False, - ) + if knowledge_config.retrieval_model: + retrieval_model = knowledge_config.retrieval_model + else: + retrieval_model = RetrievalModel( + search_method=RetrievalMethod.SEMANTIC_SEARCH.value, + reranking_enable=False, + reranking_model=RerankingModel(reranking_provider_name="", reranking_model_name=""), + top_k=2, + score_threshold_enabled=False, + ) # save dataset dataset = Dataset( tenant_id=tenant_id, @@ -1603,6 +1768,191 @@ class DocumentService: if not isinstance(args["process_rule"]["rules"]["segmentation"]["max_tokens"], int): raise ValueError("Process rule segmentation max_tokens is invalid") + @staticmethod + def batch_update_document_status(dataset: Dataset, document_ids: list[str], action: str, user): + """ + Batch update document status. + + Args: + dataset (Dataset): The dataset object + document_ids (list[str]): List of document IDs to update + action (str): Action to perform (enable, disable, archive, un_archive) + user: Current user performing the action + + Raises: + DocumentIndexingError: If document is being indexed or not in correct state + ValueError: If action is invalid + """ + if not document_ids: + return + + # Early validation of action parameter + valid_actions = ["enable", "disable", "archive", "un_archive"] + if action not in valid_actions: + raise ValueError(f"Invalid action: {action}. 
Must be one of {valid_actions}") + + documents_to_update = [] + + # First pass: validate all documents and prepare updates + for document_id in document_ids: + document = DocumentService.get_document(dataset.id, document_id) + if not document: + continue + + # Check if document is being indexed + indexing_cache_key = f"document_{document.id}_indexing" + cache_result = redis_client.get(indexing_cache_key) + if cache_result is not None: + raise DocumentIndexingError(f"Document:{document.name} is being indexed, please try again later") + + # Prepare update based on action + update_info = DocumentService._prepare_document_status_update(document, action, user) + if update_info: + documents_to_update.append(update_info) + + # Second pass: apply all updates in a single transaction + if documents_to_update: + try: + for update_info in documents_to_update: + document = update_info["document"] + updates = update_info["updates"] + + # Apply updates to the document + for field, value in updates.items(): + setattr(document, field, value) + + db.session.add(document) + + # Batch commit all changes + db.session.commit() + except Exception as e: + # Rollback on any error + db.session.rollback() + raise e + # Execute async tasks and set Redis cache after successful commit + # propagation_error is used to capture any errors for submitting async task execution + propagation_error = None + for update_info in documents_to_update: + try: + # Execute async tasks after successful commit + if update_info["async_task"]: + task_info = update_info["async_task"] + task_func = task_info["function"] + task_args = task_info["args"] + task_func.delay(*task_args) + except Exception as e: + # Log the error but do not rollback the transaction + logging.exception(f"Error executing async task for document {update_info['document'].id}") + # don't raise the error immediately, but capture it for later + propagation_error = e + try: + # Set Redis cache if needed after successful commit + if update_info["set_cache"]: + document = update_info["document"] + indexing_cache_key = f"document_{document.id}_indexing" + redis_client.setex(indexing_cache_key, 600, 1) + except Exception as e: + # Log the error but do not rollback the transaction + logging.exception(f"Error setting cache for document {update_info['document'].id}") + # Raise any propagation error after all updates + if propagation_error: + raise propagation_error + + @staticmethod + def _prepare_document_status_update(document, action: str, user): + """ + Prepare document status update information. 
+ + Args: + document: Document object to update + action: Action to perform + user: Current user + + Returns: + dict: Update information or None if no update needed + """ + now = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + + if action == "enable": + return DocumentService._prepare_enable_update(document, now) + elif action == "disable": + return DocumentService._prepare_disable_update(document, user, now) + elif action == "archive": + return DocumentService._prepare_archive_update(document, user, now) + elif action == "un_archive": + return DocumentService._prepare_unarchive_update(document, now) + + return None + + @staticmethod + def _prepare_enable_update(document, now): + """Prepare updates for enabling a document.""" + if document.enabled: + return None + + return { + "document": document, + "updates": {"enabled": True, "disabled_at": None, "disabled_by": None, "updated_at": now}, + "async_task": {"function": add_document_to_index_task, "args": [document.id]}, + "set_cache": True, + } + + @staticmethod + def _prepare_disable_update(document, user, now): + """Prepare updates for disabling a document.""" + if not document.completed_at or document.indexing_status != "completed": + raise DocumentIndexingError(f"Document: {document.name} is not completed.") + + if not document.enabled: + return None + + return { + "document": document, + "updates": {"enabled": False, "disabled_at": now, "disabled_by": user.id, "updated_at": now}, + "async_task": {"function": remove_document_from_index_task, "args": [document.id]}, + "set_cache": True, + } + + @staticmethod + def _prepare_archive_update(document, user, now): + """Prepare updates for archiving a document.""" + if document.archived: + return None + + update_info = { + "document": document, + "updates": {"archived": True, "archived_at": now, "archived_by": user.id, "updated_at": now}, + "async_task": None, + "set_cache": False, + } + + # Only set async task and cache if document is currently enabled + if document.enabled: + update_info["async_task"] = {"function": remove_document_from_index_task, "args": [document.id]} + update_info["set_cache"] = True + + return update_info + + @staticmethod + def _prepare_unarchive_update(document, now): + """Prepare updates for unarchiving a document.""" + if not document.archived: + return None + + update_info = { + "document": document, + "updates": {"archived": False, "archived_at": None, "archived_by": None, "updated_at": now}, + "async_task": None, + "set_cache": False, + } + + # Only re-index if the document is currently enabled + if document.enabled: + update_info["async_task"] = {"function": add_document_to_index_task, "args": [document.id]} + update_info["set_cache"] = True + + return update_info + class SegmentService: @classmethod diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py index bb3be61f85..603064ca07 100644 --- a/api/services/entities/knowledge_entities/knowledge_entities.py +++ b/api/services/entities/knowledge_entities/knowledge_entities.py @@ -101,7 +101,7 @@ class WeightModel(BaseModel): class RetrievalModel(BaseModel): - search_method: Literal["hybrid_search", "semantic_search", "full_text_search"] + search_method: Literal["hybrid_search", "semantic_search", "full_text_search", "keyword_search"] reranking_enable: bool reranking_model: Optional[RerankingModel] = None reranking_mode: Optional[str] = None diff --git a/api/services/errors/app.py b/api/services/errors/app.py index 
87e9e9247d..5d348c61be 100644 --- a/api/services/errors/app.py +++ b/api/services/errors/app.py @@ -4,3 +4,7 @@ class MoreLikeThisDisabledError(Exception): class WorkflowHashNotEqualError(Exception): pass + + +class IsDraftWorkflowError(Exception): + pass diff --git a/api/services/errors/plugin.py b/api/services/errors/plugin.py new file mode 100644 index 0000000000..be5b144b3d --- /dev/null +++ b/api/services/errors/plugin.py @@ -0,0 +1,5 @@ +from services.errors.base import BaseServiceError + + +class PluginInstallationForbiddenError(BaseServiceError): + pass diff --git a/api/services/feature_service.py b/api/services/feature_service.py index be85a03e80..188caf3505 100644 --- a/api/services/feature_service.py +++ b/api/services/feature_service.py @@ -88,6 +88,26 @@ class WebAppAuthModel(BaseModel): allow_email_password_login: bool = False +class PluginInstallationScope(StrEnum): + NONE = "none" + OFFICIAL_ONLY = "official_only" + OFFICIAL_AND_SPECIFIC_PARTNERS = "official_and_specific_partners" + ALL = "all" + + +class PluginInstallationPermissionModel(BaseModel): + # Plugin installation scope – possible values: + # none: prohibit all plugin installations + # official_only: allow only Dify official plugins + # official_and_specific_partners: allow official and specific partner plugins + # all: allow installation of all plugins + plugin_installation_scope: PluginInstallationScope = PluginInstallationScope.ALL + + # If True, restrict plugin installation to the marketplace only + # Equivalent to ForceEnablePluginVerification + restrict_to_marketplace_only: bool = False + + class FeatureModel(BaseModel): billing: BillingModel = BillingModel() education: EducationModel = EducationModel() @@ -128,6 +148,7 @@ class SystemFeatureModel(BaseModel): license: LicenseModel = LicenseModel() branding: BrandingModel = BrandingModel() webapp_auth: WebAppAuthModel = WebAppAuthModel() + plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel() class FeatureService: @@ -291,3 +312,12 @@ class FeatureService: features.license.workspaces.enabled = license_info["workspaces"]["enabled"] features.license.workspaces.limit = license_info["workspaces"]["limit"] features.license.workspaces.size = license_info["workspaces"]["used"] + + if "PluginInstallationPermission" in enterprise_info: + plugin_installation_info = enterprise_info["PluginInstallationPermission"] + features.plugin_installation_permission.plugin_installation_scope = plugin_installation_info[ + "pluginInstallationScope" + ] + features.plugin_installation_permission.restrict_to_marketplace_only = plugin_installation_info[ + "restrictToMarketplaceOnly" + ] diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py index 26d6d4ce18..cfcb121153 100644 --- a/api/services/metadata_service.py +++ b/api/services/metadata_service.py @@ -19,6 +19,10 @@ from services.entities.knowledge_entities.knowledge_entities import ( class MetadataService: @staticmethod def create_metadata(dataset_id: str, metadata_args: MetadataArgs) -> DatasetMetadata: + # check if metadata name is too long + if len(metadata_args.name) > 255: + raise ValueError("Metadata name cannot exceed 255 characters.") + # check if metadata name already exists if ( db.session.query(DatasetMetadata) @@ -42,6 +46,10 @@ class MetadataService: @staticmethod def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata: # type: ignore + # check if metadata name is too long + if len(name) > 255: + raise 
ValueError("Metadata name cannot exceed 255 characters.") + lock_key = f"dataset_metadata_lock_{dataset_id}" # check if metadata name already exists if ( diff --git a/api/services/moderation_service.py b/api/services/moderation_service.py deleted file mode 100644 index 082afeed89..0000000000 --- a/api/services/moderation_service.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Optional - -from core.moderation.factory import ModerationFactory, ModerationOutputsResult -from extensions.ext_database import db -from models.model import App, AppModelConfig - - -class ModerationService: - def moderation_for_outputs(self, app_id: str, app_model: App, text: str) -> ModerationOutputsResult: - app_model_config: Optional[AppModelConfig] = None - - app_model_config = ( - db.session.query(AppModelConfig).filter(AppModelConfig.id == app_model.app_model_config_id).first() - ) - - if not app_model_config: - raise ValueError("app model config not found") - - name = app_model_config.sensitive_word_avoidance_dict["type"] - config = app_model_config.sensitive_word_avoidance_dict["config"] - - moderation = ModerationFactory(name, app_id, app_model.tenant_id, config) - return moderation.moderation_for_outputs(text) diff --git a/api/services/plugin/data_migration.py b/api/services/plugin/data_migration.py index 1c5abfecba..5324036414 100644 --- a/api/services/plugin/data_migration.py +++ b/api/services/plugin/data_migration.py @@ -3,7 +3,7 @@ import logging import click -from core.entities import DEFAULT_PLUGIN_ID +from core.plugin.entities.plugin import GenericProviderID, ModelProviderID, ToolProviderID from models.engine import db logger = logging.getLogger(__name__) @@ -12,17 +12,17 @@ logger = logging.getLogger(__name__) class PluginDataMigration: @classmethod def migrate(cls) -> None: - cls.migrate_db_records("providers", "provider_name") # large table - cls.migrate_db_records("provider_models", "provider_name") - cls.migrate_db_records("provider_orders", "provider_name") - cls.migrate_db_records("tenant_default_models", "provider_name") - cls.migrate_db_records("tenant_preferred_model_providers", "provider_name") - cls.migrate_db_records("provider_model_settings", "provider_name") - cls.migrate_db_records("load_balancing_model_configs", "provider_name") + cls.migrate_db_records("providers", "provider_name", ModelProviderID) # large table + cls.migrate_db_records("provider_models", "provider_name", ModelProviderID) + cls.migrate_db_records("provider_orders", "provider_name", ModelProviderID) + cls.migrate_db_records("tenant_default_models", "provider_name", ModelProviderID) + cls.migrate_db_records("tenant_preferred_model_providers", "provider_name", ModelProviderID) + cls.migrate_db_records("provider_model_settings", "provider_name", ModelProviderID) + cls.migrate_db_records("load_balancing_model_configs", "provider_name", ModelProviderID) cls.migrate_datasets() - cls.migrate_db_records("embeddings", "provider_name") # large table - cls.migrate_db_records("dataset_collection_bindings", "provider_name") - cls.migrate_db_records("tool_builtin_providers", "provider") + cls.migrate_db_records("embeddings", "provider_name", ModelProviderID) # large table + cls.migrate_db_records("dataset_collection_bindings", "provider_name", ModelProviderID) + cls.migrate_db_records("tool_builtin_providers", "provider", ToolProviderID) @classmethod def migrate_datasets(cls) -> None: @@ -66,9 +66,10 @@ limit 1000""" fg="white", ) ) - retrieval_model["reranking_model"]["reranking_provider_name"] = ( - 
f"{DEFAULT_PLUGIN_ID}/{retrieval_model['reranking_model']['reranking_provider_name']}/{retrieval_model['reranking_model']['reranking_provider_name']}" - ) + # update google to langgenius/gemini/google etc. + retrieval_model["reranking_model"]["reranking_provider_name"] = ModelProviderID( + retrieval_model["reranking_model"]["reranking_provider_name"] + ).to_string() retrieval_model_changed = True click.echo( @@ -86,9 +87,11 @@ limit 1000""" update_retrieval_model_sql = ", retrieval_model = :retrieval_model" params["retrieval_model"] = json.dumps(retrieval_model) + params["provider_name"] = ModelProviderID(provider_name).to_string() + sql = f"""update {table_name} set {provider_column_name} = - concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name}) + :provider_name {update_retrieval_model_sql} where id = :record_id""" conn.execute(db.text(sql), params) @@ -122,7 +125,9 @@ limit 1000""" ) @classmethod - def migrate_db_records(cls, table_name: str, provider_column_name: str) -> None: + def migrate_db_records( + cls, table_name: str, provider_column_name: str, provider_cls: type[GenericProviderID] + ) -> None: click.echo(click.style(f"Migrating [{table_name}] data for plugin", fg="white")) processed_count = 0 @@ -166,7 +171,8 @@ limit 1000""" ) try: - updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}" + # update jina to langgenius/jina_tool/jina etc. + updated_value = provider_cls(provider_name).to_string() batch_updates.append((updated_value, record_id)) except Exception as e: failed_ids.append(record_id) diff --git a/api/services/plugin/oauth_service.py b/api/services/plugin/oauth_service.py index 461247419b..b84dd0afc5 100644 --- a/api/services/plugin/oauth_service.py +++ b/api/services/plugin/oauth_service.py @@ -1,7 +1,53 @@ +import json +import uuid + from core.plugin.impl.base import BasePluginClient +from extensions.ext_redis import redis_client -class OAuthService(BasePluginClient): - @classmethod - def get_authorization_url(cls, tenant_id: str, user_id: str, provider_name: str) -> str: - return "1234567890" +class OAuthProxyService(BasePluginClient): + # Default max age for proxy context parameter in seconds + __MAX_AGE__ = 5 * 60 # 5 minutes + __KEY_PREFIX__ = "oauth_proxy_context:" + + @staticmethod + def create_proxy_context(user_id: str, tenant_id: str, plugin_id: str, provider: str): + """ + Create a proxy context for an OAuth 2.0 authorization request. + + This parameter is a crucial security measure to prevent Cross-Site Request + Forgery (CSRF) attacks. It works by generating a unique nonce and storing it + in a distributed cache (Redis) along with the user's session context. + + The returned nonce should be included as the 'proxy_context' parameter in the + authorization URL. Upon callback, the `use_proxy_context` method + is used to verify the state, ensuring the request's integrity and authenticity, + and mitigating replay attacks. + """ + context_id = str(uuid.uuid4()) + data = { + "user_id": user_id, + "plugin_id": plugin_id, + "tenant_id": tenant_id, + "provider": provider, + } + redis_client.setex( + f"{OAuthProxyService.__KEY_PREFIX__}{context_id}", + OAuthProxyService.__MAX_AGE__, + json.dumps(data), + ) + return context_id + + @staticmethod + def use_proxy_context(context_id: str): + """ + Validate the proxy context parameter. + This checks if the context_id is valid and not expired. 
+ """ + if not context_id: + raise ValueError("context_id is required") + # get data from redis + data = redis_client.getdel(f"{OAuthProxyService.__KEY_PREFIX__}{context_id}") + if not data: + raise ValueError("context_id is invalid") + return json.loads(data) diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py index a8b64f27db..d7fb4a7c1b 100644 --- a/api/services/plugin/plugin_service.py +++ b/api/services/plugin/plugin_service.py @@ -17,11 +17,18 @@ from core.plugin.entities.plugin import ( PluginInstallation, PluginInstallationSource, ) -from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse +from core.plugin.entities.plugin_daemon import ( + PluginDecodeResponse, + PluginInstallTask, + PluginListResponse, + PluginVerification, +) from core.plugin.impl.asset import PluginAssetManager from core.plugin.impl.debugging import PluginDebuggingClient from core.plugin.impl.plugin import PluginInstaller from extensions.ext_redis import redis_client +from services.errors.plugin import PluginInstallationForbiddenError +from services.feature_service import FeatureService, PluginInstallationScope logger = logging.getLogger(__name__) @@ -86,6 +93,42 @@ class PluginService: logger.exception("failed to fetch latest plugin version") return result + @staticmethod + def _check_marketplace_only_permission(): + """ + Check if the marketplace only permission is enabled + """ + features = FeatureService.get_system_features() + if features.plugin_installation_permission.restrict_to_marketplace_only: + raise PluginInstallationForbiddenError("Plugin installation is restricted to marketplace only") + + @staticmethod + def _check_plugin_installation_scope(plugin_verification: Optional[PluginVerification]): + """ + Check the plugin installation scope + """ + features = FeatureService.get_system_features() + + match features.plugin_installation_permission.plugin_installation_scope: + case PluginInstallationScope.OFFICIAL_ONLY: + if ( + plugin_verification is None + or plugin_verification.authorized_category != PluginVerification.AuthorizedCategory.Langgenius + ): + raise PluginInstallationForbiddenError("Plugin installation is restricted to official only") + case PluginInstallationScope.OFFICIAL_AND_SPECIFIC_PARTNERS: + if plugin_verification is None or plugin_verification.authorized_category not in [ + PluginVerification.AuthorizedCategory.Langgenius, + PluginVerification.AuthorizedCategory.Partner, + ]: + raise PluginInstallationForbiddenError( + "Plugin installation is restricted to official and specific partners" + ) + case PluginInstallationScope.NONE: + raise PluginInstallationForbiddenError("Installing plugins is not allowed") + case PluginInstallationScope.ALL: + pass + @staticmethod def get_debugging_key(tenant_id: str) -> str: """ @@ -208,6 +251,8 @@ class PluginService: # check if plugin pkg is already downloaded manager = PluginInstaller() + features = FeatureService.get_system_features() + try: manager.fetch_plugin_manifest(tenant_id, new_plugin_unique_identifier) # already downloaded, skip, and record install event @@ -215,7 +260,14 @@ class PluginService: except Exception: # plugin not installed, download and upload pkg pkg = download_plugin_pkg(new_plugin_unique_identifier) - manager.upload_pkg(tenant_id, pkg, verify_signature=False) + response = manager.upload_pkg( + tenant_id, + pkg, + verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, + ) + + # check if the plugin is 
available to install + PluginService._check_plugin_installation_scope(response.verification) return manager.upgrade_plugin( tenant_id, @@ -239,6 +291,7 @@ class PluginService: """ Upgrade plugin with github """ + PluginService._check_marketplace_only_permission() manager = PluginInstaller() return manager.upgrade_plugin( tenant_id, @@ -253,33 +306,43 @@ class PluginService: ) @staticmethod - def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginUploadResponse: + def upload_pkg(tenant_id: str, pkg: bytes, verify_signature: bool = False) -> PluginDecodeResponse: """ Upload plugin package files returns: plugin_unique_identifier """ + PluginService._check_marketplace_only_permission() manager = PluginInstaller() - return manager.upload_pkg(tenant_id, pkg, verify_signature) + features = FeatureService.get_system_features() + response = manager.upload_pkg( + tenant_id, + pkg, + verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, + ) + return response @staticmethod def upload_pkg_from_github( tenant_id: str, repo: str, version: str, package: str, verify_signature: bool = False - ) -> PluginUploadResponse: + ) -> PluginDecodeResponse: """ Install plugin from github release package files, returns plugin_unique_identifier """ + PluginService._check_marketplace_only_permission() pkg = download_with_size_limit( f"https://github.com/{repo}/releases/download/{version}/{package}", dify_config.PLUGIN_MAX_PACKAGE_SIZE ) + features = FeatureService.get_system_features() manager = PluginInstaller() - return manager.upload_pkg( + response = manager.upload_pkg( tenant_id, pkg, - verify_signature, + verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, ) + return response @staticmethod def upload_bundle( @@ -289,11 +352,15 @@ class PluginService: Upload a plugin bundle and return the dependencies. 
""" manager = PluginInstaller() + PluginService._check_marketplace_only_permission() return manager.upload_bundle(tenant_id, bundle, verify_signature) @staticmethod def install_from_local_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]): + PluginService._check_marketplace_only_permission() + manager = PluginInstaller() + return manager.install_from_identifiers( tenant_id, plugin_unique_identifiers, @@ -307,6 +374,8 @@ class PluginService: Install plugin from github release package files, returns plugin_unique_identifier """ + PluginService._check_marketplace_only_permission() + manager = PluginInstaller() return manager.install_from_identifiers( tenant_id, @@ -322,28 +391,33 @@ class PluginService: ) @staticmethod - def fetch_marketplace_pkg( - tenant_id: str, plugin_unique_identifier: str, verify_signature: bool = False - ) -> PluginDeclaration: + def fetch_marketplace_pkg(tenant_id: str, plugin_unique_identifier: str) -> PluginDeclaration: """ Fetch marketplace package """ if not dify_config.MARKETPLACE_ENABLED: raise ValueError("marketplace is not enabled") + features = FeatureService.get_system_features() + manager = PluginInstaller() try: declaration = manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier) except Exception: pkg = download_plugin_pkg(plugin_unique_identifier) - declaration = manager.upload_pkg(tenant_id, pkg, verify_signature).manifest + response = manager.upload_pkg( + tenant_id, + pkg, + verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, + ) + # check if the plugin is available to install + PluginService._check_plugin_installation_scope(response.verification) + declaration = response.manifest return declaration @staticmethod - def install_from_marketplace_pkg( - tenant_id: str, plugin_unique_identifiers: Sequence[str], verify_signature: bool = False - ): + def install_from_marketplace_pkg(tenant_id: str, plugin_unique_identifiers: Sequence[str]): """ Install plugin from marketplace package files, returns installation task id @@ -353,15 +427,26 @@ class PluginService: manager = PluginInstaller() + features = FeatureService.get_system_features() + # check if already downloaded for plugin_unique_identifier in plugin_unique_identifiers: try: manager.fetch_plugin_manifest(tenant_id, plugin_unique_identifier) + plugin_decode_response = manager.decode_plugin_from_identifier(tenant_id, plugin_unique_identifier) + # check if the plugin is available to install + PluginService._check_plugin_installation_scope(plugin_decode_response.verification) # already downloaded, skip except Exception: # plugin not installed, download and upload pkg pkg = download_plugin_pkg(plugin_unique_identifier) - manager.upload_pkg(tenant_id, pkg, verify_signature) + response = manager.upload_pkg( + tenant_id, + pkg, + verify_signature=features.plugin_installation_permission.restrict_to_marketplace_only, + ) + # check if the plugin is available to install + PluginService._check_plugin_installation_scope(response.verification) return manager.install_from_identifiers( tenant_id, diff --git a/api/services/vector_service.py b/api/services/vector_service.py index 19e37f4ee3..9165139193 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -97,16 +97,16 @@ class VectorService: vector = Vector(dataset=dataset) vector.delete_by_ids([segment.index_node_id]) vector.add_texts([document], duplicate_check=True) - - # update keyword index - keyword = Keyword(dataset) - keyword.delete_by_ids([segment.index_node_id]) - - # save 
keyword index - if keywords and len(keywords) > 0: - keyword.add_texts([document], keywords_list=[keywords]) else: - keyword.add_texts([document]) + # update keyword index + keyword = Keyword(dataset) + keyword.delete_by_ids([segment.index_node_id]) + + # save keyword index + if keywords and len(keywords) > 0: + keyword.add_texts([document], keywords_list=[keywords]) + else: + keyword.add_texts([document]) @classmethod def generate_child_chunks( diff --git a/api/services/workflow_app_service.py b/api/services/workflow_app_service.py index 6b30a70372..6eabf03018 100644 --- a/api/services/workflow_app_service.py +++ b/api/services/workflow_app_service.py @@ -5,7 +5,7 @@ from sqlalchemy import and_, func, or_, select from sqlalchemy.orm import Session from core.workflow.entities.workflow_execution import WorkflowExecutionStatus -from models import App, EndUser, WorkflowAppLog, WorkflowRun +from models import Account, App, EndUser, WorkflowAppLog, WorkflowRun from models.enums import CreatorUserRole @@ -21,6 +21,8 @@ class WorkflowAppService: created_at_after: datetime | None = None, page: int = 1, limit: int = 20, + created_by_end_user_session_id: str | None = None, + created_by_account: str | None = None, ) -> dict: """ Get paginate workflow app logs using SQLAlchemy 2.0 style @@ -32,6 +34,8 @@ class WorkflowAppService: :param created_at_after: filter logs created after this timestamp :param page: page number :param limit: items per page + :param created_by_end_user_session_id: filter by end user session id + :param created_by_account: filter by account email :return: Pagination object """ # Build base statement using SQLAlchemy 2.0 style @@ -71,6 +75,26 @@ class WorkflowAppService: if created_at_after: stmt = stmt.where(WorkflowAppLog.created_at >= created_at_after) + # Filter by end user session id or account email + if created_by_end_user_session_id: + stmt = stmt.join( + EndUser, + and_( + WorkflowAppLog.created_by == EndUser.id, + WorkflowAppLog.created_by_role == CreatorUserRole.END_USER, + EndUser.session_id == created_by_end_user_session_id, + ), + ) + if created_by_account: + stmt = stmt.join( + Account, + and_( + WorkflowAppLog.created_by == Account.id, + WorkflowAppLog.created_by_role == CreatorUserRole.ACCOUNT, + Account.email == created_by_account, + ), + ) + stmt = stmt.order_by(WorkflowAppLog.created_at.desc()) # Get total count using the same filters diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py new file mode 100644 index 0000000000..164693c2e1 --- /dev/null +++ b/api/services/workflow_draft_variable_service.py @@ -0,0 +1,722 @@ +import dataclasses +import datetime +import logging +from collections.abc import Mapping, Sequence +from enum import StrEnum +from typing import Any, ClassVar + +from sqlalchemy import Engine, orm, select +from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.orm import Session +from sqlalchemy.sql.expression import and_, or_ + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.file.models import File +from core.variables import Segment, StringSegment, Variable +from core.variables.consts import MIN_SELECTORS_LENGTH +from core.variables.segments import ArrayFileSegment, FileSegment +from core.variables.types import SegmentType +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from core.workflow.enums import SystemVariableKey +from core.workflow.nodes import NodeType +from 
core.workflow.nodes.variable_assigner.common.helpers import get_updated_variables +from core.workflow.variable_loader import VariableLoader +from factories.file_factory import StorageKeyLoader +from factories.variable_factory import build_segment, segment_to_variable +from models import App, Conversation +from models.enums import DraftVariableType +from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable + +_logger = logging.getLogger(__name__) + + +@dataclasses.dataclass(frozen=True) +class WorkflowDraftVariableList: + variables: list[WorkflowDraftVariable] + total: int | None = None + + +class WorkflowDraftVariableError(Exception): + pass + + +class VariableResetError(WorkflowDraftVariableError): + pass + + +class UpdateNotSupportedError(WorkflowDraftVariableError): + pass + + +class DraftVarLoader(VariableLoader): + # This implements the VariableLoader interface for loading draft variables. + # + # ref: core.workflow.variable_loader.VariableLoader + + # Database engine used for loading variables. + _engine: Engine + # Application ID for which variables are being loaded. + _app_id: str + _tenant_id: str + _fallback_variables: Sequence[Variable] + + def __init__( + self, + engine: Engine, + app_id: str, + tenant_id: str, + fallback_variables: Sequence[Variable] | None = None, + ) -> None: + self._engine = engine + self._app_id = app_id + self._tenant_id = tenant_id + self._fallback_variables = fallback_variables or [] + + def _selector_to_tuple(self, selector: Sequence[str]) -> tuple[str, str]: + return (selector[0], selector[1]) + + def load_variables(self, selectors: list[list[str]]) -> list[Variable]: + if not selectors: + return [] + + # Map each selector (as a tuple via `_selector_to_tuple`) to its corresponding Variable instance. 
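+        # Draft variables are unique per (app_id, node_id, name), so selectors that
+        # resolve to the same (node_id, name) pair collapse into a single entry here.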
+ variable_by_selector: dict[tuple[str, str], Variable] = {} + + with Session(bind=self._engine, expire_on_commit=False) as session: + srv = WorkflowDraftVariableService(session) + draft_vars = srv.get_draft_variables_by_selectors(self._app_id, selectors) + + for draft_var in draft_vars: + segment = draft_var.get_value() + variable = segment_to_variable( + segment=segment, + selector=draft_var.get_selector(), + id=draft_var.id, + name=draft_var.name, + description=draft_var.description, + ) + selector_tuple = self._selector_to_tuple(variable.selector) + variable_by_selector[selector_tuple] = variable + + # Important: + files: list[File] = [] + for draft_var in draft_vars: + value = draft_var.get_value() + if isinstance(value, FileSegment): + files.append(value.value) + elif isinstance(value, ArrayFileSegment): + files.extend(value.value) + with Session(bind=self._engine) as session: + storage_key_loader = StorageKeyLoader(session, tenant_id=self._tenant_id) + storage_key_loader.load_storage_keys(files) + + return list(variable_by_selector.values()) + + +class WorkflowDraftVariableService: + _session: Session + + def __init__(self, session: Session) -> None: + self._session = session + + def get_variable(self, variable_id: str) -> WorkflowDraftVariable | None: + return self._session.query(WorkflowDraftVariable).filter(WorkflowDraftVariable.id == variable_id).first() + + def get_draft_variables_by_selectors( + self, + app_id: str, + selectors: Sequence[list[str]], + ) -> list[WorkflowDraftVariable]: + ors = [] + for selector in selectors: + assert len(selector) >= MIN_SELECTORS_LENGTH, f"Invalid selector to get: {selector}" + node_id, name = selector[:2] + ors.append(and_(WorkflowDraftVariable.node_id == node_id, WorkflowDraftVariable.name == name)) + + # NOTE(QuantumGhost): Although the number of `or` expressions may be large, as long as + # each expression includes conditions on both `node_id` and `name` (which are covered by the unique index), + # PostgreSQL can efficiently retrieve the results using a bitmap index scan. + # + # Alternatively, a `SELECT` statement could be constructed for each selector and + # combined using `UNION` to fetch all rows. + # Benchmarking indicates that both approaches yield comparable performance. + variables = ( + self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.app_id == app_id, or_(*ors)).all() + ) + return variables + + def list_variables_without_values(self, app_id: str, page: int, limit: int) -> WorkflowDraftVariableList: + criteria = WorkflowDraftVariable.app_id == app_id + total = None + query = self._session.query(WorkflowDraftVariable).filter(criteria) + if page == 1: + total = query.count() + variables = ( + # Do not load the `value` field. 
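+            # Deferring the column keeps potentially large serialized values out of
+            # the listing query; only variable metadata is returned here.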
+ query.options(orm.defer(WorkflowDraftVariable.value)) + .order_by(WorkflowDraftVariable.id.desc()) + .limit(limit) + .offset((page - 1) * limit) + .all() + ) + + return WorkflowDraftVariableList(variables=variables, total=total) + + def _list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList: + criteria = ( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + ) + query = self._session.query(WorkflowDraftVariable).filter(*criteria) + variables = query.order_by(WorkflowDraftVariable.id.desc()).all() + return WorkflowDraftVariableList(variables=variables) + + def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, node_id) + + def list_conversation_variables(self, app_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, CONVERSATION_VARIABLE_NODE_ID) + + def list_system_variables(self, app_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, SYSTEM_VARIABLE_NODE_ID) + + def get_conversation_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, name=name) + + def get_system_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, name=name) + + def get_node_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id, node_id, name) + + def _get_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None: + variable = ( + self._session.query(WorkflowDraftVariable) + .where( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + WorkflowDraftVariable.name == name, + ) + .first() + ) + return variable + + def update_variable( + self, + variable: WorkflowDraftVariable, + name: str | None = None, + value: Segment | None = None, + ) -> WorkflowDraftVariable: + if not variable.editable: + raise UpdateNotSupportedError(f"variable not support updating, id={variable.id}") + if name is not None: + variable.set_name(name) + if value is not None: + variable.set_value(value) + variable.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + self._session.flush() + return variable + + def _reset_conv_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None: + conv_var_by_name = {i.name: i for i in workflow.conversation_variables} + conv_var = conv_var_by_name.get(variable.name) + + if conv_var is None: + self._session.delete(instance=variable) + self._session.flush() + _logger.warning( + "Conversation variable not found for draft variable, id=%s, name=%s", variable.id, variable.name + ) + return None + + variable.set_value(conv_var) + variable.last_edited_at = None + self._session.add(variable) + self._session.flush() + return variable + + def _reset_node_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None: + # If a variable does not allow updating, it makes no sence to resetting it. + if not variable.editable: + return variable + # No execution record for this variable, delete the variable instead. 
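+        # Without an execution record there is no output to restore the value from.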
+ if variable.node_execution_id is None: + self._session.delete(instance=variable) + self._session.flush() + _logger.warning("draft variable has no node_execution_id, id=%s, name=%s", variable.id, variable.name) + return None + + query = select(WorkflowNodeExecutionModel).where(WorkflowNodeExecutionModel.id == variable.node_execution_id) + node_exec = self._session.scalars(query).first() + if node_exec is None: + _logger.warning( + "Node exectution not found for draft variable, id=%s, name=%s, node_execution_id=%s", + variable.id, + variable.name, + variable.node_execution_id, + ) + self._session.delete(instance=variable) + self._session.flush() + return None + + # Get node type for proper value extraction + node_config = workflow.get_node_config_by_id(variable.node_id) + node_type = workflow.get_node_type_from_node_config(node_config) + + outputs_dict = node_exec.outputs_dict or {} + + # Note: Based on the implementation in `_build_from_variable_assigner_mapping`, + # VariableAssignerNode (both v1 and v2) can only create conversation draft variables. + # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes. + # + # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping` + # and `save` methods. + if node_type == NodeType.VARIABLE_ASSIGNER: + return variable + + if variable.name not in outputs_dict: + # If variable not found in execution data, delete the variable + self._session.delete(instance=variable) + self._session.flush() + return None + value = outputs_dict[variable.name] + value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, value) + # Extract variable value using unified logic + variable.set_value(value_seg) + variable.last_edited_at = None # Reset to indicate this is a reset operation + self._session.flush() + return variable + + def reset_variable(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None: + variable_type = variable.get_variable_type() + if variable_type == DraftVariableType.CONVERSATION: + return self._reset_conv_var(workflow, variable) + elif variable_type == DraftVariableType.NODE: + return self._reset_node_var(workflow, variable) + else: + raise VariableResetError(f"cannot reset system variable, variable_id={variable.id}") + + def delete_variable(self, variable: WorkflowDraftVariable): + self._session.delete(variable) + + def delete_workflow_variables(self, app_id: str): + ( + self._session.query(WorkflowDraftVariable) + .filter(WorkflowDraftVariable.app_id == app_id) + .delete(synchronize_session=False) + ) + + def delete_node_variables(self, app_id: str, node_id: str): + return self._delete_node_variables(app_id, node_id) + + def _delete_node_variables(self, app_id: str, node_id: str): + self._session.query(WorkflowDraftVariable).where( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + ).delete() + + def _get_conversation_id_from_draft_variable(self, app_id: str) -> str | None: + draft_var = self._get_variable( + app_id=app_id, + node_id=SYSTEM_VARIABLE_NODE_ID, + name=str(SystemVariableKey.CONVERSATION_ID), + ) + if draft_var is None: + return None + segment = draft_var.get_value() + if not isinstance(segment, StringSegment): + _logger.warning( + "sys.conversation_id variable is not a string: app_id=%s, id=%s", + app_id, + draft_var.id, + ) + return None + return segment.value + + def get_or_create_conversation( + self, + account_id: str, + app: App, + workflow: Workflow, + ) -> str: + """ + 
get_or_create_conversation creates and returns the ID of a conversation for debugging. + + If a conversation already exists, as determined by the following criteria, its ID is returned: + - The system variable `sys.conversation_id` exists in the draft variable table, and + - A corresponding conversation record is found in the database. + + If no such conversation exists, a new conversation is created and its ID is returned. + """ + conv_id = self._get_conversation_id_from_draft_variable(workflow.app_id) + + if conv_id is not None: + conversation = ( + self._session.query(Conversation) + .filter( + Conversation.id == conv_id, + Conversation.app_id == workflow.app_id, + ) + .first() + ) + # Only return the conversation ID if it exists and is valid (has a correspond conversation record in DB). + if conversation is not None: + return conv_id + conversation = Conversation( + app_id=workflow.app_id, + app_model_config_id=app.app_model_config_id, + model_provider=None, + model_id="", + override_model_configs=None, + mode=app.mode, + name="Draft Debugging Conversation", + inputs={}, + introduction="", + system_instruction="", + system_instruction_tokens=0, + status="normal", + invoke_from=InvokeFrom.DEBUGGER.value, + from_source="console", + from_end_user_id=None, + from_account_id=account_id, + ) + + self._session.add(conversation) + self._session.flush() + return conversation.id + + def prefill_conversation_variable_default_values(self, workflow: Workflow): + """""" + draft_conv_vars: list[WorkflowDraftVariable] = [] + for conv_var in workflow.conversation_variables: + draft_var = WorkflowDraftVariable.new_conversation_variable( + app_id=workflow.app_id, + name=conv_var.name, + value=conv_var, + description=conv_var.description, + ) + draft_conv_vars.append(draft_var) + _batch_upsert_draft_varaible( + self._session, + draft_conv_vars, + policy=_UpsertPolicy.IGNORE, + ) + + +class _UpsertPolicy(StrEnum): + IGNORE = "ignore" + OVERWRITE = "overwrite" + + +def _batch_upsert_draft_varaible( + session: Session, + draft_vars: Sequence[WorkflowDraftVariable], + policy: _UpsertPolicy = _UpsertPolicy.OVERWRITE, +) -> None: + if not draft_vars: + return None + # Although we could use SQLAlchemy ORM operations here, we choose not to for several reasons: + # + # 1. The variable saving process involves writing multiple rows to the + # `workflow_draft_variables` table. Batch insertion significantly improves performance. + # 2. Using the ORM would require either: + # + # a. Checking for the existence of each variable before insertion, + # resulting in 2n SQL statements for n variables and potential concurrency issues. + # b. Attempting insertion first, then updating if a unique index violation occurs, + # which still results in n to 2n SQL statements. + # + # Both approaches are inefficient and suboptimal. + # 3. We do not need to retrieve the results of the SQL execution or populate ORM + # model instances with the returned values. + # 4. Batch insertion with `ON CONFLICT DO UPDATE` allows us to insert or update all + # variables in a single SQL statement, avoiding the issues above. + # + # For these reasons, we use the SQLAlchemy query builder and rely on dialect-specific + # insert operations instead of the ORM layer. 
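+    # Depending on `policy`, the statement built below is roughly:
+    #   INSERT INTO workflow_draft_variables (...) VALUES (...), (...)
+    #       ON CONFLICT (app_id, node_id, name) DO UPDATE SET ...   -- OVERWRITE
+    # or
+    #       ON CONFLICT (app_id, node_id, name) DO NOTHING          -- IGNORE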
+    stmt = insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars])
+    if policy == _UpsertPolicy.OVERWRITE:
+        stmt = stmt.on_conflict_do_update(
+            index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(),
+            set_={
+                "updated_at": stmt.excluded.updated_at,
+                "last_edited_at": stmt.excluded.last_edited_at,
+                "description": stmt.excluded.description,
+                "value_type": stmt.excluded.value_type,
+                "value": stmt.excluded.value,
+                "visible": stmt.excluded.visible,
+                "editable": stmt.excluded.editable,
+                "node_execution_id": stmt.excluded.node_execution_id,
+            },
+        )
+    elif policy == _UpsertPolicy.IGNORE:
+        stmt = stmt.on_conflict_do_nothing(index_elements=WorkflowDraftVariable.unique_app_id_node_id_name())
+    else:
+        raise Exception("Invalid value for update policy.")
+    session.execute(stmt)
+
+
+def _model_to_insertion_dict(model: WorkflowDraftVariable) -> dict[str, Any]:
+    d: dict[str, Any] = {
+        "app_id": model.app_id,
+        "last_edited_at": None,
+        "node_id": model.node_id,
+        "name": model.name,
+        "selector": model.selector,
+        "value_type": model.value_type,
+        "value": model.value,
+        "node_execution_id": model.node_execution_id,
+    }
+    if model.visible is not None:
+        d["visible"] = model.visible
+    if model.editable is not None:
+        d["editable"] = model.editable
+    if model.created_at is not None:
+        d["created_at"] = model.created_at
+    if model.updated_at is not None:
+        d["updated_at"] = model.updated_at
+    if model.description is not None:
+        d["description"] = model.description
+    return d
+
+
+def _build_segment_for_serialized_values(v: Any) -> Segment:
+    """
+    Reconstructs Segment objects from serialized values, with special handling
+    for FileSegment and ArrayFileSegment types.
+
+    This function should only be used when:
+    1. No explicit type information is available
+    2. The input value is in serialized form (dict or list)
+
+    It detects potential file objects in the serialized data and properly rebuilds the
+    appropriate segment type.
+    """
+    return build_segment(WorkflowDraftVariable.rebuild_file_types(v))
+
+
+class DraftVariableSaver:
+    # _DUMMY_OUTPUT_IDENTITY is a placeholder output for workflow nodes.
+    # Its sole possible value is `None`.
+    #
+    # This is used to signal the execution of a workflow node when it has no other outputs.
+    _DUMMY_OUTPUT_IDENTITY: ClassVar[str] = "__dummy__"
+    _DUMMY_OUTPUT_VALUE: ClassVar[None] = None
+
+    # _EXCLUDE_VARIABLE_NAMES_MAPPING maps node types and versions to variable names that
+    # should be excluded when saving draft variables. This prevents certain internal or
+    # technical variables from being exposed in the draft environment, particularly those
+    # that aren't meant to be directly edited or viewed by users.
+    _EXCLUDE_VARIABLE_NAMES_MAPPING: dict[NodeType, frozenset[str]] = {
+        NodeType.LLM: frozenset(["finish_reason"]),
+        NodeType.LOOP: frozenset(["loop_round"]),
+    }
+
+    # Database session used for persisting draft variables.
+    _session: Session
+
+    # The application ID associated with the draft variables.
+    # This should match the `Workflow.app_id` of the workflow to which the current node belongs.
+    _app_id: str
+
+    # The ID of the node for which DraftVariableSaver is saving output variables.
+    _node_id: str
+
+    # The type of the current node (see NodeType).
+    _node_type: NodeType
+
+    # Indicates how the workflow execution was triggered (see InvokeFrom).
+    _invoke_from: InvokeFrom
+
+    # The ID of the workflow node execution that produced these output variables.
+    _node_execution_id: str
+
+    # _enclosing_node_id identifies the container node that the current node belongs to.
+ # For example, if the current node is an LLM node inside an Iteration node + # or Loop node, then `_enclosing_node_id` refers to the ID of + # the containing Iteration or Loop node. + # + # If the current node is not nested within another node, `_enclosing_node_id` is + # `None`. + _enclosing_node_id: str | None + + def __init__( + self, + session: Session, + app_id: str, + node_id: str, + node_type: NodeType, + invoke_from: InvokeFrom, + node_execution_id: str, + enclosing_node_id: str | None = None, + ): + self._session = session + self._app_id = app_id + self._node_id = node_id + self._node_type = node_type + self._invoke_from = invoke_from + self._node_execution_id = node_execution_id + self._enclosing_node_id = enclosing_node_id + + def _create_dummy_output_variable(self): + return WorkflowDraftVariable.new_node_variable( + app_id=self._app_id, + node_id=self._node_id, + name=self._DUMMY_OUTPUT_IDENTITY, + node_execution_id=self._node_execution_id, + value=build_segment(self._DUMMY_OUTPUT_VALUE), + visible=False, + editable=False, + ) + + def _should_save_output_variables_for_draft(self) -> bool: + # Only save output variables for debugging execution of workflow. + if self._invoke_from != InvokeFrom.DEBUGGER: + return False + if self._enclosing_node_id is not None and self._node_type != NodeType.VARIABLE_ASSIGNER: + # Currently we do not save output variables for nodes inside loop or iteration. + return False + return True + + def _build_from_variable_assigner_mapping(self, process_data: Mapping[str, Any]) -> list[WorkflowDraftVariable]: + draft_vars: list[WorkflowDraftVariable] = [] + updated_variables = get_updated_variables(process_data) or [] + + for item in updated_variables: + selector = item.selector + if len(selector) < MIN_SELECTORS_LENGTH: + raise Exception("selector too short") + # NOTE(QuantumGhost): only the following two kinds of variable could be updated by + # VariableAssigner: ConversationVariable and iteration variable. + # We only save conversation variable here. + if selector[0] != CONVERSATION_VARIABLE_NODE_ID: + continue + segment = WorkflowDraftVariable.build_segment_with_type(segment_type=item.value_type, value=item.new_value) + draft_vars.append( + WorkflowDraftVariable.new_conversation_variable( + app_id=self._app_id, + name=item.name, + value=segment, + ) + ) + # Add a dummy output variable to indicate that this node is executed. + draft_vars.append(self._create_dummy_output_variable()) + return draft_vars + + def _build_variables_from_start_mapping(self, output: Mapping[str, Any]) -> list[WorkflowDraftVariable]: + draft_vars = [] + has_non_sys_variables = False + for name, value in output.items(): + value_seg = _build_segment_for_serialized_values(value) + node_id, name = self._normalize_variable_for_start_node(name) + # If node_id is not `sys`, it means that the variable is a user-defined input field + # in `Start` node. + if node_id != SYSTEM_VARIABLE_NODE_ID: + draft_vars.append( + WorkflowDraftVariable.new_node_variable( + app_id=self._app_id, + node_id=self._node_id, + name=name, + node_execution_id=self._node_execution_id, + value=value_seg, + visible=True, + editable=True, + ) + ) + has_non_sys_variables = True + else: + if name == SystemVariableKey.FILES: + # Here we know the type of variable must be `array[file]`, we + # just build files from the value. 
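+                    # Each element is a serialized File mapping, so it can be validated
+                    # straight back into a File model.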
+ files = [File.model_validate(v) for v in value] + if files: + value_seg = WorkflowDraftVariable.build_segment_with_type(SegmentType.ARRAY_FILE, files) + else: + value_seg = ArrayFileSegment(value=[]) + + draft_vars.append( + WorkflowDraftVariable.new_sys_variable( + app_id=self._app_id, + name=name, + node_execution_id=self._node_execution_id, + value=value_seg, + editable=self._should_variable_be_editable(node_id, name), + ) + ) + if not has_non_sys_variables: + draft_vars.append(self._create_dummy_output_variable()) + return draft_vars + + def _normalize_variable_for_start_node(self, name: str) -> tuple[str, str]: + if not name.startswith(f"{SYSTEM_VARIABLE_NODE_ID}."): + return self._node_id, name + _, name_ = name.split(".", maxsplit=1) + return SYSTEM_VARIABLE_NODE_ID, name_ + + def _build_variables_from_mapping(self, output: Mapping[str, Any]) -> list[WorkflowDraftVariable]: + draft_vars = [] + for name, value in output.items(): + if not self._should_variable_be_saved(name): + _logger.debug( + "Skip saving variable as it has been excluded by its node_type, name=%s, node_type=%s", + name, + self._node_type, + ) + continue + if isinstance(value, Segment): + value_seg = value + else: + value_seg = _build_segment_for_serialized_values(value) + draft_vars.append( + WorkflowDraftVariable.new_node_variable( + app_id=self._app_id, + node_id=self._node_id, + name=name, + node_execution_id=self._node_execution_id, + value=value_seg, + visible=self._should_variable_be_visible(self._node_id, self._node_type, name), + ) + ) + return draft_vars + + def save( + self, + process_data: Mapping[str, Any] | None = None, + outputs: Mapping[str, Any] | None = None, + ): + draft_vars: list[WorkflowDraftVariable] = [] + if outputs is None: + outputs = {} + if process_data is None: + process_data = {} + if not self._should_save_output_variables_for_draft(): + return + if self._node_type == NodeType.VARIABLE_ASSIGNER: + draft_vars = self._build_from_variable_assigner_mapping(process_data=process_data) + elif self._node_type == NodeType.START: + draft_vars = self._build_variables_from_start_mapping(outputs) + else: + draft_vars = self._build_variables_from_mapping(outputs) + _batch_upsert_draft_varaible(self._session, draft_vars) + + @staticmethod + def _should_variable_be_editable(node_id: str, name: str) -> bool: + if node_id in (CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID): + return False + if node_id == SYSTEM_VARIABLE_NODE_ID and not is_system_variable_editable(name): + return False + return True + + @staticmethod + def _should_variable_be_visible(node_id: str, node_type: NodeType, name: str) -> bool: + if node_type in NodeType.IF_ELSE: + return False + if node_id == SYSTEM_VARIABLE_NODE_ID and not is_system_variable_editable(name): + return False + return True + + def _should_variable_be_saved(self, name: str) -> bool: + exclude_var_names = self._EXCLUDE_VARIABLE_NAMES_MAPPING.get(self._node_type) + if exclude_var_names is None: + return True + return name not in exclude_var_names diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index bc213ccce6..0fd94ac86e 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -1,6 +1,7 @@ import json import time -from collections.abc import Callable, Generator, Sequence +import uuid +from collections.abc import Callable, Generator, Mapping, Sequence from datetime import UTC, datetime from typing import Any, Optional from uuid import uuid4 @@ -8,12 +9,17 @@ from uuid import uuid4 from 
sqlalchemy import select from sqlalchemy.orm import Session +from core.app.app_config.entities import VariableEntityType from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager +from core.app.entities.app_invoke_entities import InvokeFrom +from core.file import File from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.variables import Variable from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus +from core.workflow.enums import SystemVariableKey from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_engine.entities.event import InNodeEvent from core.workflow.nodes import NodeType @@ -22,9 +28,11 @@ from core.workflow.nodes.enums import ErrorStrategy from core.workflow.nodes.event import RunCompletedEvent from core.workflow.nodes.event.types import NodeEvent from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING +from core.workflow.nodes.start.entities import StartNodeData from core.workflow.workflow_entry import WorkflowEntry from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated from extensions.ext_database import db +from factories.file_factory import build_from_mapping, build_from_mappings from models.account import Account from models.model import App, AppMode from models.tools import WorkflowToolProvider @@ -34,10 +42,15 @@ from models.workflow import ( WorkflowNodeExecutionTriggeredFrom, WorkflowType, ) -from services.errors.app import WorkflowHashNotEqualError +from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError from services.workflow.workflow_converter import WorkflowConverter from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError +from .workflow_draft_variable_service import ( + DraftVariableSaver, + DraftVarLoader, + WorkflowDraftVariableService, +) class WorkflowService: @@ -45,6 +58,33 @@ class WorkflowService: Workflow Service """ + def get_node_last_run(self, app_model: App, workflow: Workflow, node_id: str) -> WorkflowNodeExecutionModel | None: + # TODO(QuantumGhost): This query is not fully covered by index. 
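+        # Filters by tenant/app/workflow/node and returns the most recently created execution.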
+ criteria = ( + WorkflowNodeExecutionModel.tenant_id == app_model.tenant_id, + WorkflowNodeExecutionModel.app_id == app_model.id, + WorkflowNodeExecutionModel.workflow_id == workflow.id, + WorkflowNodeExecutionModel.node_id == node_id, + ) + node_exec = ( + db.session.query(WorkflowNodeExecutionModel) + .filter(*criteria) + .order_by(WorkflowNodeExecutionModel.created_at.desc()) + .first() + ) + return node_exec + + def is_workflow_exist(self, app_model: App) -> bool: + return ( + db.session.query(Workflow) + .filter( + Workflow.tenant_id == app_model.tenant_id, + Workflow.app_id == app_model.id, + Workflow.version == Workflow.VERSION_DRAFT, + ) + .count() + ) > 0 + def get_draft_workflow(self, app_model: App) -> Optional[Workflow]: """ Get draft workflow @@ -61,6 +101,23 @@ class WorkflowService: # return draft workflow return workflow + def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Optional[Workflow]: + # fetch published workflow by workflow_id + workflow = ( + db.session.query(Workflow) + .filter( + Workflow.tenant_id == app_model.tenant_id, + Workflow.app_id == app_model.id, + Workflow.id == workflow_id, + ) + .first() + ) + if not workflow: + return None + if workflow.version == Workflow.VERSION_DRAFT: + raise IsDraftWorkflowError(f"Workflow is draft version, id={workflow_id}") + return workflow + def get_published_workflow(self, app_model: App) -> Optional[Workflow]: """ Get published workflow @@ -199,7 +256,7 @@ class WorkflowService: tenant_id=app_model.tenant_id, app_id=app_model.id, type=draft_workflow.type, - version=str(datetime.now(UTC).replace(tzinfo=None)), + version=Workflow.version_from_datetime(datetime.now(UTC).replace(tzinfo=None)), graph=draft_workflow.graph, features=draft_workflow.features, created_by=account.id, @@ -253,26 +310,85 @@ class WorkflowService: return default_config def run_draft_workflow_node( - self, app_model: App, node_id: str, user_inputs: dict, account: Account + self, + app_model: App, + draft_workflow: Workflow, + node_id: str, + user_inputs: Mapping[str, Any], + account: Account, + query: str = "", + files: Sequence[File] | None = None, ) -> WorkflowNodeExecutionModel: """ Run draft workflow node """ - # fetch draft workflow by app_model - draft_workflow = self.get_draft_workflow(app_model=app_model) - if not draft_workflow: - raise ValueError("Workflow not initialized") + files = files or [] + + with Session(bind=db.engine, expire_on_commit=False) as session, session.begin(): + draft_var_srv = WorkflowDraftVariableService(session) + draft_var_srv.prefill_conversation_variable_default_values(draft_workflow) + + node_config = draft_workflow.get_node_config_by_id(node_id) + node_type = Workflow.get_node_type_from_node_config(node_config) + node_data = node_config.get("data", {}) + if node_type == NodeType.START: + with Session(bind=db.engine) as session, session.begin(): + draft_var_srv = WorkflowDraftVariableService(session) + conversation_id = draft_var_srv.get_or_create_conversation( + account_id=account.id, + app=app_model, + workflow=draft_workflow, + ) + start_data = StartNodeData.model_validate(node_data) + user_inputs = _rebuild_file_for_user_inputs_in_start_node( + tenant_id=draft_workflow.tenant_id, start_node_data=start_data, user_inputs=user_inputs + ) + # init variable pool + variable_pool = _setup_variable_pool( + query=query, + files=files or [], + user_id=account.id, + user_inputs=user_inputs, + workflow=draft_workflow, + # NOTE(QuantumGhost): We rely on `DraftVarLoader` to load conversation 
variables. + conversation_variables=[], + node_type=node_type, + conversation_id=conversation_id, + ) + + else: + variable_pool = VariablePool( + system_variables={}, + user_inputs=user_inputs, + environment_variables=draft_workflow.environment_variables, + conversation_variables=[], + ) + + variable_loader = DraftVarLoader( + engine=db.engine, + app_id=app_model.id, + tenant_id=app_model.tenant_id, + ) + + eclosing_node_type_and_id = draft_workflow.get_enclosing_node_type_and_id(node_config) + if eclosing_node_type_and_id: + _, enclosing_node_id = eclosing_node_type_and_id + else: + enclosing_node_id = None + + run = WorkflowEntry.single_step_run( + workflow=draft_workflow, + node_id=node_id, + user_inputs=user_inputs, + user_id=account.id, + variable_pool=variable_pool, + variable_loader=variable_loader, + ) # run draft workflow node start_at = time.perf_counter() - node_execution = self._handle_node_run_result( - invoke_node_fn=lambda: WorkflowEntry.single_step_run( - workflow=draft_workflow, - node_id=node_id, - user_inputs=user_inputs, - user_id=account.id, - ), + invoke_node_fn=lambda: run, start_at=start_at, node_id=node_id, ) @@ -292,6 +408,18 @@ class WorkflowService: # Convert node_execution to WorkflowNodeExecution after save workflow_node_execution = repository.to_db_model(node_execution) + with Session(bind=db.engine) as session, session.begin(): + draft_var_saver = DraftVariableSaver( + session=session, + app_id=app_model.id, + node_id=workflow_node_execution.node_id, + node_type=NodeType(workflow_node_execution.node_type), + invoke_from=InvokeFrom.DEBUGGER, + enclosing_node_id=enclosing_node_id, + node_execution_id=node_execution.id, + ) + draft_var_saver.save(process_data=node_execution.process_data, outputs=node_execution.outputs) + session.commit() return workflow_node_execution def run_free_workflow_node( @@ -332,7 +460,7 @@ class WorkflowService: node_run_result = event.run_result # sign output files - node_run_result.outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) + # node_run_result.outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) break if not node_run_result: @@ -394,7 +522,7 @@ class WorkflowService: if node_run_result.process_data else None ) - outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) if node_run_result.outputs else None + outputs = node_run_result.outputs node_execution.inputs = inputs node_execution.process_data = process_data @@ -531,3 +659,83 @@ class WorkflowService: session.delete(workflow) return True + + +def _setup_variable_pool( + query: str, + files: Sequence[File], + user_id: str, + user_inputs: Mapping[str, Any], + workflow: Workflow, + node_type: NodeType, + conversation_id: str, + conversation_variables: list[Variable], +): + # Only inject system variables for START node type. + if node_type == NodeType.START: + # Create a variable pool. + system_inputs: dict[SystemVariableKey, Any] = { + # From inputs: + SystemVariableKey.FILES: files, + SystemVariableKey.USER_ID: user_id, + # From workflow model + SystemVariableKey.APP_ID: workflow.app_id, + SystemVariableKey.WORKFLOW_ID: workflow.id, + # Randomly generated. 
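+            # (each single-step debug run gets its own synthetic execution id)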
+ SystemVariableKey.WORKFLOW_EXECUTION_ID: str(uuid.uuid4()), + } + + # Only add chatflow-specific variables for non-workflow types + if workflow.type != WorkflowType.WORKFLOW.value: + system_inputs.update( + { + SystemVariableKey.QUERY: query, + SystemVariableKey.CONVERSATION_ID: conversation_id, + SystemVariableKey.DIALOGUE_COUNT: 0, + } + ) + else: + system_inputs = {} + + # init variable pool + variable_pool = VariablePool( + system_variables=system_inputs, + user_inputs=user_inputs, + environment_variables=workflow.environment_variables, + conversation_variables=conversation_variables, + ) + + return variable_pool + + +def _rebuild_file_for_user_inputs_in_start_node( + tenant_id: str, start_node_data: StartNodeData, user_inputs: Mapping[str, Any] +) -> Mapping[str, Any]: + inputs_copy = dict(user_inputs) + + for variable in start_node_data.variables: + if variable.type not in (VariableEntityType.FILE, VariableEntityType.FILE_LIST): + continue + if variable.variable not in user_inputs: + continue + value = user_inputs[variable.variable] + file = _rebuild_single_file(tenant_id=tenant_id, value=value, variable_entity_type=variable.type) + inputs_copy[variable.variable] = file + return inputs_copy + + +def _rebuild_single_file(tenant_id: str, value: Any, variable_entity_type: VariableEntityType) -> File | Sequence[File]: + if variable_entity_type == VariableEntityType.FILE: + if not isinstance(value, dict): + raise ValueError(f"expected dict for file object, got {type(value)}") + return build_from_mapping(mapping=value, tenant_id=tenant_id) + elif variable_entity_type == VariableEntityType.FILE_LIST: + if not isinstance(value, list): + raise ValueError(f"expected list for file list object, got {type(value)}") + if len(value) == 0: + return [] + if not isinstance(value[0], dict): + raise ValueError(f"expected dict for first element in the file list, got {type(value)}") + return build_from_mappings(mappings=value, tenant_id=tenant_id) + else: + raise Exception("unreachable") diff --git a/api/tests/integration_tests/.env.example b/api/tests/integration_tests/.env.example index 9e40a8494d..4046096c27 100644 --- a/api/tests/integration_tests/.env.example +++ b/api/tests/integration_tests/.env.example @@ -1,107 +1,217 @@ -# OpenAI API Key -OPENAI_API_KEY= +FLASK_APP=app.py +FLASK_DEBUG=0 +SECRET_KEY='uhySf6a3aZuvRNfAlcr47paOw9TRYBY6j8ZHXpVw1yx5RP27Yj3w2uvI' -# Azure OpenAI API Base Endpoint & API Key -AZURE_OPENAI_API_BASE= -AZURE_OPENAI_API_KEY= +CONSOLE_API_URL=http://127.0.0.1:5001 +CONSOLE_WEB_URL=http://127.0.0.1:3000 -# Anthropic API Key -ANTHROPIC_API_KEY= +# Service API base URL +SERVICE_API_URL=http://127.0.0.1:5001 -# Replicate API Key -REPLICATE_API_KEY= +# Web APP base URL +APP_WEB_URL=http://127.0.0.1:3000 -# Hugging Face API Key -HUGGINGFACE_API_KEY= -HUGGINGFACE_TEXT_GEN_ENDPOINT_URL= -HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL= -HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL= +# Files URL +FILES_URL=http://127.0.0.1:5001 -# Minimax Credentials -MINIMAX_API_KEY= -MINIMAX_GROUP_ID= +# The time in seconds after the signature is rejected +FILES_ACCESS_TIMEOUT=300 -# Spark Credentials -SPARK_APP_ID= -SPARK_API_KEY= -SPARK_API_SECRET= +# Access token expiration time in minutes +ACCESS_TOKEN_EXPIRE_MINUTES=60 -# Tongyi Credentials -TONGYI_DASHSCOPE_API_KEY= +# Refresh token expiration time in days +REFRESH_TOKEN_EXPIRE_DAYS=30 -# Wenxin Credentials -WENXIN_API_KEY= -WENXIN_SECRET_KEY= +# celery configuration +CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1 -# ZhipuAI Credentials 
-ZHIPUAI_API_KEY= +# redis configuration +REDIS_HOST=localhost +REDIS_PORT=6379 +REDIS_USERNAME= +REDIS_PASSWORD=difyai123456 +REDIS_USE_SSL=false +REDIS_DB=0 -# Baichuan Credentials -BAICHUAN_API_KEY= -BAICHUAN_SECRET_KEY= +# PostgreSQL database configuration +DB_USERNAME=postgres +DB_PASSWORD=difyai123456 +DB_HOST=localhost +DB_PORT=5432 +DB_DATABASE=dify -# ChatGLM Credentials -CHATGLM_API_BASE= +# Storage configuration +# use for store upload files, private keys... +# storage type: opendal, s3, aliyun-oss, azure-blob, baidu-obs, google-storage, huawei-obs, oci-storage, tencent-cos, volcengine-tos, supabase +STORAGE_TYPE=opendal -# Xinference Credentials -XINFERENCE_SERVER_URL= -XINFERENCE_GENERATION_MODEL_UID= -XINFERENCE_CHAT_MODEL_UID= -XINFERENCE_EMBEDDINGS_MODEL_UID= -XINFERENCE_RERANK_MODEL_UID= +# Apache OpenDAL storage configuration, refer to https://github.com/apache/opendal +OPENDAL_SCHEME=fs +OPENDAL_FS_ROOT=storage -# OpenLLM Credentials -OPENLLM_SERVER_URL= +# CORS configuration +WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* +CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,* -# LocalAI Credentials -LOCALAI_SERVER_URL= +# Vector database configuration +# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase +VECTOR_STORE=weaviate +# Weaviate configuration +WEAVIATE_ENDPOINT=http://localhost:8080 +WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih +WEAVIATE_GRPC_ENABLED=false +WEAVIATE_BATCH_SIZE=100 -# Cohere Credentials -COHERE_API_KEY= -# Jina Credentials -JINA_API_KEY= +# Upload configuration +UPLOAD_FILE_SIZE_LIMIT=15 +UPLOAD_FILE_BATCH_LIMIT=5 +UPLOAD_IMAGE_FILE_SIZE_LIMIT=10 +UPLOAD_VIDEO_FILE_SIZE_LIMIT=100 +UPLOAD_AUDIO_FILE_SIZE_LIMIT=50 -# Ollama Credentials -OLLAMA_BASE_URL= +# Model configuration +MULTIMODAL_SEND_FORMAT=base64 +PROMPT_GENERATION_MAX_TOKENS=4096 +CODE_GENERATION_MAX_TOKENS=1024 -# Together API Key -TOGETHER_API_KEY= +# Mail configuration, support: resend, smtp +MAIL_TYPE= +MAIL_DEFAULT_SEND_FROM=no-reply +RESEND_API_KEY= +RESEND_API_URL=https://api.resend.com +# smtp configuration +SMTP_SERVER=smtp.example.com +SMTP_PORT=465 +SMTP_USERNAME=123 +SMTP_PASSWORD=abc +SMTP_USE_TLS=true +SMTP_OPPORTUNISTIC_TLS=false -# Mock Switch -MOCK_SWITCH=false +# Sentry configuration +SENTRY_DSN= + +# DEBUG +DEBUG=false +SQLALCHEMY_ECHO=false + +# Notion import configuration, support public and internal +NOTION_INTEGRATION_TYPE=public +NOTION_CLIENT_SECRET=you-client-secret +NOTION_CLIENT_ID=you-client-id +NOTION_INTERNAL_SECRET=you-internal-secret + +ETL_TYPE=dify +UNSTRUCTURED_API_URL= +UNSTRUCTURED_API_KEY= +SCARF_NO_ANALYTICS=false + +#ssrf +SSRF_PROXY_HTTP_URL= +SSRF_PROXY_HTTPS_URL= +SSRF_DEFAULT_MAX_RETRIES=3 +SSRF_DEFAULT_TIME_OUT=5 +SSRF_DEFAULT_CONNECT_TIME_OUT=5 +SSRF_DEFAULT_READ_TIME_OUT=5 +SSRF_DEFAULT_WRITE_TIME_OUT=5 + +BATCH_UPLOAD_LIMIT=10 +KEYWORD_DATA_SOURCE_TYPE=database + +# Workflow file upload limit +WORKFLOW_FILE_UPLOAD_LIMIT=10 # CODE EXECUTION CONFIGURATION -CODE_EXECUTION_ENDPOINT= -CODE_EXECUTION_API_KEY= +CODE_EXECUTION_ENDPOINT=http://127.0.0.1:8194 +CODE_EXECUTION_API_KEY=dify-sandbox +CODE_MAX_NUMBER=9223372036854775807 +CODE_MIN_NUMBER=-9223372036854775808 +CODE_MAX_STRING_LENGTH=80000 +TEMPLATE_TRANSFORM_MAX_LENGTH=80000 +CODE_MAX_STRING_ARRAY_LENGTH=30 +CODE_MAX_OBJECT_ARRAY_LENGTH=30 +CODE_MAX_NUMBER_ARRAY_LENGTH=1000 -# Volcengine MaaS Credentials -VOLC_API_KEY= -VOLC_SECRET_KEY= -VOLC_MODEL_ENDPOINT_ID= 
-VOLC_EMBEDDING_ENDPOINT_ID= +# API Tool configuration +API_TOOL_DEFAULT_CONNECT_TIMEOUT=10 +API_TOOL_DEFAULT_READ_TIMEOUT=60 -# 360 AI Credentials -ZHINAO_API_KEY= +# HTTP Node configuration +HTTP_REQUEST_MAX_CONNECT_TIMEOUT=300 +HTTP_REQUEST_MAX_READ_TIMEOUT=600 +HTTP_REQUEST_MAX_WRITE_TIMEOUT=600 +HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 +HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 + +# Respect X-* headers to redirect clients +RESPECT_XFORWARD_HEADERS_ENABLED=false + +# Log file path +LOG_FILE= +# Log file max size, the unit is MB +LOG_FILE_MAX_SIZE=20 +# Log file max backup count +LOG_FILE_BACKUP_COUNT=5 +# Log dateformat +LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S +# Log Timezone +LOG_TZ=UTC +# Log format +LOG_FORMAT=%(asctime)s,%(msecs)d %(levelname)-2s [%(filename)s:%(lineno)d] %(req_id)s %(message)s + +# Indexing configuration +INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000 + +# Workflow runtime configuration +WORKFLOW_MAX_EXECUTION_STEPS=500 +WORKFLOW_MAX_EXECUTION_TIME=1200 +WORKFLOW_CALL_MAX_DEPTH=5 +WORKFLOW_PARALLEL_DEPTH_LIMIT=3 +MAX_VARIABLE_SIZE=204800 + +# App configuration +APP_MAX_EXECUTION_TIME=1200 +APP_MAX_ACTIVE_REQUESTS=0 + +# Celery beat configuration +CELERY_BEAT_SCHEDULER_TIME=1 + +# Position configuration +POSITION_TOOL_PINS= +POSITION_TOOL_INCLUDES= +POSITION_TOOL_EXCLUDES= + +POSITION_PROVIDER_PINS= +POSITION_PROVIDER_INCLUDES= +POSITION_PROVIDER_EXCLUDES= # Plugin configuration -PLUGIN_DAEMON_KEY= -PLUGIN_DAEMON_URL= +PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi +PLUGIN_DAEMON_URL=http://127.0.0.1:5002 +PLUGIN_REMOTE_INSTALL_PORT=5003 +PLUGIN_REMOTE_INSTALL_HOST=localhost +PLUGIN_MAX_PACKAGE_SIZE=15728640 +INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 # Marketplace configuration -MARKETPLACE_API_URL= -# VESSL AI Credentials -VESSL_AI_MODEL_NAME= -VESSL_AI_API_KEY= -VESSL_AI_ENDPOINT_URL= +MARKETPLACE_ENABLED=true +MARKETPLACE_API_URL=https://marketplace.dify.ai -# GPUStack Credentials -GPUSTACK_SERVER_URL= -GPUSTACK_API_KEY= +# Endpoint configuration +ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id} -# Gitee AI Credentials -GITEE_AI_API_KEY= +# Reset password token expiry minutes +RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5 -# xAI Credentials -XAI_API_KEY= -XAI_API_BASE= +CREATE_TIDB_SERVICE_JOB_ENABLED=false + +# Maximum number of submitted thread count in a ThreadPool for parallel node execution +MAX_SUBMIT_COUNT=100 +# Lockout duration in seconds +LOGIN_LOCKOUT_DURATION=86400 + +HTTP_PROXY='http://127.0.0.1:1092' +HTTPS_PROXY='http://127.0.0.1:1092' +NO_PROXY='localhost,127.0.0.1' +LOG_LEVEL=INFO diff --git a/api/tests/integration_tests/conftest.py b/api/tests/integration_tests/conftest.py index 6e3ab4b74b..d9f90f992e 100644 --- a/api/tests/integration_tests/conftest.py +++ b/api/tests/integration_tests/conftest.py @@ -1,19 +1,91 @@ -import os +import pathlib +import random +import secrets +from collections.abc import Generator -# Getting the absolute path of the current file's directory -ABS_PATH = os.path.dirname(os.path.abspath(__file__)) +import pytest +from flask import Flask +from flask.testing import FlaskClient +from sqlalchemy.orm import Session -# Getting the absolute path of the project's root directory -PROJECT_DIR = os.path.abspath(os.path.join(ABS_PATH, os.pardir, os.pardir)) +from app_factory import create_app +from models import Account, DifySetup, Tenant, TenantAccountJoin, db +from services.account_service import AccountService, RegisterService # Loading the .env file if it exists def 
_load_env() -> None: - dotenv_path = os.path.join(PROJECT_DIR, "tests", "integration_tests", ".env") - if os.path.exists(dotenv_path): + current_file_path = pathlib.Path(__file__).absolute() + # Items later in the list have higher precedence. + files_to_load = [".env", "vdb.env"] + + env_file_paths = [current_file_path.parent / i for i in files_to_load] + for path in env_file_paths: + if not path.exists(): + continue + from dotenv import load_dotenv - load_dotenv(dotenv_path) + # Set `override=True` to ensure values from `vdb.env` take priority over values from `.env` + load_dotenv(str(path), override=True) _load_env() + +_CACHED_APP = create_app() + + +@pytest.fixture +def flask_app() -> Flask: + return _CACHED_APP + + +@pytest.fixture(scope="session") +def setup_account(request) -> Generator[Account, None, None]: + """`dify_setup` completes the setup process for the Dify application. + + It creates `Account` and `Tenant`, and inserts a `DifySetup` record into the database. + + Most tests in the `controllers` package may require dify has been successfully setup. + """ + with _CACHED_APP.test_request_context(): + rand_suffix = random.randint(int(1e6), int(1e7)) # noqa + name = f"test-user-{rand_suffix}" + email = f"{name}@example.com" + RegisterService.setup( + email=email, + name=name, + password=secrets.token_hex(16), + ip_address="localhost", + ) + + with _CACHED_APP.test_request_context(): + with Session(bind=db.engine, expire_on_commit=False) as session: + account = session.query(Account).filter_by(email=email).one() + + yield account + + with _CACHED_APP.test_request_context(): + db.session.query(DifySetup).delete() + db.session.query(TenantAccountJoin).delete() + db.session.query(Account).delete() + db.session.query(Tenant).delete() + db.session.commit() + + +@pytest.fixture +def flask_req_ctx(): + with _CACHED_APP.test_request_context(): + yield + + +@pytest.fixture +def auth_header(setup_account) -> dict[str, str]: + token = AccountService.get_account_jwt_token(setup_account) + return {"Authorization": f"Bearer {token}"} + + +@pytest.fixture +def test_client() -> Generator[FlaskClient, None, None]: + with _CACHED_APP.test_client() as client: + yield client diff --git a/api/tests/integration_tests/controllers/app_fixture.py b/api/tests/integration_tests/controllers/app_fixture.py deleted file mode 100644 index 32e8c11d19..0000000000 --- a/api/tests/integration_tests/controllers/app_fixture.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest - -from app_factory import create_app -from configs import dify_config - -mock_user = type( - "MockUser", - (object,), - { - "is_authenticated": True, - "id": "123", - "is_editor": True, - "is_dataset_editor": True, - "status": "active", - "get_id": "123", - "current_tenant_id": "9d2074fc-6f86-45a9-b09d-6ecc63b9056b", - }, -) - - -@pytest.fixture -def app(): - app = create_app() - dify_config.LOGIN_DISABLED = True - return app diff --git a/api/tests/integration_tests/controllers/console/__init__.py b/api/tests/integration_tests/controllers/console/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/controllers/console/app/__init__.py b/api/tests/integration_tests/controllers/console/app/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py b/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py new file mode 100644 index 0000000000..038f37af5f --- /dev/null +++ 
b/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py @@ -0,0 +1,47 @@ +import uuid +from unittest import mock + +from controllers.console.app import workflow_draft_variable as draft_variable_api +from controllers.console.app import wraps +from factories.variable_factory import build_segment +from models import App, AppMode +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService + + +def _get_mock_srv_class() -> type[WorkflowDraftVariableService]: + return mock.create_autospec(WorkflowDraftVariableService) + + +class TestWorkflowDraftNodeVariableListApi: + def test_get(self, test_client, auth_header, monkeypatch): + srv_class = _get_mock_srv_class() + mock_app_model: App = App() + mock_app_model.id = str(uuid.uuid4()) + test_node_id = "test_node_id" + mock_app_model.mode = AppMode.ADVANCED_CHAT + mock_load_app_model = mock.Mock(return_value=mock_app_model) + + monkeypatch.setattr(draft_variable_api, "WorkflowDraftVariableService", srv_class) + monkeypatch.setattr(wraps, "_load_app_model", mock_load_app_model) + + var1 = WorkflowDraftVariable.new_node_variable( + app_id="test_app_1", + node_id="test_node_1", + name="str_var", + value=build_segment("str_value"), + node_execution_id=str(uuid.uuid4()), + ) + srv_instance = mock.create_autospec(WorkflowDraftVariableService, instance=True) + srv_class.return_value = srv_instance + srv_instance.list_node_variables.return_value = WorkflowDraftVariableList(variables=[var1]) + + response = test_client.get( + f"/console/api/apps/{mock_app_model.id}/workflows/draft/nodes/{test_node_id}/variables", + headers=auth_header, + ) + assert response.status_code == 200 + response_dict = response.json + assert isinstance(response_dict, dict) + assert "items" in response_dict + assert len(response_dict["items"]) == 1 diff --git a/api/tests/integration_tests/controllers/test_controllers.py b/api/tests/integration_tests/controllers/test_controllers.py deleted file mode 100644 index 276ad3a7ed..0000000000 --- a/api/tests/integration_tests/controllers/test_controllers.py +++ /dev/null @@ -1,9 +0,0 @@ -from unittest.mock import patch - -from app_fixture import mock_user # type: ignore - - -def test_post_requires_login(app): - with app.test_client() as client, patch("flask_login.utils._get_user", mock_user): - response = client.get("/console/api/data-source/integrates") - assert response.status_code == 200 diff --git a/api/tests/integration_tests/factories/__init__.py b/api/tests/integration_tests/factories/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/factories/test_storage_key_loader.py b/api/tests/integration_tests/factories/test_storage_key_loader.py new file mode 100644 index 0000000000..fecb3f6d95 --- /dev/null +++ b/api/tests/integration_tests/factories/test_storage_key_loader.py @@ -0,0 +1,371 @@ +import unittest +from datetime import UTC, datetime +from typing import Optional +from unittest.mock import patch +from uuid import uuid4 + +import pytest +from sqlalchemy.orm import Session + +from core.file import File, FileTransferMethod, FileType +from extensions.ext_database import db +from factories.file_factory import StorageKeyLoader +from models import ToolFile, UploadFile +from models.enums import CreatorUserRole + + +@pytest.mark.usefixtures("flask_req_ctx") +class TestStorageKeyLoader(unittest.TestCase): + """ + Integration tests for StorageKeyLoader class. 
+ + Tests the batched loading of storage keys from the database for files + with different transfer methods: LOCAL_FILE, REMOTE_URL, and TOOL_FILE. + """ + + def setUp(self): + """Set up test data before each test method.""" + self.session = db.session() + self.tenant_id = str(uuid4()) + self.user_id = str(uuid4()) + self.conversation_id = str(uuid4()) + + # Create test data that will be cleaned up after each test + self.test_upload_files = [] + self.test_tool_files = [] + + # Create StorageKeyLoader instance + self.loader = StorageKeyLoader(self.session, self.tenant_id) + + def tearDown(self): + """Clean up test data after each test method.""" + self.session.rollback() + + def _create_upload_file( + self, file_id: Optional[str] = None, storage_key: Optional[str] = None, tenant_id: Optional[str] = None + ) -> UploadFile: + """Helper method to create an UploadFile record for testing.""" + if file_id is None: + file_id = str(uuid4()) + if storage_key is None: + storage_key = f"test_storage_key_{uuid4()}" + if tenant_id is None: + tenant_id = self.tenant_id + + upload_file = UploadFile( + tenant_id=tenant_id, + storage_type="local", + key=storage_key, + name="test_file.txt", + size=1024, + extension=".txt", + mime_type="text/plain", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=self.user_id, + created_at=datetime.now(UTC), + used=False, + ) + upload_file.id = file_id + + self.session.add(upload_file) + self.session.flush() + self.test_upload_files.append(upload_file) + + return upload_file + + def _create_tool_file( + self, file_id: Optional[str] = None, file_key: Optional[str] = None, tenant_id: Optional[str] = None + ) -> ToolFile: + """Helper method to create a ToolFile record for testing.""" + if file_id is None: + file_id = str(uuid4()) + if file_key is None: + file_key = f"test_file_key_{uuid4()}" + if tenant_id is None: + tenant_id = self.tenant_id + + tool_file = ToolFile() + tool_file.id = file_id + tool_file.user_id = self.user_id + tool_file.tenant_id = tenant_id + tool_file.conversation_id = self.conversation_id + tool_file.file_key = file_key + tool_file.mimetype = "text/plain" + tool_file.original_url = "http://example.com/file.txt" + tool_file.name = "test_tool_file.txt" + tool_file.size = 2048 + + self.session.add(tool_file) + self.session.flush() + self.test_tool_files.append(tool_file) + + return tool_file + + def _create_file( + self, related_id: str, transfer_method: FileTransferMethod, tenant_id: Optional[str] = None + ) -> File: + """Helper method to create a File object for testing.""" + if tenant_id is None: + tenant_id = self.tenant_id + + # Set related_id for LOCAL_FILE and TOOL_FILE transfer methods + file_related_id = None + remote_url = None + + if transfer_method in (FileTransferMethod.LOCAL_FILE, FileTransferMethod.TOOL_FILE): + file_related_id = related_id + elif transfer_method == FileTransferMethod.REMOTE_URL: + remote_url = "https://example.com/test_file.txt" + file_related_id = related_id + + return File( + id=str(uuid4()), # Generate new UUID for File.id + tenant_id=tenant_id, + type=FileType.DOCUMENT, + transfer_method=transfer_method, + related_id=file_related_id, + remote_url=remote_url, + filename="test_file.txt", + extension=".txt", + mime_type="text/plain", + size=1024, + storage_key="initial_key", + ) + + def test_load_storage_keys_local_file(self): + """Test loading storage keys for LOCAL_FILE transfer method.""" + # Create test data + upload_file = self._create_upload_file() + file = self._create_file(related_id=upload_file.id, 
transfer_method=FileTransferMethod.LOCAL_FILE) + + # Load storage keys + self.loader.load_storage_keys([file]) + + # Verify storage key was loaded correctly + assert file._storage_key == upload_file.key + + def test_load_storage_keys_remote_url(self): + """Test loading storage keys for REMOTE_URL transfer method.""" + # Create test data + upload_file = self._create_upload_file() + file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.REMOTE_URL) + + # Load storage keys + self.loader.load_storage_keys([file]) + + # Verify storage key was loaded correctly + assert file._storage_key == upload_file.key + + def test_load_storage_keys_tool_file(self): + """Test loading storage keys for TOOL_FILE transfer method.""" + # Create test data + tool_file = self._create_tool_file() + file = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE) + + # Load storage keys + self.loader.load_storage_keys([file]) + + # Verify storage key was loaded correctly + assert file._storage_key == tool_file.file_key + + def test_load_storage_keys_mixed_methods(self): + """Test batch loading with mixed transfer methods.""" + # Create test data for different transfer methods + upload_file1 = self._create_upload_file() + upload_file2 = self._create_upload_file() + tool_file = self._create_tool_file() + + file1 = self._create_file(related_id=upload_file1.id, transfer_method=FileTransferMethod.LOCAL_FILE) + file2 = self._create_file(related_id=upload_file2.id, transfer_method=FileTransferMethod.REMOTE_URL) + file3 = self._create_file(related_id=tool_file.id, transfer_method=FileTransferMethod.TOOL_FILE) + + files = [file1, file2, file3] + + # Load storage keys + self.loader.load_storage_keys(files) + + # Verify all storage keys were loaded correctly + assert file1._storage_key == upload_file1.key + assert file2._storage_key == upload_file2.key + assert file3._storage_key == tool_file.file_key + + def test_load_storage_keys_empty_list(self): + """Test with empty file list.""" + # Should not raise any exceptions + self.loader.load_storage_keys([]) + + def test_load_storage_keys_tenant_mismatch(self): + """Test tenant_id validation.""" + # Create file with different tenant_id + upload_file = self._create_upload_file() + file = self._create_file( + related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=str(uuid4()) + ) + + # Should raise ValueError for tenant mismatch + with pytest.raises(ValueError) as context: + self.loader.load_storage_keys([file]) + + assert "invalid file, expected tenant_id" in str(context.value) + + def test_load_storage_keys_missing_file_id(self): + """Test with None file.related_id.""" + # Create a file with valid parameters first, then manually set related_id to None + file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE) + file.related_id = None + + # Should raise ValueError for None file related_id + with pytest.raises(ValueError) as context: + self.loader.load_storage_keys([file]) + + assert str(context.value) == "file id should not be None." 
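# --- Editor's note: illustrative sketch, not part of the patch above or below. ---
# The tests in this class pin down the StorageKeyLoader contract: files are grouped
# by transfer method, one batched lookup is made per backing table (UploadFile for
# LOCAL_FILE/REMOTE_URL, ToolFile for TOOL_FILE), every file must carry the loader's
# tenant_id, and a missing related_id or missing record raises ValueError. The
# self-contained sketch below mirrors that contract with plain dicts standing in for
# the two tables; all names here are illustrative, not Dify's actual implementation,
# which issues SQLAlchemy queries instead.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class _SketchFile:
    tenant_id: str
    transfer_method: str  # "local_file", "remote_url" or "tool_file"
    related_id: Optional[str]
    storage_key: str = field(default="", init=False)


def _load_storage_keys_sketch(files, tenant_id, upload_table, tool_table):
    """Populate storage_key on each file using one batched lookup per table."""
    upload_ids, tool_ids = set(), set()
    for f in files:
        if f.tenant_id != tenant_id:
            raise ValueError(f"invalid file, expected tenant_id={tenant_id}, got {f.tenant_id}")
        if f.related_id is None:
            raise ValueError("file id should not be None.")
        (tool_ids if f.transfer_method == "tool_file" else upload_ids).add(f.related_id)
    # One "query" per table; ids without a matching record count as missing.
    for ids, table in ((upload_ids, upload_table), (tool_ids, tool_table)):
        missing = ids - table.keys()
        if missing:
            raise ValueError(f"missing storage records for ids: {sorted(missing)}")
    for f in files:
        table = tool_table if f.transfer_method == "tool_file" else upload_table
        f.storage_key = table[f.related_id]


# Tiny usage example: two files resolved with exactly one pass over each table.
_files = [
    _SketchFile(tenant_id="t1", transfer_method="local_file", related_id="u1"),
    _SketchFile(tenant_id="t1", transfer_method="tool_file", related_id="tf1"),
]
_load_storage_keys_sketch(_files, "t1", upload_table={"u1": "key-u1"}, tool_table={"tf1": "key-tf1"})
assert [f.storage_key for f in _files] == ["key-u1", "key-tf1"]
# --- End editor's note. ---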
+ + def test_load_storage_keys_nonexistent_upload_file_records(self): + """Test with missing UploadFile database records.""" + # Create file with non-existent upload file id + non_existent_id = str(uuid4()) + file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.LOCAL_FILE) + + # Should raise ValueError for missing record + with pytest.raises(ValueError): + self.loader.load_storage_keys([file]) + + def test_load_storage_keys_nonexistent_tool_file_records(self): + """Test with missing ToolFile database records.""" + # Create file with non-existent tool file id + non_existent_id = str(uuid4()) + file = self._create_file(related_id=non_existent_id, transfer_method=FileTransferMethod.TOOL_FILE) + + # Should raise ValueError for missing record + with pytest.raises(ValueError): + self.loader.load_storage_keys([file]) + + def test_load_storage_keys_invalid_uuid(self): + """Test with invalid UUID format.""" + # Create a file with valid parameters first, then manually set invalid related_id + file = self._create_file(related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE) + file.related_id = "invalid-uuid-format" + + # Should raise ValueError for invalid UUID + with pytest.raises(ValueError): + self.loader.load_storage_keys([file]) + + def test_load_storage_keys_batch_efficiency(self): + """Test batched operations use efficient queries.""" + # Create multiple files of different types + upload_files = [self._create_upload_file() for _ in range(3)] + tool_files = [self._create_tool_file() for _ in range(2)] + + files = [] + files.extend( + [self._create_file(related_id=uf.id, transfer_method=FileTransferMethod.LOCAL_FILE) for uf in upload_files] + ) + files.extend( + [self._create_file(related_id=tf.id, transfer_method=FileTransferMethod.TOOL_FILE) for tf in tool_files] + ) + + # Mock the session to count queries + with patch.object(self.session, "scalars", wraps=self.session.scalars) as mock_scalars: + self.loader.load_storage_keys(files) + + # Should make exactly 2 queries (one for upload_files, one for tool_files) + assert mock_scalars.call_count == 2 + + # Verify all storage keys were loaded correctly + for i, file in enumerate(files[:3]): + assert file._storage_key == upload_files[i].key + for i, file in enumerate(files[3:]): + assert file._storage_key == tool_files[i].file_key + + def test_load_storage_keys_tenant_isolation(self): + """Test that tenant isolation works correctly.""" + # Create files for different tenants + other_tenant_id = str(uuid4()) + + # Create upload file for current tenant + upload_file_current = self._create_upload_file() + file_current = self._create_file( + related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE + ) + + # Create upload file for other tenant (but don't add to cleanup list) + upload_file_other = UploadFile( + tenant_id=other_tenant_id, + storage_type="local", + key="other_tenant_key", + name="other_file.txt", + size=1024, + extension=".txt", + mime_type="text/plain", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=self.user_id, + created_at=datetime.now(UTC), + used=False, + ) + upload_file_other.id = str(uuid4()) + self.session.add(upload_file_other) + self.session.flush() + + # Create file for other tenant but try to load with current tenant's loader + file_other = self._create_file( + related_id=upload_file_other.id, transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id + ) + + # Should raise ValueError due to tenant mismatch + with 
pytest.raises(ValueError) as context: + self.loader.load_storage_keys([file_other]) + + assert "invalid file, expected tenant_id" in str(context.value) + + # Current tenant's file should still work + self.loader.load_storage_keys([file_current]) + assert file_current._storage_key == upload_file_current.key + + def test_load_storage_keys_mixed_tenant_batch(self): + """Test batch with mixed tenant files (should fail on first mismatch).""" + # Create files for current tenant + upload_file_current = self._create_upload_file() + file_current = self._create_file( + related_id=upload_file_current.id, transfer_method=FileTransferMethod.LOCAL_FILE + ) + + # Create file for different tenant + other_tenant_id = str(uuid4()) + file_other = self._create_file( + related_id=str(uuid4()), transfer_method=FileTransferMethod.LOCAL_FILE, tenant_id=other_tenant_id + ) + + # Should raise ValueError on tenant mismatch + with pytest.raises(ValueError) as context: + self.loader.load_storage_keys([file_current, file_other]) + + assert "invalid file, expected tenant_id" in str(context.value) + + def test_load_storage_keys_duplicate_file_ids(self): + """Test handling of duplicate file IDs in the batch.""" + # Create upload file + upload_file = self._create_upload_file() + + # Create two File objects with same related_id + file1 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE) + file2 = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE) + + # Should handle duplicates gracefully + self.loader.load_storage_keys([file1, file2]) + + # Both files should have the same storage key + assert file1._storage_key == upload_file.key + assert file2._storage_key == upload_file.key + + def test_load_storage_keys_session_isolation(self): + """Test that the loader uses the provided session correctly.""" + # Create test data + upload_file = self._create_upload_file() + file = self._create_file(related_id=upload_file.id, transfer_method=FileTransferMethod.LOCAL_FILE) + + # Create loader with different session (same underlying connection) + + with Session(bind=db.engine) as other_session: + other_loader = StorageKeyLoader(other_session, self.tenant_id) + with pytest.raises(ValueError): + other_loader.load_storage_keys([file]) diff --git a/api/tests/integration_tests/services/__init__.py b/api/tests/integration_tests/services/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py new file mode 100644 index 0000000000..30cd2e60cb --- /dev/null +++ b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py @@ -0,0 +1,501 @@ +import json +import unittest +import uuid + +import pytest +from sqlalchemy.orm import Session + +from core.variables.variables import StringVariable +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from core.workflow.nodes import NodeType +from factories.variable_factory import build_segment +from libs import datetime_utils +from models import db +from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel +from services.workflow_draft_variable_service import DraftVarLoader, VariableResetError, WorkflowDraftVariableService + + +@pytest.mark.usefixtures("flask_req_ctx") +class TestWorkflowDraftVariableService(unittest.TestCase): + _test_app_id: str + _session: Session + 
_node1_id = "test_node_1" + _node2_id = "test_node_2" + _node_exec_id = str(uuid.uuid4()) + + def setUp(self): + self._test_app_id = str(uuid.uuid4()) + self._session: Session = db.session() + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=self._test_app_id, + name="sys_var", + value=build_segment("sys_value"), + node_execution_id=self._node_exec_id, + ) + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=self._test_app_id, + name="conv_var", + value=build_segment("conv_value"), + ) + node2_vars = [ + WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node2_id, + name="int_var", + value=build_segment(1), + visible=False, + node_execution_id=self._node_exec_id, + ), + WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node2_id, + name="str_var", + value=build_segment("str_value"), + visible=True, + node_execution_id=self._node_exec_id, + ), + ] + node1_var = WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node1_id, + name="str_var", + value=build_segment("str_value"), + visible=True, + node_execution_id=self._node_exec_id, + ) + _variables = list(node2_vars) + _variables.extend( + [ + node1_var, + sys_var, + conv_var, + ] + ) + + db.session.add_all(_variables) + db.session.flush() + self._variable_ids = [v.id for v in _variables] + self._node1_str_var_id = node1_var.id + self._sys_var_id = sys_var.id + self._conv_var_id = conv_var.id + self._node2_var_ids = [v.id for v in node2_vars] + + def _get_test_srv(self) -> WorkflowDraftVariableService: + return WorkflowDraftVariableService(session=self._session) + + def tearDown(self): + self._session.rollback() + + def test_list_variables(self): + srv = self._get_test_srv() + var_list = srv.list_variables_without_values(self._test_app_id, page=1, limit=2) + assert var_list.total == 5 + assert len(var_list.variables) == 2 + page1_var_ids = {v.id for v in var_list.variables} + assert page1_var_ids.issubset(self._variable_ids) + + var_list_2 = srv.list_variables_without_values(self._test_app_id, page=2, limit=2) + assert var_list_2.total is None + assert len(var_list_2.variables) == 2 + page2_var_ids = {v.id for v in var_list_2.variables} + assert page2_var_ids.isdisjoint(page1_var_ids) + assert page2_var_ids.issubset(self._variable_ids) + + def test_get_node_variable(self): + srv = self._get_test_srv() + node_var = srv.get_node_variable(self._test_app_id, self._node1_id, "str_var") + assert node_var is not None + assert node_var.id == self._node1_str_var_id + assert node_var.name == "str_var" + assert node_var.get_value() == build_segment("str_value") + + def test_get_system_variable(self): + srv = self._get_test_srv() + sys_var = srv.get_system_variable(self._test_app_id, "sys_var") + assert sys_var is not None + assert sys_var.id == self._sys_var_id + assert sys_var.name == "sys_var" + assert sys_var.get_value() == build_segment("sys_value") + + def test_get_conversation_variable(self): + srv = self._get_test_srv() + conv_var = srv.get_conversation_variable(self._test_app_id, "conv_var") + assert conv_var is not None + assert conv_var.id == self._conv_var_id + assert conv_var.name == "conv_var" + assert conv_var.get_value() == build_segment("conv_value") + + def test_delete_node_variables(self): + srv = self._get_test_srv() + srv.delete_node_variables(self._test_app_id, self._node2_id) + node2_var_count = ( + self._session.query(WorkflowDraftVariable) + .where( + WorkflowDraftVariable.app_id == self._test_app_id, + 
WorkflowDraftVariable.node_id == self._node2_id, + ) + .count() + ) + assert node2_var_count == 0 + + def test_delete_variable(self): + srv = self._get_test_srv() + node_1_var = ( + self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id == self._node1_str_var_id).one() + ) + srv.delete_variable(node_1_var) + exists = bool( + self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id == self._node1_str_var_id).first() + ) + assert exists is False + + def test__list_node_variables(self): + srv = self._get_test_srv() + node_vars = srv._list_node_variables(self._test_app_id, self._node2_id) + assert len(node_vars.variables) == 2 + assert {v.id for v in node_vars.variables} == set(self._node2_var_ids) + + def test_get_draft_variables_by_selectors(self): + srv = self._get_test_srv() + selectors = [ + [self._node1_id, "str_var"], + [self._node2_id, "str_var"], + [self._node2_id, "int_var"], + ] + variables = srv.get_draft_variables_by_selectors(self._test_app_id, selectors) + assert len(variables) == 3 + assert {v.id for v in variables} == {self._node1_str_var_id} | set(self._node2_var_ids) + + +@pytest.mark.usefixtures("flask_req_ctx") +class TestDraftVariableLoader(unittest.TestCase): + _test_app_id: str + _test_tenant_id: str + + _node1_id = "test_loader_node_1" + _node_exec_id = str(uuid.uuid4()) + + def setUp(self): + self._test_app_id = str(uuid.uuid4()) + self._test_tenant_id = str(uuid.uuid4()) + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=self._test_app_id, + name="sys_var", + value=build_segment("sys_value"), + node_execution_id=self._node_exec_id, + ) + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=self._test_app_id, + name="conv_var", + value=build_segment("conv_value"), + ) + node_var = WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node1_id, + name="str_var", + value=build_segment("str_value"), + visible=True, + node_execution_id=self._node_exec_id, + ) + _variables = [ + node_var, + sys_var, + conv_var, + ] + + with Session(bind=db.engine, expire_on_commit=False) as session: + session.add_all(_variables) + session.flush() + session.commit() + self._variable_ids = [v.id for v in _variables] + self._node_var_id = node_var.id + self._sys_var_id = sys_var.id + self._conv_var_id = conv_var.id + + def tearDown(self): + with Session(bind=db.engine, expire_on_commit=False) as session: + session.query(WorkflowDraftVariable).filter(WorkflowDraftVariable.app_id == self._test_app_id).delete( + synchronize_session=False + ) + session.commit() + + def test_variable_loader_with_empty_selector(self): + var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id) + variables = var_loader.load_variables([]) + assert len(variables) == 0 + + def test_variable_loader_with_non_empty_selector(self): + var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id) + variables = var_loader.load_variables( + [ + [SYSTEM_VARIABLE_NODE_ID, "sys_var"], + [CONVERSATION_VARIABLE_NODE_ID, "conv_var"], + [self._node1_id, "str_var"], + ] + ) + assert len(variables) == 3 + conv_var = next(v for v in variables if v.selector[0] == CONVERSATION_VARIABLE_NODE_ID) + assert conv_var.id == self._conv_var_id + sys_var = next(v for v in variables if v.selector[0] == SYSTEM_VARIABLE_NODE_ID) + assert sys_var.id == self._sys_var_id + node1_var = next(v for v in variables if v.selector[0] == self._node1_id) + assert node1_var.id == 
self._node_var_id + + +@pytest.mark.usefixtures("flask_req_ctx") +class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase): + """Integration tests for reset_variable functionality using real database""" + + _test_app_id: str + _test_tenant_id: str + _test_workflow_id: str + _session: Session + _node_id = "test_reset_node" + _node_exec_id: str + _workflow_node_exec_id: str + + def setUp(self): + self._test_app_id = str(uuid.uuid4()) + self._test_tenant_id = str(uuid.uuid4()) + self._test_workflow_id = str(uuid.uuid4()) + self._node_exec_id = str(uuid.uuid4()) + self._workflow_node_exec_id = str(uuid.uuid4()) + self._session: Session = db.session() + + # Create a workflow node execution record with outputs + # Note: The WorkflowNodeExecutionModel.id should match the node_execution_id in WorkflowDraftVariable + self._workflow_node_execution = WorkflowNodeExecutionModel( + id=self._node_exec_id, # This should match the node_execution_id in the variable + tenant_id=self._test_tenant_id, + app_id=self._test_app_id, + workflow_id=self._test_workflow_id, + triggered_from="workflow-run", + workflow_run_id=str(uuid.uuid4()), + index=1, + node_execution_id=self._node_exec_id, + node_id=self._node_id, + node_type=NodeType.LLM.value, + title="Test Node", + inputs='{"input": "test input"}', + process_data='{"test_var": "process_value", "other_var": "other_process"}', + outputs='{"test_var": "output_value", "other_var": "other_output"}', + status="succeeded", + elapsed_time=1.5, + created_by_role="account", + created_by=str(uuid.uuid4()), + ) + + # Create conversation variables for the workflow + self._conv_variables = [ + StringVariable( + id=str(uuid.uuid4()), + name="conv_var_1", + description="Test conversation variable 1", + value="default_value_1", + ), + StringVariable( + id=str(uuid.uuid4()), + name="conv_var_2", + description="Test conversation variable 2", + value="default_value_2", + ), + ] + + # Create test variables + self._node_var_with_exec = WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node_id, + name="test_var", + value=build_segment("old_value"), + node_execution_id=self._node_exec_id, + ) + self._node_var_with_exec.last_edited_at = datetime_utils.naive_utc_now() + + self._node_var_without_exec = WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node_id, + name="no_exec_var", + value=build_segment("some_value"), + node_execution_id="temp_exec_id", + ) + # Manually set node_execution_id to None after creation + self._node_var_without_exec.node_execution_id = None + + self._node_var_missing_exec = WorkflowDraftVariable.new_node_variable( + app_id=self._test_app_id, + node_id=self._node_id, + name="missing_exec_var", + value=build_segment("some_value"), + node_execution_id=str(uuid.uuid4()), # Use a valid UUID that doesn't exist in database + ) + + self._conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=self._test_app_id, + name="conv_var_1", + value=build_segment("old_conv_value"), + ) + self._conv_var.last_edited_at = datetime_utils.naive_utc_now() + + # Add all to database + db.session.add_all( + [ + self._workflow_node_execution, + self._node_var_with_exec, + self._node_var_without_exec, + self._node_var_missing_exec, + self._conv_var, + ] + ) + db.session.flush() + + # Store IDs for assertions + self._node_var_with_exec_id = self._node_var_with_exec.id + self._node_var_without_exec_id = self._node_var_without_exec.id + self._node_var_missing_exec_id = 
self._node_var_missing_exec.id + self._conv_var_id = self._conv_var.id + + def _get_test_srv(self) -> WorkflowDraftVariableService: + return WorkflowDraftVariableService(session=self._session) + + def _create_mock_workflow(self) -> Workflow: + """Create a real workflow with conversation variables and graph""" + conversation_vars = self._conv_variables + + # Create a simple graph with the test node + graph = { + "nodes": [{"id": "test_reset_node", "type": "llm", "title": "Test Node", "data": {"type": "llm"}}], + "edges": [], + } + + workflow = Workflow.new( + tenant_id=str(uuid.uuid4()), + app_id=self._test_app_id, + type="workflow", + version="1.0", + graph=json.dumps(graph), + features="{}", + created_by=str(uuid.uuid4()), + environment_variables=[], + conversation_variables=conversation_vars, + ) + return workflow + + def tearDown(self): + self._session.rollback() + + def test_reset_node_variable_with_valid_execution_record(self): + """Test resetting a node variable with valid execution record - should restore from execution""" + srv = self._get_test_srv() + mock_workflow = self._create_mock_workflow() + + # Get the variable before reset + variable = srv.get_variable(self._node_var_with_exec_id) + assert variable is not None + assert variable.get_value().value == "old_value" + assert variable.last_edited_at is not None + + # Reset the variable + result = srv.reset_variable(mock_workflow, variable) + + # Should return the updated variable + assert result is not None + assert result.id == self._node_var_with_exec_id + assert result.node_execution_id == self._workflow_node_execution.id + assert result.last_edited_at is None # Should be reset to None + + # The returned variable should have the updated value from execution record + assert result.get_value().value == "output_value" + + # Verify the variable was updated in database + updated_variable = srv.get_variable(self._node_var_with_exec_id) + assert updated_variable is not None + # The value should be updated from the execution record's outputs + assert updated_variable.get_value().value == "output_value" + assert updated_variable.last_edited_at is None + assert updated_variable.node_execution_id == self._workflow_node_execution.id + + def test_reset_node_variable_with_no_execution_id(self): + """Test resetting a node variable with no execution ID - should delete variable""" + srv = self._get_test_srv() + mock_workflow = self._create_mock_workflow() + + # Get the variable before reset + variable = srv.get_variable(self._node_var_without_exec_id) + assert variable is not None + + # Reset the variable + result = srv.reset_variable(mock_workflow, variable) + + # Should return None (variable deleted) + assert result is None + + # Verify the variable was deleted + deleted_variable = srv.get_variable(self._node_var_without_exec_id) + assert deleted_variable is None + + def test_reset_node_variable_with_missing_execution_record(self): + """Test resetting a node variable when execution record doesn't exist""" + srv = self._get_test_srv() + mock_workflow = self._create_mock_workflow() + + # Get the variable before reset + variable = srv.get_variable(self._node_var_missing_exec_id) + assert variable is not None + + # Reset the variable + result = srv.reset_variable(mock_workflow, variable) + + # Should return None (variable deleted) + assert result is None + + # Verify the variable was deleted + deleted_variable = srv.get_variable(self._node_var_missing_exec_id) + assert deleted_variable is None + + def test_reset_conversation_variable(self): + 
"""Test resetting a conversation variable""" + srv = self._get_test_srv() + mock_workflow = self._create_mock_workflow() + + # Get the variable before reset + variable = srv.get_variable(self._conv_var_id) + assert variable is not None + assert variable.get_value().value == "old_conv_value" + assert variable.last_edited_at is not None + + # Reset the variable + result = srv.reset_variable(mock_workflow, variable) + + # Should return the updated variable + assert result is not None + assert result.id == self._conv_var_id + assert result.last_edited_at is None # Should be reset to None + + # Verify the variable was updated with default value from workflow + updated_variable = srv.get_variable(self._conv_var_id) + assert updated_variable is not None + # The value should be updated from the workflow's conversation variable default + assert updated_variable.get_value().value == "default_value_1" + assert updated_variable.last_edited_at is None + + def test_reset_system_variable_raises_error(self): + """Test that resetting a system variable raises an error""" + srv = self._get_test_srv() + mock_workflow = self._create_mock_workflow() + + # Create a system variable + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=self._test_app_id, + name="sys_var", + value=build_segment("sys_value"), + node_execution_id=self._node_exec_id, + ) + db.session.add(sys_var) + db.session.flush() + + # Attempt to reset the system variable + with pytest.raises(VariableResetError) as exc_info: + srv.reset_variable(mock_workflow, sys_var) + + assert "cannot reset system variable" in str(exc_info.value) + assert sys_var.id in str(exc_info.value) diff --git a/api/tests/integration_tests/vdb/matrixone/__init__.py b/api/tests/integration_tests/vdb/matrixone/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/matrixone/test_matrixone.py b/api/tests/integration_tests/vdb/matrixone/test_matrixone.py new file mode 100644 index 0000000000..c8b19ef3ad --- /dev/null +++ b/api/tests/integration_tests/vdb/matrixone/test_matrixone.py @@ -0,0 +1,25 @@ +from core.rag.datasource.vdb.matrixone.matrixone_vector import MatrixoneConfig, MatrixoneVector +from tests.integration_tests.vdb.test_vector_store import ( + AbstractVectorTest, + get_example_text, + setup_mock_redis, +) + + +class MatrixoneVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = MatrixoneVector( + collection_name=self.collection_name, + config=MatrixoneConfig( + host="localhost", port=6001, user="dump", password="111", database="dify", metric="l2" + ), + ) + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="document_id", value=self.example_doc_id) + assert len(ids) == 1 + + +def test_matrixone_vector(setup_mock_redis): + MatrixoneVectorTest().run_all_tests() diff --git a/api/tests/integration_tests/workflow/nodes/test_llm.py b/api/tests/integration_tests/workflow/nodes/test_llm.py index 6aa48b1cbb..a3b2fdc376 100644 --- a/api/tests/integration_tests/workflow/nodes/test_llm.py +++ b/api/tests/integration_tests/workflow/nodes/test_llm.py @@ -8,8 +8,6 @@ from unittest.mock import MagicMock, patch import pytest -from app_factory import create_app -from configs import dify_config from core.app.entities.app_invoke_entities import InvokeFrom from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage from core.model_runtime.entities.message_entities import AssistantPromptMessage @@ -30,21 +28,6 @@ from 
tests.integration_tests.model_runtime.__mock.plugin_daemon import setup_mod from tests.integration_tests.workflow.nodes.__mock.code_executor import setup_code_executor_mock -@pytest.fixture(scope="session") -def app(): - # Set up storage configuration - os.environ["STORAGE_TYPE"] = "opendal" - os.environ["OPENDAL_SCHEME"] = "fs" - os.environ["OPENDAL_FS_ROOT"] = "storage" - - # Ensure storage directory exists - os.makedirs("storage", exist_ok=True) - - app = create_app() - dify_config.LOGIN_DISABLED = True - return app - - def init_llm_node(config: dict) -> LLMNode: graph_config = { "edges": [ @@ -102,197 +85,195 @@ def init_llm_node(config: dict) -> LLMNode: return node -def test_execute_llm(app): - with app.app_context(): - node = init_llm_node( - config={ - "id": "llm", - "data": { - "title": "123", - "type": "llm", - "model": { - "provider": "langgenius/openai/openai", - "name": "gpt-3.5-turbo", - "mode": "chat", - "completion_params": {}, - }, - "prompt_template": [ - { - "role": "system", - "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}.", - }, - {"role": "user", "text": "{{#sys.query#}}"}, - ], - "memory": None, - "context": {"enabled": False}, - "vision": {"enabled": False}, +def test_execute_llm(flask_req_ctx): + node = init_llm_node( + config={ + "id": "llm", + "data": { + "title": "123", + "type": "llm", + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo", + "mode": "chat", + "completion_params": {}, }, + "prompt_template": [ + { + "role": "system", + "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}.", + }, + {"role": "user", "text": "{{#sys.query#}}"}, + ], + "memory": None, + "context": {"enabled": False}, + "vision": {"enabled": False}, }, - ) + }, + ) - credentials = {"openai_api_key": os.environ.get("OPENAI_API_KEY")} + credentials = {"openai_api_key": os.environ.get("OPENAI_API_KEY")} - # Create a proper LLM result with real entities - mock_usage = LLMUsage( - prompt_tokens=30, - prompt_unit_price=Decimal("0.001"), - prompt_price_unit=Decimal("1000"), - prompt_price=Decimal("0.00003"), - completion_tokens=20, - completion_unit_price=Decimal("0.002"), - completion_price_unit=Decimal("1000"), - completion_price=Decimal("0.00004"), - total_tokens=50, - total_price=Decimal("0.00007"), - currency="USD", - latency=0.5, - ) + # Create a proper LLM result with real entities + mock_usage = LLMUsage( + prompt_tokens=30, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("1000"), + prompt_price=Decimal("0.00003"), + completion_tokens=20, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("1000"), + completion_price=Decimal("0.00004"), + total_tokens=50, + total_price=Decimal("0.00007"), + currency="USD", + latency=0.5, + ) - mock_message = AssistantPromptMessage(content="This is a test response from the mocked LLM.") + mock_message = AssistantPromptMessage(content="This is a test response from the mocked LLM.") - mock_llm_result = LLMResult( - model="gpt-3.5-turbo", - prompt_messages=[], - message=mock_message, - usage=mock_usage, - ) + mock_llm_result = LLMResult( + model="gpt-3.5-turbo", + prompt_messages=[], + message=mock_message, + usage=mock_usage, + ) - # Create a simple mock model instance that doesn't call real providers - mock_model_instance = MagicMock() - mock_model_instance.invoke_llm.return_value = mock_llm_result + # Create a simple mock model instance that doesn't call real providers + mock_model_instance = MagicMock() + 
mock_model_instance.invoke_llm.return_value = mock_llm_result - # Create a simple mock model config with required attributes - mock_model_config = MagicMock() - mock_model_config.mode = "chat" - mock_model_config.provider = "langgenius/openai/openai" - mock_model_config.model = "gpt-3.5-turbo" - mock_model_config.provider_model_bundle.configuration.tenant_id = "9d2074fc-6f86-45a9-b09d-6ecc63b9056b" + # Create a simple mock model config with required attributes + mock_model_config = MagicMock() + mock_model_config.mode = "chat" + mock_model_config.provider = "langgenius/openai/openai" + mock_model_config.model = "gpt-3.5-turbo" + mock_model_config.provider_model_bundle.configuration.tenant_id = "9d2074fc-6f86-45a9-b09d-6ecc63b9056b" - # Mock the _fetch_model_config method - def mock_fetch_model_config_func(_node_data_model): - return mock_model_instance, mock_model_config + # Mock the _fetch_model_config method + def mock_fetch_model_config_func(_node_data_model): + return mock_model_instance, mock_model_config - # Also mock ModelManager.get_model_instance to avoid database calls - def mock_get_model_instance(_self, **kwargs): - return mock_model_instance + # Also mock ModelManager.get_model_instance to avoid database calls + def mock_get_model_instance(_self, **kwargs): + return mock_model_instance - with ( - patch.object(node, "_fetch_model_config", mock_fetch_model_config_func), - patch("core.model_manager.ModelManager.get_model_instance", mock_get_model_instance), - ): - # execute node - result = node._run() - assert isinstance(result, Generator) + with ( + patch.object(node, "_fetch_model_config", mock_fetch_model_config_func), + patch("core.model_manager.ModelManager.get_model_instance", mock_get_model_instance), + ): + # execute node + result = node._run() + assert isinstance(result, Generator) - for item in result: - if isinstance(item, RunCompletedEvent): - assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.process_data is not None - assert item.run_result.outputs is not None - assert item.run_result.outputs.get("text") is not None - assert item.run_result.outputs.get("usage", {})["total_tokens"] > 0 + for item in result: + if isinstance(item, RunCompletedEvent): + assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert item.run_result.process_data is not None + assert item.run_result.outputs is not None + assert item.run_result.outputs.get("text") is not None + assert item.run_result.outputs.get("usage", {})["total_tokens"] > 0 @pytest.mark.parametrize("setup_code_executor_mock", [["none"]], indirect=True) -def test_execute_llm_with_jinja2(app, setup_code_executor_mock): +def test_execute_llm_with_jinja2(flask_req_ctx, setup_code_executor_mock): """ Test execute LLM node with jinja2 """ - with app.app_context(): - node = init_llm_node( - config={ - "id": "llm", - "data": { - "title": "123", - "type": "llm", - "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, - "prompt_config": { - "jinja2_variables": [ - {"variable": "sys_query", "value_selector": ["sys", "query"]}, - {"variable": "output", "value_selector": ["abc", "output"]}, - ] - }, - "prompt_template": [ - { - "role": "system", - "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}", - "jinja2_text": "you are a helpful assistant.\ntoday's weather is {{output}}.", - "edition_type": "jinja2", - }, - { - "role": "user", - "text": "{{#sys.query#}}", - "jinja2_text": "{{sys_query}}", - 
"edition_type": "basic", - }, - ], - "memory": None, - "context": {"enabled": False}, - "vision": {"enabled": False}, + node = init_llm_node( + config={ + "id": "llm", + "data": { + "title": "123", + "type": "llm", + "model": {"provider": "openai", "name": "gpt-3.5-turbo", "mode": "chat", "completion_params": {}}, + "prompt_config": { + "jinja2_variables": [ + {"variable": "sys_query", "value_selector": ["sys", "query"]}, + {"variable": "output", "value_selector": ["abc", "output"]}, + ] }, + "prompt_template": [ + { + "role": "system", + "text": "you are a helpful assistant.\ntoday's weather is {{#abc.output#}}", + "jinja2_text": "you are a helpful assistant.\ntoday's weather is {{output}}.", + "edition_type": "jinja2", + }, + { + "role": "user", + "text": "{{#sys.query#}}", + "jinja2_text": "{{sys_query}}", + "edition_type": "basic", + }, + ], + "memory": None, + "context": {"enabled": False}, + "vision": {"enabled": False}, }, - ) + }, + ) - # Mock db.session.close() - db.session.close = MagicMock() + # Mock db.session.close() + db.session.close = MagicMock() - # Create a proper LLM result with real entities - mock_usage = LLMUsage( - prompt_tokens=30, - prompt_unit_price=Decimal("0.001"), - prompt_price_unit=Decimal("1000"), - prompt_price=Decimal("0.00003"), - completion_tokens=20, - completion_unit_price=Decimal("0.002"), - completion_price_unit=Decimal("1000"), - completion_price=Decimal("0.00004"), - total_tokens=50, - total_price=Decimal("0.00007"), - currency="USD", - latency=0.5, - ) + # Create a proper LLM result with real entities + mock_usage = LLMUsage( + prompt_tokens=30, + prompt_unit_price=Decimal("0.001"), + prompt_price_unit=Decimal("1000"), + prompt_price=Decimal("0.00003"), + completion_tokens=20, + completion_unit_price=Decimal("0.002"), + completion_price_unit=Decimal("1000"), + completion_price=Decimal("0.00004"), + total_tokens=50, + total_price=Decimal("0.00007"), + currency="USD", + latency=0.5, + ) - mock_message = AssistantPromptMessage(content="Test response: sunny weather and what's the weather today?") + mock_message = AssistantPromptMessage(content="Test response: sunny weather and what's the weather today?") - mock_llm_result = LLMResult( - model="gpt-3.5-turbo", - prompt_messages=[], - message=mock_message, - usage=mock_usage, - ) + mock_llm_result = LLMResult( + model="gpt-3.5-turbo", + prompt_messages=[], + message=mock_message, + usage=mock_usage, + ) - # Create a simple mock model instance that doesn't call real providers - mock_model_instance = MagicMock() - mock_model_instance.invoke_llm.return_value = mock_llm_result + # Create a simple mock model instance that doesn't call real providers + mock_model_instance = MagicMock() + mock_model_instance.invoke_llm.return_value = mock_llm_result - # Create a simple mock model config with required attributes - mock_model_config = MagicMock() - mock_model_config.mode = "chat" - mock_model_config.provider = "openai" - mock_model_config.model = "gpt-3.5-turbo" - mock_model_config.provider_model_bundle.configuration.tenant_id = "9d2074fc-6f86-45a9-b09d-6ecc63b9056b" + # Create a simple mock model config with required attributes + mock_model_config = MagicMock() + mock_model_config.mode = "chat" + mock_model_config.provider = "openai" + mock_model_config.model = "gpt-3.5-turbo" + mock_model_config.provider_model_bundle.configuration.tenant_id = "9d2074fc-6f86-45a9-b09d-6ecc63b9056b" - # Mock the _fetch_model_config method - def mock_fetch_model_config_func(_node_data_model): - return mock_model_instance, 
mock_model_config + # Mock the _fetch_model_config method + def mock_fetch_model_config_func(_node_data_model): + return mock_model_instance, mock_model_config - # Also mock ModelManager.get_model_instance to avoid database calls - def mock_get_model_instance(_self, **kwargs): - return mock_model_instance + # Also mock ModelManager.get_model_instance to avoid database calls + def mock_get_model_instance(_self, **kwargs): + return mock_model_instance - with ( - patch.object(node, "_fetch_model_config", mock_fetch_model_config_func), - patch("core.model_manager.ModelManager.get_model_instance", mock_get_model_instance), - ): - # execute node - result = node._run() + with ( + patch.object(node, "_fetch_model_config", mock_fetch_model_config_func), + patch("core.model_manager.ModelManager.get_model_instance", mock_get_model_instance), + ): + # execute node + result = node._run() - for item in result: - if isinstance(item, RunCompletedEvent): - assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.process_data is not None - assert "sunny" in json.dumps(item.run_result.process_data) - assert "what's the weather today?" in json.dumps(item.run_result.process_data) + for item in result: + if isinstance(item, RunCompletedEvent): + assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED + assert item.run_result.process_data is not None + assert "sunny" in json.dumps(item.run_result.process_data) + assert "what's the weather today?" in json.dumps(item.run_result.process_data) def test_extract_json(): diff --git a/api/tests/unit_tests/conftest.py b/api/tests/unit_tests/conftest.py index e09acc4c39..077ffe3408 100644 --- a/api/tests/unit_tests/conftest.py +++ b/api/tests/unit_tests/conftest.py @@ -1,4 +1,5 @@ import os +from unittest.mock import MagicMock, patch import pytest from flask import Flask @@ -11,6 +12,24 @@ PROJECT_DIR = os.path.abspath(os.path.join(ABS_PATH, os.pardir, os.pardir)) CACHED_APP = Flask(__name__) +# set global mock for Redis client +redis_mock = MagicMock() +redis_mock.get = MagicMock(return_value=None) +redis_mock.setex = MagicMock() +redis_mock.setnx = MagicMock() +redis_mock.delete = MagicMock() +redis_mock.lock = MagicMock() +redis_mock.exists = MagicMock(return_value=False) +redis_mock.set = MagicMock() +redis_mock.expire = MagicMock() +redis_mock.hgetall = MagicMock(return_value={}) +redis_mock.hdel = MagicMock() +redis_mock.incr = MagicMock(return_value=1) + +# apply the mock to the Redis client in the Flask app +redis_patcher = patch("extensions.ext_redis.redis_client", redis_mock) +redis_patcher.start() + @pytest.fixture def app() -> Flask: @@ -21,3 +40,19 @@ def app() -> Flask: def _provide_app_context(app: Flask): with app.app_context(): yield + + +@pytest.fixture(autouse=True) +def reset_redis_mock(): + """reset the Redis mock before each test""" + redis_mock.reset_mock() + redis_mock.get.return_value = None + redis_mock.setex.return_value = None + redis_mock.setnx.return_value = None + redis_mock.delete.return_value = None + redis_mock.exists.return_value = False + redis_mock.set.return_value = None + redis_mock.expire.return_value = None + redis_mock.hgetall.return_value = {} + redis_mock.hdel.return_value = None + redis_mock.incr.return_value = 1 diff --git a/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py b/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py new file mode 100644 index 0000000000..f26be6702a --- /dev/null +++ 
b/api/tests/unit_tests/controllers/console/app/workflow_draft_variables_test.py @@ -0,0 +1,302 @@ +import datetime +import uuid +from collections import OrderedDict +from typing import Any, NamedTuple + +from flask_restful import marshal + +from controllers.console.app.workflow_draft_variable import ( + _WORKFLOW_DRAFT_VARIABLE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, +) +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from factories.variable_factory import build_segment +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList + +_TEST_APP_ID = "test_app_id" +_TEST_NODE_EXEC_ID = str(uuid.uuid4()) + + +class TestWorkflowDraftVariableFields: + def test_conversation_variable(self): + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=_TEST_APP_ID, name="conv_var", value=build_segment(1) + ) + + conv_var.id = str(uuid.uuid4()) + conv_var.visible = True + + expected_without_value: OrderedDict[str, Any] = OrderedDict( + { + "id": str(conv_var.id), + "type": conv_var.get_variable_type().value, + "name": "conv_var", + "description": "", + "selector": [CONVERSATION_VARIABLE_NODE_ID, "conv_var"], + "value_type": "number", + "edited": False, + "visible": True, + } + ) + + assert marshal(conv_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + expected_with_value["value"] = 1 + assert marshal(conv_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == expected_with_value + + def test_create_sys_variable(self): + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=_TEST_APP_ID, + name="sys_var", + value=build_segment("a"), + editable=True, + node_execution_id=_TEST_NODE_EXEC_ID, + ) + + sys_var.id = str(uuid.uuid4()) + sys_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + sys_var.visible = True + + expected_without_value = OrderedDict( + { + "id": str(sys_var.id), + "type": sys_var.get_variable_type().value, + "name": "sys_var", + "description": "", + "selector": [SYSTEM_VARIABLE_NODE_ID, "sys_var"], + "value_type": "string", + "edited": True, + "visible": True, + } + ) + assert marshal(sys_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + expected_with_value["value"] = "a" + assert marshal(sys_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == expected_with_value + + def test_node_variable(self): + node_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="node_var", + value=build_segment([1, "a"]), + visible=False, + node_execution_id=_TEST_NODE_EXEC_ID, + ) + + node_var.id = str(uuid.uuid4()) + node_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + + expected_without_value: OrderedDict[str, Any] = OrderedDict( + { + "id": str(node_var.id), + "type": node_var.get_variable_type().value, + "name": "node_var", + "description": "", + "selector": ["test_node", "node_var"], + "value_type": "array[any]", + "edited": True, + "visible": False, + } + ) + + assert marshal(node_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + expected_with_value["value"] = [1, "a"] + assert marshal(node_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == 
expected_with_value + + +class TestWorkflowDraftVariableList: + def test_workflow_draft_variable_list(self): + class TestCase(NamedTuple): + name: str + var_list: WorkflowDraftVariableList + expected: dict + + node_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="test_var", + value=build_segment("a"), + visible=True, + node_execution_id=_TEST_NODE_EXEC_ID, + ) + node_var.id = str(uuid.uuid4()) + node_var_dict = OrderedDict( + { + "id": str(node_var.id), + "type": node_var.get_variable_type().value, + "name": "test_var", + "description": "", + "selector": ["test_node", "test_var"], + "value_type": "string", + "edited": False, + "visible": True, + } + ) + + cases = [ + TestCase( + name="empty variable list", + var_list=WorkflowDraftVariableList(variables=[]), + expected=OrderedDict( + { + "items": [], + "total": None, + } + ), + ), + TestCase( + name="empty variable list with total", + var_list=WorkflowDraftVariableList(variables=[], total=10), + expected=OrderedDict( + { + "items": [], + "total": 10, + } + ), + ), + TestCase( + name="non-empty variable list", + var_list=WorkflowDraftVariableList(variables=[node_var], total=None), + expected=OrderedDict( + { + "items": [node_var_dict], + "total": None, + } + ), + ), + TestCase( + name="non-empty variable list with total", + var_list=WorkflowDraftVariableList(variables=[node_var], total=10), + expected=OrderedDict( + { + "items": [node_var_dict], + "total": 10, + } + ), + ), + ] + + for idx, case in enumerate(cases, 1): + assert marshal(case.var_list, _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) == case.expected, ( + f"Test case {idx} failed, {case.name=}" + ) + + +def test_workflow_node_variables_fields(): + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=_TEST_APP_ID, name="conv_var", value=build_segment(1) + ) + resp = marshal(WorkflowDraftVariableList(variables=[conv_var]), _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + assert isinstance(resp, dict) + assert len(resp["items"]) == 1 + item_dict = resp["items"][0] + assert item_dict["name"] == "conv_var" + assert item_dict["value"] == 1 + + +def test_workflow_file_variable_with_signed_url(): + """Test that File type variables include signed URLs in API responses.""" + from core.file.enums import FileTransferMethod, FileType + from core.file.models import File + + # Create a File object with LOCAL_FILE transfer method (which generates signed URLs) + test_file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="test_upload_file_id", + filename="test.jpg", + extension=".jpg", + mime_type="image/jpeg", + size=12345, + ) + + # Create a WorkflowDraftVariable with the File + file_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="file_var", + value=build_segment(test_file), + node_execution_id=_TEST_NODE_EXEC_ID, + ) + + # Marshal the variable using the API fields + resp = marshal(WorkflowDraftVariableList(variables=[file_var]), _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + + # Verify the response structure + assert isinstance(resp, dict) + assert len(resp["items"]) == 1 + item_dict = resp["items"][0] + assert item_dict["name"] == "file_var" + + # Verify the value is a dict (File.to_dict() result) and contains expected fields + value = item_dict["value"] + assert isinstance(value, dict) + + # Verify the File fields are preserved + assert value["id"] == test_file.id + assert value["filename"] 
== test_file.filename + assert value["type"] == test_file.type.value + assert value["transfer_method"] == test_file.transfer_method.value + assert value["size"] == test_file.size + + # Verify the URL is present (it should be a signed URL for LOCAL_FILE transfer method) + remote_url = value["remote_url"] + assert remote_url is not None + + assert isinstance(remote_url, str) + # For LOCAL_FILE, the URL should contain signature parameters + assert "timestamp=" in remote_url + assert "nonce=" in remote_url + assert "sign=" in remote_url + + +def test_workflow_file_variable_remote_url(): + """Test that File type variables with REMOTE_URL transfer method return the remote URL.""" + from core.file.enums import FileTransferMethod, FileType + from core.file.models import File + + # Create a File object with REMOTE_URL transfer method + test_file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://example.com/test.jpg", + filename="test.jpg", + extension=".jpg", + mime_type="image/jpeg", + size=12345, + ) + + # Create a WorkflowDraftVariable with the File + file_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="file_var", + value=build_segment(test_file), + node_execution_id=_TEST_NODE_EXEC_ID, + ) + + # Marshal the variable using the API fields + resp = marshal(WorkflowDraftVariableList(variables=[file_var]), _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + + # Verify the response structure + assert isinstance(resp, dict) + assert len(resp["items"]) == 1 + item_dict = resp["items"][0] + assert item_dict["name"] == "file_var" + + # Verify the value is a dict (File.to_dict() result) and contains expected fields + value = item_dict["value"] + assert isinstance(value, dict) + remote_url = value["remote_url"] + + # For REMOTE_URL, the URL should be the original remote URL + assert remote_url == test_file.remote_url diff --git a/api/tests/unit_tests/core/app/segments/test_factory.py b/api/tests/unit_tests/core/app/segments/test_factory.py deleted file mode 100644 index e6e289c12a..0000000000 --- a/api/tests/unit_tests/core/app/segments/test_factory.py +++ /dev/null @@ -1,165 +0,0 @@ -from uuid import uuid4 - -import pytest - -from core.variables import ( - ArrayNumberVariable, - ArrayObjectVariable, - ArrayStringVariable, - FloatVariable, - IntegerVariable, - ObjectSegment, - SecretVariable, - StringVariable, -) -from core.variables.exc import VariableError -from core.variables.segments import ArrayAnySegment -from factories import variable_factory - - -def test_string_variable(): - test_data = {"value_type": "string", "name": "test_text", "value": "Hello, World!"} - result = variable_factory.build_conversation_variable_from_mapping(test_data) - assert isinstance(result, StringVariable) - - -def test_integer_variable(): - test_data = {"value_type": "number", "name": "test_int", "value": 42} - result = variable_factory.build_conversation_variable_from_mapping(test_data) - assert isinstance(result, IntegerVariable) - - -def test_float_variable(): - test_data = {"value_type": "number", "name": "test_float", "value": 3.14} - result = variable_factory.build_conversation_variable_from_mapping(test_data) - assert isinstance(result, FloatVariable) - - -def test_secret_variable(): - test_data = {"value_type": "secret", "name": "test_secret", "value": "secret_value"} - result = variable_factory.build_conversation_variable_from_mapping(test_data) - assert isinstance(result, 
SecretVariable) - - -def test_invalid_value_type(): - test_data = {"value_type": "unknown", "name": "test_invalid", "value": "value"} - with pytest.raises(VariableError): - variable_factory.build_conversation_variable_from_mapping(test_data) - - -def test_build_a_blank_string(): - result = variable_factory.build_conversation_variable_from_mapping( - { - "value_type": "string", - "name": "blank", - "value": "", - } - ) - assert isinstance(result, StringVariable) - assert result.value == "" - - -def test_build_a_object_variable_with_none_value(): - var = variable_factory.build_segment( - { - "key1": None, - } - ) - assert isinstance(var, ObjectSegment) - assert var.value["key1"] is None - - -def test_object_variable(): - mapping = { - "id": str(uuid4()), - "value_type": "object", - "name": "test_object", - "description": "Description of the variable.", - "value": { - "key1": "text", - "key2": 2, - }, - } - variable = variable_factory.build_conversation_variable_from_mapping(mapping) - assert isinstance(variable, ObjectSegment) - assert isinstance(variable.value["key1"], str) - assert isinstance(variable.value["key2"], int) - - -def test_array_string_variable(): - mapping = { - "id": str(uuid4()), - "value_type": "array[string]", - "name": "test_array", - "description": "Description of the variable.", - "value": [ - "text", - "text", - ], - } - variable = variable_factory.build_conversation_variable_from_mapping(mapping) - assert isinstance(variable, ArrayStringVariable) - assert isinstance(variable.value[0], str) - assert isinstance(variable.value[1], str) - - -def test_array_number_variable(): - mapping = { - "id": str(uuid4()), - "value_type": "array[number]", - "name": "test_array", - "description": "Description of the variable.", - "value": [ - 1, - 2.0, - ], - } - variable = variable_factory.build_conversation_variable_from_mapping(mapping) - assert isinstance(variable, ArrayNumberVariable) - assert isinstance(variable.value[0], int) - assert isinstance(variable.value[1], float) - - -def test_array_object_variable(): - mapping = { - "id": str(uuid4()), - "value_type": "array[object]", - "name": "test_array", - "description": "Description of the variable.", - "value": [ - { - "key1": "text", - "key2": 1, - }, - { - "key1": "text", - "key2": 1, - }, - ], - } - variable = variable_factory.build_conversation_variable_from_mapping(mapping) - assert isinstance(variable, ArrayObjectVariable) - assert isinstance(variable.value[0], dict) - assert isinstance(variable.value[1], dict) - assert isinstance(variable.value[0]["key1"], str) - assert isinstance(variable.value[0]["key2"], int) - assert isinstance(variable.value[1]["key1"], str) - assert isinstance(variable.value[1]["key2"], int) - - -def test_variable_cannot_large_than_200_kb(): - with pytest.raises(VariableError): - variable_factory.build_conversation_variable_from_mapping( - { - "id": str(uuid4()), - "value_type": "string", - "name": "test_text", - "value": "a" * 1024 * 201, - } - ) - - -def test_array_none_variable(): - var = variable_factory.build_segment([None, None, None, None]) - assert isinstance(var, ArrayAnySegment) - assert var.value == [None, None, None, None] diff --git a/api/tests/unit_tests/core/file/test_models.py b/api/tests/unit_tests/core/file/test_models.py new file mode 100644 index 0000000000..3ada2087c6 --- /dev/null +++ b/api/tests/unit_tests/core/file/test_models.py @@ -0,0 +1,25 @@ +from core.file import File, FileTransferMethod, FileType + + +def test_file(): + file = File( + id="test-file", + 
tenant_id="test-tenant-id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.TOOL_FILE, + related_id="test-related-id", + filename="image.png", + extension=".png", + mime_type="image/png", + size=67, + storage_key="test-storage-key", + url="https://example.com/image.png", + ) + assert file.tenant_id == "test-tenant-id" + assert file.type == FileType.IMAGE + assert file.transfer_method == FileTransferMethod.TOOL_FILE + assert file.related_id == "test-related-id" + assert file.filename == "image.png" + assert file.extension == ".png" + assert file.mime_type == "image/png" + assert file.size == 67 diff --git a/api/tests/unit_tests/core/rag/extractor/test_markdown_extractor.py b/api/tests/unit_tests/core/rag/extractor/test_markdown_extractor.py new file mode 100644 index 0000000000..d4cf534c56 --- /dev/null +++ b/api/tests/unit_tests/core/rag/extractor/test_markdown_extractor.py @@ -0,0 +1,22 @@ +from core.rag.extractor.markdown_extractor import MarkdownExtractor + + +def test_markdown_to_tups(): + markdown = """ +this is some text without header + +# title 1 +this is balabala text + +## title 2 +this is more specific text. + """ + extractor = MarkdownExtractor(file_path="dummy_path") + updated_output = extractor.markdown_to_tups(markdown) + assert len(updated_output) == 3 + key, header_value = updated_output[0] + assert key == None + assert header_value.strip() == "this is some text without header" + title_1, value = updated_output[1] + assert title_1.strip() == "title 1" + assert value.strip() == "this is balabala text" diff --git a/api/tests/unit_tests/core/app/segments/test_segment.py b/api/tests/unit_tests/core/variables/test_segment.py similarity index 100% rename from api/tests/unit_tests/core/app/segments/test_segment.py rename to api/tests/unit_tests/core/variables/test_segment.py diff --git a/api/tests/unit_tests/core/app/segments/test_variables.py b/api/tests/unit_tests/core/variables/test_variables.py similarity index 100% rename from api/tests/unit_tests/core/app/segments/test_variables.py rename to api/tests/unit_tests/core/variables/test_variables.py diff --git a/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py b/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py new file mode 100644 index 0000000000..8712b61a23 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py @@ -0,0 +1,36 @@ +from core.workflow.nodes.base.node import BaseNode +from core.workflow.nodes.enums import NodeType + +# Ensures that all node classes are imported. 
+from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING + +_ = NODE_TYPE_CLASSES_MAPPING + + +def _get_all_subclasses(root: type[BaseNode]) -> list[type[BaseNode]]: + subclasses = [] + queue = [root] + while queue: + cls = queue.pop() + + subclasses.extend(cls.__subclasses__()) + queue.extend(cls.__subclasses__()) + + return subclasses + + +def test_ensure_subclasses_of_base_node_has_node_type_and_version_method_defined(): + classes = _get_all_subclasses(BaseNode) # type: ignore + type_version_set: set[tuple[NodeType, str]] = set() + + for cls in classes: + # Validate that 'version' is directly defined in the class (not inherited) by checking the class's __dict__ + assert "version" in cls.__dict__, f"class {cls} should have version method defined (NOT INHERITED.)" + node_type = cls._node_type + node_version = cls.version() + + assert isinstance(cls._node_type, NodeType) + assert isinstance(node_version, str) + node_type_and_version = (node_type, node_version) + assert node_type_and_version not in type_version_set + type_version_set.add(node_type_and_version) diff --git a/api/tests/unit_tests/core/workflow/nodes/iteration/test_iteration.py b/api/tests/unit_tests/core/workflow/nodes/iteration/test_iteration.py index 6d854c950d..362072a3db 100644 --- a/api/tests/unit_tests/core/workflow/nodes/iteration/test_iteration.py +++ b/api/tests/unit_tests/core/workflow/nodes/iteration/test_iteration.py @@ -3,6 +3,7 @@ import uuid from unittest.mock import patch from core.app.entities.app_invoke_entities import InvokeFrom +from core.variables.segments import ArrayAnySegment, ArrayStringSegment from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus @@ -197,7 +198,7 @@ def test_run(): count += 1 if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs == {"output": ["dify 123", "dify 123"]} + assert item.run_result.outputs == {"output": ArrayStringSegment(value=["dify 123", "dify 123"])} assert count == 20 @@ -413,7 +414,7 @@ def test_run_parallel(): count += 1 if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs == {"output": ["dify 123", "dify 123"]} + assert item.run_result.outputs == {"output": ArrayStringSegment(value=["dify 123", "dify 123"])} assert count == 32 @@ -654,7 +655,7 @@ def test_iteration_run_in_parallel_mode(): parallel_arr.append(item) if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs == {"output": ["dify 123", "dify 123"]} + assert item.run_result.outputs == {"output": ArrayStringSegment(value=["dify 123", "dify 123"])} assert count == 32 for item in sequential_result: @@ -662,7 +663,7 @@ def test_iteration_run_in_parallel_mode(): count += 1 if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs == {"output": ["dify 123", "dify 123"]} + assert item.run_result.outputs == {"output": ArrayStringSegment(value=["dify 123", "dify 123"])} assert count == 64 @@ -846,7 +847,7 @@ def test_iteration_run_error_handle(): count += 1 if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs 
== {"output": [None, None]} + assert item.run_result.outputs == {"output": ArrayAnySegment(value=[None, None])} assert count == 14 # execute remove abnormal output @@ -857,5 +858,5 @@ def test_iteration_run_error_handle(): count += 1 if isinstance(item, RunCompletedEvent): assert item.run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED - assert item.run_result.outputs == {"output": []} + assert item.run_result.outputs == {"output": ArrayAnySegment(value=[])} assert count == 14 diff --git a/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py b/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py index 4cb1aa93f9..66c7818adf 100644 --- a/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_document_extractor_node.py @@ -7,6 +7,7 @@ from docx.oxml.text.paragraph import CT_P from core.file import File, FileTransferMethod from core.variables import ArrayFileSegment +from core.variables.segments import ArrayStringSegment from core.variables.variables import StringVariable from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionStatus @@ -69,7 +70,13 @@ def test_run_invalid_variable_type(document_extractor_node, mock_graph_runtime_s @pytest.mark.parametrize( ("mime_type", "file_content", "expected_text", "transfer_method", "extension"), [ - ("text/plain", b"Hello, world!", ["Hello, world!"], FileTransferMethod.LOCAL_FILE, ".txt"), + ( + "text/plain", + b"Hello, world!", + ["Hello, world!"], + FileTransferMethod.LOCAL_FILE, + ".txt", + ), ( "application/pdf", b"%PDF-1.5\n%Test PDF content", @@ -84,7 +91,13 @@ def test_run_invalid_variable_type(document_extractor_node, mock_graph_runtime_s FileTransferMethod.REMOTE_URL, "", ), - ("text/plain", b"Remote content", ["Remote content"], FileTransferMethod.REMOTE_URL, None), + ( + "text/plain", + b"Remote content", + ["Remote content"], + FileTransferMethod.REMOTE_URL, + None, + ), ], ) def test_run_extract_text( @@ -131,7 +144,7 @@ def test_run_extract_text( assert isinstance(result, NodeRunResult) assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED, result.error assert result.outputs is not None - assert result.outputs["text"] == expected_text + assert result.outputs["text"] == ArrayStringSegment(value=expected_text) if transfer_method == FileTransferMethod.REMOTE_URL: mock_ssrf_proxy_get.assert_called_once_with("https://example.com/file.txt") @@ -329,3 +342,26 @@ def test_extract_text_from_excel_all_sheets_fail(mock_excel_file): assert result == "" assert mock_excel_instance.parse.call_count == 2 + + +@patch("pandas.ExcelFile") +def test_extract_text_from_excel_numeric_type_column(mock_excel_file): + """Test extracting text from Excel file with numeric column names.""" + + # Test numeric type column + data = {1: ["Test"], 1.1: ["Test"]} + + df = pd.DataFrame(data) + + # Mock ExcelFile + mock_excel_instance = Mock() + mock_excel_instance.sheet_names = ["Sheet1"] + mock_excel_instance.parse.return_value = df + mock_excel_file.return_value = mock_excel_instance + + file_content = b"fake_excel_content" + result = _extract_text_from_excel(file_content) + + expected_manual = "| 1.0 | 1.1 |\n| --- | --- |\n| Test | Test |\n\n" + + assert expected_manual == result diff --git a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py index 77d42e2692..7d3a1d6a2d 100644 --- 
a/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py +++ b/api/tests/unit_tests/core/workflow/nodes/test_list_operator.py @@ -115,7 +115,7 @@ def test_filter_files_by_type(list_operator_node): }, ] assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED - for expected_file, result_file in zip(expected_files, result.outputs["result"]): + for expected_file, result_file in zip(expected_files, result.outputs["result"].value): assert expected_file["filename"] == result_file.filename assert expected_file["type"] == result_file.type assert expected_file["tenant_id"] == result_file.tenant_id diff --git a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py index 9793da129d..deb3e29b86 100644 --- a/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py +++ b/api/tests/unit_tests/core/workflow/nodes/variable_assigner/v1/test_variable_assigner_v1.py @@ -5,6 +5,7 @@ from uuid import uuid4 from core.app.entities.app_invoke_entities import InvokeFrom from core.variables import ArrayStringVariable, StringVariable +from core.workflow.conversation_variable_updater import ConversationVariableUpdater from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph import Graph @@ -63,10 +64,11 @@ def test_overwrite_string_variable(): name="test_string_variable", value="the second value", ) + conversation_id = str(uuid.uuid4()) # construct variable pool variable_pool = VariablePool( - system_variables={SystemVariableKey.CONVERSATION_ID: "conversation_id"}, + system_variables={SystemVariableKey.CONVERSATION_ID: conversation_id}, user_inputs={}, environment_variables=[], conversation_variables=[conversation_variable], @@ -77,6 +79,9 @@ def test_overwrite_string_variable(): input_variable, ) + mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) + mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) + node = VariableAssignerNode( id=str(uuid.uuid4()), graph_init_params=init_params, @@ -91,11 +96,20 @@ def test_overwrite_string_variable(): "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, }, + conv_var_updater_factory=mock_conv_var_updater_factory, ) - with mock.patch("core.workflow.nodes.variable_assigner.common.helpers.update_conversation_variable") as mock_run: - list(node.run()) - mock_run.assert_called_once() + list(node.run()) + expected_var = StringVariable( + id=conversation_variable.id, + name=conversation_variable.name, + description=conversation_variable.description, + selector=conversation_variable.selector, + value_type=conversation_variable.value_type, + value=input_variable.value, + ) + mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) + mock_conv_var_updater.flush.assert_called_once() got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None @@ -148,9 +162,10 @@ def test_append_variable_to_array(): name="test_string_variable", value="the second value", ) + conversation_id = str(uuid.uuid4()) variable_pool = VariablePool( - system_variables={SystemVariableKey.CONVERSATION_ID: "conversation_id"}, + system_variables={SystemVariableKey.CONVERSATION_ID: conversation_id}, user_inputs={}, environment_variables=[], conversation_variables=[conversation_variable], @@ -160,6 +175,9 
@@ def test_append_variable_to_array(): input_variable, ) + mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) + mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) + node = VariableAssignerNode( id=str(uuid.uuid4()), graph_init_params=init_params, @@ -174,11 +192,22 @@ def test_append_variable_to_array(): "input_variable_selector": [DEFAULT_NODE_ID, input_variable.name], }, }, + conv_var_updater_factory=mock_conv_var_updater_factory, ) - with mock.patch("core.workflow.nodes.variable_assigner.common.helpers.update_conversation_variable") as mock_run: - list(node.run()) - mock_run.assert_called_once() + list(node.run()) + expected_value = list(conversation_variable.value) + expected_value.append(input_variable.value) + expected_var = ArrayStringVariable( + id=conversation_variable.id, + name=conversation_variable.name, + description=conversation_variable.description, + selector=conversation_variable.selector, + value_type=conversation_variable.value_type, + value=expected_value, + ) + mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) + mock_conv_var_updater.flush.assert_called_once() got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None @@ -225,13 +254,17 @@ def test_clear_array(): value=["the first value"], ) + conversation_id = str(uuid.uuid4()) variable_pool = VariablePool( - system_variables={SystemVariableKey.CONVERSATION_ID: "conversation_id"}, + system_variables={SystemVariableKey.CONVERSATION_ID: conversation_id}, user_inputs={}, environment_variables=[], conversation_variables=[conversation_variable], ) + mock_conv_var_updater = mock.Mock(spec=ConversationVariableUpdater) + mock_conv_var_updater_factory = mock.Mock(return_value=mock_conv_var_updater) + node = VariableAssignerNode( id=str(uuid.uuid4()), graph_init_params=init_params, @@ -246,11 +279,20 @@ def test_clear_array(): "input_variable_selector": [], }, }, + conv_var_updater_factory=mock_conv_var_updater_factory, ) - with mock.patch("core.workflow.nodes.variable_assigner.common.helpers.update_conversation_variable") as mock_run: - list(node.run()) - mock_run.assert_called_once() + list(node.run()) + expected_var = ArrayStringVariable( + id=conversation_variable.id, + name=conversation_variable.name, + description=conversation_variable.description, + selector=conversation_variable.selector, + value_type=conversation_variable.value_type, + value=[], + ) + mock_conv_var_updater.update.assert_called_once_with(conversation_id=conversation_id, variable=expected_var) + mock_conv_var_updater.flush.assert_called_once() got = variable_pool.get(["conversation", conversation_variable.name]) assert got is not None diff --git a/api/tests/unit_tests/core/workflow/test_variable_pool.py b/api/tests/unit_tests/core/workflow/test_variable_pool.py index efbcdc760c..bb8d34fad5 100644 --- a/api/tests/unit_tests/core/workflow/test_variable_pool.py +++ b/api/tests/unit_tests/core/workflow/test_variable_pool.py @@ -1,8 +1,12 @@ import pytest +from pydantic import ValidationError from core.file import File, FileTransferMethod, FileType from core.variables import FileSegment, StringSegment +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID from core.workflow.entities.variable_pool import VariablePool +from core.workflow.enums import SystemVariableKey +from factories.variable_factory import build_segment, segment_to_variable @pytest.fixture @@ -44,3 +48,38 @@ def 
test_use_long_selector(pool): result = pool.get(("node_1", "part_1", "part_2")) assert result is not None assert result.value == "test_value" + + +class TestVariablePool: + def test_constructor(self): + pool = VariablePool() + pool = VariablePool( + variable_dictionary={}, + user_inputs={}, + system_variables={}, + environment_variables=[], + conversation_variables=[], + ) + + pool = VariablePool( + user_inputs={"key": "value"}, + system_variables={SystemVariableKey.WORKFLOW_ID: "test_workflow_id"}, + environment_variables=[ + segment_to_variable( + segment=build_segment(1), + selector=[ENVIRONMENT_VARIABLE_NODE_ID, "env_var_1"], + name="env_var_1", + ) + ], + conversation_variables=[ + segment_to_variable( + segment=build_segment("1"), + selector=[CONVERSATION_VARIABLE_NODE_ID, "conv_var_1"], + name="conv_var_1", + ) + ], + ) + + def test_constructor_with_invalid_system_variable_key(self): + with pytest.raises(ValidationError): + VariablePool(system_variables={"invalid_key": "value"}) # type: ignore diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py index fddc182594..646de8bf3a 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py @@ -163,7 +163,6 @@ def real_workflow_run(): workflow_run.tenant_id = "test-tenant-id" workflow_run.app_id = "test-app-id" workflow_run.workflow_id = "test-workflow-id" - workflow_run.sequence_number = 1 workflow_run.type = "chat" workflow_run.triggered_from = "app-run" workflow_run.version = "1.0" diff --git a/api/tests/unit_tests/core/workflow/utils/test_variable_template_parser.py b/api/tests/unit_tests/core/workflow/utils/test_variable_template_parser.py index 2f90afcf89..28ef05edde 100644 --- a/api/tests/unit_tests/core/workflow/utils/test_variable_template_parser.py +++ b/api/tests/unit_tests/core/workflow/utils/test_variable_template_parser.py @@ -1,22 +1,10 @@ -from core.variables import SecretVariable +import dataclasses + from core.workflow.entities.variable_entities import VariableSelector -from core.workflow.entities.variable_pool import VariablePool -from core.workflow.enums import SystemVariableKey from core.workflow.utils import variable_template_parser def test_extract_selectors_from_template(): - variable_pool = VariablePool( - system_variables={ - SystemVariableKey("user_id"): "fake-user-id", - }, - user_inputs={}, - environment_variables=[ - SecretVariable(name="secret_key", value="fake-secret-key"), - ], - conversation_variables=[], - ) - variable_pool.add(("node_id", "custom_query"), "fake-user-query") template = ( "Hello, {{#sys.user_id#}}! Your query is {{#node_id.custom_query#}}. And your key is {{#env.secret_key#}}." 
) @@ -26,3 +14,35 @@ def test_extract_selectors_from_template(): VariableSelector(variable="#node_id.custom_query#", value_selector=["node_id", "custom_query"]), VariableSelector(variable="#env.secret_key#", value_selector=["env", "secret_key"]), ] + + +def test_invalid_references(): + @dataclasses.dataclass + class TestCase: + name: str + template: str + + cases = [ + TestCase( + name="lack of closing brace", + template="Hello, {{#sys.user_id#", + ), + TestCase( + name="lack of opening brace", + template="Hello, #sys.user_id#}}", + ), + TestCase( + name="lack selector name", + template="Hello, {{#sys#}}", + ), + TestCase( + name="empty node name part", + template="Hello, {{#.user_id#}}", + ), + ] + for idx, c in enumerate(cases, 1): + fail_msg = f"Test case {c.name} failed, index={idx}" + selectors = variable_template_parser.extract_selectors_from_template(c.template) + assert selectors == [], fail_msg + parser = variable_template_parser.VariableTemplateParser(c.template) + assert parser.extract_variable_selectors() == [], fail_msg diff --git a/api/tests/unit_tests/core/workflow/utils/test_variable_utils.py b/api/tests/unit_tests/core/workflow/utils/test_variable_utils.py new file mode 100644 index 0000000000..f1cb937bb3 --- /dev/null +++ b/api/tests/unit_tests/core/workflow/utils/test_variable_utils.py @@ -0,0 +1,148 @@ +from typing import Any + +from core.variables.segments import ObjectSegment, StringSegment +from core.workflow.entities.variable_pool import VariablePool +from core.workflow.utils.variable_utils import append_variables_recursively + + +class TestAppendVariablesRecursively: + """Test cases for append_variables_recursively function""" + + def test_append_simple_dict_value(self): + """Test appending a simple dictionary value""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["output"] + variable_value = {"name": "John", "age": 30} + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check that the main variable is added + main_var = pool.get([node_id] + variable_key_list) + assert main_var is not None + assert main_var.value == variable_value + + # Check that nested variables are added recursively + name_var = pool.get([node_id] + variable_key_list + ["name"]) + assert name_var is not None + assert name_var.value == "John" + + age_var = pool.get([node_id] + variable_key_list + ["age"]) + assert age_var is not None + assert age_var.value == 30 + + def test_append_object_segment_value(self): + """Test appending an ObjectSegment value""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["result"] + + # Create an ObjectSegment + obj_data = {"status": "success", "code": 200} + variable_value = ObjectSegment(value=obj_data) + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check that the main variable is added + main_var = pool.get([node_id] + variable_key_list) + assert main_var is not None + assert isinstance(main_var, ObjectSegment) + assert main_var.value == obj_data + + # Check that nested variables are added recursively + status_var = pool.get([node_id] + variable_key_list + ["status"]) + assert status_var is not None + assert status_var.value == "success" + + code_var = pool.get([node_id] + variable_key_list + ["code"]) + assert code_var is not None + assert code_var.value == 200 + + def test_append_nested_dict_value(self): + """Test appending a nested dictionary value""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["data"] + + 
variable_value = { + "user": { + "profile": {"name": "Alice", "email": "alice@example.com"}, + "settings": {"theme": "dark", "notifications": True}, + }, + "metadata": {"version": "1.0", "timestamp": 1234567890}, + } + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check deeply nested variables + name_var = pool.get([node_id] + variable_key_list + ["user", "profile", "name"]) + assert name_var is not None + assert name_var.value == "Alice" + + email_var = pool.get([node_id] + variable_key_list + ["user", "profile", "email"]) + assert email_var is not None + assert email_var.value == "alice@example.com" + + theme_var = pool.get([node_id] + variable_key_list + ["user", "settings", "theme"]) + assert theme_var is not None + assert theme_var.value == "dark" + + notifications_var = pool.get([node_id] + variable_key_list + ["user", "settings", "notifications"]) + assert notifications_var is not None + assert notifications_var.value == 1 # Boolean True is converted to integer 1 + + version_var = pool.get([node_id] + variable_key_list + ["metadata", "version"]) + assert version_var is not None + assert version_var.value == "1.0" + + def test_append_non_dict_value(self): + """Test appending a non-dictionary value (should not recurse)""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["simple"] + variable_value = "simple_string" + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check that only the main variable is added + main_var = pool.get([node_id] + variable_key_list) + assert main_var is not None + assert main_var.value == variable_value + + # Ensure no additional variables are created + assert len(pool.variable_dictionary[node_id]) == 1 + + def test_append_segment_non_object_value(self): + """Test appending a Segment that is not ObjectSegment (should not recurse)""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["text"] + variable_value = StringSegment(value="Hello World") + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check that only the main variable is added + main_var = pool.get([node_id] + variable_key_list) + assert main_var is not None + assert isinstance(main_var, StringSegment) + assert main_var.value == "Hello World" + + # Ensure no additional variables are created + assert len(pool.variable_dictionary[node_id]) == 1 + + def test_append_empty_dict_value(self): + """Test appending an empty dictionary value""" + pool = VariablePool() + node_id = "test_node" + variable_key_list = ["empty"] + variable_value: dict[str, Any] = {} + + append_variables_recursively(pool, node_id, variable_key_list, variable_value) + + # Check that the main variable is added + main_var = pool.get([node_id] + variable_key_list) + assert main_var is not None + assert main_var.value == {} + + # Ensure only the main variable is created (no recursion for empty dict) + assert len(pool.variable_dictionary[node_id]) == 1 diff --git a/api/tests/unit_tests/factories/test_variable_factory.py b/api/tests/unit_tests/factories/test_variable_factory.py new file mode 100644 index 0000000000..481fbdc91a --- /dev/null +++ b/api/tests/unit_tests/factories/test_variable_factory.py @@ -0,0 +1,865 @@ +import math +from dataclasses import dataclass +from typing import Any +from uuid import uuid4 + +import pytest +from hypothesis import given +from hypothesis import strategies as st + +from core.file import File, FileTransferMethod, FileType +from core.variables import 
( + ArrayNumberVariable, + ArrayObjectVariable, + ArrayStringVariable, + FloatVariable, + IntegerVariable, + ObjectSegment, + SecretVariable, + SegmentType, + StringVariable, +) +from core.variables.exc import VariableError +from core.variables.segments import ( + ArrayAnySegment, + ArrayFileSegment, + ArrayNumberSegment, + ArrayObjectSegment, + ArrayStringSegment, + FileSegment, + FloatSegment, + IntegerSegment, + NoneSegment, + ObjectSegment, + StringSegment, +) +from core.variables.types import SegmentType +from factories import variable_factory +from factories.variable_factory import TypeMismatchError, build_segment_with_type + + +def test_string_variable(): + test_data = {"value_type": "string", "name": "test_text", "value": "Hello, World!"} + result = variable_factory.build_conversation_variable_from_mapping(test_data) + assert isinstance(result, StringVariable) + + +def test_integer_variable(): + test_data = {"value_type": "number", "name": "test_int", "value": 42} + result = variable_factory.build_conversation_variable_from_mapping(test_data) + assert isinstance(result, IntegerVariable) + + +def test_float_variable(): + test_data = {"value_type": "number", "name": "test_float", "value": 3.14} + result = variable_factory.build_conversation_variable_from_mapping(test_data) + assert isinstance(result, FloatVariable) + + +def test_secret_variable(): + test_data = {"value_type": "secret", "name": "test_secret", "value": "secret_value"} + result = variable_factory.build_conversation_variable_from_mapping(test_data) + assert isinstance(result, SecretVariable) + + +def test_invalid_value_type(): + test_data = {"value_type": "unknown", "name": "test_invalid", "value": "value"} + with pytest.raises(VariableError): + variable_factory.build_conversation_variable_from_mapping(test_data) + + +def test_build_a_blank_string(): + result = variable_factory.build_conversation_variable_from_mapping( + { + "value_type": "string", + "name": "blank", + "value": "", + } + ) + assert isinstance(result, StringVariable) + assert result.value == "" + + +def test_build_a_object_variable_with_none_value(): + var = variable_factory.build_segment( + { + "key1": None, + } + ) + assert isinstance(var, ObjectSegment) + assert var.value["key1"] is None + + +def test_object_variable(): + mapping = { + "id": str(uuid4()), + "value_type": "object", + "name": "test_object", + "description": "Description of the variable.", + "value": { + "key1": "text", + "key2": 2, + }, + } + variable = variable_factory.build_conversation_variable_from_mapping(mapping) + assert isinstance(variable, ObjectSegment) + assert isinstance(variable.value["key1"], str) + assert isinstance(variable.value["key2"], int) + + +def test_array_string_variable(): + mapping = { + "id": str(uuid4()), + "value_type": "array[string]", + "name": "test_array", + "description": "Description of the variable.", + "value": [ + "text", + "text", + ], + } + variable = variable_factory.build_conversation_variable_from_mapping(mapping) + assert isinstance(variable, ArrayStringVariable) + assert isinstance(variable.value[0], str) + assert isinstance(variable.value[1], str) + + +def test_array_number_variable(): + mapping = { + "id": str(uuid4()), + "value_type": "array[number]", + "name": "test_array", + "description": "Description of the variable.", + "value": [ + 1, + 2.0, + ], + } + variable = variable_factory.build_conversation_variable_from_mapping(mapping) + assert isinstance(variable, ArrayNumberVariable) + assert isinstance(variable.value[0], int) + assert 
isinstance(variable.value[1], float) + + +def test_array_object_variable(): + mapping = { + "id": str(uuid4()), + "value_type": "array[object]", + "name": "test_array", + "description": "Description of the variable.", + "value": [ + { + "key1": "text", + "key2": 1, + }, + { + "key1": "text", + "key2": 1, + }, + ], + } + variable = variable_factory.build_conversation_variable_from_mapping(mapping) + assert isinstance(variable, ArrayObjectVariable) + assert isinstance(variable.value[0], dict) + assert isinstance(variable.value[1], dict) + assert isinstance(variable.value[0]["key1"], str) + assert isinstance(variable.value[0]["key2"], int) + assert isinstance(variable.value[1]["key1"], str) + assert isinstance(variable.value[1]["key2"], int) + + +def test_variable_cannot_large_than_200_kb(): + with pytest.raises(VariableError): + variable_factory.build_conversation_variable_from_mapping( + { + "id": str(uuid4()), + "value_type": "string", + "name": "test_text", + "value": "a" * 1024 * 201, + } + ) + + +def test_array_none_variable(): + var = variable_factory.build_segment([None, None, None, None]) + assert isinstance(var, ArrayAnySegment) + assert var.value == [None, None, None, None] + + +def test_build_segment_none_type(): + """Test building NoneSegment from None value.""" + segment = variable_factory.build_segment(None) + assert isinstance(segment, NoneSegment) + assert segment.value is None + assert segment.value_type == SegmentType.NONE + + +def test_build_segment_none_type_properties(): + """Test NoneSegment properties and methods.""" + segment = variable_factory.build_segment(None) + assert segment.text == "" + assert segment.log == "" + assert segment.markdown == "" + assert segment.to_object() is None + + +def test_build_segment_array_file_single_file(): + """Test building ArrayFileSegment from list with single file.""" + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file.png", + filename="test-file", + extension=".png", + mime_type="image/png", + size=1000, + ) + segment = variable_factory.build_segment([file]) + assert isinstance(segment, ArrayFileSegment) + assert len(segment.value) == 1 + assert segment.value[0] == file + assert segment.value_type == SegmentType.ARRAY_FILE + + +def test_build_segment_array_file_multiple_files(): + """Test building ArrayFileSegment from list with multiple files.""" + file1 = File( + id="test_file_id_1", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file1.png", + filename="test-file1", + extension=".png", + mime_type="image/png", + size=1000, + ) + file2 = File( + id="test_file_id_2", + tenant_id="test_tenant_id", + type=FileType.DOCUMENT, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="test_relation_id", + filename="test-file2", + extension=".txt", + mime_type="text/plain", + size=500, + ) + segment = variable_factory.build_segment([file1, file2]) + assert isinstance(segment, ArrayFileSegment) + assert len(segment.value) == 2 + assert segment.value[0] == file1 + assert segment.value[1] == file2 + assert segment.value_type == SegmentType.ARRAY_FILE + + +def test_build_segment_array_file_empty_list(): + """Test building ArrayFileSegment from empty list should create ArrayAnySegment.""" + segment = variable_factory.build_segment([]) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == [] 
+ assert segment.value_type == SegmentType.ARRAY_ANY + + +def test_build_segment_array_any_mixed_types(): + """Test building ArrayAnySegment from list with mixed types.""" + mixed_values = ["string", 42, 3.14, {"key": "value"}, None] + segment = variable_factory.build_segment(mixed_values) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == mixed_values + assert segment.value_type == SegmentType.ARRAY_ANY + + +def test_build_segment_array_any_with_nested_arrays(): + """Test building ArrayAnySegment from list containing arrays.""" + nested_values = [["nested", "array"], [1, 2, 3], "string"] + segment = variable_factory.build_segment(nested_values) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == nested_values + assert segment.value_type == SegmentType.ARRAY_ANY + + +def test_build_segment_array_any_mixed_with_files(): + """Test building ArrayAnySegment from list with files and other types.""" + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file.png", + filename="test-file", + extension=".png", + mime_type="image/png", + size=1000, + ) + mixed_values = [file, "string", 42] + segment = variable_factory.build_segment(mixed_values) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == mixed_values + assert segment.value_type == SegmentType.ARRAY_ANY + + +def test_build_segment_array_any_all_none_values(): + """Test building ArrayAnySegment from list with all None values.""" + none_values = [None, None, None] + segment = variable_factory.build_segment(none_values) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == none_values + assert segment.value_type == SegmentType.ARRAY_ANY + + +def test_build_segment_array_file_properties(): + """Test ArrayFileSegment properties and methods.""" + file1 = File( + id="test_file_id_1", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file1.png", + filename="test-file1", + extension=".png", + mime_type="image/png", + size=1000, + ) + file2 = File( + id="test_file_id_2", + tenant_id="test_tenant_id", + type=FileType.DOCUMENT, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file2.txt", + filename="test-file2", + extension=".txt", + mime_type="text/plain", + size=500, + ) + segment = variable_factory.build_segment([file1, file2]) + + # Test properties + assert segment.text == "" # ArrayFileSegment text property returns empty string + assert segment.log == "" # ArrayFileSegment log property returns empty string + assert segment.markdown == f"{file1.markdown}\n{file2.markdown}" + assert segment.to_object() == [file1, file2] + + +def test_build_segment_array_any_properties(): + """Test ArrayAnySegment properties and methods.""" + mixed_values = ["string", 42, None] + segment = variable_factory.build_segment(mixed_values) + + # Test properties + assert segment.text == str(mixed_values) + assert segment.log == str(mixed_values) + assert segment.markdown == "string\n42\nNone" + assert segment.to_object() == mixed_values + + +def test_build_segment_edge_cases(): + """Test edge cases for build_segment function.""" + # Test with complex nested structures + complex_structure = [{"nested": {"deep": [1, 2, 3]}}, [{"inner": "value"}], "mixed"] + segment = variable_factory.build_segment(complex_structure) + assert 
isinstance(segment, ArrayAnySegment) + assert segment.value == complex_structure + + # Test with single None in list + single_none = [None] + segment = variable_factory.build_segment(single_none) + assert isinstance(segment, ArrayAnySegment) + assert segment.value == single_none + + +def test_build_segment_file_array_with_different_file_types(): + """Test ArrayFileSegment with different file types.""" + image_file = File( + id="image_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/image.png", + filename="image", + extension=".png", + mime_type="image/png", + size=1000, + ) + + video_file = File( + id="video_id", + tenant_id="test_tenant_id", + type=FileType.VIDEO, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="video_relation_id", + filename="video", + extension=".mp4", + mime_type="video/mp4", + size=5000, + ) + + audio_file = File( + id="audio_id", + tenant_id="test_tenant_id", + type=FileType.AUDIO, + transfer_method=FileTransferMethod.LOCAL_FILE, + related_id="audio_relation_id", + filename="audio", + extension=".mp3", + mime_type="audio/mpeg", + size=3000, + ) + + segment = variable_factory.build_segment([image_file, video_file, audio_file]) + assert isinstance(segment, ArrayFileSegment) + assert len(segment.value) == 3 + assert segment.value[0].type == FileType.IMAGE + assert segment.value[1].type == FileType.VIDEO + assert segment.value[2].type == FileType.AUDIO + + +@st.composite +def _generate_file(draw) -> File: + file_id = draw(st.text(min_size=1, max_size=10)) + tenant_id = draw(st.text(min_size=1, max_size=10)) + file_type, mime_type, extension = draw( + st.sampled_from( + [ + (FileType.IMAGE, "image/png", ".png"), + (FileType.VIDEO, "video/mp4", ".mp4"), + (FileType.DOCUMENT, "text/plain", ".txt"), + (FileType.AUDIO, "audio/mpeg", ".mp3"), + ] + ) + ) + filename = "test-file" + size = draw(st.integers(min_value=0, max_value=1024 * 1024)) + + transfer_method = draw(st.sampled_from(list(FileTransferMethod))) + if transfer_method == FileTransferMethod.REMOTE_URL: + url = "https://test.example.com/test-file" + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=file_type, + transfer_method=transfer_method, + remote_url=url, + related_id=None, + filename=filename, + extension=extension, + mime_type=mime_type, + size=size, + ) + else: + relation_id = draw(st.uuids(version=4)) + + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=file_type, + transfer_method=transfer_method, + related_id=str(relation_id), + filename=filename, + extension=extension, + mime_type=mime_type, + size=size, + ) + return file + + +def _scalar_value() -> st.SearchStrategy[int | float | str | File | None]: + return st.one_of( + st.none(), + st.integers(), + st.floats(), + st.text(), + _generate_file(), + ) + + +@given(_scalar_value()) +def test_build_segment_and_extract_values_for_scalar_types(value): + seg = variable_factory.build_segment(value) + # nan == nan yields false, so we need to use `math.isnan` to check `seg.value` here. 
+ if isinstance(value, float) and math.isnan(value): + assert math.isnan(seg.value) + else: + assert seg.value == value + + +@given(st.lists(_scalar_value())) +def test_build_segment_and_extract_values_for_array_types(values): + seg = variable_factory.build_segment(values) + assert seg.value == values + + +def test_build_segment_type_for_scalar(): + @dataclass(frozen=True) + class TestCase: + value: int | float | str | File + expected_type: SegmentType + + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file.png", + filename="test-file", + extension=".png", + mime_type="image/png", + size=1000, + ) + cases = [ + TestCase(0, SegmentType.NUMBER), + TestCase(0.0, SegmentType.NUMBER), + TestCase("", SegmentType.STRING), + TestCase(file, SegmentType.FILE), + ] + + for idx, c in enumerate(cases, 1): + segment = variable_factory.build_segment(c.value) + assert segment.value_type == c.expected_type, f"test case {idx} failed." + + +class TestBuildSegmentWithType: + """Test cases for build_segment_with_type function.""" + + def test_string_type(self): + """Test building a string segment with correct type.""" + result = build_segment_with_type(SegmentType.STRING, "hello") + assert isinstance(result, StringSegment) + assert result.value == "hello" + assert result.value_type == SegmentType.STRING + + def test_number_type_integer(self): + """Test building a number segment with integer value.""" + result = build_segment_with_type(SegmentType.NUMBER, 42) + assert isinstance(result, IntegerSegment) + assert result.value == 42 + assert result.value_type == SegmentType.NUMBER + + def test_number_type_float(self): + """Test building a number segment with float value.""" + result = build_segment_with_type(SegmentType.NUMBER, 3.14) + assert isinstance(result, FloatSegment) + assert result.value == 3.14 + assert result.value_type == SegmentType.NUMBER + + def test_object_type(self): + """Test building an object segment with correct type.""" + test_obj = {"key": "value", "nested": {"inner": 123}} + result = build_segment_with_type(SegmentType.OBJECT, test_obj) + assert isinstance(result, ObjectSegment) + assert result.value == test_obj + assert result.value_type == SegmentType.OBJECT + + def test_file_type(self): + """Test building a file segment with correct type.""" + test_file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file.png", + filename="test-file", + extension=".png", + mime_type="image/png", + size=1000, + storage_key="test_storage_key", + ) + result = build_segment_with_type(SegmentType.FILE, test_file) + assert isinstance(result, FileSegment) + assert result.value == test_file + assert result.value_type == SegmentType.FILE + + def test_none_type(self): + """Test building a none segment with None value.""" + result = build_segment_with_type(SegmentType.NONE, None) + assert isinstance(result, NoneSegment) + assert result.value is None + assert result.value_type == SegmentType.NONE + + def test_empty_array_string(self): + """Test building an empty array[string] segment.""" + result = build_segment_with_type(SegmentType.ARRAY_STRING, []) + assert isinstance(result, ArrayStringSegment) + assert result.value == [] + assert result.value_type == SegmentType.ARRAY_STRING + + def test_empty_array_number(self): + """Test building an empty array[number] 
segment.""" + result = build_segment_with_type(SegmentType.ARRAY_NUMBER, []) + assert isinstance(result, ArrayNumberSegment) + assert result.value == [] + assert result.value_type == SegmentType.ARRAY_NUMBER + + def test_empty_array_object(self): + """Test building an empty array[object] segment.""" + result = build_segment_with_type(SegmentType.ARRAY_OBJECT, []) + assert isinstance(result, ArrayObjectSegment) + assert result.value == [] + assert result.value_type == SegmentType.ARRAY_OBJECT + + def test_empty_array_file(self): + """Test building an empty array[file] segment.""" + result = build_segment_with_type(SegmentType.ARRAY_FILE, []) + assert isinstance(result, ArrayFileSegment) + assert result.value == [] + assert result.value_type == SegmentType.ARRAY_FILE + + def test_empty_array_any(self): + """Test building an empty array[any] segment.""" + result = build_segment_with_type(SegmentType.ARRAY_ANY, []) + assert isinstance(result, ArrayAnySegment) + assert result.value == [] + assert result.value_type == SegmentType.ARRAY_ANY + + def test_array_with_values(self): + """Test building array segments with actual values.""" + # Array of strings + result = build_segment_with_type(SegmentType.ARRAY_STRING, ["hello", "world"]) + assert isinstance(result, ArrayStringSegment) + assert result.value == ["hello", "world"] + assert result.value_type == SegmentType.ARRAY_STRING + + # Array of numbers + result = build_segment_with_type(SegmentType.ARRAY_NUMBER, [1, 2, 3.14]) + assert isinstance(result, ArrayNumberSegment) + assert result.value == [1, 2, 3.14] + assert result.value_type == SegmentType.ARRAY_NUMBER + + # Array of objects + result = build_segment_with_type(SegmentType.ARRAY_OBJECT, [{"a": 1}, {"b": 2}]) + assert isinstance(result, ArrayObjectSegment) + assert result.value == [{"a": 1}, {"b": 2}] + assert result.value_type == SegmentType.ARRAY_OBJECT + + def test_type_mismatch_string_to_number(self): + """Test type mismatch when expecting number but getting string.""" + with pytest.raises(TypeMismatchError) as exc_info: + build_segment_with_type(SegmentType.NUMBER, "not_a_number") + + assert "Type mismatch" in str(exc_info.value) + assert "expected number" in str(exc_info.value) + assert "str" in str(exc_info.value) + + def test_type_mismatch_number_to_string(self): + """Test type mismatch when expecting string but getting number.""" + with pytest.raises(TypeMismatchError) as exc_info: + build_segment_with_type(SegmentType.STRING, 123) + + assert "Type mismatch" in str(exc_info.value) + assert "expected string" in str(exc_info.value) + assert "int" in str(exc_info.value) + + def test_type_mismatch_none_to_string(self): + """Test type mismatch when expecting string but getting None.""" + with pytest.raises(TypeMismatchError) as exc_info: + build_segment_with_type(SegmentType.STRING, None) + + assert "Expected string, but got None" in str(exc_info.value) + + def test_type_mismatch_empty_list_to_non_array(self): + """Test type mismatch when expecting non-array type but getting empty list.""" + with pytest.raises(TypeMismatchError) as exc_info: + build_segment_with_type(SegmentType.STRING, []) + + assert "Expected string, but got empty list" in str(exc_info.value) + + def test_type_mismatch_object_to_array(self): + """Test type mismatch when expecting array but getting object.""" + with pytest.raises(TypeMismatchError) as exc_info: + build_segment_with_type(SegmentType.ARRAY_STRING, {"key": "value"}) + + assert "Type mismatch" in str(exc_info.value) + assert "expected array[string]" in 
str(exc_info.value) + + def test_compatible_number_types(self): + """Test that int and float are both compatible with NUMBER type.""" + # Integer should work + result_int = build_segment_with_type(SegmentType.NUMBER, 42) + assert isinstance(result_int, IntegerSegment) + assert result_int.value_type == SegmentType.NUMBER + + # Float should work + result_float = build_segment_with_type(SegmentType.NUMBER, 3.14) + assert isinstance(result_float, FloatSegment) + assert result_float.value_type == SegmentType.NUMBER + + @pytest.mark.parametrize( + ("segment_type", "value", "expected_class"), + [ + (SegmentType.STRING, "test", StringSegment), + (SegmentType.NUMBER, 42, IntegerSegment), + (SegmentType.NUMBER, 3.14, FloatSegment), + (SegmentType.OBJECT, {}, ObjectSegment), + (SegmentType.NONE, None, NoneSegment), + (SegmentType.ARRAY_STRING, [], ArrayStringSegment), + (SegmentType.ARRAY_NUMBER, [], ArrayNumberSegment), + (SegmentType.ARRAY_OBJECT, [], ArrayObjectSegment), + (SegmentType.ARRAY_ANY, [], ArrayAnySegment), + ], + ) + def test_parametrized_valid_types(self, segment_type, value, expected_class): + """Parametrized test for valid type combinations.""" + result = build_segment_with_type(segment_type, value) + assert isinstance(result, expected_class) + assert result.value == value + assert result.value_type == segment_type + + @pytest.mark.parametrize( + ("segment_type", "value"), + [ + (SegmentType.STRING, 123), + (SegmentType.NUMBER, "not_a_number"), + (SegmentType.OBJECT, "not_an_object"), + (SegmentType.ARRAY_STRING, "not_an_array"), + (SegmentType.STRING, None), + (SegmentType.NUMBER, None), + ], + ) + def test_parametrized_type_mismatches(self, segment_type, value): + """Parametrized test for type mismatches that should raise TypeMismatchError.""" + with pytest.raises(TypeMismatchError): + build_segment_with_type(segment_type, value) + + +# Test cases for ValueError scenarios in build_segment function +class TestBuildSegmentValueErrors: + """Test cases for ValueError scenarios in the build_segment function.""" + + @dataclass(frozen=True) + class ValueErrorTestCase: + """Test case data for ValueError scenarios.""" + + name: str + description: str + test_value: Any + + def _get_test_cases(self): + """Get all test cases for ValueError scenarios.""" + + # Define inline classes for complex test cases + class CustomType: + pass + + def unsupported_function(): + return "test" + + def gen(): + yield 1 + yield 2 + + return [ + self.ValueErrorTestCase( + name="unsupported_custom_type", + description="custom class that doesn't match any supported type", + test_value=CustomType(), + ), + self.ValueErrorTestCase( + name="unsupported_set_type", + description="set (unsupported collection type)", + test_value={1, 2, 3}, + ), + self.ValueErrorTestCase( + name="unsupported_tuple_type", description="tuple (unsupported type)", test_value=(1, 2, 3) + ), + self.ValueErrorTestCase( + name="unsupported_bytes_type", + description="bytes (unsupported type)", + test_value=b"hello world", + ), + self.ValueErrorTestCase( + name="unsupported_function_type", + description="function (unsupported type)", + test_value=unsupported_function, + ), + self.ValueErrorTestCase( + name="unsupported_module_type", description="module (unsupported type)", test_value=math + ), + self.ValueErrorTestCase( + name="array_with_unsupported_element_types", + description="array with unsupported element types", + test_value=[CustomType()], + ), + self.ValueErrorTestCase( + name="mixed_array_with_unsupported_types", + description="array 
with mix of supported and unsupported types", + test_value=["valid_string", 42, CustomType()], + ), + self.ValueErrorTestCase( + name="nested_unsupported_types", + description="nested structures containing unsupported types", + test_value=[{"valid": "data"}, CustomType()], + ), + self.ValueErrorTestCase( + name="complex_number_type", + description="complex number (unsupported type)", + test_value=3 + 4j, + ), + self.ValueErrorTestCase( + name="range_type", description="range object (unsupported type)", test_value=range(10) + ), + self.ValueErrorTestCase( + name="generator_type", + description="generator (unsupported type)", + test_value=gen(), + ), + self.ValueErrorTestCase( + name="exception_message_contains_value", + description="set to verify error message contains the actual unsupported value", + test_value={1, 2, 3}, + ), + self.ValueErrorTestCase( + name="array_with_mixed_unsupported_segment_types", + description="array processing with unsupported segment types in match", + test_value=[CustomType()], + ), + self.ValueErrorTestCase( + name="frozenset_type", + description="frozenset (unsupported type)", + test_value=frozenset([1, 2, 3]), + ), + self.ValueErrorTestCase( + name="memoryview_type", + description="memoryview (unsupported type)", + test_value=memoryview(b"hello"), + ), + self.ValueErrorTestCase( + name="slice_type", description="slice object (unsupported type)", test_value=slice(1, 10, 2) + ), + self.ValueErrorTestCase(name="type_object", description="type object (unsupported type)", test_value=type), + self.ValueErrorTestCase( + name="generic_object", description="generic object (unsupported type)", test_value=object() + ), + ] + + def test_build_segment_unsupported_types(self): + """Table-driven test for all ValueError scenarios in build_segment function.""" + test_cases = self._get_test_cases() + + for index, test_case in enumerate(test_cases, 1): + # Use test value directly + test_value = test_case.test_value + + with pytest.raises(ValueError) as exc_info: # noqa: PT012 + segment = variable_factory.build_segment(test_value) + pytest.fail(f"Test case {index} ({test_case.name}) should raise ValueError but not, result={segment}") + + error_message = str(exc_info.value) + assert "not supported value" in error_message, ( + f"Test case {index} ({test_case.name}): Expected 'not supported value' in error message, " + f"but got: {error_message}" + ) + + def test_build_segment_boolean_type_note(self): + """Note: Boolean values are actually handled as integers in Python, so they don't raise ValueError.""" + # Boolean values in Python are subclasses of int, so they get processed as integers + # True becomes IntegerSegment(value=1) and False becomes IntegerSegment(value=0) + true_segment = variable_factory.build_segment(True) + false_segment = variable_factory.build_segment(False) + + # Verify they are processed as integers, not as errors + assert true_segment.value == 1, "Test case 1 (boolean_true): Expected True to be processed as integer 1" + assert false_segment.value == 0, "Test case 2 (boolean_false): Expected False to be processed as integer 0" + assert true_segment.value_type == SegmentType.NUMBER + assert false_segment.value_type == SegmentType.NUMBER diff --git a/api/tests/unit_tests/libs/test_datetime_utils.py b/api/tests/unit_tests/libs/test_datetime_utils.py new file mode 100644 index 0000000000..e7781a5821 --- /dev/null +++ b/api/tests/unit_tests/libs/test_datetime_utils.py @@ -0,0 +1,20 @@ +import datetime + +from libs.datetime_utils import naive_utc_now + + +def 
test_naive_utc_now(monkeypatch): + tz_aware_utc_now = datetime.datetime.now(tz=datetime.UTC) + + def _now_func(tz: datetime.timezone | None) -> datetime.datetime: + return tz_aware_utc_now.astimezone(tz) + + monkeypatch.setattr("libs.datetime_utils._now_func", _now_func) + + naive_datetime = naive_utc_now() + + assert naive_datetime.tzinfo is None + assert naive_datetime.date() == tz_aware_utc_now.date() + naive_time = naive_datetime.time() + utc_time = tz_aware_utc_now.time() + assert naive_time == utc_time diff --git a/api/tests/unit_tests/models/__init__.py b/api/tests/unit_tests/models/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/models/test_workflow.py b/api/tests/unit_tests/models/test_workflow.py index b79e95c7ed..69163d48bd 100644 --- a/api/tests/unit_tests/models/test_workflow.py +++ b/api/tests/unit_tests/models/test_workflow.py @@ -1,10 +1,15 @@ +import dataclasses import json from unittest import mock from uuid import uuid4 from constants import HIDDEN_VALUE +from core.file.enums import FileTransferMethod, FileType +from core.file.models import File from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable -from models.workflow import Workflow, WorkflowNodeExecutionModel +from core.variables.segments import IntegerSegment, Segment +from factories.variable_factory import build_segment +from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable def test_environment_variables(): @@ -163,3 +168,147 @@ class TestWorkflowNodeExecution: original = {"a": 1, "b": ["2"]} node_exec.execution_metadata = json.dumps(original) assert node_exec.execution_metadata_dict == original + + +class TestIsSystemVariableEditable: + def test_is_system_variable(self): + cases = [ + ("query", True), + ("files", True), + ("dialogue_count", False), + ("conversation_id", False), + ("user_id", False), + ("app_id", False), + ("workflow_id", False), + ("workflow_run_id", False), + ] + for name, editable in cases: + assert editable == is_system_variable_editable(name) + + assert is_system_variable_editable("invalid_or_new_system_variable") == False + + +class TestWorkflowDraftVariableGetValue: + def test_get_value_by_case(self): + @dataclasses.dataclass + class TestCase: + name: str + value: Segment + + tenant_id = "test_tenant_id" + + test_file = File( + tenant_id=tenant_id, + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://example.com/example.jpg", + filename="example.jpg", + extension=".jpg", + mime_type="image/jpeg", + size=100, + ) + cases: list[TestCase] = [ + TestCase( + name="number/int", + value=build_segment(1), + ), + TestCase( + name="number/float", + value=build_segment(1.0), + ), + TestCase( + name="string", + value=build_segment("a"), + ), + TestCase( + name="object", + value=build_segment({}), + ), + TestCase( + name="file", + value=build_segment(test_file), + ), + TestCase( + name="array[any]", + value=build_segment([1, "a"]), + ), + TestCase( + name="array[string]", + value=build_segment(["a", "b"]), + ), + TestCase( + name="array[number]/int", + value=build_segment([1, 2]), + ), + TestCase( + name="array[number]/float", + value=build_segment([1.0, 2.0]), + ), + TestCase( + name="array[number]/mixed", + value=build_segment([1, 2.0]), + ), + TestCase( + name="array[object]", + value=build_segment([{}, {"a": 1}]), + ), + TestCase( + name="none", + value=build_segment(None), + ), + ] + + for idx, c in 
enumerate(cases, 1): + fail_msg = f"test case {c.name} failed, index={idx}" + draft_var = WorkflowDraftVariable() + draft_var.set_value(c.value) + assert c.value == draft_var.get_value(), fail_msg + + def test_file_variable_preserves_all_fields(self): + """Test that File type variables preserve all fields during encoding/decoding.""" + tenant_id = "test_tenant_id" + + # Create a File with specific field values + test_file = File( + id="test_file_id", + tenant_id=tenant_id, + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://example.com/test.jpg", + filename="test.jpg", + extension=".jpg", + mime_type="image/jpeg", + size=12345, # Specific size to test preservation + storage_key="test_storage_key", + ) + + # Create a FileSegment and WorkflowDraftVariable + file_segment = build_segment(test_file) + draft_var = WorkflowDraftVariable() + draft_var.set_value(file_segment) + + # Retrieve the value and verify all fields are preserved + retrieved_segment = draft_var.get_value() + retrieved_file = retrieved_segment.value + + # Verify all important fields are preserved + assert retrieved_file.id == test_file.id + assert retrieved_file.tenant_id == test_file.tenant_id + assert retrieved_file.type == test_file.type + assert retrieved_file.transfer_method == test_file.transfer_method + assert retrieved_file.remote_url == test_file.remote_url + assert retrieved_file.filename == test_file.filename + assert retrieved_file.extension == test_file.extension + assert retrieved_file.mime_type == test_file.mime_type + assert retrieved_file.size == test_file.size # This was the main issue being fixed + # Note: storage_key is not serialized in model_dump() so it won't be preserved + + # Verify the segments have the same type and the important fields match + assert file_segment.value_type == retrieved_segment.value_type + + def test_get_and_set_value(self): + draft_var = WorkflowDraftVariable() + int_var = IntegerSegment(value=1) + draft_var.set_value(int_var) + value = draft_var.get_value() + assert value == int_var diff --git a/api/tests/unit_tests/services/test_dataset_permission.py b/api/tests/unit_tests/services/test_dataset_permission.py index 066f541c1b..a67252e856 100644 --- a/api/tests/unit_tests/services/test_dataset_permission.py +++ b/api/tests/unit_tests/services/test_dataset_permission.py @@ -8,151 +8,298 @@ from services.dataset_service import DatasetService from services.errors.account import NoPermissionError +class DatasetPermissionTestDataFactory: + """Factory class for creating test data and mock objects for dataset permission tests.""" + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + tenant_id: str = "test-tenant-123", + created_by: str = "creator-456", + permission: DatasetPermissionEnum = DatasetPermissionEnum.ONLY_ME, + **kwargs, + ) -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.tenant_id = tenant_id + dataset.created_by = created_by + dataset.permission = permission + for key, value in kwargs.items(): + setattr(dataset, key, value) + return dataset + + @staticmethod + def create_user_mock( + user_id: str = "user-789", + tenant_id: str = "test-tenant-123", + role: TenantAccountRole = TenantAccountRole.NORMAL, + **kwargs, + ) -> Mock: + """Create a mock user with specified attributes.""" + user = Mock(spec=Account) + user.id = user_id + user.current_tenant_id = tenant_id + user.current_role = role + for key, value in kwargs.items(): 
+ setattr(user, key, value) + return user + + @staticmethod + def create_dataset_permission_mock( + dataset_id: str = "dataset-123", + account_id: str = "user-789", + **kwargs, + ) -> Mock: + """Create a mock dataset permission record.""" + permission = Mock(spec=DatasetPermission) + permission.dataset_id = dataset_id + permission.account_id = account_id + for key, value in kwargs.items(): + setattr(permission, key, value) + return permission + + class TestDatasetPermissionService: - """Test cases for dataset permission checking functionality""" + """ + Comprehensive unit tests for DatasetService.check_dataset_permission method. - def setup_method(self): - """Set up test fixtures""" - # Mock tenant and user - self.tenant_id = "test-tenant-123" - self.creator_id = "creator-456" - self.normal_user_id = "normal-789" - self.owner_user_id = "owner-999" + This test suite covers all permission scenarios including: + - Cross-tenant access restrictions + - Owner privilege checks + - Different permission levels (ONLY_ME, ALL_TEAM, PARTIAL_TEAM) + - Explicit permission checks for PARTIAL_TEAM + - Error conditions and logging + """ - # Mock dataset - self.dataset = Mock(spec=Dataset) - self.dataset.id = "dataset-123" - self.dataset.tenant_id = self.tenant_id - self.dataset.created_by = self.creator_id + @pytest.fixture + def mock_dataset_service_dependencies(self): + """Common mock setup for dataset service dependencies.""" + with patch("services.dataset_service.db.session") as mock_session: + yield { + "db_session": mock_session, + } - # Mock users - self.creator_user = Mock(spec=Account) - self.creator_user.id = self.creator_id - self.creator_user.current_tenant_id = self.tenant_id - self.creator_user.current_role = TenantAccountRole.EDITOR - - self.normal_user = Mock(spec=Account) - self.normal_user.id = self.normal_user_id - self.normal_user.current_tenant_id = self.tenant_id - self.normal_user.current_role = TenantAccountRole.NORMAL - - self.owner_user = Mock(spec=Account) - self.owner_user.id = self.owner_user_id - self.owner_user.current_tenant_id = self.tenant_id - self.owner_user.current_role = TenantAccountRole.OWNER - - def test_permission_check_different_tenant_should_fail(self): - """Test that users from different tenants cannot access dataset""" - self.normal_user.current_tenant_id = "different-tenant" - - with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."): - DatasetService.check_dataset_permission(self.dataset, self.normal_user) - - def test_owner_can_access_any_dataset(self): - """Test that tenant owners can access any dataset regardless of permission""" - self.dataset.permission = DatasetPermissionEnum.ONLY_ME + @pytest.fixture + def mock_logging_dependencies(self): + """Mock setup for logging tests.""" + with patch("services.dataset_service.logging") as mock_logging: + yield { + "logging": mock_logging, + } + def _assert_permission_check_passes(self, dataset: Mock, user: Mock): + """Helper method to verify that permission check passes without raising exceptions.""" # Should not raise any exception - DatasetService.check_dataset_permission(self.dataset, self.owner_user) + DatasetService.check_dataset_permission(dataset, user) - def test_only_me_permission_creator_can_access(self): - """Test ONLY_ME permission allows only creator to access""" - self.dataset.permission = DatasetPermissionEnum.ONLY_ME + def _assert_permission_check_fails( + self, dataset: Mock, user: Mock, expected_message: str = "You do not have permission to access this 
dataset." + ): + """Helper method to verify that permission check fails with expected error.""" + with pytest.raises(NoPermissionError, match=expected_message): + DatasetService.check_dataset_permission(dataset, user) - # Creator should be able to access - DatasetService.check_dataset_permission(self.dataset, self.creator_user) + def _assert_database_query_called(self, mock_session: Mock, dataset_id: str, account_id: str): + """Helper method to verify database query calls for permission checks.""" + mock_session.query().filter_by.assert_called_with(dataset_id=dataset_id, account_id=account_id) - def test_only_me_permission_others_cannot_access(self): - """Test ONLY_ME permission denies access to non-creators""" - self.dataset.permission = DatasetPermissionEnum.ONLY_ME - - with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."): - DatasetService.check_dataset_permission(self.dataset, self.normal_user) - - def test_all_team_permission_allows_access(self): - """Test ALL_TEAM permission allows any team member to access""" - self.dataset.permission = DatasetPermissionEnum.ALL_TEAM - - # Should not raise any exception for team members - DatasetService.check_dataset_permission(self.dataset, self.normal_user) - DatasetService.check_dataset_permission(self.dataset, self.creator_user) - - @patch("services.dataset_service.db.session") - def test_partial_team_permission_creator_can_access(self, mock_session): - """Test PARTIAL_TEAM permission allows creator to access""" - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM - - # Should not raise any exception for creator - DatasetService.check_dataset_permission(self.dataset, self.creator_user) - - # Should not query database for creator + def _assert_database_query_not_called(self, mock_session: Mock): + """Helper method to verify that database query was not called.""" mock_session.query.assert_not_called() - @patch("services.dataset_service.db.session") - def test_partial_team_permission_with_explicit_permission(self, mock_session): - """Test PARTIAL_TEAM permission allows users with explicit permission""" - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM + # ==================== Cross-Tenant Access Tests ==================== + + def test_permission_check_different_tenant_should_fail(self): + """Test that users from different tenants cannot access dataset regardless of other permissions.""" + # Create dataset and user from different tenants + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + tenant_id="tenant-123", permission=DatasetPermissionEnum.ALL_TEAM + ) + user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="user-789", tenant_id="different-tenant-456", role=TenantAccountRole.EDITOR + ) + + # Should fail due to different tenant + self._assert_permission_check_fails(dataset, user) + + # ==================== Owner Privilege Tests ==================== + + def test_owner_can_access_any_dataset(self): + """Test that tenant owners can access any dataset regardless of permission level.""" + # Create dataset with restrictive permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ONLY_ME) + + # Create owner user + owner_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="owner-999", role=TenantAccountRole.OWNER + ) + + # Owner should have access regardless of dataset permission + self._assert_permission_check_passes(dataset, owner_user) + + # ==================== ONLY_ME Permission Tests 
==================== + + def test_only_me_permission_creator_can_access(self): + """Test ONLY_ME permission allows only the dataset creator to access.""" + # Create dataset with ONLY_ME permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME + ) + + # Create creator user + creator_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="creator-456", role=TenantAccountRole.EDITOR + ) + + # Creator should be able to access + self._assert_permission_check_passes(dataset, creator_user) + + def test_only_me_permission_others_cannot_access(self): + """Test ONLY_ME permission denies access to non-creators.""" + # Create dataset with ONLY_ME permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME + ) + + # Create normal user (not the creator) + normal_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="normal-789", role=TenantAccountRole.NORMAL + ) + + # Non-creator should be denied access + self._assert_permission_check_fails(dataset, normal_user) + + # ==================== ALL_TEAM Permission Tests ==================== + + def test_all_team_permission_allows_access(self): + """Test ALL_TEAM permission allows any team member to access the dataset.""" + # Create dataset with ALL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ALL_TEAM) + + # Create different types of team members + normal_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="normal-789", role=TenantAccountRole.NORMAL + ) + editor_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="editor-456", role=TenantAccountRole.EDITOR + ) + + # All team members should have access + self._assert_permission_check_passes(dataset, normal_user) + self._assert_permission_check_passes(dataset, editor_user) + + # ==================== PARTIAL_TEAM Permission Tests ==================== + + def test_partial_team_permission_creator_can_access(self, mock_dataset_service_dependencies): + """Test PARTIAL_TEAM permission allows creator to access without database query.""" + # Create dataset with PARTIAL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM + ) + + # Create creator user + creator_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="creator-456", role=TenantAccountRole.EDITOR + ) + + # Creator should have access without database query + self._assert_permission_check_passes(dataset, creator_user) + self._assert_database_query_not_called(mock_dataset_service_dependencies["db_session"]) + + def test_partial_team_permission_with_explicit_permission(self, mock_dataset_service_dependencies): + """Test PARTIAL_TEAM permission allows users with explicit permission records.""" + # Create dataset with PARTIAL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) + + # Create normal user (not the creator) + normal_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="normal-789", role=TenantAccountRole.NORMAL + ) # Mock database query to return a permission record - mock_permission = Mock(spec=DatasetPermission) - mock_session.query().filter_by().first.return_value = mock_permission + mock_permission = 
DatasetPermissionTestDataFactory.create_dataset_permission_mock( + dataset_id=dataset.id, account_id=normal_user.id + ) + mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = mock_permission - # Should not raise any exception - DatasetService.check_dataset_permission(self.dataset, self.normal_user) + # User with explicit permission should have access + self._assert_permission_check_passes(dataset, normal_user) + self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id) - # Verify database was queried correctly - mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id) + def test_partial_team_permission_without_explicit_permission(self, mock_dataset_service_dependencies): + """Test PARTIAL_TEAM permission denies users without explicit permission records.""" + # Create dataset with PARTIAL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) - @patch("services.dataset_service.db.session") - def test_partial_team_permission_without_explicit_permission(self, mock_session): - """Test PARTIAL_TEAM permission denies users without explicit permission""" - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM + # Create normal user (not the creator) + normal_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="normal-789", role=TenantAccountRole.NORMAL + ) # Mock database query to return None (no permission record) - mock_session.query().filter_by().first.return_value = None + mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None - with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."): - DatasetService.check_dataset_permission(self.dataset, self.normal_user) + # User without explicit permission should be denied access + self._assert_permission_check_fails(dataset, normal_user) + self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id) - # Verify database was queried correctly - mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id) - - @patch("services.dataset_service.db.session") - def test_partial_team_permission_non_creator_without_permission_fails(self, mock_session): - """Test that non-creators without explicit permission are denied access""" - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM + def test_partial_team_permission_non_creator_without_permission_fails(self, mock_dataset_service_dependencies): + """Test that non-creators without explicit permission are denied access to PARTIAL_TEAM datasets.""" + # Create dataset with PARTIAL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM + ) # Create a different user (not the creator) - other_user = Mock(spec=Account) - other_user.id = "other-user-123" - other_user.current_tenant_id = self.tenant_id - other_user.current_role = TenantAccountRole.NORMAL + other_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="other-user-123", role=TenantAccountRole.NORMAL + ) # Mock database query to return None (no permission record) - mock_session.query().filter_by().first.return_value = None + mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None - with 
pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."): - DatasetService.check_dataset_permission(self.dataset, other_user) + # Non-creator without explicit permission should be denied access + self._assert_permission_check_fails(dataset, other_user) + self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, other_user.id) + + # ==================== Enum Usage Tests ==================== def test_partial_team_permission_uses_correct_enum(self): - """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM""" - # This test ensures we're using the enum instead of string literals - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM - - # Creator should always have access - DatasetService.check_dataset_permission(self.dataset, self.creator_user) - - @patch("services.dataset_service.logging") - @patch("services.dataset_service.db.session") - def test_permission_denied_logs_debug_message(self, mock_session, mock_logging): - """Test that permission denied events are logged""" - self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM - mock_session.query().filter_by().first.return_value = None - - with pytest.raises(NoPermissionError): - DatasetService.check_dataset_permission(self.dataset, self.normal_user) - - # Verify debug message was logged - mock_logging.debug.assert_called_with( - f"User {self.normal_user.id} does not have permission to access dataset {self.dataset.id}" + """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM instead of string literals.""" + # Create dataset with PARTIAL_TEAM permission using enum + dataset = DatasetPermissionTestDataFactory.create_dataset_mock( + created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM + ) + + # Create creator user + creator_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="creator-456", role=TenantAccountRole.EDITOR + ) + + # Creator should always have access regardless of permission level + self._assert_permission_check_passes(dataset, creator_user) + + # ==================== Logging Tests ==================== + + def test_permission_denied_logs_debug_message(self, mock_dataset_service_dependencies, mock_logging_dependencies): + """Test that permission denied events are properly logged for debugging purposes.""" + # Create dataset with PARTIAL_TEAM permission + dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) + + # Create normal user (not the creator) + normal_user = DatasetPermissionTestDataFactory.create_user_mock( + user_id="normal-789", role=TenantAccountRole.NORMAL + ) + + # Mock database query to return None (no permission record) + mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None + + # Attempt permission check (should fail) + with pytest.raises(NoPermissionError): + DatasetService.check_dataset_permission(dataset, normal_user) + + # Verify debug message was logged with correct user and dataset information + mock_logging_dependencies["logging"].debug.assert_called_with( + f"User {normal_user.id} does not have permission to access dataset {dataset.id}" ) diff --git a/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py b/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py new file mode 100644 index 0000000000..dc09aca5b2 --- /dev/null +++ 
b/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py @@ -0,0 +1,804 @@ +import datetime +from typing import Optional + +# Mock redis_client before importing dataset_service +from unittest.mock import Mock, call, patch + +import pytest + +from models.dataset import Dataset, Document +from services.dataset_service import DocumentService +from services.errors.document import DocumentIndexingError +from tests.unit_tests.conftest import redis_mock + + +class DocumentBatchUpdateTestDataFactory: + """Factory class for creating test data and mock objects for document batch update tests.""" + + @staticmethod + def create_dataset_mock(dataset_id: str = "dataset-123", tenant_id: str = "tenant-456") -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.tenant_id = tenant_id + return dataset + + @staticmethod + def create_user_mock(user_id: str = "user-789") -> Mock: + """Create a mock user.""" + user = Mock() + user.id = user_id + return user + + @staticmethod + def create_document_mock( + document_id: str = "doc-1", + name: str = "test_document.pdf", + enabled: bool = True, + archived: bool = False, + indexing_status: str = "completed", + completed_at: Optional[datetime.datetime] = None, + **kwargs, + ) -> Mock: + """Create a mock document with specified attributes.""" + document = Mock(spec=Document) + document.id = document_id + document.name = name + document.enabled = enabled + document.archived = archived + document.indexing_status = indexing_status + document.completed_at = completed_at or datetime.datetime.now() + + # Set default values for optional fields + document.disabled_at = None + document.disabled_by = None + document.archived_at = None + document.archived_by = None + document.updated_at = None + + for key, value in kwargs.items(): + setattr(document, key, value) + return document + + @staticmethod + def create_multiple_documents( + document_ids: list[str], enabled: bool = True, archived: bool = False, indexing_status: str = "completed" + ) -> list[Mock]: + """Create multiple mock documents with specified attributes.""" + documents = [] + for doc_id in document_ids: + doc = DocumentBatchUpdateTestDataFactory.create_document_mock( + document_id=doc_id, + name=f"document_{doc_id}.pdf", + enabled=enabled, + archived=archived, + indexing_status=indexing_status, + ) + documents.append(doc) + return documents + + +class TestDatasetServiceBatchUpdateDocumentStatus: + """ + Comprehensive unit tests for DocumentService.batch_update_document_status method. + + This test suite covers all supported actions (enable, disable, archive, un_archive), + error conditions, edge cases, and validates proper interaction with Redis cache, + database operations, and async task triggers. 
+ """ + + @pytest.fixture + def mock_document_service_dependencies(self): + """Common mock setup for document service dependencies.""" + with ( + patch("services.dataset_service.DocumentService.get_document") as mock_get_doc, + patch("extensions.ext_database.db.session") as mock_db, + patch("services.dataset_service.datetime") as mock_datetime, + ): + current_time = datetime.datetime(2023, 1, 1, 12, 0, 0) + mock_datetime.datetime.now.return_value = current_time + mock_datetime.UTC = datetime.UTC + + yield { + "get_document": mock_get_doc, + "db_session": mock_db, + "datetime": mock_datetime, + "current_time": current_time, + } + + @pytest.fixture + def mock_async_task_dependencies(self): + """Mock setup for async task dependencies.""" + with ( + patch("services.dataset_service.add_document_to_index_task") as mock_add_task, + patch("services.dataset_service.remove_document_from_index_task") as mock_remove_task, + ): + yield {"add_task": mock_add_task, "remove_task": mock_remove_task} + + def _assert_document_enabled(self, document: Mock, user_id: str, current_time: datetime.datetime): + """Helper method to verify document was enabled correctly.""" + assert document.enabled == True + assert document.disabled_at is None + assert document.disabled_by is None + assert document.updated_at == current_time.replace(tzinfo=None) + + def _assert_document_disabled(self, document: Mock, user_id: str, current_time: datetime.datetime): + """Helper method to verify document was disabled correctly.""" + assert document.enabled == False + assert document.disabled_at == current_time.replace(tzinfo=None) + assert document.disabled_by == user_id + assert document.updated_at == current_time.replace(tzinfo=None) + + def _assert_document_archived(self, document: Mock, user_id: str, current_time: datetime.datetime): + """Helper method to verify document was archived correctly.""" + assert document.archived == True + assert document.archived_at == current_time.replace(tzinfo=None) + assert document.archived_by == user_id + assert document.updated_at == current_time.replace(tzinfo=None) + + def _assert_document_unarchived(self, document: Mock): + """Helper method to verify document was unarchived correctly.""" + assert document.archived == False + assert document.archived_at is None + assert document.archived_by is None + + def _assert_redis_cache_operations(self, document_ids: list[str], action: str = "setex"): + """Helper method to verify Redis cache operations.""" + if action == "setex": + expected_calls = [call(f"document_{doc_id}_indexing", 600, 1) for doc_id in document_ids] + redis_mock.setex.assert_has_calls(expected_calls) + elif action == "get": + expected_calls = [call(f"document_{doc_id}_indexing") for doc_id in document_ids] + redis_mock.get.assert_has_calls(expected_calls) + + def _assert_async_task_calls(self, mock_task, document_ids: list[str], task_type: str): + """Helper method to verify async task calls.""" + expected_calls = [call(doc_id) for doc_id in document_ids] + if task_type in {"add", "remove"}: + mock_task.delay.assert_has_calls(expected_calls) + + # ==================== Enable Document Tests ==================== + + def test_batch_update_enable_documents_success( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test successful enabling of disabled documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create disabled documents + disabled_docs = 
DocumentBatchUpdateTestDataFactory.create_multiple_documents(["doc-1", "doc-2"], enabled=False) + mock_document_service_dependencies["get_document"].side_effect = disabled_docs + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Call the method to enable documents + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1", "doc-2"], action="enable", user=user + ) + + # Verify document attributes were updated correctly + for doc in disabled_docs: + self._assert_document_enabled(doc, user.id, mock_document_service_dependencies["current_time"]) + + # Verify Redis cache operations + self._assert_redis_cache_operations(["doc-1", "doc-2"], "get") + self._assert_redis_cache_operations(["doc-1", "doc-2"], "setex") + + # Verify async tasks were triggered for indexing + self._assert_async_task_calls(mock_async_task_dependencies["add_task"], ["doc-1", "doc-2"], "add") + + # Verify database operations + mock_db = mock_document_service_dependencies["db_session"] + assert mock_db.add.call_count == 2 + assert mock_db.commit.call_count == 1 + + def test_batch_update_enable_already_enabled_document_skipped(self, mock_document_service_dependencies): + """Test enabling documents that are already enabled.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create already enabled document + enabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True) + mock_document_service_dependencies["get_document"].return_value = enabled_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Attempt to enable already enabled document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="enable", user=user + ) + + # Verify no database operations occurred (document was skipped) + mock_db = mock_document_service_dependencies["db_session"] + mock_db.commit.assert_not_called() + + # Verify no Redis setex operations occurred (document was skipped) + redis_mock.setex.assert_not_called() + + # ==================== Disable Document Tests ==================== + + def test_batch_update_disable_documents_success( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test successful disabling of enabled and completed documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create enabled documents + enabled_docs = DocumentBatchUpdateTestDataFactory.create_multiple_documents(["doc-1", "doc-2"], enabled=True) + mock_document_service_dependencies["get_document"].side_effect = enabled_docs + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Call the method to disable documents + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1", "doc-2"], action="disable", user=user + ) + + # Verify document attributes were updated correctly + for doc in enabled_docs: + self._assert_document_disabled(doc, user.id, mock_document_service_dependencies["current_time"]) + + # Verify Redis cache operations for indexing prevention + self._assert_redis_cache_operations(["doc-1", "doc-2"], "setex") + + # Verify async tasks were triggered to remove from index + self._assert_async_task_calls(mock_async_task_dependencies["remove_task"], ["doc-1", "doc-2"], "remove") + + 
# Verify database operations + mock_db = mock_document_service_dependencies["db_session"] + assert mock_db.add.call_count == 2 + assert mock_db.commit.call_count == 1 + + def test_batch_update_disable_already_disabled_document_skipped( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test disabling documents that are already disabled.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create already disabled document + disabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=False) + mock_document_service_dependencies["get_document"].return_value = disabled_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Attempt to disable already disabled document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="disable", user=user + ) + + # Verify no database operations occurred (document was skipped) + mock_db = mock_document_service_dependencies["db_session"] + mock_db.commit.assert_not_called() + + # Verify no Redis setex operations occurred (document was skipped) + redis_mock.setex.assert_not_called() + + # Verify no async tasks were triggered (document was skipped) + mock_async_task_dependencies["add_task"].delay.assert_not_called() + + def test_batch_update_disable_non_completed_document_error(self, mock_document_service_dependencies): + """Test that DocumentIndexingError is raised when trying to disable non-completed documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create a document that's not completed + non_completed_doc = DocumentBatchUpdateTestDataFactory.create_document_mock( + enabled=True, + indexing_status="indexing", # Not completed + completed_at=None, # Not completed + ) + mock_document_service_dependencies["get_document"].return_value = non_completed_doc + + # Verify that DocumentIndexingError is raised + with pytest.raises(DocumentIndexingError) as exc_info: + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="disable", user=user + ) + + # Verify error message indicates document is not completed + assert "is not completed" in str(exc_info.value) + + # ==================== Archive Document Tests ==================== + + def test_batch_update_archive_documents_success( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test successful archiving of unarchived documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create unarchived enabled document + unarchived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True, archived=False) + mock_document_service_dependencies["get_document"].return_value = unarchived_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Call the method to archive documents + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="archive", user=user + ) + + # Verify document attributes were updated correctly + self._assert_document_archived(unarchived_doc, user.id, mock_document_service_dependencies["current_time"]) + + # Verify Redis cache was set (because document was enabled) + 
redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1) + + # Verify async task was triggered to remove from index (because enabled) + mock_async_task_dependencies["remove_task"].delay.assert_called_once_with("doc-1") + + # Verify database operations + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + def test_batch_update_archive_already_archived_document_skipped(self, mock_document_service_dependencies): + """Test archiving documents that are already archived.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create already archived document + archived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True, archived=True) + mock_document_service_dependencies["get_document"].return_value = archived_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Attempt to archive already archived document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-3"], action="archive", user=user + ) + + # Verify no database operations occurred (document was skipped) + mock_db = mock_document_service_dependencies["db_session"] + mock_db.commit.assert_not_called() + + # Verify no Redis setex operations occurred (document was skipped) + redis_mock.setex.assert_not_called() + + def test_batch_update_archive_disabled_document_no_index_removal( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test archiving disabled documents (should not trigger index removal).""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Set up disabled, unarchived document + disabled_unarchived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=False, archived=False) + mock_document_service_dependencies["get_document"].return_value = disabled_unarchived_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Archive the disabled document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="archive", user=user + ) + + # Verify document was archived + self._assert_document_archived( + disabled_unarchived_doc, user.id, mock_document_service_dependencies["current_time"] + ) + + # Verify no Redis cache was set (document is disabled) + redis_mock.setex.assert_not_called() + + # Verify no index removal task was triggered (document is disabled) + mock_async_task_dependencies["remove_task"].delay.assert_not_called() + + # Verify database operations still occurred + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + # ==================== Unarchive Document Tests ==================== + + def test_batch_update_unarchive_documents_success( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test successful unarchiving of archived documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create mock archived document + archived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True, archived=True) + mock_document_service_dependencies["get_document"].return_value = archived_doc + + # 
Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Call the method to unarchive documents + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="un_archive", user=user + ) + + # Verify document attributes were updated correctly + self._assert_document_unarchived(archived_doc) + assert archived_doc.updated_at == mock_document_service_dependencies["current_time"].replace(tzinfo=None) + + # Verify Redis cache was set (because document is enabled) + redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1) + + # Verify async task was triggered to add back to index (because enabled) + mock_async_task_dependencies["add_task"].delay.assert_called_once_with("doc-1") + + # Verify database operations + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + def test_batch_update_unarchive_already_unarchived_document_skipped( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test unarchiving documents that are already unarchived.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create already unarchived document + unarchived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True, archived=False) + mock_document_service_dependencies["get_document"].return_value = unarchived_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Attempt to unarchive already unarchived document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="un_archive", user=user + ) + + # Verify no database operations occurred (document was skipped) + mock_db = mock_document_service_dependencies["db_session"] + mock_db.commit.assert_not_called() + + # Verify no Redis setex operations occurred (document was skipped) + redis_mock.setex.assert_not_called() + + # Verify no async tasks were triggered (document was skipped) + mock_async_task_dependencies["add_task"].delay.assert_not_called() + + def test_batch_update_unarchive_disabled_document_no_index_addition( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test unarchiving disabled documents (should not trigger index addition).""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create mock archived but disabled document + archived_disabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=False, archived=True) + mock_document_service_dependencies["get_document"].return_value = archived_disabled_doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Unarchive the disabled document + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="un_archive", user=user + ) + + # Verify document was unarchived + self._assert_document_unarchived(archived_disabled_doc) + assert archived_disabled_doc.updated_at == mock_document_service_dependencies["current_time"].replace( + tzinfo=None + ) + + # Verify no Redis cache was set (document is disabled) + redis_mock.setex.assert_not_called() + + # Verify no index addition task was triggered (document is disabled) + mock_async_task_dependencies["add_task"].delay.assert_not_called() + + # 
Verify database operations still occurred + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + # ==================== Error Handling Tests ==================== + + def test_batch_update_document_indexing_error_redis_cache_hit(self, mock_document_service_dependencies): + """Test that DocumentIndexingError is raised when documents are currently being indexed.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create mock enabled document + enabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True) + mock_document_service_dependencies["get_document"].return_value = enabled_doc + + # Set up mock to indicate document is being indexed + redis_mock.reset_mock() + redis_mock.get.return_value = "indexing" + + # Verify that DocumentIndexingError is raised + with pytest.raises(DocumentIndexingError) as exc_info: + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="enable", user=user + ) + + # Verify error message contains document name + assert "test_document.pdf" in str(exc_info.value) + assert "is being indexed" in str(exc_info.value) + + # Verify Redis cache was checked + redis_mock.get.assert_called_once_with("document_doc-1_indexing") + + def test_batch_update_invalid_action_error(self, mock_document_service_dependencies): + """Test that ValueError is raised when an invalid action is provided.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create mock document + doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True) + mock_document_service_dependencies["get_document"].return_value = doc + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Test with invalid action + invalid_action = "invalid_action" + with pytest.raises(ValueError) as exc_info: + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action=invalid_action, user=user + ) + + # Verify error message contains the invalid action + assert invalid_action in str(exc_info.value) + assert "Invalid action" in str(exc_info.value) + + # Verify no Redis operations occurred + redis_mock.setex.assert_not_called() + + def test_batch_update_async_task_error_handling( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test handling of async task errors during batch operations.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create mock disabled document + disabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=False) + mock_document_service_dependencies["get_document"].return_value = disabled_doc + + # Mock async task to raise an exception + mock_async_task_dependencies["add_task"].delay.side_effect = Exception("Celery task error") + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Verify that async task error is propagated + with pytest.raises(Exception) as exc_info: + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1"], action="enable", user=user + ) + + # Verify error message + assert "Celery task error" in str(exc_info.value) + + # Verify database operations completed 
successfully + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + mock_db.commit.assert_called_once() + + # Verify Redis cache was set successfully + redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1) + + # Verify document was updated + self._assert_document_enabled(disabled_doc, user.id, mock_document_service_dependencies["current_time"]) + + # ==================== Edge Case Tests ==================== + + def test_batch_update_empty_document_list(self, mock_document_service_dependencies): + """Test batch operations with an empty document ID list.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Call method with empty document list + result = DocumentService.batch_update_document_status( + dataset=dataset, document_ids=[], action="enable", user=user + ) + + # Verify no document lookups were performed + mock_document_service_dependencies["get_document"].assert_not_called() + + # Verify method returns None (early return) + assert result is None + + def test_batch_update_document_not_found_skipped(self, mock_document_service_dependencies): + """Test behavior when some documents don't exist in the database.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Mock document service to return None (document not found) + mock_document_service_dependencies["get_document"].return_value = None + + # Call method with non-existent document ID + # This should not raise an error, just skip the missing document + try: + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["non-existent-doc"], action="enable", user=user + ) + except Exception as e: + pytest.fail(f"Method should not raise exception for missing documents: {e}") + + # Verify document lookup was attempted + mock_document_service_dependencies["get_document"].assert_called_once_with(dataset.id, "non-existent-doc") + + def test_batch_update_mixed_document_states_and_actions( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test batch operations on documents with mixed states and various scenarios.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create documents in various states + disabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock("doc-1", enabled=False) + enabled_doc = DocumentBatchUpdateTestDataFactory.create_document_mock("doc-2", enabled=True) + archived_doc = DocumentBatchUpdateTestDataFactory.create_document_mock("doc-3", enabled=True, archived=True) + + # Mix of different document states + documents = [disabled_doc, enabled_doc, archived_doc] + mock_document_service_dependencies["get_document"].side_effect = documents + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Perform enable operation on mixed state documents + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=["doc-1", "doc-2", "doc-3"], action="enable", user=user + ) + + # Verify only the disabled document was processed + # (enabled and archived documents should be skipped for enable action) + + # Only one add should occur (for the disabled document that was enabled) + mock_db = mock_document_service_dependencies["db_session"] + mock_db.add.assert_called_once() + # Only one commit 
should occur + mock_db.commit.assert_called_once() + + # Only one Redis setex should occur (for the document that was enabled) + redis_mock.setex.assert_called_once_with("document_doc-1_indexing", 600, 1) + + # Only one async task should be triggered (for the document that was enabled) + mock_async_task_dependencies["add_task"].delay.assert_called_once_with("doc-1") + + # ==================== Performance Tests ==================== + + def test_batch_update_large_document_list_performance( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test batch operations with a large number of documents.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create large list of document IDs + document_ids = [f"doc-{i}" for i in range(1, 101)] # 100 documents + + # Create mock documents + mock_documents = DocumentBatchUpdateTestDataFactory.create_multiple_documents( + document_ids, + enabled=False, # All disabled, will be enabled + ) + mock_document_service_dependencies["get_document"].side_effect = mock_documents + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Perform batch enable operation + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=document_ids, action="enable", user=user + ) + + # Verify all documents were processed + assert mock_document_service_dependencies["get_document"].call_count == 100 + + # Verify all documents were updated + for mock_doc in mock_documents: + self._assert_document_enabled(mock_doc, user.id, mock_document_service_dependencies["current_time"]) + + # Verify database operations + mock_db = mock_document_service_dependencies["db_session"] + assert mock_db.add.call_count == 100 + assert mock_db.commit.call_count == 1 + + # Verify Redis cache operations occurred for each document + assert redis_mock.setex.call_count == 100 + + # Verify async tasks were triggered for each document + assert mock_async_task_dependencies["add_task"].delay.call_count == 100 + + # Verify correct Redis cache keys were set + expected_redis_calls = [call(f"document_doc-{i}_indexing", 600, 1) for i in range(1, 101)] + redis_mock.setex.assert_has_calls(expected_redis_calls) + + # Verify correct async task calls + expected_task_calls = [call(f"doc-{i}") for i in range(1, 101)] + mock_async_task_dependencies["add_task"].delay.assert_has_calls(expected_task_calls) + + def test_batch_update_mixed_document_states_complex_scenario( + self, mock_document_service_dependencies, mock_async_task_dependencies + ): + """Test complex batch operations with documents in various states.""" + dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() + user = DocumentBatchUpdateTestDataFactory.create_user_mock() + + # Create documents in various states + doc1 = DocumentBatchUpdateTestDataFactory.create_document_mock("doc-1", enabled=False) # Will be enabled + doc2 = DocumentBatchUpdateTestDataFactory.create_document_mock( + "doc-2", enabled=True + ) # Already enabled, will be skipped + doc3 = DocumentBatchUpdateTestDataFactory.create_document_mock( + "doc-3", enabled=True + ) # Already enabled, will be skipped + doc4 = DocumentBatchUpdateTestDataFactory.create_document_mock( + "doc-4", enabled=True + ) # Not affected by enable action + doc5 = DocumentBatchUpdateTestDataFactory.create_document_mock( + "doc-5", enabled=True, archived=True + ) # Not affected by enable action + doc6 = None # Non-existent, will be 
skipped + + mock_document_service_dependencies["get_document"].side_effect = [doc1, doc2, doc3, doc4, doc5, doc6] + + # Reset module-level Redis mock + redis_mock.reset_mock() + redis_mock.get.return_value = None + + # Perform mixed batch operations + DocumentService.batch_update_document_status( + dataset=dataset, + document_ids=["doc-1", "doc-2", "doc-3", "doc-4", "doc-5", "doc-6"], + action="enable", # This will only affect doc1 + user=user, + ) + + # Verify document 1 was enabled + self._assert_document_enabled(doc1, user.id, mock_document_service_dependencies["current_time"]) + + # Verify other documents were skipped appropriately + assert doc2.enabled == True # No change + assert doc3.enabled == True # No change + assert doc4.enabled == True # No change + assert doc5.enabled == True # No change + + # Verify database commits occurred for processed documents + # Only doc1 should be added (others were skipped, doc6 doesn't exist) + mock_db = mock_document_service_dependencies["db_session"] + assert mock_db.add.call_count == 1 + assert mock_db.commit.call_count == 1 + + # Verify Redis cache operations occurred for processed documents + # Only doc1 should have Redis operations + assert redis_mock.setex.call_count == 1 + + # Verify async tasks were triggered for processed documents + # Only doc1 should trigger tasks + assert mock_async_task_dependencies["add_task"].delay.call_count == 1 + + # Verify correct Redis cache keys were set + expected_redis_calls = [call("document_doc-1_indexing", 600, 1)] + redis_mock.setex.assert_has_calls(expected_redis_calls) + + # Verify correct async task calls + expected_task_calls = [call("doc-1")] + mock_async_task_dependencies["add_task"].delay.assert_has_calls(expected_task_calls) diff --git a/api/tests/unit_tests/services/test_dataset_service_update_dataset.py b/api/tests/unit_tests/services/test_dataset_service_update_dataset.py new file mode 100644 index 0000000000..cdbb439c85 --- /dev/null +++ b/api/tests/unit_tests/services/test_dataset_service_update_dataset.py @@ -0,0 +1,633 @@ +import datetime +from typing import Any, Optional + +# Mock redis_client before importing dataset_service +from unittest.mock import Mock, patch + +import pytest + +from core.model_runtime.entities.model_entities import ModelType +from models.dataset import Dataset, ExternalKnowledgeBindings +from services.dataset_service import DatasetService +from services.errors.account import NoPermissionError +from tests.unit_tests.conftest import redis_mock + + +class DatasetUpdateTestDataFactory: + """Factory class for creating test data and mock objects for dataset update tests.""" + + @staticmethod + def create_dataset_mock( + dataset_id: str = "dataset-123", + provider: str = "vendor", + name: str = "old_name", + description: str = "old_description", + indexing_technique: str = "high_quality", + retrieval_model: str = "old_model", + embedding_model_provider: Optional[str] = None, + embedding_model: Optional[str] = None, + collection_binding_id: Optional[str] = None, + **kwargs, + ) -> Mock: + """Create a mock dataset with specified attributes.""" + dataset = Mock(spec=Dataset) + dataset.id = dataset_id + dataset.provider = provider + dataset.name = name + dataset.description = description + dataset.indexing_technique = indexing_technique + dataset.retrieval_model = retrieval_model + dataset.embedding_model_provider = embedding_model_provider + dataset.embedding_model = embedding_model + dataset.collection_binding_id = collection_binding_id + for key, value in kwargs.items(): + 
setattr(dataset, key, value) + return dataset + + @staticmethod + def create_user_mock(user_id: str = "user-789") -> Mock: + """Create a mock user.""" + user = Mock() + user.id = user_id + return user + + @staticmethod + def create_external_binding_mock( + external_knowledge_id: str = "old_knowledge_id", external_knowledge_api_id: str = "old_api_id" + ) -> Mock: + """Create a mock external knowledge binding.""" + binding = Mock(spec=ExternalKnowledgeBindings) + binding.external_knowledge_id = external_knowledge_id + binding.external_knowledge_api_id = external_knowledge_api_id + return binding + + @staticmethod + def create_embedding_model_mock(model: str = "text-embedding-ada-002", provider: str = "openai") -> Mock: + """Create a mock embedding model.""" + embedding_model = Mock() + embedding_model.model = model + embedding_model.provider = provider + return embedding_model + + @staticmethod + def create_collection_binding_mock(binding_id: str = "binding-456") -> Mock: + """Create a mock collection binding.""" + binding = Mock() + binding.id = binding_id + return binding + + @staticmethod + def create_current_user_mock(tenant_id: str = "tenant-123") -> Mock: + """Create a mock current user.""" + current_user = Mock() + current_user.current_tenant_id = tenant_id + return current_user + + +class TestDatasetServiceUpdateDataset: + """ + Comprehensive unit tests for DatasetService.update_dataset method. + + This test suite covers all supported scenarios including: + - External dataset updates + - Internal dataset updates with different indexing techniques + - Embedding model updates + - Permission checks + - Error conditions and edge cases + """ + + @pytest.fixture + def mock_dataset_service_dependencies(self): + """Common mock setup for dataset service dependencies.""" + with ( + patch("services.dataset_service.DatasetService.get_dataset") as mock_get_dataset, + patch("services.dataset_service.DatasetService.check_dataset_permission") as mock_check_perm, + patch("extensions.ext_database.db.session") as mock_db, + patch("services.dataset_service.datetime") as mock_datetime, + ): + current_time = datetime.datetime(2023, 1, 1, 12, 0, 0) + mock_datetime.datetime.now.return_value = current_time + mock_datetime.UTC = datetime.UTC + + yield { + "get_dataset": mock_get_dataset, + "check_permission": mock_check_perm, + "db_session": mock_db, + "datetime": mock_datetime, + "current_time": current_time, + } + + @pytest.fixture + def mock_external_provider_dependencies(self): + """Mock setup for external provider tests.""" + with patch("services.dataset_service.Session") as mock_session: + from extensions.ext_database import db + + with patch.object(db.__class__, "engine", new_callable=Mock): + session_mock = Mock() + mock_session.return_value.__enter__.return_value = session_mock + yield session_mock + + @pytest.fixture + def mock_internal_provider_dependencies(self): + """Mock setup for internal provider tests.""" + with ( + patch("services.dataset_service.ModelManager") as mock_model_manager, + patch( + "services.dataset_service.DatasetCollectionBindingService.get_dataset_collection_binding" + ) as mock_get_binding, + patch("services.dataset_service.deal_dataset_vector_index_task") as mock_task, + patch("services.dataset_service.current_user") as mock_current_user, + ): + mock_current_user.current_tenant_id = "tenant-123" + yield { + "model_manager": mock_model_manager, + "get_binding": mock_get_binding, + "task": mock_task, + "current_user": mock_current_user, + } + + def 
_assert_database_update_called(self, mock_db, dataset_id: str, expected_updates: dict[str, Any]): + """Helper method to verify database update calls.""" + mock_db.query.return_value.filter_by.return_value.update.assert_called_once_with(expected_updates) + mock_db.commit.assert_called_once() + + def _assert_external_dataset_update(self, mock_dataset, mock_binding, update_data: dict[str, Any]): + """Helper method to verify external dataset updates.""" + assert mock_dataset.name == update_data.get("name", mock_dataset.name) + assert mock_dataset.description == update_data.get("description", mock_dataset.description) + assert mock_dataset.retrieval_model == update_data.get("external_retrieval_model", mock_dataset.retrieval_model) + + if "external_knowledge_id" in update_data: + assert mock_binding.external_knowledge_id == update_data["external_knowledge_id"] + if "external_knowledge_api_id" in update_data: + assert mock_binding.external_knowledge_api_id == update_data["external_knowledge_api_id"] + + # ==================== External Dataset Tests ==================== + + def test_update_external_dataset_success( + self, mock_dataset_service_dependencies, mock_external_provider_dependencies + ): + """Test successful update of external dataset.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock( + provider="external", name="old_name", description="old_description", retrieval_model="old_model" + ) + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + binding = DatasetUpdateTestDataFactory.create_external_binding_mock() + + # Mock external knowledge binding query + mock_external_provider_dependencies.query.return_value.filter_by.return_value.first.return_value = binding + + update_data = { + "name": "new_name", + "description": "new_description", + "external_retrieval_model": "new_model", + "permission": "only_me", + "external_knowledge_id": "new_knowledge_id", + "external_knowledge_api_id": "new_api_id", + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify permission check was called + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + + # Verify dataset and binding updates + self._assert_external_dataset_update(dataset, binding, update_data) + + # Verify database operations + mock_db = mock_dataset_service_dependencies["db_session"] + mock_db.add.assert_any_call(dataset) + mock_db.add.assert_any_call(binding) + mock_db.commit.assert_called_once() + + # Verify return value + assert result == dataset + + def test_update_external_dataset_missing_knowledge_id_error(self, mock_dataset_service_dependencies): + """Test error when external knowledge id is missing.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="external") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + update_data = {"name": "new_name", "external_knowledge_api_id": "api_id"} + + with pytest.raises(ValueError) as context: + DatasetService.update_dataset("dataset-123", update_data, user) + + assert "External knowledge id is required" in str(context.value) + + def test_update_external_dataset_missing_api_id_error(self, mock_dataset_service_dependencies): + """Test error when external knowledge api id is missing.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="external") + 
mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + update_data = {"name": "new_name", "external_knowledge_id": "knowledge_id"} + + with pytest.raises(ValueError) as context: + DatasetService.update_dataset("dataset-123", update_data, user) + + assert "External knowledge api id is required" in str(context.value) + + def test_update_external_dataset_binding_not_found_error( + self, mock_dataset_service_dependencies, mock_external_provider_dependencies + ): + """Test error when external knowledge binding is not found.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="external") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + # Mock external knowledge binding query returning None + mock_external_provider_dependencies.query.return_value.filter_by.return_value.first.return_value = None + + update_data = { + "name": "new_name", + "external_knowledge_id": "knowledge_id", + "external_knowledge_api_id": "api_id", + } + + with pytest.raises(ValueError) as context: + DatasetService.update_dataset("dataset-123", update_data, user) + + assert "External knowledge binding not found" in str(context.value) + + # ==================== Internal Dataset Basic Tests ==================== + + def test_update_internal_dataset_basic_success(self, mock_dataset_service_dependencies): + """Test successful update of internal dataset with basic fields.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock( + provider="vendor", + indexing_technique="high_quality", + embedding_model_provider="openai", + embedding_model="text-embedding-ada-002", + collection_binding_id="binding-123", + ) + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + update_data = { + "name": "new_name", + "description": "new_description", + "indexing_technique": "high_quality", + "retrieval_model": "new_model", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-ada-002", + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify permission check was called + mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user) + + # Verify database update was called with correct filtered data + expected_filtered_data = { + "name": "new_name", + "description": "new_description", + "indexing_technique": "high_quality", + "retrieval_model": "new_model", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-ada-002", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify return value + assert result == dataset + + def test_update_internal_dataset_filter_none_values(self, mock_dataset_service_dependencies): + """Test that None values are filtered out except for description field.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="vendor", indexing_technique="high_quality") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + update_data = { + "name": "new_name", + "description": None, # Should be included + "indexing_technique": "high_quality", + 
"retrieval_model": "new_model", + "embedding_model_provider": None, # Should be filtered out + "embedding_model": None, # Should be filtered out + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify database update was called with filtered data + expected_filtered_data = { + "name": "new_name", + "description": None, # Description should be included even if None + "indexing_technique": "high_quality", + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + actual_call_args = mock_dataset_service_dependencies[ + "db_session" + ].query.return_value.filter_by.return_value.update.call_args[0][0] + # Remove timestamp for comparison as it's dynamic + del actual_call_args["updated_at"] + del expected_filtered_data["updated_at"] + + assert actual_call_args == expected_filtered_data + + # Verify return value + assert result == dataset + + # ==================== Indexing Technique Switch Tests ==================== + + def test_update_internal_dataset_indexing_technique_to_economy( + self, mock_dataset_service_dependencies, mock_internal_provider_dependencies + ): + """Test updating internal dataset indexing technique to economy.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="vendor", indexing_technique="high_quality") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + update_data = {"indexing_technique": "economy", "retrieval_model": "new_model"} + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify database update was called with embedding model fields cleared + expected_filtered_data = { + "indexing_technique": "economy", + "embedding_model": None, + "embedding_model_provider": None, + "collection_binding_id": None, + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify return value + assert result == dataset + + def test_update_internal_dataset_indexing_technique_to_high_quality( + self, mock_dataset_service_dependencies, mock_internal_provider_dependencies + ): + """Test updating internal dataset indexing technique to high_quality.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="vendor", indexing_technique="economy") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + # Mock embedding model + embedding_model = DatasetUpdateTestDataFactory.create_embedding_model_mock() + mock_internal_provider_dependencies[ + "model_manager" + ].return_value.get_model_instance.return_value = embedding_model + + # Mock collection binding + binding = DatasetUpdateTestDataFactory.create_collection_binding_mock() + mock_internal_provider_dependencies["get_binding"].return_value = binding + + update_data = { + "indexing_technique": "high_quality", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-ada-002", + "retrieval_model": "new_model", + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify embedding model was validated + 
mock_internal_provider_dependencies["model_manager"].return_value.get_model_instance.assert_called_once_with( + tenant_id=mock_internal_provider_dependencies["current_user"].current_tenant_id, + provider="openai", + model_type=ModelType.TEXT_EMBEDDING, + model="text-embedding-ada-002", + ) + + # Verify collection binding was retrieved + mock_internal_provider_dependencies["get_binding"].assert_called_once_with("openai", "text-embedding-ada-002") + + # Verify database update was called with correct data + expected_filtered_data = { + "indexing_technique": "high_quality", + "embedding_model": "text-embedding-ada-002", + "embedding_model_provider": "openai", + "collection_binding_id": "binding-456", + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify vector index task was triggered + mock_internal_provider_dependencies["task"].delay.assert_called_once_with("dataset-123", "add") + + # Verify return value + assert result == dataset + + # ==================== Embedding Model Update Tests ==================== + + def test_update_internal_dataset_keep_existing_embedding_model(self, mock_dataset_service_dependencies): + """Test updating internal dataset without changing embedding model.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock( + provider="vendor", + indexing_technique="high_quality", + embedding_model_provider="openai", + embedding_model="text-embedding-ada-002", + collection_binding_id="binding-123", + ) + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + update_data = {"name": "new_name", "indexing_technique": "high_quality", "retrieval_model": "new_model"} + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify database update was called with existing embedding model preserved + expected_filtered_data = { + "name": "new_name", + "indexing_technique": "high_quality", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-ada-002", + "collection_binding_id": "binding-123", + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify return value + assert result == dataset + + def test_update_internal_dataset_embedding_model_update( + self, mock_dataset_service_dependencies, mock_internal_provider_dependencies + ): + """Test updating internal dataset with new embedding model.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock( + provider="vendor", + indexing_technique="high_quality", + embedding_model_provider="openai", + embedding_model="text-embedding-ada-002", + ) + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + # Mock embedding model + embedding_model = DatasetUpdateTestDataFactory.create_embedding_model_mock("text-embedding-3-small") + mock_internal_provider_dependencies[ + "model_manager" + ].return_value.get_model_instance.return_value = embedding_model + + # Mock collection binding + binding = 
DatasetUpdateTestDataFactory.create_collection_binding_mock("binding-789") + mock_internal_provider_dependencies["get_binding"].return_value = binding + + update_data = { + "indexing_technique": "high_quality", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-3-small", + "retrieval_model": "new_model", + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify embedding model was validated + mock_internal_provider_dependencies["model_manager"].return_value.get_model_instance.assert_called_once_with( + tenant_id=mock_internal_provider_dependencies["current_user"].current_tenant_id, + provider="openai", + model_type=ModelType.TEXT_EMBEDDING, + model="text-embedding-3-small", + ) + + # Verify collection binding was retrieved + mock_internal_provider_dependencies["get_binding"].assert_called_once_with("openai", "text-embedding-3-small") + + # Verify database update was called with correct data + expected_filtered_data = { + "indexing_technique": "high_quality", + "embedding_model": "text-embedding-3-small", + "embedding_model_provider": "openai", + "collection_binding_id": "binding-789", + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify vector index task was triggered + mock_internal_provider_dependencies["task"].delay.assert_called_once_with("dataset-123", "update") + + # Verify return value + assert result == dataset + + def test_update_internal_dataset_no_indexing_technique_change(self, mock_dataset_service_dependencies): + """Test updating internal dataset without changing indexing technique.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock( + provider="vendor", + indexing_technique="high_quality", + embedding_model_provider="openai", + embedding_model="text-embedding-ada-002", + collection_binding_id="binding-123", + ) + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + update_data = { + "name": "new_name", + "indexing_technique": "high_quality", # Same as current + "retrieval_model": "new_model", + } + + result = DatasetService.update_dataset("dataset-123", update_data, user) + + # Verify database update was called with correct data + expected_filtered_data = { + "name": "new_name", + "indexing_technique": "high_quality", + "embedding_model_provider": "openai", + "embedding_model": "text-embedding-ada-002", + "collection_binding_id": "binding-123", + "retrieval_model": "new_model", + "updated_by": user.id, + "updated_at": mock_dataset_service_dependencies["current_time"].replace(tzinfo=None), + } + + self._assert_database_update_called( + mock_dataset_service_dependencies["db_session"], "dataset-123", expected_filtered_data + ) + + # Verify return value + assert result == dataset + + # ==================== Error Handling Tests ==================== + + def test_update_dataset_not_found_error(self, mock_dataset_service_dependencies): + """Test error when dataset is not found.""" + mock_dataset_service_dependencies["get_dataset"].return_value = None + + user = DatasetUpdateTestDataFactory.create_user_mock() + update_data = {"name": "new_name"} + + with pytest.raises(ValueError) as context: + DatasetService.update_dataset("dataset-123", update_data, user) + + assert "Dataset not found" in 
str(context.value) + + def test_update_dataset_permission_error(self, mock_dataset_service_dependencies): + """Test error when user doesn't have permission.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock() + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + mock_dataset_service_dependencies["check_permission"].side_effect = NoPermissionError("No permission") + + update_data = {"name": "new_name"} + + with pytest.raises(NoPermissionError): + DatasetService.update_dataset("dataset-123", update_data, user) + + def test_update_internal_dataset_embedding_model_error( + self, mock_dataset_service_dependencies, mock_internal_provider_dependencies + ): + """Test error when embedding model is not available.""" + dataset = DatasetUpdateTestDataFactory.create_dataset_mock(provider="vendor", indexing_technique="economy") + mock_dataset_service_dependencies["get_dataset"].return_value = dataset + + user = DatasetUpdateTestDataFactory.create_user_mock() + + # Mock model manager to raise error + mock_internal_provider_dependencies["model_manager"].return_value.get_model_instance.side_effect = Exception( + "No Embedding Model available" + ) + + update_data = { + "indexing_technique": "high_quality", + "embedding_model_provider": "invalid_provider", + "embedding_model": "invalid_model", + "retrieval_model": "new_model", + } + + with pytest.raises(Exception) as context: + DatasetService.update_dataset("dataset-123", update_data, user) + + assert "No Embedding Model available".lower() in str(context.value).lower() diff --git a/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py new file mode 100644 index 0000000000..8ae69c8d64 --- /dev/null +++ b/api/tests/unit_tests/services/workflow/test_workflow_draft_variable_service.py @@ -0,0 +1,222 @@ +import dataclasses +import secrets +from unittest import mock +from unittest.mock import Mock, patch + +import pytest +from sqlalchemy.orm import Session + +from core.app.entities.app_invoke_entities import InvokeFrom +from core.variables.types import SegmentType +from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID +from core.workflow.nodes import NodeType +from models.enums import DraftVariableType +from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel +from services.workflow_draft_variable_service import ( + DraftVariableSaver, + VariableResetError, + WorkflowDraftVariableService, +) + + +class TestDraftVariableSaver: + def _get_test_app_id(self): + suffix = secrets.token_hex(6) + return f"test_app_id_{suffix}" + + def test__should_variable_be_visible(self): + mock_session = mock.MagicMock(spec=Session) + test_app_id = self._get_test_app_id() + saver = DraftVariableSaver( + session=mock_session, + app_id=test_app_id, + node_id="test_node_id", + node_type=NodeType.START, + invoke_from=InvokeFrom.DEBUGGER, + node_execution_id="test_execution_id", + ) + assert saver._should_variable_be_visible("123_456", NodeType.IF_ELSE, "output") == False + assert saver._should_variable_be_visible("123", NodeType.START, "output") == True + + def test__normalize_variable_for_start_node(self): + @dataclasses.dataclass(frozen=True) + class TestCase: + name: str + input_node_id: str + input_name: str + expected_node_id: str + expected_name: str + + _NODE_ID = "1747228642872" + cases = [ + TestCase( + name="name with `sys.` prefix should return 
the system node_id", + input_node_id=_NODE_ID, + input_name="sys.workflow_id", + expected_node_id=SYSTEM_VARIABLE_NODE_ID, + expected_name="workflow_id", + ), + TestCase( + name="name without `sys.` prefix should return the original input node_id", + input_node_id=_NODE_ID, + input_name="start_input", + expected_node_id=_NODE_ID, + expected_name="start_input", + ), + TestCase( + name="dummy_variable should return the original input node_id", + input_node_id=_NODE_ID, + input_name="__dummy__", + expected_node_id=_NODE_ID, + expected_name="__dummy__", + ), + ] + + mock_session = mock.MagicMock(spec=Session) + test_app_id = self._get_test_app_id() + saver = DraftVariableSaver( + session=mock_session, + app_id=test_app_id, + node_id=_NODE_ID, + node_type=NodeType.START, + invoke_from=InvokeFrom.DEBUGGER, + node_execution_id="test_execution_id", + ) + for idx, c in enumerate(cases, 1): + fail_msg = f"Test case {c.name} failed, index={idx}" + node_id, name = saver._normalize_variable_for_start_node(c.input_name) + assert node_id == c.expected_node_id, fail_msg + assert name == c.expected_name, fail_msg + + +class TestWorkflowDraftVariableService: + def _get_test_app_id(self): + suffix = secrets.token_hex(6) + return f"test_app_id_{suffix}" + + def test_reset_conversation_variable(self): + """Test resetting a conversation variable""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + mock_workflow = Mock(spec=Workflow) + mock_workflow.app_id = self._get_test_app_id() + + # Create mock variable + mock_variable = Mock(spec=WorkflowDraftVariable) + mock_variable.get_variable_type.return_value = DraftVariableType.CONVERSATION + mock_variable.id = "var-id" + mock_variable.name = "test_var" + + # Mock the _reset_conv_var method + expected_result = Mock(spec=WorkflowDraftVariable) + with patch.object(service, "_reset_conv_var", return_value=expected_result) as mock_reset_conv: + result = service.reset_variable(mock_workflow, mock_variable) + + mock_reset_conv.assert_called_once_with(mock_workflow, mock_variable) + assert result == expected_result + + def test_reset_node_variable_with_no_execution_id(self): + """Test resetting a node variable with no execution ID - should delete variable""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + mock_workflow = Mock(spec=Workflow) + mock_workflow.app_id = self._get_test_app_id() + + # Create mock variable with no execution ID + mock_variable = Mock(spec=WorkflowDraftVariable) + mock_variable.get_variable_type.return_value = DraftVariableType.NODE + mock_variable.node_execution_id = None + mock_variable.id = "var-id" + mock_variable.name = "test_var" + + result = service._reset_node_var(mock_workflow, mock_variable) + + # Should delete the variable and return None + mock_session.delete.assert_called_once_with(instance=mock_variable) + mock_session.flush.assert_called_once() + assert result is None + + def test_reset_node_variable_with_missing_execution_record(self): + """Test resetting a node variable when execution record doesn't exist""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + mock_workflow = Mock(spec=Workflow) + mock_workflow.app_id = self._get_test_app_id() + + # Create mock variable with execution ID + mock_variable = Mock(spec=WorkflowDraftVariable) + mock_variable.get_variable_type.return_value = DraftVariableType.NODE + mock_variable.node_execution_id = "exec-id" + mock_variable.id = "var-id" + mock_variable.name = 
"test_var" + + # Mock session.scalars to return None (no execution record found) + mock_scalars = Mock() + mock_scalars.first.return_value = None + mock_session.scalars.return_value = mock_scalars + + result = service._reset_node_var(mock_workflow, mock_variable) + + # Should delete the variable and return None + mock_session.delete.assert_called_once_with(instance=mock_variable) + mock_session.flush.assert_called_once() + assert result is None + + def test_reset_node_variable_with_valid_execution_record(self): + """Test resetting a node variable with valid execution record - should restore from execution""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + mock_workflow = Mock(spec=Workflow) + mock_workflow.app_id = self._get_test_app_id() + + # Create mock variable with execution ID + mock_variable = Mock(spec=WorkflowDraftVariable) + mock_variable.get_variable_type.return_value = DraftVariableType.NODE + mock_variable.node_execution_id = "exec-id" + mock_variable.id = "var-id" + mock_variable.name = "test_var" + mock_variable.node_id = "node-id" + mock_variable.value_type = SegmentType.STRING + + # Create mock execution record + mock_execution = Mock(spec=WorkflowNodeExecutionModel) + mock_execution.process_data_dict = {"test_var": "process_value"} + mock_execution.outputs_dict = {"test_var": "output_value"} + + # Mock session.scalars to return the execution record + mock_scalars = Mock() + mock_scalars.first.return_value = mock_execution + mock_session.scalars.return_value = mock_scalars + + # Mock workflow methods + mock_node_config = {"type": "test_node"} + mock_workflow.get_node_config_by_id.return_value = mock_node_config + mock_workflow.get_node_type_from_node_config.return_value = NodeType.LLM + + result = service._reset_node_var(mock_workflow, mock_variable) + + # Verify variable.set_value was called with the correct value + mock_variable.set_value.assert_called_once() + # Verify last_edited_at was reset + assert mock_variable.last_edited_at is None + # Verify session.flush was called + mock_session.flush.assert_called() + + # Should return the updated variable + assert result == mock_variable + + def test_reset_system_variable_raises_error(self): + """Test that resetting a system variable raises an error""" + mock_session = Mock(spec=Session) + service = WorkflowDraftVariableService(mock_session) + mock_workflow = Mock(spec=Workflow) + mock_workflow.app_id = self._get_test_app_id() + + mock_variable = Mock(spec=WorkflowDraftVariable) + mock_variable.get_variable_type.return_value = DraftVariableType.SYS # Not a valid enum value for this test + mock_variable.id = "var-id" + + with pytest.raises(VariableResetError) as exc_info: + service.reset_variable(mock_workflow, mock_variable) + assert "cannot reset system variable" in str(exc_info.value) + assert "variable_id=var-id" in str(exc_info.value) diff --git a/api/tests/unit_tests/utils/http_parser/test_oauth_convert_request_to_raw_data.py b/api/tests/unit_tests/utils/http_parser/test_oauth_convert_request_to_raw_data.py index f788a9756b..293ac253f5 100644 --- a/api/tests/unit_tests/utils/http_parser/test_oauth_convert_request_to_raw_data.py +++ b/api/tests/unit_tests/utils/http_parser/test_oauth_convert_request_to_raw_data.py @@ -1,3 +1,5 @@ +import json + from werkzeug import Request from werkzeug.datastructures import Headers from werkzeug.test import EnvironBuilder @@ -15,6 +17,59 @@ def test_oauth_convert_request_to_raw_data(): request = Request(builder.get_environ()) 
raw_request_bytes = oauth_handler._convert_request_to_raw_data(request) - assert b"GET /test HTTP/1.1" in raw_request_bytes + assert b"GET /test? HTTP/1.1" in raw_request_bytes assert b"Content-Type: application/json" in raw_request_bytes assert b"\r\n\r\n" in raw_request_bytes + + +def test_oauth_convert_request_to_raw_data_with_query_params(): + oauth_handler = OAuthHandler() + builder = EnvironBuilder( + method="GET", + path="/test", + query_string="code=abc123&state=xyz789", + headers=Headers({"Content-Type": "application/json"}), + ) + request = Request(builder.get_environ()) + raw_request_bytes = oauth_handler._convert_request_to_raw_data(request) + + assert b"GET /test?code=abc123&state=xyz789 HTTP/1.1" in raw_request_bytes + assert b"Content-Type: application/json" in raw_request_bytes + assert b"\r\n\r\n" in raw_request_bytes + + +def test_oauth_convert_request_to_raw_data_with_post_body(): + oauth_handler = OAuthHandler() + builder = EnvironBuilder( + method="POST", + path="/test", + data="param1=value1&param2=value2", + headers=Headers({"Content-Type": "application/x-www-form-urlencoded"}), + ) + request = Request(builder.get_environ()) + raw_request_bytes = oauth_handler._convert_request_to_raw_data(request) + + assert b"POST /test? HTTP/1.1" in raw_request_bytes + assert b"Content-Type: application/x-www-form-urlencoded" in raw_request_bytes + assert b"\r\n\r\n" in raw_request_bytes + assert b"param1=value1&param2=value2" in raw_request_bytes + + +def test_oauth_convert_request_to_raw_data_with_json_body(): + oauth_handler = OAuthHandler() + json_data = {"code": "abc123", "state": "xyz789", "grant_type": "authorization_code"} + builder = EnvironBuilder( + method="POST", + path="/test", + data=json.dumps(json_data), + headers=Headers({"Content-Type": "application/json"}), + ) + request = Request(builder.get_environ()) + raw_request_bytes = oauth_handler._convert_request_to_raw_data(request) + + assert b"POST /test? 
HTTP/1.1" in raw_request_bytes + assert b"Content-Type: application/json" in raw_request_bytes + assert b"\r\n\r\n" in raw_request_bytes + assert b'"code": "abc123"' in raw_request_bytes + assert b'"state": "xyz789"' in raw_request_bytes + assert b'"grant_type": "authorization_code"' in raw_request_bytes diff --git a/api/uv.lock b/api/uv.lock index c900191ceb..759f1aec03 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -36,7 +36,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.12.7" +version = "3.12.13" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -47,42 +47,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/62/95588e933dfea06a3af0332990bd19f6768f8f37fa4c0fe33fe4c55cf9d0/aiohttp-3.12.7.tar.gz", hash = "sha256:08bf55b216c779eddb6e41c1841c17d7ddd12776c7d7b36051c0a292a9ca828e", size = 7806530, upload-time = "2025-06-02T16:34:10.399Z" } +sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160, upload-time = "2025-06-14T15:15:41.354Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/19/37560cc111d6fd95ff6c4bd14445e3c629269fce406c89cc7a69a2865ecf/aiohttp-3.12.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:388b5947aa6931ef4ce3ed4edde6853e84980677886992cfadcf733dd06eed63", size = 707169, upload-time = "2025-06-02T16:31:39.107Z" }, - { url = "https://files.pythonhosted.org/packages/b9/18/29bbefb094f81a687473c1d31391bf8a4c48c7b5b8559c3679fc14e67597/aiohttp-3.12.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ed5af1cce257cca27a3e920b003b3b397f63418a203064b7d804ea3b45782af", size = 479443, upload-time = "2025-06-02T16:31:41.185Z" }, - { url = "https://files.pythonhosted.org/packages/cf/7d/119f3e012c75a3fe38f86ac1d77f1452779e0e940770d5827d4e62aa5655/aiohttp-3.12.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f466ae8f9c02993b7d167be685bdbeb527cf254a3cfcc757697e0e336399d0a2", size = 467706, upload-time = "2025-06-02T16:31:43.401Z" }, - { url = "https://files.pythonhosted.org/packages/83/f1/f61d8573d648e17347ab9a112063e4363664b5b6100792467fbb26028bde/aiohttp-3.12.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be095a420a9f9a12eff343d877ae180dd919238b539431af08cef929e874759", size = 1737902, upload-time = "2025-06-02T16:31:45.948Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f8/7a8a000bc63de3c79aaa8f03b0784e29e9982276f4579c5e2e56d560e403/aiohttp-3.12.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b058cf2ba6adba699960d7bc403411c8a99ab5d3e5ea3eb01473638ae7d1a30e", size = 1686569, upload-time = "2025-06-02T16:31:47.749Z" }, - { url = "https://files.pythonhosted.org/packages/5c/4e/29a5b35ca9a598f51dc7deff4e8403bf813988f30a8b250e25a8442641b7/aiohttp-3.12.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9b6a660163b055686dbb0acc961978fd14537eba5d9da6cbdb4dced7a8d3be1a", size = 1785359, upload-time = "2025-06-02T16:31:49.687Z" }, - { url = "https://files.pythonhosted.org/packages/f9/36/0521398a69c40ac24c659b130597e2544cde1d7dd00291b8a6206bb552d0/aiohttp-3.12.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d741923905f267ad5d5c8f86a56f9d2beac9f32a36c217c5d9ef65cd74fd8ca0", size = 1824408, 
upload-time = "2025-06-02T16:31:51.518Z" }, - { url = "https://files.pythonhosted.org/packages/c1/41/79506d76da96399b6b700acbe10b14291547a3b49a1cc7ed2c5edaa199ce/aiohttp-3.12.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:519f5454b6018158ae0e789b8f6a88726c47dd680982eb318ef3ca4dee727314", size = 1726867, upload-time = "2025-06-02T16:31:53.277Z" }, - { url = "https://files.pythonhosted.org/packages/32/d1/d59ed16962934b46c7569d04af2dc9638a38ae5812680b9d6c7ee42d770e/aiohttp-3.12.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4eebfe470e22cc4b374d7e32c07e96d777a5c0fa51f3824de68e697da037ec", size = 1663943, upload-time = "2025-06-02T16:31:55.094Z" }, - { url = "https://files.pythonhosted.org/packages/15/d5/971d1b277e6a3d5b679f0c9ba076c343a5125ea2eacc51c23ea7d875d43a/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:74ff39445f94923cf595e9e6dd602ecbe66b12364e2207e61342b8834417f8da", size = 1712217, upload-time = "2025-06-02T16:31:57.336Z" }, - { url = "https://files.pythonhosted.org/packages/e1/9c/c21fd0ba87772f3d1d43cdbfcfd40fe29f37d36a5d73997a8a4d4d1485c3/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:77cb9dba16486ecfeac8076763600b9714941e0ff696e53a30e8d408d9a196ca", size = 1707375, upload-time = "2025-06-02T16:31:59.12Z" }, - { url = "https://files.pythonhosted.org/packages/85/48/bb97ef3a694df852b70e6f5c1aaf3621a3a26b35f0a0d90481f3fdb1ce8b/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a7b3b9cbe83e3918a1918b0de274884f17b64224c1c9210a6fb0f7c10d246636", size = 1687561, upload-time = "2025-06-02T16:32:01.385Z" }, - { url = "https://files.pythonhosted.org/packages/d5/75/0b85f30ba9eb1dbdb5d3a53d3a0db29990220f69187acb24d06903686c5d/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6055f53c70938498884e71ca966abe8e9e7558489e13a7e40b6384dee7230d1d", size = 1781163, upload-time = "2025-06-02T16:32:03.176Z" }, - { url = "https://files.pythonhosted.org/packages/92/51/6350a4c485c7d2fb794101d46085c3830485ec1c37738d8af6c9c5ed8e1a/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8493a42d5b2a736c6804239b985feebeea1c60f8fcb46a3607d6dce3c1a42b12", size = 1801624, upload-time = "2025-06-02T16:32:05.027Z" }, - { url = "https://files.pythonhosted.org/packages/4f/dd/6a75eaaac93b5552090e42c38a576062028ce4af50f0b50ac550332d332c/aiohttp-3.12.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:372f2237cade45f563d973c2a913895f2699a892c0eb11c55c6880b6f0acf219", size = 1714679, upload-time = "2025-06-02T16:32:06.837Z" }, - { url = "https://files.pythonhosted.org/packages/de/ad/0574964387d8eed7fcd0c2fe6ef20c4affe0b265c710938d5fffdb3776b5/aiohttp-3.12.7-cp311-cp311-win32.whl", hash = "sha256:41f686749a099b507563a5c0cb4fd77367b05448a2c1758784ad506a28e9e579", size = 424709, upload-time = "2025-06-02T16:32:08.671Z" }, - { url = "https://files.pythonhosted.org/packages/4f/ab/6b82b43abb0990e4452aaef509cf1403ab50c04b5c090f92fb1b255fb319/aiohttp-3.12.7-cp311-cp311-win_amd64.whl", hash = "sha256:7a3691583470d4397aca70fbf8e0f0778b63a2c2a6a23263bdeeb68395972f29", size = 449100, upload-time = "2025-06-02T16:32:10.373Z" }, - { url = "https://files.pythonhosted.org/packages/5d/65/0bd8ccbffa33ee69db9f5c43f3f62fb8b600b607388e9a8deab8962d0523/aiohttp-3.12.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9b9345918f5b5156a5712c37d1d331baf320df67547ea032a49a609b773c3606", size = 698263, upload-time = "2025-06-02T16:32:12.107Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/64/a48a8abc4e684fb447d1f7b61e7adcb19865b91e20b50595f49b2942fbb3/aiohttp-3.12.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3091b4883f405dbabeb9ea821a25dec16d03a51c3e0d2752fc3ab48b652bf196", size = 472877, upload-time = "2025-06-02T16:32:14.386Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e4/994bc56a7d7733e9cd1f45db8b656e78d51d7a61cefff8043ec4f7d4a23f/aiohttp-3.12.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:97fd97abd4cf199eff4041d0346a7dc68b60deab177f01de87283be513ffc3ab", size = 465716, upload-time = "2025-06-02T16:32:16.108Z" }, - { url = "https://files.pythonhosted.org/packages/39/b0/bddc489288a0e3b05fa05387db9caebc38577204a17db0d5428abae524ba/aiohttp-3.12.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a5938973105cd5ff17176e8cb36bc19cac7c82ae7c58c0dbd7e023972d0c708", size = 1712513, upload-time = "2025-06-02T16:32:17.898Z" }, - { url = "https://files.pythonhosted.org/packages/4d/4a/c06d3ce0dc5f96338cc8d18da57d74608585a3751234eeef5952e4f48ade/aiohttp-3.12.7-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e506ae5c4c05d1a1e87edd64b994cea2d49385d41d32e1c6be8764f31cf2245c", size = 1695167, upload-time = "2025-06-02T16:32:20.131Z" }, - { url = "https://files.pythonhosted.org/packages/79/ec/e847fdfe2b1c1f1a2b0ba5343a9b2bd033a0545f8eaf1f7894a6614473ae/aiohttp-3.12.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b780b402e6361c4cfcec252580f5ecdd86cb68376520ac34748d3f8b262dd598", size = 1750261, upload-time = "2025-06-02T16:32:22.717Z" }, - { url = "https://files.pythonhosted.org/packages/2c/5e/b832ff59737d99cc5ae51b737c52976d19990ccee922ba6fe811f615e7f9/aiohttp-3.12.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf981bbfb7ff2ebc1b3bfae49d2efe2c51ca1cf3d90867f47c310df65398e85e", size = 1796416, upload-time = "2025-06-02T16:32:25.15Z" }, - { url = "https://files.pythonhosted.org/packages/e0/ff/51ae87efce9b53aafd384179f58923bf178f561897cf80054a440fdf8363/aiohttp-3.12.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94f98e0e5a49f89b252e115844f756c04fc8050f38252a32a3dd994ce8121f10", size = 1715855, upload-time = "2025-06-02T16:32:27.236Z" }, - { url = "https://files.pythonhosted.org/packages/b1/54/5a77116498f84d2503f5588e687eccfa43a85aa2450bc195ee6e5bb75695/aiohttp-3.12.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:410e96cc6824fc4ced9703fb2ac2d06c6190d21fc6f5b588f62b1918628449c1", size = 1631656, upload-time = "2025-06-02T16:32:29.15Z" }, - { url = "https://files.pythonhosted.org/packages/46/34/554220592f8ade7f3cabebfb9325e95078f842140f293ced3ab977fd13ec/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:43e93987fe9df4349db8deae7c391695538c35e4ba893133c7e823234f6e4537", size = 1692718, upload-time = "2025-06-02T16:32:31.295Z" }, - { url = "https://files.pythonhosted.org/packages/ff/9d/ae7103bb8c73c3521e38ae8cde301ddc937024b1681ce134bb1ef01be7d0/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:cb3f3dcb59f3e16819a1c7d3fa32e7b87255b661c1e139a1b5940bde270704ab", size = 1714171, upload-time = "2025-06-02T16:32:33.767Z" }, - { url = "https://files.pythonhosted.org/packages/5d/4d/9b8b8f362e36392939019340321f7efcc1807bb2c4cdea8eb1019d3398ff/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:4a46fe4a4c66b2712059e48a8384eb93565fbe3251af4844860fed846ef4ca75", size = 1654822, upload-time = "2025-06-02T16:32:36.23Z" }, - { url = "https://files.pythonhosted.org/packages/48/30/0ca82df423ee346206bc167852c825cd210c11d2f1fa0064a2a55d7f60d5/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ad01793164661af70918490ef8efc2c09df7a3c686b6c84ca90a2d69cdbc3911", size = 1734385, upload-time = "2025-06-02T16:32:38.171Z" }, - { url = "https://files.pythonhosted.org/packages/43/bd/96d12318c0f82ac8323bd4459ee26291ad220f688988077a21e538b0872c/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e85c6833be3f49cead2e7bc79080e5c18d6dab9af32226ab5a01dc20c523e7d9", size = 1762356, upload-time = "2025-06-02T16:32:40.142Z" }, - { url = "https://files.pythonhosted.org/packages/6c/39/7a9b706bf42f293415584d60cf35e80d0558929ab70e72cb40b747f0dfc7/aiohttp-3.12.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3c9f52149d8249566e72c50c7985c2345521b3b78f84aa86f6f492cd50b14793", size = 1721970, upload-time = "2025-06-02T16:32:42.187Z" }, - { url = "https://files.pythonhosted.org/packages/19/f2/8899367a52dec8100f43036e5a792cfdbae317bf3a80549da90290083ff4/aiohttp-3.12.7-cp312-cp312-win32.whl", hash = "sha256:0e1c33ac0f6a396bcefe9c1d52c9d38a051861885a5c102ca5c8298aba0108fa", size = 419443, upload-time = "2025-06-02T16:32:44.335Z" }, - { url = "https://files.pythonhosted.org/packages/e8/34/ad5225b4edbcc23496537011d67ef1a147c03205c07340f4a50993b219b9/aiohttp-3.12.7-cp312-cp312-win_amd64.whl", hash = "sha256:b4aed5233a9d13e34e8624ecb798533aa2da97e7048cc69671b7a6d7a2efe7e8", size = 445544, upload-time = "2025-06-02T16:32:46.631Z" }, + { url = "https://files.pythonhosted.org/packages/6a/65/5566b49553bf20ffed6041c665a5504fb047cefdef1b701407b8ce1a47c4/aiohttp-3.12.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c229b1437aa2576b99384e4be668af1db84b31a45305d02f61f5497cfa6f60c", size = 709401, upload-time = "2025-06-14T15:13:30.774Z" }, + { url = "https://files.pythonhosted.org/packages/14/b5/48e4cc61b54850bdfafa8fe0b641ab35ad53d8e5a65ab22b310e0902fa42/aiohttp-3.12.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04076d8c63471e51e3689c93940775dc3d12d855c0c80d18ac5a1c68f0904358", size = 481669, upload-time = "2025-06-14T15:13:32.316Z" }, + { url = "https://files.pythonhosted.org/packages/04/4f/e3f95c8b2a20a0437d51d41d5ccc4a02970d8ad59352efb43ea2841bd08e/aiohttp-3.12.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55683615813ce3601640cfaa1041174dc956d28ba0511c8cbd75273eb0587014", size = 469933, upload-time = "2025-06-14T15:13:34.104Z" }, + { url = "https://files.pythonhosted.org/packages/41/c9/c5269f3b6453b1cfbd2cfbb6a777d718c5f086a3727f576c51a468b03ae2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:921bc91e602d7506d37643e77819cb0b840d4ebb5f8d6408423af3d3bf79a7b7", size = 1740128, upload-time = "2025-06-14T15:13:35.604Z" }, + { url = "https://files.pythonhosted.org/packages/6f/49/a3f76caa62773d33d0cfaa842bdf5789a78749dbfe697df38ab1badff369/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e72d17fe0974ddeae8ed86db297e23dba39c7ac36d84acdbb53df2e18505a013", size = 1688796, upload-time = "2025-06-14T15:13:37.125Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/556fccc4576dc22bf18554b64cc873b1a3e5429a5bdb7bbef7f5d0bc7664/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0653d15587909a52e024a261943cf1c5bdc69acb71f411b0dd5966d065a51a47", size = 1787589, upload-time = "2025-06-14T15:13:38.745Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3d/d81b13ed48e1a46734f848e26d55a7391708421a80336e341d2aef3b6db2/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a77b48997c66722c65e157c06c74332cdf9c7ad00494b85ec43f324e5c5a9b9a", size = 1826635, upload-time = "2025-06-14T15:13:40.733Z" }, + { url = "https://files.pythonhosted.org/packages/75/a5/472e25f347da88459188cdaadd1f108f6292f8a25e62d226e63f860486d1/aiohttp-3.12.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6946bae55fd36cfb8e4092c921075cde029c71c7cb571d72f1079d1e4e013bc", size = 1729095, upload-time = "2025-06-14T15:13:42.312Z" }, + { url = "https://files.pythonhosted.org/packages/b9/fe/322a78b9ac1725bfc59dfc301a5342e73d817592828e4445bd8f4ff83489/aiohttp-3.12.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f95db8c8b219bcf294a53742c7bda49b80ceb9d577c8e7aa075612b7f39ffb7", size = 1666170, upload-time = "2025-06-14T15:13:44.884Z" }, + { url = "https://files.pythonhosted.org/packages/7a/77/ec80912270e231d5e3839dbd6c065472b9920a159ec8a1895cf868c2708e/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03d5eb3cfb4949ab4c74822fb3326cd9655c2b9fe22e4257e2100d44215b2e2b", size = 1714444, upload-time = "2025-06-14T15:13:46.401Z" }, + { url = "https://files.pythonhosted.org/packages/21/b2/fb5aedbcb2b58d4180e58500e7c23ff8593258c27c089abfbcc7db65bd40/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6383dd0ffa15515283c26cbf41ac8e6705aab54b4cbb77bdb8935a713a89bee9", size = 1709604, upload-time = "2025-06-14T15:13:48.377Z" }, + { url = "https://files.pythonhosted.org/packages/e3/15/a94c05f7c4dc8904f80b6001ad6e07e035c58a8ebfcc15e6b5d58500c858/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6548a411bc8219b45ba2577716493aa63b12803d1e5dc70508c539d0db8dbf5a", size = 1689786, upload-time = "2025-06-14T15:13:50.401Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fd/0d2e618388f7a7a4441eed578b626bda9ec6b5361cd2954cfc5ab39aa170/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81b0fcbfe59a4ca41dc8f635c2a4a71e63f75168cc91026c61be665945739e2d", size = 1783389, upload-time = "2025-06-14T15:13:51.945Z" }, + { url = "https://files.pythonhosted.org/packages/a6/6b/6986d0c75996ef7e64ff7619b9b7449b1d1cbbe05c6755e65d92f1784fe9/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a83797a0174e7995e5edce9dcecc517c642eb43bc3cba296d4512edf346eee2", size = 1803853, upload-time = "2025-06-14T15:13:53.533Z" }, + { url = "https://files.pythonhosted.org/packages/21/65/cd37b38f6655d95dd07d496b6d2f3924f579c43fd64b0e32b547b9c24df5/aiohttp-3.12.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5734d8469a5633a4e9ffdf9983ff7cdb512524645c7a3d4bc8a3de45b935ac3", size = 1716909, upload-time = "2025-06-14T15:13:55.148Z" }, + { url = "https://files.pythonhosted.org/packages/fd/20/2de7012427dc116714c38ca564467f6143aec3d5eca3768848d62aa43e62/aiohttp-3.12.13-cp311-cp311-win32.whl", hash = "sha256:fef8d50dfa482925bb6b4c208b40d8e9fa54cecba923dc65b825a72eed9a5dbd", size = 427036, upload-time = "2025-06-14T15:13:57.076Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b6/98518bcc615ef998a64bef371178b9afc98ee25895b4f476c428fade2220/aiohttp-3.12.13-cp311-cp311-win_amd64.whl", hash = 
"sha256:9a27da9c3b5ed9d04c36ad2df65b38a96a37e9cfba6f1381b842d05d98e6afe9", size = 451427, upload-time = "2025-06-14T15:13:58.505Z" }, + { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491, upload-time = "2025-06-14T15:14:00.048Z" }, + { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104, upload-time = "2025-06-14T15:14:01.691Z" }, + { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948, upload-time = "2025-06-14T15:14:03.561Z" }, + { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742, upload-time = "2025-06-14T15:14:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393, upload-time = "2025-06-14T15:14:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486, upload-time = "2025-06-14T15:14:08.808Z" }, + { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643, upload-time = "2025-06-14T15:14:10.767Z" }, + { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082, upload-time = "2025-06-14T15:14:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884, upload-time = "2025-06-14T15:14:14.415Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943, upload-time = "2025-06-14T15:14:16.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398, upload-time = "2025-06-14T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051, upload-time = "2025-06-14T15:14:20.223Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611, upload-time = "2025-06-14T15:14:21.988Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586, upload-time = "2025-06-14T15:14:23.979Z" }, + { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197, upload-time = "2025-06-14T15:14:25.692Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771, upload-time = "2025-06-14T15:14:27.364Z" }, + { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869, upload-time = "2025-06-14T15:14:29.05Z" }, ] [[package]] @@ -111,16 +111,16 @@ wheels = [ [[package]] name = "alembic" -version = "1.16.1" +version = "1.16.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/89/bfb4fe86e3fc3972d35431af7bedbc60fa606e8b17196704a1747f7aa4c3/alembic-1.16.1.tar.gz", hash = "sha256:43d37ba24b3d17bc1eb1024fe0f51cd1dc95aeb5464594a02c6bb9ca9864bfa4", size = 1955006, upload-time = "2025-05-21T23:11:05.991Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/35/116797ff14635e496bbda0c168987f5326a6555b09312e9b817e360d1f56/alembic-1.16.2.tar.gz", hash = "sha256:e53c38ff88dadb92eb22f8b150708367db731d58ad7e9d417c9168ab516cbed8", size = 1963563, upload-time = "2025-06-16T18:05:08.566Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/59/565286efff3692c5716c212202af61466480f6357c4ae3089d4453bff1f3/alembic-1.16.1-py3-none-any.whl", hash = "sha256:0cdd48acada30d93aa1035767d67dff25702f8de74d7c3919f2e8492c8db2e67", size = 242488, upload-time = "2025-05-21T23:11:07.783Z" }, + { url = "https://files.pythonhosted.org/packages/dd/e2/88e425adac5ad887a087c38d04fe2030010572a3e0e627f8a6e8c33eeda8/alembic-1.16.2-py3-none-any.whl", hash = "sha256:5f42e9bd0afdbd1d5e3ad856c01754530367debdebf21ed6894e34af52b3bb03", size = 
242717, upload-time = "2025-06-16T18:05:10.27Z" }, ] [[package]] @@ -143,12 +143,9 @@ sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb [[package]] name = "alibabacloud-endpoint-util" -version = "0.0.3" +version = "0.0.4" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-tea" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/24/54/56736a0f224b3f2b65bb9c9ff9e20fa390351a7587c99f19ca1f8d596ae1/alibabacloud_endpoint_util-0.0.3.tar.gz", hash = "sha256:8c0efb76fdcc3af4ca716ef24bbce770201a3f83f98c0afcf81655f684b9c7d2", size = 2756, upload-time = "2020-09-16T07:27:42.19Z" } +sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813, upload-time = "2025-06-12T07:20:52.572Z" } [[package]] name = "alibabacloud-gateway-spi" @@ -544,16 +541,16 @@ wheels = [ [[package]] name = "boto3-stubs" -version = "1.38.28" +version = "1.38.37" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7b/dc/c4b318bdf66ff4973d3cbf0f218e47bdf558b94d921c8ff27d401e8ca4f9/boto3_stubs-1.38.28.tar.gz", hash = "sha256:fae8e009ea4d810b77e49d88247183b5d8d153bf268ebde8b4abdf58a701802f", size = 99065, upload-time = "2025-06-02T19:42:51.659Z" } +sdist = { url = "https://files.pythonhosted.org/packages/31/3d/eab265de66aed281852e26d00d99a5367e4531cbf8c0a125ac7a5ff429a6/boto3_stubs-1.38.37.tar.gz", hash = "sha256:c98bf859009b14578ecd3bad495c728b79686874e31eb7efca16e557ddc4a326", size = 99162, upload-time = "2025-06-16T19:43:00.877Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/4a/d70ef76080e0d37e45d07d65801a6680f5bc1d156333a4e5d0ca6103d45f/boto3_stubs-1.38.28-py3-none-any.whl", hash = "sha256:accd8c0943a8d960c5c2d4a7aa661fd43e842893e76adf1b5b163c02f4f62537", size = 68668, upload-time = "2025-06-02T19:42:43.426Z" }, + { url = "https://files.pythonhosted.org/packages/7c/7b/3960cb23b08ab51e46f0c51d96fe5e97f343b9fc264f6e0951bf789ea4e0/boto3_stubs-1.38.37-py3-none-any.whl", hash = "sha256:b7f4c60a549e9a34aaeb4ddf9fb4ef8eee9fd6b3503ee82e874f046a5cebe017", size = 68732, upload-time = "2025-06-16T19:42:54.504Z" }, ] [package.optional-dependencies] @@ -577,14 +574,14 @@ wheels = [ [[package]] name = "botocore-stubs" -version = "1.38.28" +version = "1.38.30" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-awscrt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/0e/a47d392ee3fbd444a628f1b7257affc0a13c45e8742de4eb31fa4e2758f2/botocore_stubs-1.38.28.tar.gz", hash = "sha256:b9549050b81051bdbb91966323d6a2b6c5c78ca8dcc328e16dfc44b765be39be", size = 42312, upload-time = "2025-06-02T20:18:13.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/df/9bf9f6346daf1cbcb736a12417efe025ed63d72a015799f4e8f26a823b93/botocore_stubs-1.38.30.tar.gz", hash = "sha256:291d7bf39a316c00a8a55b7255489b02c0cea1a343482e7784e8d1e235bae995", size = 42299, upload-time = "2025-06-04T20:14:50.799Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/3d/26c1a62b379f0dd401798f9f8f9331ac40f3c3917c34b5eed3ed0b85e2b0/botocore_stubs-1.38.28-py3-none-any.whl", hash = 
"sha256:9151ced682edb44b28202d268f4dc5ef5534be8b592b87e07ce4c99650818bce", size = 65629, upload-time = "2025-06-02T20:18:11.886Z" }, + { url = "https://files.pythonhosted.org/packages/98/71/cc9bc544489160cbacc8472b70ba0f9b93385628ed8bd0f673855b7ceeb7/botocore_stubs-1.38.30-py3-none-any.whl", hash = "sha256:2efb8bdf36504aff596c670d875d8f7dd15205277c15c4cea54afdba8200c266", size = 65628, upload-time = "2025-06-04T20:14:48.089Z" }, ] [[package]] @@ -727,11 +724,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.4.26" +version = "2025.6.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" }, + { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, ] [[package]] @@ -1105,36 +1102,43 @@ sdist = { url = "https://files.pythonhosted.org/packages/6b/b0/e595ce2a2527e169c [[package]] name = "cryptography" -version = "42.0.8" +version = "45.0.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/a7/1498799a2ea06148463a9a2c10ab2f6a921a74fb19e231b27dc412a748e2/cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2", size = 671250, upload-time = "2024-06-04T19:55:08.609Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/c8/a2a376a8711c1e11708b9c9972e0c3223f5fc682552c82d8db844393d6ce/cryptography-45.0.4.tar.gz", hash = "sha256:7405ade85c83c37682c8fe65554759800a4a8c54b2d96e0f8ad114d31b808d57", size = 744890, upload-time = "2025-06-10T00:03:51.297Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/8b/1b929ba8139430e09e140e6939c2b29c18df1f2fc2149e41bdbdcdaf5d1f/cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e", size = 5899961, upload-time = "2024-06-04T19:53:57.933Z" }, - { url = "https://files.pythonhosted.org/packages/fa/5d/31d833daa800e4fab33209843095df7adb4a78ea536929145534cbc15026/cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d", size = 3114353, upload-time = "2024-06-04T19:54:12.171Z" }, - { url = 
"https://files.pythonhosted.org/packages/5d/32/f6326c70a9f0f258a201d3b2632bca586ea24d214cec3cf36e374040e273/cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902", size = 3647773, upload-time = "2024-06-04T19:54:07.051Z" }, - { url = "https://files.pythonhosted.org/packages/35/66/2d87e9ca95c82c7ee5f2c09716fc4c4242c1ae6647b9bd27e55e920e9f10/cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801", size = 3839763, upload-time = "2024-06-04T19:54:30.383Z" }, - { url = "https://files.pythonhosted.org/packages/c2/de/8083fa2e68d403553a01a9323f4f8b9d7ffed09928ba25635c29fb28c1e7/cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949", size = 3632661, upload-time = "2024-06-04T19:54:32.955Z" }, - { url = "https://files.pythonhosted.org/packages/07/40/d6f6819c62e808ea74639c3c640f7edd636b86cce62cb14943996a15df92/cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9", size = 3851536, upload-time = "2024-06-04T19:53:53.131Z" }, - { url = "https://files.pythonhosted.org/packages/5c/46/de71d48abf2b6d3c808f4fbb0f4dc44a4e72786be23df0541aa2a3f6fd7e/cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583", size = 3754209, upload-time = "2024-06-04T19:54:55.259Z" }, - { url = "https://files.pythonhosted.org/packages/25/c9/86f04e150c5d5d5e4a731a2c1e0e43da84d901f388e3fea3d5de98d689a7/cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7", size = 3923551, upload-time = "2024-06-04T19:54:16.46Z" }, - { url = "https://files.pythonhosted.org/packages/53/c2/903014dafb7271fb148887d4355b2e90319cad6e810663be622b0c933fc9/cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b", size = 3739265, upload-time = "2024-06-04T19:54:23.194Z" }, - { url = "https://files.pythonhosted.org/packages/95/26/82d704d988a193cbdc69ac3b41c687c36eaed1642cce52530ad810c35645/cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7", size = 3937371, upload-time = "2024-06-04T19:55:04.303Z" }, - { url = "https://files.pythonhosted.org/packages/cf/71/4e0d05c9acd638a225f57fb6162aa3d03613c11b76893c23ea4675bb28c5/cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2", size = 2438849, upload-time = "2024-06-04T19:54:27.39Z" }, - { url = "https://files.pythonhosted.org/packages/06/0f/78da3cad74f2ba6c45321dc90394d70420ea846730dc042ef527f5a224b5/cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba", size = 2889090, upload-time = "2024-06-04T19:54:14.245Z" }, - { url = "https://files.pythonhosted.org/packages/60/12/f064af29190cdb1d38fe07f3db6126091639e1dece7ec77c4ff037d49193/cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28", size = 5901232, upload-time = "2024-06-04T19:54:52.722Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/c2/4a3eef67e009a522711ebd8ac89424c3a7fe591ece7035d964419ad52a1d/cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e", size = 3648711, upload-time = "2024-06-04T19:54:44.323Z" }, - { url = "https://files.pythonhosted.org/packages/49/1c/9f6d13cc8041c05eebff1154e4e71bedd1db8e174fff999054435994187a/cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70", size = 3841968, upload-time = "2024-06-04T19:54:57.911Z" }, - { url = "https://files.pythonhosted.org/packages/5f/f9/c3d4f19b82bdb25a3d857fe96e7e571c981810e47e3f299cc13ac429066a/cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c", size = 3633032, upload-time = "2024-06-04T19:54:48.518Z" }, - { url = "https://files.pythonhosted.org/packages/fa/e2/b7e6e8c261536c489d9cf908769880d94bd5d9a187e166b0dc838d2e6a56/cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7", size = 3852478, upload-time = "2024-06-04T19:54:50.599Z" }, - { url = "https://files.pythonhosted.org/packages/a2/68/e16751f6b859bc120f53fddbf3ebada5c34f0e9689d8af32884d8b2e4b4c/cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e", size = 3754102, upload-time = "2024-06-04T19:54:46.231Z" }, - { url = "https://files.pythonhosted.org/packages/0f/38/85c74d0ac4c540780e072b1e6f148ecb718418c1062edcb20d22f3ec5bbb/cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961", size = 3925042, upload-time = "2024-06-04T19:54:34.767Z" }, - { url = "https://files.pythonhosted.org/packages/89/f4/a8b982e88eb5350407ebdbf4717b55043271d878705329e107f4783555f2/cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1", size = 3738833, upload-time = "2024-06-04T19:54:05.231Z" }, - { url = "https://files.pythonhosted.org/packages/fd/2b/be327b580645927bb1a1f32d5a175b897a9b956bc085b095e15c40bac9ed/cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14", size = 3938751, upload-time = "2024-06-04T19:54:37.837Z" }, - { url = "https://files.pythonhosted.org/packages/3c/d5/c6a78ffccdbe4516711ebaa9ed2c7eb6ac5dfa3dc920f2c7e920af2418b0/cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c", size = 2439281, upload-time = "2024-06-04T19:53:55.903Z" }, - { url = "https://files.pythonhosted.org/packages/a2/7b/b0d330852dd5953daee6b15f742f15d9f18e9c0154eb4cfcc8718f0436da/cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a", size = 2886038, upload-time = "2024-06-04T19:54:18.707Z" }, + { url = "https://files.pythonhosted.org/packages/cc/1c/92637793de053832523b410dbe016d3f5c11b41d0cf6eef8787aabb51d41/cryptography-45.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:425a9a6ac2823ee6e46a76a21a4e8342d8fa5c01e08b823c1f19a8b74f096069", size = 7055712, upload-time = "2025-06-10T00:02:38.826Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/14/93b69f2af9ba832ad6618a03f8a034a5851dc9a3314336a3d71c252467e1/cryptography-45.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:680806cf63baa0039b920f4976f5f31b10e772de42f16310a6839d9f21a26b0d", size = 4205335, upload-time = "2025-06-10T00:02:41.64Z" }, + { url = "https://files.pythonhosted.org/packages/67/30/fae1000228634bf0b647fca80403db5ca9e3933b91dd060570689f0bd0f7/cryptography-45.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4ca0f52170e821bc8da6fc0cc565b7bb8ff8d90d36b5e9fdd68e8a86bdf72036", size = 4431487, upload-time = "2025-06-10T00:02:43.696Z" }, + { url = "https://files.pythonhosted.org/packages/6d/5a/7dffcf8cdf0cb3c2430de7404b327e3db64735747d641fc492539978caeb/cryptography-45.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f3fe7a5ae34d5a414957cc7f457e2b92076e72938423ac64d215722f6cf49a9e", size = 4208922, upload-time = "2025-06-10T00:02:45.334Z" }, + { url = "https://files.pythonhosted.org/packages/c6/f3/528729726eb6c3060fa3637253430547fbaaea95ab0535ea41baa4a6fbd8/cryptography-45.0.4-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:25eb4d4d3e54595dc8adebc6bbd5623588991d86591a78c2548ffb64797341e2", size = 3900433, upload-time = "2025-06-10T00:02:47.359Z" }, + { url = "https://files.pythonhosted.org/packages/d9/4a/67ba2e40f619e04d83c32f7e1d484c1538c0800a17c56a22ff07d092ccc1/cryptography-45.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce1678a2ccbe696cf3af15a75bb72ee008d7ff183c9228592ede9db467e64f1b", size = 4464163, upload-time = "2025-06-10T00:02:49.412Z" }, + { url = "https://files.pythonhosted.org/packages/7e/9a/b4d5aa83661483ac372464809c4b49b5022dbfe36b12fe9e323ca8512420/cryptography-45.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:49fe9155ab32721b9122975e168a6760d8ce4cffe423bcd7ca269ba41b5dfac1", size = 4208687, upload-time = "2025-06-10T00:02:50.976Z" }, + { url = "https://files.pythonhosted.org/packages/db/b7/a84bdcd19d9c02ec5807f2ec2d1456fd8451592c5ee353816c09250e3561/cryptography-45.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:2882338b2a6e0bd337052e8b9007ced85c637da19ef9ecaf437744495c8c2999", size = 4463623, upload-time = "2025-06-10T00:02:52.542Z" }, + { url = "https://files.pythonhosted.org/packages/d8/84/69707d502d4d905021cac3fb59a316344e9f078b1da7fb43ecde5e10840a/cryptography-45.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:23b9c3ea30c3ed4db59e7b9619272e94891f8a3a5591d0b656a7582631ccf750", size = 4332447, upload-time = "2025-06-10T00:02:54.63Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ee/d4f2ab688e057e90ded24384e34838086a9b09963389a5ba6854b5876598/cryptography-45.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b0a97c927497e3bc36b33987abb99bf17a9a175a19af38a892dc4bbb844d7ee2", size = 4572830, upload-time = "2025-06-10T00:02:56.689Z" }, + { url = "https://files.pythonhosted.org/packages/70/d4/994773a261d7ff98034f72c0e8251fe2755eac45e2265db4c866c1c6829c/cryptography-45.0.4-cp311-abi3-win32.whl", hash = "sha256:e00a6c10a5c53979d6242f123c0a97cff9f3abed7f064fc412c36dc521b5f257", size = 2932769, upload-time = "2025-06-10T00:02:58.467Z" }, + { url = "https://files.pythonhosted.org/packages/5a/42/c80bd0b67e9b769b364963b5252b17778a397cefdd36fa9aa4a5f34c599a/cryptography-45.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:817ee05c6c9f7a69a16200f0c90ab26d23a87701e2a284bd15156783e46dbcc8", size = 3410441, upload-time = "2025-06-10T00:03:00.14Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/0b/2488c89f3a30bc821c9d96eeacfcab6ff3accc08a9601ba03339c0fd05e5/cryptography-45.0.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:964bcc28d867e0f5491a564b7debb3ffdd8717928d315d12e0d7defa9e43b723", size = 7031836, upload-time = "2025-06-10T00:03:01.726Z" }, + { url = "https://files.pythonhosted.org/packages/fe/51/8c584ed426093aac257462ae62d26ad61ef1cbf5b58d8b67e6e13c39960e/cryptography-45.0.4-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6a5bf57554e80f75a7db3d4b1dacaa2764611ae166ab42ea9a72bcdb5d577637", size = 4195746, upload-time = "2025-06-10T00:03:03.94Z" }, + { url = "https://files.pythonhosted.org/packages/5c/7d/4b0ca4d7af95a704eef2f8f80a8199ed236aaf185d55385ae1d1610c03c2/cryptography-45.0.4-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:46cf7088bf91bdc9b26f9c55636492c1cce3e7aaf8041bbf0243f5e5325cfb2d", size = 4424456, upload-time = "2025-06-10T00:03:05.589Z" }, + { url = "https://files.pythonhosted.org/packages/1d/45/5fabacbc6e76ff056f84d9f60eeac18819badf0cefc1b6612ee03d4ab678/cryptography-45.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7bedbe4cc930fa4b100fc845ea1ea5788fcd7ae9562e669989c11618ae8d76ee", size = 4198495, upload-time = "2025-06-10T00:03:09.172Z" }, + { url = "https://files.pythonhosted.org/packages/55/b7/ffc9945b290eb0a5d4dab9b7636706e3b5b92f14ee5d9d4449409d010d54/cryptography-45.0.4-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:eaa3e28ea2235b33220b949c5a0d6cf79baa80eab2eb5607ca8ab7525331b9ff", size = 3885540, upload-time = "2025-06-10T00:03:10.835Z" }, + { url = "https://files.pythonhosted.org/packages/7f/e3/57b010282346980475e77d414080acdcb3dab9a0be63071efc2041a2c6bd/cryptography-45.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7ef2dde4fa9408475038fc9aadfc1fb2676b174e68356359632e980c661ec8f6", size = 4452052, upload-time = "2025-06-10T00:03:12.448Z" }, + { url = "https://files.pythonhosted.org/packages/37/e6/ddc4ac2558bf2ef517a358df26f45bc774a99bf4653e7ee34b5e749c03e3/cryptography-45.0.4-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:6a3511ae33f09094185d111160fd192c67aa0a2a8d19b54d36e4c78f651dc5ad", size = 4198024, upload-time = "2025-06-10T00:03:13.976Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c0/85fa358ddb063ec588aed4a6ea1df57dc3e3bc1712d87c8fa162d02a65fc/cryptography-45.0.4-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:06509dc70dd71fa56eaa138336244e2fbaf2ac164fc9b5e66828fccfd2b680d6", size = 4451442, upload-time = "2025-06-10T00:03:16.248Z" }, + { url = "https://files.pythonhosted.org/packages/33/67/362d6ec1492596e73da24e669a7fbbaeb1c428d6bf49a29f7a12acffd5dc/cryptography-45.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5f31e6b0a5a253f6aa49be67279be4a7e5a4ef259a9f33c69f7d1b1191939872", size = 4325038, upload-time = "2025-06-10T00:03:18.4Z" }, + { url = "https://files.pythonhosted.org/packages/53/75/82a14bf047a96a1b13ebb47fb9811c4f73096cfa2e2b17c86879687f9027/cryptography-45.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:944e9ccf67a9594137f942d5b52c8d238b1b4e46c7a0c2891b7ae6e01e7c80a4", size = 4560964, upload-time = "2025-06-10T00:03:20.06Z" }, + { url = "https://files.pythonhosted.org/packages/cd/37/1a3cba4c5a468ebf9b95523a5ef5651244693dc712001e276682c278fc00/cryptography-45.0.4-cp37-abi3-win32.whl", hash = "sha256:c22fe01e53dc65edd1945a2e6f0015e887f84ced233acecb64b4daadb32f5c97", size = 2924557, upload-time = "2025-06-10T00:03:22.563Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/4b/3256759723b7e66380397d958ca07c59cfc3fb5c794fb5516758afd05d41/cryptography-45.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:627ba1bc94f6adf0b0a2e35d87020285ead22d9f648c7e75bb64f367375f3b22", size = 3395508, upload-time = "2025-06-10T00:03:24.586Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ba/cf442ae99ef363855ed84b39e0fb3c106ac66b7a7703f3c9c9cfe05412cb/cryptography-45.0.4-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4828190fb6c4bcb6ebc6331f01fe66ae838bb3bd58e753b59d4b22eb444b996c", size = 3590512, upload-time = "2025-06-10T00:03:36.982Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a7d5bb87d149eb99a5abdc69a41e4e47b8001d767e5f403f78bfaafc7aa7/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03dbff8411206713185b8cebe31bc5c0eb544799a50c09035733716b386e61a4", size = 4146899, upload-time = "2025-06-10T00:03:38.659Z" }, + { url = "https://files.pythonhosted.org/packages/17/11/9361c2c71c42cc5c465cf294c8030e72fb0c87752bacbd7a3675245e3db3/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:51dfbd4d26172d31150d84c19bbe06c68ea4b7f11bbc7b3a5e146b367c311349", size = 4388900, upload-time = "2025-06-10T00:03:40.233Z" }, + { url = "https://files.pythonhosted.org/packages/c0/76/f95b83359012ee0e670da3e41c164a0c256aeedd81886f878911581d852f/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:0339a692de47084969500ee455e42c58e449461e0ec845a34a6a9b9bf7df7fb8", size = 4146422, upload-time = "2025-06-10T00:03:41.827Z" }, + { url = "https://files.pythonhosted.org/packages/09/ad/5429fcc4def93e577a5407988f89cf15305e64920203d4ac14601a9dc876/cryptography-45.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:0cf13c77d710131d33e63626bd55ae7c0efb701ebdc2b3a7952b9b23a0412862", size = 4388475, upload-time = "2025-06-10T00:03:43.493Z" }, + { url = "https://files.pythonhosted.org/packages/99/49/0ab9774f64555a1b50102757811508f5ace451cf5dc0a2d074a4b9deca6a/cryptography-45.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:bbc505d1dc469ac12a0a064214879eac6294038d6b24ae9f71faae1448a9608d", size = 3337594, upload-time = "2025-06-10T00:03:45.523Z" }, ] [[package]] @@ -1262,6 +1266,7 @@ dependencies = [ { name = "readabilipy" }, { name = "redis", extra = ["hiredis"] }, { name = "resend" }, + { name = "sendgrid" }, { name = "sentry-sdk", extra = ["flask"] }, { name = "sqlalchemy" }, { name = "starlette" }, @@ -1279,6 +1284,7 @@ dev = [ { name = "coverage" }, { name = "dotenv-linter" }, { name = "faker" }, + { name = "hypothesis" }, { name = "lxml-stubs" }, { name = "mypy" }, { name = "pandas-stubs" }, @@ -1317,6 +1323,7 @@ dev = [ { name = "types-pymysql" }, { name = "types-pyopenssl" }, { name = "types-python-dateutil" }, + { name = "types-python-http-client" }, { name = "types-pywin32" }, { name = "types-pyyaml" }, { name = "types-regex" }, @@ -1352,6 +1359,7 @@ vdb = [ { name = "clickhouse-connect" }, { name = "couchbase" }, { name = "elasticsearch" }, + { name = "mo-vector" }, { name = "opensearch-py" }, { name = "oracledb" }, { name = "pgvecto-rs", extra = ["sqlalchemy"] }, @@ -1437,6 +1445,7 @@ requires-dist = [ { name = "readabilipy", specifier = "~=0.3.0" }, { name = "redis", extras = ["hiredis"], specifier = "~=6.1.0" }, { name = "resend", specifier = "~=2.9.0" }, + { name = "sendgrid", specifier = "~=6.12.3" }, { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.28.0" }, { name = "sqlalchemy", specifier = "~=2.0.29" }, 
{ name = "starlette", specifier = "==0.41.0" }, @@ -1454,6 +1463,7 @@ dev = [ { name = "coverage", specifier = "~=7.2.4" }, { name = "dotenv-linter", specifier = "~=0.5.0" }, { name = "faker", specifier = "~=32.1.0" }, + { name = "hypothesis", specifier = ">=6.131.15" }, { name = "lxml-stubs", specifier = "~=0.5.1" }, { name = "mypy", specifier = "~=1.16.0" }, { name = "pandas-stubs", specifier = "~=2.2.3" }, @@ -1492,6 +1502,7 @@ dev = [ { name = "types-pymysql", specifier = "~=1.1.0" }, { name = "types-pyopenssl", specifier = ">=24.1.0" }, { name = "types-python-dateutil", specifier = "~=2.9.0" }, + { name = "types-python-http-client", specifier = ">=3.3.7.20240910" }, { name = "types-pywin32", specifier = "~=310.0.0" }, { name = "types-pyyaml", specifier = "~=6.0.12" }, { name = "types-regex", specifier = "~=2024.11.6" }, @@ -1527,6 +1538,7 @@ vdb = [ { name = "clickhouse-connect", specifier = "~=0.7.16" }, { name = "couchbase", specifier = "~=4.3.0" }, { name = "elasticsearch", specifier = "==8.14.0" }, + { name = "mo-vector", specifier = "~=0.1.13" }, { name = "opensearch-py", specifier = "==2.4.0" }, { name = "oracledb", specifier = "==3.0.0" }, { name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" }, @@ -1562,18 +1574,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] -[[package]] -name = "docker-pycreds" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c5/e6/d1f6c00b7221e2d7c4b470132c931325c8b22c51ca62417e300f5ce16009/docker-pycreds-0.4.0.tar.gz", hash = "sha256:6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4", size = 8754, upload-time = "2018-11-29T03:26:50.996Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/e8/f6bd1eee09314e7e6dee49cbe2c5e22314ccdb38db16c9fc72d2fa80d054/docker_pycreds-0.4.0-py2.py3-none-any.whl", hash = "sha256:7266112468627868005106ec19cd0d722702d2b7d5912a28e19b826c3d37af49", size = 8982, upload-time = "2018-11-29T03:26:49.575Z" }, -] - [[package]] name = "docstring-parser" version = "0.16" @@ -1608,6 +1608,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" }, ] +[[package]] +name = "ecdsa" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, +] + [[package]] name = "elastic-transport" version = "8.17.1" @@ -1749,15 +1761,15 @@ wheels = [ [[package]] 
name = "flask-cors" -version = "6.0.0" +version = "6.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "flask" }, { name = "werkzeug" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/20/e7/b3c6afdd984672b55dff07482699c688af6c01bd7fd5dd55f9c9d1a88d1c/flask_cors-6.0.0.tar.gz", hash = "sha256:4592c1570246bf7beee96b74bc0adbbfcb1b0318f6ba05c412e8909eceec3393", size = 11875, upload-time = "2025-05-17T14:35:16.98Z" } +sdist = { url = "https://files.pythonhosted.org/packages/76/37/bcfa6c7d5eec777c4c7cf45ce6b27631cebe5230caf88d85eadd63edd37a/flask_cors-6.0.1.tar.gz", hash = "sha256:d81bcb31f07b0985be7f48406247e9243aced229b7747219160a0559edd678db", size = 13463, upload-time = "2025-06-11T01:32:08.518Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/f0/0ee29090016345938f016ee98aa8b5de1c500ee93491dc0c76495848fca1/flask_cors-6.0.0-py3-none-any.whl", hash = "sha256:6332073356452343a8ccddbfec7befdc3fdd040141fe776ec9b94c262f058657", size = 11549, upload-time = "2025-05-17T14:35:15.766Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/01bf35a3afd734345528f98d0353f2a978a476528ad4d7e78b70c4d149dd/flask_cors-6.0.1-py3-none-any.whl", hash = "sha256:c7b2cbfb1a31aa0d2e5341eea03a6805349f7a61647daee1a15c46bbe981494c", size = 13244, upload-time = "2025-06-11T01:32:07.352Z" }, ] [[package]] @@ -1826,45 +1838,45 @@ wheels = [ [[package]] name = "frozenlist" -version = "1.6.0" +version = "1.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/f4/d744cba2da59b5c1d88823cf9e8a6c74e4659e2b27604ed973be2a0bf5ab/frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68", size = 42831, upload-time = "2025-04-17T22:38:53.099Z" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/b5/bc883b5296ec902115c00be161da93bf661199c465ec4c483feec6ea4c32/frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d", size = 160912, upload-time = "2025-04-17T22:36:17.235Z" }, - { url = "https://files.pythonhosted.org/packages/6f/93/51b058b563d0704b39c56baa222828043aafcac17fd3734bec5dbeb619b1/frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0", size = 124315, upload-time = "2025-04-17T22:36:18.735Z" }, - { url = "https://files.pythonhosted.org/packages/c9/e0/46cd35219428d350558b874d595e132d1c17a9471a1bd0d01d518a261e7c/frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe", size = 122230, upload-time = "2025-04-17T22:36:20.6Z" }, - { url = "https://files.pythonhosted.org/packages/d1/0f/7ad2ce928ad06d6dd26a61812b959ded573d3e9d0ee6109d96c2be7172e9/frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba", size = 314842, upload-time = "2025-04-17T22:36:22.088Z" }, - { url = 
"https://files.pythonhosted.org/packages/34/76/98cbbd8a20a5c3359a2004ae5e5b216af84a150ccbad67c8f8f30fb2ea91/frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595", size = 304919, upload-time = "2025-04-17T22:36:24.247Z" }, - { url = "https://files.pythonhosted.org/packages/9a/fa/258e771ce3a44348c05e6b01dffc2bc67603fba95761458c238cd09a2c77/frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a", size = 324074, upload-time = "2025-04-17T22:36:26.291Z" }, - { url = "https://files.pythonhosted.org/packages/d5/a4/047d861fd8c538210e12b208c0479912273f991356b6bdee7ea8356b07c9/frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626", size = 321292, upload-time = "2025-04-17T22:36:27.909Z" }, - { url = "https://files.pythonhosted.org/packages/c0/25/cfec8af758b4525676cabd36efcaf7102c1348a776c0d1ad046b8a7cdc65/frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff", size = 301569, upload-time = "2025-04-17T22:36:29.448Z" }, - { url = "https://files.pythonhosted.org/packages/87/2f/0c819372fa9f0c07b153124bf58683b8d0ca7bb73ea5ccde9b9ef1745beb/frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a", size = 313625, upload-time = "2025-04-17T22:36:31.55Z" }, - { url = "https://files.pythonhosted.org/packages/50/5f/f0cf8b0fdedffdb76b3745aa13d5dbe404d63493cc211ce8250f2025307f/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0", size = 312523, upload-time = "2025-04-17T22:36:33.078Z" }, - { url = "https://files.pythonhosted.org/packages/e1/6c/38c49108491272d3e84125bbabf2c2d0b304899b52f49f0539deb26ad18d/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606", size = 322657, upload-time = "2025-04-17T22:36:34.688Z" }, - { url = "https://files.pythonhosted.org/packages/bd/4b/3bd3bad5be06a9d1b04b1c22be80b5fe65b502992d62fab4bdb25d9366ee/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584", size = 303414, upload-time = "2025-04-17T22:36:36.363Z" }, - { url = "https://files.pythonhosted.org/packages/5b/89/7e225a30bef6e85dbfe22622c24afe932e9444de3b40d58b1ea589a14ef8/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a", size = 320321, upload-time = "2025-04-17T22:36:38.16Z" }, - { url = "https://files.pythonhosted.org/packages/22/72/7e3acef4dd9e86366cb8f4d8f28e852c2b7e116927e9722b31a6f71ea4b0/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1", size = 323975, upload-time = "2025-04-17T22:36:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/d8/85/e5da03d20507e13c66ce612c9792b76811b7a43e3320cce42d95b85ac755/frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e", size = 316553, upload-time = "2025-04-17T22:36:42.045Z" }, - { url = "https://files.pythonhosted.org/packages/ac/8e/6c609cbd0580ae8a0661c408149f196aade7d325b1ae7adc930501b81acb/frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860", size = 115511, upload-time = "2025-04-17T22:36:44.067Z" }, - { url = "https://files.pythonhosted.org/packages/f2/13/a84804cfde6de12d44ed48ecbf777ba62b12ff09e761f76cdd1ff9e14bb1/frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603", size = 120863, upload-time = "2025-04-17T22:36:45.465Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8a/289b7d0de2fbac832ea80944d809759976f661557a38bb8e77db5d9f79b7/frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1", size = 160193, upload-time = "2025-04-17T22:36:47.382Z" }, - { url = "https://files.pythonhosted.org/packages/19/80/2fd17d322aec7f430549f0669f599997174f93ee17929ea5b92781ec902c/frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29", size = 123831, upload-time = "2025-04-17T22:36:49.401Z" }, - { url = "https://files.pythonhosted.org/packages/99/06/f5812da431273f78c6543e0b2f7de67dfd65eb0a433978b2c9c63d2205e4/frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25", size = 121862, upload-time = "2025-04-17T22:36:51.899Z" }, - { url = "https://files.pythonhosted.org/packages/d0/31/9e61c6b5fc493cf24d54881731204d27105234d09878be1a5983182cc4a5/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576", size = 316361, upload-time = "2025-04-17T22:36:53.402Z" }, - { url = "https://files.pythonhosted.org/packages/9d/55/22ca9362d4f0222324981470fd50192be200154d51509ee6eb9baa148e96/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8", size = 307115, upload-time = "2025-04-17T22:36:55.016Z" }, - { url = "https://files.pythonhosted.org/packages/ae/39/4fff42920a57794881e7bb3898dc7f5f539261711ea411b43bba3cde8b79/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9", size = 322505, upload-time = "2025-04-17T22:36:57.12Z" }, - { url = "https://files.pythonhosted.org/packages/55/f2/88c41f374c1e4cf0092a5459e5f3d6a1e17ed274c98087a76487783df90c/frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e", size = 322666, upload-time = "2025-04-17T22:36:58.735Z" }, - { url = "https://files.pythonhosted.org/packages/75/51/034eeb75afdf3fd03997856195b500722c0b1a50716664cde64e28299c4b/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590", size = 302119, upload-time = "2025-04-17T22:37:00.512Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/a6/564ecde55ee633270a793999ef4fd1d2c2b32b5a7eec903b1012cb7c5143/frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103", size = 316226, upload-time = "2025-04-17T22:37:02.102Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c8/6c0682c32377f402b8a6174fb16378b683cf6379ab4d2827c580892ab3c7/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c", size = 312788, upload-time = "2025-04-17T22:37:03.578Z" }, - { url = "https://files.pythonhosted.org/packages/b6/b8/10fbec38f82c5d163ca1750bfff4ede69713badf236a016781cf1f10a0f0/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821", size = 325914, upload-time = "2025-04-17T22:37:05.213Z" }, - { url = "https://files.pythonhosted.org/packages/62/ca/2bf4f3a1bd40cdedd301e6ecfdbb291080d5afc5f9ce350c0739f773d6b9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70", size = 305283, upload-time = "2025-04-17T22:37:06.985Z" }, - { url = "https://files.pythonhosted.org/packages/09/64/20cc13ccf94abc2a1f482f74ad210703dc78a590d0b805af1c9aa67f76f9/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f", size = 319264, upload-time = "2025-04-17T22:37:08.618Z" }, - { url = "https://files.pythonhosted.org/packages/20/ff/86c6a2bbe98cfc231519f5e6d712a0898488ceac804a917ce014f32e68f6/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046", size = 326482, upload-time = "2025-04-17T22:37:10.196Z" }, - { url = "https://files.pythonhosted.org/packages/2f/da/8e381f66367d79adca245d1d71527aac774e30e291d41ef161ce2d80c38e/frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770", size = 318248, upload-time = "2025-04-17T22:37:12.284Z" }, - { url = "https://files.pythonhosted.org/packages/39/24/1a1976563fb476ab6f0fa9fefaac7616a4361dbe0461324f9fd7bf425dbe/frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc", size = 115161, upload-time = "2025-04-17T22:37:13.902Z" }, - { url = "https://files.pythonhosted.org/packages/80/2e/fb4ed62a65f8cd66044706b1013f0010930d8cbb0729a2219561ea075434/frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878", size = 120548, upload-time = "2025-04-17T22:37:15.326Z" }, - { url = "https://files.pythonhosted.org/packages/71/3e/b04a0adda73bd52b390d730071c0d577073d3d26740ee1bad25c3ad0f37b/frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191", size = 12404, upload-time = "2025-04-17T22:38:51.668Z" }, + { url = "https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = "2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = "https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, ] [[package]] @@ -2221,28 +2233,28 @@ wheels = [ [[package]] name = "greenlet" -version = "3.2.2" +version = "3.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/c1/a82edae11d46c0d83481aacaa1e578fea21d94a1ef400afd734d47ad95ad/greenlet-3.2.2.tar.gz", hash = "sha256:ad053d34421a2debba45aa3cc39acf454acbcd025b3fc1a9f8a0dee237abd485", size = 185797, upload-time = "2025-05-09T19:47:35.066Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/9f/a47e19261747b562ce88219e5ed8c859d42c6e01e73da6fbfa3f08a7be13/greenlet-3.2.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:dcb9cebbf3f62cb1e5afacae90761ccce0effb3adaa32339a0670fe7805d8068", size = 268635, upload-time = "2025-05-09T14:50:39.007Z" }, - { url = "https://files.pythonhosted.org/packages/11/80/a0042b91b66975f82a914d515e81c1944a3023f2ce1ed7a9b22e10b46919/greenlet-3.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf3fc9145141250907730886b031681dfcc0de1c158f3cc51c092223c0f381ce", size = 628786, upload-time = "2025-05-09T15:24:00.692Z" }, - { url = "https://files.pythonhosted.org/packages/38/a2/8336bf1e691013f72a6ebab55da04db81a11f68e82bb691f434909fa1327/greenlet-3.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:efcdfb9df109e8a3b475c016f60438fcd4be68cd13a365d42b35914cdab4bb2b", size = 640866, upload-time = "2025-05-09T15:24:48.153Z" }, - { url = "https://files.pythonhosted.org/packages/f8/7e/f2a3a13e424670a5d08826dab7468fa5e403e0fbe0b5f951ff1bc4425b45/greenlet-3.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd139e4943547ce3a56ef4b8b1b9479f9e40bb47e72cc906f0f66b9d0d5cab3", size = 636752, upload-time = "2025-05-09T15:29:23.182Z" }, - { url = "https://files.pythonhosted.org/packages/fd/5d/ce4a03a36d956dcc29b761283f084eb4a3863401c7cb505f113f73af8774/greenlet-3.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71566302219b17ca354eb274dfd29b8da3c268e41b646f330e324e3967546a74", size = 636028, upload-time = "2025-05-09T14:53:32.854Z" }, - { url = "https://files.pythonhosted.org/packages/4b/29/b130946b57e3ceb039238413790dd3793c5e7b8e14a54968de1fe449a7cf/greenlet-3.2.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3091bc45e6b0c73f225374fefa1536cd91b1e987377b12ef5b19129b07d93ebe", size = 583869, upload-time = "2025-05-09T14:53:43.614Z" }, - { url = "https://files.pythonhosted.org/packages/ac/30/9f538dfe7f87b90ecc75e589d20cbd71635531a617a336c386d775725a8b/greenlet-3.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:44671c29da26539a5f142257eaba5110f71887c24d40df3ac87f1117df589e0e", size = 1112886, upload-time = "2025-05-09T15:27:01.304Z" }, - { url = "https://files.pythonhosted.org/packages/be/92/4b7deeb1a1e9c32c1b59fdca1cac3175731c23311ddca2ea28a8b6ada91c/greenlet-3.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c23ea227847c9dbe0b3910f5c0dd95658b607137614eb821e6cbaecd60d81cc6", size = 1138355, upload-time = "2025-05-09T14:53:58.011Z" }, - { url = "https://files.pythonhosted.org/packages/c5/eb/7551c751a2ea6498907b2fcbe31d7a54b602ba5e8eb9550a9695ca25d25c/greenlet-3.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:0a16fb934fcabfdfacf21d79e6fed81809d8cd97bc1be9d9c89f0e4567143d7b", size = 295437, upload-time = "2025-05-09T15:00:57.733Z" }, - { url = "https://files.pythonhosted.org/packages/2c/a1/88fdc6ce0df6ad361a30ed78d24c86ea32acb2b563f33e39e927b1da9ea0/greenlet-3.2.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:df4d1509efd4977e6a844ac96d8be0b9e5aa5d5c77aa27ca9f4d3f92d3fcf330", size = 270413, upload-time = "2025-05-09T14:51:32.455Z" }, - { url = "https://files.pythonhosted.org/packages/a6/2e/6c1caffd65490c68cd9bcec8cb7feb8ac7b27d38ba1fea121fdc1f2331dc/greenlet-3.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da956d534a6d1b9841f95ad0f18ace637668f680b1339ca4dcfb2c1837880a0b", size = 637242, upload-time = "2025-05-09T15:24:02.63Z" }, - { url = "https://files.pythonhosted.org/packages/98/28/088af2cedf8823b6b7ab029a5626302af4ca1037cf8b998bed3a8d3cb9e2/greenlet-3.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c7b15fb9b88d9ee07e076f5a683027bc3befd5bb5d25954bb633c385d8b737e", size = 651444, upload-time = "2025-05-09T15:24:49.856Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9f/0116ab876bb0bc7a81eadc21c3f02cd6100dcd25a1cf2a085a130a63a26a/greenlet-3.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:752f0e79785e11180ebd2e726c8a88109ded3e2301d40abced2543aa5d164275", size = 646067, upload-time = "2025-05-09T15:29:24.989Z" }, - { url = "https://files.pythonhosted.org/packages/35/17/bb8f9c9580e28a94a9575da847c257953d5eb6e39ca888239183320c1c28/greenlet-3.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae572c996ae4b5e122331e12bbb971ea49c08cc7c232d1bd43150800a2d6c65", size = 648153, upload-time = "2025-05-09T14:53:34.716Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ee/7f31b6f7021b8df6f7203b53b9cc741b939a2591dcc6d899d8042fcf66f2/greenlet-3.2.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02f5972ff02c9cf615357c17ab713737cccfd0eaf69b951084a9fd43f39833d3", size = 603865, upload-time = "2025-05-09T14:53:45.738Z" }, - { url = "https://files.pythonhosted.org/packages/b5/2d/759fa59323b521c6f223276a4fc3d3719475dc9ae4c44c2fe7fc750f8de0/greenlet-3.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4fefc7aa68b34b9224490dfda2e70ccf2131368493add64b4ef2d372955c207e", size = 1119575, upload-time = "2025-05-09T15:27:04.248Z" }, - { url = "https://files.pythonhosted.org/packages/30/05/356813470060bce0e81c3df63ab8cd1967c1ff6f5189760c1a4734d405ba/greenlet-3.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a31ead8411a027c2c4759113cf2bd473690517494f3d6e4bf67064589afcd3c5", size = 1147460, upload-time = "2025-05-09T14:54:00.315Z" }, - { url = "https://files.pythonhosted.org/packages/07/f4/b2a26a309a04fb844c7406a4501331b9400e1dd7dd64d3450472fd47d2e1/greenlet-3.2.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:b24c7844c0a0afc3ccbeb0b807adeefb7eff2b5599229ecedddcfeb0ef333bec", size = 296239, upload-time = "2025-05-09T14:57:17.633Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, + { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, + { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, + { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, + { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, + { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, + { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, + { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, + { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, ] [[package]] @@ -2364,17 +2376,17 @@ wheels = [ [[package]] name = "hf-xet" -version = "1.1.2" +version = "1.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/be/58f20728a5b445f8b064e74f0618897b3439f5ef90934da1916b9dfac76f/hf_xet-1.1.2.tar.gz", hash = "sha256:3712d6d4819d3976a1c18e36db9f503e296283f9363af818f50703506ed63da3", size = 467009, upload-time = "2025-05-16T20:44:34.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/11/b480bb7515db97d5b2b703927a59bbdd3f87e68d47dff5591aada467b4a9/hf_xet-1.1.4.tar.gz", hash = "sha256:875158df90cb13547752532ed73cad9dfaad3b29e203143838f67178418d08a4", size = 492082, upload-time = "2025-06-16T21:20:51.375Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/ae/f1a63f75d9886f18a80220ba31a1c7b9c4752f03aae452f358f538c6a991/hf_xet-1.1.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = 
"sha256:dfd1873fd648488c70735cb60f7728512bca0e459e61fcd107069143cd798469", size = 2642559, upload-time = "2025-05-16T20:44:30.217Z" }, - { url = "https://files.pythonhosted.org/packages/50/ab/d2c83ae18f1015d926defd5bfbe94c62d15e93f900e6a192e318ee947105/hf_xet-1.1.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:29b584983b2d977c44157d9241dcf0fd50acde0b7bff8897fe4386912330090d", size = 2541360, upload-time = "2025-05-16T20:44:29.056Z" }, - { url = "https://files.pythonhosted.org/packages/9f/a7/693dc9f34f979e30a378125e2150a0b2d8d166e6d83ce3950eeb81e560aa/hf_xet-1.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b29ac84298147fe9164cc55ad994ba47399f90b5d045b0b803b99cf5f06d8ec", size = 5183081, upload-time = "2025-05-16T20:44:27.505Z" }, - { url = "https://files.pythonhosted.org/packages/3d/23/c48607883f692a36c0a7735f47f98bad32dbe459a32d1568c0f21576985d/hf_xet-1.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d921ba32615676e436a0d15e162331abc9ed43d440916b1d836dc27ce1546173", size = 5356100, upload-time = "2025-05-16T20:44:25.681Z" }, - { url = "https://files.pythonhosted.org/packages/eb/5b/b2316c7f1076da0582b52ea228f68bea95e243c388440d1dc80297c9d813/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d9b03c34e13c44893ab6e8fea18ee8d2a6878c15328dd3aabedbdd83ee9f2ed3", size = 5647688, upload-time = "2025-05-16T20:44:31.867Z" }, - { url = "https://files.pythonhosted.org/packages/2c/98/e6995f0fa579929da7795c961f403f4ee84af36c625963f52741d56f242c/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01b18608955b3d826307d37da8bd38b28a46cd2d9908b3a3655d1363274f941a", size = 5322627, upload-time = "2025-05-16T20:44:33.677Z" }, - { url = "https://files.pythonhosted.org/packages/59/40/8f1d5a44a64d8bf9e3c19576e789f716af54875b46daae65426714e75db1/hf_xet-1.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:3562902c81299b09f3582ddfb324400c6a901a2f3bc854f83556495755f4954c", size = 2739542, upload-time = "2025-05-16T20:44:36.287Z" }, + { url = "https://files.pythonhosted.org/packages/c4/62/3b41a7439930996530c64955874445012fd9044c82c60b34c5891c34fec6/hf_xet-1.1.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6591ab9f61ea82d261107ed90237e2ece972f6a7577d96f5f071208bbf255d1c", size = 2643151, upload-time = "2025-06-16T21:20:45.656Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9f/1744fb1d79e0ac147578b193ce29208ebb9f4636e8cdf505638f6f0a6874/hf_xet-1.1.4-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:071b0b4d4698990f746edd666c7cc42555833d22035d88db0df936677fb57d29", size = 2510687, upload-time = "2025-06-16T21:20:43.754Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/49a81d4f81b0d21cc758b6fca3880a85ca0d209e8425c8b3a6ef694881ca/hf_xet-1.1.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5b610831e92e41182d4c028653978b844d332d492cdcba1b920d3aca4a0207e", size = 3057631, upload-time = "2025-06-16T21:20:42.006Z" }, + { url = "https://files.pythonhosted.org/packages/bf/8b/65fa08273789dafbc38d0f0bdd20df56b63ebc6566981bbaa255d9d84a33/hf_xet-1.1.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f6578bcd71393abfd60395279cc160ca808b61f5f9d535b922fcdcd3f77a708d", size = 2949250, upload-time = "2025-06-16T21:20:39.914Z" }, + { url = "https://files.pythonhosted.org/packages/8b/4b/224340bb1d5c63b6e03e04095b4e42230848454bf4293c45cd7bdaa0c208/hf_xet-1.1.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fb2bbfa2aae0e4f0baca988e7ba8d8c1a39a25adf5317461eb7069ad00505b3e", size = 3124670, upload-time = 
"2025-06-16T21:20:47.688Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b7/4be010014de6585401c32a04c46b09a4a842d66bd16ed549a401e973b74b/hf_xet-1.1.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:73346ba3e2e15ea8909a26b0862b458f15b003e6277935e3fba5bf273508d698", size = 3234131, upload-time = "2025-06-16T21:20:49.535Z" }, + { url = "https://files.pythonhosted.org/packages/c2/2d/cf148d532f741fbf93f380ff038a33c1309d1e24ea629dc39d11dca08c92/hf_xet-1.1.4-cp37-abi3-win_amd64.whl", hash = "sha256:52e8f8bc2029d8b911493f43cea131ac3fa1f0dc6a13c50b593c4516f02c6fc3", size = 2695589, upload-time = "2025-06-16T21:20:53.151Z" }, ] [[package]] @@ -2510,7 +2522,7 @@ socks = [ [[package]] name = "huggingface-hub" -version = "0.32.3" +version = "0.33.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -2522,9 +2534,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/59/74/c4961b31e0f142a032ea24f477c3a7524dfabfd8126398a968b3cc6bf804/huggingface_hub-0.32.3.tar.gz", hash = "sha256:752c889ebf3a63cbd39803f6d87ccc135a463bbcb36abfa2faff0ccbf1cec087", size = 424525, upload-time = "2025-05-30T08:23:56.042Z" } +sdist = { url = "https://files.pythonhosted.org/packages/91/8a/1362d565fefabaa4185cf3ae842a98dbc5b35146f5694f7080f043a6952f/huggingface_hub-0.33.0.tar.gz", hash = "sha256:aa31f70d29439d00ff7a33837c03f1f9dd83971ce4e29ad664d63ffb17d3bb97", size = 426179, upload-time = "2025-06-11T17:08:07.913Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/df/dc/4f4d8080cbce7a38c1d0f1ba4932f9134480b9761af8ef4c65d49254b2bd/huggingface_hub-0.32.3-py3-none-any.whl", hash = "sha256:e46f7ea7fe2b5e5f67cc4e37eb201140091946a314d7c2b134a9673dadd80b6a", size = 512094, upload-time = "2025-05-30T08:23:54.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/53587a89fbc00799e4179796f51b3ad713c5de6bb680b2becb6d37c94649/huggingface_hub-0.33.0-py3-none-any.whl", hash = "sha256:e8668875b40c68f9929150d99727d39e5ebb8a05a98e4191b908dc7ded9074b3", size = 514799, upload-time = "2025-06-11T17:08:05.757Z" }, ] [[package]] @@ -2548,6 +2560,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, ] +[[package]] +name = "hypothesis" +version = "6.131.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/6f/1e291f80627f3e043b19a86f9f6b172b910e3575577917d3122a6558410d/hypothesis-6.131.15.tar.gz", hash = "sha256:11849998ae5eecc8c586c6c98e47677fcc02d97475065f62768cfffbcc15ef7a", size = 436596, upload-time = "2025-05-07T23:04:25.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/c7/78597bcec48e1585ea9029deb2bf2341516e90dd615a3db498413d68a4cc/hypothesis-6.131.15-py3-none-any.whl", hash = "sha256:e02e67e9f3cfd4cd4a67ccc03bf7431beccc1a084c5e90029799ddd36ce006d7", size = 501128, upload-time = "2025-05-07T23:04:22.045Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -2675,11 +2700,11 @@ wheels = [ [[package]] name = "json-repair" -version = "0.46.0" +version = "0.46.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/79/5a/5c14f14735438eb27fd4eb9bb4eb273c996c0d1d959182382cbc618f99dd/json_repair-0.46.0.tar.gz", hash = "sha256:abc751162baf8e384685558acba978478e833c1207be31468d9babfaf8029ab6", size = 33321, upload-time = "2025-05-22T06:22:07.305Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/74/f8e4eb4ce31be034c08fd3da37328c9ab7a7503831cf6f41d2121699cc88/json_repair-0.46.2.tar.gz", hash = "sha256:4c81154d61c028ca3750b451472dbb33978f2ee6f44be84c42b444b03d9f4b16", size = 33605, upload-time = "2025-06-06T08:05:48.46Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/ea/8ca71883f10acf4449720b86dd85cea51d7a71bec2558d5275ae0dcc66e4/json_repair-0.46.0-py3-none-any.whl", hash = "sha256:54d6a9889fba0846b80befb2b1aca619103ad3ed74612fb3fedd965a4a3b1653", size = 22255, upload-time = "2025-05-22T06:22:06.206Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/5f31df5ad00474f3005bbbac5f3a1e8d36535b40f1d352e6a5bd9880bf1f/json_repair-0.46.2-py3-none-any.whl", hash = "sha256:21fb339de583ab68db4272f984ec6fca9cc453d8117d9870e83c28b6b56c20e6", size = 22326, upload-time = "2025-06-06T08:05:47.064Z" }, ] [[package]] @@ -2726,7 +2751,7 @@ wheels = [ [[package]] name = "kubernetes" -version = "32.0.1" +version = "33.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -2741,9 +2766,9 @@ dependencies = [ { name = "urllib3" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/e8/0598f0e8b4af37cd9b10d8b87386cf3173cb8045d834ab5f6ec347a758b3/kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28", size = 946691, upload-time = "2025-02-18T21:06:34.148Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/52/19ebe8004c243fdfa78268a96727c71e08f00ff6fe69a301d0b7fcbce3c2/kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993", size = 1036779, upload-time = "2025-06-09T21:57:58.521Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/10/9f8af3e6f569685ce3af7faab51c8dd9d93b9c38eba339ca31c746119447/kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998", size = 1988070, upload-time = "2025-02-18T21:06:31.391Z" }, + { url = "https://files.pythonhosted.org/packages/89/43/d9bebfc3db7dea6ec80df5cb2aad8d274dd18ec2edd6c4f21f32c237cbbb/kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5", size = 1941335, upload-time = "2025-06-09T21:57:56.327Z" }, ] [[package]] @@ -2789,53 +2814,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812, upload-time = "2024-11-27T17:32:39.569Z" }, ] -[[package]] -name = "levenshtein" -version = "0.27.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "rapidfuzz" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7e/b3/b5f8011483ba9083a0bc74c4d58705e9cf465fbe55c948a1b1357d0a2aa8/levenshtein-0.27.1.tar.gz", hash = "sha256:3e18b73564cfc846eec94dd13fab6cb006b5d2e0cc56bad1fd7d5585881302e3", size = 382571, upload-time = "2025-03-02T19:44:56.148Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/22/84/110136e740655779aceb0da2399977362f21b2dbf3ea3646557f9c2237c4/levenshtein-0.27.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e6f1760108319a108dceb2f02bc7cdb78807ad1f9c673c95eaa1d0fe5dfcaae", size = 174555, upload-time = "2025-03-02T19:42:51.781Z" }, - { url = "https://files.pythonhosted.org/packages/19/5b/176d96959f5c5969f356d8856f8e20d2e72f7e4879f6d1cda8e5c2ac2614/levenshtein-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c4ed8400d94ab348099395e050b8ed9dd6a5d6b5b9e75e78b2b3d0b5f5b10f38", size = 156286, upload-time = "2025-03-02T19:42:53.106Z" }, - { url = "https://files.pythonhosted.org/packages/2a/2d/a75abaafc8a46b0dc52ab14dc96708989a31799a02a4914f9210c3415f04/levenshtein-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7826efe51be8ff58bc44a633e022fdd4b9fc07396375a6dbc4945a3bffc7bf8f", size = 152413, upload-time = "2025-03-02T19:42:55.129Z" }, - { url = "https://files.pythonhosted.org/packages/9a/5f/533f4adf964b10817a1d0ecca978b3542b3b9915c96172d20162afe18bed/levenshtein-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff5afb78719659d353055863c7cb31599fbea6865c0890b2d840ee40214b3ddb", size = 184236, upload-time = "2025-03-02T19:42:56.427Z" }, - { url = "https://files.pythonhosted.org/packages/02/79/e698623795e36e0d166a3aa1eac6fe1e446cac3a5c456664a95c351571d1/levenshtein-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:201dafd5c004cd52018560cf3213da799534d130cf0e4db839b51f3f06771de0", size = 185502, upload-time = "2025-03-02T19:42:57.596Z" }, - { url = "https://files.pythonhosted.org/packages/ac/94/76b64762f4af6e20bbab79713c4c48783240e6e502b2f52e5037ddda688a/levenshtein-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5ddd59f3cfaec216811ee67544779d9e2d6ed33f79337492a248245d6379e3d", size = 161749, upload-time = "2025-03-02T19:42:59.222Z" }, - { url = "https://files.pythonhosted.org/packages/56/d0/d10eff9224c94a478078a469aaeb43471fdeddad035f443091224c7544b8/levenshtein-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6afc241d27ecf5b921063b796812c55b0115423ca6fa4827aa4b1581643d0a65", size = 246686, upload-time = "2025-03-02T19:43:00.454Z" }, - { url = "https://files.pythonhosted.org/packages/b2/8a/ebbeff74461da3230d00e8a8197480a2ea1a9bbb7dbc273214d7ea3896cb/levenshtein-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee2e766277cceb8ca9e584ea03b8dc064449ba588d3e24c1923e4b07576db574", size = 1116616, upload-time = "2025-03-02T19:43:02.431Z" }, - { url = "https://files.pythonhosted.org/packages/1d/9b/e7323684f833ede13113fba818c3afe665a78b47d720afdeb2e530c1ecb3/levenshtein-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:920b23d6109453913ce78ec451bc402ff19d020ee8be4722e9d11192ec2fac6f", size = 1401483, upload-time = "2025-03-02T19:43:04.62Z" }, - { url = "https://files.pythonhosted.org/packages/ef/1d/9b6ab30ff086a33492d6f7de86a07050b15862ccf0d9feeccfbe26af52d8/levenshtein-0.27.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:560d7edba126e2eea3ac3f2f12e7bd8bc9c6904089d12b5b23b6dfa98810b209", size = 1225805, upload-time = "2025-03-02T19:43:06.734Z" }, - { url = "https://files.pythonhosted.org/packages/1b/07/ae2f31e87ff65ba4857e25192646f1f3c8cca83c2ac1c27e551215b7e1b6/levenshtein-0.27.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:8d5362b6c7aa4896dc0cb1e7470a4ad3c06124e0af055dda30d81d3c5549346b", size = 1419860, upload-time = "2025-03-02T19:43:08.084Z" }, - { url = "https://files.pythonhosted.org/packages/43/d2/dfcc5c22c07bab9be99f3f47a907be583bcd37bfd2eec57a205e59671019/levenshtein-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:65ba880815b0f80a80a293aeebac0fab8069d03ad2d6f967a886063458f9d7a1", size = 1188823, upload-time = "2025-03-02T19:43:09.592Z" }, - { url = "https://files.pythonhosted.org/packages/8b/96/713335623f8ab50eba0627c8685618dc3a985aedaaea9f492986b9443551/levenshtein-0.27.1-cp311-cp311-win32.whl", hash = "sha256:fcc08effe77fec0bc5b0f6f10ff20b9802b961c4a69047b5499f383119ddbe24", size = 88156, upload-time = "2025-03-02T19:43:11.442Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ae/444d6e8ba9a35379a56926716f18bb2e77c6cf69e5324521fbe6885f14f6/levenshtein-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:0ed402d8902be7df212ac598fc189f9b2d520817fdbc6a05e2ce44f7f3ef6857", size = 100399, upload-time = "2025-03-02T19:43:13.066Z" }, - { url = "https://files.pythonhosted.org/packages/80/c0/ff226897a238a2deb2ca2c00d658755a1aa01884b0ddc8f5d406cb5f2b0d/levenshtein-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:7fdaab29af81a8eb981043737f42450efca64b9761ca29385487b29c506da5b5", size = 88033, upload-time = "2025-03-02T19:43:14.211Z" }, - { url = "https://files.pythonhosted.org/packages/0d/73/84a7126b9e6441c2547f1fbfd65f3c15c387d1fc04e0dd1d025a12107771/levenshtein-0.27.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:25fb540d8c55d1dc7bdc59b7de518ea5ed9df92eb2077e74bcb9bb6de7b06f69", size = 173953, upload-time = "2025-03-02T19:43:16.029Z" }, - { url = "https://files.pythonhosted.org/packages/8f/5c/06c01870c0cf336f9f29397bbfbfbbfd3a59918868716e7bb15828e89367/levenshtein-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f09cfab6387e9c908c7b37961c045e8e10eb9b7ec4a700367f8e080ee803a562", size = 156399, upload-time = "2025-03-02T19:43:17.233Z" }, - { url = "https://files.pythonhosted.org/packages/c7/4a/c1d3f27ec8b3fff5a96617251bf3f61c67972869ac0a0419558fc3e2cbe6/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dafa29c0e616f322b574e0b2aeb5b1ff2f8d9a1a6550f22321f3bd9bb81036e3", size = 151061, upload-time = "2025-03-02T19:43:18.414Z" }, - { url = "https://files.pythonhosted.org/packages/4d/8f/2521081e9a265891edf46aa30e1b59c1f347a452aed4c33baafbec5216fa/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be7a7642ea64392fa1e6ef7968c2e50ef2152c60948f95d0793361ed97cf8a6f", size = 183119, upload-time = "2025-03-02T19:43:19.975Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a0/a63e3bce6376127596d04be7f57e672d2f3d5f540265b1e30b9dd9b3c5a9/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:060b48c45ed54bcea9582ce79c6365b20a1a7473767e0b3d6be712fa3a22929c", size = 185352, upload-time = "2025-03-02T19:43:21.424Z" }, - { url = "https://files.pythonhosted.org/packages/17/8c/8352e992063952b38fb61d49bad8d193a4a713e7eeceb3ae74b719d7863d/levenshtein-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:712f562c5e64dd0398d3570fe99f8fbb88acec7cc431f101cb66c9d22d74c542", size = 159879, upload-time = "2025-03-02T19:43:22.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/b4/564866e2038acf47c3de3e9292fc7fc7cc18d2593fedb04f001c22ac6e15/levenshtein-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6141ad65cab49aa4527a3342d76c30c48adb2393b6cdfeca65caae8d25cb4b8", size = 245005, upload-time = "2025-03-02T19:43:24.069Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f9/7367f87e3a6eed282f3654ec61a174b4d1b78a7a73f2cecb91f0ab675153/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:799b8d73cda3265331116f62932f553804eae16c706ceb35aaf16fc2a704791b", size = 1116865, upload-time = "2025-03-02T19:43:25.4Z" }, - { url = "https://files.pythonhosted.org/packages/f5/02/b5b3bfb4b4cd430e9d110bad2466200d51c6061dae7c5a64e36047c8c831/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ec99871d98e517e1cc4a15659c62d6ea63ee5a2d72c5ddbebd7bae8b9e2670c8", size = 1401723, upload-time = "2025-03-02T19:43:28.099Z" }, - { url = "https://files.pythonhosted.org/packages/ef/69/b93bccd093b3f06a99e67e11ebd6e100324735dc2834958ba5852a1b9fed/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8799164e1f83588dbdde07f728ea80796ea72196ea23484d78d891470241b222", size = 1226276, upload-time = "2025-03-02T19:43:30.192Z" }, - { url = "https://files.pythonhosted.org/packages/ab/32/37dd1bc5ce866c136716619e6f7081d7078d7dd1c1da7025603dcfd9cf5f/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:583943813898326516ab451a83f734c6f07488cda5c361676150d3e3e8b47927", size = 1420132, upload-time = "2025-03-02T19:43:33.322Z" }, - { url = "https://files.pythonhosted.org/packages/4b/08/f3bc828dd9f0f8433b26f37c4fceab303186ad7b9b70819f2ccb493d99fc/levenshtein-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bb22956af44bb4eade93546bf95be610c8939b9a9d4d28b2dfa94abf454fed7", size = 1189144, upload-time = "2025-03-02T19:43:34.814Z" }, - { url = "https://files.pythonhosted.org/packages/2d/54/5ecd89066cf579223d504abe3ac37ba11f63b01a19fd12591083acc00eb6/levenshtein-0.27.1-cp312-cp312-win32.whl", hash = "sha256:d9099ed1bcfa7ccc5540e8ad27b5dc6f23d16addcbe21fdd82af6440f4ed2b6d", size = 88279, upload-time = "2025-03-02T19:43:38.86Z" }, - { url = "https://files.pythonhosted.org/packages/53/79/4f8fabcc5aca9305b494d1d6c7a98482e90a855e0050ae9ff5d7bf4ab2c6/levenshtein-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:7f071ecdb50aa6c15fd8ae5bcb67e9da46ba1df7bba7c6bf6803a54c7a41fd96", size = 100659, upload-time = "2025-03-02T19:43:40.082Z" }, - { url = "https://files.pythonhosted.org/packages/cb/81/f8e4c0f571c2aac2e0c56a6e0e41b679937a2b7013e79415e4aef555cff0/levenshtein-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:83b9033a984ccace7703f35b688f3907d55490182fd39b33a8e434d7b2e249e6", size = 88168, upload-time = "2025-03-02T19:43:41.42Z" }, - { url = "https://files.pythonhosted.org/packages/7d/44/c5955d0b6830925559b00617d80c9f6e03a9b00c451835ee4da7010e71cd/levenshtein-0.27.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:909b7b6bce27a4ec90576c9a9bd9af5a41308dfecf364b410e80b58038277bbe", size = 170533, upload-time = "2025-03-02T19:44:38.096Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3f/858572d68b33e13a9c154b99f153317efe68381bf63cc4e986e820935fc3/levenshtein-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d193a7f97b8c6a350e36ec58e41a627c06fa4157c3ce4b2b11d90cfc3c2ebb8f", size = 153119, upload-time = "2025-03-02T19:44:39.388Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/60/2bd8d001ea4eb53ca16faa7a649d56005ba22b1bcc2a4f1617ab27ed7e48/levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:614be316e3c06118705fae1f717f9072d35108e5fd4e66a7dd0e80356135340b", size = 149576, upload-time = "2025-03-02T19:44:40.617Z" }, - { url = "https://files.pythonhosted.org/packages/e4/db/0580797e1e4ac26cf67761a235b29b49f62d2b175dbbc609882f2aecd4e4/levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31fc0a5bb070722bdabb6f7e14955a294a4a968c68202d294699817f21545d22", size = 157445, upload-time = "2025-03-02T19:44:41.901Z" }, - { url = "https://files.pythonhosted.org/packages/f4/de/9c171c96d1f15c900086d7212b5543a85539e767689fc4933d14048ba1ec/levenshtein-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9415aa5257227af543be65768a80c7a75e266c3c818468ce6914812f88f9c3df", size = 243141, upload-time = "2025-03-02T19:44:43.228Z" }, - { url = "https://files.pythonhosted.org/packages/dc/1e/408fd10217eac0e43aea0604be22b4851a09e03d761d44d4ea12089dd70e/levenshtein-0.27.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:7987ef006a3cf56a4532bd4c90c2d3b7b4ca9ad3bf8ae1ee5713c4a3bdfda913", size = 98045, upload-time = "2025-03-02T19:44:44.527Z" }, -] - [[package]] name = "litellm" version = "1.63.7" @@ -3102,6 +3080,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/80/9d/627375bab4c90dd066093fc2c9a26b86f87e26d980dbf71667b44cbee3eb/mmh3-5.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:a4c1a76808dfea47f7407a0b07aaff9087447ef6280716fd0783409b3088bb3c", size = 38888, upload-time = "2025-01-25T08:39:05.174Z" }, ] +[[package]] +name = "mo-vector" +version = "0.1.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "pymysql" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/03/2ef4de1c8d970288f018b6b63439563336c51f26f57706dc51e4c395fdbe/mo_vector-0.1.13.tar.gz", hash = "sha256:8526c37e99157a0c9866bf3868600e877980464eccb212f8ea71971c0630eb69", size = 16926, upload-time = "2025-06-18T09:27:27.906Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/e7/514f5cf5909f96adf09b78146a9e5c92f82abcc212bc3f88456bf2640c23/mo_vector-0.1.13-py3-none-any.whl", hash = "sha256:f7d619acc3e92ed59631e6b3a12508240e22cf428c87daf022c0d87fbd5da459", size = 20091, upload-time = "2025-06-18T09:27:26.899Z" }, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -3198,28 +3190,28 @@ wheels = [ [[package]] name = "mypy" -version = "1.16.0" +version = "1.16.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mypy-extensions" }, { name = "pathspec" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d4/38/13c2f1abae94d5ea0354e146b95a1be9b2137a0d506728e0da037c4276f6/mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab", size = 3323139, upload-time = "2025-05-29T13:46:12.532Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/69/92c7fa98112e4d9eb075a239caa4ef4649ad7d441545ccffbd5e34607cbb/mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab", size = 3324747, upload-time = "2025-06-16T16:51:35.145Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/24/c4/ff2f79db7075c274fe85b5fff8797d29c6b61b8854c39e3b7feb556aa377/mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab", size = 10884498, upload-time = "2025-05-29T13:18:54.066Z" }, - { url = "https://files.pythonhosted.org/packages/02/07/12198e83006235f10f6a7808917376b5d6240a2fd5dce740fe5d2ebf3247/mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2", size = 10011755, upload-time = "2025-05-29T13:34:00.851Z" }, - { url = "https://files.pythonhosted.org/packages/f1/9b/5fd5801a72b5d6fb6ec0105ea1d0e01ab2d4971893076e558d4b6d6b5f80/mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff", size = 11800138, upload-time = "2025-05-29T13:32:55.082Z" }, - { url = "https://files.pythonhosted.org/packages/2e/81/a117441ea5dfc3746431e51d78a4aca569c677aa225bca2cc05a7c239b61/mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666", size = 12533156, upload-time = "2025-05-29T13:19:12.963Z" }, - { url = "https://files.pythonhosted.org/packages/3f/38/88ec57c6c86014d3f06251e00f397b5a7daa6888884d0abf187e4f5f587f/mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c", size = 12742426, upload-time = "2025-05-29T13:20:22.72Z" }, - { url = "https://files.pythonhosted.org/packages/bd/53/7e9d528433d56e6f6f77ccf24af6ce570986c2d98a5839e4c2009ef47283/mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b", size = 9478319, upload-time = "2025-05-29T13:21:17.582Z" }, - { url = "https://files.pythonhosted.org/packages/70/cf/158e5055e60ca2be23aec54a3010f89dcffd788732634b344fc9cb1e85a0/mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13", size = 11062927, upload-time = "2025-05-29T13:35:52.328Z" }, - { url = "https://files.pythonhosted.org/packages/94/34/cfff7a56be1609f5d10ef386342ce3494158e4d506516890142007e6472c/mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090", size = 10083082, upload-time = "2025-05-29T13:35:33.378Z" }, - { url = "https://files.pythonhosted.org/packages/b3/7f/7242062ec6288c33d8ad89574df87c3903d394870e5e6ba1699317a65075/mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1", size = 11828306, upload-time = "2025-05-29T13:21:02.164Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5f/b392f7b4f659f5b619ce5994c5c43caab3d80df2296ae54fa888b3d17f5a/mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8", size = 12702764, upload-time = "2025-05-29T13:20:42.826Z" }, - { url = "https://files.pythonhosted.org/packages/9b/c0/7646ef3a00fa39ac9bc0938626d9ff29d19d733011be929cfea59d82d136/mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730", size = 12896233, upload-time = 
"2025-05-29T13:18:37.446Z" }, - { url = "https://files.pythonhosted.org/packages/6d/38/52f4b808b3fef7f0ef840ee8ff6ce5b5d77381e65425758d515cdd4f5bb5/mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec", size = 9565547, upload-time = "2025-05-29T13:20:02.836Z" }, - { url = "https://files.pythonhosted.org/packages/99/a3/6ed10530dec8e0fdc890d81361260c9ef1f5e5c217ad8c9b21ecb2b8366b/mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031", size = 2265773, upload-time = "2025-05-29T13:35:18.762Z" }, + { url = "https://files.pythonhosted.org/packages/9a/61/ec1245aa1c325cb7a6c0f8570a2eee3bfc40fa90d19b1267f8e50b5c8645/mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc", size = 10890557, upload-time = "2025-06-16T16:37:21.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/bb/6eccc0ba0aa0c7a87df24e73f0ad34170514abd8162eb0c75fd7128171fb/mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782", size = 10012921, upload-time = "2025-06-16T16:51:28.659Z" }, + { url = "https://files.pythonhosted.org/packages/5f/80/b337a12e2006715f99f529e732c5f6a8c143bb58c92bb142d5ab380963a5/mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507", size = 11802887, upload-time = "2025-06-16T16:50:53.627Z" }, + { url = "https://files.pythonhosted.org/packages/d9/59/f7af072d09793d581a745a25737c7c0a945760036b16aeb620f658a017af/mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca", size = 12531658, upload-time = "2025-06-16T16:33:55.002Z" }, + { url = "https://files.pythonhosted.org/packages/82/c4/607672f2d6c0254b94a646cfc45ad589dd71b04aa1f3d642b840f7cce06c/mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4", size = 12732486, upload-time = "2025-06-16T16:37:03.301Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5e/136555ec1d80df877a707cebf9081bd3a9f397dedc1ab9750518d87489ec/mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6", size = 9479482, upload-time = "2025-06-16T16:47:37.48Z" }, + { url = "https://files.pythonhosted.org/packages/b4/d6/39482e5fcc724c15bf6280ff5806548c7185e0c090712a3736ed4d07e8b7/mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d", size = 11066493, upload-time = "2025-06-16T16:47:01.683Z" }, + { url = "https://files.pythonhosted.org/packages/e6/e5/26c347890efc6b757f4d5bb83f4a0cf5958b8cf49c938ac99b8b72b420a6/mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9", size = 10081687, upload-time = "2025-06-16T16:48:19.367Z" }, + { url = "https://files.pythonhosted.org/packages/44/c7/b5cb264c97b86914487d6a24bd8688c0172e37ec0f43e93b9691cae9468b/mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79", size = 11839723, upload-time = "2025-06-16T16:49:20.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/f8/491997a9b8a554204f834ed4816bda813aefda31cf873bb099deee3c9a99/mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15", size = 12722980, upload-time = "2025-06-16T16:37:40.929Z" }, + { url = "https://files.pythonhosted.org/packages/df/f0/2bd41e174b5fd93bc9de9a28e4fb673113633b8a7f3a607fa4a73595e468/mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd", size = 12903328, upload-time = "2025-06-16T16:34:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/61/81/5572108a7bec2c46b8aff7e9b524f371fe6ab5efb534d38d6b37b5490da8/mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b", size = 9562321, upload-time = "2025-06-16T16:48:58.823Z" }, + { url = "https://files.pythonhosted.org/packages/cf/d3/53e684e78e07c1a2bf7105715e5edd09ce951fc3f47cf9ed095ec1b7a037/mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37", size = 2265923, upload-time = "2025-06-16T16:48:02.366Z" }, ] [[package]] @@ -3291,27 +3283,25 @@ wheels = [ [[package]] name = "numexpr" -version = "2.10.2" +version = "2.11.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/67/c7415cf04ebe418193cfd6595ae03e3a64d76dac7b9c010098b39cc7992e/numexpr-2.10.2.tar.gz", hash = "sha256:b0aff6b48ebc99d2f54f27b5f73a58cb92fde650aeff1b397c71c8788b4fff1a", size = 106787, upload-time = "2024-11-23T13:34:23.127Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d2/8f/2cc977e91adbfbcdb6b49fdb9147e1d1c7566eb2c0c1e737e9a47020b5ca/numexpr-2.11.0.tar.gz", hash = "sha256:75b2c01a4eda2e7c357bc67a3f5c3dd76506c15b5fd4dc42845ef2e182181bad", size = 108960, upload-time = "2025-06-09T11:05:56.79Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/b7/f25d6166f92ef23737c1c90416144492a664f0a56510d90f7c6577c2cd14/numexpr-2.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b360eb8d392483410fe6a3d5a7144afa298c9a0aa3e9fe193e89590b47dd477", size = 145055, upload-time = "2024-11-23T13:33:36.154Z" }, - { url = "https://files.pythonhosted.org/packages/66/64/428361ea6415826332f38ef2dd5c3abf4e7e601f033bfc9be68b680cb765/numexpr-2.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d9a42f5c24880350d88933c4efee91b857c378aaea7e8b86221fff569069841e", size = 134743, upload-time = "2024-11-23T13:33:37.273Z" }, - { url = "https://files.pythonhosted.org/packages/3f/fb/639ec91d2ea7b4a5d66e26e8ef8e06b020c8e9b9ebaf3bab7b0a9bee472e/numexpr-2.10.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83fcb11988b57cc25b028a36d285287d706d1f536ebf2662ea30bd990e0de8b9", size = 410397, upload-time = "2024-11-23T13:33:38.474Z" }, - { url = "https://files.pythonhosted.org/packages/89/5a/0f5c5b8a3a6d34eeecb30d0e2f722d50b9b38c0e175937e7c6268ffab997/numexpr-2.10.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4213a92efa9770bc28e3792134e27c7e5c7e97068bdfb8ba395baebbd12f991b", size = 398902, upload-time = "2024-11-23T13:33:39.802Z" }, - { url = "https://files.pythonhosted.org/packages/a2/d5/ec734e735eba5a753efed5be3707ee7447ebd371772f8081b65a4153fb97/numexpr-2.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:ebdbef5763ca057eea0c2b5698e4439d084a0505d9d6e94f4804f26e8890c45e", size = 1380354, upload-time = "2024-11-23T13:33:41.68Z" }, - { url = "https://files.pythonhosted.org/packages/30/51/406e572531d817480bd612ee08239a36ee82865fea02fce569f15631f4ee/numexpr-2.10.2-cp311-cp311-win32.whl", hash = "sha256:3bf01ec502d89944e49e9c1b5cc7c7085be8ca2eb9dd46a0eafd218afbdbd5f5", size = 151938, upload-time = "2024-11-23T13:33:43.998Z" }, - { url = "https://files.pythonhosted.org/packages/04/32/5882ed1dbd96234f327a73316a481add151ff827cfaf2ea24fb4d5ad04db/numexpr-2.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:e2d0ae24b0728e4bc3f1d3f33310340d67321d36d6043f7ce26897f4f1042db0", size = 144961, upload-time = "2024-11-23T13:33:45.329Z" }, - { url = "https://files.pythonhosted.org/packages/2b/96/d5053dea06d8298ae8052b4b049cbf8ef74998e28d57166cc27b8ae909e2/numexpr-2.10.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5323a46e75832334f1af86da1ef6ff0add00fbacdd266250be872b438bdf2be", size = 145029, upload-time = "2024-11-23T13:33:46.892Z" }, - { url = "https://files.pythonhosted.org/packages/3e/3c/fcd5a812ed5dda757b2d9ef2764a3e1cca6f6d1f02dbf113dc23a2c7702a/numexpr-2.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a42963bd4c62d8afa4f51e7974debfa39a048383f653544ab54f50a2f7ec6c42", size = 134851, upload-time = "2024-11-23T13:33:47.986Z" }, - { url = "https://files.pythonhosted.org/packages/0a/52/0ed3b306d8c9944129bce97fec73a2caff13adbd7e1df148d546d7eb2d4d/numexpr-2.10.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5191ba8f2975cb9703afc04ae845a929e193498c0e8bcd408ecb147b35978470", size = 411837, upload-time = "2024-11-23T13:33:49.223Z" }, - { url = "https://files.pythonhosted.org/packages/7d/9c/6b671dd3fb67d7e7da93cb76b7c5277743f310a216b7856bb18776bb3371/numexpr-2.10.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97298b14f0105a794bea06fd9fbc5c423bd3ff4d88cbc618860b83eb7a436ad6", size = 400577, upload-time = "2024-11-23T13:33:50.559Z" }, - { url = "https://files.pythonhosted.org/packages/ea/4d/a167d1a215fe10ce58c45109f2869fd13aa0eef66f7e8c69af68be45d436/numexpr-2.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9d7805ccb6be2d3b0f7f6fad3707a09ac537811e8e9964f4074d28cb35543db", size = 1381735, upload-time = "2024-11-23T13:33:51.918Z" }, - { url = "https://files.pythonhosted.org/packages/c1/d4/17e4434f989e4917d31cbd88a043e1c9c16958149cf43fa622987111392b/numexpr-2.10.2-cp312-cp312-win32.whl", hash = "sha256:cb845b2d4f9f8ef0eb1c9884f2b64780a85d3b5ae4eeb26ae2b0019f489cd35e", size = 152102, upload-time = "2024-11-23T13:33:53.93Z" }, - { url = "https://files.pythonhosted.org/packages/b8/25/9ae599994076ef2a42d35ff6b0430da002647f212567851336a6c7b132d6/numexpr-2.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:57b59cbb5dcce4edf09cd6ce0b57ff60312479930099ca8d944c2fac896a1ead", size = 145061, upload-time = "2024-11-23T13:33:55.161Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d1/1cf8137990b3f3d445556ed63b9bc347aec39bde8c41146b02d3b35c1adc/numexpr-2.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:450eba3c93c3e3e8070566ad8d70590949d6e574b1c960bf68edd789811e7da8", size = 147535, upload-time = "2025-06-09T11:05:08.929Z" }, + { url = "https://files.pythonhosted.org/packages/b6/5e/bac7649d043f47c7c14c797efe60dbd19476468a149399cd706fe2e47f8c/numexpr-2.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f0eb88dbac8a7e61ee433006d0ddfd6eb921f5c6c224d1b50855bc98fb304c44", size = 136710, upload-time = 
"2025-06-09T11:05:10.366Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/c88fc34d82d23c66ea0b78b00a1fb3b64048e0f7ac7791b2cd0d2a4ce14d/numexpr-2.11.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a194e3684b3553ea199c3f4837f422a521c7e2f0cce13527adc3a6b4049f9e7c", size = 411169, upload-time = "2025-06-09T11:05:11.797Z" }, + { url = "https://files.pythonhosted.org/packages/e4/8d/4d78dad430b41d836146f9e6f545f5c4f7d1972a6aa427d8570ab232bf16/numexpr-2.11.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f677668ab2bb2452fee955af3702fbb3b71919e61e4520762b1e5f54af59c0d8", size = 401671, upload-time = "2025-06-09T11:05:13.127Z" }, + { url = "https://files.pythonhosted.org/packages/83/1c/414670eb41a82b78bd09769a4f5fb49a934f9b3990957f02c833637a511e/numexpr-2.11.0-cp311-cp311-win32.whl", hash = "sha256:7d9e76a77c9644fbd60da3984e516ead5b84817748c2da92515cd36f1941a04d", size = 153159, upload-time = "2025-06-09T11:05:14.452Z" }, + { url = "https://files.pythonhosted.org/packages/0c/97/8d00ca9b36f3ac68a8fd85e930ab0c9448d8c9ca7ce195ee75c188dabd45/numexpr-2.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:7163b488bfdcd13c300a8407c309e4cee195ef95d07facf5ac2678d66c988805", size = 146224, upload-time = "2025-06-09T11:05:15.877Z" }, + { url = "https://files.pythonhosted.org/packages/38/45/7a0e5a0b800d92e73825494ac695fa05a52c7fc7088d69a336880136b437/numexpr-2.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4229060be866813122385c608bbd3ea48fe0b33e91f2756810d28c1cdbfc98f1", size = 147494, upload-time = "2025-06-09T11:05:17.015Z" }, + { url = "https://files.pythonhosted.org/packages/74/46/3a26b84e44f4739ec98de0ede4b95b4b8096f721e22d0e97517eeb02017e/numexpr-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:097aa8835d32d6ac52f2be543384019b4b134d1fb67998cbfc4271155edfe54a", size = 136832, upload-time = "2025-06-09T11:05:18.55Z" }, + { url = "https://files.pythonhosted.org/packages/75/05/e3076ff25d4a108b47640c169c0a64811748c43b63d9cc052ea56de1631e/numexpr-2.11.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f082321c244ff5d0e252071fb2c4fe02063a45934144a1456a5370ca139bec2", size = 412618, upload-time = "2025-06-09T11:05:20.093Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/15e0e077a004db0edd530da96c60c948689c888c464ee5d14b82405ebd86/numexpr-2.11.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7a19435ca3d7dd502b8d8dce643555eb1b6013989e3f7577857289f6db6be16", size = 403363, upload-time = "2025-06-09T11:05:21.217Z" }, + { url = "https://files.pythonhosted.org/packages/10/14/f22afb3a7ae41d03ba87f62d00fbcfb76389f9cc91b7a82593c39c509318/numexpr-2.11.0-cp312-cp312-win32.whl", hash = "sha256:f326218262c8d8537887cc4bbd613c8409d62f2cac799835c0360e0d9cefaa5c", size = 153307, upload-time = "2025-06-09T11:05:22.855Z" }, + { url = "https://files.pythonhosted.org/packages/18/70/abc585269424582b3cd6db261e33b2ec96b5d4971da3edb29fc9b62a8926/numexpr-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a184e5930c77ab91dd9beee4df403b825cd9dfc4e9ba4670d31c9fcb4e2c08e", size = 146337, upload-time = "2025-06-09T11:05:23.976Z" }, ] [[package]] @@ -3711,28 +3701,28 @@ wheels = [ [[package]] name = "opik" -version = "1.7.29" +version = "1.7.34" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3-stubs", extra = ["bedrock-runtime"] }, { name = "click" }, { name = "httpx" }, { name = "jinja2" }, - { name = "levenshtein" }, { name = "litellm" }, { name 
= "openai" }, { name = "pydantic" }, { name = "pydantic-settings" }, { name = "pytest" }, + { name = "rapidfuzz" }, { name = "rich" }, { name = "sentry-sdk" }, { name = "tenacity" }, { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/94/52faf9277891bfa77dc4bac80b5d88d78103d8fad7f6a6c9495ae083e4da/opik-1.7.29.tar.gz", hash = "sha256:5a13692b233d90663a32cbab452937bf8b6fdc2d516c671c102c3beb34301b64", size = 300856, upload-time = "2025-06-02T11:11:27.955Z" } +sdist = { url = "https://files.pythonhosted.org/packages/14/c3/c11c1c5db253e6cfd36198e7c11b7383d8e90c5a2a74a177e6ced3ab8b9f/opik-1.7.34.tar.gz", hash = "sha256:c9e4de44c46e276e3abe18b48d01aa5f0826dd0578dc20361ed56ec5802d481c", size = 305179, upload-time = "2025-06-12T13:59:21.379Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/43/462b700030c70f525eebf4bc63a226ae46109eca598cd59db081de9a8b21/opik-1.7.29-py3-none-any.whl", hash = "sha256:3eb893e7b612d0b799682060193e4f2eebf55b65811203c4fe46af04cf84fa8c", size = 566635, upload-time = "2025-06-02T11:11:26.225Z" }, + { url = "https://files.pythonhosted.org/packages/9a/7f/c2e06f62e541a8c32cfc570b63a8b2467f2f227d122b9ed6acbc0492ad88/opik-1.7.34-py3-none-any.whl", hash = "sha256:487530f3fc32748c666c2371ed8296a5c901631071c47fa20b578bd755b43048", size = 573502, upload-time = "2025-06-12T13:59:19.65Z" }, ] [[package]] @@ -4050,7 +4040,7 @@ wheels = [ [[package]] name = "posthog" -version = "4.2.0" +version = "5.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "backoff" }, @@ -4059,9 +4049,9 @@ dependencies = [ { name = "requests" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ce/5b/2e9890700b7b55a370edbfbe5948eae780d48af9b46ad06ea2e7970576f4/posthog-4.2.0.tar.gz", hash = "sha256:c4abc95de03294be005b3b7e8735e9d7abab88583da26262112bacce64b0c3b5", size = 80727, upload-time = "2025-05-23T23:23:55.943Z" } +sdist = { url = "https://files.pythonhosted.org/packages/27/68/c71abb7d3df9f52875a393c648814feb64f97a0b2b116e1146f78b0c9651/posthog-5.0.0.tar.gz", hash = "sha256:9f6a5eb650c19473b20085665f53ef762ba2a8558754eefc20843b07ac5be80e", size = 82927, upload-time = "2025-06-16T15:39:19.362Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/16/7b6c5844acee2d343d463ee0e3143cd8c7c48a6c0d079a2f7daf0c80b95c/posthog-4.2.0-py2.py3-none-any.whl", hash = "sha256:60c7066caac43e43e326e9196d8c1aadeafc8b0be9e5c108446e352711fa456b", size = 96692, upload-time = "2025-05-23T23:23:54.384Z" }, + { url = "https://files.pythonhosted.org/packages/82/d6/1123e623d3a4b657a4c670827bd81c0f65ed5a1a62f056e6c48a174cdc63/posthog-5.0.0-py3-none-any.whl", hash = "sha256:ac87c4bd1549a780045cfba1097352fc3c933b66b3be3e926915519ce1eabe44", size = 100042, upload-time = "2025-06-16T15:39:17.867Z" }, ] [[package]] @@ -4078,43 +4068,43 @@ wheels = [ [[package]] name = "propcache" -version = "0.3.1" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/c8/fdc6686a986feae3541ea23dcaa661bd93972d3940460646c6bb96e21c40/propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf", size = 43651, upload-time = "2025-03-26T03:06:12.05Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, 
upload-time = "2025-06-09T22:56:06.081Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/0f/5a5319ee83bd651f75311fcb0c492c21322a7fc8f788e4eef23f44243427/propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5", size = 80243, upload-time = "2025-03-26T03:04:01.912Z" }, - { url = "https://files.pythonhosted.org/packages/ce/84/3db5537e0879942783e2256616ff15d870a11d7ac26541336fe1b673c818/propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371", size = 46503, upload-time = "2025-03-26T03:04:03.704Z" }, - { url = "https://files.pythonhosted.org/packages/e2/c8/b649ed972433c3f0d827d7f0cf9ea47162f4ef8f4fe98c5f3641a0bc63ff/propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da", size = 45934, upload-time = "2025-03-26T03:04:05.257Z" }, - { url = "https://files.pythonhosted.org/packages/59/f9/4c0a5cf6974c2c43b1a6810c40d889769cc8f84cea676cbe1e62766a45f8/propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744", size = 233633, upload-time = "2025-03-26T03:04:07.044Z" }, - { url = "https://files.pythonhosted.org/packages/e7/64/66f2f4d1b4f0007c6e9078bd95b609b633d3957fe6dd23eac33ebde4b584/propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0", size = 241124, upload-time = "2025-03-26T03:04:08.676Z" }, - { url = "https://files.pythonhosted.org/packages/aa/bf/7b8c9fd097d511638fa9b6af3d986adbdf567598a567b46338c925144c1b/propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5", size = 240283, upload-time = "2025-03-26T03:04:10.172Z" }, - { url = "https://files.pythonhosted.org/packages/fa/c9/e85aeeeaae83358e2a1ef32d6ff50a483a5d5248bc38510d030a6f4e2816/propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256", size = 232498, upload-time = "2025-03-26T03:04:11.616Z" }, - { url = "https://files.pythonhosted.org/packages/8e/66/acb88e1f30ef5536d785c283af2e62931cb934a56a3ecf39105887aa8905/propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073", size = 221486, upload-time = "2025-03-26T03:04:13.102Z" }, - { url = "https://files.pythonhosted.org/packages/f5/f9/233ddb05ffdcaee4448508ee1d70aa7deff21bb41469ccdfcc339f871427/propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d", size = 222675, upload-time = "2025-03-26T03:04:14.658Z" }, - { url = "https://files.pythonhosted.org/packages/98/b8/eb977e28138f9e22a5a789daf608d36e05ed93093ef12a12441030da800a/propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f", size = 215727, upload-time = "2025-03-26T03:04:16.207Z" }, - { url = "https://files.pythonhosted.org/packages/89/2d/5f52d9c579f67b8ee1edd9ec073c91b23cc5b7ff7951a1e449e04ed8fdf3/propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0", size = 217878, upload-time = "2025-03-26T03:04:18.11Z" }, - { url = "https://files.pythonhosted.org/packages/7a/fd/5283e5ed8a82b00c7a989b99bb6ea173db1ad750bf0bf8dff08d3f4a4e28/propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a", size = 230558, upload-time = "2025-03-26T03:04:19.562Z" }, - { url = "https://files.pythonhosted.org/packages/90/38/ab17d75938ef7ac87332c588857422ae126b1c76253f0f5b1242032923ca/propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a", size = 233754, upload-time = "2025-03-26T03:04:21.065Z" }, - { url = "https://files.pythonhosted.org/packages/06/5d/3b921b9c60659ae464137508d3b4c2b3f52f592ceb1964aa2533b32fcf0b/propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9", size = 226088, upload-time = "2025-03-26T03:04:22.718Z" }, - { url = "https://files.pythonhosted.org/packages/54/6e/30a11f4417d9266b5a464ac5a8c5164ddc9dd153dfa77bf57918165eb4ae/propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005", size = 40859, upload-time = "2025-03-26T03:04:24.039Z" }, - { url = "https://files.pythonhosted.org/packages/1d/3a/8a68dd867da9ca2ee9dfd361093e9cb08cb0f37e5ddb2276f1b5177d7731/propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7", size = 45153, upload-time = "2025-03-26T03:04:25.211Z" }, - { url = "https://files.pythonhosted.org/packages/41/aa/ca78d9be314d1e15ff517b992bebbed3bdfef5b8919e85bf4940e57b6137/propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723", size = 80430, upload-time = "2025-03-26T03:04:26.436Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d8/f0c17c44d1cda0ad1979af2e593ea290defdde9eaeb89b08abbe02a5e8e1/propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976", size = 46637, upload-time = "2025-03-26T03:04:27.932Z" }, - { url = "https://files.pythonhosted.org/packages/ae/bd/c1e37265910752e6e5e8a4c1605d0129e5b7933c3dc3cf1b9b48ed83b364/propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b", size = 46123, upload-time = "2025-03-26T03:04:30.659Z" }, - { url = "https://files.pythonhosted.org/packages/d4/b0/911eda0865f90c0c7e9f0415d40a5bf681204da5fd7ca089361a64c16b28/propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f", size = 243031, upload-time = "2025-03-26T03:04:31.977Z" }, - { url = "https://files.pythonhosted.org/packages/0a/06/0da53397c76a74271621807265b6eb61fb011451b1ddebf43213df763669/propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70", size = 249100, upload-time = "2025-03-26T03:04:33.45Z" }, - { url = "https://files.pythonhosted.org/packages/f1/eb/13090e05bf6b963fc1653cdc922133ced467cb4b8dab53158db5a37aa21e/propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7", size = 250170, upload-time = "2025-03-26T03:04:35.542Z" }, - { url = "https://files.pythonhosted.org/packages/3b/4c/f72c9e1022b3b043ec7dc475a0f405d4c3e10b9b1d378a7330fecf0652da/propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25", size = 245000, upload-time = "2025-03-26T03:04:37.501Z" }, - { url = "https://files.pythonhosted.org/packages/e8/fd/970ca0e22acc829f1adf5de3724085e778c1ad8a75bec010049502cb3a86/propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277", size = 230262, upload-time = "2025-03-26T03:04:39.532Z" }, - { url = "https://files.pythonhosted.org/packages/c4/42/817289120c6b9194a44f6c3e6b2c3277c5b70bbad39e7df648f177cc3634/propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8", size = 236772, upload-time = "2025-03-26T03:04:41.109Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9c/3b3942b302badd589ad6b672da3ca7b660a6c2f505cafd058133ddc73918/propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e", size = 231133, upload-time = "2025-03-26T03:04:42.544Z" }, - { url = "https://files.pythonhosted.org/packages/98/a1/75f6355f9ad039108ff000dfc2e19962c8dea0430da9a1428e7975cf24b2/propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee", size = 230741, upload-time = "2025-03-26T03:04:44.06Z" }, - { url = "https://files.pythonhosted.org/packages/67/0c/3e82563af77d1f8731132166da69fdfd95e71210e31f18edce08a1eb11ea/propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815", size = 244047, upload-time = "2025-03-26T03:04:45.983Z" }, - { url = "https://files.pythonhosted.org/packages/f7/50/9fb7cca01532a08c4d5186d7bb2da6c4c587825c0ae134b89b47c7d62628/propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5", size = 246467, upload-time = "2025-03-26T03:04:47.699Z" }, - { url = "https://files.pythonhosted.org/packages/a9/02/ccbcf3e1c604c16cc525309161d57412c23cf2351523aedbb280eb7c9094/propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7", size = 241022, upload-time = "2025-03-26T03:04:49.195Z" }, - { url = "https://files.pythonhosted.org/packages/db/19/e777227545e09ca1e77a6e21274ae9ec45de0f589f0ce3eca2a41f366220/propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b", size = 40647, upload-time = "2025-03-26T03:04:50.595Z" }, - { url = "https://files.pythonhosted.org/packages/24/bb/3b1b01da5dd04c77a204c84e538ff11f624e31431cfde7201d9110b092b1/propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3", size = 44784, upload-time = "2025-03-26T03:04:51.791Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d3/c3cb8f1d6ae3b37f83e1de806713a9b3642c5895f0215a62e1a4bd6e5e34/propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40", 
size = 12376, upload-time = "2025-03-26T03:06:10.5Z" }, + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = "2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = "https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", 
size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, ] [[package]] @@ -4264,7 +4254,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.11.5" +version = "2.11.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -4272,9 +4262,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, ] [[package]] @@ -4377,7 +4367,7 @@ crypto = [ [[package]] name = "pymilvus" -version = "2.5.10" +version = "2.5.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, @@ -4388,9 +4378,9 @@ dependencies = [ { name = "setuptools" }, { name = "ujson" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/da/e2/88f126a08d8eefba7341e3eb323406a227146094aab7137a2b91d882e98d/pymilvus-2.5.10.tar.gz", hash = "sha256:cc44ad776aeab781ee4c4a4d334b73e746066ab2fb6722c5311f02efa6fc54a2", size = 1260364, upload-time = "2025-05-23T06:08:06.992Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/d9/3a76b1f5014a20efcfe1bb0aa46423d9cf1df5ab2ce8b1479248b943692a/pymilvus-2.5.11.tar.gz", hash = "sha256:cb1c291c659da73c58f2f5c2bd5bcbb87feb76f720afd72b9e7ace813d384c83", size = 1262466, upload-time = "2025-06-10T00:41:54.295Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/4b/847704930ad8ddd0d0975e9a3a5e3fe704f642debe97454135c2b9ee7081/pymilvus-2.5.10-py3-none-any.whl", hash = "sha256:7da540f93068871cda3941602c55227aeaafb66f2f0d9c05e8f9db783716b100", size = 227635, upload-time = "2025-05-23T06:08:05.397Z" }, + { url = "https://files.pythonhosted.org/packages/e7/2c/a9f2c2daff511e127616a4294e597bf4c7626d49865f62865432698c7ba9/pymilvus-2.5.11-py3-none-any.whl", hash = "sha256:20417ea0f364cd8e9d3783b432ad25c32cff8f3ceb40cdfdf54f8bbcf052cd7e", size = 228115, upload-time = "2025-06-10T00:41:52.354Z" }, ] [[package]] @@ -4418,19 +4408,17 @@ wheels = [ [[package]] name = "pyobvector" -version = "0.1.20" +version = "0.1.14" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiomysql" }, { name = "numpy" }, - { name = "pydantic" }, { name = "pymysql" }, { name = "sqlalchemy" }, - { name = "sqlglot" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/f7/fcc99e41bac6aee0822667809957800b2f512f2d96d1fa52c283cb58db16/pyobvector-0.1.20.tar.gz", hash = 
"sha256:26e8155d5b933333a2ab21b08e51df84e374d188f5fb26520347450e72f34c6e", size = 35256, upload-time = "2025-03-17T12:32:16.66Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/59/7d762061808948dd6aad165a000b34e22163dc83fb5014184eeacc0fabe5/pyobvector-0.1.14.tar.gz", hash = "sha256:4f85cdd63064d040e94c0a96099a0cd5cda18ce625865382e89429f28422fc02", size = 26780, upload-time = "2024-11-20T11:46:18.017Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/7d/00cd33c18814bec0a4f6f4a89becdbe2c6c28d78f87d765d0259a22b117e/pyobvector-0.1.20-py3-none-any.whl", hash = "sha256:1a991f8b9f53e1a749fc396ccb90c7591e64b2c5679930023f00789c62e7af72", size = 46264, upload-time = "2025-03-17T12:32:15.356Z" }, + { url = "https://files.pythonhosted.org/packages/88/68/ecb21b74c974e7be7f9034e205d08db62d614ff5c221581ae96d37ef853e/pyobvector-0.1.14-py3-none-any.whl", hash = "sha256:828e0bec49a177355b70c7a1270af3b0bf5239200ee0d096e4165b267eeff97c", size = 35526, upload-time = "2024-11-20T11:46:16.809Z" }, ] [[package]] @@ -4640,6 +4628,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, ] +[[package]] +name = "python-http-client" +version = "3.3.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/fa/284e52a8c6dcbe25671f02d217bf2f85660db940088faf18ae7a05e97313/python_http_client-3.3.7.tar.gz", hash = "sha256:bf841ee45262747e00dec7ee9971dfb8c7d83083f5713596488d67739170cea0", size = 9377, upload-time = "2022-03-09T20:23:56.386Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/31/9b360138f4e4035ee9dac4fe1132b6437bd05751aaf1db2a2d83dc45db5f/python_http_client-3.3.7-py3-none-any.whl", hash = "sha256:ad371d2bbedc6ea15c26179c6222a78bc9308d272435ddf1d5c84f068f249a36", size = 8352, upload-time = "2022-03-09T20:23:54.862Z" }, +] + [[package]] name = "python-iso639" version = "2025.2.18" @@ -4907,7 +4904,7 @@ wheels = [ [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -4915,9 +4912,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 
64847, upload-time = "2025-06-09T16:43:05.728Z" }, ] [[package]] @@ -5045,27 +5042,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.11.12" +version = "0.11.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/0a/92416b159ec00cdf11e5882a9d80d29bf84bba3dbebc51c4898bfbca1da6/ruff-0.11.12.tar.gz", hash = "sha256:43cf7f69c7d7c7d7513b9d59c5d8cafd704e05944f978614aa9faff6ac202603", size = 4202289, upload-time = "2025-05-29T13:31:40.037Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/da/9c6f995903b4d9474b39da91d2d626659af3ff1eeb43e9ae7c119349dba6/ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514", size = 4282054, upload-time = "2025-06-05T21:00:15.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/cc/53eb79f012d15e136d40a8e8fc519ba8f55a057f60b29c2df34efd47c6e3/ruff-0.11.12-py3-none-linux_armv6l.whl", hash = "sha256:c7680aa2f0d4c4f43353d1e72123955c7a2159b8646cd43402de6d4a3a25d7cc", size = 10285597, upload-time = "2025-05-29T13:30:57.539Z" }, - { url = "https://files.pythonhosted.org/packages/e7/d7/73386e9fb0232b015a23f62fea7503f96e29c29e6c45461d4a73bac74df9/ruff-0.11.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2cad64843da9f134565c20bcc430642de897b8ea02e2e79e6e02a76b8dcad7c3", size = 11053154, upload-time = "2025-05-29T13:31:00.865Z" }, - { url = "https://files.pythonhosted.org/packages/4e/eb/3eae144c5114e92deb65a0cb2c72326c8469e14991e9bc3ec0349da1331c/ruff-0.11.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9b6886b524a1c659cee1758140138455d3c029783d1b9e643f3624a5ee0cb0aa", size = 10403048, upload-time = "2025-05-29T13:31:03.413Z" }, - { url = "https://files.pythonhosted.org/packages/29/64/20c54b20e58b1058db6689e94731f2a22e9f7abab74e1a758dfba058b6ca/ruff-0.11.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc3a3690aad6e86c1958d3ec3c38c4594b6ecec75c1f531e84160bd827b2012", size = 10597062, upload-time = "2025-05-29T13:31:05.539Z" }, - { url = "https://files.pythonhosted.org/packages/29/3a/79fa6a9a39422a400564ca7233a689a151f1039110f0bbbabcb38106883a/ruff-0.11.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f97fdbc2549f456c65b3b0048560d44ddd540db1f27c778a938371424b49fe4a", size = 10155152, upload-time = "2025-05-29T13:31:07.986Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a4/22c2c97b2340aa968af3a39bc38045e78d36abd4ed3fa2bde91c31e712e3/ruff-0.11.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74adf84960236961090e2d1348c1a67d940fd12e811a33fb3d107df61eef8fc7", size = 11723067, upload-time = "2025-05-29T13:31:10.57Z" }, - { url = "https://files.pythonhosted.org/packages/bc/cf/3e452fbd9597bcd8058856ecd42b22751749d07935793a1856d988154151/ruff-0.11.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b56697e5b8bcf1d61293ccfe63873aba08fdbcbbba839fc046ec5926bdb25a3a", size = 12460807, upload-time = "2025-05-29T13:31:12.88Z" }, - { url = "https://files.pythonhosted.org/packages/2f/ec/8f170381a15e1eb7d93cb4feef8d17334d5a1eb33fee273aee5d1f8241a3/ruff-0.11.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d47afa45e7b0eaf5e5969c6b39cbd108be83910b5c74626247e366fd7a36a13", size = 12063261, upload-time = "2025-05-29T13:31:15.236Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/bf/57208f8c0a8153a14652a85f4116c0002148e83770d7a41f2e90b52d2b4e/ruff-0.11.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bf9603fe1bf949de8b09a2da896f05c01ed7a187f4a386cdba6760e7f61be", size = 11329601, upload-time = "2025-05-29T13:31:18.68Z" }, - { url = "https://files.pythonhosted.org/packages/c3/56/edf942f7fdac5888094d9ffa303f12096f1a93eb46570bcf5f14c0c70880/ruff-0.11.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08033320e979df3b20dba567c62f69c45e01df708b0f9c83912d7abd3e0801cd", size = 11522186, upload-time = "2025-05-29T13:31:21.216Z" }, - { url = "https://files.pythonhosted.org/packages/ed/63/79ffef65246911ed7e2290aeece48739d9603b3a35f9529fec0fc6c26400/ruff-0.11.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:929b7706584f5bfd61d67d5070f399057d07c70585fa8c4491d78ada452d3bef", size = 10449032, upload-time = "2025-05-29T13:31:23.417Z" }, - { url = "https://files.pythonhosted.org/packages/88/19/8c9d4d8a1c2a3f5a1ea45a64b42593d50e28b8e038f1aafd65d6b43647f3/ruff-0.11.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7de4a73205dc5756b8e09ee3ed67c38312dce1aa28972b93150f5751199981b5", size = 10129370, upload-time = "2025-05-29T13:31:25.777Z" }, - { url = "https://files.pythonhosted.org/packages/bc/0f/2d15533eaa18f460530a857e1778900cd867ded67f16c85723569d54e410/ruff-0.11.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2635c2a90ac1b8ca9e93b70af59dfd1dd2026a40e2d6eebaa3efb0465dd9cf02", size = 11123529, upload-time = "2025-05-29T13:31:28.396Z" }, - { url = "https://files.pythonhosted.org/packages/4f/e2/4c2ac669534bdded835356813f48ea33cfb3a947dc47f270038364587088/ruff-0.11.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d05d6a78a89166f03f03a198ecc9d18779076ad0eec476819467acb401028c0c", size = 11577642, upload-time = "2025-05-29T13:31:30.647Z" }, - { url = "https://files.pythonhosted.org/packages/a7/9b/c9ddf7f924d5617a1c94a93ba595f4b24cb5bc50e98b94433ab3f7ad27e5/ruff-0.11.12-py3-none-win32.whl", hash = "sha256:f5a07f49767c4be4772d161bfc049c1f242db0cfe1bd976e0f0886732a4765d6", size = 10475511, upload-time = "2025-05-29T13:31:32.917Z" }, - { url = "https://files.pythonhosted.org/packages/fd/d6/74fb6d3470c1aada019ffff33c0f9210af746cca0a4de19a1f10ce54968a/ruff-0.11.12-py3-none-win_amd64.whl", hash = "sha256:5a4d9f8030d8c3a45df201d7fb3ed38d0219bccd7955268e863ee4a115fa0832", size = 11523573, upload-time = "2025-05-29T13:31:35.782Z" }, - { url = "https://files.pythonhosted.org/packages/44/42/d58086ec20f52d2b0140752ae54b355ea2be2ed46f914231136dd1effcc7/ruff-0.11.12-py3-none-win_arm64.whl", hash = "sha256:65194e37853158d368e333ba282217941029a28ea90913c67e558c611d04daa5", size = 10697770, upload-time = "2025-05-29T13:31:38.009Z" }, + { url = "https://files.pythonhosted.org/packages/7d/ce/a11d381192966e0b4290842cc8d4fac7dc9214ddf627c11c1afff87da29b/ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46", size = 10292516, upload-time = "2025-06-05T20:59:32.944Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/87c3b59b0d4e753e40b6a3b4a2642dfd1dcaefbff121ddc64d6c8b47ba00/ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48", size = 11106083, upload-time = "2025-06-05T20:59:37.03Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/79/d8cec175856ff810a19825d09ce700265f905c643c69f45d2b737e4a470a/ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b", size = 10436024, upload-time = "2025-06-05T20:59:39.741Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5b/f6d94f2980fa1ee854b41568368a2e1252681b9238ab2895e133d303538f/ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a", size = 10646324, upload-time = "2025-06-05T20:59:42.185Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9c/b4c2acf24ea4426016d511dfdc787f4ce1ceb835f3c5fbdbcb32b1c63bda/ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc", size = 10174416, upload-time = "2025-06-05T20:59:44.319Z" }, + { url = "https://files.pythonhosted.org/packages/f3/10/e2e62f77c65ede8cd032c2ca39c41f48feabedb6e282bfd6073d81bb671d/ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629", size = 11724197, upload-time = "2025-06-05T20:59:46.935Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f0/466fe8469b85c561e081d798c45f8a1d21e0b4a5ef795a1d7f1a9a9ec182/ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933", size = 12511615, upload-time = "2025-06-05T20:59:49.534Z" }, + { url = "https://files.pythonhosted.org/packages/17/0e/cefe778b46dbd0cbcb03a839946c8f80a06f7968eb298aa4d1a4293f3448/ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165", size = 12117080, upload-time = "2025-06-05T20:59:51.654Z" }, + { url = "https://files.pythonhosted.org/packages/5d/2c/caaeda564cbe103bed145ea557cb86795b18651b0f6b3ff6a10e84e5a33f/ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71", size = 11326315, upload-time = "2025-06-05T20:59:54.469Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/782e7d681d660eda8c536962920c41309e6dd4ebcea9a2714ed5127d44bd/ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9", size = 11555640, upload-time = "2025-06-05T20:59:56.986Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/3d580c616316c7f07fb3c99dbecfe01fbaea7b6fd9a82b801e72e5de742a/ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc", size = 10507364, upload-time = "2025-06-05T20:59:59.154Z" }, + { url = "https://files.pythonhosted.org/packages/5a/dc/195e6f17d7b3ea6b12dc4f3e9de575db7983db187c378d44606e5d503319/ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7", size = 10141462, upload-time = "2025-06-05T21:00:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/8e/39a094af6967faa57ecdeacb91bedfb232474ff8c3d20f16a5514e6b3534/ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432", size = 11121028, upload-time = "2025-06-05T21:00:04.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/c0/b0b508193b0e8a1654ec683ebab18d309861f8bd64e3a2f9648b80d392cb/ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492", size = 11602992, upload-time = "2025-06-05T21:00:06.249Z" }, + { url = "https://files.pythonhosted.org/packages/7c/91/263e33ab93ab09ca06ce4f8f8547a858cc198072f873ebc9be7466790bae/ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250", size = 10474944, upload-time = "2025-06-05T21:00:08.459Z" }, + { url = "https://files.pythonhosted.org/packages/46/f4/7c27734ac2073aae8efb0119cae6931b6fb48017adf048fdf85c19337afc/ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3", size = 11548669, upload-time = "2025-06-05T21:00:11.147Z" }, + { url = "https://files.pythonhosted.org/packages/ec/bf/b273dd11673fed8a6bd46032c0ea2a04b2ac9bfa9c628756a5856ba113b0/ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b", size = 10683928, upload-time = "2025-06-05T21:00:13.758Z" }, ] [[package]] @@ -5114,6 +5111,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6c/42/cd8dc81f8060de1f14960885ad5b2d2651f41de8b93d09f3f919d6567a5a/scipy_stubs-1.15.3.0-py3-none-any.whl", hash = "sha256:a251254cf4fd6e7fb87c55c1feee92d32ddbc1f542ecdf6a0159cdb81c2fb62d", size = 459062, upload-time = "2025-05-08T16:58:33.356Z" }, ] +[[package]] +name = "sendgrid" +version = "6.12.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "python-http-client" }, + { name = "werkzeug" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, upload-time = "2025-06-12T10:29:35.457Z" }, +] + [[package]] name = "sentry-sdk" version = "2.28.0" @@ -5247,6 +5258,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763, upload-time = "2020-04-17T15:50:31.878Z" }, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, +] + [[package]] name = "soupsieve" version = "2.7" @@ -5258,40 +5278,31 @@ wheels = [ [[package]] name = 
"sqlalchemy" -version = "2.0.35" +version = "2.0.41" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/36/48/4f190a83525f5cefefa44f6adc9e6386c4de5218d686c27eda92eb1f5424/sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f", size = 9562798, upload-time = "2024-09-16T20:30:05.964Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/66/45b165c595ec89aa7dcc2c1cd222ab269bc753f1fc7a1e68f8481bd957bf/sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9", size = 9689424, upload-time = "2025-05-14T17:10:32.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/46/9215a35bf98c3a2528e987791e6180eb51624d2c7d5cb8e2d96a6450b657/SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60", size = 2091274, upload-time = "2024-09-16T21:07:13.344Z" }, - { url = "https://files.pythonhosted.org/packages/1e/69/919673c5101a0c633658d58b11b454b251ca82300941fba801201434755d/SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62", size = 2081672, upload-time = "2024-09-16T21:07:14.807Z" }, - { url = "https://files.pythonhosted.org/packages/67/ea/a6b0597cbda12796be2302153369dbbe90573fdab3bc4885f8efac499247/SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6", size = 3200083, upload-time = "2024-09-16T22:45:15.766Z" }, - { url = "https://files.pythonhosted.org/packages/8c/d6/97bdc8d714fb21762f2092511f380f18cdb2d985d516071fa925bb433a90/SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7", size = 3200080, upload-time = "2024-09-16T21:18:19.033Z" }, - { url = "https://files.pythonhosted.org/packages/87/d2/8c2adaf2ade4f6f1b725acd0b0be9210bb6a2df41024729a8eec6a86fe5a/SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71", size = 3137108, upload-time = "2024-09-16T22:45:19.167Z" }, - { url = "https://files.pythonhosted.org/packages/7e/ae/ea05d0bfa8f2b25ae34591895147152854fc950f491c4ce362ae06035db8/SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01", size = 3157437, upload-time = "2024-09-16T21:18:21.988Z" }, - { url = "https://files.pythonhosted.org/packages/fe/5d/8ad6df01398388a766163d27960b3365f1bbd8bb7b05b5cad321a8b69b25/SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e", size = 2061935, upload-time = "2024-09-16T20:54:10.564Z" }, - { url = "https://files.pythonhosted.org/packages/ff/68/8557efc0c32c8e2c147cb6512237448b8ed594a57cd015fda67f8e56bb3f/SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8", size = 2087281, upload-time = "2024-09-16T20:54:13.429Z" 
}, - { url = "https://files.pythonhosted.org/packages/2f/2b/fff87e6db0da31212c98bbc445f83fb608ea92b96bda3f3f10e373bac76c/SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2", size = 2089790, upload-time = "2024-09-16T21:07:16.161Z" }, - { url = "https://files.pythonhosted.org/packages/68/92/4bb761bd82764d5827bf6b6095168c40fb5dbbd23670203aef2f96ba6bc6/SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468", size = 2080266, upload-time = "2024-09-16T21:07:18.277Z" }, - { url = "https://files.pythonhosted.org/packages/22/46/068a65db6dc253c6f25a7598d99e0a1d60b14f661f9d09ef6c73c718fa4e/SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d", size = 3229760, upload-time = "2024-09-16T22:45:20.863Z" }, - { url = "https://files.pythonhosted.org/packages/6e/36/59830dafe40dda592304debd4cd86e583f63472f3a62c9e2695a5795e786/SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db", size = 3240649, upload-time = "2024-09-16T21:18:23.996Z" }, - { url = "https://files.pythonhosted.org/packages/00/50/844c50c6996f9c7f000c959dd1a7436a6c94e449ee113046a1d19e470089/SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c", size = 3176138, upload-time = "2024-09-16T22:45:22.518Z" }, - { url = "https://files.pythonhosted.org/packages/df/d2/336b18cac68eecb67de474fc15c85f13be4e615c6f5bae87ea38c6734ce0/SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8", size = 3202753, upload-time = "2024-09-16T21:18:25.966Z" }, - { url = "https://files.pythonhosted.org/packages/f0/f3/ee1e62fabdc10910b5ef720ae08e59bc785f26652876af3a50b89b97b412/SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf", size = 2060113, upload-time = "2024-09-16T20:54:15.16Z" }, - { url = "https://files.pythonhosted.org/packages/60/63/a3cef44a52979169d884f3583d0640e64b3c28122c096474a1d7cfcaf1f3/SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc", size = 2085839, upload-time = "2024-09-16T20:54:17.11Z" }, - { url = "https://files.pythonhosted.org/packages/0e/c6/33c706449cdd92b1b6d756b247761e27d32230fd6b2de5f44c4c3e5632b2/SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1", size = 1881276, upload-time = "2024-09-16T23:14:28.324Z" }, -] - -[[package]] -name = "sqlglot" -version = "26.24.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4d/f7/0fa9f9f2477c4e3d8e28b0f5e066f0e72343c29c8a302ee6a77579e8986b/sqlglot-26.24.0.tar.gz", hash = "sha256:e778ca9cb685b4fc34b59d50432c20f463c63ec90d0448fa91afa7f320a88518", size = 5371208, upload-time = "2025-05-30T08:44:06.516Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/11/6995759d913d714ff443478f5865b2616dbcd32b12764d02df9550d7a61e/sqlglot-26.24.0-py3-none-any.whl", hash = "sha256:81f7e47bb1b4b396c564359f47c7c1aee476575a0cadf84dc35f7189cab87f82", size = 464043, upload-time = "2025-05-30T08:44:00.801Z" }, 
+ { url = "https://files.pythonhosted.org/packages/37/4e/b00e3ffae32b74b5180e15d2ab4040531ee1bef4c19755fe7926622dc958/sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f", size = 2121232, upload-time = "2025-05-14T17:48:20.444Z" }, + { url = "https://files.pythonhosted.org/packages/ef/30/6547ebb10875302074a37e1970a5dce7985240665778cfdee2323709f749/sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560", size = 2110897, upload-time = "2025-05-14T17:48:21.634Z" }, + { url = "https://files.pythonhosted.org/packages/9e/21/59df2b41b0f6c62da55cd64798232d7349a9378befa7f1bb18cf1dfd510a/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f", size = 3273313, upload-time = "2025-05-14T17:51:56.205Z" }, + { url = "https://files.pythonhosted.org/packages/62/e4/b9a7a0e5c6f79d49bcd6efb6e90d7536dc604dab64582a9dec220dab54b6/sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6", size = 3273807, upload-time = "2025-05-14T17:55:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/39/d8/79f2427251b44ddee18676c04eab038d043cff0e764d2d8bb08261d6135d/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04", size = 3209632, upload-time = "2025-05-14T17:51:59.384Z" }, + { url = "https://files.pythonhosted.org/packages/d4/16/730a82dda30765f63e0454918c982fb7193f6b398b31d63c7c3bd3652ae5/sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582", size = 3233642, upload-time = "2025-05-14T17:55:29.901Z" }, + { url = "https://files.pythonhosted.org/packages/04/61/c0d4607f7799efa8b8ea3c49b4621e861c8f5c41fd4b5b636c534fcb7d73/sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8", size = 2086475, upload-time = "2025-05-14T17:56:02.095Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8e/8344f8ae1cb6a479d0741c02cd4f666925b2bf02e2468ddaf5ce44111f30/sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504", size = 2110903, upload-time = "2025-05-14T17:56:03.499Z" }, + { url = "https://files.pythonhosted.org/packages/3e/2a/f1f4e068b371154740dd10fb81afb5240d5af4aa0087b88d8b308b5429c2/sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9", size = 2119645, upload-time = "2025-05-14T17:55:24.854Z" }, + { url = "https://files.pythonhosted.org/packages/9b/e8/c664a7e73d36fbfc4730f8cf2bf930444ea87270f2825efbe17bf808b998/sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1", size = 2107399, upload-time = "2025-05-14T17:55:28.097Z" }, + { url = "https://files.pythonhosted.org/packages/5c/78/8a9cf6c5e7135540cb682128d091d6afa1b9e48bd049b0d691bf54114f70/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70", size = 3293269, upload-time = "2025-05-14T17:50:38.227Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/35/f74add3978c20de6323fb11cb5162702670cc7a9420033befb43d8d5b7a4/sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e", size = 3303364, upload-time = "2025-05-14T17:51:49.829Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d4/c990f37f52c3f7748ebe98883e2a0f7d038108c2c5a82468d1ff3eec50b7/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078", size = 3229072, upload-time = "2025-05-14T17:50:39.774Z" }, + { url = "https://files.pythonhosted.org/packages/15/69/cab11fecc7eb64bc561011be2bd03d065b762d87add52a4ca0aca2e12904/sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae", size = 3268074, upload-time = "2025-05-14T17:51:51.736Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ca/0c19ec16858585d37767b167fc9602593f98998a68a798450558239fb04a/sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6", size = 2084514, upload-time = "2025-05-14T17:55:49.915Z" }, + { url = "https://files.pythonhosted.org/packages/7f/23/4c2833d78ff3010a4e17f984c734f52b531a8c9060a50429c9d4b0211be6/sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0", size = 2111557, upload-time = "2025-05-14T17:55:51.349Z" }, + { url = "https://files.pythonhosted.org/packages/1c/fc/9ba22f01b5cdacc8f5ed0d22304718d2c758fce3fd49a5372b886a86f37c/sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576", size = 1911224, upload-time = "2025-05-14T17:39:42.154Z" }, ] [[package]] @@ -5594,11 +5605,11 @@ wheels = [ [[package]] name = "types-aiofiles" -version = "24.1.0.20250516" +version = "24.1.0.20250606" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/13/41faafda1d85fc8afa744ee67a2d788b3ba63e5f1c7303e5d10c2d784d2d/types_aiofiles-24.1.0.20250516.tar.gz", hash = "sha256:7fd2a7f793bbe180b7b22cd4f59300fe61fdc9940b3bbc9899ffe32849b95188", size = 14304, upload-time = "2025-05-16T03:08:29.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/64/6e/fac4ffc896cb3faf2ac5d23747b65dd8bae1d9ee23305d1a3b12111c3989/types_aiofiles-24.1.0.20250606.tar.gz", hash = "sha256:48f9e26d2738a21e0b0f19381f713dcdb852a36727da8414b1ada145d40a18fe", size = 14364, upload-time = "2025-06-06T03:09:26.515Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/e7/828287ba5d1107db1093a123fe8e481eb5aab55c911f1100f28d0df80d5a/types_aiofiles-24.1.0.20250516-py3-none-any.whl", hash = "sha256:ec265994629146804b656a971c46f393ce860305834b3cacb4b8b6fb7dba7e33", size = 14253, upload-time = "2025-05-16T03:08:28.67Z" }, + { url = "https://files.pythonhosted.org/packages/71/de/f2fa2ab8a5943898e93d8036941e05bfd1e1f377a675ee52c7c307dccb75/types_aiofiles-24.1.0.20250606-py3-none-any.whl", hash = "sha256:e568c53fb9017c80897a9aa15c74bf43b7ee90e412286ec1e0912b6e79301aee", size = 14276, upload-time = "2025-06-06T03:09:25.662Z" }, ] [[package]] @@ -5672,11 +5683,11 @@ wheels = [ [[package]] name = "types-docutils" -version = "0.21.0.20250526" +version = "0.21.0.20250604" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/34/bf/bb5695f7a9660f79a9cd999ea13ff7331b8f2d03aec3d2fd7c38be4bc8aa/types_docutils-0.21.0.20250526.tar.gz", hash = "sha256:6c7ba387716315df0d86a796baec9d5a71825ed2746cb7763193aafbb70ac86c", size = 38140, upload-time = "2025-05-26T03:10:49.242Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/d0/d28035370d669f14d4e23bd63d093207331f361afa24d2686d2c3fe6be8d/types_docutils-0.21.0.20250604.tar.gz", hash = "sha256:5a9cc7f5a4c5ef694aa0abc61111e0b1376a53dee90d65757f77f31acfcca8f2", size = 40953, upload-time = "2025-06-04T03:10:27.439Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/35/84/73bca8d1364f6685bd6e00eaa15e653ef96163231fbd7a612f3a845497fb/types_docutils-0.21.0.20250526-py3-none-any.whl", hash = "sha256:44d9f9ed19bb75071deb6804947c123f30bbc617a656420f044e09b9f16b72d1", size = 62000, upload-time = "2025-05-26T03:10:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/89/91/887e9591c1ee50dfbf7c2fa2f3f51bc6db683013b6d2b0cd3983adf3d502/types_docutils-0.21.0.20250604-py3-none-any.whl", hash = "sha256:bfa8628176c06a80cdd1d6f3fb32e972e042db53538596488dfe0e9c5962b222", size = 65915, upload-time = "2025-06-04T03:10:26.067Z" }, ] [[package]] @@ -5880,6 +5891,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c5/3f/b0e8db149896005adc938a1e7f371d6d7e9eca4053a29b108978ed15e0c2/types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93", size = 14356, upload-time = "2025-05-16T03:06:57.249Z" }, ] +[[package]] +name = "types-python-http-client" +version = "3.3.7.20240910" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/d7/bb2754c2d1b20c1890593ec89799c99e8875b04f474197c41354f41e9d31/types-python-http-client-3.3.7.20240910.tar.gz", hash = "sha256:8a6ebd30ad4b90a329ace69c240291a6176388624693bc971a5ecaa7e9b05074", size = 2804, upload-time = "2024-09-10T02:38:31.608Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/95/8f492d37d99630e096acbb4071788483282a34a73ae89dd1a5727f4189cc/types_python_http_client-3.3.7.20240910-py3-none-any.whl", hash = "sha256:58941bd986fb8bb0f4f782ef376be145ece8023f391364fbcd22bd26b13a140e", size = 3917, upload-time = "2024-09-10T02:38:30.261Z" }, +] + [[package]] name = "types-pytz" version = "2025.2.0.20250516" @@ -5918,14 +5938,14 @@ wheels = [ [[package]] name = "types-requests" -version = "2.32.0.20250602" +version = "2.32.4.20250611" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/b0/5321e6eeba5d59e4347fcf9bf06a5052f085c3aa0f4876230566d6a4dc97/types_requests-2.32.0.20250602.tar.gz", hash = "sha256:ee603aeefec42051195ae62ca7667cd909a2f8128fdf8aad9e8a5219ecfab3bf", size = 23042, upload-time = "2025-06-02T03:15:02.958Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/7f/73b3a04a53b0fd2a911d4ec517940ecd6600630b559e4505cc7b68beb5a0/types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826", size = 23118, upload-time = "2025-06-11T03:11:41.272Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/18/9b782980e575c6581d5c0c1c99f4c6f89a1d7173dad072ee96b2756c02e6/types_requests-2.32.0.20250602-py3-none-any.whl", hash = "sha256:f4f335f87779b47ce10b8b8597b409130299f6971ead27fead4fe7ba6ea3e726", size = 20638, upload-time = 
"2025-06-02T03:15:01.959Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ea/0be9258c5a4fa1ba2300111aa5a0767ee6d18eb3fd20e91616c12082284d/types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072", size = 20643, upload-time = "2025-06-11T03:11:40.186Z" }, ] [[package]] @@ -6204,34 +6224,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" }, ] -[[package]] -name = "uuid-utils" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/7f/7d83b937889d65682d95b40c94ba226b353d3f532290ee3acb17c8746e49/uuid_utils-0.11.0.tar.gz", hash = "sha256:18cf2b7083da7f3cca0517647213129eb16d20d7ed0dd74b3f4f8bff2aa334ea", size = 18854, upload-time = "2025-05-22T11:23:15.596Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/20/4a34f2a6e77b1f0f3334b111e4d2411fc8646ab2987892a36507e2d6a498/uuid_utils-0.11.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:094445ccd323bc5507e28e9d6d86b983513efcf19ab59c2dd75239cef765631a", size = 593779, upload-time = "2025-05-22T11:22:41.36Z" }, - { url = "https://files.pythonhosted.org/packages/a1/a1/1897cd3d37144f698392ec8aae89da2c00c6d34acd77f75312477f4510ab/uuid_utils-0.11.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6430b53d343215f85269ffd74e1d1f4b25ae1031acf0ac24ff3d5721f6a06f48", size = 300848, upload-time = "2025-05-22T11:22:43.221Z" }, - { url = "https://files.pythonhosted.org/packages/d4/36/3ae8896de8a5320a9e7529452ed29af0082daf8c3787f17c5cbf9defc651/uuid_utils-0.11.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be2e6e4318d23195887fa74fa1d64565a34f7127fdcf22918954981d79765f68", size = 336053, upload-time = "2025-05-22T11:22:44.741Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b6/751e84cd056074a40ca9ac21db6ca4802e31d78207309c0d9c8ff69cd43b/uuid_utils-0.11.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d37289ab72aa30b5550bfa64d91431c62c89e4969bdf989988aa97f918d5f803", size = 338529, upload-time = "2025-05-22T11:22:46.303Z" }, - { url = "https://files.pythonhosted.org/packages/3b/c2/f6a1c00a1b067a886fc57c24da46bb0bcb753c92afb898871c6df3ae606f/uuid_utils-0.11.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1012595220f945fe09641f1365a8a06915bf432cac1b31ebd262944934a9b787", size = 480378, upload-time = "2025-05-22T11:22:47.482Z" }, - { url = "https://files.pythonhosted.org/packages/60/ea/cefc0521e07a35e85416d145382ac4817957cdec037271d0c9e27cbc7d45/uuid_utils-0.11.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35cd3fc718a673e4516e87afb9325558969eca513aa734515b9031d1b651bbb1", size = 332220, upload-time = "2025-05-22T11:22:48.55Z" }, - { url = "https://files.pythonhosted.org/packages/03/91/5929f209bd4660a7e3b4d47d26189d3cf33e14297312a5f51f5451805fec/uuid_utils-0.11.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed325e0c40e0f59ae82b347f534df954b50cedf12bf60d025625538530e1965d", size = 359052, upload-time = "2025-05-22T11:22:49.617Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/0d/32034d5b13bc07dd95f23122cb743b4eeca8e6d88173ea3c7100c67b6269/uuid_utils-0.11.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5c8b7cf201990ee3140956e541967bd556a7365ec738cb504b04187ad89c757a", size = 515186, upload-time = "2025-05-22T11:22:50.808Z" }, - { url = "https://files.pythonhosted.org/packages/e7/43/ccf2474f723d6de5e214c22999ffb34219acf83d1e3fff6a4734172e10c0/uuid_utils-0.11.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:9966df55bed5d538ba2e9cc40115796480f437f9007727116ef99dc2f42bd5fa", size = 535318, upload-time = "2025-05-22T11:22:52.304Z" }, - { url = "https://files.pythonhosted.org/packages/fb/05/f668b4ad2b3542cd021c4b27d1ff4e425f854f299bcf7ee36f304399a58c/uuid_utils-0.11.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb04b6c604968424b7e6398d54debbdd5b771b39fc1e648c6eabf3f1dc20582e", size = 502691, upload-time = "2025-05-22T11:22:53.483Z" }, - { url = "https://files.pythonhosted.org/packages/9e/0b/b906301638eef837c89b19206989dbe27794c591d794ecc06167d9a47c41/uuid_utils-0.11.0-cp39-abi3-win32.whl", hash = "sha256:18420eb3316bb514f09f2da15750ac135478c3a12a704e2c5fb59eab642bb255", size = 180147, upload-time = "2025-05-22T11:22:54.598Z" }, - { url = "https://files.pythonhosted.org/packages/56/99/ad24ee5ecfc5fbd4a4490bb59c0e72ce604d5eef08683d345546ff6a6f2d/uuid_utils-0.11.0-cp39-abi3-win_amd64.whl", hash = "sha256:37c4805af61a7cce899597d34e7c3dd5cb6a8b4b93a90fbca3826b071ba544df", size = 183574, upload-time = "2025-05-22T11:22:55.581Z" }, - { url = "https://files.pythonhosted.org/packages/0e/76/2301b1d34defc8c234596ffb6e6d456cd7ef061d108e10a14ceda5ec5d4b/uuid_utils-0.11.0-cp39-abi3-win_arm64.whl", hash = "sha256:4065cf17bbe97f6d8ccc7dc6a0bae7d28fd4797d7f32028a5abd979aeb7bf7c9", size = 181014, upload-time = "2025-05-22T11:22:56.575Z" }, -] - [[package]] name = "uuid6" -version = "2024.7.10" +version = "2025.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/56/2560a9f1ccab9e12b1b3478a3c870796cf4d8ee5652bb19b61751cced14a/uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0", size = 8705, upload-time = "2024-07-10T16:39:37.592Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/49/06a089c184580f510e20226d9a081e4323d13db2fbc92d566697b5395c1e/uuid6-2025.0.0.tar.gz", hash = "sha256:bb78aa300e29db89b00410371d0c1f1824e59e29995a9daa3dedc8033d1d84ec", size = 13941, upload-time = "2025-06-11T20:02:05.324Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d3/3e/4ae6af487ce5781ed71d5fe10aca72e7cbc4d4f45afc31b120287082a8dd/uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7", size = 6376, upload-time = "2024-07-10T16:39:36.148Z" }, + { url = "https://files.pythonhosted.org/packages/0a/50/4da47101af45b6cfa291559577993b52ee4399b3cd54ba307574a11e4f3a/uuid6-2025.0.0-py3-none-any.whl", hash = "sha256:2c73405ff5333c7181443958c6865e0d1b9b816bb160549e8d80ba186263cb3a", size = 7001, upload-time = "2025-06-11T20:02:04.521Z" }, ] [[package]] @@ -6316,12 +6315,12 @@ wheels = [ [[package]] name = "wandb" -version = "0.19.11" +version = "0.20.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, - { name = "docker-pycreds" }, { name = "gitpython" }, + { name = "packaging" }, { name = "platformdirs" }, { name = "protobuf" }, { name = "psutil" }, @@ -6330,58 +6329,61 @@ dependencies = [ { name = "requests" }, { 
name = "sentry-sdk" }, { name = "setproctitle" }, - { name = "setuptools" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/98/0ff2925a21b998d4b84731429f4554ca3d9b5cad42c09c075e7306c3aca0/wandb-0.19.11.tar.gz", hash = "sha256:3f50a27dfadbb25946a513ffe856c0e8e538b5626ef207aa50b00c3b0356bff8", size = 39511477, upload-time = "2025-05-07T20:50:01.341Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/1f/92be0ca87fb49eb48c16dcf0845a3579a57c4734fec2b95862cf5a0494a0/wandb-0.20.1.tar.gz", hash = "sha256:dbd3fc60dfe7bf83c4de24b206b99b44949fef323f817a783883db72fc5f3bfe", size = 40320062, upload-time = "2025-06-05T00:00:24.483Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/2c/f8bab58c73fdde4442f1baffd9ea5d1bb3113906a97a27e8d9ab72db7a69/wandb-0.19.11-py3-none-any.whl", hash = "sha256:ff3bf050ba25ebae7aedc9a775ffab90c28068832edfe5458423f488c2558f82", size = 6481327, upload-time = "2025-05-07T20:49:33.461Z" }, - { url = "https://files.pythonhosted.org/packages/45/4a/34b364280f690f4c6d7660f528fba9f13bdecabc4c869d266a4632cf836e/wandb-0.19.11-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:0823fd9aa6343f40c04e01959997ca8c6d6adf1bd81c8d45261fa4915f1c6b67", size = 20555751, upload-time = "2025-05-07T20:49:36.392Z" }, - { url = "https://files.pythonhosted.org/packages/d8/e6/a27868fdb83a60df37b9d15e52c3353dd88d74442f27ae48cf765c6b9554/wandb-0.19.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c758ef5439599d9023db5b3cf1698477055d82f9fae48af2779f63f1d289167c", size = 20377587, upload-time = "2025-05-07T20:49:39.126Z" }, - { url = "https://files.pythonhosted.org/packages/21/f7/d5cf5b58c2b3015364c7b2b6af6a440cbeda4103b67332e1e64b30f6252d/wandb-0.19.11-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:de2dfd4911e7691735e271654c735e7b90cdee9d29a3796fbf06e9e92d48f3d7", size = 20985041, upload-time = "2025-05-07T20:49:41.571Z" }, - { url = "https://files.pythonhosted.org/packages/68/06/8b827f16a0b8f18002d2fffa7c5a7fd447946e0d0c68aeec0dd7eb18cdd3/wandb-0.19.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfff738850770d26b13f8f3fe400a6456f1e39e87f3f29d5aa241b249476df95", size = 20017696, upload-time = "2025-05-07T20:49:44.04Z" }, - { url = "https://files.pythonhosted.org/packages/f9/31/eeb2878b26566c04c3e9b8b20b3ec3c54a2be50535088d36a37c008e07a3/wandb-0.19.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8ff673007448df11cc69379ae0df28ead866800dc1ec7bc151b402db0bbcf40", size = 21425857, upload-time = "2025-05-07T20:49:46.347Z" }, - { url = "https://files.pythonhosted.org/packages/10/30/08988360678ae78334bb16625c28260fcaba49f500b89f8766807cb74d71/wandb-0.19.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:858bc5023fa1b3285d89d15f62be78afdb28301064daa49ea3f4ebde5dcedad2", size = 20023145, upload-time = "2025-05-07T20:49:48.965Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e9/a639c42c8ca517c4d25e8970d64d0c5a9bd35b784faed5f47d9cca3dcd12/wandb-0.19.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90e4b57649896acb16c3dd41b3093df1a169c2f1d94ff15d76af86b8a60dcdac", size = 21504842, upload-time = "2025-05-07T20:49:51.628Z" }, - { url = "https://files.pythonhosted.org/packages/44/74/dbe9277dd935b77dd16939cdf15357766fec0813a6e336cf5f1d07eb016e/wandb-0.19.11-py3-none-win32.whl", hash = "sha256:38dea43c7926d8800405a73b80b9adfe81eb315fc6f2ac6885c77eb966634421", size = 20767584, upload-time = "2025-05-07T20:49:56.629Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/d5/215cac3edec5c5ac6e7231beb9d22466d5d4e4a132fa3a1d044f7d682c15/wandb-0.19.11-py3-none-win_amd64.whl", hash = "sha256:73402003c56ddc2198878492ab2bff55bb49bce5587eae5960e737d27c0c48f7", size = 20767588, upload-time = "2025-05-07T20:49:58.85Z" }, + { url = "https://files.pythonhosted.org/packages/c9/18/afcc37d0b93dd6f6d0f0c5683b9cfff9416ae1539931f58932a2938c0070/wandb-0.20.1-py3-none-any.whl", hash = "sha256:e6395cabf074247042be1cf0dc6ab0b06aa4c9538c2e1fdc5b507a690ce0cf17", size = 6458872, upload-time = "2025-06-04T23:59:55.441Z" }, + { url = "https://files.pythonhosted.org/packages/e6/b5/70f9e2a3d1380b729ae5853763d938edc50072df357f79bbd19b9aae8e3f/wandb-0.20.1-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:2475a48c693adf677d40da9e1c8ceeaf86d745ffc3b7e3535731279d02f9e845", size = 22517483, upload-time = "2025-06-04T23:59:58.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/7e/4eb9aeb2fd974d410a8f2eb11b0219536503913a050d46a03206151705c8/wandb-0.20.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:99cce804c31ec1e0d1e691650a7d51773ed7329c41745d56384fa3655a0e9b2c", size = 22034511, upload-time = "2025-06-05T00:00:01.301Z" }, + { url = "https://files.pythonhosted.org/packages/34/38/1df22c2273e6f7ab0aae4fd032085d6d92ab112f5b261646e7dc5e675cfe/wandb-0.20.1-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:ce3ee412677a1679e04b21e03a91e1e02eb90faf658d682bee86c33cf5f32e09", size = 22720771, upload-time = "2025-06-05T00:00:04.122Z" }, + { url = "https://files.pythonhosted.org/packages/38/96/78fc7a7ea7158d136c84f481423f8736c9346a2387287ec8a6d92019975c/wandb-0.20.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e58ca32c7147161158f09b0fb5f5896876f8569d0d10ae7b64d0510c868ce33", size = 21537453, upload-time = "2025-06-05T00:00:09.474Z" }, + { url = "https://files.pythonhosted.org/packages/88/c9/41b8bdb493e5eda32b502bc1cc49d539335a92cacaf0ef304d7dae0240aa/wandb-0.20.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:591506ecbdd396648cc323ba270f3ab4aed3158e1dbfa7636c09f9f7f0253e1c", size = 23161349, upload-time = "2025-06-05T00:00:11.903Z" }, + { url = "https://files.pythonhosted.org/packages/7d/f2/79e783cc50a47d373dfbda862eb5396de8139167e8c6443a16ef0166106f/wandb-0.20.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:382508532db09893f81cc926b1d333caa4c8a7db057878899fadf929bbdb3b56", size = 21550624, upload-time = "2025-06-05T00:00:14.28Z" }, + { url = "https://files.pythonhosted.org/packages/26/32/23890a726302e7be28bda9fff47ce9b491af64e339aba4d32b3b8d1a7aaf/wandb-0.20.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:29ea495e49393db860f17437fe37e48018da90436ce10949b471780f09293bd7", size = 23237996, upload-time = "2025-06-05T00:00:16.647Z" }, + { url = "https://files.pythonhosted.org/packages/af/94/296e520b086b2a4f10e99bcea3cd5856421b9c004824663501e3789a713b/wandb-0.20.1-py3-none-win32.whl", hash = "sha256:455ee0a652e59ab1e4b546fa1dc833dd3063aa7e64eb8abf95d22f0e9f08c574", size = 22518456, upload-time = "2025-06-05T00:00:19.006Z" }, + { url = "https://files.pythonhosted.org/packages/52/5f/c44ad7b2a062ca5f4da99ae475cea274c38f6ec37bdaca1b1c653ee87274/wandb-0.20.1-py3-none-win_amd64.whl", hash = "sha256:6d2431652f096b7e394c29a99135a6441c02ed3198b963f0b351a5b5e56aeca0", size = 22518459, upload-time = "2025-06-05T00:00:21.374Z" }, ] [[package]] name = "watchfiles" -version = "1.0.5" +version = "1.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = 
{ url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537, upload-time = "2025-04-08T10:36:26.722Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/9a/d451fcc97d029f5812e898fd30a53fd8c15c7bbd058fd75cfc6beb9bd761/watchfiles-1.1.0.tar.gz", hash = "sha256:693ed7ec72cbfcee399e92c895362b6e66d63dac6b91e2c11ae03d10d503e575", size = 94406, upload-time = "2025-06-15T19:06:59.42Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/f4/41b591f59021786ef517e1cdc3b510383551846703e03f204827854a96f8/watchfiles-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:237f9be419e977a0f8f6b2e7b0475ababe78ff1ab06822df95d914a945eac827", size = 405336, upload-time = "2025-04-08T10:34:59.359Z" }, - { url = "https://files.pythonhosted.org/packages/ae/06/93789c135be4d6d0e4f63e96eea56dc54050b243eacc28439a26482b5235/watchfiles-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0da39ff917af8b27a4bdc5a97ac577552a38aac0d260a859c1517ea3dc1a7c4", size = 395977, upload-time = "2025-04-08T10:35:00.522Z" }, - { url = "https://files.pythonhosted.org/packages/d2/db/1cd89bd83728ca37054512d4d35ab69b5f12b8aa2ac9be3b0276b3bf06cc/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfcb3952350e95603f232a7a15f6c5f86c5375e46f0bd4ae70d43e3e063c13d", size = 455232, upload-time = "2025-04-08T10:35:01.698Z" }, - { url = "https://files.pythonhosted.org/packages/40/90/d8a4d44ffe960517e487c9c04f77b06b8abf05eb680bed71c82b5f2cad62/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b2dddba7a4e6151384e252a5632efcaa9bc5d1c4b567f3cb621306b2ca9f63", size = 459151, upload-time = "2025-04-08T10:35:03.358Z" }, - { url = "https://files.pythonhosted.org/packages/6c/da/267a1546f26465dead1719caaba3ce660657f83c9d9c052ba98fb8856e13/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cf944fcfc394c5f9de794ce581914900f82ff1f855326f25ebcf24d5397418", size = 489054, upload-time = "2025-04-08T10:35:04.561Z" }, - { url = "https://files.pythonhosted.org/packages/b1/31/33850dfd5c6efb6f27d2465cc4c6b27c5a6f5ed53c6fa63b7263cf5f60f6/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf6cd9f83d7c023b1aba15d13f705ca7b7d38675c121f3cc4a6e25bd0857ee9", size = 523955, upload-time = "2025-04-08T10:35:05.786Z" }, - { url = "https://files.pythonhosted.org/packages/09/84/b7d7b67856efb183a421f1416b44ca975cb2ea6c4544827955dfb01f7dc2/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852de68acd6212cd6d33edf21e6f9e56e5d98c6add46f48244bd479d97c967c6", size = 502234, upload-time = "2025-04-08T10:35:07.187Z" }, - { url = "https://files.pythonhosted.org/packages/71/87/6dc5ec6882a2254cfdd8b0718b684504e737273903b65d7338efaba08b52/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5730f3aa35e646103b53389d5bc77edfbf578ab6dab2e005142b5b80a35ef25", size = 454750, upload-time = "2025-04-08T10:35:08.859Z" }, - { url = "https://files.pythonhosted.org/packages/3d/6c/3786c50213451a0ad15170d091570d4a6554976cf0df19878002fc96075a/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:18b3bd29954bc4abeeb4e9d9cf0b30227f0f206c86657674f544cb032296acd5", size = 631591, upload-time = "2025-04-08T10:35:10.64Z" }, - { url = 
"https://files.pythonhosted.org/packages/1b/b3/1427425ade4e359a0deacce01a47a26024b2ccdb53098f9d64d497f6684c/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ba5552a1b07c8edbf197055bc9d518b8f0d98a1c6a73a293bc0726dce068ed01", size = 625370, upload-time = "2025-04-08T10:35:12.412Z" }, - { url = "https://files.pythonhosted.org/packages/15/ba/f60e053b0b5b8145d682672024aa91370a29c5c921a88977eb565de34086/watchfiles-1.0.5-cp311-cp311-win32.whl", hash = "sha256:2f1fefb2e90e89959447bc0420fddd1e76f625784340d64a2f7d5983ef9ad246", size = 277791, upload-time = "2025-04-08T10:35:13.719Z" }, - { url = "https://files.pythonhosted.org/packages/50/ed/7603c4e164225c12c0d4e8700b64bb00e01a6c4eeea372292a3856be33a4/watchfiles-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:b6e76ceb1dd18c8e29c73f47d41866972e891fc4cc7ba014f487def72c1cf096", size = 291622, upload-time = "2025-04-08T10:35:15.071Z" }, - { url = "https://files.pythonhosted.org/packages/a2/c2/99bb7c96b4450e36877fde33690ded286ff555b5a5c1d925855d556968a1/watchfiles-1.0.5-cp311-cp311-win_arm64.whl", hash = "sha256:266710eb6fddc1f5e51843c70e3bebfb0f5e77cf4f27129278c70554104d19ed", size = 283699, upload-time = "2025-04-08T10:35:16.732Z" }, - { url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511, upload-time = "2025-04-08T10:35:17.956Z" }, - { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715, upload-time = "2025-04-08T10:35:19.202Z" }, - { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138, upload-time = "2025-04-08T10:35:20.586Z" }, - { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592, upload-time = "2025-04-08T10:35:21.87Z" }, - { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532, upload-time = "2025-04-08T10:35:23.143Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865, upload-time = "2025-04-08T10:35:24.702Z" }, - { url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887, upload-time = "2025-04-08T10:35:25.969Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498, upload-time = "2025-04-08T10:35:27.353Z" }, - { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663, upload-time = "2025-04-08T10:35:28.685Z" }, - { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410, upload-time = "2025-04-08T10:35:30.42Z" }, - { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965, upload-time = "2025-04-08T10:35:32.023Z" }, - { url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693, upload-time = "2025-04-08T10:35:33.225Z" }, - { url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287, upload-time = "2025-04-08T10:35:34.568Z" }, + { url = "https://files.pythonhosted.org/packages/8b/78/7401154b78ab484ccaaeef970dc2af0cb88b5ba8a1b415383da444cdd8d3/watchfiles-1.1.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:c9649dfc57cc1f9835551deb17689e8d44666315f2e82d337b9f07bd76ae3aa2", size = 405751, upload-time = "2025-06-15T19:05:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/76/63/e6c3dbc1f78d001589b75e56a288c47723de28c580ad715eb116639152b5/watchfiles-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:406520216186b99374cdb58bc48e34bb74535adec160c8459894884c983a149c", size = 397313, upload-time = "2025-06-15T19:05:08.764Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a2/8afa359ff52e99af1632f90cbf359da46184207e893a5f179301b0c8d6df/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45350fd1dc75cd68d3d72c47f5b513cb0578da716df5fba02fff31c69d5f2d", size = 450792, upload-time = "2025-06-15T19:05:09.869Z" }, + { url = "https://files.pythonhosted.org/packages/1d/bf/7446b401667f5c64972a57a0233be1104157fc3abf72c4ef2666c1bd09b2/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:11ee4444250fcbeb47459a877e5e80ed994ce8e8d20283857fc128be1715dac7", size = 458196, upload-time = "2025-06-15T19:05:11.91Z" }, + { url = "https://files.pythonhosted.org/packages/58/2f/501ddbdfa3fa874ea5597c77eeea3d413579c29af26c1091b08d0c792280/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda8136e6a80bdea23e5e74e09df0362744d24ffb8cd59c4a95a6ce3d142f79c", size = 484788, upload-time = "2025-06-15T19:05:13.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/1e/9c18eb2eb5c953c96bc0e5f626f0e53cfef4bd19bd50d71d1a049c63a575/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b915daeb2d8c1f5cee4b970f2e2c988ce6514aace3c9296e58dd64dc9aa5d575", size = 597879, upload-time = "2025-06-15T19:05:14.725Z" }, + { url = "https://files.pythonhosted.org/packages/8b/6c/1467402e5185d89388b4486745af1e0325007af0017c3384cc786fff0542/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed8fc66786de8d0376f9f913c09e963c66e90ced9aa11997f93bdb30f7c872a8", size = 477447, upload-time = "2025-06-15T19:05:15.775Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a1/ec0a606bde4853d6c4a578f9391eeb3684a9aea736a8eb217e3e00aa89a1/watchfiles-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe4371595edf78c41ef8ac8df20df3943e13defd0efcb732b2e393b5a8a7a71f", size = 453145, upload-time = "2025-06-15T19:05:17.17Z" }, + { url = "https://files.pythonhosted.org/packages/90/b9/ef6f0c247a6a35d689fc970dc7f6734f9257451aefb30def5d100d6246a5/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b7c5f6fe273291f4d414d55b2c80d33c457b8a42677ad14b4b47ff025d0893e4", size = 626539, upload-time = "2025-06-15T19:05:18.557Z" }, + { url = "https://files.pythonhosted.org/packages/34/44/6ffda5537085106ff5aaa762b0d130ac6c75a08015dd1621376f708c94de/watchfiles-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7738027989881e70e3723c75921f1efa45225084228788fc59ea8c6d732eb30d", size = 624472, upload-time = "2025-06-15T19:05:19.588Z" }, + { url = "https://files.pythonhosted.org/packages/c3/e3/71170985c48028fa3f0a50946916a14055e741db11c2e7bc2f3b61f4d0e3/watchfiles-1.1.0-cp311-cp311-win32.whl", hash = "sha256:622d6b2c06be19f6e89b1d951485a232e3b59618def88dbeda575ed8f0d8dbf2", size = 279348, upload-time = "2025-06-15T19:05:20.856Z" }, + { url = "https://files.pythonhosted.org/packages/89/1b/3e39c68b68a7a171070f81fc2561d23ce8d6859659406842a0e4bebf3bba/watchfiles-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:48aa25e5992b61debc908a61ab4d3f216b64f44fdaa71eb082d8b2de846b7d12", size = 292607, upload-time = "2025-06-15T19:05:21.937Z" }, + { url = "https://files.pythonhosted.org/packages/61/9f/2973b7539f2bdb6ea86d2c87f70f615a71a1fc2dba2911795cea25968aea/watchfiles-1.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:00645eb79a3faa70d9cb15c8d4187bb72970b2470e938670240c7998dad9f13a", size = 285056, upload-time = "2025-06-15T19:05:23.12Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/858957045a38a4079203a33aaa7d23ea9269ca7761c8a074af3524fbb240/watchfiles-1.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9dc001c3e10de4725c749d4c2f2bdc6ae24de5a88a339c4bce32300a31ede179", size = 402339, upload-time = "2025-06-15T19:05:24.516Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/98b222cca751ba68e88521fabd79a4fab64005fc5976ea49b53fa205d1fa/watchfiles-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9ba68ec283153dead62cbe81872d28e053745f12335d037de9cbd14bd1877f5", size = 394409, upload-time = "2025-06-15T19:05:25.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/50/dee79968566c03190677c26f7f47960aff738d32087087bdf63a5473e7df/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130fc497b8ee68dce163e4254d9b0356411d1490e868bd8790028bc46c5cc297", size = 450939, upload-time = "2025-06-15T19:05:26.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/45/a7b56fb129700f3cfe2594a01aa38d033b92a33dddce86c8dfdfc1247b72/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50a51a90610d0845a5931a780d8e51d7bd7f309ebc25132ba975aca016b576a0", size = 457270, upload-time = "2025-06-15T19:05:27.466Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c8/fa5ef9476b1d02dc6b5e258f515fcaaecf559037edf8b6feffcbc097c4b8/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc44678a72ac0910bac46fa6a0de6af9ba1355669b3dfaf1ce5f05ca7a74364e", size = 483370, upload-time = "2025-06-15T19:05:28.548Z" }, + { url = "https://files.pythonhosted.org/packages/98/68/42cfcdd6533ec94f0a7aab83f759ec11280f70b11bfba0b0f885e298f9bd/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a543492513a93b001975ae283a51f4b67973662a375a403ae82f420d2c7205ee", size = 598654, upload-time = "2025-06-15T19:05:29.997Z" }, + { url = "https://files.pythonhosted.org/packages/d3/74/b2a1544224118cc28df7e59008a929e711f9c68ce7d554e171b2dc531352/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac164e20d17cc285f2b94dc31c384bc3aa3dd5e7490473b3db043dd70fbccfd", size = 478667, upload-time = "2025-06-15T19:05:31.172Z" }, + { url = "https://files.pythonhosted.org/packages/8c/77/e3362fe308358dc9f8588102481e599c83e1b91c2ae843780a7ded939a35/watchfiles-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7590d5a455321e53857892ab8879dce62d1f4b04748769f5adf2e707afb9d4f", size = 452213, upload-time = "2025-06-15T19:05:32.299Z" }, + { url = "https://files.pythonhosted.org/packages/6e/17/c8f1a36540c9a1558d4faf08e909399e8133599fa359bf52ec8fcee5be6f/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37d3d3f7defb13f62ece99e9be912afe9dd8a0077b7c45ee5a57c74811d581a4", size = 626718, upload-time = "2025-06-15T19:05:33.415Z" }, + { url = "https://files.pythonhosted.org/packages/26/45/fb599be38b4bd38032643783d7496a26a6f9ae05dea1a42e58229a20ac13/watchfiles-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7080c4bb3efd70a07b1cc2df99a7aa51d98685be56be6038c3169199d0a1c69f", size = 623098, upload-time = "2025-06-15T19:05:34.534Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/fdf40e038475498e160cd167333c946e45d8563ae4dd65caf757e9ffe6b4/watchfiles-1.1.0-cp312-cp312-win32.whl", hash = "sha256:cbcf8630ef4afb05dc30107bfa17f16c0896bb30ee48fc24bf64c1f970f3b1fd", size = 279209, upload-time = "2025-06-15T19:05:35.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d3/3ae9d5124ec75143bdf088d436cba39812122edc47709cd2caafeac3266f/watchfiles-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:cbd949bdd87567b0ad183d7676feb98136cde5bb9025403794a4c0db28ed3a47", size = 292786, upload-time = "2025-06-15T19:05:36.559Z" }, + { url = "https://files.pythonhosted.org/packages/26/2f/7dd4fc8b5f2b34b545e19629b4a018bfb1de23b3a496766a2c1165ca890d/watchfiles-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:0a7d40b77f07be87c6faa93d0951a0fcd8cbca1ddff60a1b65d741bac6f3a9f6", size = 284343, upload-time = "2025-06-15T19:05:37.5Z" }, + { url = "https://files.pythonhosted.org/packages/8c/6b/686dcf5d3525ad17b384fd94708e95193529b460a1b7bf40851f1328ec6e/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0ece16b563b17ab26eaa2d52230c9a7ae46cf01759621f4fbbca280e438267b3", size = 406910, upload-time = "2025-06-15T19:06:49.335Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/d3/71c2dcf81dc1edcf8af9f4d8d63b1316fb0a2dd90cbfd427e8d9dd584a90/watchfiles-1.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:51b81e55d40c4b4aa8658427a3ee7ea847c591ae9e8b81ef94a90b668999353c", size = 398816, upload-time = "2025-06-15T19:06:50.433Z" }, + { url = "https://files.pythonhosted.org/packages/b8/fa/12269467b2fc006f8fce4cd6c3acfa77491dd0777d2a747415f28ccc8c60/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2bcdc54ea267fe72bfc7d83c041e4eb58d7d8dc6f578dfddb52f037ce62f432", size = 451584, upload-time = "2025-06-15T19:06:51.834Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d3/254cea30f918f489db09d6a8435a7de7047f8cb68584477a515f160541d6/watchfiles-1.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:923fec6e5461c42bd7e3fd5ec37492c6f3468be0499bc0707b4bbbc16ac21792", size = 454009, upload-time = "2025-06-15T19:06:52.896Z" }, ] [[package]] @@ -6395,7 +6397,7 @@ wheels = [ [[package]] name = "weave" -version = "0.51.50" +version = "0.51.54" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -6409,12 +6411,11 @@ dependencies = [ { name = "pydantic" }, { name = "rich" }, { name = "tenacity" }, - { name = "uuid-utils" }, { name = "wandb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fe/d1/47ab7923eb389ec7b1ca0138d9929dc50bfd0dfac324f42167f99fa02798/weave-0.51.50.tar.gz", hash = "sha256:773434765a3230bf8f4dfe9e04f9c7dfd90b03f18bb5e069186ce67d1f7c4dd8", size = 410739, upload-time = "2025-06-02T21:20:45.208Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/bdac08ae2fa7f660e3fb02e9f4acec5a5683509decd8fbd1ad5641160d3a/weave-0.51.54.tar.gz", hash = "sha256:41aaaa770c0ac2259325dd6035e1bf96f47fb92dbd4eec54d3ef4847587cc061", size = 425873, upload-time = "2025-06-16T21:57:47.582Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/4a/72ed6f8435759f44090c8ae81d3a50716f0c3e527e733a78e77b9a834372/weave-0.51.50-py3-none-any.whl", hash = "sha256:23fb74ec95f57fe30f31019f32f6626f39993aa228024eb9cf8fe83e58b698de", size = 524057, upload-time = "2025-06-02T21:20:43.205Z" }, + { url = "https://files.pythonhosted.org/packages/48/4d/7cee23e5bf5faab149aeb7cca367a434c4aec1fa0cb1f5a1d20149a2bf6f/weave-0.51.54-py3-none-any.whl", hash = "sha256:7de2c0da8061bc007de2f74fb3dd2496d24337dff3723f057be49fcf53e0a3a2", size = 542168, upload-time = "2025-06-16T21:57:44.929Z" }, ] [[package]] @@ -6548,11 +6549,11 @@ wheels = [ [[package]] name = "xlrd" -version = "2.0.1" +version = "2.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a6/b3/19a2540d21dea5f908304375bd43f5ed7a4c28a370dc9122c565423e6b44/xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88", size = 100259, upload-time = "2020-12-11T10:14:22.201Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/5a/377161c2d3538d1990d7af382c79f3b2372e880b65de21b01b1a2b78691e/xlrd-2.0.2.tar.gz", hash = "sha256:08b5e25de58f21ce71dc7db3b3b8106c1fa776f3024c54e45b45b374e89234c9", size = 100167, upload-time = "2025-06-14T08:46:39.039Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/0c/c2a72d51fe56e08a08acc85d13013558a2d793028ae7385448a6ccdfae64/xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd", size = 96531, upload-time = 
"2020-12-11T10:14:20.877Z" }, + { url = "https://files.pythonhosted.org/packages/1a/62/c8d562e7766786ba6587d09c5a8ba9f718ed3fa8af7f4553e8f91c36f302/xlrd-2.0.2-py2.py3-none-any.whl", hash = "sha256:ea762c3d29f4cca48d82df517b6d89fbce4db3107f9d78713e48cd321d5c9aa9", size = 96555, upload-time = "2025-06-14T08:46:37.766Z" }, ] [[package]] @@ -6621,11 +6622,11 @@ wheels = [ [[package]] name = "zipp" -version = "3.22.0" +version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/b6/7b3d16792fdf94f146bed92be90b4eb4563569eca91513c8609aebf0c167/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5", size = 25257, upload-time = "2025-05-26T14:46:32.217Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ad/da/f64669af4cae46f17b90798a827519ce3737d31dbafad65d391e49643dc4/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343", size = 9796, upload-time = "2025-05-26T14:46:30.775Z" }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ] [[package]] diff --git a/docker/.env.example b/docker/.env.example index 195446b7ba..275da8e2e4 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -399,7 +399,7 @@ SUPABASE_URL=your-server-url # ------------------------------ # The type of vector store to use. -# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`. +# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`. VECTOR_STORE=weaviate # The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`. @@ -490,6 +490,13 @@ TIDB_VECTOR_USER= TIDB_VECTOR_PASSWORD= TIDB_VECTOR_DATABASE=dify +# Matrixone vector configurations. +MATRIXONE_HOST=matrixone +MATRIXONE_PORT=6001 +MATRIXONE_USER=dump +MATRIXONE_PASSWORD=111 +MATRIXONE_DATABASE=dify + # Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant` TIDB_ON_QDRANT_URL=http://127.0.0.1 TIDB_ON_QDRANT_API_KEY=dify @@ -719,10 +726,11 @@ NOTION_INTERNAL_SECRET= # Mail related configuration # ------------------------------ -# Mail type, support: resend, smtp +# Mail type, support: resend, smtp, sendgrid MAIL_TYPE=resend # Default send from email address, if not specified +# If using SendGrid, use the 'from' field for authentication if necessary. MAIL_DEFAULT_SEND_FROM= # API-Key for the Resend email provider, used when MAIL_TYPE is `resend`. 
@@ -738,6 +746,9 @@ SMTP_PASSWORD= SMTP_USE_TLS=true SMTP_OPPORTUNISTIC_TLS=false +# SendGrid configuration +SENDGRID_API_KEY= + # ------------------------------ # Others Configuration # ------------------------------ @@ -787,6 +798,9 @@ HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760 HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 HTTP_REQUEST_NODE_SSL_VERIFY=True +# Respect X-* headers to redirect clients +RESPECT_XFORWARD_HEADERS_ENABLED=false + # SSRF Proxy server HTTP URL SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128 # SSRF Proxy server HTTPS URL @@ -815,7 +829,8 @@ TEXT_GENERATION_TIMEOUT_MS=60000 # Environment Variables for db Service # ------------------------------ -PGUSER=${DB_USERNAME} +# The name of the default postgres user. +POSTGRES_USER=${DB_USERNAME} # The password for the default postgres user. POSTGRES_PASSWORD=${DB_PASSWORD} # The name of the default postgres database. diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 158ede88cf..a6a4ed959a 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.4.3 + image: langgenius/dify-api:1.5.0 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.4.3 + image: langgenius/dify-api:1.5.0 restart: always environment: # Use the shared environment variables. @@ -57,7 +57,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.4.3 + image: langgenius/dify-web:1.5.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -84,7 +84,7 @@ services: image: postgres:15-alpine restart: always environment: - PGUSER: ${PGUSER:-postgres} + POSTGRES_USER: ${POSTGRES_USER:-postgres} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456} POSTGRES_DB: ${POSTGRES_DB:-dify} PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata} @@ -617,6 +617,18 @@ services: ports: - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123} + # Matrixone vector store.
+ matrixone: + hostname: matrixone + image: matrixorigin/matrixone:2.1.1 + profiles: + - matrixone + restart: always + volumes: + - ./volumes/matrixone/data:/mo-data + ports: + - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001} + # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites elasticsearch: diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 99aa87bcc0..0019835357 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -195,6 +195,11 @@ x-shared-env: &shared-api-worker-env TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-} TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-} TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify} + MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone} + MATRIXONE_PORT: ${MATRIXONE_PORT:-6001} + MATRIXONE_USER: ${MATRIXONE_USER:-dump} + MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111} + MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify} TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1} TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify} TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20} @@ -322,6 +327,7 @@ x-shared-env: &shared-api-worker-env SMTP_PASSWORD: ${SMTP_PASSWORD:-} SMTP_USE_TLS: ${SMTP_USE_TLS:-true} SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false} + SENDGRID_API_KEY: ${SENDGRID_API_KEY:-} INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000} INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72} RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5} @@ -349,6 +355,7 @@ x-shared-env: &shared-api-worker-env HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True} + RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false} SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128} SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100} @@ -356,7 +363,7 @@ x-shared-env: &shared-api-worker-env MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10} MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} - PGUSER: ${PGUSER:-${DB_USERNAME}} + POSTGRES_USER: ${POSTGRES_USER:-${DB_USERNAME}} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}} POSTGRES_DB: ${POSTGRES_DB:-${DB_DATABASE}} PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata} @@ -509,7 +516,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.4.3 + image: langgenius/dify-api:1.5.0 restart: always environment: # Use the shared environment variables. @@ -538,7 +545,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.4.3 + image: langgenius/dify-api:1.5.0 restart: always environment: # Use the shared environment variables. @@ -564,7 +571,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:1.4.3 + image: langgenius/dify-web:1.5.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -591,7 +598,7 @@ services: image: postgres:15-alpine restart: always environment: - PGUSER: ${PGUSER:-postgres} + POSTGRES_USER: ${POSTGRES_USER:-postgres} POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-difyai123456} POSTGRES_DB: ${POSTGRES_DB:-dify} PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata} @@ -1124,6 +1131,18 @@ services: ports: - ${MYSCALE_PORT:-8123}:${MYSCALE_PORT:-8123} + # Matrixone vector store. + matrixone: + hostname: matrixone + image: matrixorigin/matrixone:2.1.1 + profiles: + - matrixone + restart: always + volumes: + - ./volumes/matrixone/data:/mo-data + ports: + - ${MATRIXONE_PORT:-6001}:${MATRIXONE_PORT:-6001} + # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites elasticsearch: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index f261d88d48..2eba62f594 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -1,7 +1,7 @@ # ------------------------------ # Environment Variables for db Service # ------------------------------ -PGUSER=postgres +POSTGRES_USER=postgres # The password for the default postgres user. POSTGRES_PASSWORD=difyai123456 # The name of the default postgres database. diff --git a/images/GitHub_README_if.png b/images/GitHub_README_if.png index 2a4e67264e..10c9d87b08 100644 Binary files a/images/GitHub_README_if.png and b/images/GitHub_README_if.png differ diff --git a/tests/unit_tests/events/test_provider_update_deadlock_prevention.py b/tests/unit_tests/events/test_provider_update_deadlock_prevention.py new file mode 100644 index 0000000000..47c175acd7 --- /dev/null +++ b/tests/unit_tests/events/test_provider_update_deadlock_prevention.py @@ -0,0 +1,248 @@ +import threading +from unittest.mock import Mock, patch + +from core.app.entities.app_invoke_entities import ChatAppGenerateEntity +from core.entities.provider_entities import QuotaUnit +from events.event_handlers.update_provider_when_message_created import ( + handle, + get_update_stats, +) +from models.provider import ProviderType +from sqlalchemy.exc import OperationalError + + +class TestProviderUpdateDeadlockPrevention: + """Test suite for deadlock prevention in Provider updates.""" + + def setup_method(self): + """Setup test fixtures.""" + self.mock_message = Mock() + self.mock_message.answer_tokens = 100 + + self.mock_app_config = Mock() + self.mock_app_config.tenant_id = "test-tenant-123" + + self.mock_model_conf = Mock() + self.mock_model_conf.provider = "openai" + + self.mock_system_config = Mock() + self.mock_system_config.current_quota_type = QuotaUnit.TOKENS + + self.mock_provider_config = Mock() + self.mock_provider_config.using_provider_type = ProviderType.SYSTEM + self.mock_provider_config.system_configuration = self.mock_system_config + + self.mock_provider_bundle = Mock() + self.mock_provider_bundle.configuration = self.mock_provider_config + + self.mock_model_conf.provider_model_bundle = self.mock_provider_bundle + + self.mock_generate_entity = Mock(spec=ChatAppGenerateEntity) + self.mock_generate_entity.app_config = self.mock_app_config + self.mock_generate_entity.model_conf = self.mock_model_conf + + @patch("events.event_handlers.update_provider_when_message_created.db") + def test_consolidated_handler_basic_functionality(self, mock_db): + """Test that the 
consolidated handler performs both updates correctly.""" + # Setup mock query chain + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 # 1 row affected + + # Call the handler + handle(self.mock_message, application_generate_entity=self.mock_generate_entity) + + # Verify db.session.query was called + assert mock_db.session.query.called + + # Verify commit was called + mock_db.session.commit.assert_called_once() + + # Verify no rollback was called + assert not mock_db.session.rollback.called + + @patch("events.event_handlers.update_provider_when_message_created.db") + def test_deadlock_retry_mechanism(self, mock_db): + """Test that deadlock errors trigger retry logic.""" + # Setup mock to raise deadlock error on first attempt, succeed on second + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + # First call raises deadlock, second succeeds + mock_db.session.commit.side_effect = [ + OperationalError("deadlock detected", None, None), + None, # Success on retry + ] + + # Call the handler + handle(self.mock_message, application_generate_entity=self.mock_generate_entity) + + # Verify commit was called twice (original + retry) + assert mock_db.session.commit.call_count == 2 + + # Verify rollback was called once (after first failure) + mock_db.session.rollback.assert_called_once() + + @patch("events.event_handlers.update_provider_when_message_created.db") + @patch("events.event_handlers.update_provider_when_message_created.time.sleep") + def test_exponential_backoff_timing(self, mock_sleep, mock_db): + """Test that retry delays follow exponential backoff pattern.""" + # Setup mock to fail twice, succeed on third attempt + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + mock_db.session.commit.side_effect = [ + OperationalError("deadlock detected", None, None), + OperationalError("deadlock detected", None, None), + None, # Success on third attempt + ] + + # Call the handler + handle(self.mock_message, application_generate_entity=self.mock_generate_entity) + + # Verify sleep was called twice with increasing delays + assert mock_sleep.call_count == 2 + + # First delay should be around 0.1s + jitter + first_delay = mock_sleep.call_args_list[0][0][0] + assert 0.1 <= first_delay <= 0.3 + + # Second delay should be around 0.2s + jitter + second_delay = mock_sleep.call_args_list[1][0][0] + assert 0.2 <= second_delay <= 0.4 + + def test_concurrent_handler_execution(self): + """Test that multiple handlers can run concurrently without deadlock.""" + results = [] + errors = [] + + def run_handler(): + try: + with patch( + "events.event_handlers.update_provider_when_message_created.db" + ) as mock_db: + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + handle( + self.mock_message, + application_generate_entity=self.mock_generate_entity, + ) + results.append("success") + except Exception as e: + errors.append(str(e)) + + # Run multiple handlers concurrently + threads = [] + for _ in range(5): + thread = 
threading.Thread(target=run_handler) + threads.append(thread) + thread.start() + + # Wait for all threads to complete + for thread in threads: + thread.join(timeout=5) + + # Verify all handlers completed successfully + assert len(results) == 5 + assert len(errors) == 0 + + def test_performance_stats_tracking(self): + """Test that performance statistics are tracked correctly.""" + # Reset stats + stats = get_update_stats() + initial_total = stats["total_updates"] + + with patch( + "events.event_handlers.update_provider_when_message_created.db" + ) as mock_db: + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + # Call handler + handle( + self.mock_message, application_generate_entity=self.mock_generate_entity + ) + + # Check that stats were updated + updated_stats = get_update_stats() + assert updated_stats["total_updates"] == initial_total + 1 + assert updated_stats["successful_updates"] >= initial_total + 1 + + def test_non_chat_entity_ignored(self): + """Test that non-chat entities are ignored by the handler.""" + # Create a non-chat entity + mock_non_chat_entity = Mock() + mock_non_chat_entity.__class__.__name__ = "NonChatEntity" + + with patch( + "events.event_handlers.update_provider_when_message_created.db" + ) as mock_db: + # Call handler with non-chat entity + handle(self.mock_message, application_generate_entity=mock_non_chat_entity) + + # Verify no database operations were performed + assert not mock_db.session.query.called + assert not mock_db.session.commit.called + + @patch("events.event_handlers.update_provider_when_message_created.db") + def test_quota_calculation_tokens(self, mock_db): + """Test quota calculation for token-based quotas.""" + # Setup token-based quota + self.mock_system_config.current_quota_type = QuotaUnit.TOKENS + self.mock_message.answer_tokens = 150 + + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + # Call handler + handle(self.mock_message, application_generate_entity=self.mock_generate_entity) + + # Verify update was called with token count + update_calls = mock_query.update.call_args_list + + # Should have at least one call with quota_used update + quota_update_found = False + for call in update_calls: + values = call[0][0] # First argument to update() + if "quota_used" in values: + quota_update_found = True + break + + assert quota_update_found + + @patch("events.event_handlers.update_provider_when_message_created.db") + def test_quota_calculation_times(self, mock_db): + """Test quota calculation for times-based quotas.""" + # Setup times-based quota + self.mock_system_config.current_quota_type = QuotaUnit.TIMES + + mock_query = Mock() + mock_db.session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_query.order_by.return_value = mock_query + mock_query.update.return_value = 1 + + # Call handler + handle(self.mock_message, application_generate_entity=self.mock_generate_entity) + + # Verify update was called + assert mock_query.update.called + assert mock_db.session.commit.called diff --git a/web/.env.example b/web/.env.example index 78b4f33e8c..c30064ffed 100644 --- a/web/.env.example +++ b/web/.env.example @@ -56,3 +56,5 @@ NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER=true NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL=true 
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL=true +# The maximum tree node depth for workflow +NEXT_PUBLIC_MAX_TREE_DEPTH=50 diff --git a/web/app/(commonLayout)/datasets/Datasets.tsx b/web/app/(commonLayout)/datasets/Datasets.tsx index 28461e8617..2d4848e92e 100644 --- a/web/app/(commonLayout)/datasets/Datasets.tsx +++ b/web/app/(commonLayout)/datasets/Datasets.tsx @@ -81,7 +81,7 @@ const Datasets = ({ currentContainer?.removeEventListener('scroll', onScroll) onScroll.cancel() } - }, [onScroll]) + }, [containerRef, onScroll]) return (