diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index 7c632f8a34..b1cf41a226 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -39,7 +39,7 @@ jobs: api/pyproject.toml api/poetry.lock - - name: Poetry check + - name: Check Poetry lockfile run: | poetry check -C api --lock poetry show -C api @@ -47,6 +47,9 @@ jobs: - name: Install dependencies run: poetry install -C api --with dev + - name: Check dependencies in pyproject.toml + run: poetry run -C api bash dev/pytest/pytest_artifacts.sh + - name: Run Unit tests run: poetry run -C api bash dev/pytest/pytest_unit_tests.sh diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 407bd47d9b..6daaaf5791 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -125,7 +125,7 @@ jobs: with: images: ${{ env[matrix.image_name_env] }} tags: | - type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') }} + type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-') }} type=ref,event=branch type=sha,enable=true,priority=100,prefix=,suffix=,format=long type=raw,value=${{ github.ref_name }},enable=${{ startsWith(github.ref, 'refs/tags/') }} diff --git a/README.md b/README.md index 1c49c415fe..75094d39db 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ alt="chat on Discord"> follow on Twitter + alt="follow on X(Twitter)"> Docker Pulls @@ -196,10 +196,14 @@ If you'd like to configure a highly-available setup, there are community-contrib #### Using Terraform for Deployment +Deploy Dify to Cloud Platform with a single click using [terraform](https://www.terraform.io/) + ##### Azure Global -Deploy Dify to Azure with a single click using [terraform](https://www.terraform.io/). - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) + ## Contributing For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). @@ -219,7 +223,7 @@ At the same time, please consider supporting Dify by sharing it on social media * [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions. * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. -* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. +* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. ## Star history diff --git a/README_AR.md b/README_AR.md index 10d572cc49..e46ba73738 100644 --- a/README_AR.md +++ b/README_AR.md @@ -17,7 +17,7 @@ alt="chat on Discord"> follow on Twitter + alt="follow on X(Twitter)"> Docker Pulls @@ -179,10 +179,13 @@ docker compose up -d #### استخدام Terraform للتوزيع +انشر Dify إلى منصة السحابة بنقرة واحدة باستخدام [terraform](https://www.terraform.io/) + ##### Azure Global -استخدم [terraform](https://www.terraform.io/) لنشر Dify على Azure بنقرة واحدة. 
- [Azure Terraform بواسطة @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform بواسطة @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) ## المساهمة diff --git a/README_CN.md b/README_CN.md index 32551fcc31..4553524ce5 100644 --- a/README_CN.md +++ b/README_CN.md @@ -17,7 +17,7 @@ alt="chat on Discord"> follow on Twitter + alt="follow on X(Twitter)"> Docker Pulls @@ -202,10 +202,14 @@ docker compose up -d #### 使用 Terraform 部署 +使用 [terraform](https://www.terraform.io/) 一键将 Dify 部署到云平台 + ##### Azure Global -使用 [terraform](https://www.terraform.io/) 一键部署 Dify 到 Azure。 - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) + ## Star History [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date) @@ -232,7 +236,7 @@ docker compose up -d - [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。 - [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 -- [Twitter](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 +- [X(Twitter)](https://twitter.com/dify_ai)。👉:分享您的应用程序并与社区交流。 - [商业许可](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry)。👉:有关商业用途许可 Dify.AI 的商业咨询。 - [微信]() 👉:扫描下方二维码,添加微信好友,备注 Dify,我们将邀请您加入 Dify 社区。 wechat diff --git a/README_ES.md b/README_ES.md index 2ae044b328..7da5ac7b61 100644 --- a/README_ES.md +++ b/README_ES.md @@ -17,7 +17,7 @@ alt="chat en Discord"> seguir en Twitter + alt="seguir en X(Twitter)"> Descargas de Docker @@ -204,10 +204,13 @@ Si desea configurar una configuración de alta disponibilidad, la comunidad prop #### Uso de Terraform para el despliegue +Despliega Dify en una plataforma en la nube con un solo clic utilizando [terraform](https://www.terraform.io/) + ##### Azure Global -Utiliza [terraform](https://www.terraform.io/) para desplegar Dify en Azure con un solo clic. - [Azure Terraform por @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform por @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) ## Contribuir @@ -228,7 +231,7 @@ Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en * [Discusión en GitHub](https://github.com/langgenius/dify/discussions). Lo mejor para: compartir comentarios y hacer preguntas. * [Reporte de problemas en GitHub](https://github.com/langgenius/dify/issues). Lo mejor para: errores que encuentres usando Dify.AI y propuestas de características. Consulta nuestra [Guía de contribución](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [Discord](https://discord.gg/FngNHpbcY7). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. -* [Twitter](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. +* [X(Twitter)](https://twitter.com/dify_ai). Lo mejor para: compartir tus aplicaciones y pasar el rato con la comunidad. 
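The build-push.yml hunk above gates the `latest` Docker tag: the `enable` expression now also requires that the ref contains no `-`, so pre-release tags (e.g. `0.9.1-beta.1`) no longer overwrite `latest`. A minimal Python sketch of that predicate, for illustration only (the function name and sample refs are assumptions, not part of the workflow):

```python
def should_publish_latest(github_ref: str) -> bool:
    """Mirror of the workflow condition:
    startsWith(github.ref, 'refs/tags/') && !contains(github.ref, '-')"""
    return github_ref.startswith("refs/tags/") and "-" not in github_ref


assert should_publish_latest("refs/tags/0.9.1")              # stable tag: publish latest
assert not should_publish_latest("refs/tags/0.9.1-beta.1")   # pre-release tag: skip latest
assert not should_publish_latest("refs/heads/main")          # branch push: skip latest
```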
## Historial de Estrellas diff --git a/README_FR.md b/README_FR.md index 681d596749..15f6f2650f 100644 --- a/README_FR.md +++ b/README_FR.md @@ -17,7 +17,7 @@ alt="chat sur Discord"> suivre sur Twitter + alt="suivre sur X(Twitter)"> Tirages Docker @@ -202,10 +202,13 @@ Si vous souhaitez configurer une configuration haute disponibilité, la communau #### Utilisation de Terraform pour le déploiement +Déployez Dify sur une plateforme cloud en un clic en utilisant [terraform](https://www.terraform.io/) + ##### Azure Global -Utilisez [terraform](https://www.terraform.io/) pour déployer Dify sur Azure en un clic. - [Azure Terraform par @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform par @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) ## Contribuer @@ -226,7 +229,7 @@ Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur le * [Discussion GitHub](https://github.com/langgenius/dify/discussions). Meilleur pour: partager des commentaires et poser des questions. * [Problèmes GitHub](https://github.com/langgenius/dify/issues). Meilleur pour: les bogues que vous rencontrez en utilisant Dify.AI et les propositions de fonctionnalités. Consultez notre [Guide de contribution](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [Discord](https://discord.gg/FngNHpbcY7). Meilleur pour: partager vos applications et passer du temps avec la communauté. -* [Twitter](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. +* [X(Twitter)](https://twitter.com/dify_ai). Meilleur pour: partager vos applications et passer du temps avec la communauté. ## Historique des étoiles diff --git a/README_JA.md b/README_JA.md index e6a8621e7b..a2e6b173f5 100644 --- a/README_JA.md +++ b/README_JA.md @@ -17,7 +17,7 @@ alt="Discordでチャット"> Twitterでフォロー + alt="X(Twitter)でフォロー"> Docker Pulls @@ -68,7 +68,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ プロンプトの作成、モデルパフォーマンスの比較が行え、チャットベースのアプリに音声合成などの機能も追加できます。 **4. RAGパイプライン**: - ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサーポイントも提供します。 + ドキュメントの取り込みから検索までをカバーする広範なRAG機能ができます。ほかにもPDF、PPT、その他の一般的なドキュメントフォーマットからのテキスト抽出のサポートも提供します。 **5. エージェント機能**: LLM Function CallingやReActに基づくエージェントの定義が可能で、AIエージェント用のプリビルトまたはカスタムツールを追加できます。Difyには、Google検索、DALL·E、Stable Diffusion、WolframAlphaなどのAIエージェント用の50以上の組み込みツールが提供します。 @@ -201,10 +201,13 @@ docker compose up -d #### Terraformを使用したデプロイ -##### Azure Global -[terraform](https://www.terraform.io/) を使用して、AzureにDifyをワンクリックでデプロイします。 -- [nikawangのAzure Terraform](https://github.com/nikawang/dify-azure-terraform) +[terraform](https://www.terraform.io/) を使用して、ワンクリックでDifyをクラウドプラットフォームにデプロイします +##### Azure Global +- [@nikawangによるAzure Terraform](https://github.com/nikawang/dify-azure-terraform) + +##### Google Cloud +- [@sotazumによるGoogle Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform) ## 貢献 @@ -225,7 +228,7 @@ docker compose up -d * [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 * [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください * [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 -* [Twitter](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 +* [X(Twitter)](https://twitter.com/dify_ai). 
主に: アプリケーションの共有やコミュニティとの交流。 diff --git a/README_KL.md b/README_KL.md index 04620d42bb..8f2affdce5 100644 --- a/README_KL.md +++ b/README_KL.md @@ -17,7 +17,7 @@ alt="chat on Discord"> follow on Twitter + alt="follow on X(Twitter)"> Docker Pulls @@ -202,10 +202,13 @@ If you'd like to configure a highly-available setup, there are community-contrib #### Terraform atorlugu pilersitsineq -##### Azure Global -Atoruk [terraform](https://www.terraform.io/) Dify-mik Azure-mut ataatsikkut ikkussuilluarlugu. -- [Azure Terraform atorlugu @nikawang](https://github.com/nikawang/dify-azure-terraform) +wa'logh nIqHom neH ghun deployment toy'wI' [terraform](https://www.terraform.io/) lo'laH. +##### Azure Global +- [Azure Terraform mung @nikawang](https://github.com/nikawang/dify-azure-terraform) + +##### Google Cloud +- [Google Cloud Terraform qachlot @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) ## Contributing @@ -228,7 +231,7 @@ At the same time, please consider supporting Dify by sharing it on social media ). Best for: sharing feedback and asking questions. * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. -* [Twitter](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. +* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. ## Star History diff --git a/README_KR.md b/README_KR.md index a5f3bc68d0..6c3a9ed7f6 100644 --- a/README_KR.md +++ b/README_KR.md @@ -17,7 +17,7 @@ alt="chat on Discord"> follow on Twitter + alt="follow on X(Twitter)"> Docker Pulls @@ -39,7 +39,6 @@ README بالعربية Türkçe README README Tiếng Việt -

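Looking ahead to the console auth changes below (api/controllers/console/auth/login.py): login now returns an access/refresh token pair, and a new `/refresh-token` resource rotates it. A hedged client-side sketch of that flow; the base URL is an assumption for local development, while the `{"result": ..., "data": {...}}` envelope and field names follow the endpoint code:

```python
import requests

CONSOLE_API = "http://localhost:5001/console/api"  # assumed local console API prefix


def refresh_tokens(refresh_token: str) -> tuple[str, str]:
    """Exchange a refresh token for a fresh access/refresh token pair.

    RefreshTokenApi answers 401 with {"result": "fail"} when the token is rejected.
    """
    resp = requests.post(f"{CONSOLE_API}/refresh-token", json={"refresh_token": refresh_token})
    resp.raise_for_status()
    data = resp.json()["data"]  # token_pair.model_dump(): access_token + refresh_token
    return data["access_token"], data["refresh_token"]
```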
@@ -195,10 +194,14 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 #### Terraform을 사용한 배포 +[terraform](https://www.terraform.io/)을 사용하여 단 한 번의 클릭으로 Dify를 클라우드 플랫폼에 배포하십시오 + ##### Azure Global -[terraform](https://www.terraform.io/)을 사용하여 Azure에 Dify를 원클릭으로 배포하세요. - [nikawang의 Azure Terraform](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [sotazum의 Google Cloud Terraform](https://github.com/DeNA/dify-google-cloud-terraform) + ## 기여 코드에 기여하고 싶은 분들은 [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. diff --git a/README_TR.md b/README_TR.md index 54b6db3f82..a75889e576 100644 --- a/README_TR.md +++ b/README_TR.md @@ -17,7 +17,7 @@ alt="Discord'da sohbet et"> Twitter'da takip et + alt="X(Twitter)'da takip et"> Docker Çekmeleri @@ -200,9 +200,13 @@ Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify' #### Dağıtım için Terraform Kullanımı +Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.terraform.io/) kullanarak + ##### Azure Global -[Terraform](https://www.terraform.io/) kullanarak Dify'ı Azure'a tek tıklamayla dağıtın. -- [@nikawang tarafından Azure Terraform](https://github.com/nikawang/dify-azure-terraform) +- [Azure Terraform tarafından @nikawang](https://github.com/nikawang/dify-azure-terraform) + +##### Google Cloud +- [Google Cloud Terraform tarafından @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) ## Katkıda Bulunma @@ -222,7 +226,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p * [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. * [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. * [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. -* [Twitter](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. +* [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. ## Star history diff --git a/README_VI.md b/README_VI.md index 6d4035eceb..8d49e49766 100644 --- a/README_VI.md +++ b/README_VI.md @@ -17,7 +17,7 @@ alt="chat trên Discord"> theo dõi trên Twitter + alt="theo dõi trên X(Twitter)"> Docker Pulls @@ -196,10 +196,14 @@ Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có #### Sử dụng Terraform để Triển khai +Triển khai Dify lên nền tảng đám mây với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/) + ##### Azure Global -Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách sử dụng [terraform](https://www.terraform.io/). - [Azure Terraform bởi @nikawang](https://github.com/nikawang/dify-azure-terraform) +##### Google Cloud +- [Google Cloud Terraform bởi @sotazum](https://github.com/DeNA/dify-google-cloud-terraform) + ## Đóng góp Đối với những người muốn đóng góp mã, xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi. @@ -219,7 +223,7 @@ Triển khai Dify lên Azure chỉ với một cú nhấp chuột bằng cách s * [Thảo luận GitHub](https://github.com/langgenius/dify/discussions). Tốt nhất cho: chia sẻ phản hồi và đặt câu hỏi. * [Vấn đề GitHub](https://github.com/langgenius/dify/issues). 
Tốt nhất cho: lỗi bạn gặp phải khi sử dụng Dify.AI và đề xuất tính năng. Xem [Hướng dẫn Đóng góp](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) của chúng tôi.
 * [Discord](https://discord.gg/FngNHpbcY7). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
-* [Twitter](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
+* [X(Twitter)](https://twitter.com/dify_ai). Tốt nhất cho: chia sẻ ứng dụng của bạn và giao lưu với cộng đồng.
 
 ## Lịch sử Yêu thích
 
diff --git a/api/.env.example b/api/.env.example
index a3db406aea..aa155003ab 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -20,6 +20,9 @@ FILES_URL=http://127.0.0.1:5001
 # The time in seconds after the signature is rejected
 FILES_ACCESS_TIMEOUT=300
 
+# Access token expiration time in minutes
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+
 # celery configuration
 CELERY_BROKER_URL=redis://:difyai123456@localhost:6379/1
 
@@ -39,7 +42,7 @@ DB_DATABASE=dify
 
 # Storage configuration
 # use for store upload files, private keys...
-# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos
+# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs, supabase
 STORAGE_TYPE=local
 STORAGE_LOCAL_PATH=storage
 S3_USE_AWS_MANAGED_IAM=false
@@ -79,6 +82,12 @@ HUAWEI_OBS_SECRET_KEY=your-secret-key
 HUAWEI_OBS_ACCESS_KEY=your-access-key
 HUAWEI_OBS_SERVER=your-server-url
 
+# Baidu OBS Storage Configuration
+BAIDU_OBS_BUCKET_NAME=your-bucket-name
+BAIDU_OBS_SECRET_KEY=your-secret-key
+BAIDU_OBS_ACCESS_KEY=your-access-key
+BAIDU_OBS_ENDPOINT=your-server-url
+
 # OCI Storage configuration
 OCI_ENDPOINT=your-endpoint
 OCI_BUCKET_NAME=your-bucket-name
@@ -93,11 +102,16 @@ VOLCENGINE_TOS_ACCESS_KEY=your-access-key
 VOLCENGINE_TOS_SECRET_KEY=your-secret-key
 VOLCENGINE_TOS_REGION=your-region
 
+# Supabase Storage Configuration
+SUPABASE_BUCKET_NAME=your-bucket-name
+SUPABASE_API_KEY=your-access-key
+SUPABASE_URL=your-server-url
+
 # CORS configuration
 WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
 
-# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector
+# Vector database configuration, support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, chroma, opensearch, tidb_vector, vikingdb, baidu
 VECTOR_STORE=weaviate
 
 # Weaviate configuration
@@ -162,6 +176,8 @@ PGVECTOR_PORT=5433
 PGVECTOR_USER=postgres
 PGVECTOR_PASSWORD=postgres
 PGVECTOR_DATABASE=postgres
+PGVECTOR_MIN_CONNECTION=1
+PGVECTOR_MAX_CONNECTION=5
 
 # Tidb Vector configuration
 TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
@@ -195,6 +211,24 @@ OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin
 OPENSEARCH_SECURE=true
 
+# Baidu configuration
+BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
+BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
+BAIDU_VECTOR_DB_ACCOUNT=root
+BAIDU_VECTOR_DB_API_KEY=dify
+BAIDU_VECTOR_DB_DATABASE=dify
+BAIDU_VECTOR_DB_SHARD=1
+BAIDU_VECTOR_DB_REPLICAS=3
+
+# ViKingDB configuration
+VIKINGDB_ACCESS_KEY=your-ak
+VIKINGDB_SECRET_KEY=your-sk
+VIKINGDB_REGION=cn-shanghai
+VIKINGDB_HOST=api-vikingdb.xxx.volces.com
+VIKINGDB_SCHEME=http
+VIKINGDB_CONNECTION_TIMEOUT=30
+VIKINGDB_SOCKET_TIMEOUT=30
+
 # Upload configuration
 UPLOAD_FILE_SIZE_LIMIT=15
 UPLOAD_FILE_BATCH_LIMIT=5
@@ -263,6 +297,9 @@ HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
 HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576 +# Respect X-* headers to redirect clients +RESPECT_XFORWARD_HEADERS_ENABLED=false + # Log file path LOG_FILE= diff --git a/api/app.py b/api/app.py index 1b58beee15..52dd492225 100644 --- a/api/app.py +++ b/api/app.py @@ -26,7 +26,7 @@ from commands import register_commands from configs import dify_config # DO NOT REMOVE BELOW -from events import event_handlers +from events import event_handlers # noqa: F401 from extensions import ( ext_celery, ext_code_based_extension, @@ -36,6 +36,7 @@ from extensions import ( ext_login, ext_mail, ext_migrate, + ext_proxy_fix, ext_redis, ext_sentry, ext_storage, @@ -45,7 +46,7 @@ from extensions.ext_login import login_manager from libs.passport import PassportService # TODO: Find a way to avoid importing models here -from models import account, dataset, model, source, task, tool, tools, web +from models import account, dataset, model, source, task, tool, tools, web # noqa: F401 from services.account_service import AccountService # DO NOT REMOVE ABOVE @@ -156,6 +157,7 @@ def initialize_extensions(app): ext_mail.init_app(app) ext_hosting_provider.init_app(app) ext_sentry.init_app(app) + ext_proxy_fix.init_app(app) # Flask-Login configuration @@ -181,10 +183,10 @@ def load_user_from_request(request_from_flask_login): decoded = PassportService().verify(auth_token) user_id = decoded.get("user_id") - account = AccountService.load_logged_in_account(account_id=user_id, token=auth_token) - if account: - contexts.tenant_id.set(account.current_tenant_id) - return account + logged_in_account = AccountService.load_logged_in_account(account_id=user_id) + if logged_in_account: + contexts.tenant_id.set(logged_in_account.current_tenant_id) + return logged_in_account @login_manager.unauthorized_handler diff --git a/api/commands.py b/api/commands.py index 7ef4aed7f7..dbcd8a744d 100644 --- a/api/commands.py +++ b/api/commands.py @@ -347,6 +347,14 @@ def migrate_knowledge_vector_database(): index_name = Dataset.gen_collection_name_by_id(dataset_id) index_struct_dict = {"type": "elasticsearch", "vector_store": {"class_prefix": index_name}} dataset.index_struct = json.dumps(index_struct_dict) + elif vector_type == VectorType.BAIDU: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id) + index_struct_dict = { + "type": VectorType.BAIDU, + "vector_store": {"class_prefix": collection_name}, + } + dataset.index_struct = json.dumps(index_struct_dict) else: raise ValueError(f"Vector store {vector_type} is not supported.") diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index 9218d529cc..a3334d1634 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -247,6 +247,12 @@ class HttpConfig(BaseSettings): default=None, ) + RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field( + description="Enable or disable the X-Forwarded-For Proxy Fix middleware from Werkzeug" + " to respect X-* headers to redirect clients", + default=False, + ) + class InnerAPIConfig(BaseSettings): """ @@ -354,9 +360,9 @@ class WorkflowConfig(BaseSettings): ) -class OAuthConfig(BaseSettings): +class AuthConfig(BaseSettings): """ - Configuration for OAuth authentication + Configuration for authentication and OAuth """ OAUTH_REDIRECT_PATH: str = Field( @@ -365,7 +371,7 @@ class OAuthConfig(BaseSettings): ) GITHUB_CLIENT_ID: Optional[str] = Field( - description="GitHub OAuth client secret", + description="GitHub OAuth client ID", default=None, ) @@ -384,6 +390,11 @@ class 
OAuthConfig(BaseSettings):
         default=None,
     )
 
+    ACCESS_TOKEN_EXPIRE_MINUTES: PositiveInt = Field(
+        description="Expiration time for access tokens in minutes",
+        default=60,
+    )
+
 
 class ModerationConfig(BaseSettings):
     """
@@ -601,6 +612,7 @@ class PositionConfig(BaseSettings):
 class FeatureConfig(
     # place the configs in alphabet order
     AppExecutionConfig,
+    AuthConfig,
     BillingConfig,
     CodeExecutionSandboxConfig,
     DataSetConfig,
@@ -615,14 +627,13 @@ class FeatureConfig(
     MailConfig,
     ModelLoadBalanceConfig,
     ModerationConfig,
-    OAuthConfig,
+    PositionConfig,
     RagEtlConfig,
     SecurityConfig,
     ToolConfig,
     UpdateConfig,
     WorkflowConfig,
     WorkspaceConfig,
-    PositionConfig,
     # hosted services config
     HostedServiceConfig,
     CeleryBeatConfig,
diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py
index 6ad216c191..fa7f41d630 100644
--- a/api/configs/middleware/__init__.py
+++ b/api/configs/middleware/__init__.py
@@ -8,9 +8,12 @@ from configs.middleware.cache.redis_config import RedisConfig
 from configs.middleware.storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
 from configs.middleware.storage.amazon_s3_storage_config import S3StorageConfig
 from configs.middleware.storage.azure_blob_storage_config import AzureBlobStorageConfig
+from configs.middleware.storage.baidu_obs_storage_config import BaiduOBSStorageConfig
 from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
 from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
 from configs.middleware.storage.oci_storage_config import OCIStorageConfig
+from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig
 from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
 from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
 from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
+from configs.middleware.vdb.baidu_vector_config import BaiduVectorDBConfig
@@ -26,6 +29,7 @@ from configs.middleware.vdb.qdrant_config import QdrantConfig
 from configs.middleware.vdb.relyt_config import RelytConfig
 from configs.middleware.vdb.tencent_vector_config import TencentVectorDBConfig
 from configs.middleware.vdb.tidb_vector_config import TiDBVectorConfig
+from configs.middleware.vdb.vikingdb_config import VikingDBConfig
 from configs.middleware.vdb.weaviate_config import WeaviateConfig
 
 
@@ -190,6 +194,22 @@ class CeleryConfig(DatabaseConfig):
         return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
 
 
+class InternalTestConfig(BaseSettings):
+    """
+    Configuration settings for Internal Test
+    """
+
+    AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
+        description="Internal test AWS secret access key",
+        default=None,
+    )
+
+    AWS_ACCESS_KEY_ID: Optional[str] = Field(
+        description="Internal test AWS access key ID",
+        default=None,
+    )
+
+
 class MiddlewareConfig(
     # place the configs in alphabet order
     CeleryConfig,
@@ -200,12 +220,14 @@ class MiddlewareConfig(
     StorageConfig,
     AliyunOSSStorageConfig,
     AzureBlobStorageConfig,
+    BaiduOBSStorageConfig,
     GoogleCloudStorageConfig,
-    TencentCloudCOSStorageConfig,
     HuaweiCloudOBSStorageConfig,
-    VolcengineTOSStorageConfig,
-    S3StorageConfig,
     OCIStorageConfig,
+    S3StorageConfig,
+    SupabaseStorageConfig,
+    TencentCloudCOSStorageConfig,
+    VolcengineTOSStorageConfig,
     # configs of vdb and vdb providers
     VectorStoreConfig,
     AnalyticdbConfig,
@@ -222,5 +244,8 @@ class MiddlewareConfig(
     TiDBVectorConfig,
     WeaviateConfig,
     ElasticsearchConfig,
+    InternalTestConfig,
+    VikingDBConfig,
+    BaiduVectorDBConfig,
 ):
     pass
diff --git a/api/configs/middleware/storage/baidu_obs_storage_config.py b/api/configs/middleware/storage/baidu_obs_storage_config.py
new file mode 100644
index 0000000000..c511628a15
--- /dev/null
+++ b/api/configs/middleware/storage/baidu_obs_storage_config.py
@@ -0,0 +1,29 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class BaiduOBSStorageConfig(BaseModel):
+    """
+    Configuration settings for Baidu Object Storage Service (OBS)
+    """
+
+    BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
+        description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
+        default=None,
+    )
+
+    BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
+        description="Access Key ID for authenticating with Baidu OBS",
+        default=None,
+    )
+
+    BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
+        description="Secret Access Key for authenticating with Baidu OBS",
+        default=None,
+    )
+
+    BAIDU_OBS_ENDPOINT: Optional[str] = Field(
+        description="URL of the Baidu OBS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
+        default=None,
+    )
diff --git a/api/configs/middleware/storage/supabase_storage_config.py b/api/configs/middleware/storage/supabase_storage_config.py
new file mode 100644
index 0000000000..a3e905b21c
--- /dev/null
+++ b/api/configs/middleware/storage/supabase_storage_config.py
@@ -0,0 +1,24 @@
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class SupabaseStorageConfig(BaseModel):
+    """
+    Configuration settings for Supabase Object Storage Service
+    """
+
+    SUPABASE_BUCKET_NAME: Optional[str] = Field(
+        description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
+        default=None,
+    )
+
+    SUPABASE_API_KEY: Optional[str] = Field(
+        description="API key for authenticating with Supabase",
+        default=None,
+    )
+
+    SUPABASE_URL: Optional[str] = Field(
+        description="URL of the Supabase project",
+        default=None,
+    )
diff --git a/api/configs/middleware/vdb/baidu_vector_config.py b/api/configs/middleware/vdb/baidu_vector_config.py
new file mode 100644
index 0000000000..44742c2e2f
--- /dev/null
+++ b/api/configs/middleware/vdb/baidu_vector_config.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class BaiduVectorDBConfig(BaseSettings):
+    """
+    Configuration settings for Baidu Vector Database
+    """
+
+    BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
+        description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
+        default=None,
+    )
+
+    BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field(
+        description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)",
+        default=30000,
+    )
+
+    BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
+        description="Account for authenticating with the Baidu Vector Database",
+        default=None,
+    )
+
+    BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
+        description="API key for authenticating with the Baidu Vector Database service",
+        default=None,
+    )
+
+    BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
+        description="Name of the specific Baidu Vector Database to connect to",
+        default=None,
+    )
+
+    BAIDU_VECTOR_DB_SHARD: PositiveInt = Field(
+        description="Number of shards for the Baidu Vector Database (default is 1)",
+        default=1,
+    )
+
+    BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
+        description="Number of replicas for the Baidu Vector Database (default
is 3)", + default=3, + ) diff --git a/api/configs/middleware/vdb/pgvector_config.py b/api/configs/middleware/vdb/pgvector_config.py index 395dcaa420..85f5dca7e2 100644 --- a/api/configs/middleware/vdb/pgvector_config.py +++ b/api/configs/middleware/vdb/pgvector_config.py @@ -33,3 +33,13 @@ class PGVectorConfig(BaseSettings): description="Name of the PostgreSQL database to connect to", default=None, ) + + PGVECTOR_MIN_CONNECTION: PositiveInt = Field( + description="Min connection of the PostgreSQL database", + default=1, + ) + + PGVECTOR_MAX_CONNECTION: PositiveInt = Field( + description="Max connection of the PostgreSQL database", + default=5, + ) diff --git a/api/configs/middleware/vdb/vikingdb_config.py b/api/configs/middleware/vdb/vikingdb_config.py new file mode 100644 index 0000000000..5ad98d898a --- /dev/null +++ b/api/configs/middleware/vdb/vikingdb_config.py @@ -0,0 +1,37 @@ +from typing import Optional + +from pydantic import BaseModel, Field + + +class VikingDBConfig(BaseModel): + """ + Configuration for connecting to Volcengine VikingDB. + Refer to the following documentation for details on obtaining credentials: + https://www.volcengine.com/docs/6291/65568 + """ + + VIKINGDB_ACCESS_KEY: Optional[str] = Field( + default=None, description="The Access Key provided by Volcengine VikingDB for API authentication." + ) + VIKINGDB_SECRET_KEY: Optional[str] = Field( + default=None, description="The Secret Key provided by Volcengine VikingDB for API authentication." + ) + VIKINGDB_REGION: Optional[str] = Field( + default="cn-shanghai", + description="The region of the Volcengine VikingDB service.(e.g., 'cn-shanghai', 'cn-beijing').", + ) + VIKINGDB_HOST: Optional[str] = Field( + default="api-vikingdb.mlp.cn-shanghai.volces.com", + description="The host of the Volcengine VikingDB service.(e.g., 'api-vikingdb.volces.com', \ + 'api-vikingdb.mlp.cn-shanghai.volces.com')", + ) + VIKINGDB_SCHEME: Optional[str] = Field( + default="http", + description="The scheme of the Volcengine VikingDB service.(e.g., 'http', 'https').", + ) + VIKINGDB_CONNECTION_TIMEOUT: Optional[int] = Field( + default=30, description="The connection timeout of the Volcengine VikingDB service." + ) + VIKINGDB_SOCKET_TIMEOUT: Optional[int] = Field( + default=30, description="The socket timeout of the Volcengine VikingDB service." 
+ ) diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index c752660122..c311a989b4 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="0.8.3", + default="0.9.1", ) COMMIT_SHA: str = Field( diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index eb7c1464d3..c7282fcf14 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -37,7 +37,16 @@ from .auth import activate, data_source_bearer_auth, data_source_oauth, forgot_p from .billing import billing # Import datasets controllers -from .datasets import data_source, datasets, datasets_document, datasets_segments, file, hit_testing, website +from .datasets import ( + data_source, + datasets, + datasets_document, + datasets_segments, + external, + file, + hit_testing, + website, +) # Import explore controllers from .explore import ( diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index df7bd352af..c1e16b3b9b 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -188,6 +188,7 @@ class ChatConversationApi(Resource): subquery.c.from_end_user_session_id.ilike(keyword_filter), ), ) + .group_by(Conversation.id) ) account = current_user diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 62837af2b9..18a7b23166 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -7,7 +7,7 @@ from flask_restful import Resource, reqparse import services from controllers.console import api from controllers.console.setup import setup_required -from libs.helper import email, get_remote_ip +from libs.helper import email, extract_remote_ip from libs.password import valid_password from models.account import Account from services.account_service import AccountService, TenantService @@ -40,17 +40,16 @@ class LoginApi(Resource): "data": "workspace not found, please contact system admin to invite you to join in a workspace", } - token = AccountService.login(account, ip_address=get_remote_ip(request)) + token_pair = AccountService.login(account=account, ip_address=extract_remote_ip(request)) - return {"result": "success", "data": token} + return {"result": "success", "data": token_pair.model_dump()} class LogoutApi(Resource): @setup_required def get(self): account = cast(Account, flask_login.current_user) - token = request.headers.get("Authorization", "").split(" ")[1] - AccountService.logout(account=account, token=token) + AccountService.logout(account=account) flask_login.logout_user() return {"result": "success"} @@ -106,5 +105,19 @@ class ResetPasswordApi(Resource): return {"result": "success"} +class RefreshTokenApi(Resource): + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("refresh_token", type=str, required=True, location="json") + args = parser.parse_args() + + try: + new_token_pair = AccountService.refresh_token(args["refresh_token"]) + return {"result": "success", "data": new_token_pair.model_dump()} + except Exception as e: + return {"result": "fail", "data": str(e)}, 401 + + api.add_resource(LoginApi, "/login") api.add_resource(LogoutApi, "/logout") +api.add_resource(RefreshTokenApi, "/refresh-token") diff --git a/api/controllers/console/auth/oauth.py 
b/api/controllers/console/auth/oauth.py index ad0c0580ae..c5909b8c10 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -9,7 +9,7 @@ from flask_restful import Resource from configs import dify_config from constants.languages import languages from extensions.ext_database import db -from libs.helper import get_remote_ip +from libs.helper import extract_remote_ip from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo from models.account import Account, AccountStatus from services.account_service import AccountService, RegisterService, TenantService @@ -81,9 +81,14 @@ class OAuthCallback(Resource): TenantService.create_owner_tenant_if_not_exist(account) - token = AccountService.login(account, ip_address=get_remote_ip(request)) + token_pair = AccountService.login( + account=account, + ip_address=extract_remote_ip(request), + ) - return redirect(f"{dify_config.CONSOLE_WEB_URL}?console_token={token}") + return redirect( + f"{dify_config.CONSOLE_WEB_URL}?access_token={token_pair.access_token}&refresh_token={token_pair.refresh_token}" + ) def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]: diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 2c4e5ac607..6583356d23 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -49,7 +49,7 @@ class DatasetListApi(Resource): page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) ids = request.args.getlist("ids") - provider = request.args.get("provider", default="vendor") + # provider = request.args.get("provider", default="vendor") search = request.args.get("keyword", default=None, type=str) tag_ids = request.args.getlist("tag_ids") @@ -57,7 +57,7 @@ class DatasetListApi(Resource): datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id) else: datasets, total = DatasetService.get_datasets( - page, limit, provider, current_user.current_tenant_id, current_user, search, tag_ids + page, limit, current_user.current_tenant_id, current_user, search, tag_ids ) # check embedding setting @@ -110,6 +110,26 @@ class DatasetListApi(Resource): nullable=True, help="Invalid indexing technique.", ) + parser.add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + ) + parser.add_argument( + "provider", + type=str, + nullable=True, + choices=Dataset.PROVIDER_LIST, + required=False, + default="vendor", + ) + parser.add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, + ) args = parser.parse_args() # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator @@ -123,6 +143,9 @@ class DatasetListApi(Resource): indexing_technique=args["indexing_technique"], account=current_user, permission=DatasetPermissionEnum.ONLY_ME, + provider=args["provider"], + external_knowledge_api_id=args["external_knowledge_api_id"], + external_knowledge_id=args["external_knowledge_id"], ) except services.errors.dataset.DatasetNameDuplicateError: raise DatasetNameDuplicateError() @@ -211,6 +234,33 @@ class DatasetApi(Resource): ) parser.add_argument("retrieval_model", type=dict, location="json", help="Invalid retrieval model.") parser.add_argument("partial_member_list", type=list, location="json", help="Invalid parent user list.") + + parser.add_argument( + "external_retrieval_model", + type=dict, 
+ required=False, + nullable=True, + location="json", + help="Invalid external retrieval model.", + ) + + parser.add_argument( + "external_knowledge_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge id.", + ) + + parser.add_argument( + "external_knowledge_api_id", + type=str, + required=False, + nullable=True, + location="json", + help="Invalid external knowledge api id.", + ) args = parser.parse_args() data = request.get_json() @@ -563,10 +613,12 @@ class DatasetRetrievalSettingApi(Resource): case ( VectorType.MILVUS | VectorType.RELYT - | VectorType.PGVECTOR | VectorType.TIDB_VECTOR | VectorType.CHROMA | VectorType.TENCENT + | VectorType.PGVECTO_RS + | VectorType.BAIDU + | VectorType.VIKINGDB ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( @@ -577,6 +629,7 @@ class DatasetRetrievalSettingApi(Resource): | VectorType.MYSCALE | VectorType.ORACLE | VectorType.ELASTICSEARCH + | VectorType.PGVECTOR ): return { "retrieval_method": [ @@ -602,6 +655,8 @@ class DatasetRetrievalSettingMockApi(Resource): | VectorType.CHROMA | VectorType.TENCENT | VectorType.PGVECTO_RS + | VectorType.BAIDU + | VectorType.VIKINGDB ): return {"retrieval_method": [RetrievalMethod.SEMANTIC_SEARCH.value]} case ( diff --git a/api/controllers/console/datasets/external.py b/api/controllers/console/datasets/external.py new file mode 100644 index 0000000000..2dc054cfbd --- /dev/null +++ b/api/controllers/console/datasets/external.py @@ -0,0 +1,263 @@ +from flask import request +from flask_login import current_user +from flask_restful import Resource, marshal, reqparse +from werkzeug.exceptions import Forbidden, InternalServerError, NotFound + +import services +from controllers.console import api +from controllers.console.datasets.error import DatasetNameDuplicateError +from controllers.console.setup import setup_required +from controllers.console.wraps import account_initialization_required +from fields.dataset_fields import dataset_detail_fields +from libs.login import login_required +from services.dataset_service import DatasetService +from services.external_knowledge_service import ExternalDatasetService +from services.hit_testing_service import HitTestingService +from services.knowledge_service import ExternalDatasetTestService + + +def _validate_name(name): + if not name or len(name) < 1 or len(name) > 100: + raise ValueError("Name must be between 1 to 100 characters.") + return name + + +def _validate_description_length(description): + if description and len(description) > 400: + raise ValueError("Description cannot exceed 400 characters.") + return description + + +class ExternalApiTemplateListApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self): + page = request.args.get("page", default=1, type=int) + limit = request.args.get("limit", default=20, type=int) + search = request.args.get("keyword", default=None, type=str) + + external_knowledge_apis, total = ExternalDatasetService.get_external_knowledge_apis( + page, limit, current_user.current_tenant_id, search + ) + response = { + "data": [item.to_dict() for item in external_knowledge_apis], + "has_more": len(external_knowledge_apis) == limit, + "limit": limit, + "total": total, + "page": page, + } + return response, 200 + + @setup_required + @login_required + @account_initialization_required + def post(self): + parser = reqparse.RequestParser() + parser.add_argument( + "name", + nullable=False, + required=True, + help="Name is required. 
Name must be between 1 to 100 characters.", + type=_validate_name, + ) + parser.add_argument( + "settings", + type=dict, + location="json", + nullable=False, + required=True, + ) + args = parser.parse_args() + + ExternalDatasetService.validate_api_list(args["settings"]) + + # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator + if not current_user.is_dataset_editor: + raise Forbidden() + + try: + external_knowledge_api = ExternalDatasetService.create_external_knowledge_api( + tenant_id=current_user.current_tenant_id, user_id=current_user.id, args=args + ) + except services.errors.dataset.DatasetNameDuplicateError: + raise DatasetNameDuplicateError() + + return external_knowledge_api.to_dict(), 201 + + +class ExternalApiTemplateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, external_knowledge_api_id): + external_knowledge_api_id = str(external_knowledge_api_id) + external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(external_knowledge_api_id) + if external_knowledge_api is None: + raise NotFound("API template not found.") + + return external_knowledge_api.to_dict(), 200 + + @setup_required + @login_required + @account_initialization_required + def patch(self, external_knowledge_api_id): + external_knowledge_api_id = str(external_knowledge_api_id) + + parser = reqparse.RequestParser() + parser.add_argument( + "name", + nullable=False, + required=True, + help="type is required. Name must be between 1 to 100 characters.", + type=_validate_name, + ) + parser.add_argument( + "settings", + type=dict, + location="json", + nullable=False, + required=True, + ) + args = parser.parse_args() + ExternalDatasetService.validate_api_list(args["settings"]) + + external_knowledge_api = ExternalDatasetService.update_external_knowledge_api( + tenant_id=current_user.current_tenant_id, + user_id=current_user.id, + external_knowledge_api_id=external_knowledge_api_id, + args=args, + ) + + return external_knowledge_api.to_dict(), 200 + + @setup_required + @login_required + @account_initialization_required + def delete(self, external_knowledge_api_id): + external_knowledge_api_id = str(external_knowledge_api_id) + + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor or current_user.is_dataset_operator: + raise Forbidden() + + ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id) + return {"result": "success"}, 200 + + +class ExternalApiUseCheckApi(Resource): + @setup_required + @login_required + @account_initialization_required + def get(self, external_knowledge_api_id): + external_knowledge_api_id = str(external_knowledge_api_id) + + external_knowledge_api_is_using, count = ExternalDatasetService.external_knowledge_api_use_check( + external_knowledge_api_id + ) + return {"is_using": external_knowledge_api_is_using, "count": count}, 200 + + +class ExternalDatasetCreateApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + # The role of the current user in the ta table must be admin, owner, or editor + if not current_user.is_editor: + raise Forbidden() + + parser = reqparse.RequestParser() + parser.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json") + parser.add_argument("external_knowledge_id", type=str, required=True, nullable=False, location="json") + parser.add_argument( + 
"name", + nullable=False, + required=True, + help="name is required. Name must be between 1 to 100 characters.", + type=_validate_name, + ) + parser.add_argument("description", type=str, required=False, nullable=True, location="json") + parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") + + args = parser.parse_args() + + # The role of the current user in the ta table must be admin, owner, or editor, or dataset_operator + if not current_user.is_dataset_editor: + raise Forbidden() + + try: + dataset = ExternalDatasetService.create_external_dataset( + tenant_id=current_user.current_tenant_id, + user_id=current_user.id, + args=args, + ) + except services.errors.dataset.DatasetNameDuplicateError: + raise DatasetNameDuplicateError() + + return marshal(dataset, dataset_detail_fields), 201 + + +class ExternalKnowledgeHitTestingApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self, dataset_id): + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + + try: + DatasetService.check_dataset_permission(dataset, current_user) + except services.errors.account.NoPermissionError as e: + raise Forbidden(str(e)) + + parser = reqparse.RequestParser() + parser.add_argument("query", type=str, location="json") + parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") + args = parser.parse_args() + + HitTestingService.hit_testing_args_check(args) + + try: + response = HitTestingService.external_retrieve( + dataset=dataset, + query=args["query"], + account=current_user, + external_retrieval_model=args["external_retrieval_model"], + ) + + return response + except Exception as e: + raise InternalServerError(str(e)) + + +class BedrockRetrievalApi(Resource): + # this api is only for internal testing + def post(self): + parser = reqparse.RequestParser() + parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json") + parser.add_argument( + "query", + nullable=False, + required=True, + type=str, + ) + parser.add_argument("knowledge_id", nullable=False, required=True, type=str) + args = parser.parse_args() + + # Call the knowledge retrieval service + result = ExternalDatasetTestService.knowledge_retrieval( + args["retrieval_setting"], args["query"], args["knowledge_id"] + ) + return result, 200 + + +api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets//external-hit-testing") +api.add_resource(ExternalDatasetCreateApi, "/datasets/external") +api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api") +api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/") +api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api//use-check") +# this api is only for internal test +api.add_resource(BedrockRetrievalApi, "/test/retrieval") diff --git a/api/controllers/console/datasets/hit_testing.py b/api/controllers/console/datasets/hit_testing.py index 0b4a7be986..6e6d8c0bd7 100644 --- a/api/controllers/console/datasets/hit_testing.py +++ b/api/controllers/console/datasets/hit_testing.py @@ -47,6 +47,7 @@ class HitTestingApi(Resource): parser = reqparse.RequestParser() parser.add_argument("query", type=str, location="json") parser.add_argument("retrieval_model", type=dict, required=False, location="json") + parser.add_argument("external_retrieval_model", type=dict, required=False, location="json") args = 
parser.parse_args() HitTestingService.hit_testing_args_check(args) @@ -57,6 +58,7 @@ class HitTestingApi(Resource): query=args["query"], account=current_user, retrieval_model=args["retrieval_model"], + external_retrieval_model=args["external_retrieval_model"], limit=10, ) diff --git a/api/controllers/console/datasets/website.py b/api/controllers/console/datasets/website.py index cb54f1aacb..e80ce17c68 100644 --- a/api/controllers/console/datasets/website.py +++ b/api/controllers/console/datasets/website.py @@ -14,7 +14,9 @@ class WebsiteCrawlApi(Resource): @account_initialization_required def post(self): parser = reqparse.RequestParser() - parser.add_argument("provider", type=str, choices=["firecrawl"], required=True, nullable=True, location="json") + parser.add_argument( + "provider", type=str, choices=["firecrawl", "jinareader"], required=True, nullable=True, location="json" + ) parser.add_argument("url", type=str, required=True, nullable=True, location="json") parser.add_argument("options", type=dict, required=True, nullable=True, location="json") args = parser.parse_args() @@ -33,7 +35,7 @@ class WebsiteCrawlStatusApi(Resource): @account_initialization_required def get(self, job_id: str): parser = reqparse.RequestParser() - parser.add_argument("provider", type=str, choices=["firecrawl"], required=True, location="args") + parser.add_argument("provider", type=str, choices=["firecrawl", "jinareader"], required=True, location="args") args = parser.parse_args() # get crawl status try: diff --git a/api/controllers/console/setup.py b/api/controllers/console/setup.py index 46b4ef5d87..15a4af118b 100644 --- a/api/controllers/console/setup.py +++ b/api/controllers/console/setup.py @@ -4,7 +4,7 @@ from flask import request from flask_restful import Resource, reqparse from configs import dify_config -from libs.helper import StrLen, email, get_remote_ip +from libs.helper import StrLen, email, extract_remote_ip from libs.password import valid_password from models.model import DifySetup from services.account_service import RegisterService, TenantService @@ -46,7 +46,7 @@ class SetupApi(Resource): # setup RegisterService.setup( - email=args["email"], name=args["name"], password=args["password"], ip_address=get_remote_ip(request) + email=args["email"], name=args["name"], password=args["password"], ip_address=extract_remote_ip(request) ) return {"result": "success"}, 201 diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 76adbfe6a9..deda1a0d02 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -38,11 +38,52 @@ class VersionApi(Resource): return result content = json.loads(response.content) - result["version"] = content["version"] - result["release_date"] = content["releaseDate"] - result["release_notes"] = content["releaseNotes"] - result["can_auto_update"] = content["canAutoUpdate"] + if _has_new_version(latest_version=content["version"], current_version=f"{args.get('current_version')}"): + result["version"] = content["version"] + result["release_date"] = content["releaseDate"] + result["release_notes"] = content["releaseNotes"] + result["can_auto_update"] = content["canAutoUpdate"] return result +def _has_new_version(*, latest_version: str, current_version: str) -> bool: + def parse_version(version: str) -> tuple: + # Split version into parts and pre-release suffix if any + parts = version.split("-") + version_parts = parts[0].split(".") + pre_release = parts[1] if len(parts) > 1 else None + + # Validate version format + 
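+            # e.g. "0.9.1"        -> (0, 9, 1, None)
+            #      "0.9.1-beta.1" -> (0, 9, 1, "beta.1")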
if len(version_parts) != 3: + raise ValueError(f"Invalid version format: {version}") + + try: + # Convert version parts to integers + major, minor, patch = map(int, version_parts) + return (major, minor, patch, pre_release) + except ValueError: + raise ValueError(f"Invalid version format: {version}") + + latest = parse_version(latest_version) + current = parse_version(current_version) + + # Compare major, minor, and patch versions + for latest_part, current_part in zip(latest[:3], current[:3]): + if latest_part > current_part: + return True + elif latest_part < current_part: + return False + + # If versions are equal, check pre-release suffixes + if latest[3] is None and current[3] is not None: + return True + elif latest[3] is not None and current[3] is None: + return False + elif latest[3] is not None and current[3] is not None: + # Simple string comparison for pre-release versions + return latest[3] > current[3] + + return False + + api.add_resource(VersionApi, "/version") diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index fe0bcf7338..9e8a53bbfb 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -126,13 +126,12 @@ class ModelProviderIconApi(Resource): Get model provider icon """ - @setup_required - @login_required - @account_initialization_required def get(self, provider: str, icon_type: str, lang: str): model_provider_service = ModelProviderService() icon, mimetype = model_provider_service.get_model_provider_icon( - provider=provider, icon_type=icon_type, lang=lang + provider=provider, + icon_type=icon_type, + lang=lang, ) return send_file(io.BytesIO(icon), mimetype=mimetype) diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index dc88f6b812..3138a260b3 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -72,8 +72,9 @@ class DefaultModelApi(Resource): provider=model_setting["provider"], model=model_setting["model"], ) - except Exception: - logging.warning(f"{model_setting['model_type']} save error") + except Exception as ex: + logging.exception(f"{model_setting['model_type']} save error: {ex}") + raise ex return {"result": "success"} diff --git a/api/controllers/files/error.py b/api/controllers/files/error.py new file mode 100644 index 0000000000..a7ce4cd6f7 --- /dev/null +++ b/api/controllers/files/error.py @@ -0,0 +1,7 @@ +from libs.exception import BaseHTTPException + + +class UnsupportedFileTypeError(BaseHTTPException): + error_code = "unsupported_file_type" + description = "File type not allowed." 
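+    # HTTP 415 Unsupported Media Type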
+ code = 415 diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index 2432285d93..a56c1c332d 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -4,7 +4,7 @@ from werkzeug.exceptions import NotFound import services from controllers.files import api -from libs.exception import BaseHTTPException +from controllers.files.error import UnsupportedFileTypeError from services.account_service import TenantService from services.file_service import FileService @@ -50,9 +50,3 @@ class WorkspaceWebappLogoApi(Resource): api.add_resource(ImagePreviewApi, "/files//image-preview") api.add_resource(WorkspaceWebappLogoApi, "/files/workspaces//webapp-logo") - - -class UnsupportedFileTypeError(BaseHTTPException): - error_code = "unsupported_file_type" - description = "File type not allowed." - code = 415 diff --git a/api/controllers/files/tool_files.py b/api/controllers/files/tool_files.py index 38ac0815da..406cd42214 100644 --- a/api/controllers/files/tool_files.py +++ b/api/controllers/files/tool_files.py @@ -3,8 +3,8 @@ from flask_restful import Resource, reqparse from werkzeug.exceptions import Forbidden, NotFound from controllers.files import api +from controllers.files.error import UnsupportedFileTypeError from core.tools.tool_file_manager import ToolFileManager -from libs.exception import BaseHTTPException class ToolFilePreviewApi(Resource): @@ -43,9 +43,3 @@ class ToolFilePreviewApi(Resource): api.add_resource(ToolFilePreviewApi, "/files/tools/.") - - -class UnsupportedFileTypeError(BaseHTTPException): - error_code = "unsupported_file_type" - description = "File type not allowed." - code = 415 diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index 8d8e356c4c..5c3601cf23 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -4,6 +4,7 @@ from flask_restful import Resource, reqparse from werkzeug.exceptions import InternalServerError, NotFound import services +from constants import UUID_NIL from controllers.service_api import api from controllers.service_api.app.error import ( AppUnavailableError, @@ -107,6 +108,7 @@ class ChatApi(Resource): parser.add_argument("conversation_id", type=uuid_value, location="json") parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") parser.add_argument("auto_generate_name", type=bool, required=False, default=True, location="json") + parser.add_argument("parent_message_id", type=uuid_value, required=False, default=UUID_NIL, location="json") args = parser.parse_args() diff --git a/api/controllers/service_api/dataset/dataset.py b/api/controllers/service_api/dataset/dataset.py index c2c0672a03..f076cff6c8 100644 --- a/api/controllers/service_api/dataset/dataset.py +++ b/api/controllers/service_api/dataset/dataset.py @@ -28,11 +28,11 @@ class DatasetListApi(DatasetApiResource): page = request.args.get("page", default=1, type=int) limit = request.args.get("limit", default=20, type=int) - provider = request.args.get("provider", default="vendor") + # provider = request.args.get("provider", default="vendor") search = request.args.get("keyword", default=None, type=str) tag_ids = request.args.getlist("tag_ids") - datasets, total = DatasetService.get_datasets(page, limit, provider, tenant_id, current_user, search, tag_ids) + datasets, total = DatasetService.get_datasets(page, limit, tenant_id, current_user, search, tag_ids) # check 
embedding setting provider_manager = ProviderManager() configurations = provider_manager.get_configurations(tenant_id=current_user.current_tenant_id) @@ -82,6 +82,26 @@ class DatasetListApi(DatasetApiResource): required=False, nullable=False, ) + parser.add_argument( + "external_knowledge_api_id", + type=str, + nullable=True, + required=False, + default="_validate_name", + ) + parser.add_argument( + "provider", + type=str, + nullable=True, + required=False, + default="vendor", + ) + parser.add_argument( + "external_knowledge_id", + type=str, + nullable=True, + required=False, + ) args = parser.parse_args() try: @@ -91,6 +111,9 @@ class DatasetListApi(DatasetApiResource): indexing_technique=args["indexing_technique"], account=current_user, permission=args["permission"], + provider=args["provider"], + external_knowledge_api_id=args["external_knowledge_api_id"], + external_knowledge_id=args["external_knowledge_id"], ) except services.errors.dataset.DatasetNameDuplicateError: raise DatasetNameDuplicateError() diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index ebe04bf260..d98ba5a3fa 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -369,7 +369,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): return message def _organize_historic_prompt_messages( - self, current_session_messages: list[PromptMessage] = None + self, current_session_messages: Optional[list[PromptMessage]] = None ) -> list[PromptMessage]: """ organize historic prompt messages diff --git a/api/core/agent/cot_chat_agent_runner.py b/api/core/agent/cot_chat_agent_runner.py index bdec6b7ed1..5e16373fff 100644 --- a/api/core/agent/cot_chat_agent_runner.py +++ b/api/core/agent/cot_chat_agent_runner.py @@ -27,7 +27,7 @@ class CotChatAgentRunner(CotAgentRunner): return SystemPromptMessage(content=system_prompt) - def _organize_user_query(self, query, prompt_messages: list[PromptMessage] = None) -> list[PromptMessage]: + def _organize_user_query(self, query, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: """ Organize user query """ diff --git a/api/core/agent/cot_completion_agent_runner.py b/api/core/agent/cot_completion_agent_runner.py index 9dab956f9a..0563090537 100644 --- a/api/core/agent/cot_completion_agent_runner.py +++ b/api/core/agent/cot_completion_agent_runner.py @@ -1,4 +1,5 @@ import json +from typing import Optional from core.agent.cot_agent_runner import CotAgentRunner from core.model_runtime.entities.message_entities import AssistantPromptMessage, PromptMessage, UserPromptMessage @@ -21,7 +22,7 @@ class CotCompletionAgentRunner(CotAgentRunner): return system_prompt - def _organize_historic_prompt(self, current_session_messages: list[PromptMessage] = None) -> str: + def _organize_historic_prompt(self, current_session_messages: Optional[list[PromptMessage]] = None) -> str: """ Organize historic prompt """ diff --git a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 13164e0bfc..7b22025582 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -2,7 +2,7 @@ import json import logging from collections.abc import Generator from copy import deepcopy -from typing import Any, Union +from typing import Any, Optional, Union from core.agent.base_agent_runner import BaseAgentRunner from core.app.apps.base_app_queue_manager import PublishFrom @@ -370,7 +370,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): return tool_calls def _init_system_message( - self, prompt_template: str, 
prompt_messages: list[PromptMessage] = None + self, prompt_template: str, prompt_messages: Optional[list[PromptMessage]] = None ) -> list[PromptMessage]: """ Initialize system message @@ -385,7 +385,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): return prompt_messages - def _organize_user_query(self, query, prompt_messages: list[PromptMessage] = None) -> list[PromptMessage]: + def _organize_user_query(self, query, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: """ Organize user query """ diff --git a/api/core/agent/output_parser/cot_output_parser.py b/api/core/agent/output_parser/cot_output_parser.py index d04e38777a..99876b2f5e 100644 --- a/api/core/agent/output_parser/cot_output_parser.py +++ b/api/core/agent/output_parser/cot_output_parser.py @@ -14,7 +14,7 @@ class CotAgentOutputParser: ) -> Generator[Union[str, AgentScratchpadUnit.Action], None, None]: def parse_action(json_str): try: - action = json.loads(json_str) + action = json.loads(json_str, strict=False) action_name = None action_input = None diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 445ef6d0ab..0d183596f3 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -113,6 +113,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): # always enable retriever resource in debugger mode app_config.additional_features.show_retrieve_source = True + workflow_run_id = str(uuid.uuid4()) # init application generate entity application_generate_entity = AdvancedChatAppGenerateEntity( task_id=str(uuid.uuid4()), @@ -127,6 +128,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): invoke_from=invoke_from, extras=extras, trace_manager=trace_manager, + workflow_run_id=workflow_run_id, ) contexts.tenant_id.set(application_generate_entity.app_config.tenant_id) diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index 1bca1e1b71..1dcd051d15 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -149,6 +149,9 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): SystemVariableKey.CONVERSATION_ID: self.conversation.id, SystemVariableKey.USER_ID: user_id, SystemVariableKey.DIALOGUE_COUNT: conversation_dialogue_count, + SystemVariableKey.APP_ID: app_config.app_id, + SystemVariableKey.WORKFLOW_ID: app_config.workflow_id, + SystemVariableKey.WORKFLOW_RUN_ID: self.application_generate_entity.workflow_run_id, } # init variable pool diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 94206a1b1c..fd63c7787f 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -45,6 +45,7 @@ from core.app.entities.task_entities import ( from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline from core.app.task_pipeline.message_cycle_manage import MessageCycleManage from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage +from core.model_runtime.entities.llm_entities import LLMUsage from core.model_runtime.utils.encoders import jsonable_encoder from core.ops.ops_trace_manager import TraceQueueManager from core.workflow.enums import SystemVariableKey @@ -55,6 +56,7 @@ from models.account import Account from models.model import Conversation, EndUser, Message from 
models.workflow import ( Workflow, + WorkflowNodeExecution, WorkflowRunStatus, ) @@ -71,6 +73,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc _workflow: Workflow _user: Union[Account, EndUser] _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def __init__( self, @@ -107,9 +110,14 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc SystemVariableKey.FILES: application_generate_entity.files, SystemVariableKey.CONVERSATION_ID: conversation.id, SystemVariableKey.USER_ID: user_id, + SystemVariableKey.DIALOGUE_COUNT: conversation.dialogue_count, + SystemVariableKey.APP_ID: application_generate_entity.app_config.app_id, + SystemVariableKey.WORKFLOW_ID: workflow.id, + SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id, } self._task_state = WorkflowTaskState() + self._wip_workflow_node_executions = {} self._conversation_name_generate_thread = None @@ -231,7 +239,8 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc except Exception as e: logger.error(e) break - yield MessageAudioEndStreamResponse(audio="", task_id=task_id) + if tts_publisher: + yield MessageAudioEndStreamResponse(audio="", task_id=task_id) def _process_stream_response( self, @@ -504,6 +513,10 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCyc self._message.total_price = usage.total_price self._message.currency = usage.currency + self._task_state.metadata["usage"] = jsonable_encoder(usage) + else: + self._task_state.metadata["usage"] = jsonable_encoder(LLMUsage.empty_usage()) + db.session.commit() message_was_created.send( diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 57a77591a0..bd0ab53278 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -99,6 +99,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user_id = user.id if isinstance(user, Account) else user.session_id trace_manager = TraceQueueManager(app_model.id, user_id) + workflow_run_id = str(uuid.uuid4()) # init application generate entity application_generate_entity = WorkflowAppGenerateEntity( task_id=str(uuid.uuid4()), @@ -110,6 +111,7 @@ class WorkflowAppGenerator(BaseAppGenerator): invoke_from=invoke_from, call_depth=call_depth, trace_manager=trace_manager, + workflow_run_id=workflow_run_id, ) contexts.tenant_id.set(application_generate_entity.app_config.tenant_id) diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index 22ec228fa7..378a4bb8bc 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -90,6 +90,9 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): system_inputs = { SystemVariableKey.FILES: files, SystemVariableKey.USER_ID: user_id, + SystemVariableKey.APP_ID: app_config.app_id, + SystemVariableKey.WORKFLOW_ID: app_config.workflow_id, + SystemVariableKey.WORKFLOW_RUN_ID: self.application_generate_entity.workflow_run_id, } variable_pool = VariablePool( diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 93edf8e0e8..7c53556e43 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -52,6 +52,7 @@ from models.workflow import ( Workflow, WorkflowAppLog, 
WorkflowAppLogCreatedFrom, + WorkflowNodeExecution, WorkflowRun, WorkflowRunStatus, ) @@ -69,6 +70,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa _task_state: WorkflowTaskState _application_generate_entity: WorkflowAppGenerateEntity _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def __init__( self, @@ -97,9 +99,13 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa self._workflow_system_variables = { SystemVariableKey.FILES: application_generate_entity.files, SystemVariableKey.USER_ID: user_id, + SystemVariableKey.APP_ID: application_generate_entity.app_config.app_id, + SystemVariableKey.WORKFLOW_ID: workflow.id, + SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id, } self._task_state = WorkflowTaskState() + self._wip_workflow_node_executions = {} def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: """ @@ -212,7 +218,8 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleMa except Exception as e: logger.error(e) break - yield MessageAudioEndStreamResponse(audio="", task_id=task_id) + if tts_publisher: + yield MessageAudioEndStreamResponse(audio="", task_id=task_id) def _process_stream_response( self, diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index 87ca51ef1b..e757c8db75 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -152,6 +152,7 @@ class AdvancedChatAppGenerateEntity(AppGenerateEntity): conversation_id: Optional[str] = None parent_message_id: Optional[str] = None + workflow_run_id: Optional[str] = None query: str class SingleIterationRunEntity(BaseModel): @@ -172,6 +173,7 @@ class WorkflowAppGenerateEntity(AppGenerateEntity): # app config app_config: WorkflowUIBasedAppConfig + workflow_run_id: Optional[str] = None class SingleIterationRunEntity(BaseModel): """ diff --git a/api/core/app/segments/exc.py b/api/core/app/segments/exc.py index d15d6d500f..5cf67c3bac 100644 --- a/api/core/app/segments/exc.py +++ b/api/core/app/segments/exc.py @@ -1,2 +1,2 @@ -class VariableError(Exception): +class VariableError(ValueError): pass diff --git a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py index 8f834b6458..917649f34e 100644 --- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py +++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py @@ -248,7 +248,8 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline, MessageCycleMan else: start_listener_time = time.time() yield MessageAudioStreamResponse(audio=audio.audio, task_id=task_id) - yield MessageAudioEndStreamResponse(audio="", task_id=task_id) + if publisher: + yield MessageAudioEndStreamResponse(audio="", task_id=task_id) def _process_stream_response( self, publisher: AppGeneratorTTSPublisher, trace_manager: Optional[TraceQueueManager] = None diff --git a/api/core/app/task_pipeline/message_cycle_manage.py b/api/core/app/task_pipeline/message_cycle_manage.py index 5872e00740..236eebf0b8 100644 --- a/api/core/app/task_pipeline/message_cycle_manage.py +++ b/api/core/app/task_pipeline/message_cycle_manage.py @@ -1,8 +1,10 @@ +import logging from threading import Thread from typing import Optional, Union from flask import Flask, current_app 
+from configs import dify_config from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, AgentChatAppGenerateEntity, @@ -82,7 +84,9 @@ class MessageCycleManage: try: name = LLMGenerator.generate_conversation_name(app_model.tenant_id, query) conversation.name = name - except: + except Exception as e: + if dify_config.DEBUG: + logging.exception(f"generate conversation name failed: {e}") pass db.session.merge(conversation) diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/app/task_pipeline/workflow_cycle_manage.py index f10189798f..b8f5ac2603 100644 --- a/api/core/app/task_pipeline/workflow_cycle_manage.py +++ b/api/core/app/task_pipeline/workflow_cycle_manage.py @@ -57,6 +57,7 @@ class WorkflowCycleManage: _user: Union[Account, EndUser] _task_state: WorkflowTaskState _workflow_system_variables: dict[SystemVariableKey, Any] + _wip_workflow_node_executions: dict[str, WorkflowNodeExecution] def _handle_workflow_run_start(self) -> WorkflowRun: max_sequence = ( @@ -85,6 +86,9 @@ class WorkflowCycleManage: # init workflow run workflow_run = WorkflowRun() + workflow_run_id = self._workflow_system_variables[SystemVariableKey.WORKFLOW_RUN_ID] + if workflow_run_id: + workflow_run.id = workflow_run_id workflow_run.tenant_id = self._workflow.tenant_id workflow_run.app_id = self._workflow.app_id workflow_run.sequence_number = new_sequence_number @@ -248,6 +252,8 @@ class WorkflowCycleManage: db.session.refresh(workflow_node_execution) db.session.close() + self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution + return workflow_node_execution def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution: @@ -260,20 +266,36 @@ class WorkflowCycleManage: inputs = WorkflowEntry.handle_special_values(event.inputs) outputs = WorkflowEntry.handle_special_values(event.outputs) + execution_metadata = ( + json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None + ) + finished_at = datetime.now(timezone.utc).replace(tzinfo=None) + elapsed_time = (finished_at - event.start_at).total_seconds() + + db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update( + { + WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value, + WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None, + WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None, + WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None, + WorkflowNodeExecution.execution_metadata: execution_metadata, + WorkflowNodeExecution.finished_at: finished_at, + WorkflowNodeExecution.elapsed_time: elapsed_time, + } + ) + + db.session.commit() + db.session.close() workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value workflow_node_execution.inputs = json.dumps(inputs) if inputs else None workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None workflow_node_execution.outputs = json.dumps(outputs) if outputs else None - workflow_node_execution.execution_metadata = ( - json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None - ) - workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None) - workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds() + 
workflow_node_execution.execution_metadata = execution_metadata + workflow_node_execution.finished_at = finished_at + workflow_node_execution.elapsed_time = elapsed_time - db.session.commit() - db.session.refresh(workflow_node_execution) - db.session.close() + self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id) return workflow_node_execution @@ -287,18 +309,33 @@ class WorkflowCycleManage: inputs = WorkflowEntry.handle_special_values(event.inputs) outputs = WorkflowEntry.handle_special_values(event.outputs) + finished_at = datetime.now(timezone.utc).replace(tzinfo=None) + elapsed_time = (finished_at - event.start_at).total_seconds() + + db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update( + { + WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.FAILED.value, + WorkflowNodeExecution.error: event.error, + WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None, + WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None, + WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None, + WorkflowNodeExecution.finished_at: finished_at, + WorkflowNodeExecution.elapsed_time: elapsed_time, + } + ) + + db.session.commit() + db.session.close() workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value workflow_node_execution.error = event.error - workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None) workflow_node_execution.inputs = json.dumps(inputs) if inputs else None workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None workflow_node_execution.outputs = json.dumps(outputs) if outputs else None - workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds() + workflow_node_execution.finished_at = finished_at + workflow_node_execution.elapsed_time = elapsed_time - db.session.commit() - db.session.refresh(workflow_node_execution) - db.session.close() + self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id) return workflow_node_execution @@ -675,17 +712,7 @@ class WorkflowCycleManage: :param node_execution_id: workflow node execution id :return: """ - workflow_node_execution = ( - db.session.query(WorkflowNodeExecution) - .filter( - WorkflowNodeExecution.tenant_id == self._application_generate_entity.app_config.tenant_id, - WorkflowNodeExecution.app_id == self._application_generate_entity.app_config.app_id, - WorkflowNodeExecution.workflow_id == self._workflow.id, - WorkflowNodeExecution.triggered_from == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value, - WorkflowNodeExecution.node_execution_id == node_execution_id, - ) - .first() - ) + workflow_node_execution = self._wip_workflow_node_executions.get(node_execution_id) if not workflow_node_execution: raise Exception(f"Workflow node execution not found: {node_execution_id}") diff --git a/api/core/callback_handler/agent_tool_callback_handler.py b/api/core/callback_handler/agent_tool_callback_handler.py index 99e992fd89..d826edf6a0 100644 --- a/api/core/callback_handler/agent_tool_callback_handler.py +++ b/api/core/callback_handler/agent_tool_callback_handler.py @@ -1,9 +1,9 @@ -import os from collections.abc import Mapping, Sequence from typing import Any, Optional, TextIO, Union from pydantic import BaseModel +from configs import dify_config from core.ops.entities.trace_entity import TraceTaskName from 
core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.tools.entities.tool_entities import ToolInvokeMessage @@ -50,7 +50,8 @@ class DifyAgentCallbackHandler(BaseModel): tool_inputs: Mapping[str, Any], ) -> None: """Do nothing.""" - print_text("\n[on_tool_start] ToolCall:" + tool_name + "\n" + str(tool_inputs) + "\n", color=self.color) + if dify_config.DEBUG: + print_text("\n[on_tool_start] ToolCall:" + tool_name + "\n" + str(tool_inputs) + "\n", color=self.color) def on_tool_end( self, @@ -62,11 +63,12 @@ class DifyAgentCallbackHandler(BaseModel): trace_manager: Optional[TraceQueueManager] = None, ) -> None: """If not the final action, print out observation.""" - print_text("\n[on_tool_end]\n", color=self.color) - print_text("Tool: " + tool_name + "\n", color=self.color) - print_text("Inputs: " + str(tool_inputs) + "\n", color=self.color) - print_text("Outputs: " + str(tool_outputs)[:1000] + "\n", color=self.color) - print_text("\n") + if dify_config.DEBUG: + print_text("\n[on_tool_end]\n", color=self.color) + print_text("Tool: " + tool_name + "\n", color=self.color) + print_text("Inputs: " + str(tool_inputs) + "\n", color=self.color) + print_text("Outputs: " + str(tool_outputs)[:1000] + "\n", color=self.color) + print_text("\n") if trace_manager: trace_manager.add_trace_task( @@ -82,30 +84,33 @@ class DifyAgentCallbackHandler(BaseModel): def on_tool_error(self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any) -> None: """Do nothing.""" - print_text("\n[on_tool_error] Error: " + str(error) + "\n", color="red") + if dify_config.DEBUG: + print_text("\n[on_tool_error] Error: " + str(error) + "\n", color="red") def on_agent_start(self, thought: str) -> None: """Run on agent start.""" - if thought: - print_text( - "\n[on_agent_start] \nCurrent Loop: " + str(self.current_loop) + "\nThought: " + thought + "\n", - color=self.color, - ) - else: - print_text("\n[on_agent_start] \nCurrent Loop: " + str(self.current_loop) + "\n", color=self.color) + if dify_config.DEBUG: + if thought: + print_text( + "\n[on_agent_start] \nCurrent Loop: " + str(self.current_loop) + "\nThought: " + thought + "\n", + color=self.color, + ) + else: + print_text("\n[on_agent_start] \nCurrent Loop: " + str(self.current_loop) + "\n", color=self.color) def on_agent_finish(self, color: Optional[str] = None, **kwargs: Any) -> None: """Run on agent end.""" - print_text("\n[on_agent_finish]\n Loop: " + str(self.current_loop) + "\n", color=self.color) + if dify_config.DEBUG: + print_text("\n[on_agent_finish]\n Loop: " + str(self.current_loop) + "\n", color=self.color) self.current_loop += 1 @property def ignore_agent(self) -> bool: """Whether to ignore agent callbacks.""" - return not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != "true" + return not dify_config.DEBUG @property def ignore_chat_model(self) -> bool: """Whether to ignore chat model callbacks.""" - return not os.environ.get("DEBUG") or os.environ.get("DEBUG").lower() != "true" + return not dify_config.DEBUG diff --git a/api/core/callback_handler/index_tool_callback_handler.py b/api/core/callback_handler/index_tool_callback_handler.py index 7cf472d984..1481578630 100644 --- a/api/core/callback_handler/index_tool_callback_handler.py +++ b/api/core/callback_handler/index_tool_callback_handler.py @@ -44,7 +44,6 @@ class DatasetIndexToolCallbackHandler: DocumentSegment.index_node_id == document.metadata["doc_id"] ) - # if 'dataset_id' in document.metadata: if "dataset_id" in document.metadata: query = 
query.filter(DocumentSegment.dataset_id == document.metadata["dataset_id"]) @@ -59,7 +58,7 @@ class DatasetIndexToolCallbackHandler: for item in resource: dataset_retriever_resource = DatasetRetrieverResource( message_id=self._message_id, - position=item.get("position"), + position=item.get("position") or 0, dataset_id=item.get("dataset_id"), dataset_name=item.get("dataset_name"), document_id=item.get("document_id"), diff --git a/api/core/embedding/cached_embedding.py b/api/core/embedding/cached_embedding.py index 75219051cd..31d2171e72 100644 --- a/api/core/embedding/cached_embedding.py +++ b/api/core/embedding/cached_embedding.py @@ -5,6 +5,7 @@ from typing import Optional, cast import numpy as np from sqlalchemy.exc import IntegrityError +from configs import dify_config from core.embedding.embedding_constant import EmbeddingInputType from core.model_manager import ModelInstance from core.model_runtime.entities.model_entities import ModelPropertyKey @@ -110,6 +111,8 @@ class CacheEmbedding(Embeddings): embedding_results = embedding_result.embeddings[0] embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() except Exception as ex: + if dify_config.DEBUG: + logging.exception(f"Failed to embed query text: {ex}") raise ex try: @@ -122,6 +125,8 @@ class CacheEmbedding(Embeddings): encoded_str = encoded_vector.decode("utf-8") redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: - logging.exception("Failed to add embedding to redis %s", ex) + if dify_config.DEBUG: + logging.exception("Failed to add embedding to redis %s", ex) + raise ex return embedding_results diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index 4797b69b85..807f09598c 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -119,7 +119,7 @@ class ProviderConfiguration(BaseModel): credentials = model_configuration.credentials break - if self.custom_configuration.provider: + if not credentials and self.custom_configuration.provider: credentials = self.custom_configuration.provider.credentials return credentials diff --git a/api/core/file/message_file_parser.py b/api/core/file/message_file_parser.py index 83059b216e..641686bd7c 100644 --- a/api/core/file/message_file_parser.py +++ b/api/core/file/message_file_parser.py @@ -198,16 +198,34 @@ class MessageFileParser: if "amazonaws.com" not in parsed_url.netloc: return False query_params = parse_qs(parsed_url.query) - required_params = ["Signature", "Expires"] - for param in required_params: - if param not in query_params: + + def check_presign_v2(query_params): + required_params = ["Signature", "Expires"] + for param in required_params: + if param not in query_params: + return False + if not query_params["Expires"][0].isdigit(): return False - if not query_params["Expires"][0].isdigit(): - return False - signature = query_params["Signature"][0] - if not re.match(r"^[A-Za-z0-9+/]+={0,2}$", signature): - return False - return True + signature = query_params["Signature"][0] + if not re.match(r"^[A-Za-z0-9+/]+={0,2}$", signature): + return False + + return True + + def check_presign_v4(query_params): + required_params = ["X-Amz-Signature", "X-Amz-Expires"] + for param in required_params: + if param not in query_params: + return False + if not query_params["X-Amz-Expires"][0].isdigit(): + return False + signature = query_params["X-Amz-Signature"][0] + if not re.match(r"^[A-Za-z0-9+/]+={0,2}$", signature): + return False + 
+ return True + + return check_presign_v4(query_params) or check_presign_v2(query_params) except Exception: return False diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index af20df41b1..8df26172b7 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -211,9 +211,9 @@ class IndexingRunner: tenant_id: str, extract_settings: list[ExtractSetting], tmp_processing_rule: dict, - doc_form: str = None, + doc_form: Optional[str] = None, doc_language: str = "English", - dataset_id: str = None, + dataset_id: Optional[str] = None, indexing_technique: str = "economy", ) -> dict: """ diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py index 60b36c50f0..bc94912c1e 100644 --- a/api/core/memory/token_buffer_memory.py +++ b/api/core/memory/token_buffer_memory.py @@ -58,7 +58,11 @@ class TokenBufferMemory: # instead of all messages from the conversation, we only need to extract messages # that belong to the thread of last message thread_messages = extract_thread_messages(messages) - thread_messages.pop(0) + + # for newly created message, its answer is temporarily empty, we don't need to add it to memory + if thread_messages and not thread_messages[0].answer: + thread_messages.pop(0) + messages = list(reversed(thread_messages)) message_file_parser = MessageFileParser(tenant_id=app_record.tenant_id, app_id=app_record.id) diff --git a/api/core/model_runtime/callbacks/base_callback.py b/api/core/model_runtime/callbacks/base_callback.py index 92da53c9a4..6bd9325785 100644 --- a/api/core/model_runtime/callbacks/base_callback.py +++ b/api/core/model_runtime/callbacks/base_callback.py @@ -1,3 +1,4 @@ +from abc import ABC, abstractmethod from typing import Optional from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk @@ -13,7 +14,7 @@ _TEXT_COLOR_MAPPING = { } -class Callback: +class Callback(ABC): """ Base class for callbacks. Only for LLM. @@ -21,6 +22,7 @@ class Callback: raise_error: bool = False + @abstractmethod def on_before_invoke( self, llm_instance: AIModel, @@ -48,6 +50,7 @@ class Callback: """ raise NotImplementedError() + @abstractmethod def on_new_chunk( self, llm_instance: AIModel, @@ -77,6 +80,7 @@ class Callback: """ raise NotImplementedError() + @abstractmethod def on_after_invoke( self, llm_instance: AIModel, @@ -106,6 +110,7 @@ class Callback: """ raise NotImplementedError() + @abstractmethod def on_invoke_error( self, llm_instance: AIModel, diff --git a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md new file mode 100644 index 0000000000..f5b806ade6 --- /dev/null +++ b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md @@ -0,0 +1,310 @@ +## Custom Integration of Pre-defined Models + +### Introduction + +After completing the vendors integration, the next step is to connect the vendor's models. To illustrate the entire connection process, we will use Xinference as an example to demonstrate a complete vendor integration. + +It is important to note that for custom models, each model connection requires a complete vendor credential. + +Unlike pre-defined models, a custom vendor integration always includes the following two parameters, which do not need to be defined in the vendor YAML file. + +![](images/index/image-3.png) + +As mentioned earlier, vendors do not need to implement validate_provider_credential. 
The runtime will automatically call the corresponding model layer's validate_credentials to validate the credentials based on the model type and name selected by the user. + +### Writing the Vendor YAML + +First, we need to identify the types of models supported by the vendor we are integrating. + +Currently supported model types are as follows: + +- `llm` Text Generation Models + +- `text_embedding` Text Embedding Models + +- `rerank` Rerank Models + +- `speech2text` Speech-to-Text + +- `tts` Text-to-Speech + +- `moderation` Moderation + +Xinference supports LLM, Text Embedding, and Rerank. So we will start by writing xinference.yaml. + +```yaml +provider: xinference #Define the vendor identifier +label: # Vendor display name, supports both en_US (English) and zh_Hans (Simplified Chinese). If zh_Hans is not set, it will use en_US by default. + en_US: Xorbits Inference +icon_small: # Small icon, refer to other vendors' icons stored in the _assets directory within the vendor implementation directory; follows the same language policy as the label + en_US: icon_s_en.svg +icon_large: # Large icon + en_US: icon_l_en.svg +help: # Help information + title: + en_US: How to deploy Xinference + zh_Hans: 如何部署 Xinference + url: + en_US: https://github.com/xorbitsai/inference +supported_model_types: # Supported model types. Xinference supports LLM, Text Embedding, and Rerank +- llm +- text-embedding +- rerank +configurate_methods: # Since Xinference is a locally deployed vendor with no predefined models, users need to deploy whatever models they need according to Xinference documentation. Thus, it only supports custom models. +- customizable-model +provider_credential_schema: + credential_form_schemas: +``` + + +Then, we need to determine what credentials are required to define a model in Xinference. + +- Since it supports three different types of models, we need to specify the model_type to denote the model type. Here is how we can define it: + +```yaml +provider_credential_schema: + credential_form_schemas: + - variable: model_type + type: select + label: + en_US: Model type + zh_Hans: 模型类型 + required: true + options: + - value: text-generation + label: + en_US: Language Model + zh_Hans: 语言模型 + - value: embeddings + label: + en_US: Text Embedding + - value: reranking + label: + en_US: Rerank +``` + +- Next, each model has its own model_name, so we need to define that here: + +```yaml + - variable: model_name + type: text-input + label: + en_US: Model name + zh_Hans: 模型名称 + required: true + placeholder: + zh_Hans: 填写模型名称 + en_US: Input model name +``` + +- Specify the Xinference local deployment address: + +```yaml + - variable: server_url + label: + zh_Hans: 服务器URL + en_US: Server url + type: text-input + required: true + placeholder: + zh_Hans: 在此输入Xinference的服务器地址,如 https://example.com/xxx + en_US: Enter the url of your Xinference, for example https://example.com/xxx +``` + +- Each model has a unique model_uid, so we also need to define that here: + +```yaml + - variable: model_uid + label: + zh_Hans: 模型UID + en_US: Model uid + type: text-input + required: true + placeholder: + zh_Hans: 在此输入您的Model UID + en_US: Enter the model uid +``` + +Now, we have completed the basic definition of the vendor. + +### Writing the Model Code + +Next, let's take the `llm` type as an example and write `xinference.llm.llm.py`. 
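+
+Before implementing each method, it can help to see the overall shape of the file. The skeleton below is only an orientation sketch, not code copied from an actual implementation; it uses the class name introduced in the next paragraph, assumes the usual runtime import paths, and elides every method body:
+
+```python
+from collections.abc import Generator
+from typing import Optional, Union
+
+from core.model_runtime.entities.llm_entities import LLMResult
+from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+
+
+class XinferenceAILargeLanguageModel(LargeLanguageModel):
+    def _invoke(self, model: str, credentials: dict,
+                prompt_messages: list[PromptMessage], model_parameters: dict,
+                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
+                stream: bool = True, user: Optional[str] = None) -> Union[LLMResult, Generator]:
+        ...  # core LLM invocation, detailed below
+
+    def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage],
+                       tools: Optional[list[PromptMessageTool]] = None) -> int:
+        ...  # input token pre-computation, detailed below
+
+    def validate_credentials(self, model: str, credentials: dict) -> None:
+        ...  # per-model credential validation, detailed below
+```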
+
+In `llm.py`, create a Xinference LLM class. We name it `XinferenceAILargeLanguageModel` (the name is arbitrary), inherit from the `__base.large_language_model.LargeLanguageModel` base class, and implement the following methods:
+
+- LLM Invocation
+
+Implement the core method for LLM invocation, supporting both streaming and synchronous responses.
+
+```python
+def _invoke(self, model: str, credentials: dict,
+            prompt_messages: list[PromptMessage], model_parameters: dict,
+            tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
+            stream: bool = True, user: Optional[str] = None) \
+        -> Union[LLMResult, Generator]:
+    """
+    Invoke large language model
+
+    :param model: model name
+    :param credentials: model credentials
+    :param prompt_messages: prompt messages
+    :param model_parameters: model parameters
+    :param tools: tools for tool usage
+    :param stop: stop words
+    :param stream: is the response a stream
+    :param user: unique user id
+    :return: full response or stream response chunk generator result
+    """
+```
+
+When implementing, make sure to use two separate functions to return data for synchronous and streaming responses. This is important because Python treats any function containing the `yield` keyword as a generator function, so its return type is always `Generator`. Here’s an example (note that the example uses simplified parameters; in a real implementation, use the parameter list as defined above):
+
+```python
+def _invoke(self, stream: bool, **kwargs) \
+        -> Union[LLMResult, Generator]:
+    if stream:
+        return self._handle_stream_response(**kwargs)
+    return self._handle_sync_response(**kwargs)
+
+def _handle_stream_response(self, **kwargs) -> Generator:
+    for chunk in response:
+        yield chunk
+
+def _handle_sync_response(self, **kwargs) -> LLMResult:
+    return LLMResult(**response)
+```
+
+- Pre-compute Input Tokens
+
+If the model does not provide an interface for pre-computing tokens, you can return 0 directly.
+
+```python
+def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage],
+                   tools: Optional[list[PromptMessageTool]] = None) -> int:
+    """
+    Get number of tokens for given prompt messages
+
+    :param model: model name
+    :param credentials: model credentials
+    :param prompt_messages: prompt messages
+    :param tools: tools for tool usage
+    :return: token count
+    """
+```
+
+Sometimes you might not want to return 0 directly. In such cases, you can use `self._get_num_tokens_by_gpt2(text: str)` to get pre-computed tokens. This method is provided by the `AIModel` base class and uses GPT-2's tokenizer for the calculation. Note, however, that this is only an approximation and may not be fully accurate.
+
+- Model Credentials Validation
+
+Similar to vendor credentials validation, this method validates the credentials of an individual model.
+
+```python
+def validate_credentials(self, model: str, credentials: dict) -> None:
+    """
+    Validate model credentials
+
+    :param model: model name
+    :param credentials: model credentials
+    :return: None
+    """
+```
+
+- Model Parameter Schema
+
+Unlike predefined models, no YAML file declares which parameters a custom model supports, so we need to generate the model parameter schema dynamically.
+
+For instance, Xinference supports `max_tokens`, `temperature`, and `top_p` parameters.
+
+However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not every model provided by this vendor supports `top_k`.
Let's say model A supports `top_k` but model B does not. In such cases, we need to generate the model parameter schema dynamically, as illustrated below:
+
+```python
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+        """
+        used to define customizable model schema
+        """
+        rules = [
+            ParameterRule(
+                name='temperature', type=ParameterType.FLOAT,
+                use_template='temperature',
+                label=I18nObject(
+                    zh_Hans='温度', en_US='Temperature'
+                )
+            ),
+            ParameterRule(
+                name='top_p', type=ParameterType.FLOAT,
+                use_template='top_p',
+                label=I18nObject(
+                    zh_Hans='Top P', en_US='Top P'
+                )
+            ),
+            ParameterRule(
+                name='max_tokens', type=ParameterType.INT,
+                use_template='max_tokens',
+                min=1,
+                default=512,
+                label=I18nObject(
+                    zh_Hans='最大生成长度', en_US='Max Tokens'
+                )
+            )
+        ]
+
+        # if model is A, add top_k to rules
+        if model == 'A':
+            rules.append(
+                ParameterRule(
+                    name='top_k', type=ParameterType.INT,
+                    use_template='top_k',
+                    min=1,
+                    default=50,
+                    label=I18nObject(
+                        zh_Hans='Top K', en_US='Top K'
+                    )
+                )
+            )
+
+        """
+        some NOT IMPORTANT code here
+        """
+
+        entity = AIModelEntity(
+            model=model,
+            label=I18nObject(
+                en_US=model
+            ),
+            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
+            model_type=ModelType.LLM,
+            model_properties={
+                ModelPropertyKey.MODE: LLMMode.CHAT.value,  # or LLMMode.COMPLETION.value for completion models
+            },
+            parameter_rules=rules
+        )
+
+        return entity
+```
+
+- Exception Error Mapping
+
+When a model invocation error occurs, it should be mapped to the runtime's specified `InvokeError` type, enabling Dify to handle different errors appropriately.
+
+Runtime Errors:
+
+- `InvokeConnectionError` Connection error during invocation
+- `InvokeServerUnavailableError` Service provider unavailable
+- `InvokeRateLimitError` Rate limit reached
+- `InvokeAuthorizationError` Authorization failure
+- `InvokeBadRequestError` Invalid request parameters
+
+```python
+    @property
+    def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
+        """
+        Map model invoke error to unified error
+        The key is the error type thrown to the caller
+        The value is the error type thrown by the model,
+        which needs to be converted into a unified error type for the caller.
+
+        :return: Invoke error mapping
+        """
+```
+
+For interface method details, see: [Interfaces](./interfaces.md). For specific implementations, refer to: [llm.py](https://github.com/langgenius/dify-runtime/blob/main/lib/model_providers/anthropic/llm/llm.py).
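+
+As a concrete illustration, a filled-in mapping for a hypothetical HTTP-based vendor might look like the sketch below. Only the `InvokeError` keys on the left are fixed by the runtime; the `Vendor*` exception classes on the right are invented stand-ins for whatever the vendor SDK actually raises:
+
+```python
+    # assumes the InvokeError subclasses are imported from the runtime's invoke-error module;
+    # the Vendor* exception names below are hypothetical placeholders
+    @property
+    def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
+        return {
+            InvokeConnectionError: [ConnectionError, TimeoutError],
+            InvokeServerUnavailableError: [VendorServerError],        # hypothetical SDK exception
+            InvokeRateLimitError: [VendorRateLimitError],             # hypothetical SDK exception
+            InvokeAuthorizationError: [VendorAuthError],              # hypothetical SDK exception
+            InvokeBadRequestError: [ValueError, VendorBadRequestError],  # hypothetical
+        }
+```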
\ No newline at end of file diff --git a/api/core/model_runtime/docs/en_US/images/index/image-1.png b/api/core/model_runtime/docs/en_US/images/index/image-1.png new file mode 100644 index 0000000000..b158d44b29 Binary files /dev/null and b/api/core/model_runtime/docs/en_US/images/index/image-1.png differ diff --git a/api/core/model_runtime/docs/en_US/images/index/image-2.png b/api/core/model_runtime/docs/en_US/images/index/image-2.png new file mode 100644 index 0000000000..c70cd3da5e Binary files /dev/null and b/api/core/model_runtime/docs/en_US/images/index/image-2.png differ diff --git a/api/core/model_runtime/docs/en_US/images/index/image-3.png b/api/core/model_runtime/docs/en_US/images/index/image-3.png new file mode 100644 index 0000000000..bf0b9a7f47 Binary files /dev/null and b/api/core/model_runtime/docs/en_US/images/index/image-3.png differ diff --git a/api/core/model_runtime/docs/en_US/images/index/image.png b/api/core/model_runtime/docs/en_US/images/index/image.png new file mode 100644 index 0000000000..eb63d107e1 Binary files /dev/null and b/api/core/model_runtime/docs/en_US/images/index/image.png differ diff --git a/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md b/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md new file mode 100644 index 0000000000..3e16257452 --- /dev/null +++ b/api/core/model_runtime/docs/en_US/predefined_model_scale_out.md @@ -0,0 +1,173 @@ +## Predefined Model Integration + +After completing the vendor integration, the next step is to integrate the models from the vendor. + +First, we need to determine the type of model to be integrated and create the corresponding model type `module` under the respective vendor's directory. + +Currently supported model types are: + +- `llm` Text Generation Model +- `text_embedding` Text Embedding Model +- `rerank` Rerank Model +- `speech2text` Speech-to-Text +- `tts` Text-to-Speech +- `moderation` Moderation + +Continuing with `Anthropic` as an example, `Anthropic` only supports LLM, so create a `module` named `llm` under `model_providers.anthropic`. + +For predefined models, we first need to create a YAML file named after the model under the `llm` `module`, such as `claude-2.1.yaml`. + +### Prepare Model YAML + +```yaml +model: claude-2.1 # Model identifier +# Display name of the model, which can be set to en_US English or zh_Hans Chinese. If zh_Hans is not set, it will default to en_US. 
+# This can also be omitted, in which case the model identifier will be used as the label
+label:
+  en_US: claude-2.1
+model_type: llm # Model type, claude-2.1 is an LLM
+features: # Supported features, agent-thought supports Agent reasoning, vision supports image understanding
+- agent-thought
+model_properties: # Model properties
+  mode: chat # LLM mode, completion for text completion models, chat for conversation models
+  context_size: 200000 # Maximum context size
+parameter_rules: # Parameter rules for the model call; only LLM requires this
+- name: temperature # Parameter variable name
+  # Five default configuration templates are provided: temperature/top_p/max_tokens/presence_penalty/frequency_penalty
+  # The template variable name can be set directly in use_template, which will use the default configuration in entities.defaults.PARAMETER_RULE_TEMPLATE
+  # Additional configuration parameters will override the default configuration if set
+  use_template: temperature
+- name: top_p
+  use_template: top_p
+- name: top_k
+  label: # Display name of the parameter
+    zh_Hans: 取样数量
+    en_US: Top k
+  type: int # Parameter type, supports float/int/string/boolean
+  help: # Help information, describing the parameter's function
+    zh_Hans: 仅从每个后续标记的前 K 个选项中采样。
+    en_US: Only sample from the top K options for each subsequent token.
+  required: false # Whether the parameter is mandatory; can be omitted
+- name: max_tokens_to_sample
+  use_template: max_tokens
+  default: 4096 # Default value of the parameter
+  min: 1 # Minimum value of the parameter, applicable to float/int only
+  max: 4096 # Maximum value of the parameter, applicable to float/int only
+pricing: # Pricing information
+  input: '8.00' # Input unit price, i.e., prompt price
+  output: '24.00' # Output unit price, i.e., response content price
+  unit: '0.000001' # Price unit, meaning the above prices are per 1M tokens
+  currency: USD # Price currency
+```
+
+It is recommended to prepare all model configurations before starting the implementation of the model code.
+
+You can also refer to the YAML configuration information under the corresponding model type directories of other vendors in the `model_providers` directory. For the complete YAML rules, refer to: [Schema](schema.md#aimodelentity).
+
+### Implement the Model Call Code
+
+Next, create a Python file named `llm.py` under the `llm` `module` to write the implementation code.
+
+Create an Anthropic LLM class named `AnthropicLargeLanguageModel` (or any other name), inheriting from the `__base.large_language_model.LargeLanguageModel` base class, and implement the following methods:
+
+- LLM Call
+
+Implement the core method for calling the LLM, supporting both streaming and synchronous responses.
+ +```python + def _invoke(self, model: str, credentials: dict, + prompt_messages: list[PromptMessage], model_parameters: dict, + tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None, + stream: bool = True, user: Optional[str] = None) \ + -> Union[LLMResult, Generator]: + """ + Invoke large language model + + :param model: model name + :param credentials: model credentials + :param prompt_messages: prompt messages + :param model_parameters: model parameters + :param tools: tools for tool calling + :param stop: stop words + :param stream: is stream response + :param user: unique user id + :return: full response or stream response chunk generator result + """ +``` + +Ensure to use two functions for returning data, one for synchronous returns and the other for streaming returns, because Python identifies functions containing the `yield` keyword as generator functions, fixing the return type to `Generator`. Thus, synchronous and streaming returns need to be implemented separately, as shown below (note that the example uses simplified parameters, for actual implementation follow the above parameter list): + +```python + def _invoke(self, stream: bool, **kwargs) \ + -> Union[LLMResult, Generator]: + if stream: + return self._handle_stream_response(**kwargs) + return self._handle_sync_response(**kwargs) + + def _handle_stream_response(self, **kwargs) -> Generator: + for chunk in response: + yield chunk + def _handle_sync_response(self, **kwargs) -> LLMResult: + return LLMResult(**response) +``` + +- Pre-compute Input Tokens + +If the model does not provide an interface to precompute tokens, return 0 directly. + +```python + def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage], + tools: Optional[list[PromptMessageTool]] = None) -> int: + """ + Get number of tokens for given prompt messages + + :param model: model name + :param credentials: model credentials + :param prompt_messages: prompt messages + :param tools: tools for tool calling + :return: + """ +``` + +- Validate Model Credentials + +Similar to vendor credential validation, but specific to a single model. + +```python + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ +``` + +- Map Invoke Errors + +When a model call fails, map it to a specific `InvokeError` type as required by Runtime, allowing Dify to handle different errors accordingly. + +Runtime Errors: + +- `InvokeConnectionError` Connection error + +- `InvokeServerUnavailableError` Service provider unavailable +- `InvokeRateLimitError` Rate limit reached +- `InvokeAuthorizationError` Authorization failed +- `InvokeBadRequestError` Parameter error + +```python + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + The key is the error type thrown to the caller + The value is the error type thrown by the model, + which needs to be converted into a unified error type for the caller. + + :return: Invoke error mapping + """ +``` + +For interface method explanations, see: [Interfaces](./interfaces.md). For detailed implementation, refer to: [llm.py](https://github.com/langgenius/dify-runtime/blob/main/lib/model_providers/anthropic/llm/llm.py). 
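+
+To make the credential check above concrete: a common pattern (shown here as a sketch, not the actual Anthropic implementation) is to issue a minimal, non-streaming call and convert any failure into `CredentialsValidateFailedError`. The parameter name follows the claude-2.1 YAML above, and the import paths are assumed from the runtime layout:
+
+```python
+    # assumes: from core.model_runtime.errors.validate import CredentialsValidateFailedError
+    # assumes: from core.model_runtime.entities.message_entities import UserPromptMessage
+    def validate_credentials(self, model: str, credentials: dict) -> None:
+        try:
+            # a minimal one-token, non-streaming ping; any connection or
+            # authorization failure will raise here
+            self._invoke(
+                model=model,
+                credentials=credentials,
+                prompt_messages=[UserPromptMessage(content="ping")],
+                model_parameters={"max_tokens_to_sample": 1},
+                stream=False,
+            )
+        except Exception as ex:
+            raise CredentialsValidateFailedError(str(ex))
+```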
\ No newline at end of file diff --git a/api/core/model_runtime/docs/en_US/provider_scale_out.md b/api/core/model_runtime/docs/en_US/provider_scale_out.md index ba356c5cab..07be5811d3 100644 --- a/api/core/model_runtime/docs/en_US/provider_scale_out.md +++ b/api/core/model_runtime/docs/en_US/provider_scale_out.md @@ -58,7 +58,7 @@ provider_credential_schema: # Provider credential rules, as Anthropic only supp en_US: Enter your API URL ``` -You can also refer to the YAML configuration information under other provider directories in `model_providers`. The complete YAML rules are available at: [Schema](schema.md#Provider). +You can also refer to the YAML configuration information under other provider directories in `model_providers`. The complete YAML rules are available at: [Schema](schema.md#provider). ### Implementing Provider Code diff --git a/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md b/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md index b34544c789..78aad8876f 100644 --- a/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/provider_scale_out.md @@ -117,7 +117,7 @@ model_credential_schema: en_US: Enter your API Base ``` -也可以参考 `model_providers` 目录下其他供应商目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#Provider)。 +也可以参考 `model_providers` 目录下其他供应商目录下的 YAML 配置信息,完整的 YAML 规则见:[Schema](schema.md#provider)。 #### 实现供应商代码 diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index ba88cc1f38..0027411a6e 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -94,7 +94,7 @@ class LargeLanguageModel(AIModel): ) try: - if "response_format" in model_parameters: + if "response_format" in model_parameters and model_parameters["response_format"] in {"JSON", "XML"}: result = self._code_block_mode_wrapper( model=model, credentials=credentials, diff --git a/api/core/model_runtime/model_providers/__base/tts_model.py b/api/core/model_runtime/model_providers/__base/tts_model.py index 70be9322a7..862ec29daf 100644 --- a/api/core/model_runtime/model_providers/__base/tts_model.py +++ b/api/core/model_runtime/model_providers/__base/tts_model.py @@ -1,7 +1,7 @@ import logging import re from abc import abstractmethod -from typing import Optional +from typing import Any, Optional from pydantic import ConfigDict @@ -88,7 +88,7 @@ class TTSModel(AIModel): else: return [{"name": d["name"], "value": d["mode"]} for d in voices] - def _get_model_default_voice(self, model: str, credentials: dict) -> any: + def _get_model_default_voice(self, model: str, credentials: dict) -> Any: """ Get voice for given tts model diff --git a/api/core/model_runtime/model_providers/_position.yaml b/api/core/model_runtime/model_providers/_position.yaml index 80db22ea84..89fccef659 100644 --- a/api/core/model_runtime/model_providers/_position.yaml +++ b/api/core/model_runtime/model_providers/_position.yaml @@ -40,3 +40,4 @@ - fireworks - mixedbread - nomic +- voyage diff --git a/api/core/model_runtime/model_providers/anthropic/llm/llm.py b/api/core/model_runtime/model_providers/anthropic/llm/llm.py index 46e1b415b8..3a5a42ba05 100644 --- a/api/core/model_runtime/model_providers/anthropic/llm/llm.py +++ b/api/core/model_runtime/model_providers/anthropic/llm/llm.py @@ -169,7 +169,7 @@ class AnthropicLargeLanguageModel(LargeLanguageModel): stop: Optional[list[str]] = 
None, stream: bool = True, user: Optional[str] = None, - callbacks: list[Callback] = None, + callbacks: Optional[list[Callback]] = None, ) -> Union[LLMResult, Generator]: """ Code block mode wrapper for invoking large language model diff --git a/api/core/model_runtime/model_providers/azure_openai/_constant.py b/api/core/model_runtime/model_providers/azure_openai/_constant.py index 0dada70cc5..baa5421396 100644 --- a/api/core/model_runtime/model_providers/azure_openai/_constant.py +++ b/api/core/model_runtime/model_providers/azure_openai/_constant.py @@ -1081,8 +1081,81 @@ LLM_BASE_MODELS = [ ), ), ), + AzureBaseModel( + base_model_name="o1-preview", + entity=AIModelEntity( + model="fake-deployment-name", + label=I18nObject( + en_US="fake-deployment-name-label", + ), + model_type=ModelType.LLM, + features=[ + ModelFeature.AGENT_THOUGHT, + ], + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ + ModelPropertyKey.MODE: LLMMode.CHAT.value, + ModelPropertyKey.CONTEXT_SIZE: 128000, + }, + parameter_rules=[ + ParameterRule( + name="response_format", + label=I18nObject(zh_Hans="回复格式", en_US="response_format"), + type="string", + help=I18nObject( + zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" + ), + required=False, + options=["text", "json_object"], + ), + _get_max_tokens(default=512, min_val=1, max_val=32768), + ], + pricing=PriceConfig( + input=15.00, + output=60.00, + unit=0.000001, + currency="USD", + ), + ), + ), + AzureBaseModel( + base_model_name="o1-mini", + entity=AIModelEntity( + model="fake-deployment-name", + label=I18nObject( + en_US="fake-deployment-name-label", + ), + model_type=ModelType.LLM, + features=[ + ModelFeature.AGENT_THOUGHT, + ], + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ + ModelPropertyKey.MODE: LLMMode.CHAT.value, + ModelPropertyKey.CONTEXT_SIZE: 128000, + }, + parameter_rules=[ + ParameterRule( + name="response_format", + label=I18nObject(zh_Hans="回复格式", en_US="response_format"), + type="string", + help=I18nObject( + zh_Hans="指定模型必须输出的格式", en_US="specifying the format that the model must output" + ), + required=False, + options=["text", "json_object"], + ), + _get_max_tokens(default=512, min_val=1, max_val=65536), + ], + pricing=PriceConfig( + input=3.00, + output=12.00, + unit=0.000001, + currency="USD", + ), + ), + ), ] - EMBEDDING_BASE_MODELS = [ AzureBaseModel( base_model_name="text-embedding-ada-002", diff --git a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml index 867f9fec42..093f57c51e 100644 --- a/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml +++ b/api/core/model_runtime/model_providers/azure_openai/azure_openai.yaml @@ -53,6 +53,9 @@ model_credential_schema: type: select required: true options: + - label: + en_US: 2024-09-01-preview + value: 2024-09-01-preview - label: en_US: 2024-08-01-preview value: 2024-08-01-preview @@ -120,6 +123,18 @@ model_credential_schema: show_on: - variable: __model_type value: llm + - label: + en_US: o1-mini + value: o1-mini + show_on: + - variable: __model_type + value: llm + - label: + en_US: o1-preview + value: o1-preview + show_on: + - variable: __model_type + value: llm - label: en_US: gpt-4o-mini value: gpt-4o-mini diff --git a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py index f0033ea051..b9cc3bb672 100644 --- 
a/api/core/model_runtime/model_providers/azure_openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/azure_openai/llm/llm.py @@ -312,10 +312,24 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): if user: extra_model_kwargs["user"] = user + # clear illegal prompt messages + prompt_messages = self._clear_illegal_prompt_messages(model, prompt_messages) + + block_as_stream = False + if model.startswith("o1"): + if stream: + block_as_stream = True + stream = False + + if "stream_options" in extra_model_kwargs: + del extra_model_kwargs["stream_options"] + + if "stop" in extra_model_kwargs: + del extra_model_kwargs["stop"] + # chat model - messages = [self._convert_prompt_message_to_dict(m) for m in prompt_messages] response = client.chat.completions.create( - messages=messages, + messages=[self._convert_prompt_message_to_dict(m) for m in prompt_messages], model=model, stream=stream, **model_parameters, @@ -325,7 +339,91 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): if stream: return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages, tools) - return self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) + block_result = self._handle_chat_generate_response(model, credentials, response, prompt_messages, tools) + + if block_as_stream: + return self._handle_chat_block_as_stream_response(block_result, prompt_messages, stop) + + return block_result + + def _handle_chat_block_as_stream_response( + self, + block_result: LLMResult, + prompt_messages: list[PromptMessage], + stop: Optional[list[str]] = None, + ) -> Generator[LLMResultChunk, None, None]: + """ + Handle llm chat response + + :param model: model name + :param credentials: credentials + :param response: response + :param prompt_messages: prompt messages + :param tools: tools for tool calling + :param stop: stop words + :return: llm response chunk generator + """ + text = block_result.message.content + text = cast(str, text) + + if stop: + text = self.enforce_stop_tokens(text, stop) + + yield LLMResultChunk( + model=block_result.model, + prompt_messages=prompt_messages, + system_fingerprint=block_result.system_fingerprint, + delta=LLMResultChunkDelta( + index=0, + message=AssistantPromptMessage(content=text), + finish_reason="stop", + usage=block_result.usage, + ), + ) + + def _clear_illegal_prompt_messages(self, model: str, prompt_messages: list[PromptMessage]) -> list[PromptMessage]: + """ + Clear illegal prompt messages for OpenAI API + + :param model: model name + :param prompt_messages: prompt messages + :return: cleaned prompt messages + """ + checklist = ["gpt-4-turbo", "gpt-4-turbo-2024-04-09"] + + if model in checklist: + # count how many user messages are there + user_message_count = len([m for m in prompt_messages if isinstance(m, UserPromptMessage)]) + if user_message_count > 1: + for prompt_message in prompt_messages: + if isinstance(prompt_message, UserPromptMessage): + if isinstance(prompt_message.content, list): + prompt_message.content = "\n".join( + [ + item.data + if item.type == PromptMessageContentType.TEXT + else "[IMAGE]" + if item.type == PromptMessageContentType.IMAGE + else "" + for item in prompt_message.content + ] + ) + + if model.startswith("o1"): + system_message_count = len([m for m in prompt_messages if isinstance(m, SystemPromptMessage)]) + if system_message_count > 0: + new_prompt_messages = [] + for prompt_message in prompt_messages: + if isinstance(prompt_message, 
SystemPromptMessage): + prompt_message = UserPromptMessage( + content=prompt_message.content, + name=prompt_message.name, + ) + + new_prompt_messages.append(prompt_message) + prompt_messages = new_prompt_messages + + return prompt_messages def _handle_chat_generate_response( self, @@ -560,7 +658,7 @@ class AzureOpenAILargeLanguageModel(_CommonAzureOpenAI, LargeLanguageModel): tokens_per_message = 4 # if there's a name, the role is omitted tokens_per_name = -1 - elif model.startswith("gpt-35-turbo") or model.startswith("gpt-4"): + elif model.startswith("gpt-35-turbo") or model.startswith("gpt-4") or model.startswith("o1"): tokens_per_message = 3 tokens_per_name = 1 else: diff --git a/api/core/model_runtime/model_providers/azure_openai/tts/tts.py b/api/core/model_runtime/model_providers/azure_openai/tts/tts.py index af178703a0..133cc9f76e 100644 --- a/api/core/model_runtime/model_providers/azure_openai/tts/tts.py +++ b/api/core/model_runtime/model_providers/azure_openai/tts/tts.py @@ -1,6 +1,6 @@ import concurrent.futures import copy -from typing import Optional +from typing import Any, Optional from openai import AzureOpenAI @@ -19,7 +19,7 @@ class AzureOpenAIText2SpeechModel(_CommonAzureOpenAI, TTSModel): def _invoke( self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: + ) -> Any: """ _invoke text2speech model @@ -56,7 +56,7 @@ class AzureOpenAIText2SpeechModel(_CommonAzureOpenAI, TTSModel): except Exception as ex: raise CredentialsValidateFailedError(str(ex)) - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> Any: """ _tts_invoke_streaming text2speech model :param model: model name diff --git a/api/core/model_runtime/model_providers/bedrock/bedrock.yaml b/api/core/model_runtime/model_providers/bedrock/bedrock.yaml index c540ee23b3..952f968b9d 100644 --- a/api/core/model_runtime/model_providers/bedrock/bedrock.yaml +++ b/api/core/model_runtime/model_providers/bedrock/bedrock.yaml @@ -50,34 +50,62 @@ provider_credential_schema: label: en_US: US East (N. 
Virginia) zh_Hans: 美国东部 (弗吉尼亚北部) + - value: us-east-2 + label: + en_US: US East (Ohio) + zh_Hans: 美国东部 (俄亥俄州) - value: us-west-2 label: en_US: US West (Oregon) zh_Hans: 美国西部 (俄勒冈州) + - value: ap-south-1 + label: + en_US: Asia Pacific (Mumbai) + zh_Hans: 亚太地区(孟买) - value: ap-southeast-1 label: en_US: Asia Pacific (Singapore) zh_Hans: 亚太地区 (新加坡) - - value: ap-northeast-1 - label: - en_US: Asia Pacific (Tokyo) - zh_Hans: 亚太地区 (东京) - - value: eu-central-1 - label: - en_US: Europe (Frankfurt) - zh_Hans: 欧洲 (法兰克福) - - value: eu-west-2 - label: - en_US: Eu west London (London) - zh_Hans: 欧洲西部 (伦敦) - - value: us-gov-west-1 - label: - en_US: AWS GovCloud (US-West) - zh_Hans: AWS GovCloud (US-West) - value: ap-southeast-2 label: en_US: Asia Pacific (Sydney) zh_Hans: 亚太地区 (悉尼) + - value: ap-northeast-1 + label: + en_US: Asia Pacific (Tokyo) + zh_Hans: 亚太地区 (东京) + - value: ap-northeast-2 + label: + en_US: Asia Pacific (Seoul) + zh_Hans: 亚太地区(首尔) + - value: ca-central-1 + label: + en_US: Canada (Central) + zh_Hans: 加拿大(中部) + - value: eu-central-1 + label: + en_US: Europe (Frankfurt) + zh_Hans: 欧洲 (法兰克福) + - value: eu-west-1 + label: + en_US: Europe (Ireland) + zh_Hans: 欧洲(爱尔兰) + - value: eu-west-2 + label: + en_US: Europe (London) + zh_Hans: 欧洲西部 (伦敦) + - value: eu-west-3 + label: + en_US: Europe (Paris) + zh_Hans: 欧洲(巴黎) + - value: sa-east-1 + label: + en_US: South America (São Paulo) + zh_Hans: 南美洲(圣保罗) + - value: us-gov-west-1 + label: + en_US: AWS GovCloud (US-West) + zh_Hans: AWS GovCloud (US-West) - variable: model_for_validation required: false label: diff --git a/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml b/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml index 86c8061dee..47e2b020fd 100644 --- a/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/bedrock/llm/_position.yaml @@ -6,6 +6,8 @@ - anthropic.claude-v2:1 - anthropic.claude-3-sonnet-v1:0 - anthropic.claude-3-haiku-v1:0 +- ai21.jamba-1-5-large-v1:0 +- ai21.jamba-1-5-mini-v1:0 - cohere.command-light-text-v14 - cohere.command-text-v14 - cohere.command-r-plus-v1.0 @@ -15,6 +17,10 @@ - meta.llama3-1-405b-instruct-v1:0 - meta.llama3-8b-instruct-v1:0 - meta.llama3-70b-instruct-v1:0 +- us.meta.llama3-2-1b-instruct-v1:0 +- us.meta.llama3-2-3b-instruct-v1:0 +- us.meta.llama3-2-11b-instruct-v1:0 +- us.meta.llama3-2-90b-instruct-v1:0 - meta.llama2-13b-chat-v1 - meta.llama2-70b-chat-v1 - mistral.mistral-large-2407-v1:0 diff --git a/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-large-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-large-v1.0.yaml new file mode 100644 index 0000000000..276c7312ce --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-large-v1.0.yaml @@ -0,0 +1,26 @@ +model: ai21.jamba-1-5-large-v1:0 +label: + en_US: Jamba 1.5 Large +model_type: llm +model_properties: + mode: completion + context_size: 256000 +parameter_rules: + - name: temperature + use_template: temperature + default: 1 + min: 0.0 + max: 2.0 + - name: top_p + use_template: top_p + - name: max_gen_len + use_template: max_tokens + required: true + default: 4096 + min: 1 + max: 4096 +pricing: + input: '0.002' + output: '0.008' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-mini-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-mini-v1.0.yaml new file mode 100644 index 0000000000..3461d8ab71 --- 
/dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/ai21.jamba-1-5-mini-v1.0.yaml @@ -0,0 +1,26 @@ +model: ai21.jamba-1-5-mini-v1:0 +label: + en_US: Jamba 1.5 Mini +model_type: llm +model_properties: + mode: completion + context_size: 256000 +parameter_rules: + - name: temperature + use_template: temperature + default: 1 + min: 0.0 + max: 2.0 + - name: top_p + use_template: top_p + - name: max_gen_len + use_template: max_tokens + required: true + default: 4096 + min: 1 + max: 4096 +pricing: + input: '0.0002' + output: '0.0004' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/llm.py b/api/core/model_runtime/model_providers/bedrock/llm/llm.py index 77bab0c294..ff0403ee47 100644 --- a/api/core/model_runtime/model_providers/bedrock/llm/llm.py +++ b/api/core/model_runtime/model_providers/bedrock/llm/llm.py @@ -63,6 +63,7 @@ class BedrockLargeLanguageModel(LargeLanguageModel): {"prefix": "us.anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, {"prefix": "eu.anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, {"prefix": "anthropic.claude-3", "support_system_prompts": True, "support_tool_use": True}, + {"prefix": "us.meta.llama3-2", "support_system_prompts": True, "support_tool_use": True}, {"prefix": "meta.llama", "support_system_prompts": True, "support_tool_use": False}, {"prefix": "mistral.mistral-7b-instruct", "support_system_prompts": False, "support_tool_use": False}, {"prefix": "mistral.mixtral-8x7b-instruct", "support_system_prompts": False, "support_tool_use": False}, @@ -70,6 +71,7 @@ class BedrockLargeLanguageModel(LargeLanguageModel): {"prefix": "mistral.mistral-small", "support_system_prompts": True, "support_tool_use": True}, {"prefix": "cohere.command-r", "support_system_prompts": True, "support_tool_use": True}, {"prefix": "amazon.titan", "support_system_prompts": False, "support_tool_use": False}, + {"prefix": "ai21.jamba-1-5", "support_system_prompts": True, "support_tool_use": False}, ] @staticmethod @@ -90,7 +92,7 @@ class BedrockLargeLanguageModel(LargeLanguageModel): stop: Optional[list[str]] = None, stream: bool = True, user: Optional[str] = None, - callbacks: list[Callback] = None, + callbacks: Optional[list[Callback]] = None, ) -> Union[LLMResult, Generator]: """ Code block mode wrapper for invoking large language model diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-11b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-11b-instruct-v1.0.yaml new file mode 100644 index 0000000000..029f428776 --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-11b-instruct-v1.0.yaml @@ -0,0 +1,29 @@ +model: us.meta.llama3-2-11b-instruct-v1:0 +label: + en_US: US Meta Llama 3.2 11B Instruct +model_type: llm +features: + - vision + - tool-call +model_properties: + mode: completion + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.5 + min: 0.0 + max: 1 + - name: top_p + use_template: top_p + - name: max_gen_len + use_template: max_tokens + required: true + default: 512 + min: 1 + max: 2048 +pricing: + input: '0.00035' + output: '0.00035' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-1b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-1b-instruct-v1.0.yaml new file mode 100644 index 0000000000..51c8474e54 --- 
/dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-1b-instruct-v1.0.yaml @@ -0,0 +1,26 @@ +model: us.meta.llama3-2-1b-instruct-v1:0 +label: + en_US: US Meta Llama 3.2 1B Instruct +model_type: llm +model_properties: + mode: completion + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.5 + min: 0.0 + max: 1 + - name: top_p + use_template: top_p + - name: max_gen_len + use_template: max_tokens + required: true + default: 512 + min: 1 + max: 2048 +pricing: + input: '0.0001' + output: '0.0001' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-3b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-3b-instruct-v1.0.yaml new file mode 100644 index 0000000000..472cc7403e --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-3b-instruct-v1.0.yaml @@ -0,0 +1,26 @@ +model: us.meta.llama3-2-3b-instruct-v1:0 +label: + en_US: US Meta Llama 3.2 3B Instruct +model_type: llm +model_properties: + mode: completion + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.5 + min: 0.0 + max: 1 + - name: top_p + use_template: top_p + - name: max_gen_len + use_template: max_tokens + required: true + default: 512 + min: 1 + max: 2048 +pricing: + input: '0.00015' + output: '0.00015' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-90b-instruct-v1.0.yaml b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-90b-instruct-v1.0.yaml new file mode 100644 index 0000000000..cecd0236ca --- /dev/null +++ b/api/core/model_runtime/model_providers/bedrock/llm/us.meta.llama3-2-90b-instruct-v1.0.yaml @@ -0,0 +1,31 @@ +model: us.meta.llama3-2-90b-instruct-v1:0 +label: + en_US: US Meta Llama 3.2 90B Instruct +model_type: llm +features: + - tool-call +model_properties: + mode: completion + context_size: 128000 +parameter_rules: + - name: temperature + use_template: temperature + default: 0.5 + min: 0.0 + max: 1 + - name: top_p + use_template: top_p + default: 0.9 + min: 0 + max: 1 + - name: max_gen_len + use_template: max_tokens + required: true + default: 512 + min: 1 + max: 2048 +pricing: + input: '0.002' + output: '0.002' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/fireworks/llm/llm.py b/api/core/model_runtime/model_providers/fireworks/llm/llm.py index 2dcf1adba6..24aad9c4d3 100644 --- a/api/core/model_runtime/model_providers/fireworks/llm/llm.py +++ b/api/core/model_runtime/model_providers/fireworks/llm/llm.py @@ -511,7 +511,7 @@ class FireworksLargeLanguageModel(_CommonFireworks, LargeLanguageModel): model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None, - credentials: dict = None, + credentials: Optional[dict] = None, ) -> int: """ Approximate num tokens with GPT2 tokenizer. 
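A note on two patterns that recur in the hunks above. First, several providers in this patch (Azure OpenAI, Bedrock, Fireworks, and the OpenAI and OpenAI-compatible modules later on) replace implicit-Optional defaults such as callbacks: list[Callback] = None with Optional[list[Callback]] = None; PEP 484 deprecates implicit Optional, and mypy has rejected it by default since 0.990. Second, the Azure OpenAI hunk runs o1-* models without streaming (the o1 preview API did not accept stream=True at launch) and then replays the finished result as a one-chunk stream so streaming callers keep working. A minimal standalone sketch of that block-as-stream idea, with illustrative helper names rather than the runtime's actual classes:

```python
import re
from collections.abc import Generator


def enforce_stop_tokens(text: str, stop: list[str]) -> str:
    # Stand-in for the runtime helper of the same name: truncate at the
    # first occurrence of any stop sequence.
    return re.split("|".join(map(re.escape, stop)), text, maxsplit=1)[0]


def block_as_stream(text: str, stop: list[str] | None = None) -> Generator[str, None, None]:
    # The request itself ran with stream=False; the complete text is
    # re-emitted as a single terminal chunk (finish_reason="stop").
    if stop:
        text = enforce_stop_tokens(text, stop)
    yield text
```

Callers can then iterate the generator exactly as they would a real streaming response.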
diff --git a/api/core/model_runtime/model_providers/fishaudio/tts/tts.py b/api/core/model_runtime/model_providers/fishaudio/tts/tts.py index 895a7a914c..e518d7b95b 100644 --- a/api/core/model_runtime/model_providers/fishaudio/tts/tts.py +++ b/api/core/model_runtime/model_providers/fishaudio/tts/tts.py @@ -1,4 +1,4 @@ -from typing import Optional +from typing import Any, Optional import httpx @@ -46,7 +46,7 @@ class FishAudioText2SpeechModel(TTSModel): content_text: str, voice: str, user: Optional[str] = None, - ) -> any: + ) -> Any: """ Invoke text2speech model @@ -87,7 +87,7 @@ class FishAudioText2SpeechModel(TTSModel): except Exception as ex: raise CredentialsValidateFailedError(str(ex)) - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> Any: """ Invoke streaming text2speech model :param model: model name @@ -112,7 +112,7 @@ class FishAudioText2SpeechModel(TTSModel): except Exception as ex: raise InvokeBadRequestError(str(ex)) - def _tts_invoke_streaming_sentence(self, credentials: dict, content_text: str, voice: Optional[str] = None) -> any: + def _tts_invoke_streaming_sentence(self, credentials: dict, content_text: str, voice: Optional[str] = None) -> Any: """ Invoke streaming text2speech model diff --git a/api/core/model_runtime/model_providers/google/llm/_position.yaml b/api/core/model_runtime/model_providers/google/llm/_position.yaml new file mode 100644 index 0000000000..63b9ca3a29 --- /dev/null +++ b/api/core/model_runtime/model_providers/google/llm/_position.yaml @@ -0,0 +1,15 @@ +- gemini-1.5-pro +- gemini-1.5-pro-latest +- gemini-1.5-pro-001 +- gemini-1.5-pro-002 +- gemini-1.5-pro-exp-0801 +- gemini-1.5-pro-exp-0827 +- gemini-1.5-flash +- gemini-1.5-flash-latest +- gemini-1.5-flash-001 +- gemini-1.5-flash-002 +- gemini-1.5-flash-exp-0827 +- gemini-1.5-flash-8b-exp-0827 +- gemini-1.5-flash-8b-exp-0924 +- gemini-pro +- gemini-pro-vision diff --git a/api/core/model_runtime/model_providers/groq/llm/_position.yaml b/api/core/model_runtime/model_providers/groq/llm/_position.yaml index be115ca920..0613b19f87 100644 --- a/api/core/model_runtime/model_providers/groq/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/groq/llm/_position.yaml @@ -5,3 +5,4 @@ - llama3-8b-8192 - mixtral-8x7b-32768 - llama2-70b-4096 +- llama-guard-3-8b diff --git a/api/core/model_runtime/model_providers/groq/llm/llama-guard-3-8b.yaml b/api/core/model_runtime/model_providers/groq/llm/llama-guard-3-8b.yaml new file mode 100644 index 0000000000..03779ccc66 --- /dev/null +++ b/api/core/model_runtime/model_providers/groq/llm/llama-guard-3-8b.yaml @@ -0,0 +1,25 @@ +model: llama-guard-3-8b +label: + zh_Hans: Llama-Guard-3-8B + en_US: Llama-Guard-3-8B +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 8192 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: max_tokens + use_template: max_tokens + default: 512 + min: 1 + max: 8192 +pricing: + input: '0.20' + output: '0.20' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/jina/rerank/rerank.py b/api/core/model_runtime/model_providers/jina/rerank/rerank.py index 79ca68914f..0350207651 100644 --- a/api/core/model_runtime/model_providers/jina/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/jina/rerank/rerank.py @@ -61,11 +61,19 @@ class 
JinaRerankModel(RerankModel): rerank_documents = [] for result in results["results"]: + index = result["index"] + if "document" in result: + text = result["document"]["text"] + else: + # llama.cpp rerank may not return original documents + text = docs[index] + rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], + index=index, + text=text, score=result["relevance_score"], ) + if score_threshold is None or result["relevance_score"] >= score_threshold: rerank_documents.append(rerank_document) diff --git a/api/core/model_runtime/model_providers/localai/rerank/rerank.py b/api/core/model_runtime/model_providers/localai/rerank/rerank.py index 2b0f53bc19..075b44658d 100644 --- a/api/core/model_runtime/model_providers/localai/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/localai/rerank/rerank.py @@ -70,11 +70,19 @@ class LocalaiRerankModel(RerankModel): rerank_documents = [] for result in results["results"]: + index = result["index"] + if "document" in result: + text = result["document"]["text"] + else: + # llama.cpp rerank may not return original documents + text = docs[index] + rerank_document = RerankDocument( - index=result["index"], - text=result["document"]["text"], + index=index, + text=text, score=result["relevance_score"], ) + if score_threshold is None or result["relevance_score"] >= score_threshold: rerank_documents.append(rerank_document) diff --git a/api/core/model_runtime/model_providers/openai/llm/llm.py b/api/core/model_runtime/model_providers/openai/llm/llm.py index d42fce528a..1ac3837ad3 100644 --- a/api/core/model_runtime/model_providers/openai/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai/llm/llm.py @@ -111,7 +111,7 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel): stop: Optional[list[str]] = None, stream: bool = True, user: Optional[str] = None, - callbacks: list[Callback] = None, + callbacks: Optional[list[Callback]] = None, ) -> Union[LLMResult, Generator]: """ Code block mode wrapper for invoking large language model diff --git a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py index 18f97e45f3..0d54d2ea9a 100644 --- a/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/openai/speech2text/speech2text.py @@ -2,6 +2,8 @@ from typing import IO, Optional from openai import OpenAI +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel from core.model_runtime.model_providers.openai._common import _CommonOpenAI @@ -58,3 +60,18 @@ class OpenAISpeech2TextModel(_CommonOpenAI, Speech2TextModel): response = client.audio.transcriptions.create(model=model, file=file) return response.text + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + """ + used to define customizable model schema + """ + entity = AIModelEntity( + model=model, + label=I18nObject(en_US=model), + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_type=ModelType.SPEECH2TEXT, + model_properties={}, + parameter_rules=[], + ) + + return entity diff --git a/api/core/model_runtime/model_providers/openai/tts/tts.py 
b/api/core/model_runtime/model_providers/openai/tts/tts.py index a14c91639b..2e57b95944 100644 --- a/api/core/model_runtime/model_providers/openai/tts/tts.py +++ b/api/core/model_runtime/model_providers/openai/tts/tts.py @@ -1,5 +1,5 @@ import concurrent.futures -from typing import Optional +from typing import Any, Optional from openai import OpenAI @@ -16,7 +16,7 @@ class OpenAIText2SpeechModel(_CommonOpenAI, TTSModel): def _invoke( self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: + ) -> Any: """ _invoke text2speech model @@ -55,7 +55,7 @@ class OpenAIText2SpeechModel(_CommonOpenAI, TTSModel): except Exception as ex: raise CredentialsValidateFailedError(str(ex)) - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> Any: """ _tts_invoke_streaming text2speech model diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py index c2ffe653c8..356ac56b1e 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py +++ b/api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py @@ -688,7 +688,7 @@ class OAIAPICompatLargeLanguageModel(_CommonOaiApiCompat, LargeLanguageModel): model: str, messages: list[PromptMessage], tools: Optional[list[PromptMessageTool]] = None, - credentials: dict = None, + credentials: Optional[dict] = None, ) -> int: """ Approximate num tokens with GPT2 tokenizer. diff --git a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py index 405096578c..cef77cc941 100644 --- a/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/openai_api_compatible/speech2text/speech2text.py @@ -3,6 +3,8 @@ from urllib.parse import urljoin import requests +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelType from core.model_runtime.errors.invoke import InvokeBadRequestError from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel @@ -59,3 +61,18 @@ class OAICompatSpeech2TextModel(_CommonOaiApiCompat, Speech2TextModel): self._invoke(model, credentials, audio_file) except Exception as ex: raise CredentialsValidateFailedError(str(ex)) + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + """ + used to define customizable model schema + """ + entity = AIModelEntity( + model=model, + label=I18nObject(en_US=model), + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_type=ModelType.SPEECH2TEXT, + model_properties={}, + parameter_rules=[], + ) + + return entity diff --git a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml b/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml index d9497b76b8..5a25c84c34 100644 --- a/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/openrouter/llm/_position.yaml @@ -14,6 +14,10 @@ - google/gemini-pro - cohere/command-r-plus - cohere/command-r +- 
meta-llama/llama-3.2-1b-instruct +- meta-llama/llama-3.2-3b-instruct +- meta-llama/llama-3.2-11b-vision-instruct +- meta-llama/llama-3.2-90b-vision-instruct - meta-llama/llama-3.1-405b-instruct - meta-llama/llama-3.1-70b-instruct - meta-llama/llama-3.1-8b-instruct @@ -22,6 +26,7 @@ - mistralai/mixtral-8x22b-instruct - mistralai/mixtral-8x7b-instruct - mistralai/mistral-7b-instruct +- qwen/qwen-2.5-72b-instruct - qwen/qwen-2-72b-instruct - deepseek/deepseek-chat - deepseek/deepseek-coder diff --git a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml index 40558854e2..e829048e55 100644 --- a/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml +++ b/api/core/model_runtime/model_providers/openrouter/llm/claude-3-5-sonnet.yaml @@ -27,9 +27,9 @@ parameter_rules: - name: max_tokens use_template: max_tokens required: true - default: 4096 + default: 8192 min: 1 - max: 4096 + max: 8192 - name: response_format use_template: response_format pricing: diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-11b-vision-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-11b-vision-instruct.yaml new file mode 100644 index 0000000000..235156997f --- /dev/null +++ b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-11b-vision-instruct.yaml @@ -0,0 +1,45 @@ +model: meta-llama/llama-3.2-11b-vision-instruct +label: + zh_Hans: llama-3.2-11b-vision-instruct + en_US: llama-3.2-11b-vision-instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + - name: max_tokens + use_template: max_tokens + - name: context_length_exceeded_behavior + default: None + label: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + help: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + type: string + options: + - None + - truncate + - error + - name: response_format + use_template: response_format +pricing: + input: '0.055' + output: '0.055' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-1b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-1b-instruct.yaml new file mode 100644 index 0000000000..657ef16835 --- /dev/null +++ b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-1b-instruct.yaml @@ -0,0 +1,45 @@ +model: meta-llama/llama-3.2-1b-instruct +label: + zh_Hans: llama-3.2-1b-instruct + en_US: llama-3.2-1b-instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. 
+ - name: max_tokens + use_template: max_tokens + - name: context_length_exceeded_behavior + default: None + label: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + help: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + type: string + options: + - None + - truncate + - error + - name: response_format + use_template: response_format +pricing: + input: '0.01' + output: '0.02' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-3b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-3b-instruct.yaml new file mode 100644 index 0000000000..7f6e24e591 --- /dev/null +++ b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-3b-instruct.yaml @@ -0,0 +1,45 @@ +model: meta-llama/llama-3.2-3b-instruct +label: + zh_Hans: llama-3.2-3b-instruct + en_US: llama-3.2-3b-instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. + - name: max_tokens + use_template: max_tokens + - name: context_length_exceeded_behavior + default: None + label: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + help: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + type: string + options: + - None + - truncate + - error + - name: response_format + use_template: response_format +pricing: + input: '0.03' + output: '0.05' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-90b-vision-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-90b-vision-instruct.yaml new file mode 100644 index 0000000000..5d597f00a2 --- /dev/null +++ b/api/core/model_runtime/model_providers/openrouter/llm/llama-3.2-90b-vision-instruct.yaml @@ -0,0 +1,45 @@ +model: meta-llama/llama-3.2-90b-vision-instruct +label: + zh_Hans: llama-3.2-90b-vision-instruct + en_US: llama-3.2-90b-vision-instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + - name: top_p + use_template: top_p + - name: top_k + label: + zh_Hans: 取样数量 + en_US: Top k + type: int + help: + zh_Hans: 仅从每个后续标记的前 K 个选项中采样。 + en_US: Only sample from the top K options for each subsequent token. 
+ - name: max_tokens + use_template: max_tokens + - name: context_length_exceeded_behavior + default: None + label: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + help: + zh_Hans: 上下文长度超出行为 + en_US: Context Length Exceeded Behavior + type: string + options: + - None + - truncate + - error + - name: response_format + use_template: response_format +pricing: + input: '0.35' + output: '0.4' + unit: '0.000001' + currency: USD diff --git a/api/core/model_runtime/model_providers/openrouter/llm/qwen2.5-72b-instruct.yaml b/api/core/model_runtime/model_providers/openrouter/llm/qwen2.5-72b-instruct.yaml new file mode 100644 index 0000000000..f141a40a00 --- /dev/null +++ b/api/core/model_runtime/model_providers/openrouter/llm/qwen2.5-72b-instruct.yaml @@ -0,0 +1,30 @@ +model: qwen/qwen-2.5-72b-instruct +label: + en_US: qwen-2.5-72b-instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + - name: max_tokens + use_template: max_tokens + type: int + default: 512 + min: 1 + max: 8192 + help: + zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 + en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. + - name: top_p + use_template: top_p + - name: frequency_penalty + use_template: frequency_penalty +pricing: + input: "0.35" + output: "0.4" + unit: "0.000001" + currency: USD diff --git a/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml b/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml index 37bf400f1e..c6930e54f5 100644 --- a/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/perfxcloud/llm/_position.yaml @@ -1,24 +1,23 @@ - Qwen2.5-72B-Instruct - Qwen2.5-7B-Instruct +- Qwen2-72B-Instruct +- Qwen2-72B-Instruct-AWQ-int4 +- Qwen2-72B-Instruct-GPTQ-Int4 +- Qwen2-7B-Instruct +- Qwen2-7B +- Qwen1.5-110B-Chat-GPTQ-Int4 +- Qwen1.5-72B-Chat-GPTQ-Int4 +- Qwen1.5-7B +- Qwen-14B-Chat-Int4 - Yi-Coder-1.5B-Chat - Yi-Coder-9B-Chat -- Qwen2-72B-Instruct-AWQ-int4 - Yi-1_5-9B-Chat-16K -- Qwen2-7B-Instruct - Reflection-Llama-3.1-70B -- Qwen2-72B-Instruct - Meta-Llama-3.1-8B-Instruct - - Meta-Llama-3.1-405B-Instruct-AWQ-INT4 - Meta-Llama-3-70B-Instruct-GPTQ-Int4 -- chatglm3-6b - Meta-Llama-3-8B-Instruct - Llama3-Chinese_v2 - deepseek-v2-lite-chat -- Qwen2-72B-Instruct-GPTQ-Int4 -- Qwen2-7B -- Qwen-14B-Chat-Int4 -- Qwen1.5-72B-Chat-GPTQ-Int4 -- Qwen1.5-7B -- Qwen1.5-110B-Chat-GPTQ-Int4 - deepseek-v2-chat +- chatglm3-6b diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/_position.yaml new file mode 100644 index 0000000000..99163d4293 --- /dev/null +++ b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/_position.yaml @@ -0,0 +1,4 @@ +- gte-Qwen2-7B-instruct +- BAAI/bge-large-en-v1.5 +- BAAI/bge-large-zh-v1.5 +- BAAI/bge-m3 diff --git a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml index 03db0d8bce..161d5ea9a2 100644 --- a/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml +++ b/api/core/model_runtime/model_providers/perfxcloud/text_embedding/gte-Qwen2-7B-instruct.yaml @@ -2,3 +2,4 @@ model: gte-Qwen2-7B-instruct 
model_type: text-embedding model_properties: context_size: 2048 +deprecated: true diff --git a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py b/api/core/model_runtime/model_providers/sagemaker/tts/tts.py index a22bd6dd6e..1a5afd18f9 100644 --- a/api/core/model_runtime/model_providers/sagemaker/tts/tts.py +++ b/api/core/model_runtime/model_providers/sagemaker/tts/tts.py @@ -77,7 +77,7 @@ class SageMakerText2SpeechModel(TTSModel): """ pass - def _detect_lang_code(self, content: str, map_dict: dict = None): + def _detect_lang_code(self, content: str, map_dict: Optional[dict] = None): map_dict = {"zh": "<|zh|>", "en": "<|en|>", "ja": "<|jp|>", "zh-TW": "<|yue|>", "ko": "<|ko|>"} response = self.comprehend_client.detect_dominant_language(Text=content) @@ -192,7 +192,7 @@ class SageMakerText2SpeechModel(TTSModel): InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError], } - def _get_model_default_voice(self, model: str, credentials: dict) -> any: + def _get_model_default_voice(self, model: str, credentials: dict) -> Any: return "" def _get_model_word_limit(self, model: str, credentials: dict) -> int: @@ -225,7 +225,7 @@ class SageMakerText2SpeechModel(TTSModel): json_obj = json.loads(json_str) return json_obj - def _tts_invoke_streaming(self, model_type: str, payload: dict, sagemaker_endpoint: str) -> any: + def _tts_invoke_streaming(self, model_type: str, payload: dict, sagemaker_endpoint: str) -> Any: """ _tts_invoke_streaming text2speech model diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml index a3e5d0981f..8d1df82140 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml +++ b/api/core/model_runtime/model_providers/siliconflow/llm/_position.yaml @@ -1,18 +1,17 @@ - Qwen/Qwen2.5-72B-Instruct -- Qwen/Qwen2.5-Math-72B-Instruct - Qwen/Qwen2.5-32B-Instruct - Qwen/Qwen2.5-14B-Instruct - Qwen/Qwen2.5-7B-Instruct - Qwen/Qwen2.5-Coder-7B-Instruct -- deepseek-ai/DeepSeek-V2.5 +- Qwen/Qwen2.5-Math-72B-Instruct - Qwen/Qwen2-72B-Instruct - Qwen/Qwen2-57B-A14B-Instruct - Qwen/Qwen2-7B-Instruct - Qwen/Qwen2-1.5B-Instruct +- deepseek-ai/DeepSeek-V2.5 - deepseek-ai/DeepSeek-V2-Chat - deepseek-ai/DeepSeek-Coder-V2-Instruct - THUDM/glm-4-9b-chat -- THUDM/chatglm3-6b - 01-ai/Yi-1.5-34B-Chat-16K - 01-ai/Yi-1.5-9B-Chat-16K - 01-ai/Yi-1.5-6B-Chat @@ -26,13 +25,4 @@ - google/gemma-2-27b-it - google/gemma-2-9b-it - mistralai/Mistral-7B-Instruct-v0.2 -- Pro/Qwen/Qwen2-7B-Instruct -- Pro/Qwen/Qwen2-1.5B-Instruct -- Pro/THUDM/glm-4-9b-chat -- Pro/THUDM/chatglm3-6b -- Pro/01-ai/Yi-1.5-9B-Chat-16K -- Pro/01-ai/Yi-1.5-6B-Chat -- Pro/internlm/internlm2_5-7b-chat -- Pro/meta-llama/Meta-Llama-3.1-8B-Instruct -- Pro/meta-llama/Meta-Llama-3-8B-Instruct -- Pro/google/gemma-2-9b-it +- mistralai/Mixtral-8x7B-Instruct-v0.1 diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml new file mode 100644 index 0000000000..d9663582e5 --- /dev/null +++ b/api/core/model_runtime/model_providers/siliconflow/llm/internlm2_5-20b-chat.yaml @@ -0,0 +1,30 @@ +model: internlm/internlm2_5-20b-chat +label: + en_US: internlm/internlm2_5-20b-chat +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 32768 +parameter_rules: + - name: temperature + use_template: temperature + - name: max_tokens + use_template: max_tokens + type: int + 
default: 512 + min: 1 + max: 4096 + help: + zh_Hans: 指定生成结果长度的上限。如果生成结果截断,可以调大该参数。 + en_US: Specifies the upper limit on the length of generated results. If the generated results are truncated, you can increase this parameter. + - name: top_p + use_template: top_p + - name: frequency_penalty + use_template: frequency_penalty +pricing: + input: '1' + output: '1' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py index c1868b6ad0..4f8f4e0f61 100644 --- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py +++ b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py @@ -1,8 +1,18 @@ from collections.abc import Generator from typing import Optional, Union -from core.model_runtime.entities.llm_entities import LLMResult +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.llm_entities import LLMMode, LLMResult from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool +from core.model_runtime.entities.model_entities import ( + AIModelEntity, + FetchFrom, + ModelFeature, + ModelPropertyKey, + ModelType, + ParameterRule, + ParameterType, +) from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel @@ -29,3 +39,53 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel): def _add_custom_parameters(cls, credentials: dict) -> None: credentials["mode"] = "chat" credentials["endpoint_url"] = "https://api.siliconflow.cn/v1" + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: + return AIModelEntity( + model=model, + label=I18nObject(en_US=model, zh_Hans=model), + model_type=ModelType.LLM, + features=[ModelFeature.TOOL_CALL, ModelFeature.MULTI_TOOL_CALL, ModelFeature.STREAM_TOOL_CALL] + if credentials.get("function_calling_type") == "tool_call" + else [], + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ + ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", 8000)), + ModelPropertyKey.MODE: LLMMode.CHAT.value, + }, + parameter_rules=[ + ParameterRule( + name="temperature", + use_template="temperature", + label=I18nObject(en_US="Temperature", zh_Hans="温度"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="max_tokens", + use_template="max_tokens", + default=512, + min=1, + max=int(credentials.get("max_tokens", 1024)), + label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"), + type=ParameterType.INT, + ), + ParameterRule( + name="top_p", + use_template="top_p", + label=I18nObject(en_US="Top P", zh_Hans="Top P"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="top_k", + use_template="top_k", + label=I18nObject(en_US="Top K", zh_Hans="Top K"), + type=ParameterType.FLOAT, + ), + ParameterRule( + name="frequency_penalty", + use_template="frequency_penalty", + label=I18nObject(en_US="Frequency Penalty", zh_Hans="重复惩罚"), + type=ParameterType.FLOAT, + ), + ], + ) diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml new file mode 100644 index 0000000000..76526200cc --- /dev/null +++ b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-coder-7b-instruct.yaml @@ -0,0 +1,74 @@ +model: Qwen/Qwen2.5-Coder-7B-Instruct +label: + en_US: Qwen/Qwen2.5-Coder-7B-Instruct +model_type: llm +features: + - 
agent-thought +model_properties: + mode: chat + context_size: 131072 +parameter_rules: + - name: temperature + use_template: temperature + type: float + default: 0.3 + min: 0.0 + max: 2.0 + help: + zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 + en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected, and the generated results are more certain. + - name: max_tokens + use_template: max_tokens + type: int + default: 8192 + min: 1 + max: 8192 + help: + zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 + en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. + - name: top_p + use_template: top_p + type: float + default: 0.8 + min: 0.1 + max: 0.9 + help: + zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 + en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. + - name: top_k + type: int + min: 0 + max: 99 + label: + zh_Hans: 取样数量 + en_US: Top k + help: + zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 + en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. + - name: seed + required: false + type: int + default: 1234 + label: + zh_Hans: 随机种子 + en_US: Random seed + help: + zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 + en_US: The random number seed used when generating; the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. + - name: repetition_penalty + required: false + type: float + default: 1.1 + label: + zh_Hans: 重复惩罚 + en_US: Repetition penalty + help: + zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 + en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. + - name: response_format + use_template: response_format +pricing: + input: '0' + output: '0' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml new file mode 100644 index 0000000000..90afa0cfd5 --- /dev/null +++ b/api/core/model_runtime/model_providers/siliconflow/llm/qwen2.5-math-72b-instruct.yaml @@ -0,0 +1,74 @@ +model: Qwen/Qwen2.5-Math-72B-Instruct +label: + en_US: Qwen/Qwen2.5-Math-72B-Instruct +model_type: llm +features: + - agent-thought +model_properties: + mode: chat + context_size: 4096 +parameter_rules: + - name: temperature + use_template: temperature + type: float + default: 0.3 + min: 0.0 + max: 2.0 + help: + zh_Hans: 用于控制随机性和多样性的程度。具体来说,temperature值控制了生成文本时对每个候选词的概率分布进行平滑的程度。较高的temperature值会降低概率分布的峰值,使得更多的低概率词被选择,生成结果更加多样化;而较低的temperature值则会增强概率分布的峰值,使得高概率词更容易被选择,生成结果更加确定。 + en_US: Used to control the degree of randomness and diversity. Specifically, the temperature value controls the degree to which the probability distribution of each candidate word is smoothed when generating text. A higher temperature value will reduce the peak value of the probability distribution, allowing more low-probability words to be selected, and the generated results will be more diverse; while a lower temperature value will enhance the peak value of the probability distribution, making it easier for high-probability words to be selected, and the generated results are more certain. + - name: max_tokens + use_template: max_tokens + type: int + default: 2000 + min: 1 + max: 2000 + help: + zh_Hans: 用于指定模型在生成内容时token的最大数量,它定义了生成的上限,但不保证每次都会生成到这个数量。 + en_US: It is used to specify the maximum number of tokens when the model generates content. It defines the upper limit of generation, but does not guarantee that this number will be generated every time. + - name: top_p + use_template: top_p + type: float + default: 0.8 + min: 0.1 + max: 0.9 + help: + zh_Hans: 生成过程中核采样方法概率阈值,例如,取值为0.8时,仅保留概率加起来大于等于0.8的最可能token的最小集合作为候选集。取值范围为(0,1.0),取值越大,生成的随机性越高;取值越低,生成的确定性越高。 + en_US: The probability threshold of the kernel sampling method during the generation process. For example, when the value is 0.8, only the smallest set of the most likely tokens with a sum of probabilities greater than or equal to 0.8 is retained as the candidate set. The value range is (0,1.0). The larger the value, the higher the randomness generated; the lower the value, the higher the certainty generated. + - name: top_k + type: int + min: 0 + max: 99 + label: + zh_Hans: 取样数量 + en_US: Top k + help: + zh_Hans: 生成时,采样候选集的大小。例如,取值为50时,仅将单次生成中得分最高的50个token组成随机采样的候选集。取值越大,生成的随机性越高;取值越小,生成的确定性越高。 + en_US: The size of the sample candidate set when generated. For example, when the value is 50, only the 50 highest-scoring tokens in a single generation form a randomly sampled candidate set. The larger the value, the higher the randomness generated; the smaller the value, the higher the certainty generated. + - name: seed + required: false + type: int + default: 1234 + label: + zh_Hans: 随机种子 + en_US: Random seed + help: + zh_Hans: 生成时使用的随机数种子,用户控制模型生成内容的随机性。支持无符号64位整数,默认值为 1234。在使用seed时,模型将尽可能生成相同或相似的结果,但目前不保证每次生成的结果完全相同。 + en_US: The random number seed used when generating; the user controls the randomness of the content generated by the model. Supports unsigned 64-bit integers, default value is 1234. 
When using seed, the model will try its best to generate the same or similar results, but there is currently no guarantee that the results will be exactly the same every time. + - name: repetition_penalty + required: false + type: float + default: 1.1 + label: + zh_Hans: 重复惩罚 + en_US: Repetition penalty + help: + zh_Hans: 用于控制模型生成时的重复度。提高repetition_penalty时可以降低模型生成的重复度。1.0表示不做惩罚。 + en_US: Used to control the repeatability when generating models. Increasing repetition_penalty can reduce the duplication of model generation. 1.0 means no punishment. + - name: response_format + use_template: response_format +pricing: + input: '4.13' + output: '4.13' + unit: '0.000001' + currency: RMB diff --git a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml b/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml index c46a891604..71f9a92381 100644 --- a/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml +++ b/api/core/model_runtime/model_providers/siliconflow/siliconflow.yaml @@ -20,6 +20,7 @@ supported_model_types: - speech2text configurate_methods: - predefined-model + - customizable-model provider_credential_schema: credential_form_schemas: - variable: api_key @@ -30,3 +31,57 @@ provider_credential_schema: placeholder: zh_Hans: 在此输入您的 API Key en_US: Enter your API Key +model_credential_schema: + model: + label: + en_US: Model Name + zh_Hans: 模型名称 + placeholder: + en_US: Enter your model name + zh_Hans: 输入模型名称 + credential_form_schemas: + - variable: api_key + label: + en_US: API Key + type: secret-input + required: true + placeholder: + zh_Hans: 在此输入您的 API Key + en_US: Enter your API Key + - variable: context_size + label: + zh_Hans: 模型上下文长度 + en_US: Model context size + required: true + type: text-input + default: '4096' + placeholder: + zh_Hans: 在此输入您的模型上下文长度 + en_US: Enter your Model context size + - variable: max_tokens + label: + zh_Hans: 最大 token 上限 + en_US: Upper bound for max tokens + default: '4096' + type: text-input + show_on: + - variable: __model_type + value: llm + - variable: function_calling_type + label: + en_US: Function calling + type: select + required: false + default: no_call + options: + - value: no_call + label: + en_US: Not Support + zh_Hans: 不支持 + - value: function_call + label: + en_US: Support + zh_Hans: 支持 + show_on: + - variable: __model_type + value: llm diff --git a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml index fdcd3d4275..7ebeec3953 100644 --- a/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml +++ b/api/core/model_runtime/model_providers/tongyi/llm/qwen2.5-coder-7b-instruct.yaml @@ -1,7 +1,7 @@ # for more details, please refer to https://help.aliyun.com/zh/model-studio/getting-started/models -model: qwen2.5-7b-instruct +model: qwen2.5-coder-7b-instruct label: - en_US: qwen2.5-7b-instruct + en_US: qwen2.5-coder-7b-instruct model_type: llm features: - agent-thought diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/__init__.py b/api/core/model_runtime/model_providers/tongyi/rerank/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml b/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml new file mode 100644 index 0000000000..439afda992 --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/_position.yaml @@ -0,0 +1 @@ +- gte-rerank 
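The siliconflow.yaml form above only collects free-form strings; the get_customizable_model_schema hook added in the llm.py hunk earlier is what parses them into a typed model entity. A condensed sketch of that mapping, using plain dicts instead of the runtime's AIModelEntity/ParameterRule classes (field names follow the credential form above; defaults mirror its '4096' values):

```python
def build_custom_model_schema(model: str, credentials: dict) -> dict:
    # Sketch only: free-form credential strings from the YAML form are
    # parsed into typed schema values for a user-named model.
    supports_tools = credentials.get("function_calling_type") == "function_call"
    return {
        "model": model,
        "mode": "chat",
        "context_size": int(credentials.get("context_size", "4096")),
        "max_tokens_upper_bound": int(credentials.get("max_tokens", "4096")),
        "features": ["tool-call"] if supports_tools else [],
    }


# e.g. a model entered by hand in the SiliconFlow provider settings:
schema = build_custom_model_schema(
    "Qwen/Qwen2.5-72B-Instruct",
    {"context_size": "131072", "max_tokens": "8192", "function_calling_type": "function_call"},
)
```

This is also why the form marks context_size as required: the runtime cannot infer it for a model it has never seen.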
diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml b/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml new file mode 100644 index 0000000000..44d51b9b0d --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/gte-rerank.yaml @@ -0,0 +1,4 @@ +model: gte-rerank +model_type: rerank +model_properties: + context_size: 4000 diff --git a/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py new file mode 100644 index 0000000000..c9245bd82d --- /dev/null +++ b/api/core/model_runtime/model_providers/tongyi/rerank/rerank.py @@ -0,0 +1,135 @@ +from typing import Optional + +import dashscope +from dashscope.common.error import ( + AuthenticationError, + InvalidParameter, + RequestFailure, + ServiceUnavailableError, + UnsupportedHTTPMethod, + UnsupportedModel, +) + +from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.rerank_model import RerankModel + + +class GTERerankModel(RerankModel): + """ + Model class for GTE rerank model. + """ + + def _invoke( + self, + model: str, + credentials: dict, + query: str, + docs: list[str], + score_threshold: Optional[float] = None, + top_n: Optional[int] = None, + user: Optional[str] = None, + ) -> RerankResult: + """ + Invoke rerank model + + :param model: model name + :param credentials: model credentials + :param query: search query + :param docs: docs for reranking + :param score_threshold: score threshold + :param top_n: top n + :param user: unique user id + :return: rerank result + """ + if len(docs) == 0: + return RerankResult(model=model, docs=docs) + + # initialize client + dashscope.api_key = credentials["dashscope_api_key"] + + response = dashscope.TextReRank.call( + query=query, + documents=docs, + model=model, + top_n=top_n, + return_documents=True, + ) + + rerank_documents = [] + for result in response.output.results: + # format document + rerank_document = RerankDocument( + index=result.index, + score=result.relevance_score, + text=result["document"]["text"], + ) + + # score threshold check + if score_threshold is not None: + if result.relevance_score >= score_threshold: + rerank_documents.append(rerank_document) + else: + rerank_documents.append(rerank_document) + + return RerankResult(model=model, docs=rerank_documents) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ + try: + self.invoke( + model=model, + credentials=credentials, + query="What is the capital of the United States?", + docs=[ + "Carson City is the capital city of the American state of Nevada. At the 2010 United States " + "Census, Carson City had a population of 55,274.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " + "are a political division controlled by the United States. Its capital is Saipan.", + ], + score_threshold=0.8, + ) + except Exception as ex: + raise CredentialsValidateFailedError(str(ex)) + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + The key is the error type thrown to the caller + The value is the error type thrown by the model, + which needs to be converted into a unified error type for the caller. + + :return: Invoke error mapping + """ + return { + InvokeConnectionError: [ + RequestFailure, + ], + InvokeServerUnavailableError: [ + ServiceUnavailableError, + ], + InvokeRateLimitError: [], + InvokeAuthorizationError: [ + AuthenticationError, + ], + InvokeBadRequestError: [ + InvalidParameter, + UnsupportedModel, + UnsupportedHTTPMethod, + ], + } diff --git a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml b/api/core/model_runtime/model_providers/tongyi/tongyi.yaml index 1a09c20fd9..6349c22714 100644 --- a/api/core/model_runtime/model_providers/tongyi/tongyi.yaml +++ b/api/core/model_runtime/model_providers/tongyi/tongyi.yaml @@ -18,6 +18,7 @@ supported_model_types: - llm - tts - text-embedding + - rerank configurate_methods: - predefined-model - customizable-model diff --git a/api/core/model_runtime/model_providers/tongyi/tts/tts.py b/api/core/model_runtime/model_providers/tongyi/tts/tts.py index 48a38897a8..ca3b9fbc1c 100644 --- a/api/core/model_runtime/model_providers/tongyi/tts/tts.py +++ b/api/core/model_runtime/model_providers/tongyi/tts/tts.py @@ -1,6 +1,6 @@ import threading from queue import Queue -from typing import Optional +from typing import Any, Optional import dashscope from dashscope import SpeechSynthesizer @@ -20,7 +20,7 @@ class TongyiText2SpeechModel(_CommonTongyi, TTSModel): def _invoke( self, model: str, tenant_id: str, credentials: dict, content_text: str, voice: str, user: Optional[str] = None - ) -> any: + ) -> Any: """ _invoke text2speech model @@ -58,7 +58,7 @@ class TongyiText2SpeechModel(_CommonTongyi, TTSModel): except Exception as ex: raise CredentialsValidateFailedError(str(ex)) - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> Any: """ _tts_invoke_streaming text2speech model diff --git a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py index 1dd785d545..1469de6055 100644 --- a/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py +++ b/api/core/model_runtime/model_providers/vertex_ai/llm/llm.py @@ -7,6 +7,7 @@ from collections.abc import Generator from typing import Optional, Union, cast import google.auth.transport.requests +import requests import vertexai.generative_models as glm from anthropic import AnthropicVertex, Stream from anthropic.types import ( @@ -653,9 +654,15 @@ class VertexAiLargeLanguageModel(LargeLanguageModel): if c.type == PromptMessageContentType.TEXT: parts.append(glm.Part.from_text(c.data)) else: - metadata, data = c.data.split(",", 1) - mime_type = metadata.split(";", 1)[0].split(":")[1] - parts.append(glm.Part.from_data(mime_type=mime_type, data=data)) + message_content = cast(ImagePromptMessageContent, c) + if not message_content.data.startswith("data:"): + url_arr = message_content.data.split(".") + mime_type = f"image/{url_arr[-1]}" + parts.append(glm.Part.from_uri(mime_type=mime_type, uri=message_content.data)) + else: + metadata, 
data = c.data.split(",", 1) + mime_type = metadata.split(";", 1)[0].split(":")[1] + parts.append(glm.Part.from_data(mime_type=mime_type, data=data)) glm_content = glm.Content(role="user", parts=parts) return glm_content elif isinstance(message, AssistantPromptMessage): diff --git a/api/core/model_runtime/model_providers/voyage/__init__.py b/api/core/model_runtime/model_providers/voyage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/voyage/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/voyage/_assets/icon_l_en.svg new file mode 100644 index 0000000000..a961f5e435 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/_assets/icon_l_en.svg @@ -0,0 +1,21 @@ +[21 lines of SVG markup; tag content not preserved in this extract] \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/voyage/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/voyage/_assets/icon_s_en.svg new file mode 100644 index 0000000000..2c4e121dd7 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/_assets/icon_s_en.svg @@ -0,0 +1,8 @@ +[8 lines of SVG markup for the "voyage" wordmark; tag content not preserved in this extract] \ No newline at end of file diff --git a/api/core/model_runtime/model_providers/voyage/rerank/__init__.py b/api/core/model_runtime/model_providers/voyage/rerank/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/_position.yaml b/api/core/model_runtime/model_providers/voyage/rerank/_position.yaml new file mode 100644 index 0000000000..32afefbe04 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/_position.yaml @@ -0,0 +1,4 @@ +- rerank-2 +- rerank-lite-2 +- rerank-1 +- rerank-lite-1 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/rerank-1.yaml b/api/core/model_runtime/model_providers/voyage/rerank/rerank-1.yaml new file mode 100644 index 0000000000..9c894eda85 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/rerank-1.yaml @@ -0,0 +1,4 @@ +model: rerank-1 +model_type: rerank +model_properties: + context_size: 8000 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/rerank-2.yaml b/api/core/model_runtime/model_providers/voyage/rerank/rerank-2.yaml new file mode 100644 index 0000000000..b760d3c418 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/rerank-2.yaml @@ -0,0 +1,4 @@ +model: rerank-2 +model_type: rerank +model_properties: + context_size: 16000 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-1.yaml b/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-1.yaml new file mode 100644 index 0000000000..b052d6f000 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-1.yaml @@ -0,0 +1,4 @@ +model: rerank-lite-1 +model_type: rerank +model_properties: + context_size: 4000 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-2.yaml b/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-2.yaml new file mode 100644 index 0000000000..b6fa37a25b --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/rerank-lite-2.yaml @@ -0,0 +1,4 @@ +model: rerank-lite-2 +model_type: rerank +model_properties: + context_size: 8000 diff --git a/api/core/model_runtime/model_providers/voyage/rerank/rerank.py b/api/core/model_runtime/model_providers/voyage/rerank/rerank.py new file mode 100644 index 0000000000..33fdebbb45 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/rerank/rerank.py @@ -0,0 +1,123 @@ 
+from typing import Optional + +import httpx + +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType +from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.rerank_model import RerankModel + + +class VoyageRerankModel(RerankModel): + """ + Model class for Voyage rerank model. + """ + + def _invoke( + self, + model: str, + credentials: dict, + query: str, + docs: list[str], + score_threshold: Optional[float] = None, + top_n: Optional[int] = None, + user: Optional[str] = None, + ) -> RerankResult: + """ + Invoke rerank model + :param model: model name + :param credentials: model credentials + :param query: search query + :param docs: docs for reranking + :param score_threshold: score threshold + :param top_n: top n documents to return + :param user: unique user id + :return: rerank result + """ + if len(docs) == 0: + return RerankResult(model=model, docs=[]) + + base_url = credentials.get("base_url", "https://api.voyageai.com/v1") + base_url = base_url.removesuffix("/") + + try: + response = httpx.post( + base_url + "/rerank", + json={"model": model, "query": query, "documents": docs, "top_k": top_n, "return_documents": True}, + headers={"Authorization": f"Bearer {credentials.get('api_key')}", "Content-Type": "application/json"}, + ) + response.raise_for_status() + results = response.json() + + rerank_documents = [] + for result in results["data"]: + rerank_document = RerankDocument( + index=result["index"], + text=result["document"], + score=result["relevance_score"], + ) + if score_threshold is None or result["relevance_score"] >= score_threshold: + rerank_documents.append(rerank_document) + + return RerankResult(model=model, docs=rerank_documents) + except httpx.HTTPStatusError as e: + raise InvokeServerUnavailableError(str(e)) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + :param model: model name + :param credentials: model credentials + :return: + """ + try: + self._invoke( + model=model, + credentials=credentials, + query="What is the capital of the United States?", + docs=[ + "Carson City is the capital city of the American state of Nevada. At the 2010 United States " + "Census, Carson City had a population of 55,274.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean that " + "are a political division controlled by the United States. 
Its capital is Saipan.", + ], + score_threshold=0.8, + ) + except Exception as ex: + raise CredentialsValidateFailedError(str(ex)) + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + """ + Map model invoke error to unified error + """ + return { + InvokeConnectionError: [httpx.ConnectError], + InvokeServerUnavailableError: [httpx.RemoteProtocolError], + InvokeRateLimitError: [], + InvokeAuthorizationError: [httpx.HTTPStatusError], + InvokeBadRequestError: [httpx.RequestError], + } + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: + """ + generate custom model entities from credentials + """ + entity = AIModelEntity( + model=model, + label=I18nObject(en_US=model), + model_type=ModelType.RERANK, + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size", "8000"))}, + ) + + return entity diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/__init__.py b/api/core/model_runtime/model_providers/voyage/text_embedding/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/_position.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/_position.yaml new file mode 100644 index 0000000000..595663990f --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/_position.yaml @@ -0,0 +1,6 @@ +- voyage-3 +- voyage-3-lite +- voyage-finance-2 +- voyage-multilingual-2 +- voyage-law-2 +- voyage-code-2 diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/voyage/text_embedding/text_embedding.py new file mode 100644 index 0000000000..a8a4d3c15b --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/text_embedding.py @@ -0,0 +1,172 @@ +import time +from json import JSONDecodeError, dumps +from typing import Optional + +import requests + +from core.embedding.embedding_constant import EmbeddingInputType +from core.model_runtime.entities.common_entities import I18nObject +from core.model_runtime.entities.model_entities import AIModelEntity, FetchFrom, ModelPropertyKey, ModelType, PriceType +from core.model_runtime.entities.text_embedding_entities import EmbeddingUsage, TextEmbeddingResult +from core.model_runtime.errors.invoke import ( + InvokeAuthorizationError, + InvokeBadRequestError, + InvokeConnectionError, + InvokeError, + InvokeRateLimitError, + InvokeServerUnavailableError, +) +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel + + +class VoyageTextEmbeddingModel(TextEmbeddingModel): + """ + Model class for Voyage text embedding model. 
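+ + Embeddings are requested from the Voyage /embeddings REST endpoint; token counts are estimated locally with the GPT-2 tokenizer.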
+ """ + + api_base: str = "https://api.voyageai.com/v1" + + def _invoke( + self, + model: str, + credentials: dict, + texts: list[str], + user: Optional[str] = None, + input_type: EmbeddingInputType = EmbeddingInputType.DOCUMENT, + ) -> TextEmbeddingResult: + """ + Invoke text embedding model + + :param model: model name + :param credentials: model credentials + :param texts: texts to embed + :param user: unique user id + :param input_type: input type + :return: embeddings result + """ + api_key = credentials["api_key"] + if not api_key: + raise CredentialsValidateFailedError("api_key is required") + + base_url = credentials.get("base_url", self.api_base) + base_url = base_url.removesuffix("/") + + url = base_url + "/embeddings" + headers = {"Authorization": "Bearer " + api_key, "Content-Type": "application/json"} + voyage_input_type = "null" + if input_type is not None: + voyage_input_type = input_type.value + data = {"model": model, "input": texts, "input_type": voyage_input_type} + + try: + response = requests.post(url, headers=headers, data=dumps(data)) + except Exception as e: + raise InvokeConnectionError(str(e)) + + if response.status_code != 200: + try: + resp = response.json() + msg = resp["detail"] + if response.status_code == 401: + raise InvokeAuthorizationError(msg) + elif response.status_code == 429: + raise InvokeRateLimitError(msg) + elif response.status_code == 500: + raise InvokeServerUnavailableError(msg) + else: + raise InvokeBadRequestError(msg) + except JSONDecodeError as e: + raise InvokeServerUnavailableError( + f"Failed to convert response to json: {e} with text: {response.text}" + ) + + try: + resp = response.json() + embeddings = resp["data"] + usage = resp["usage"] + except Exception as e: + raise InvokeServerUnavailableError(f"Failed to convert response to json: {e} with text: {response.text}") + + usage = self._calc_response_usage(model=model, credentials=credentials, tokens=usage["total_tokens"]) + + result = TextEmbeddingResult( + model=model, embeddings=[[float(data) for data in x["embedding"]] for x in embeddings], usage=usage + ) + + return result + + def get_num_tokens(self, model: str, credentials: dict, texts: list[str]) -> int: + """ + Get number of tokens for given prompt messages + + :param model: model name + :param credentials: model credentials + :param texts: texts to embed + :return: + """ + return sum(self._get_num_tokens_by_gpt2(text) for text in texts) + + def validate_credentials(self, model: str, credentials: dict) -> None: + """ + Validate model credentials + + :param model: model name + :param credentials: model credentials + :return: + """ + try: + self._invoke(model=model, credentials=credentials, texts=["ping"]) + except Exception as e: + raise CredentialsValidateFailedError(f"Credentials validation failed: {e}") + + @property + def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]: + return { + InvokeConnectionError: [InvokeConnectionError], + InvokeServerUnavailableError: [InvokeServerUnavailableError], + InvokeRateLimitError: [InvokeRateLimitError], + InvokeAuthorizationError: [InvokeAuthorizationError], + InvokeBadRequestError: [KeyError, InvokeBadRequestError], + } + + def _calc_response_usage(self, model: str, credentials: dict, tokens: int) -> EmbeddingUsage: + """ + Calculate response usage + + :param model: model name + :param credentials: model credentials + :param tokens: input tokens + :return: usage + """ + # get input price info + input_price_info = self.get_price( + model=model, 
credentials=credentials, price_type=PriceType.INPUT, tokens=tokens + ) + + # transform usage + usage = EmbeddingUsage( + tokens=tokens, + total_tokens=tokens, + unit_price=input_price_info.unit_price, + price_unit=input_price_info.unit, + total_price=input_price_info.total_amount, + currency=input_price_info.currency, + latency=time.perf_counter() - self.started_at, + ) + + return usage + + def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity: + """ + generate custom model entities from credentials + """ + entity = AIModelEntity( + model=model, + label=I18nObject(en_US=model), + model_type=ModelType.TEXT_EMBEDDING, + fetch_from=FetchFrom.CUSTOMIZABLE_MODEL, + model_properties={ModelPropertyKey.CONTEXT_SIZE: int(credentials.get("context_size"))}, + ) + + return entity diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3-lite.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3-lite.yaml new file mode 100644 index 0000000000..a06bb7639f --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3-lite.yaml @@ -0,0 +1,8 @@ +model: voyage-3-lite +model_type: text-embedding +model_properties: + context_size: 32000 +pricing: + input: '0.00002' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3.yaml new file mode 100644 index 0000000000..117afbcaf3 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-3.yaml @@ -0,0 +1,8 @@ +model: voyage-3 +model_type: text-embedding +model_properties: + context_size: 32000 +pricing: + input: '0.00006' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-code-2.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-code-2.yaml new file mode 100644 index 0000000000..693669c82c --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-code-2.yaml @@ -0,0 +1,8 @@ +model: voyage-code-2 +model_type: text-embedding +model_properties: + context_size: 16000 +pricing: + input: '0.00012' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-finance-2.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-finance-2.yaml new file mode 100644 index 0000000000..555e11002a --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-finance-2.yaml @@ -0,0 +1,8 @@ +model: voyage-finance-2 +model_type: text-embedding +model_properties: + context_size: 32000 +pricing: + input: '0.00012' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-law-2.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-law-2.yaml new file mode 100644 index 0000000000..032693286f --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-law-2.yaml @@ -0,0 +1,8 @@ +model: voyage-law-2 +model_type: text-embedding +model_properties: + context_size: 16000 +pricing: + input: '0.00012' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-multilingual-2.yaml b/api/core/model_runtime/model_providers/voyage/text_embedding/voyage-multilingual-2.yaml new file mode 100644 index 0000000000..9ecf4d5009 --- /dev/null +++ 
b/api/core/rag/datasource/vdb/vikingdb/../../../../model_runtime/model_providers/voyage/text_embedding/voyage-multilingual-2.yaml @@ -0,0 +1,8 @@ +model: voyage-multilingual-2 +model_type: text-embedding +model_properties: + context_size: 32000 +pricing: + input: '0.00012' + unit: '0.001' + currency: USD diff --git a/api/core/model_runtime/model_providers/voyage/voyage.py b/api/core/model_runtime/model_providers/voyage/voyage.py new file mode 100644 index 0000000000..3e33b45e11 --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/voyage.py @@ -0,0 +1,28 @@ +import logging + +from core.model_runtime.entities.model_entities import ModelType +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.__base.model_provider import ModelProvider + +logger = logging.getLogger(__name__) + + +class VoyageProvider(ModelProvider): + def validate_provider_credentials(self, credentials: dict) -> None: + """ + Validate provider credentials + if validation fails, raise an exception + + :param credentials: provider credentials, credentials form defined in `provider_credential_schema`. + """ + try: + model_instance = self.get_model_instance(ModelType.TEXT_EMBEDDING) + + # Use the `voyage-3` model for validation, + # regardless of which model is passed in + model_instance.validate_credentials(model="voyage-3", credentials=credentials) + except CredentialsValidateFailedError as ex: + raise ex + except Exception as ex: + logger.exception(f"{self.get_provider_schema().provider} credentials validate failed") + raise ex diff --git a/api/core/model_runtime/model_providers/voyage/voyage.yaml b/api/core/model_runtime/model_providers/voyage/voyage.yaml new file mode 100644 index 0000000000..c64707800e --- /dev/null +++ b/api/core/model_runtime/model_providers/voyage/voyage.yaml @@ -0,0 +1,31 @@ +provider: voyage +label: + en_US: Voyage +description: + en_US: Embedding and rerank models supported +icon_small: + en_US: icon_s_en.svg +icon_large: + en_US: icon_l_en.svg +background: "#EFFDFD" +help: + title: + en_US: Get your API key from Voyage AI + zh_Hans: 从 Voyage 获取 API Key + url: + en_US: https://dash.voyageai.com/ +supported_model_types: + - text-embedding + - rerank +configurate_methods: + - predefined-model +provider_credential_schema: + credential_form_schemas: + - variable: api_key + label: + en_US: API Key + type: secret-input + required: true + placeholder: + zh_Hans: 在此输入您的 API Key + en_US: Enter your API Key diff --git a/api/core/model_runtime/model_providers/wenxin/llm/llm.py b/api/core/model_runtime/model_providers/wenxin/llm/llm.py index f7c160b6b4..952cbb33f4 100644 --- a/api/core/model_runtime/model_providers/wenxin/llm/llm.py +++ b/api/core/model_runtime/model_providers/wenxin/llm/llm.py @@ -64,7 +64,7 @@ class ErnieBotLargeLanguageModel(LargeLanguageModel): stop: Optional[list[str]] = None, stream: bool = True, user: Optional[str] = None, - callbacks: list[Callback] = None, + callbacks: Optional[list[Callback]] = None, ) -> Union[LLMResult, Generator]: """ Code block mode wrapper for invoking large language model diff --git a/api/core/model_runtime/model_providers/xinference/llm/llm.py b/api/core/model_runtime/model_providers/xinference/llm/llm.py index 286640079b..0c9d08679a 100644 --- a/api/core/model_runtime/model_providers/xinference/llm/llm.py +++ b/api/core/model_runtime/model_providers/xinference/llm/llm.py @@ -59,6 +59,7 @@ from core.model_runtime.model_providers.__base.large_language_model import Large from
core.model_runtime.model_providers.xinference.xinference_helper import ( XinferenceHelper, XinferenceModelExtraParameter, + validate_model_uid, ) from core.model_runtime.utils import helper @@ -114,7 +115,7 @@ class XinferenceAILargeLanguageModel(LargeLanguageModel): } """ try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: + if not validate_model_uid(credentials): raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") extra_param = XinferenceHelper.get_xinference_extra_parameter( diff --git a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py b/api/core/model_runtime/model_providers/xinference/rerank/rerank.py index 8f18bc42d2..6368cd76dc 100644 --- a/api/core/model_runtime/model_providers/xinference/rerank/rerank.py +++ b/api/core/model_runtime/model_providers/xinference/rerank/rerank.py @@ -15,6 +15,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.rerank_model import RerankModel +from core.model_runtime.model_providers.xinference.xinference_helper import validate_model_uid class XinferenceRerankModel(RerankModel): @@ -77,10 +78,7 @@ class XinferenceRerankModel(RerankModel): ) # score threshold check - if score_threshold is not None: - if result["relevance_score"] >= score_threshold: - rerank_documents.append(rerank_document) - else: + if score_threshold is None or result["relevance_score"] >= score_threshold: rerank_documents.append(rerank_document) return RerankResult(model=model, docs=rerank_documents) @@ -94,7 +92,7 @@ class XinferenceRerankModel(RerankModel): :return: """ try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: + if not validate_model_uid(credentials): raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") credentials["server_url"] = credentials["server_url"].removesuffix("/") diff --git a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py b/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py index a6c5b8a0a5..c5ad383911 100644 --- a/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py +++ b/api/core/model_runtime/model_providers/xinference/speech2text/speech2text.py @@ -14,6 +14,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.speech2text_model import Speech2TextModel +from core.model_runtime.model_providers.xinference.xinference_helper import validate_model_uid class XinferenceSpeech2TextModel(Speech2TextModel): @@ -42,7 +43,7 @@ class XinferenceSpeech2TextModel(Speech2TextModel): :return: """ try: - if "/" in credentials["model_uid"] or "?" 
in credentials["model_uid"] or "#" in credentials["model_uid"]: + if not validate_model_uid(credentials): raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") credentials["server_url"] = credentials["server_url"].removesuffix("/") diff --git a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py index 1627239132..ddc21b365c 100644 --- a/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py +++ b/api/core/model_runtime/model_providers/xinference/text_embedding/text_embedding.py @@ -17,7 +17,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.text_embedding_model import TextEmbeddingModel -from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper +from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper, validate_model_uid class XinferenceTextEmbeddingModel(TextEmbeddingModel): @@ -110,7 +110,7 @@ class XinferenceTextEmbeddingModel(TextEmbeddingModel): :return: """ try: - if "/" in credentials["model_uid"] or "?" in credentials["model_uid"] or "#" in credentials["model_uid"]: + if not validate_model_uid(credentials): raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") server_url = credentials["server_url"] diff --git a/api/core/model_runtime/model_providers/xinference/tts/tts.py b/api/core/model_runtime/model_providers/xinference/tts/tts.py index 81dbe397d2..3f46b50c33 100644 --- a/api/core/model_runtime/model_providers/xinference/tts/tts.py +++ b/api/core/model_runtime/model_providers/xinference/tts/tts.py @@ -1,5 +1,5 @@ import concurrent.futures -from typing import Optional +from typing import Any, Optional from xinference_client.client.restful.restful_client import RESTfulAudioModelHandle @@ -15,7 +15,7 @@ from core.model_runtime.errors.invoke import ( ) from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.__base.tts_model import TTSModel -from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper +from core.model_runtime.model_providers.xinference.xinference_helper import XinferenceHelper, validate_model_uid class XinferenceText2SpeechModel(TTSModel): @@ -70,7 +70,7 @@ class XinferenceText2SpeechModel(TTSModel): :return: """ try: - if "/" in credentials["model_uid"] or "?" 
in credentials["model_uid"] or "#" in credentials["model_uid"]: + if not validate_model_uid(credentials): raise CredentialsValidateFailedError("model_uid should not contain /, ?, or #") credentials["server_url"] = credentials["server_url"].removesuffix("/") @@ -166,7 +166,7 @@ class XinferenceText2SpeechModel(TTSModel): return self.model_voices["__default"]["all"] - def _get_model_default_voice(self, model: str, credentials: dict) -> any: + def _get_model_default_voice(self, model: str, credentials: dict) -> Any: return "" def _get_model_word_limit(self, model: str, credentials: dict) -> int: @@ -178,7 +178,7 @@ class XinferenceText2SpeechModel(TTSModel): def _get_model_workers_limit(self, model: str, credentials: dict) -> int: return 5 - def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> any: + def _tts_invoke_streaming(self, model: str, credentials: dict, content_text: str, voice: str) -> Any: """ _tts_invoke_streaming text2speech model diff --git a/api/core/model_runtime/model_providers/xinference/xinference_helper.py b/api/core/model_runtime/model_providers/xinference/xinference_helper.py index 619ee1492a..baa3ccbe8a 100644 --- a/api/core/model_runtime/model_providers/xinference/xinference_helper.py +++ b/api/core/model_runtime/model_providers/xinference/xinference_helper.py @@ -132,3 +132,16 @@ class XinferenceHelper: context_length=context_length, model_family=model_family, ) + + +def validate_model_uid(credentials: dict) -> bool: + """ + Validate the model_uid within the credentials dictionary to ensure it does not + contain forbidden characters ("/", "?", "#"). + + param credentials: model credentials + :return: True if the model_uid does not contain forbidden characters ("/", "?", "#"), else False. 
+ """ + forbidden_characters = ["/", "?", "#"] + model_uid = credentials.get("model_uid", "") + return not any(char in forbidden_characters for char in model_uid) diff --git a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py index ea331701ab..e0c4980523 100644 --- a/api/core/model_runtime/model_providers/zhipuai/llm/llm.py +++ b/api/core/model_runtime/model_providers/zhipuai/llm/llm.py @@ -223,6 +223,16 @@ class ZhipuAILargeLanguageModel(_CommonZhipuaiAI, LargeLanguageModel): else: new_prompt_messages.append(copy_prompt_message) + # zhipuai moved web_search param to tools + if "web_search" in model_parameters: + enable_web_search = model_parameters.get("web_search") + model_parameters.pop("web_search") + web_search_params = {"type": "web_search", "web_search": {"enable": enable_web_search}} + if "tools" in model_parameters: + model_parameters["tools"].append(web_search_params) + else: + model_parameters["tools"] = [web_search_params] + if model in {"glm-4v", "glm-4v-plus"}: params = self._construct_glm_4v_parameter(model, new_prompt_messages, model_parameters) else: diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py index f772340a82..c29b057498 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/assistant/assistant.py @@ -41,8 +41,8 @@ class Assistant(BaseAPI): conversation_id: Optional[str] = None, attachments: Optional[list[assistant_create_params.AssistantAttachments]] = None, metadata: dict | None = None, - request_id: str = None, - user_id: str = None, + request_id: Optional[str] = None, + user_id: Optional[str] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, @@ -72,9 +72,9 @@ class Assistant(BaseAPI): def query_support( self, *, - assistant_id_list: list[str] = None, - request_id: str = None, - user_id: str = None, + assistant_id_list: Optional[list[str]] = None, + request_id: Optional[str] = None, + user_id: Optional[str] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, @@ -99,8 +99,8 @@ class Assistant(BaseAPI): page: int = 1, page_size: int = 10, *, - request_id: str = None, - user_id: str = None, + request_id: Optional[str] = None, + user_id: Optional[str] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py index ba9de75b7e..c723f6f66e 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/files.py @@ -1,7 +1,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Literal, cast +from typing import TYPE_CHECKING, Literal, Optional, cast import httpx @@ -34,11 +34,11 @@ class Files(BaseAPI): def create( self, *, - file: FileTypes = None, - upload_detail: list[UploadDetail] = None, + file: Optional[FileTypes] = None, + 
upload_detail: Optional[list[UploadDetail]] = None, purpose: Literal["fine-tune", "retrieval", "batch"], - knowledge_id: str = None, - sentence_size: int = None, + knowledge_id: Optional[str] = None, + sentence_size: Optional[int] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py index 2c4066d893..492c49da66 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/knowledge/document/document.py @@ -34,12 +34,12 @@ class Document(BaseAPI): def create( self, *, - file: FileTypes = None, + file: Optional[FileTypes] = None, custom_separator: Optional[list[str]] = None, - upload_detail: list[UploadDetail] = None, + upload_detail: Optional[list[UploadDetail]] = None, purpose: Literal["retrieval"], - knowledge_id: str = None, - sentence_size: int = None, + knowledge_id: Optional[str] = None, + sentence_size: Optional[int] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py index f1f1c08036..71c8316602 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/api_resource/videos/videos.py @@ -31,11 +31,11 @@ class Videos(BaseAPI): self, model: str, *, - prompt: str = None, - image_url: str = None, + prompt: Optional[str] = None, + image_url: Optional[str] = None, sensitive_word_check: Optional[SensitiveWordCheckRequest] | NotGiven = NOT_GIVEN, - request_id: str = None, - user_id: str = None, + request_id: Optional[str] = None, + user_id: Optional[str] = None, extra_headers: Headers | None = None, extra_body: Body | None = None, timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py index 6d8ba700b7..69b1d3a83d 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_base_models.py @@ -48,7 +48,7 @@ from ._utils import ( ) if TYPE_CHECKING: - from pydantic_core.core_schema import LiteralSchema, ModelField, ModelFieldsSchema + from pydantic_core.core_schema import ModelField __all__ = ["BaseModel", "GenericModel"] _BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") diff --git a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py index ce5e7786aa..3a7b234ab0 100644 --- a/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py +++ b/api/core/model_runtime/model_providers/zhipuai/zhipuai_sdk/core/_utils/_utils.py @@ -248,7 +248,7 @@ def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: @functools.wraps(func) def wrapper(*args: 
object, **kwargs: object) -> object: given_params: set[str] = set() - for i, _ in enumerate(args): + for i in range(len(args)): try: given_params.add(positional[i]) except IndexError: diff --git a/api/core/moderation/keywords/keywords.py b/api/core/moderation/keywords/keywords.py index dc6a7ec564..4846da8f93 100644 --- a/api/core/moderation/keywords/keywords.py +++ b/api/core/moderation/keywords/keywords.py @@ -18,8 +18,12 @@ class KeywordsModeration(Moderation): if not config.get("keywords"): raise ValueError("keywords is required") - if len(config.get("keywords")) > 1000: - raise ValueError("keywords length must be less than 1000") + if len(config.get("keywords")) > 10000: + raise ValueError("keywords length must not exceed 10000") + + keywords_row_len = config["keywords"].split("\n") + if len(keywords_row_len) > 100: + raise ValueError("the number of keyword rows must not exceed 100") def moderation_for_inputs(self, inputs: dict, query: str = "") -> ModerationInputsResult: flagged = False diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index 6aefbec9aa..0cba40c51a 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -110,26 +110,35 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_trace(langfuse_trace_data=trace_data) # through workflow_run_id get all_nodes_execution - workflow_nodes_executions = ( - db.session.query( - WorkflowNodeExecution.id, - WorkflowNodeExecution.tenant_id, - WorkflowNodeExecution.app_id, - WorkflowNodeExecution.title, - WorkflowNodeExecution.node_type, - WorkflowNodeExecution.status, - WorkflowNodeExecution.inputs, - WorkflowNodeExecution.outputs, - WorkflowNodeExecution.created_at, - WorkflowNodeExecution.elapsed_time, - WorkflowNodeExecution.process_data, - WorkflowNodeExecution.execution_metadata, - ) + workflow_nodes_execution_id_records = ( + db.session.query(WorkflowNodeExecution.id) .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id) .all() ) - for node_execution in workflow_nodes_executions: + for node_execution_id_record in workflow_nodes_execution_id_records: + node_execution = ( + db.session.query( + WorkflowNodeExecution.id, + WorkflowNodeExecution.tenant_id, + WorkflowNodeExecution.app_id, + WorkflowNodeExecution.title, + WorkflowNodeExecution.node_type, + WorkflowNodeExecution.status, + WorkflowNodeExecution.inputs, + WorkflowNodeExecution.outputs, + WorkflowNodeExecution.created_at, + WorkflowNodeExecution.elapsed_time, + WorkflowNodeExecution.process_data, + WorkflowNodeExecution.execution_metadata, + ) + .filter(WorkflowNodeExecution.id == node_execution_id_record.id) + .first() + ) + + if not node_execution: + continue + node_execution_id = node_execution.id tenant_id = node_execution.tenant_id app_id = node_execution.app_id @@ -159,6 +168,16 @@ class LangFuseDataTrace(BaseTraceInstance): "status": status, } ) + process_data = json.loads(node_execution.process_data) if node_execution.process_data else {} + model_provider = process_data.get("model_provider", None) + model_name = process_data.get("model_name", None) + if model_provider is not None and model_name is not None: + metadata.update( + { + "model_provider": model_provider, + "model_name": model_name, + } + ) # add span if trace_info.message_id: @@ -191,7 +210,6 @@ class LangFuseDataTrace(BaseTraceInstance): self.add_span(langfuse_span_data=span_data) - process_data = json.loads(node_execution.process_data) if
node_execution.process_data else {} if process_data and process_data.get("model_mode") == "chat": total_token = metadata.get("total_tokens", 0) # add generation diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index 37cbea13fd..ad45050405 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -100,26 +100,35 @@ class LangSmithDataTrace(BaseTraceInstance): self.add_run(langsmith_run) # through workflow_run_id get all_nodes_execution - workflow_nodes_executions = ( - db.session.query( - WorkflowNodeExecution.id, - WorkflowNodeExecution.tenant_id, - WorkflowNodeExecution.app_id, - WorkflowNodeExecution.title, - WorkflowNodeExecution.node_type, - WorkflowNodeExecution.status, - WorkflowNodeExecution.inputs, - WorkflowNodeExecution.outputs, - WorkflowNodeExecution.created_at, - WorkflowNodeExecution.elapsed_time, - WorkflowNodeExecution.process_data, - WorkflowNodeExecution.execution_metadata, - ) + workflow_nodes_execution_id_records = ( + db.session.query(WorkflowNodeExecution.id) .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id) .all() ) - for node_execution in workflow_nodes_executions: + for node_execution_id_record in workflow_nodes_execution_id_records: + node_execution = ( + db.session.query( + WorkflowNodeExecution.id, + WorkflowNodeExecution.tenant_id, + WorkflowNodeExecution.app_id, + WorkflowNodeExecution.title, + WorkflowNodeExecution.node_type, + WorkflowNodeExecution.status, + WorkflowNodeExecution.inputs, + WorkflowNodeExecution.outputs, + WorkflowNodeExecution.created_at, + WorkflowNodeExecution.elapsed_time, + WorkflowNodeExecution.process_data, + WorkflowNodeExecution.execution_metadata, + ) + .filter(WorkflowNodeExecution.id == node_execution_id_record.id) + .first() + ) + + if not node_execution: + continue + node_execution_id = node_execution.id tenant_id = node_execution.tenant_id app_id = node_execution.app_id diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py index 3073100746..a0153c1e58 100644 --- a/api/core/rag/datasource/keyword/jieba/jieba.py +++ b/api/core/rag/datasource/keyword/jieba/jieba.py @@ -45,7 +45,7 @@ class Jieba(BaseKeyword): keyword_table_handler = JiebaKeywordTableHandler() keyword_table = self._get_dataset_keyword_table() - keywords_list = kwargs.get("keywords_list", None) + keywords_list = kwargs.get("keywords_list") for i in range(len(texts)): text = texts[i] if keywords_list: diff --git a/api/core/rag/datasource/keyword/keyword_base.py b/api/core/rag/datasource/keyword/keyword_base.py index 4b9ec460e6..be00687abd 100644 --- a/api/core/rag/datasource/keyword/keyword_base.py +++ b/api/core/rag/datasource/keyword/keyword_base.py @@ -27,9 +27,11 @@ class BaseKeyword(ABC): def delete_by_ids(self, ids: list[str]) -> None: raise NotImplementedError + @abstractmethod def delete(self) -> None: raise NotImplementedError + @abstractmethod def search(self, query: str, **kwargs: Any) -> list[Document]: raise NotImplementedError diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index afac1bf300..d3fd0c672a 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -10,6 +10,7 @@ from core.rag.rerank.constants.rerank_mode import RerankMode from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from 
models.dataset import Dataset +from services.external_knowledge_service import ExternalDatasetService default_retrieval_model = { "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, @@ -34,6 +35,9 @@ class RetrievalService: weights: Optional[dict] = None, ): dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + if not dataset: + return [] + if not dataset or dataset.available_document_count == 0 or dataset.available_segment_count == 0: return [] all_documents = [] @@ -108,6 +112,16 @@ class RetrievalService: ) return all_documents + @classmethod + def external_retrieve(cls, dataset_id: str, query: str, external_retrieval_model: Optional[dict] = None): + dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + if not dataset: + return [] + all_documents = ExternalDatasetService.fetch_external_knowledge_retrieval( + dataset.tenant_id, dataset_id, query, external_retrieval_model + ) + return all_documents + @classmethod def keyword_search( cls, flask_app: Flask, dataset_id: str, query: str, top_k: int, all_documents: list, exceptions: list diff --git a/api/core/rag/datasource/vdb/baidu/__init__.py b/api/core/rag/datasource/vdb/baidu/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py new file mode 100644 index 0000000000..543cfa67b3 --- /dev/null +++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py @@ -0,0 +1,272 @@ +import json +import time +import uuid +from typing import Any + +from pydantic import BaseModel, model_validator +from pymochow import MochowClient +from pymochow.auth.bce_credentials import BceCredentials +from pymochow.configuration import Configuration +from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, TableState +from pymochow.model.schema import Field, HNSWParams, Schema, VectorIndex +from pymochow.model.table import AnnSearch, HNSWSearchParams, Partition, Row + +from configs import dify_config +from core.rag.datasource.entity.embedding import Embeddings +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + + +class BaiduConfig(BaseModel): + endpoint: str + connection_timeout_in_mills: int = 30 * 1000 + account: str + api_key: str + database: str + index_type: str = "HNSW" + metric_type: str = "L2" + shard: int = 1 + replicas: int = 3 + + @model_validator(mode="before") + @classmethod + def validate_config(cls, values: dict) -> dict: + if not values["endpoint"]: + raise ValueError("config BAIDU_VECTOR_DB_ENDPOINT is required") + if not values["account"]: + raise ValueError("config BAIDU_VECTOR_DB_ACCOUNT is required") + if not values["api_key"]: + raise ValueError("config BAIDU_VECTOR_DB_API_KEY is required") + if not values["database"]: + raise ValueError("config BAIDU_VECTOR_DB_DATABASE is required") + return values + + +class BaiduVector(BaseVector): + field_id: str = "id" + field_vector: str = "vector" + field_text: str = "text" + field_metadata: str = "metadata" + field_app_id: str = "app_id" + field_annotation_id: str = "annotation_id" + index_vector: str = "vector_idx" + + def __init__(self, collection_name: str, config: BaiduConfig): + super().__init__(collection_name) + self._client_config = 
config + self._client = self._init_client(config) + self._db = self._init_database() + + def get_type(self) -> str: + return VectorType.BAIDU + + def to_index_struct(self) -> dict: + return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + self._create_table(len(embeddings[0])) + self.add_texts(texts, embeddings) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + texts = [doc.page_content for doc in documents] + metadatas = [doc.metadata for doc in documents] + total_count = len(documents) + batch_size = 1000 + + # upsert texts and embeddings batch by batch + table = self._db.table(self._collection_name) + for start in range(0, total_count, batch_size): + end = min(start + batch_size, total_count) + rows = [] + for i in range(start, end, 1): + row = Row( + id=metadatas[i].get("doc_id", str(uuid.uuid4())), + vector=embeddings[i], + text=texts[i], + metadata=json.dumps(metadatas[i]), + app_id=metadatas[i].get("app_id", ""), + annotation_id=metadatas[i].get("annotation_id", ""), + ) + rows.append(row) + table.upsert(rows=rows) + + # rebuild vector index after upsert finished + table.rebuild_index(self.index_vector) + while True: + time.sleep(1) + index = table.describe_index(self.index_vector) + if index.state == IndexState.NORMAL: + break + + def text_exists(self, id: str) -> bool: + res = self._db.table(self._collection_name).query(primary_key={self.field_id: id}) + if res and res.code == 0: + return True + return False + + def delete_by_ids(self, ids: list[str]) -> None: + quoted_ids = [f"'{id}'" for id in ids] + self._db.table(self._collection_name).delete(filter=f"id IN({', '.join(quoted_ids)})") + + def delete_by_metadata_field(self, key: str, value: str) -> None: + self._db.table(self._collection_name).delete(filter=f"{key} = '{value}'") + + def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + anns = AnnSearch( + vector_field=self.field_vector, + vector_floats=query_vector, + params=HNSWSearchParams(ef=kwargs.get("ef", 10), limit=kwargs.get("top_k", 4)), + ) + res = self._db.table(self._collection_name).search( + anns=anns, + projections=[self.field_id, self.field_text, self.field_metadata], + retrieve_vector=True, + ) + score_threshold = float(kwargs.get("score_threshold") or 0.0) + return self._get_search_res(res, score_threshold) + + def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: + # baidu vector database doesn't support bm25 search on current version + return [] + + def _get_search_res(self, res, score_threshold): + docs = [] + for row in res.rows: + row_data = row.get("row", {}) + meta = row_data.get(self.field_metadata) + if meta is not None: + meta = json.loads(meta) + score = row.get("score", 0.0) + if score > score_threshold: + meta["score"] = score + doc = Document(page_content=row_data.get(self.field_text), metadata=meta) + docs.append(doc) + + return docs + + def delete(self) -> None: + self._db.drop_table(table_name=self._collection_name) + + def _init_client(self, config) -> MochowClient: + config = Configuration(credentials=BceCredentials(config.account, config.api_key), endpoint=config.endpoint) + client = MochowClient(config) + return client + + def _init_database(self): + exists = False + for db in self._client.list_databases(): + if db.database_name == self._client_config.database: + exists = True + break + # Create database if not existed 
+ if exists: + return self._client.database(self._client_config.database) + else: + return self._client.create_database(database_name=self._client_config.database) + + def _table_existed(self) -> bool: + tables = self._db.list_table() + return any(table.table_name == self._collection_name for table in tables) + + def _create_table(self, dimension: int) -> None: + # Try to grab distributed lock and create table + lock_name = "vector_indexing_lock_{}".format(self._collection_name) + with redis_client.lock(lock_name, timeout=20): + table_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + if redis_client.get(table_exist_cache_key): + return + + if self._table_existed(): + return + + self.delete() + + # check IndexType and MetricType + index_type = None + for k, v in IndexType.__members__.items(): + if k == self._client_config.index_type: + index_type = v + if index_type is None: + raise ValueError("unsupported index_type") + metric_type = None + for k, v in MetricType.__members__.items(): + if k == self._client_config.metric_type: + metric_type = v + if metric_type is None: + raise ValueError("unsupported metric_type") + + # Construct field schema + fields = [] + fields.append( + Field( + self.field_id, + FieldType.STRING, + primary_key=True, + partition_key=True, + auto_increment=False, + not_null=True, + ) + ) + fields.append(Field(self.field_metadata, FieldType.STRING, not_null=True)) + fields.append(Field(self.field_app_id, FieldType.STRING)) + fields.append(Field(self.field_annotation_id, FieldType.STRING)) + fields.append(Field(self.field_text, FieldType.TEXT, not_null=True)) + fields.append(Field(self.field_vector, FieldType.FLOAT_VECTOR, not_null=True, dimension=dimension)) + + # Construct vector index params + indexes = [] + indexes.append( + VectorIndex( + index_name="vector_idx", + index_type=index_type, + field="vector", + metric_type=metric_type, + params=HNSWParams(m=16, efconstruction=200), + ) + ) + + # Create table + self._db.create_table( + table_name=self._collection_name, + replication=self._client_config.replicas, + partition=Partition(partition_num=self._client_config.shard), + schema=Schema(fields=fields, indexes=indexes), + description="Table for Dify", + ) + + redis_client.set(table_exist_cache_key, 1, ex=3600) + + # Wait for table created + while True: + time.sleep(1) + table = self._db.describe_table(self._collection_name) + if table.state == TableState.NORMAL: + break + + +class BaiduVectorFactory(AbstractVectorFactory): + def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> BaiduVector: + if dataset.index_struct_dict: + class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"] + collection_name = class_prefix.lower() + else: + dataset_id = dataset.id + collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower() + dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.BAIDU, collection_name)) + + return BaiduVector( + collection_name=collection_name, + config=BaiduConfig( + endpoint=dify_config.BAIDU_VECTOR_DB_ENDPOINT, + connection_timeout_in_mills=dify_config.BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS, + account=dify_config.BAIDU_VECTOR_DB_ACCOUNT, + api_key=dify_config.BAIDU_VECTOR_DB_API_KEY, + database=dify_config.BAIDU_VECTOR_DB_DATABASE, + shard=dify_config.BAIDU_VECTOR_DB_SHARD, + replicas=dify_config.BAIDU_VECTOR_DB_REPLICAS, + ), + ) diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py 
b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index 8d57855120..66bc31a4bf 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -1,5 +1,6 @@ import json import logging +import math from typing import Any, Optional from urllib.parse import urlparse @@ -76,7 +77,7 @@ class ElasticSearchVector(BaseVector): raise ValueError("Elasticsearch vector database version must be greater than 8.0.0") def get_type(self) -> str: - return "elasticsearch" + return VectorType.ELASTICSEARCH def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): uuids = self._get_uuids(documents) @@ -112,7 +113,8 @@ def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: top_k = kwargs.get("top_k", 10) - knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k} + num_candidates = math.ceil(top_k * 1.5) + knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates} results = self._client.search(index=self._collection_name, knn=knn, size=top_k) diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py index 79879d4f63..25a10a1e48 100644 --- a/api/core/rag/datasource/vdb/pgvector/pgvector.py +++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py @@ -23,6 +23,8 @@ class PGVectorConfig(BaseModel): user: str password: str database: str + min_connection: int + max_connection: int @model_validator(mode="before") @classmethod @@ -37,6 +39,12 @@ raise ValueError("config PGVECTOR_PASSWORD is required") if not values["database"]: raise ValueError("config PGVECTOR_DATABASE is required") + if not values["min_connection"]: + raise ValueError("config PGVECTOR_MIN_CONNECTION is required") + if not values["max_connection"]: + raise ValueError("config PGVECTOR_MAX_CONNECTION is required") + if values["min_connection"] > values["max_connection"]: + raise ValueError("config PGVECTOR_MIN_CONNECTION should be less than or equal to PGVECTOR_MAX_CONNECTION") return values @@ -61,8 +69,8 @@ class PGVector(BaseVector): def _create_connection_pool(self, config: PGVectorConfig): return psycopg2.pool.SimpleConnectionPool( - 1, - 5, + config.min_connection, + config.max_connection, host=config.host, port=config.port, user=config.user, @@ -158,7 +166,7 @@ class PGVector(BaseVector): with self._get_cursor() as cur: cur.execute( - f"""SELECT meta, text, ts_rank(to_tsvector(coalesce(text, '')), to_tsquery(%s)) AS score + f"""SELECT meta, text, ts_rank(to_tsvector(coalesce(text, '')), plainto_tsquery(%s)) AS score FROM {self.table_name} WHERE to_tsvector(text) @@ plainto_tsquery(%s) ORDER BY score DESC @@ -213,5 +221,7 @@ class PGVectorFactory(AbstractVectorFactory): user=dify_config.PGVECTOR_USER, password=dify_config.PGVECTOR_PASSWORD, database=dify_config.PGVECTOR_DATABASE, + min_connection=dify_config.PGVECTOR_MIN_CONNECTION, + max_connection=dify_config.PGVECTOR_MAX_CONNECTION, ), ) diff --git a/api/core/rag/datasource/vdb/relyt/relyt_vector.py b/api/core/rag/datasource/vdb/relyt/relyt_vector.py index f47f75718a..254956970f 100644 --- a/api/core/rag/datasource/vdb/relyt/relyt_vector.py +++ b/api/core/rag/datasource/vdb/relyt/relyt_vector.py @@ -162,7 +162,7 @@ class RelytVector(BaseVector): else: return None - def delete_by_uuids(self, ids:
Optional[list[str]] = None): """Delete by vector IDs. Args: diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index faa373017b..39e3a7f6cf 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -56,7 +56,7 @@ class TencentVector(BaseVector): return self._client.create_database(database_name=self._client_config.database) def get_type(self) -> str: - return "tencent" + return VectorType.TENCENT def to_index_struct(self) -> dict: return {"type": self.get_type(), "vector_store": {"class_prefix": self._collection_name}} diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index ca90233b7f..873b289027 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any +from typing import Any, Optional from configs import dify_config from core.embedding.cached_embedding import CacheEmbedding @@ -25,7 +25,7 @@ class AbstractVectorFactory(ABC): class Vector: - def __init__(self, dataset: Dataset, attributes: list = None): + def __init__(self, dataset: Dataset, attributes: Optional[list] = None): if attributes is None: attributes = ["doc_id", "dataset_id", "document_id", "doc_hash"] self._dataset = dataset @@ -103,10 +103,18 @@ class Vector: from core.rag.datasource.vdb.analyticdb.analyticdb_vector import AnalyticdbVectorFactory return AnalyticdbVectorFactory + case VectorType.BAIDU: + from core.rag.datasource.vdb.baidu.baidu_vector import BaiduVectorFactory + + return BaiduVectorFactory + case VectorType.VIKINGDB: + from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBVectorFactory + + return VikingDBVectorFactory case _: raise ValueError(f"Vector store {vector_type} is not supported.") - def create(self, texts: list = None, **kwargs): + def create(self, texts: Optional[list] = None, **kwargs): if texts: embeddings = self._embeddings.embed_documents([document.page_content for document in texts]) self._vector_processor.create(texts=texts, embeddings=embeddings, **kwargs) diff --git a/api/core/rag/datasource/vdb/vector_type.py b/api/core/rag/datasource/vdb/vector_type.py index ba04ea879d..b4d604a080 100644 --- a/api/core/rag/datasource/vdb/vector_type.py +++ b/api/core/rag/datasource/vdb/vector_type.py @@ -16,3 +16,5 @@ class VectorType(str, Enum): TENCENT = "tencent" ORACLE = "oracle" ELASTICSEARCH = "elasticsearch" + BAIDU = "baidu" + VIKINGDB = "vikingdb" diff --git a/api/core/rag/datasource/vdb/vikingdb/__init__.py b/api/core/rag/datasource/vdb/vikingdb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py new file mode 100644 index 0000000000..22d0e92586 --- /dev/null +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -0,0 +1,239 @@ +import json +from typing import Any + +from pydantic import BaseModel +from volcengine.viking_db import ( + Data, + DistanceType, + Field, + FieldType, + IndexType, + QuantType, + VectorIndexParams, + VikingDBService, +) + +from configs import dify_config +from core.rag.datasource.entity.embedding import Embeddings +from core.rag.datasource.vdb.field import Field as vdb_Field +from core.rag.datasource.vdb.vector_base import BaseVector +from core.rag.datasource.vdb.vector_factory import 
AbstractVectorFactory +from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.models.document import Document +from extensions.ext_redis import redis_client +from models.dataset import Dataset + + +class VikingDBConfig(BaseModel): + access_key: str + secret_key: str + host: str + region: str + scheme: str + connection_timeout: int + socket_timeout: int + index_type: str = IndexType.HNSW + distance: str = DistanceType.L2 + quant: str = QuantType.Float + + +class VikingDBVector(BaseVector): + def __init__(self, collection_name: str, group_id: str, config: VikingDBConfig): + super().__init__(collection_name) + self._group_id = group_id + self._client_config = config + self._index_name = f"{self._collection_name}_idx" + self._client = VikingDBService( + host=config.host, + region=config.region, + scheme=config.scheme, + connection_timeout=config.connection_timeout, + socket_timeout=config.socket_timeout, + ak=config.access_key, + sk=config.secret_key, + ) + + def _has_collection(self) -> bool: + try: + self._client.get_collection(self._collection_name) + except Exception: + return False + return True + + def _has_index(self) -> bool: + try: + self._client.get_index(self._collection_name, self._index_name) + except Exception: + return False + return True + + def _create_collection(self, dimension: int): + lock_name = f"vector_indexing_lock_{self._collection_name}" + with redis_client.lock(lock_name, timeout=20): + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" + if redis_client.get(collection_exist_cache_key): + return + + if not self._has_collection(): + fields = [ + Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=dimension), + ] + + self._client.create_collection( + collection_name=self._collection_name, + fields=fields, + description="Collection For Dify", + ) + + if not self._has_index(): + vector_index = VectorIndexParams( + distance=self._client_config.distance, + index_type=self._client_config.index_type, + quant=self._client_config.quant, + ) + + self._client.create_index( + collection_name=self._collection_name, + index_name=self._index_name, + vector_index=vector_index, + partition_by=vdb_Field.GROUP_KEY.value, + description="Index For Dify", + ) + redis_client.set(collection_exist_cache_key, 1, ex=3600) + + def get_type(self) -> str: + return VectorType.VIKINGDB + + def create(self, texts: list[Document], embeddings: list[list[float]], **kwargs): + dimension = len(embeddings[0]) + self._create_collection(dimension) + self.add_texts(texts, embeddings, **kwargs) + + def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): + page_contents = [doc.page_content for doc in documents] + metadatas = [doc.metadata for doc in documents] + docs = [] + + for i, page_content in enumerate(page_contents): + metadata = {} + if metadatas is not None: + for key, val in metadatas[i].items(): + metadata[key] = val + doc = Data( + { + vdb_Field.PRIMARY_KEY.value: metadatas[i]["doc_id"], + vdb_Field.VECTOR.value: embeddings[i] if embeddings else None, + vdb_Field.CONTENT_KEY.value: page_content, + vdb_Field.METADATA_KEY.value: json.dumps(metadata), + vdb_Field.GROUP_KEY.value: 
self._group_id,
+                }
+            )
+            docs.append(doc)
+
+        self._client.get_collection(self._collection_name).upsert_data(docs)
+
+    def text_exists(self, id: str) -> bool:
+        docs = self._client.get_collection(self._collection_name).fetch_data(id)
+        not_exists_str = "data does not exist"
+        if docs is not None and not_exists_str not in docs.fields.get("message", ""):
+            return True
+        return False
+
+    def delete_by_ids(self, ids: list[str]) -> None:
+        self._client.get_collection(self._collection_name).delete_data(ids)
+
+    def get_ids_by_metadata_field(self, key: str, value: str):
+        # Note: the metadata field value is a dict, but VikingDB fields
+        # do not support the JSON type
+        results = self._client.get_index(self._collection_name, self._index_name).search(
+            filter={"op": "must", "field": vdb_Field.GROUP_KEY.value, "conds": [self._group_id]},
+            # max value is 5000
+            limit=5000,
+        )
+
+        if not results:
+            return []
+
+        ids = []
+        for result in results:
+            metadata = result.fields.get(vdb_Field.METADATA_KEY.value)
+            if metadata is not None:
+                metadata = json.loads(metadata)
+                if metadata.get(key) == value:
+                    ids.append(result.id)
+        return ids
+
+    def delete_by_metadata_field(self, key: str, value: str) -> None:
+        ids = self.get_ids_by_metadata_field(key, value)
+        self.delete_by_ids(ids)
+
+    def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
+        results = self._client.get_index(self._collection_name, self._index_name).search_by_vector(
+            query_vector, limit=kwargs.get("top_k", 50)
+        )
+        score_threshold = float(kwargs.get("score_threshold") or 0.0)
+        return self._get_search_res(results, score_threshold)
+
+    def _get_search_res(self, results, score_threshold):
+        if len(results) == 0:
+            return []
+
+        docs = []
+        for result in results:
+            metadata = result.fields.get(vdb_Field.METADATA_KEY.value)
+            if metadata is not None:
+                metadata = json.loads(metadata)
+                if result.score > score_threshold:
+                    metadata["score"] = result.score
+                    doc = Document(page_content=result.fields.get(vdb_Field.CONTENT_KEY.value), metadata=metadata)
+                    docs.append(doc)
+        docs = sorted(docs, key=lambda x: x.metadata["score"], reverse=True)
+        return docs
+
+    def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
+        return []
+
+    def delete(self) -> None:
+        if self._has_index():
+            self._client.drop_index(self._collection_name, self._index_name)
+        if self._has_collection():
+            self._client.drop_collection(self._collection_name)
+
+
+class VikingDBVectorFactory(AbstractVectorFactory):
+    def init_vector(self, dataset: Dataset, attributes: list, embeddings: Embeddings) -> VikingDBVector:
+        if dataset.index_struct_dict:
+            class_prefix: str = dataset.index_struct_dict["vector_store"]["class_prefix"]
+            collection_name = class_prefix.lower()
+        else:
+            dataset_id = dataset.id
+            collection_name = Dataset.gen_collection_name_by_id(dataset_id).lower()
+            dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.VIKINGDB, collection_name))
+
+        if dify_config.VIKINGDB_ACCESS_KEY is None:
+            raise ValueError("VIKINGDB_ACCESS_KEY should not be None")
+        if dify_config.VIKINGDB_SECRET_KEY is None:
+            raise ValueError("VIKINGDB_SECRET_KEY should not be None")
+        if dify_config.VIKINGDB_HOST is None:
+            raise ValueError("VIKINGDB_HOST should not be None")
+        if dify_config.VIKINGDB_REGION is None:
+            raise ValueError("VIKINGDB_REGION should not be None")
+        if dify_config.VIKINGDB_SCHEME is None:
+            raise ValueError("VIKINGDB_SCHEME should not be None")
+        return VikingDBVector(
+            collection_name=collection_name,
+            group_id=dataset.id,
+            config=VikingDBConfig(
+                access_key=dify_config.VIKINGDB_ACCESS_KEY,
+                secret_key=dify_config.VIKINGDB_SECRET_KEY,
+                host=dify_config.VIKINGDB_HOST,
+                region=dify_config.VIKINGDB_REGION,
+                scheme=dify_config.VIKINGDB_SCHEME,
+                connection_timeout=dify_config.VIKINGDB_CONNECTION_TIMEOUT,
+                socket_timeout=dify_config.VIKINGDB_SOCKET_TIMEOUT,
+            ),
+        )
diff --git a/api/core/rag/entities/context_entities.py b/api/core/rag/entities/context_entities.py
new file mode 100644
index 0000000000..cd18ad081f
--- /dev/null
+++ b/api/core/rag/entities/context_entities.py
@@ -0,0 +1,12 @@
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class DocumentContext(BaseModel):
+    """
+    Model class for document context.
+    """
+
+    content: str
+    score: Optional[float] = None
diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py
index 0ffc89b214..706a42b735 100644
--- a/api/core/rag/extractor/extract_processor.py
+++ b/api/core/rag/extractor/extract_processor.py
@@ -1,7 +1,7 @@
 import re
 import tempfile
 from pathlib import Path
-from typing import Union
+from typing import Optional, Union
 from urllib.parse import unquote
 
 from configs import dify_config
@@ -12,6 +12,7 @@
 from core.rag.extractor.entity.extract_setting import ExtractSetting
 from core.rag.extractor.excel_extractor import ExcelExtractor
 from core.rag.extractor.firecrawl.firecrawl_web_extractor import FirecrawlWebExtractor
 from core.rag.extractor.html_extractor import HtmlExtractor
+from core.rag.extractor.jina_reader_extractor import JinaReaderWebExtractor
 from core.rag.extractor.markdown_extractor import MarkdownExtractor
 from core.rag.extractor.notion_extractor import NotionExtractor
 from core.rag.extractor.pdf_extractor import PdfExtractor
@@ -83,7 +84,7 @@ class ExtractProcessor:
     @classmethod
     def extract(
-        cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: str = None
+        cls, extract_setting: ExtractSetting, is_automatic: bool = False, file_path: Optional[str] = None
    ) -> list[Document]:
         if extract_setting.datasource_type == DatasourceType.FILE.value:
             with tempfile.TemporaryDirectory() as temp_dir:
@@ -171,6 +172,15 @@
                     only_main_content=extract_setting.website_info.only_main_content,
                 )
                 return extractor.extract()
+            elif extract_setting.website_info.provider == "jinareader":
+                extractor = JinaReaderWebExtractor(
+                    url=extract_setting.website_info.url,
+                    job_id=extract_setting.website_info.job_id,
+                    tenant_id=extract_setting.website_info.tenant_id,
+                    mode=extract_setting.website_info.mode,
+                    only_main_content=extract_setting.website_info.only_main_content,
+                )
+                return extractor.extract()
             else:
                 raise ValueError(f"Unsupported website provider: {extract_setting.website_info.provider}")
         else:
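Note: the new "jinareader" branch above mirrors the existing firecrawl branch and hands the website_info fields to the JinaReaderWebExtractor defined next. A rough usage sketch, not part of the patch; the url, job_id, and tenant_id values are invented, and extract() only returns documents in "crawl" mode:

from core.rag.extractor.jina_reader_extractor import JinaReaderWebExtractor

# Hypothetical values for illustration only.
extractor = JinaReaderWebExtractor(
    url="https://example.com/docs",   # page fetched by a prior Jina Reader crawl job
    job_id="jina-job-123",            # invented crawl job id
    tenant_id="tenant-abc",           # invented tenant id
    mode="crawl",                     # the only mode extract() currently handles
    only_main_content=True,
)
documents = extractor.extract()       # list[Document]; empty if the crawl data is missing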
+ """ + + def __init__(self, url: str, job_id: str, tenant_id: str, mode: str = "crawl", only_main_content: bool = False): + """Initialize with url, api_key, base_url and mode.""" + self._url = url + self.job_id = job_id + self.tenant_id = tenant_id + self.mode = mode + self.only_main_content = only_main_content + + def extract(self) -> list[Document]: + """Extract content from the URL.""" + documents = [] + if self.mode == "crawl": + crawl_data = WebsiteService.get_crawl_url_data(self.job_id, "jinareader", self._url, self.tenant_id) + if crawl_data is None: + return [] + document = Document( + page_content=crawl_data.get("content", ""), + metadata={ + "source_url": crawl_data.get("url"), + "description": crawl_data.get("description"), + "title": crawl_data.get("title"), + }, + ) + documents.append(document) + return documents diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index fa50fa76b2..a41ed3a558 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -1,4 +1,5 @@ import logging +from typing import Optional from core.rag.extractor.extractor_base import BaseExtractor from core.rag.models.document import Document @@ -17,7 +18,7 @@ class UnstructuredEpubExtractor(BaseExtractor): def __init__( self, file_path: str, - api_url: str = None, + api_url: Optional[str] = None, ): """Initialize with file path.""" self._file_path = file_path diff --git a/api/core/rag/models/document.py b/api/core/rag/models/document.py index 0ff1fdb81c..1e9aaa24f0 100644 --- a/api/core/rag/models/document.py +++ b/api/core/rag/models/document.py @@ -17,6 +17,8 @@ class Document(BaseModel): """ metadata: Optional[dict] = Field(default_factory=dict) + provider: Optional[str] = "dify" + class BaseDocumentTransformer(ABC): """Abstract base class for document transformation systems. 
diff --git a/api/core/rag/rerank/rerank_model.py b/api/core/rag/rerank/rerank_model.py index 6356ff87ab..27f86aed34 100644 --- a/api/core/rag/rerank/rerank_model.py +++ b/api/core/rag/rerank/rerank_model.py @@ -28,11 +28,16 @@ class RerankModelRunner: docs = [] doc_id = [] unique_documents = [] - for document in documents: + dify_documents = [item for item in documents if item.provider == "dify"] + external_documents = [item for item in documents if item.provider == "external"] + for document in dify_documents: if document.metadata["doc_id"] not in doc_id: doc_id.append(document.metadata["doc_id"]) docs.append(document.page_content) unique_documents.append(document) + for document in external_documents: + docs.append(document.page_content) + unique_documents.append(document) documents = unique_documents @@ -46,14 +51,10 @@ class RerankModelRunner: # format document rerank_document = Document( page_content=result.text, - metadata={ - "doc_id": documents[result.index].metadata["doc_id"], - "doc_hash": documents[result.index].metadata["doc_hash"], - "document_id": documents[result.index].metadata["document_id"], - "dataset_id": documents[result.index].metadata["dataset_id"], - "score": result.score, - }, + metadata=documents[result.index].metadata, + provider=documents[result.index].provider, ) + rerank_document.metadata["score"] = result.score rerank_documents.append(rerank_document) return rerank_documents diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 4603957d68..633e41d5cf 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -20,6 +20,7 @@ from core.ops.utils import measure_time from core.rag.data_post_processor.data_post_processor import DataPostProcessor from core.rag.datasource.keyword.jieba.jieba_keyword_table_handler import JiebaKeywordTableHandler from core.rag.datasource.retrieval_service import RetrievalService +from core.rag.entities.context_entities import DocumentContext from core.rag.models.document import Document from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.rag.retrieval.router.multi_dataset_function_call_router import FunctionCallMultiDatasetRouter @@ -30,6 +31,7 @@ from core.tools.tool.dataset_retriever.dataset_retriever_tool import DatasetRetr from extensions.ext_database import db from models.dataset import Dataset, DatasetQuery, DocumentSegment from models.dataset import Document as DatasetDocument +from services.external_knowledge_service import ExternalDatasetService default_retrieval_model = { "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, @@ -110,7 +112,7 @@ class DatasetRetrieval: continue # pass if dataset is not available - if dataset and dataset.available_document_count == 0: + if dataset and dataset.available_document_count == 0 and dataset.provider != "external": continue available_datasets.append(dataset) @@ -146,69 +148,96 @@ class DatasetRetrieval: message_id, ) - document_score_list = {} - for item in all_documents: - if item.metadata.get("score"): - document_score_list[item.metadata["doc_id"]] = item.metadata["score"] - + dify_documents = [item for item in all_documents if item.provider == "dify"] + external_documents = [item for item in all_documents if item.provider == "external"] document_context_list = [] - index_node_ids = [document.metadata["doc_id"] for document in all_documents] - segments = DocumentSegment.query.filter( - DocumentSegment.dataset_id.in_(dataset_ids), - 
DocumentSegment.completed_at.isnot(None), - DocumentSegment.status == "completed", - DocumentSegment.enabled == True, - DocumentSegment.index_node_id.in_(index_node_ids), - ).all() + retrieval_resource_list = [] + # deal with external documents + for item in external_documents: + document_context_list.append(DocumentContext(content=item.page_content, score=item.metadata.get("score"))) + source = { + "dataset_id": item.metadata.get("dataset_id"), + "dataset_name": item.metadata.get("dataset_name"), + "document_name": item.metadata.get("title"), + "data_source_type": "external", + "retriever_from": invoke_from.to_source(), + "score": item.metadata.get("score"), + "content": item.page_content, + } + retrieval_resource_list.append(source) + document_score_list = {} + # deal with dify documents + if dify_documents: + for item in dify_documents: + if item.metadata.get("score"): + document_score_list[item.metadata["doc_id"]] = item.metadata["score"] - if segments: - index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} - sorted_segments = sorted( - segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf")) - ) - for segment in sorted_segments: - if segment.answer: - document_context_list.append(f"question:{segment.get_sign_content()} answer:{segment.answer}") - else: - document_context_list.append(segment.get_sign_content()) - if show_retrieve_source: - context_list = [] - resource_number = 1 + index_node_ids = [document.metadata["doc_id"] for document in dify_documents] + segments = DocumentSegment.query.filter( + DocumentSegment.dataset_id.in_(dataset_ids), + DocumentSegment.status == "completed", + DocumentSegment.enabled == True, + DocumentSegment.index_node_id.in_(index_node_ids), + ).all() + + if segments: + index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} + sorted_segments = sorted( + segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf")) + ) for segment in sorted_segments: - dataset = Dataset.query.filter_by(id=segment.dataset_id).first() - document = DatasetDocument.query.filter( - DatasetDocument.id == segment.document_id, - DatasetDocument.enabled == True, - DatasetDocument.archived == False, - ).first() - if dataset and document: - source = { - "position": resource_number, - "dataset_id": dataset.id, - "dataset_name": dataset.name, - "document_id": document.id, - "document_name": document.name, - "data_source_type": document.data_source_type, - "segment_id": segment.id, - "retriever_from": invoke_from.to_source(), - "score": document_score_list.get(segment.index_node_id, None), - } + if segment.answer: + document_context_list.append( + DocumentContext( + content=f"question:{segment.get_sign_content()} answer:{segment.answer}", + score=document_score_list.get(segment.index_node_id, None), + ) + ) + else: + document_context_list.append( + DocumentContext( + content=segment.get_sign_content(), + score=document_score_list.get(segment.index_node_id, None), + ) + ) + if show_retrieve_source: + for segment in sorted_segments: + dataset = Dataset.query.filter_by(id=segment.dataset_id).first() + document = DatasetDocument.query.filter( + DatasetDocument.id == segment.document_id, + DatasetDocument.enabled == True, + DatasetDocument.archived == False, + ).first() + if dataset and document: + source = { + "dataset_id": dataset.id, + "dataset_name": dataset.name, + "document_id": document.id, + "document_name": document.name, + 
"data_source_type": document.data_source_type, + "segment_id": segment.id, + "retriever_from": invoke_from.to_source(), + "score": document_score_list.get(segment.index_node_id, None), + } - if invoke_from.to_source() == "dev": - source["hit_count"] = segment.hit_count - source["word_count"] = segment.word_count - source["segment_position"] = segment.position - source["index_node_hash"] = segment.index_node_hash - if segment.answer: - source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" - else: - source["content"] = segment.content - context_list.append(source) - resource_number += 1 - if hit_callback: - hit_callback.return_retriever_resource_info(context_list) - - return str("\n".join(document_context_list)) + if invoke_from.to_source() == "dev": + source["hit_count"] = segment.hit_count + source["word_count"] = segment.word_count + source["segment_position"] = segment.position + source["index_node_hash"] = segment.index_node_hash + if segment.answer: + source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" + else: + source["content"] = segment.content + retrieval_resource_list.append(source) + if hit_callback and retrieval_resource_list: + retrieval_resource_list = sorted(retrieval_resource_list, key=lambda x: x.get("score"), reverse=True) + for position, item in enumerate(retrieval_resource_list, start=1): + item["position"] = position + hit_callback.return_retriever_resource_info(retrieval_resource_list) + if document_context_list: + document_context_list = sorted(document_context_list, key=lambda x: x.score, reverse=True) + return str("\n".join([document_context.content for document_context in document_context_list])) return "" def single_retrieve( @@ -256,36 +285,58 @@ class DatasetRetrieval: # get retrieval model config dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() if dataset: - retrieval_model_config = dataset.retrieval_model or default_retrieval_model - - # get top k - top_k = retrieval_model_config["top_k"] - # get retrieval method - if dataset.indexing_technique == "economy": - retrieval_method = "keyword_search" - else: - retrieval_method = retrieval_model_config["search_method"] - # get reranking model - reranking_model = ( - retrieval_model_config["reranking_model"] if retrieval_model_config["reranking_enable"] else None - ) - # get score threshold - score_threshold = 0.0 - score_threshold_enabled = retrieval_model_config.get("score_threshold_enabled") - if score_threshold_enabled: - score_threshold = retrieval_model_config.get("score_threshold") - - with measure_time() as timer: - results = RetrievalService.retrieve( - retrieval_method=retrieval_method, - dataset_id=dataset.id, + results = [] + if dataset.provider == "external": + external_documents = ExternalDatasetService.fetch_external_knowledge_retrieval( + tenant_id=dataset.tenant_id, + dataset_id=dataset_id, query=query, - top_k=top_k, - score_threshold=score_threshold, - reranking_model=reranking_model, - reranking_mode=retrieval_model_config.get("reranking_mode", "reranking_model"), - weights=retrieval_model_config.get("weights", None), + external_retrieval_parameters=dataset.retrieval_model, ) + for external_document in external_documents: + document = Document( + page_content=external_document.get("content"), + metadata=external_document.get("metadata"), + provider="external", + ) + document.metadata["score"] = external_document.get("score") + document.metadata["title"] = external_document.get("title") + document.metadata["dataset_id"] = 
dataset_id
+                    document.metadata["dataset_name"] = dataset.name
+                    results.append(document)
+            else:
+                retrieval_model_config = dataset.retrieval_model or default_retrieval_model
+
+                # get top k
+                top_k = retrieval_model_config["top_k"]
+                # get retrieval method
+                if dataset.indexing_technique == "economy":
+                    retrieval_method = "keyword_search"
+                else:
+                    retrieval_method = retrieval_model_config["search_method"]
+                # get reranking model
+                reranking_model = (
+                    retrieval_model_config["reranking_model"]
+                    if retrieval_model_config["reranking_enable"]
+                    else None
+                )
+                # get score threshold
+                score_threshold = 0.0
+                score_threshold_enabled = retrieval_model_config.get("score_threshold_enabled")
+                if score_threshold_enabled:
+                    score_threshold = retrieval_model_config.get("score_threshold")
+
+                with measure_time() as timer:
+                    results = RetrievalService.retrieve(
+                        retrieval_method=retrieval_method,
+                        dataset_id=dataset.id,
+                        query=query,
+                        top_k=top_k,
+                        score_threshold=score_threshold,
+                        reranking_model=reranking_model,
+                        reranking_mode=retrieval_model_config.get("reranking_mode", "reranking_model"),
+                        weights=retrieval_model_config.get("weights", None),
+                    )
             self._on_query(query, [dataset_id], app_id, user_from, user_id)
 
             if results:
@@ -356,7 +407,8 @@
         self, documents: list[Document], message_id: Optional[str] = None, timer: Optional[dict] = None
     ) -> None:
         """Handle retrieval end."""
-        for document in documents:
+        dify_documents = [document for document in documents if document.provider == "dify"]
+        for document in dify_documents:
             query = db.session.query(DocumentSegment).filter(
                 DocumentSegment.index_node_id == document.metadata["doc_id"]
             )
@@ -409,35 +461,54 @@
         if not dataset:
             return []
 
-        # get retrieval model , if the model is not setting , using default
-        retrieval_model = dataset.retrieval_model or default_retrieval_model
-
-        if dataset.indexing_technique == "economy":
-            # use keyword table query
-            documents = RetrievalService.retrieve(
-                retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=top_k
+        if dataset.provider == "external":
+            external_documents = ExternalDatasetService.fetch_external_knowledge_retrieval(
+                tenant_id=dataset.tenant_id,
+                dataset_id=dataset_id,
+                query=query,
+                external_retrieval_parameters=dataset.retrieval_model,
             )
-            if documents:
-                all_documents.extend(documents)
-        else:
-            if top_k > 0:
-                # retrieval source
-                documents = RetrievalService.retrieve(
-                    retrieval_method=retrieval_model["search_method"],
-                    dataset_id=dataset.id,
-                    query=query,
-                    top_k=retrieval_model.get("top_k") or 2,
-                    score_threshold=retrieval_model.get("score_threshold", 0.0)
-                    if retrieval_model["score_threshold_enabled"]
-                    else 0.0,
-                    reranking_model=retrieval_model.get("reranking_model", None)
-                    if retrieval_model["reranking_enable"]
-                    else None,
-                    reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model",
-                    weights=retrieval_model.get("weights", None),
+            for external_document in external_documents:
+                document = Document(
+                    page_content=external_document.get("content"),
+                    metadata=external_document.get("metadata"),
+                    provider="external",
                 )
+                document.metadata["score"] = external_document.get("score")
+                document.metadata["title"] = external_document.get("title")
+                document.metadata["dataset_id"] = dataset_id
+                document.metadata["dataset_name"] = dataset.name
+                all_documents.append(document)
+        else:
+            # get the retrieval model config; if it is not set, use the default
+            retrieval_model = dataset.retrieval_model or
default_retrieval_model - all_documents.extend(documents) + if dataset.indexing_technique == "economy": + # use keyword table query + documents = RetrievalService.retrieve( + retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=top_k + ) + if documents: + all_documents.extend(documents) + else: + if top_k > 0: + # retrieval source + documents = RetrievalService.retrieve( + retrieval_method=retrieval_model["search_method"], + dataset_id=dataset.id, + query=query, + top_k=retrieval_model.get("top_k") or 2, + score_threshold=retrieval_model.get("score_threshold", 0.0) + if retrieval_model["score_threshold_enabled"] + else 0.0, + reranking_model=retrieval_model.get("reranking_model", None) + if retrieval_model["reranking_enable"] + else None, + reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model", + weights=retrieval_model.get("weights", None), + ) + + all_documents.extend(documents) def to_dataset_retriever_tool( self, @@ -468,7 +539,7 @@ class DatasetRetrieval: continue # pass if dataset is not available - if dataset and dataset.available_document_count == 0: + if dataset and dataset.provider != "external" and dataset.available_document_count == 0: continue available_datasets.append(dataset) diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 02b8b35be7..9962b559fa 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -341,7 +341,7 @@ class ToolRuntimeVariablePool(BaseModel): self.pool.append(variable) - def set_file(self, tool_name: str, value: str, name: str = None) -> None: + def set_file(self, tool_name: str, value: str, name: Optional[str] = None) -> None: """ set an image variable diff --git a/api/core/tools/provider/_position.yaml b/api/core/tools/provider/_position.yaml index 40c3356116..6bab9a09d8 100644 --- a/api/core/tools/provider/_position.yaml +++ b/api/core/tools/provider/_position.yaml @@ -5,34 +5,68 @@ - searchapi - serper - searxng +- websearch +- tavily +- stackexchange +- pubmed +- arxiv +- aws +- nominatim +- devdocs +- spider +- firecrawl +- brave +- crossref +- jina +- webscraper - dalle - azuredalle - stability -- wikipedia -- nominatim -- yahoo -- alphavantage -- arxiv -- pubmed - stablediffusion -- webscraper -- jina -- aippt -- youtube -- code -- wolframalpha -- maths -- github -- chart -- time -- vectorizer +- cogview +- comfyui +- getimgai +- siliconflow +- spark +- stepfun +- xinference +- alphavantage +- yahoo +- openweather - gaode -- wecom -- qrcode +- aippt +- chart +- youtube +- did - dingtalk +- discord - feishu - feishu_base - feishu_document - feishu_message +- feishu_wiki +- feishu_task +- feishu_calendar +- feishu_spreadsheet - slack +- twilio +- wecom +- wikipedia +- code +- wolframalpha +- maths +- github +- gitlab +- time +- vectorizer +- qrcode - tianditu +- google_translate +- hap +- json_process +- judge0ce +- novitaai +- onebot +- regex +- trello +- vanna diff --git a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py b/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py index bceeaab745..1fafe09b4d 100644 --- a/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py +++ b/api/core/tools/provider/builtin/aws/tools/sagemaker_tts.py @@ -1,6 +1,6 @@ import json from enum import Enum -from typing import Any, Union +from typing import Any, Optional, Union import boto3 @@ -21,7 +21,7 @@ class SageMakerTTSTool(BuiltinTool): s3_client: Any = None comprehend_client: Any = None - def 
_detect_lang_code(self, content: str, map_dict: dict = None):
+    def _detect_lang_code(self, content: str, map_dict: Optional[dict] = None):
         map_dict = {"zh": "<|zh|>", "en": "<|en|>", "ja": "<|jp|>", "zh-TW": "<|yue|>", "ko": "<|ko|>"}
 
         response = self.comprehend_client.detect_dominant_language(Text=content)
diff --git a/api/core/tools/provider/builtin/discord/_assets/icon.svg b/api/core/tools/provider/builtin/discord/_assets/icon.svg
new file mode 100644
index 0000000000..177a0591f9
--- /dev/null
+++ b/api/core/tools/provider/builtin/discord/_assets/icon.svg
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/api/core/tools/provider/builtin/discord/discord.py b/api/core/tools/provider/builtin/discord/discord.py
new file mode 100644
index 0000000000..c94824b591
--- /dev/null
+++ b/api/core/tools/provider/builtin/discord/discord.py
@@ -0,0 +1,9 @@
+from typing import Any
+
+from core.tools.provider.builtin.discord.tools.discord_webhook import DiscordWebhookTool
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+
+
+class DiscordProvider(BuiltinToolProviderController):
+    def _validate_credentials(self, credentials: dict[str, Any]) -> None:
+        DiscordWebhookTool()
diff --git a/api/core/tools/provider/builtin/discord/discord.yaml b/api/core/tools/provider/builtin/discord/discord.yaml
new file mode 100644
index 0000000000..18b249b522
--- /dev/null
+++ b/api/core/tools/provider/builtin/discord/discord.yaml
@@ -0,0 +1,16 @@
+identity:
+  author: Ice Yao
+  name: discord
+  label:
+    en_US: Discord
+    zh_Hans: Discord
+    pt_BR: Discord
+  description:
+    en_US: Discord Webhook
+    zh_Hans: Discord Webhook
+    pt_BR: Discord Webhook
+  icon: icon.svg
+  tags:
+    - social
+    - productivity
+credentials_for_provider:
diff --git a/api/core/tools/provider/builtin/discord/tools/discord_webhook.py b/api/core/tools/provider/builtin/discord/tools/discord_webhook.py
new file mode 100644
index 0000000000..7fdf791aba
--- /dev/null
+++ b/api/core/tools/provider/builtin/discord/tools/discord_webhook.py
@@ -0,0 +1,49 @@
+from typing import Any, Union
+
+import httpx
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+
+
+class DiscordWebhookTool(BuiltinTool):
+    def _invoke(
+        self, user_id: str, tool_parameters: dict[str, Any]
+    ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
+        """
+        Incoming Webhooks
+        API Document:
+            https://discord.com/developers/docs/resources/webhook#execute-webhook
+        """
+
+        content = tool_parameters.get("content", "")
+        if not content:
+            return self.create_text_message("Invalid parameter content")
+
+        webhook_url = tool_parameters.get("webhook_url", "")
+
+        if not webhook_url.startswith("https://discord.com/api/webhooks/"):
+            return self.create_text_message(
+                f"Invalid parameter webhook_url {webhook_url}, \
+                not a valid Discord webhook URL"
+            )
+
+        headers = {
+            "Content-Type": "application/json",
+        }
+        params = {}
+        payload = {
+            "content": content,
+        }
+
+        try:
+            res = httpx.post(webhook_url, headers=headers, params=params, json=payload)
+            if res.is_success:
+                return self.create_text_message("Text message was sent successfully")
+            else:
+                return self.create_text_message(
+                    f"Failed to send the text message, \
+                    status code: {res.status_code}, response: {res.text}"
+                )
+        except Exception as e:
+            return self.create_text_message("Failed to send message through webhook. {}".format(e))
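For reference, the call this tool makes boils down to a single httpx POST against Discord's execute-webhook endpoint. A standalone sketch outside the Dify tool plumbing; the webhook URL below is a placeholder, not a real id/token:

import httpx

# Placeholder URL; a real one comes from Server Settings -> Integrations -> Webhooks in Discord.
webhook_url = "https://discord.com/api/webhooks/000000000000000000/placeholder-token"

res = httpx.post(webhook_url, json={"content": "Hello from Dify"})
print(res.status_code)  # Discord returns 204 No Content on success when ?wait=true is not set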
{}".format(e)) diff --git a/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml b/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml new file mode 100644 index 0000000000..bb3fa43f24 --- /dev/null +++ b/api/core/tools/provider/builtin/discord/tools/discord_webhook.yaml @@ -0,0 +1,40 @@ +identity: + name: discord_webhook + author: Ice Yao + label: + en_US: Incoming Webhook to send message + zh_Hans: 通过入站Webhook发送消息 + pt_BR: Incoming Webhook to send message + icon: icon.svg +description: + human: + en_US: Sending a message on Discord via the Incoming Webhook + zh_Hans: 通过入站Webhook在Discord上发送消息 + pt_BR: Sending a message on Discord via the Incoming Webhook + llm: A tool for sending messages to a chat on Discord. +parameters: + - name: webhook_url + type: string + required: true + label: + en_US: Discord Incoming Webhook url + zh_Hans: Discord入站Webhook的url + pt_BR: Discord Incoming Webhook url + human_description: + en_US: Discord Incoming Webhook url + zh_Hans: Discord入站Webhook的url + pt_BR: Discord Incoming Webhook url + form: form + - name: content + type: string + required: true + label: + en_US: content + zh_Hans: 消息内容 + pt_BR: content + human_description: + en_US: Content to sent to the channel or person. + zh_Hans: 消息内容文本 + pt_BR: Content to sent to the channel or person. + llm_description: Content of the message + form: llm diff --git a/api/core/tools/provider/builtin/feishu_calendar/_assets/icon.png b/api/core/tools/provider/builtin/feishu_calendar/_assets/icon.png new file mode 100644 index 0000000000..2a934747a9 Binary files /dev/null and b/api/core/tools/provider/builtin/feishu_calendar/_assets/icon.png differ diff --git a/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.py b/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.py new file mode 100644 index 0000000000..a46a9fa9e8 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.py @@ -0,0 +1,7 @@ +from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController +from core.tools.utils.feishu_api_utils import auth + + +class FeishuCalendarProvider(BuiltinToolProviderController): + def _validate_credentials(self, credentials: dict) -> None: + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.yaml b/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.yaml new file mode 100644 index 0000000000..db5bab5c10 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/feishu_calendar.yaml @@ -0,0 +1,36 @@ +identity: + author: Doug Lea + name: feishu_calendar + label: + en_US: Feishu Calendar + zh_Hans: 飞书日历 + description: + en_US: | + Feishu calendar, requires the following permissions: calendar:calendar:read、calendar:calendar、contact:user.id:readonly. 
+ zh_Hans: | + 飞书日历,需要开通以下权限: calendar:calendar:read、calendar:calendar、contact:user.id:readonly。 + icon: icon.png + tags: + - social + - productivity +credentials_for_provider: + app_id: + type: text-input + required: true + label: + en_US: APP ID + placeholder: + en_US: Please input your feishu app id + zh_Hans: 请输入你的飞书 app id + help: + en_US: Get your app_id and app_secret from Feishu + zh_Hans: 从飞书获取您的 app_id 和 app_secret + url: https://open.larkoffice.com/app + app_secret: + type: secret-input + required: true + label: + en_US: APP Secret + placeholder: + en_US: Please input your app secret + zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.py b/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.py new file mode 100644 index 0000000000..8f83aea5ab --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.py @@ -0,0 +1,20 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class AddEventAttendeesTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + event_id = tool_parameters.get("event_id") + attendee_phone_or_email = tool_parameters.get("attendee_phone_or_email") + need_notification = tool_parameters.get("need_notification", True) + + res = client.add_event_attendees(event_id, attendee_phone_or_email, need_notification) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.yaml new file mode 100644 index 0000000000..b7744499b0 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/add_event_attendees.yaml @@ -0,0 +1,54 @@ +identity: + name: add_event_attendees + author: Doug Lea + label: + en_US: Add Event Attendees + zh_Hans: 添加日程参会人 +description: + human: + en_US: Add Event Attendees + zh_Hans: 添加日程参会人 + llm: A tool for adding attendees to events in Feishu. (在飞书中添加日程参会人) +parameters: + - name: event_id + type: string + required: true + label: + en_US: Event ID + zh_Hans: 日程 ID + human_description: + en_US: | + The ID of the event, which will be returned when the event is created. For example: fb2a6406-26d6-4c8d-a487-6f0246c94d2f_0. + zh_Hans: | + 创建日程时会返回日程 ID。例如: fb2a6406-26d6-4c8d-a487-6f0246c94d2f_0。 + llm_description: | + 日程 ID,创建日程时会返回日程 ID。例如: fb2a6406-26d6-4c8d-a487-6f0246c94d2f_0。 + form: llm + + - name: need_notification + type: boolean + required: false + default: true + label: + en_US: Need Notification + zh_Hans: 是否需要通知 + human_description: + en_US: | + Whether to send a Bot notification to attendees. true: send, false: do not send. + zh_Hans: | + 是否给参与人发送 Bot 通知,true: 发送,false: 不发送。 + llm_description: | + 是否给参与人发送 Bot 通知,true: 发送,false: 不发送。 + form: form + + - name: attendee_phone_or_email + type: string + required: true + label: + en_US: Attendee Phone or Email + zh_Hans: 参会人电话或邮箱 + human_description: + en_US: The list of attendee emails or phone numbers, separated by commas. 
+ zh_Hans: 日程参会人邮箱或者手机号列表,使用逗号分隔。 + llm_description: 日程参会人邮箱或者手机号列表,使用逗号分隔。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.py b/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.py new file mode 100644 index 0000000000..8820bebdbe --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.py @@ -0,0 +1,26 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class CreateEventTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + summary = tool_parameters.get("summary") + description = tool_parameters.get("description") + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + attendee_ability = tool_parameters.get("attendee_ability") + need_notification = tool_parameters.get("need_notification", True) + auto_record = tool_parameters.get("auto_record", False) + + res = client.create_event( + summary, description, start_time, end_time, attendee_ability, need_notification, auto_record + ) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.yaml new file mode 100644 index 0000000000..f0784221ce --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/create_event.yaml @@ -0,0 +1,119 @@ +identity: + name: create_event + author: Doug Lea + label: + en_US: Create Event + zh_Hans: 创建日程 +description: + human: + en_US: Create Event + zh_Hans: 创建日程 + llm: A tool for creating events in Feishu.(创建飞书日程) +parameters: + - name: summary + type: string + required: false + label: + en_US: Summary + zh_Hans: 日程标题 + human_description: + en_US: The title of the event. If not filled, the event title will display (No Subject). + zh_Hans: 日程标题,若不填则日程标题显示 (无主题)。 + llm_description: 日程标题,若不填则日程标题显示 (无主题)。 + form: llm + + - name: description + type: string + required: false + label: + en_US: Description + zh_Hans: 日程描述 + human_description: + en_US: The description of the event. + zh_Hans: 日程描述。 + llm_description: 日程描述。 + form: llm + + - name: need_notification + type: boolean + required: false + default: true + label: + en_US: Need Notification + zh_Hans: 是否发送通知 + human_description: + en_US: | + Whether to send a bot message when the event is created, true: send, false: do not send. + zh_Hans: 创建日程时是否发送 bot 消息,true:发送,false:不发送。 + llm_description: 创建日程时是否发送 bot 消息,true:发送,false:不发送。 + form: form + + - name: start_time + type: string + required: true + label: + en_US: Start Time + zh_Hans: 开始时间 + human_description: + en_US: | + The start time of the event, format: 2006-01-02 15:04:05. + zh_Hans: 日程开始时间,格式:2006-01-02 15:04:05。 + llm_description: 日程开始时间,格式:2006-01-02 15:04:05。 + form: llm + + - name: end_time + type: string + required: true + label: + en_US: End Time + zh_Hans: 结束时间 + human_description: + en_US: | + The end time of the event, format: 2006-01-02 15:04:05. 
+      zh_Hans: 日程结束时间,格式:2006-01-02 15:04:05。
+    llm_description: 日程结束时间,格式:2006-01-02 15:04:05。
+    form: llm
+
+  - name: attendee_ability
+    type: select
+    required: false
+    options:
+      - value: none
+        label:
+          en_US: none
+          zh_Hans: 无
+      - value: can_see_others
+        label:
+          en_US: can_see_others
+          zh_Hans: 可以查看参与人列表
+      - value: can_invite_others
+        label:
+          en_US: can_invite_others
+          zh_Hans: 可以邀请其它参与人
+      - value: can_modify_event
+        label:
+          en_US: can_modify_event
+          zh_Hans: 可以编辑日程
+    default: "none"
+    label:
+      en_US: attendee_ability
+      zh_Hans: 参会人权限
+    human_description:
+      en_US: Attendee ability, optional values are none, can_see_others, can_invite_others, can_modify_event, with a default value of none.
+      zh_Hans: 参会人权限,可选值有无、可以查看参与人列表、可以邀请其它参与人、可以编辑日程,默认值为无。
+    llm_description: 参会人权限,可选值有无、可以查看参与人列表、可以邀请其它参与人、可以编辑日程,默认值为无。
+    form: form
+
+  - name: auto_record
+    type: boolean
+    required: false
+    default: false
+    label:
+      en_US: Auto Record
+      zh_Hans: 自动录制
+    human_description:
+      en_US: |
+        Whether to enable automatic recording, true: enabled, automatically record when the meeting starts; false: not enabled.
+      zh_Hans: 是否开启自动录制,true:开启,会议开始后自动录制;false:不开启。
+    llm_description: 是否开启自动录制,true:开启,会议开始后自动录制;false:不开启。
+    form: form
diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.py b/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.py
new file mode 100644
index 0000000000..144889692f
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.py
@@ -0,0 +1,19 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class DeleteEventTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get("app_id")
+        app_secret = self.runtime.credentials.get("app_secret")
+        client = FeishuRequest(app_id, app_secret)
+
+        event_id = tool_parameters.get("event_id")
+        need_notification = tool_parameters.get("need_notification", True)
+
+        res = client.delete_event(event_id, need_notification)
+
+        return self.create_json_message(res)
diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.yaml
new file mode 100644
index 0000000000..54fdb04acc
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_calendar/tools/delete_event.yaml
@@ -0,0 +1,38 @@
+identity:
+  name: delete_event
+  author: Doug Lea
+  label:
+    en_US: Delete Event
+    zh_Hans: 删除日程
+description:
+  human:
+    en_US: Delete Event
+    zh_Hans: 删除日程
+  llm: A tool for deleting events in Feishu.(在飞书中删除日程)
+parameters:
+  - name: event_id
+    type: string
+    required: true
+    label:
+      en_US: Event ID
+      zh_Hans: 日程 ID
+    human_description:
+      en_US: |
+        The ID of the event, for example: e8b9791c-39ae-4908-8ad8-66b13159b9fb_0.
+      zh_Hans: 日程 ID,例如:e8b9791c-39ae-4908-8ad8-66b13159b9fb_0。
+    llm_description: 日程 ID,例如:e8b9791c-39ae-4908-8ad8-66b13159b9fb_0。
+    form: llm
+
+  - name: need_notification
+    type: boolean
+    required: false
+    default: true
+    label:
+      en_US: Need Notification
+      zh_Hans: 是否需要通知
+    human_description:
+      en_US: |
+        Indicates whether to send bot notifications to event participants upon deletion. true: send, false: do not send.
+      zh_Hans: 删除日程是否给日程参与人发送 bot 通知,true:发送,false:不发送。
+    llm_description: 删除日程是否给日程参与人发送 bot 通知,true:发送,false:不发送。
+    form: form
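A quick invocation sketch for the tool above, bypassing the Dify tool runtime. The credentials and event id are placeholders, and it assumes FeishuRequest keeps the (event_id, need_notification) call shape used in delete_event.py:

from core.tools.utils.feishu_api_utils import FeishuRequest

# Placeholder app credentials; real ones come from https://open.larkoffice.com/app.
client = FeishuRequest("cli_placeholder_app_id", "placeholder_app_secret")
# Event id format matches the example in delete_event.yaml; False suppresses bot notifications.
res = client.delete_event("e8b9791c-39ae-4908-8ad8-66b13159b9fb_0", False)
print(res)  # JSON payload returned by the Feishu calendar API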
diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.py b/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.py
new file mode 100644
index 0000000000..a2cd5a8b17
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.py
@@ -0,0 +1,18 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class GetPrimaryCalendarTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get("app_id")
+        app_secret = self.runtime.credentials.get("app_secret")
+        client = FeishuRequest(app_id, app_secret)
+
+        user_id_type = tool_parameters.get("user_id_type", "open_id")
+
+        res = client.get_primary_calendar(user_id_type)
+
+        return self.create_json_message(res)
diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.yaml
new file mode 100644
index 0000000000..3440c85d4a
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_calendar/tools/get_primary_calendar.yaml
@@ -0,0 +1,37 @@
+identity:
+  name: get_primary_calendar
+  author: Doug Lea
+  label:
+    en_US: Get Primary Calendar
+    zh_Hans: 查询主日历信息
+description:
+  human:
+    en_US: Get Primary Calendar
+    zh_Hans: 查询主日历信息
+  llm: A tool for querying primary calendar information in Feishu.(在飞书中查询主日历信息)
+parameters:
+  - name: user_id_type
+    type: select
+    required: false
+    options:
+      - value: open_id
+        label:
+          en_US: open_id
+          zh_Hans: open_id
+      - value: union_id
+        label:
+          en_US: union_id
+          zh_Hans: union_id
+      - value: user_id
+        label:
+          en_US: user_id
+          zh_Hans: user_id
+    default: "open_id"
+    label:
+      en_US: user_id_type
+      zh_Hans: 用户 ID 类型
+    human_description:
+      en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id.
+ zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.py b/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.py new file mode 100644 index 0000000000..8815b4c9c8 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.py @@ -0,0 +1,21 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class ListEventsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + page_token = tool_parameters.get("page_token") + page_size = tool_parameters.get("page_size") + + res = client.list_events(start_time, end_time, page_token, page_size) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.yaml new file mode 100644 index 0000000000..f4a5bfe6ba --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/list_events.yaml @@ -0,0 +1,62 @@ +identity: + name: list_events + author: Doug Lea + label: + en_US: List Events + zh_Hans: 获取日程列表 +description: + human: + en_US: List Events + zh_Hans: 获取日程列表 + llm: A tool for listing events in Feishu.(在飞书中获取日程列表) +parameters: + - name: start_time + type: string + required: false + label: + en_US: Start Time + zh_Hans: 开始时间 + human_description: + en_US: | + The start time, defaults to 0:00 of the current day if not provided, format: 2006-01-02 15:04:05. + zh_Hans: 开始时间,不传值时默认当天 0 点时间,格式为:2006-01-02 15:04:05。 + llm_description: 开始时间,不传值时默认当天 0 点时间,格式为:2006-01-02 15:04:05。 + form: llm + + - name: end_time + type: string + required: false + label: + en_US: End Time + zh_Hans: 结束时间 + human_description: + en_US: | + The end time, defaults to 23:59 of the current day if not provided, format: 2006-01-02 15:04:05. + zh_Hans: 结束时间,不传值时默认当天 23:59 分时间,格式为:2006-01-02 15:04:05。 + llm_description: 结束时间,不传值时默认当天 23:59 分时间,格式为:2006-01-02 15:04:05。 + form: llm + + - name: page_size + type: number + required: false + default: 50 + label: + en_US: Page Size + zh_Hans: 分页大小 + human_description: + en_US: The page size, i.e., the number of data entries returned in a single request. The default value is 50, and the value range is [50,1000]. + zh_Hans: 分页大小,即单次请求所返回的数据条目数。默认值为 50,取值范围为 [50,1000]。 + llm_description: 分页大小,即单次请求所返回的数据条目数。默认值为 50,取值范围为 [50,1000]。 + form: llm + + - name: page_token + type: string + required: false + label: + en_US: Page Token + zh_Hans: 分页标记 + human_description: + en_US: The pagination token. Leave it blank for the first request, indicating to start traversing from the beginning; when the pagination query result has more items, a new page_token will be returned simultaneously, which can be used to obtain the query result in the next traversal. 
+ zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.py b/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.py new file mode 100644 index 0000000000..dc365205a4 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class SearchEventsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + query = tool_parameters.get("query") + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + page_token = tool_parameters.get("page_token") + user_id_type = tool_parameters.get("user_id_type", "open_id") + page_size = tool_parameters.get("page_size", 20) + + res = client.search_events(query, start_time, end_time, page_token, user_id_type, page_size) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.yaml new file mode 100644 index 0000000000..e92a282091 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/search_events.yaml @@ -0,0 +1,100 @@ +identity: + name: search_events + author: Doug Lea + label: + en_US: Search Events + zh_Hans: 搜索日程 +description: + human: + en_US: Search Events + zh_Hans: 搜索日程 + llm: A tool for searching events in Feishu.(在飞书中搜索日程) +parameters: + - name: user_id_type + type: select + required: false + options: + - value: open_id + label: + en_US: open_id + zh_Hans: open_id + - value: union_id + label: + en_US: union_id + zh_Hans: union_id + - value: user_id + label: + en_US: user_id + zh_Hans: user_id + default: "open_id" + label: + en_US: user_id_type + zh_Hans: 用户 ID 类型 + human_description: + en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. + zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form + + - name: query + type: string + required: true + label: + en_US: Query + zh_Hans: 搜索关键字 + human_description: + en_US: The search keyword used for fuzzy searching event names, with a maximum input of 200 characters. + zh_Hans: 用于模糊查询日程名称的搜索关键字,最大输入 200 字符。 + llm_description: 用于模糊查询日程名称的搜索关键字,最大输入 200 字符。 + form: llm + + - name: start_time + type: string + required: false + label: + en_US: Start Time + zh_Hans: 开始时间 + human_description: + en_US: | + The start time, defaults to 0:00 of the current day if not provided, format: 2006-01-02 15:04:05. + zh_Hans: 开始时间,不传值时默认当天 0 点时间,格式为:2006-01-02 15:04:05。 + llm_description: 开始时间,不传值时默认当天 0 点时间,格式为:2006-01-02 15:04:05。 + form: llm + + - name: end_time + type: string + required: false + label: + en_US: End Time + zh_Hans: 结束时间 + human_description: + en_US: | + The end time, defaults to 23:59 of the current day if not provided, format: 2006-01-02 15:04:05. 
+ zh_Hans: 结束时间,不传值时默认当天 23:59 分时间,格式为:2006-01-02 15:04:05。 + llm_description: 结束时间,不传值时默认当天 23:59 分时间,格式为:2006-01-02 15:04:05。 + form: llm + + - name: page_size + type: number + required: false + default: 20 + label: + en_US: Page Size + zh_Hans: 分页大小 + human_description: + en_US: The page size, i.e., the number of data entries returned in a single request. The default value is 20, and the value range is [10,100]. + zh_Hans: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [10,100]。 + llm_description: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [10,100]。 + form: llm + + - name: page_token + type: string + required: false + label: + en_US: Page Token + zh_Hans: 分页标记 + human_description: + en_US: The pagination token. Leave it blank for the first request, indicating to start traversing from the beginning; when the pagination query result has more items, a new page_token will be returned simultaneously, which can be used to obtain the query result in the next traversal. + zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.py b/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.py new file mode 100644 index 0000000000..85bcb1d3f6 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.py @@ -0,0 +1,24 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class UpdateEventTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + event_id = tool_parameters.get("event_id") + summary = tool_parameters.get("summary") + description = tool_parameters.get("description") + need_notification = tool_parameters.get("need_notification", True) + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + auto_record = tool_parameters.get("auto_record", False) + + res = client.update_event(event_id, summary, description, need_notification, start_time, end_time, auto_record) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.yaml b/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.yaml new file mode 100644 index 0000000000..4d60dbf8c8 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_calendar/tools/update_event.yaml @@ -0,0 +1,100 @@ +identity: + name: update_event + author: Doug Lea + label: + en_US: Update Event + zh_Hans: 更新日程 +description: + human: + en_US: Update Event + zh_Hans: 更新日程 + llm: A tool for updating events in Feishu.(更新飞书中的日程) +parameters: + - name: event_id + type: string + required: true + label: + en_US: Event ID + zh_Hans: 日程 ID + human_description: + en_US: | + The ID of the event, for example: e8b9791c-39ae-4908-8ad8-66b13159b9fb_0. + zh_Hans: 日程 ID,例如:e8b9791c-39ae-4908-8ad8-66b13159b9fb_0。 + llm_description: 日程 ID,例如:e8b9791c-39ae-4908-8ad8-66b13159b9fb_0。 + form: llm + + - name: summary + type: string + required: false + label: + en_US: Summary + zh_Hans: 日程标题 + human_description: + en_US: The title of the event. 
+ zh_Hans: 日程标题。 + llm_description: 日程标题。 + form: llm + + - name: description + type: string + required: false + label: + en_US: Description + zh_Hans: 日程描述 + human_description: + en_US: The description of the event. + zh_Hans: 日程描述。 + llm_description: 日程描述。 + form: llm + + - name: need_notification + type: boolean + required: false + label: + en_US: Need Notification + zh_Hans: 是否发送通知 + human_description: + en_US: | + Whether to send a bot message when the event is updated, true: send, false: do not send. + zh_Hans: 更新日程时是否发送 bot 消息,true:发送,false:不发送。 + llm_description: 更新日程时是否发送 bot 消息,true:发送,false:不发送。 + form: form + + - name: start_time + type: string + required: false + label: + en_US: Start Time + zh_Hans: 开始时间 + human_description: + en_US: | + The start time of the event, format: 2006-01-02 15:04:05. + zh_Hans: 日程开始时间,格式:2006-01-02 15:04:05。 + llm_description: 日程开始时间,格式:2006-01-02 15:04:05。 + form: llm + + - name: end_time + type: string + required: false + label: + en_US: End Time + zh_Hans: 结束时间 + human_description: + en_US: | + The end time of the event, format: 2006-01-02 15:04:05. + zh_Hans: 日程结束时间,格式:2006-01-02 15:04:05。 + llm_description: 日程结束时间,格式:2006-01-02 15:04:05。 + form: llm + + - name: auto_record + type: boolean + required: false + label: + en_US: Auto Record + zh_Hans: 自动录制 + human_description: + en_US: | + Whether to enable automatic recording, true: enabled, automatically record when the meeting starts; false: not enabled. + zh_Hans: 是否开启自动录制,true:开启,会议开始后自动录制;false:不开启。 + llm_description: 是否开启自动录制,true:开启,会议开始后自动录制;false:不开启。 + form: form diff --git a/api/core/tools/provider/builtin/feishu_document/feishu_document.py b/api/core/tools/provider/builtin/feishu_document/feishu_document.py index b0a1e393eb..217ae52082 100644 --- a/api/core/tools/provider/builtin/feishu_document/feishu_document.py +++ b/api/core/tools/provider/builtin/feishu_document/feishu_document.py @@ -1,15 +1,7 @@ -from core.tools.errors import ToolProviderCredentialValidationError from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.utils.feishu_api_utils import FeishuRequest +from core.tools.utils.feishu_api_utils import auth class FeishuDocumentProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict) -> None: - app_id = credentials.get("app_id") - app_secret = credentials.get("app_secret") - if not app_id or not app_secret: - raise ToolProviderCredentialValidationError("app_id and app_secret is required") - try: - assert FeishuRequest(app_id, app_secret).tenant_access_token is not None - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml b/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml index 8eaa6b2704..8f9afa6149 100644 --- a/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml +++ b/api/core/tools/provider/builtin/feishu_document/feishu_document.yaml @@ -5,8 +5,10 @@ identity: en_US: Lark Cloud Document zh_Hans: 飞书云文档 description: - en_US: Lark Cloud Document - zh_Hans: 飞书云文档 + en_US: | + Lark cloud document, requires the following permissions: docx:document、drive:drive、docs:document.content:read. 
+ zh_Hans: | + 飞书云文档,需要开通以下权限: docx:document、drive:drive、docs:document.content:read。 icon: icon.svg tags: - social @@ -23,7 +25,7 @@ credentials_for_provider: help: en_US: Get your app_id and app_secret from Feishu zh_Hans: 从飞书获取您的 app_id 和 app_secret - url: https://open.feishu.cn + url: https://open.larkoffice.com/app app_secret: type: secret-input required: true diff --git a/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml b/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml index ddf2729f0e..85382e9d8e 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml +++ b/api/core/tools/provider/builtin/feishu_document/tools/create_document.yaml @@ -7,7 +7,7 @@ identity: description: human: en_US: Create Lark document - zh_Hans: 创建飞书文档,支持创建空文档和带内容的文档,支持 markdown 语法创建。 + zh_Hans: 创建飞书文档,支持创建空文档和带内容的文档,支持 markdown 语法创建。应用需要开启机器人能力(https://open.feishu.cn/document/faq/trouble-shooting/how-to-enable-bot-ability)。 llm: A tool for creating Feishu documents. parameters: - name: title @@ -41,7 +41,8 @@ parameters: en_US: folder_token zh_Hans: 文档所在文件夹的 Token human_description: - en_US: The token of the folder where the document is located. If it is not passed or is empty, it means the root directory. - zh_Hans: 文档所在文件夹的 Token,不传或传空表示根目录。 - llm_description: 文档所在文件夹的 Token,不传或传空表示根目录。 + en_US: | + The token of the folder where the document is located. If it is not passed or is empty, it means the root directory. For Example: https://svi136aogf123.feishu.cn/drive/folder/JgR9fiG9AlPt8EdsSNpcGjIInbf + zh_Hans: 文档所在文件夹的 Token,不传或传空表示根目录。例如:https://svi136aogf123.feishu.cn/drive/folder/JgR9fiG9AlPt8EdsSNpcGjIInbf。 + llm_description: 文档所在文件夹的 Token,不传或传空表示根目录。例如:https://svi136aogf123.feishu.cn/drive/folder/JgR9fiG9AlPt8EdsSNpcGjIInbf。 form: llm diff --git a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py index c94a5f70ed..e67a017fac 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py +++ b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.py @@ -12,8 +12,8 @@ class GetDocumentRawContentTool(BuiltinTool): client = FeishuRequest(app_id, app_secret) document_id = tool_parameters.get("document_id") - mode = tool_parameters.get("mode") - lang = tool_parameters.get("lang", 0) + mode = tool_parameters.get("mode", "markdown") + lang = tool_parameters.get("lang", "0") res = client.get_document_content(document_id, mode, lang) return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml index 51eda73a60..15e827cde9 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml +++ b/api/core/tools/provider/builtin/feishu_document/tools/get_document_content.yaml @@ -23,8 +23,18 @@ parameters: form: llm - name: mode - type: string + type: select required: false + options: + - value: text + label: + en_US: text + zh_Hans: text + - value: markdown + label: + en_US: markdown + zh_Hans: markdown + default: "markdown" label: en_US: mode zh_Hans: 文档返回格式 @@ -32,18 +42,29 @@ parameters: en_US: Format of the document return, optional values are text, markdown, can be empty, default is markdown. 
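+      # mode is now a select filled in the tool form (form: form below) instead of a free-form LLM string, so only text/markdown can be passed.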
zh_Hans: 文档返回格式,可选值有 text、markdown,可以为空,默认值为 markdown。 llm_description: 文档返回格式,可选值有 text、markdown,可以为空,默认值为 markdown。 - form: llm + form: form - name: lang - type: number + type: select required: false - default: 0 + options: + - value: "0" + label: + en_US: User's default name + zh_Hans: 用户的默认名称 + - value: "1" + label: + en_US: User's English name + zh_Hans: 用户的英文名称 + default: "0" label: en_US: lang zh_Hans: 指定@用户的语言 human_description: en_US: | Specifies the language for MentionUser, optional values are [0, 1]. 0: User's default name, 1: User's English name, default is 0. - zh_Hans: 指定返回的 MentionUser,即 @用户 的语言,可选值有 [0,1]。0:该用户的默认名称,1:该用户的英文名称,默认值为 0。 - llm_description: 指定返回的 MentionUser,即 @用户 的语言,可选值有 [0,1]。0:该用户的默认名称,1:该用户的英文名称,默认值为 0。 - form: llm + zh_Hans: | + 指定返回的 MentionUser,即@用户的语言,可选值有 [0,1]。0: 该用户的默认名称,1: 该用户的英文名称,默认值为 0。 + llm_description: | + 指定返回的 MentionUser,即@用户的语言,可选值有 [0,1]。0: 该用户的默认名称,1: 该用户的英文名称,默认值为 0。 + form: form diff --git a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py index 572a7abf28..dd57c6870d 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py +++ b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.py @@ -12,8 +12,9 @@ class ListDocumentBlockTool(BuiltinTool): client = FeishuRequest(app_id, app_secret) document_id = tool_parameters.get("document_id") - page_size = tool_parameters.get("page_size", 500) page_token = tool_parameters.get("page_token", "") + user_id_type = tool_parameters.get("user_id_type", "open_id") + page_size = tool_parameters.get("page_size", 500) - res = client.list_document_blocks(document_id, page_token, page_size) + res = client.list_document_blocks(document_id, page_token, user_id_type, page_size) return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml index 019ac98390..5b8ef7d53c 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml +++ b/api/core/tools/provider/builtin/feishu_document/tools/list_document_blocks.yaml @@ -46,12 +46,12 @@ parameters: en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. 
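+      # Now a fixed form select (form: form below) rather than an LLM-filled value.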
zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 - form: llm + form: form - name: page_size type: number required: false - default: "500" + default: 500 label: en_US: page_size zh_Hans: 分页大小 diff --git a/api/core/tools/provider/builtin/feishu_document/tools/write_document.py b/api/core/tools/provider/builtin/feishu_document/tools/write_document.py index 6061250e48..59f08f53dc 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/write_document.py +++ b/api/core/tools/provider/builtin/feishu_document/tools/write_document.py @@ -13,7 +13,7 @@ class CreateDocumentTool(BuiltinTool): document_id = tool_parameters.get("document_id") content = tool_parameters.get("content") - position = tool_parameters.get("position") + position = tool_parameters.get("position", "end") res = client.write_document(document_id, content, position) return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml b/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml index 4282e3dcf3..de70f4e772 100644 --- a/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml +++ b/api/core/tools/provider/builtin/feishu_document/tools/write_document.yaml @@ -35,25 +35,23 @@ parameters: form: llm - name: position - type: string + type: select required: false - label: - en_US: position - zh_Hans: 添加位置 - human_description: - en_US: | - Enumeration values: start or end. Use 'start' to add content at the beginning of the document, and 'end' to add content at the end. The default value is 'end'. - zh_Hans: 枚举值:start 或 end。使用 'start' 在文档开头添加内容,使用 'end' 在文档结尾添加内容,默认值为 'end'。 - llm_description: | - 枚举值 start、end,start: 在文档开头添加内容;end: 在文档结尾添加内容,默认值为 end。 - form: llm options: - value: start label: - en_US: start - zh_Hans: 在文档开头添加内容 + en_US: document start + zh_Hans: 文档开始 - value: end label: - en_US: end - zh_Hans: 在文档结尾添加内容 - default: start + en_US: document end + zh_Hans: 文档结束 + default: "end" + label: + en_US: position + zh_Hans: 内容添加位置 + human_description: + en_US: Content insertion position, optional values are start, end. 'start' means adding content at the beginning of the document; 'end' means adding content at the end of the document. The default value is end. 
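+      # Illustrative: position=start prepends the content, position=end (the default) appends it.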
+ zh_Hans: 内容添加位置,可选值有 start、end。start 表示在文档开头添加内容;end 表示在文档结尾添加内容,默认值为 end。 + llm_description: 内容添加位置,可选值有 start、end。start 表示在文档开头添加内容;end 表示在文档结尾添加内容,默认值为 end。 + form: form diff --git a/api/core/tools/provider/builtin/feishu_message/feishu_message.py b/api/core/tools/provider/builtin/feishu_message/feishu_message.py index 7b3adb9293..a3b5473769 100644 --- a/api/core/tools/provider/builtin/feishu_message/feishu_message.py +++ b/api/core/tools/provider/builtin/feishu_message/feishu_message.py @@ -1,15 +1,7 @@ -from core.tools.errors import ToolProviderCredentialValidationError from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController -from core.tools.utils.feishu_api_utils import FeishuRequest +from core.tools.utils.feishu_api_utils import auth class FeishuMessageProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict) -> None: - app_id = credentials.get("app_id") - app_secret = credentials.get("app_secret") - if not app_id or not app_secret: - raise ToolProviderCredentialValidationError("app_id and app_secret is required") - try: - assert FeishuRequest(app_id, app_secret).tenant_access_token is not None - except Exception as e: - raise ToolProviderCredentialValidationError(str(e)) + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml b/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml index 1bd8953ddd..56683ec168 100644 --- a/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml +++ b/api/core/tools/provider/builtin/feishu_message/feishu_message.yaml @@ -5,8 +5,10 @@ identity: en_US: Lark Message zh_Hans: 飞书消息 description: - en_US: Lark Message - zh_Hans: 飞书消息 + en_US: | + Lark message, requires the following permissions: im:message、im:message.group_msg. 
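+  # Assumption from the scope names: im:message covers direct messages and im:message.group_msg additionally covers group chats.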
+ zh_Hans: | + 飞书消息,需要开通以下权限: im:message、im:message.group_msg。 icon: icon.svg tags: - social @@ -23,7 +25,7 @@ credentials_for_provider: help: en_US: Get your app_id and app_secret from Feishu zh_Hans: 从飞书获取您的 app_id 和 app_secret - url: https://open.feishu.cn + url: https://open.larkoffice.com/app app_secret: type: secret-input required: true diff --git a/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.py b/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.py new file mode 100644 index 0000000000..7eb29230b2 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class GetChatMessagesTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + container_id = tool_parameters.get("container_id") + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + page_token = tool_parameters.get("page_token") + sort_type = tool_parameters.get("sort_type", "ByCreateTimeAsc") + page_size = tool_parameters.get("page_size", 20) + + res = client.get_chat_messages(container_id, start_time, end_time, page_token, sort_type, page_size) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.yaml b/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.yaml new file mode 100644 index 0000000000..153c8c80e5 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_message/tools/get_chat_messages.yaml @@ -0,0 +1,96 @@ +identity: + name: get_chat_messages + author: Doug Lea + label: + en_US: Get Chat Messages + zh_Hans: 获取指定单聊、群聊的消息历史 +description: + human: + en_US: Get Chat Messages + zh_Hans: 获取指定单聊、群聊的消息历史 + llm: A tool for getting chat messages from specific one-on-one chats or group chats.(获取指定单聊、群聊的消息历史) +parameters: + - name: container_id + type: string + required: true + label: + en_US: Container Id + zh_Hans: 群聊或单聊的 ID + human_description: + en_US: The ID of the group chat or single chat. Refer to the group ID description for how to obtain it. https://open.feishu.cn/document/server-docs/group/chat/chat-id-description + zh_Hans: 群聊或单聊的 ID,获取方式参见群 ID 说明。https://open.feishu.cn/document/server-docs/group/chat/chat-id-description + llm_description: 群聊或单聊的 ID,获取方式参见群 ID 说明。https://open.feishu.cn/document/server-docs/group/chat/chat-id-description + form: llm + + - name: start_time + type: string + required: false + label: + en_US: Start Time + zh_Hans: 起始时间 + human_description: + en_US: The start time for querying historical messages, formatted as "2006-01-02 15:04:05". + zh_Hans: 待查询历史信息的起始时间,格式为 "2006-01-02 15:04:05"。 + llm_description: 待查询历史信息的起始时间,格式为 "2006-01-02 15:04:05"。 + form: llm + + - name: end_time + type: string + required: false + label: + en_US: End Time + zh_Hans: 结束时间 + human_description: + en_US: The end time for querying historical messages, formatted as "2006-01-02 15:04:05". 
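+      # Illustrative range: start_time "2024-05-01 00:00:00" with end_time "2024-05-01 23:59:59" fetches one day of history.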
+ zh_Hans: 待查询历史信息的结束时间,格式为 "2006-01-02 15:04:05"。 + llm_description: 待查询历史信息的结束时间,格式为 "2006-01-02 15:04:05"。 + form: llm + + - name: sort_type + type: select + required: false + options: + - value: ByCreateTimeAsc + label: + en_US: ByCreateTimeAsc + zh_Hans: ByCreateTimeAsc + - value: ByCreateTimeDesc + label: + en_US: ByCreateTimeDesc + zh_Hans: ByCreateTimeDesc + default: "ByCreateTimeAsc" + label: + en_US: Sort Type + zh_Hans: 排序方式 + human_description: + en_US: | + The message sorting method. Optional values are ByCreateTimeAsc: sorted in ascending order by message creation time; ByCreateTimeDesc: sorted in descending order by message creation time. The default value is ByCreateTimeAsc. Note: When using page_token for pagination requests, the sorting method (sort_type) is consistent with the first request and cannot be changed midway. + zh_Hans: | + 消息排序方式,可选值有 ByCreateTimeAsc:按消息创建时间升序排列;ByCreateTimeDesc:按消息创建时间降序排列。默认值为:ByCreateTimeAsc。注意:使用 page_token 分页请求时,排序方式(sort_type)均与第一次请求一致,不支持中途改换排序方式。 + llm_description: 消息排序方式,可选值有 ByCreateTimeAsc:按消息创建时间升序排列;ByCreateTimeDesc:按消息创建时间降序排列。默认值为:ByCreateTimeAsc。注意:使用 page_token 分页请求时,排序方式(sort_type)均与第一次请求一致,不支持中途改换排序方式。 + form: form + + - name: page_size + type: number + required: false + default: 20 + label: + en_US: Page Size + zh_Hans: 分页大小 + human_description: + en_US: The page size, i.e., the number of data entries returned in a single request. The default value is 20, and the value range is [1,50]. + zh_Hans: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [1,50]。 + llm_description: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [1,50]。 + form: llm + + - name: page_token + type: string + required: false + label: + en_US: Page Token + zh_Hans: 分页标记 + human_description: + en_US: The pagination token. Leave it blank for the first request, indicating to start traversing from the beginning; when the pagination query result has more items, a new page_token will be returned simultaneously, which can be used to obtain the query result in the next traversal. 
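+      # Pagination sketch: omit page_token on the first call, then pass back the token from each response until none is returned.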
+      zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。
+      llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。
+    form: llm
diff --git a/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.py b/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.py
new file mode 100644
index 0000000000..3b14f46e00
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.py
@@ -0,0 +1,21 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class GetThreadMessagesTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get("app_id")
+        app_secret = self.runtime.credentials.get("app_secret")
+        client = FeishuRequest(app_id, app_secret)
+
+        container_id = tool_parameters.get("container_id")
+        page_token = tool_parameters.get("page_token")
+        sort_type = tool_parameters.get("sort_type", "ByCreateTimeAsc")
+        page_size = tool_parameters.get("page_size", 20)
+
+        res = client.get_thread_messages(container_id, page_token, sort_type, page_size)
+
+        return self.create_json_message(res)
diff --git a/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.yaml b/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.yaml
new file mode 100644
index 0000000000..8d5fed9d0b
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_message/tools/get_thread_messages.yaml
@@ -0,0 +1,72 @@
+identity:
+  name: get_thread_messages
+  author: Doug Lea
+  label:
+    en_US: Get Thread Messages
+    zh_Hans: 获取指定话题的消息历史
+description:
+  human:
+    en_US: Get Thread Messages
+    zh_Hans: 获取指定话题的消息历史
+  llm: A tool for getting chat messages from specific threads.(获取指定话题的消息历史)
+parameters:
+  - name: container_id
+    type: string
+    required: true
+    label:
+      en_US: Thread ID
+      zh_Hans: 话题 ID
+    human_description:
+      en_US: The ID of the thread. Refer to the thread overview on how to obtain the thread_id. https://open.feishu.cn/document/uAjLw4CM/ukTMukTMukTM/reference/im-v1/message/thread-introduction
+      zh_Hans: 话题 ID,获取方式参见话题概述的如何获取 thread_id 章节。https://open.feishu.cn/document/uAjLw4CM/ukTMukTMukTM/reference/im-v1/message/thread-introduction
+      llm_description: 话题 ID,获取方式参见话题概述的如何获取 thread_id 章节。https://open.feishu.cn/document/uAjLw4CM/ukTMukTMukTM/reference/im-v1/message/thread-introduction
+    form: llm
+
+  - name: sort_type
+    type: select
+    required: false
+    options:
+      - value: ByCreateTimeAsc
+        label:
+          en_US: ByCreateTimeAsc
+          zh_Hans: ByCreateTimeAsc
+      - value: ByCreateTimeDesc
+        label:
+          en_US: ByCreateTimeDesc
+          zh_Hans: ByCreateTimeDesc
+    default: "ByCreateTimeAsc"
+    label:
+      en_US: Sort Type
+      zh_Hans: 排序方式
+    human_description:
+      en_US: |
+        The message sorting method. Optional values are ByCreateTimeAsc: sorted in ascending order by message creation time; ByCreateTimeDesc: sorted in descending order by message creation time. The default value is ByCreateTimeAsc. Note: When using page_token for pagination requests, the sorting method (sort_type) is consistent with the first request and cannot be changed midway.
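+      # As noted above, keep sort_type identical across paginated calls.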
+ zh_Hans: | + 消息排序方式,可选值有 ByCreateTimeAsc:按消息创建时间升序排列;ByCreateTimeDesc:按消息创建时间降序排列。默认值为:ByCreateTimeAsc。注意:使用 page_token 分页请求时,排序方式(sort_type)均与第一次请求一致,不支持中途改换排序方式。 + llm_description: 消息排序方式,可选值有 ByCreateTimeAsc:按消息创建时间升序排列;ByCreateTimeDesc:按消息创建时间降序排列。默认值为:ByCreateTimeAsc。注意:使用 page_token 分页请求时,排序方式(sort_type)均与第一次请求一致,不支持中途改换排序方式。 + form: form + + - name: page_size + type: number + required: false + default: 20 + label: + en_US: Page Size + zh_Hans: 分页大小 + human_description: + en_US: The page size, i.e., the number of data entries returned in a single request. The default value is 20, and the value range is [1,50]. + zh_Hans: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [1,50]。 + llm_description: 分页大小,即单次请求所返回的数据条目数。默认值为 20,取值范围为 [1,50]。 + form: llm + + - name: page_token + type: string + required: false + label: + en_US: Page Token + zh_Hans: 分页标记 + human_description: + en_US: The pagination token. Leave it blank for the first request, indicating to start traversing from the beginning; when the pagination query result has more items, a new page_token will be returned simultaneously, which can be used to obtain the query result in the next traversal. + zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml b/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml index 6e398b18ab..4f7f65a8a7 100644 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml +++ b/api/core/tools/provider/builtin/feishu_message/tools/send_bot_message.yaml @@ -10,53 +10,53 @@ description: zh_Hans: 发送飞书应用消息 llm: A tool for sending Feishu application messages. parameters: + - name: receive_id + type: string + required: true + label: + en_US: receive_id + zh_Hans: 消息接收者的 ID + human_description: + en_US: The ID of the message receiver, the ID type is consistent with the value of the query parameter receive_id_type. + zh_Hans: 消息接收者的 ID,ID 类型与查询参数 receive_id_type 的取值一致。 + llm_description: 消息接收者的 ID,ID 类型与查询参数 receive_id_type 的取值一致。 + form: llm + - name: receive_id_type type: select required: true options: - value: open_id label: - en_US: open id - zh_Hans: open id + en_US: open_id + zh_Hans: open_id - value: union_id label: - en_US: union id - zh_Hans: union id + en_US: union_id + zh_Hans: union_id - value: user_id label: - en_US: user id - zh_Hans: user id + en_US: user_id + zh_Hans: user_id - value: email label: en_US: email zh_Hans: email - value: chat_id label: - en_US: chat id - zh_Hans: chat id + en_US: chat_id + zh_Hans: chat_id label: - en_US: User ID Type - zh_Hans: 用户 ID 类型 + en_US: receive_id_type + zh_Hans: 消息接收者的 ID 类型 human_description: - en_US: User ID Type - zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。 - llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id。 - form: llm - - - name: receive_id - type: string - required: true - label: - en_US: Receive Id - zh_Hans: 消息接收者的 ID - human_description: - en_US: The ID of the message receiver. The ID type should correspond to the query parameter receive_id_type. - zh_Hans: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。 - llm_description: 消息接收者的 ID,ID 类型应与查询参数 receive_id_type 对应。 - form: llm + en_US: The ID type of the message receiver, optional values are open_id, union_id, user_id, email, chat_id, with a default value of open_id. 
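+      # Illustrative pairing: receive_id_type=email with receive_id "user@example.com".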
+ zh_Hans: 消息接收者的 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id,默认值为 open_id。 + llm_description: 消息接收者的 ID 类型,可选值有 open_id、union_id、user_id、email、chat_id,默认值为 open_id。 + form: form - name: msg_type - type: string + type: select required: true options: - value: text @@ -65,27 +65,61 @@ parameters: zh_Hans: 文本 - value: interactive label: - en_US: message card - zh_Hans: 消息卡片 + en_US: interactive + zh_Hans: 卡片 + - value: post + label: + en_US: post + zh_Hans: 富文本 + - value: image + label: + en_US: image + zh_Hans: 图片 + - value: file + label: + en_US: file + zh_Hans: 文件 + - value: audio + label: + en_US: audio + zh_Hans: 语音 + - value: media + label: + en_US: media + zh_Hans: 视频 + - value: sticker + label: + en_US: sticker + zh_Hans: 表情包 + - value: share_chat + label: + en_US: share_chat + zh_Hans: 分享群名片 + - value: share_user + label: + en_US: share_user + zh_Hans: 分享个人名片 + - value: system + label: + en_US: system + zh_Hans: 系统消息 label: - en_US: Message type + en_US: msg_type zh_Hans: 消息类型 human_description: - en_US: Message type, optional values are, text (text), interactive (message card). - zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - form: llm + en_US: Message type. Optional values are text, post, image, file, audio, media, sticker, interactive, share_chat, share_user, system. For detailed introduction of different message types, refer to the message content(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json). + zh_Hans: 消息类型。可选值有:text、post、image、file、audio、media、sticker、interactive、share_chat、share_user、system。不同消息类型的详细介绍,参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 + llm_description: 消息类型。可选值有:text、post、image、file、audio、media、sticker、interactive、share_chat、share_user、system。不同消息类型的详细介绍,参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 + form: form - name: content type: string required: true label: - en_US: Message content + en_US: content zh_Hans: 消息内容 human_description: - en_US: Message content - zh_Hans: | - 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容, - 具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json - llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。 + en_US: Message content, a JSON structure serialized string. The value of this parameter corresponds to msg_type. For example, if msg_type is text, this parameter needs to pass in text type content. To understand the format and usage limitations of different message types, refer to the message content(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json). 
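+      # Illustrative text payload (serialized JSON string): {"text":"Hello from Dify"}; the shape must match the msg_type chosen above.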
+ zh_Hans: 消息内容,JSON 结构序列化后的字符串。该参数的取值与 msg_type 对应,例如 msg_type 取值为 text,则该参数需要传入文本类型的内容。了解不同类型的消息内容格式、使用限制,可参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 + llm_description: 消息内容,JSON 结构序列化后的字符串。该参数的取值与 msg_type 对应,例如 msg_type 取值为 text,则该参数需要传入文本类型的内容。了解不同类型的消息内容格式、使用限制,可参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 form: llm diff --git a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml b/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml index 8b39ce4874..eeeae8b29c 100644 --- a/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml +++ b/api/core/tools/provider/builtin/feishu_message/tools/send_webhook_message.yaml @@ -15,15 +15,18 @@ parameters: required: true label: en_US: webhook - zh_Hans: webhook 的地址 + zh_Hans: webhook human_description: - en_US: The address of the webhook - zh_Hans: webhook 的地址 - llm_description: webhook 的地址 + en_US: | + The address of the webhook, the format of the webhook address corresponding to the bot is as follows: https://open.feishu.cn/open-apis/bot/v2/hook/xxxxxxxxxxxxxxxxx. For details, please refer to: Feishu Custom Bot Usage Guide(https://open.larkoffice.com/document/client-docs/bot-v3/add-custom-bot) + zh_Hans: | + webhook 的地址,机器人对应的 webhook 地址格式如下: https://open.feishu.cn/open-apis/bot/v2/hook/xxxxxxxxxxxxxxxxx,详情可参考: 飞书自定义机器人使用指南(https://open.larkoffice.com/document/client-docs/bot-v3/add-custom-bot) + llm_description: | + webhook 的地址,机器人对应的 webhook 地址格式如下: https://open.feishu.cn/open-apis/bot/v2/hook/xxxxxxxxxxxxxxxxx,详情可参考: 飞书自定义机器人使用指南(https://open.larkoffice.com/document/client-docs/bot-v3/add-custom-bot) form: llm - name: msg_type - type: string + type: select required: true options: - value: text @@ -32,27 +35,34 @@ parameters: zh_Hans: 文本 - value: interactive label: - en_US: message card - zh_Hans: 消息卡片 + en_US: interactive + zh_Hans: 卡片 + - value: image + label: + en_US: image + zh_Hans: 图片 + - value: share_chat + label: + en_US: share_chat + zh_Hans: 分享群名片 label: - en_US: Message type + en_US: msg_type zh_Hans: 消息类型 human_description: - en_US: Message type, optional values are, text (text), interactive (message card). - zh_Hans: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - llm_description: 消息类型,可选值有:text(文本)、interactive(消息卡片)。 - form: llm + en_US: Message type. Optional values are text, image, interactive, share_chat. For detailed introduction of different message types, refer to the message content(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json). + zh_Hans: 消息类型。可选值有:text、image、interactive、share_chat。不同消息类型的详细介绍,参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 + llm_description: 消息类型。可选值有:text、image、interactive、share_chat。不同消息类型的详细介绍,参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。 + form: form + - name: content type: string required: true label: - en_US: Message content + en_US: content zh_Hans: 消息内容 human_description: - en_US: Message content - zh_Hans: | - 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容, - 具体格式说明参考:https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json - llm_description: 消息内容,JSON 结构序列化后的字符串。不同 msg_type 对应不同内容。 + en_US: Message content, a JSON structure serialized string. The value of this parameter corresponds to msg_type. 
For example, if msg_type is text, this parameter needs to pass in text type content. To understand the format and usage limitations of different message types, refer to the message content(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json).
+      zh_Hans: 消息内容,JSON 结构序列化后的字符串。该参数的取值与 msg_type 对应,例如 msg_type 取值为 text,则该参数需要传入文本类型的内容。了解不同类型的消息内容格式、使用限制,可参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。
+      llm_description: 消息内容,JSON 结构序列化后的字符串。该参数的取值与 msg_type 对应,例如 msg_type 取值为 text,则该参数需要传入文本类型的内容。了解不同类型的消息内容格式、使用限制,可参见发送消息内容(https://open.larkoffice.com/document/server-docs/im-v1/message-content-description/create_json)。
     form: llm
diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/_assets/icon.png b/api/core/tools/provider/builtin/feishu_spreadsheet/_assets/icon.png
new file mode 100644
index 0000000000..258b361261
Binary files /dev/null and b/api/core/tools/provider/builtin/feishu_spreadsheet/_assets/icon.png differ
diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.py b/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.py
new file mode 100644
index 0000000000..a3b5473769
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.py
@@ -0,0 +1,7 @@
+from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
+from core.tools.utils.feishu_api_utils import auth
+
+
+class FeishuSpreadsheetProvider(BuiltinToolProviderController):
+    def _validate_credentials(self, credentials: dict) -> None:
+        auth(credentials)
diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.yaml
new file mode 100644
index 0000000000..29e448d730
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_spreadsheet/feishu_spreadsheet.yaml
@@ -0,0 +1,36 @@
+identity:
+  author: Doug Lea
+  name: feishu_spreadsheet
+  label:
+    en_US: Feishu Spreadsheet
+    zh_Hans: 飞书电子表格
+  description:
+    en_US: |
+      Feishu Spreadsheet, requires the following permissions: sheets:spreadsheet.
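+    # The single sheets:spreadsheet scope covers all of the spreadsheet tools added below.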
+ zh_Hans: | + 飞书电子表格,需要开通以下权限: sheets:spreadsheet。 + icon: icon.png + tags: + - social + - productivity +credentials_for_provider: + app_id: + type: text-input + required: true + label: + en_US: APP ID + placeholder: + en_US: Please input your feishu app id + zh_Hans: 请输入你的飞书 app id + help: + en_US: Get your app_id and app_secret from Feishu + zh_Hans: 从飞书获取您的 app_id 和 app_secret + url: https://open.larkoffice.com/app + app_secret: + type: secret-input + required: true + label: + en_US: APP Secret + placeholder: + en_US: Please input your app secret + zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.py new file mode 100644 index 0000000000..44d062f9bd --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.py @@ -0,0 +1,22 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class AddColsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + sheet_id = tool_parameters.get("sheet_id") + sheet_name = tool_parameters.get("sheet_name") + length = tool_parameters.get("length") + values = tool_parameters.get("values") + + res = client.add_cols(spreadsheet_token, sheet_id, sheet_name, length, values) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.yaml new file mode 100644 index 0000000000..ef457f8e00 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_cols.yaml @@ -0,0 +1,72 @@ +identity: + name: add_cols + author: Doug Lea + label: + en_US: Add Cols + zh_Hans: 新增多列至工作表最后 +description: + human: + en_US: Add Cols + zh_Hans: 新增多列至工作表最后 + llm: A tool for adding multiple columns to the end of a spreadsheet. (新增多列至工作表最后) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: spreadsheet_token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 url。 + llm_description: 电子表格 token,支持输入电子表格 url。 + form: llm + + - name: sheet_id + type: string + required: false + label: + en_US: sheet_id + zh_Hans: 工作表 ID + human_description: + en_US: Sheet ID, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表 ID,与 sheet_name 二者其一必填。 + llm_description: 工作表 ID,与 sheet_name 二者其一必填。 + form: llm + + - name: sheet_name + type: string + required: false + label: + en_US: sheet_name + zh_Hans: 工作表名称 + human_description: + en_US: Sheet name, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表名称,与 sheet_id 二者其一必填。 + llm_description: 工作表名称,与 sheet_id 二者其一必填。 + form: llm + + - name: length + type: number + required: true + label: + en_US: length + zh_Hans: 要增加的列数 + human_description: + en_US: Number of columns to add, range (0-5000]. 
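+      # (0-5000] means at least 1 and at most 5000 columns per call.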
+ zh_Hans: 要增加的列数,范围(0-5000]。 + llm_description: 要增加的列数,范围(0-5000]。 + form: llm + + - name: values + type: string + required: false + label: + en_US: values + zh_Hans: 新增列的单元格内容 + human_description: + en_US: | + Content of the new columns, array of objects in string format, each array represents a row of table data, format like: [ [ "ID","Name","Age" ],[ 1,"Zhang San",10 ],[ 2,"Li Si",11 ] ]. + zh_Hans: 新增列的单元格内容,数组对象字符串,每个数组一行表格数据,格式:[["编号","姓名","年龄"],[1,"张三",10],[2,"李四",11]]。 + llm_description: 新增列的单元格内容,数组对象字符串,每个数组一行表格数据,格式:[["编号","姓名","年龄"],[1,"张三",10],[2,"李四",11]]。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.py new file mode 100644 index 0000000000..3a85b7b46c --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.py @@ -0,0 +1,22 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class AddRowsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + sheet_id = tool_parameters.get("sheet_id") + sheet_name = tool_parameters.get("sheet_name") + length = tool_parameters.get("length") + values = tool_parameters.get("values") + + res = client.add_rows(spreadsheet_token, sheet_id, sheet_name, length, values) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.yaml new file mode 100644 index 0000000000..37653325ae --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/add_rows.yaml @@ -0,0 +1,72 @@ +identity: + name: add_rows + author: Doug Lea + label: + en_US: Add Rows + zh_Hans: 新增多行至工作表最后 +description: + human: + en_US: Add Rows + zh_Hans: 新增多行至工作表最后 + llm: A tool for adding multiple rows to the end of a spreadsheet. (新增多行至工作表最后) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: spreadsheet_token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 url。 + llm_description: 电子表格 token,支持输入电子表格 url。 + form: llm + + - name: sheet_id + type: string + required: false + label: + en_US: sheet_id + zh_Hans: 工作表 ID + human_description: + en_US: Sheet ID, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表 ID,与 sheet_name 二者其一必填。 + llm_description: 工作表 ID,与 sheet_name 二者其一必填。 + form: llm + + - name: sheet_name + type: string + required: false + label: + en_US: sheet_name + zh_Hans: 工作表名称 + human_description: + en_US: Sheet name, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表名称,与 sheet_id 二者其一必填。 + llm_description: 工作表名称,与 sheet_id 二者其一必填。 + form: llm + + - name: length + type: number + required: true + label: + en_US: length + zh_Hans: 要增加行数 + human_description: + en_US: Number of rows to add, range (0-5000]. 
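+      # Illustrative: length=3 matches the three-row values example shown below.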
+ zh_Hans: 要增加行数,范围(0-5000]。 + llm_description: 要增加行数,范围(0-5000]。 + form: llm + + - name: values + type: string + required: false + label: + en_US: values + zh_Hans: 新增行的表格内容 + human_description: + en_US: | + Content of the new rows, array of objects in string format, each array represents a row of table data, format like: [ [ "ID","Name","Age" ],[ 1,"Zhang San",10 ],[ 2,"Li Si",11 ] ]. + zh_Hans: 新增行的表格内容,数组对象字符串,每个数组一行表格数据,格式,如:[["编号","姓名","年龄"],[1,"张三",10],[2,"李四",11]]。 + llm_description: 新增行的表格内容,数组对象字符串,每个数组一行表格数据,格式,如:[["编号","姓名","年龄"],[1,"张三",10],[2,"李四",11]]。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.py new file mode 100644 index 0000000000..647364fab0 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.py @@ -0,0 +1,19 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class CreateSpreadsheetTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + title = tool_parameters.get("title") + folder_token = tool_parameters.get("folder_token") + + res = client.create_spreadsheet(title, folder_token) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.yaml new file mode 100644 index 0000000000..931310e631 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/create_spreadsheet.yaml @@ -0,0 +1,35 @@ +identity: + name: create_spreadsheet + author: Doug Lea + label: + en_US: Create Spreadsheet + zh_Hans: 创建电子表格 +description: + human: + en_US: Create Spreadsheet + zh_Hans: 创建电子表格 + llm: A tool for creating spreadsheets. 
(创建电子表格) +parameters: + - name: title + type: string + required: false + label: + en_US: Spreadsheet Title + zh_Hans: 电子表格标题 + human_description: + en_US: The title of the spreadsheet + zh_Hans: 电子表格的标题 + llm_description: 电子表格的标题 + form: llm + + - name: folder_token + type: string + required: false + label: + en_US: Folder Token + zh_Hans: 文件夹 token + human_description: + en_US: The token of the folder, supports folder URL input, e.g., https://bytedance.larkoffice.com/drive/folder/CxHEf4DCSlNkL2dUTCJcPRgentg + zh_Hans: 文件夹 token,支持文件夹 URL 输入,如:https://bytedance.larkoffice.com/drive/folder/CxHEf4DCSlNkL2dUTCJcPRgentg + llm_description: 文件夹 token,支持文件夹 URL 输入,如:https://bytedance.larkoffice.com/drive/folder/CxHEf4DCSlNkL2dUTCJcPRgentg + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.py new file mode 100644 index 0000000000..dda8c59daf --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.py @@ -0,0 +1,19 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class GetSpreadsheetTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + user_id_type = tool_parameters.get("user_id_type", "open_id") + + res = client.get_spreadsheet(spreadsheet_token, user_id_type) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.yaml new file mode 100644 index 0000000000..c519938617 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/get_spreadsheet.yaml @@ -0,0 +1,49 @@ +identity: + name: get_spreadsheet + author: Doug Lea + label: + en_US: Get Spreadsheet + zh_Hans: 获取电子表格信息 +description: + human: + en_US: Get Spreadsheet + zh_Hans: 获取电子表格信息 + llm: A tool for getting information from spreadsheets. (获取电子表格信息) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: Spreadsheet Token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 URL。 + llm_description: 电子表格 token,支持输入电子表格 URL。 + form: llm + + - name: user_id_type + type: select + required: false + options: + - value: open_id + label: + en_US: open_id + zh_Hans: open_id + - value: union_id + label: + en_US: union_id + zh_Hans: union_id + - value: user_id + label: + en_US: user_id + zh_Hans: user_id + default: "open_id" + label: + en_US: user_id_type + zh_Hans: 用户 ID 类型 + human_description: + en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. 
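+      # The same three ID types recur across the document and spreadsheet tools; open_id is always the default.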
+ zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.py new file mode 100644 index 0000000000..98497791c0 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.py @@ -0,0 +1,18 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class ListSpreadsheetSheetsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + + res = client.list_spreadsheet_sheets(spreadsheet_token) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.yaml new file mode 100644 index 0000000000..c6a7ef45d4 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/list_spreadsheet_sheets.yaml @@ -0,0 +1,23 @@ +identity: + name: list_spreadsheet_sheets + author: Doug Lea + label: + en_US: List Spreadsheet Sheets + zh_Hans: 列出电子表格所有工作表 +description: + human: + en_US: List Spreadsheet Sheets + zh_Hans: 列出电子表格所有工作表 + llm: A tool for listing all sheets in a spreadsheet. (列出电子表格所有工作表) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: Spreadsheet Token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. 
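+      # A full spreadsheet URL is accepted in place of the bare token (illustrative shape: https://example.larkoffice.com/sheets/<token>).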
+ zh_Hans: 电子表格 token,支持输入电子表格 URL。 + llm_description: 电子表格 token,支持输入电子表格 URL。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.py new file mode 100644 index 0000000000..ebe3f619d0 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class ReadColsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + sheet_id = tool_parameters.get("sheet_id") + sheet_name = tool_parameters.get("sheet_name") + start_col = tool_parameters.get("start_col") + num_cols = tool_parameters.get("num_cols") + user_id_type = tool_parameters.get("user_id_type", "open_id") + + res = client.read_cols(spreadsheet_token, sheet_id, sheet_name, start_col, num_cols, user_id_type) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.yaml new file mode 100644 index 0000000000..3273857b70 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_cols.yaml @@ -0,0 +1,97 @@ +identity: + name: read_cols + author: Doug Lea + label: + en_US: Read Cols + zh_Hans: 读取工作表列数据 +description: + human: + en_US: Read Cols + zh_Hans: 读取工作表列数据 + llm: A tool for reading column data from a spreadsheet. (读取工作表列数据) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: spreadsheet_token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 url。 + llm_description: 电子表格 token,支持输入电子表格 url。 + form: llm + + - name: sheet_id + type: string + required: false + label: + en_US: sheet_id + zh_Hans: 工作表 ID + human_description: + en_US: Sheet ID, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表 ID,与 sheet_name 二者其一必填。 + llm_description: 工作表 ID,与 sheet_name 二者其一必填。 + form: llm + + - name: sheet_name + type: string + required: false + label: + en_US: sheet_name + zh_Hans: 工作表名称 + human_description: + en_US: Sheet name, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表名称,与 sheet_id 二者其一必填。 + llm_description: 工作表名称,与 sheet_id 二者其一必填。 + form: llm + + - name: user_id_type + type: select + required: false + options: + - value: open_id + label: + en_US: open_id + zh_Hans: open_id + - value: union_id + label: + en_US: union_id + zh_Hans: union_id + - value: user_id + label: + en_US: user_id + zh_Hans: user_id + default: "open_id" + label: + en_US: user_id_type + zh_Hans: 用户 ID 类型 + human_description: + en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. + zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form + + - name: start_col + type: number + required: false + label: + en_US: start_col + zh_Hans: 起始列号 + human_description: + en_US: Starting column number, starting from 1. 
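+      # Illustrative: start_col=1 with num_cols=3 reads columns A through C.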
+ zh_Hans: 起始列号,从 1 开始。 + llm_description: 起始列号,从 1 开始。 + form: llm + + - name: num_cols + type: number + required: true + label: + en_US: num_cols + zh_Hans: 读取列数 + human_description: + en_US: Number of columns to read. + zh_Hans: 读取列数 + llm_description: 读取列数 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.py new file mode 100644 index 0000000000..86b91b104b --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class ReadRowsTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + sheet_id = tool_parameters.get("sheet_id") + sheet_name = tool_parameters.get("sheet_name") + start_row = tool_parameters.get("start_row") + num_rows = tool_parameters.get("num_rows") + user_id_type = tool_parameters.get("user_id_type", "open_id") + + res = client.read_rows(spreadsheet_token, sheet_id, sheet_name, start_row, num_rows, user_id_type) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.yaml new file mode 100644 index 0000000000..3e9206e8ef --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_rows.yaml @@ -0,0 +1,97 @@ +identity: + name: read_rows + author: Doug Lea + label: + en_US: Read Rows + zh_Hans: 读取工作表行数据 +description: + human: + en_US: Read Rows + zh_Hans: 读取工作表行数据 + llm: A tool for reading row data from a spreadsheet. (读取工作表行数据) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: spreadsheet_token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 url。 + llm_description: 电子表格 token,支持输入电子表格 url。 + form: llm + + - name: sheet_id + type: string + required: false + label: + en_US: sheet_id + zh_Hans: 工作表 ID + human_description: + en_US: Sheet ID, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表 ID,与 sheet_name 二者其一必填。 + llm_description: 工作表 ID,与 sheet_name 二者其一必填。 + form: llm + + - name: sheet_name + type: string + required: false + label: + en_US: sheet_name + zh_Hans: 工作表名称 + human_description: + en_US: Sheet name, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表名称,与 sheet_id 二者其一必填。 + llm_description: 工作表名称,与 sheet_id 二者其一必填。 + form: llm + + - name: user_id_type + type: select + required: false + options: + - value: open_id + label: + en_US: open_id + zh_Hans: open_id + - value: union_id + label: + en_US: union_id + zh_Hans: union_id + - value: user_id + label: + en_US: user_id + zh_Hans: user_id + default: "open_id" + label: + en_US: user_id_type + zh_Hans: 用户 ID 类型 + human_description: + en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. 
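+      # Same select as in read_cols; the row parameters below mirror its column parameters.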
+ zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form + + - name: start_row + type: number + required: false + label: + en_US: start_row + zh_Hans: 起始行号 + human_description: + en_US: Starting row number, starting from 1. + zh_Hans: 起始行号,从 1 开始。 + llm_description: 起始行号,从 1 开始。 + form: llm + + - name: num_rows + type: number + required: true + label: + en_US: num_rows + zh_Hans: 读取行数 + human_description: + en_US: Number of rows to read. + zh_Hans: 读取行数 + llm_description: 读取行数 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.py b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.py new file mode 100644 index 0000000000..ddd607d878 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class ReadTableTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + spreadsheet_token = tool_parameters.get("spreadsheet_token") + sheet_id = tool_parameters.get("sheet_id") + sheet_name = tool_parameters.get("sheet_name") + num_range = tool_parameters.get("num_range") + query = tool_parameters.get("query") + user_id_type = tool_parameters.get("user_id_type", "open_id") + + res = client.read_table(spreadsheet_token, sheet_id, sheet_name, num_range, query, user_id_type) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.yaml b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.yaml new file mode 100644 index 0000000000..e3dc80e1eb --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_spreadsheet/tools/read_table.yaml @@ -0,0 +1,122 @@ +identity: + name: read_table + author: Doug Lea + label: + en_US: Read Table + zh_Hans: 自定义读取电子表格行列数据 +description: + human: + en_US: Read Table + zh_Hans: 自定义读取电子表格行列数据 + llm: A tool for custom reading of row and column data from a spreadsheet. (自定义读取电子表格行列数据) +parameters: + - name: spreadsheet_token + type: string + required: true + label: + en_US: spreadsheet_token + zh_Hans: 电子表格 token + human_description: + en_US: Spreadsheet token, supports input of spreadsheet URL. + zh_Hans: 电子表格 token,支持输入电子表格 url。 + llm_description: 电子表格 token,支持输入电子表格 url。 + form: llm + + - name: sheet_id + type: string + required: false + label: + en_US: sheet_id + zh_Hans: 工作表 ID + human_description: + en_US: Sheet ID, either sheet_id or sheet_name must be filled. + zh_Hans: 工作表 ID,与 sheet_name 二者其一必填。 + llm_description: 工作表 ID,与 sheet_name 二者其一必填。 + form: llm + + - name: sheet_name + type: string + required: false + label: + en_US: sheet_name + zh_Hans: 工作表名称 + human_description: + en_US: Sheet name, either sheet_id or sheet_name must be filled. 
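+      # As with the other spreadsheet tools, supply one of sheet_id or sheet_name.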
+ zh_Hans: 工作表名称,与 sheet_id 二者其一必填。 + llm_description: 工作表名称,与 sheet_id 二者其一必填。 + form: llm + + - name: user_id_type + type: select + required: false + options: + - value: open_id + label: + en_US: open_id + zh_Hans: open_id + - value: union_id + label: + en_US: union_id + zh_Hans: union_id + - value: user_id + label: + en_US: user_id + zh_Hans: user_id + default: "open_id" + label: + en_US: user_id_type + zh_Hans: 用户 ID 类型 + human_description: + en_US: User ID type, optional values are open_id, union_id, user_id, with a default value of open_id. + zh_Hans: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + llm_description: 用户 ID 类型,可选值有 open_id、union_id、user_id,默认值为 open_id。 + form: form + + - name: start_row + type: number + required: false + label: + en_US: start_row + zh_Hans: 起始行号 + human_description: + en_US: Starting row number, starting from 1. + zh_Hans: 起始行号,从 1 开始。 + llm_description: 起始行号,从 1 开始。 + form: llm + + - name: num_rows + type: number + required: false + label: + en_US: num_rows + zh_Hans: 读取行数 + human_description: + en_US: Number of rows to read. + zh_Hans: 读取行数 + llm_description: 读取行数 + form: llm + + - name: range + type: string + required: false + label: + en_US: range + zh_Hans: 取数范围 + human_description: + en_US: | + Data range, format like: A1:B2, can be empty when query=all. + zh_Hans: 取数范围,格式如:A1:B2,query=all 时可为空。 + llm_description: 取数范围,格式如:A1:B2,query=all 时可为空。 + form: llm + + - name: query + type: string + required: false + label: + en_US: query + zh_Hans: 查询 + human_description: + en_US: Pass "all" to query all data in the table, but no more than 100 columns. + zh_Hans: 传 all,表示查询表格所有数据,但最多查询 100 列数据。 + llm_description: 传 all,表示查询表格所有数据,但最多查询 100 列数据。 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_task/_assets/icon.png b/api/core/tools/provider/builtin/feishu_task/_assets/icon.png new file mode 100644 index 0000000000..3485be0d0f Binary files /dev/null and b/api/core/tools/provider/builtin/feishu_task/_assets/icon.png differ diff --git a/api/core/tools/provider/builtin/feishu_task/feishu_task.py b/api/core/tools/provider/builtin/feishu_task/feishu_task.py new file mode 100644 index 0000000000..6df05968d8 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/feishu_task.py @@ -0,0 +1,7 @@ +from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController +from core.tools.utils.feishu_api_utils import auth + + +class FeishuTaskProvider(BuiltinToolProviderController): + def _validate_credentials(self, credentials: dict) -> None: + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_task/feishu_task.yaml b/api/core/tools/provider/builtin/feishu_task/feishu_task.yaml new file mode 100644 index 0000000000..88736f79a0 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/feishu_task.yaml @@ -0,0 +1,36 @@ +identity: + author: Doug Lea + name: feishu_task + label: + en_US: Feishu Task + zh_Hans: 飞书任务 + description: + en_US: | + Feishu Task, requires the following permissions: task:task:write、contact:user.id:readonly. 
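+    # Assumption from the scope names: task:task:write allows creating and updating tasks, while contact:user.id:readonly lets add_members resolve emails and phone numbers to user IDs.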
+ zh_Hans: | + 飞书任务,需要开通以下权限: task:task:write、contact:user.id:readonly。 + icon: icon.png + tags: + - social + - productivity +credentials_for_provider: + app_id: + type: text-input + required: true + label: + en_US: APP ID + placeholder: + en_US: Please input your feishu app id + zh_Hans: 请输入你的飞书 app id + help: + en_US: Get your app_id and app_secret from Feishu + zh_Hans: 从飞书获取您的 app_id 和 app_secret + url: https://open.larkoffice.com/app + app_secret: + type: secret-input + required: true + label: + en_US: APP Secret + placeholder: + en_US: Please input your app secret + zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_task/tools/add_members.py b/api/core/tools/provider/builtin/feishu_task/tools/add_members.py new file mode 100644 index 0000000000..e58ed22e0f --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/tools/add_members.py @@ -0,0 +1,20 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class AddMembersTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + task_guid = tool_parameters.get("task_guid") + member_phone_or_email = tool_parameters.get("member_phone_or_email") + member_role = tool_parameters.get("member_role", "follower") + + res = client.add_members(task_guid, member_phone_or_email, member_role) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_task/tools/add_members.yaml b/api/core/tools/provider/builtin/feishu_task/tools/add_members.yaml new file mode 100644 index 0000000000..063c0f7f04 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/tools/add_members.yaml @@ -0,0 +1,58 @@ +identity: + name: add_members + author: Doug Lea + label: + en_US: Add Members + zh_Hans: 添加任务成员 +description: + human: + en_US: Add Members + zh_Hans: 添加任务成员 + llm: A tool for adding members to a Feishu task.(添加任务成员) +parameters: + - name: task_guid + type: string + required: true + label: + en_US: Task GUID + zh_Hans: 任务 GUID + human_description: + en_US: | + The GUID of the task to be added, supports passing either the Task ID or the Task link URL. Example of Task ID: 8b5425ec-9f2a-43bd-a3ab-01912f50282b; Example of Task link URL: https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + zh_Hans: 要添加的任务的 GUID,支持传任务 ID 和任务链接 URL。任务 ID 示例:8b5425ec-9f2a-43bd-a3ab-01912f50282b;任务链接 URL 示例:https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + llm_description: 要添加的任务的 GUID,支持传任务 ID 和任务链接 URL。任务 ID 示例:8b5425ec-9f2a-43bd-a3ab-01912f50282b;任务链接 URL 示例:https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + form: llm + + - name: member_phone_or_email + type: string + required: true + label: + en_US: Task Member Phone Or Email + zh_Hans: 任务成员的电话或邮箱 + human_description: + en_US: A list of member emails or phone numbers, separated by commas. 
+      zh_Hans: 任务成员邮箱或者手机号列表,使用逗号分隔。
+    llm_description: 任务成员邮箱或者手机号列表,使用逗号分隔。
+    form: llm
+
+  - name: member_role
+    type: select
+    required: true
+    options:
+      - value: assignee
+        label:
+          en_US: assignee
+          zh_Hans: 负责人
+      - value: follower
+        label:
+          en_US: follower
+          zh_Hans: 关注人
+    default: "follower"
+    label:
+      en_US: member_role
+      zh_Hans: 成员的角色
+    human_description:
+      en_US: Member role, optional values are "assignee" (responsible person) and "follower" (observer), with a default value of "follower".
+      zh_Hans: 成员的角色,可选值有 "assignee"(负责人)和 "follower"(关注人),默认值为 "follower"。
+    llm_description: 成员的角色,可选值有 "assignee"(负责人)和 "follower"(关注人),默认值为 "follower"。
+    form: form
diff --git a/api/core/tools/provider/builtin/feishu_task/tools/create_task.py b/api/core/tools/provider/builtin/feishu_task/tools/create_task.py
new file mode 100644
index 0000000000..96cdcd71f6
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_task/tools/create_task.py
@@ -0,0 +1,22 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class CreateTaskTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get("app_id")
+        app_secret = self.runtime.credentials.get("app_secret")
+        client = FeishuRequest(app_id, app_secret)
+
+        summary = tool_parameters.get("summary")
+        start_time = tool_parameters.get("start_time")
+        end_time = tool_parameters.get("end_time")
+        completed_time = tool_parameters.get("completed_time")
+        description = tool_parameters.get("description")
+
+        res = client.create_task(summary, start_time, end_time, completed_time, description)
+
+        return self.create_json_message(res)
diff --git a/api/core/tools/provider/builtin/feishu_task/tools/create_task.yaml b/api/core/tools/provider/builtin/feishu_task/tools/create_task.yaml
new file mode 100644
index 0000000000..7eb4af168b
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_task/tools/create_task.yaml
@@ -0,0 +1,74 @@
+identity:
+  name: create_task
+  author: Doug Lea
+  label:
+    en_US: Create Task
+    zh_Hans: 创建飞书任务
+description:
+  human:
+    en_US: Create Feishu Task
+    zh_Hans: 创建飞书任务
+  llm: A tool for creating tasks in Feishu.(创建飞书任务)
+parameters:
+  - name: summary
+    type: string
+    required: true
+    label:
+      en_US: Task Title
+      zh_Hans: 任务标题
+    human_description:
+      en_US: The title of the task.
+      zh_Hans: 任务标题
+    llm_description: 任务标题
+    form: llm
+
+  - name: description
+    type: string
+    required: false
+    label:
+      en_US: Task Description
+      zh_Hans: 任务备注
+    human_description:
+      en_US: The description or notes for the task.
+      zh_Hans: 任务备注
+    llm_description: 任务备注
+    form: llm
+
+  - name: start_time
+    type: string
+    required: false
+    label:
+      en_US: Start Time
+      zh_Hans: 任务开始时间
+    human_description:
+      en_US: |
+        The start time of the task, in the format: 2006-01-02 15:04:05
+      zh_Hans: 任务开始时间,格式为:2006-01-02 15:04:05
+    llm_description: 任务开始时间,格式为:2006-01-02 15:04:05
+    form: llm
+
+  - name: end_time
+    type: string
+    required: false
+    label:
+      en_US: End Time
+      zh_Hans: 任务结束时间
+    human_description:
+      en_US: |
+        The end time of the task, in the format: 2006-01-02 15:04:05
+      zh_Hans: 任务结束时间,格式为:2006-01-02 15:04:05
+    llm_description: 任务结束时间,格式为:2006-01-02 15:04:05
+    form: llm
+
+  - name: completed_time
+    type: string
+    required: false
+    label:
+      en_US: Completed Time
+      zh_Hans: 任务完成时间
+    human_description:
+      en_US: |
+        The completion time of the task, in the format: 2006-01-02 15:04:05. Leave empty to create an incomplete task; fill in a specific time to create a completed task.
+      zh_Hans: 任务完成时间,格式为:2006-01-02 15:04:05,不填写表示创建一个未完成任务;填写一个具体的时间表示创建一个已完成任务。
+    llm_description: 任务完成时间,格式为:2006-01-02 15:04:05,不填写表示创建一个未完成任务;填写一个具体的时间表示创建一个已完成任务。
+    form: llm
diff --git a/api/core/tools/provider/builtin/feishu_task/tools/delete_task.py b/api/core/tools/provider/builtin/feishu_task/tools/delete_task.py
new file mode 100644
index 0000000000..dee036fee5
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_task/tools/delete_task.py
@@ -0,0 +1,18 @@
+from typing import Any
+
+from core.tools.entities.tool_entities import ToolInvokeMessage
+from core.tools.tool.builtin_tool import BuiltinTool
+from core.tools.utils.feishu_api_utils import FeishuRequest
+
+
+class DeleteTaskTool(BuiltinTool):
+    def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage:
+        app_id = self.runtime.credentials.get("app_id")
+        app_secret = self.runtime.credentials.get("app_secret")
+        client = FeishuRequest(app_id, app_secret)
+
+        task_guid = tool_parameters.get("task_guid")
+
+        res = client.delete_task(task_guid)
+
+        return self.create_json_message(res)
diff --git a/api/core/tools/provider/builtin/feishu_task/tools/delete_task.yaml b/api/core/tools/provider/builtin/feishu_task/tools/delete_task.yaml
new file mode 100644
index 0000000000..d3f9741367
--- /dev/null
+++ b/api/core/tools/provider/builtin/feishu_task/tools/delete_task.yaml
@@ -0,0 +1,24 @@
+identity:
+  name: delete_task
+  author: Doug Lea
+  label:
+    en_US: Delete Task
+    zh_Hans: 删除飞书任务
+description:
+  human:
+    en_US: Delete Task
+    zh_Hans: 删除飞书任务
+  llm: A tool for deleting tasks in Feishu.(删除飞书任务)
+parameters:
+  - name: task_guid
+    type: string
+    required: true
+    label:
+      en_US: Task GUID
+      zh_Hans: 任务 GUID
+    human_description:
+      en_US: |
+        The GUID of the task to be deleted, supports passing either the Task ID or the Task link URL.
Example of Task ID: 8b5425ec-9f2a-43bd-a3ab-01912f50282b; Example of Task link URL: https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + zh_Hans: 要删除的任务的 GUID,支持传任务 ID 和任务链接 URL。任务 ID 示例:8b5425ec-9f2a-43bd-a3ab-01912f50282b;任务链接 URL 示例:https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + llm_description: 要删除的任务的 GUID,支持传任务 ID 和任务链接 URL。任务 ID 示例:8b5425ec-9f2a-43bd-a3ab-01912f50282b;任务链接 URL 示例:https://applink.feishu-pre.net/client/todo/detail?guid=8c6bf822-e4da-449a-b82a-dc44020f9be9&suite_entity_num=t21587362 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_task/tools/update_task.py b/api/core/tools/provider/builtin/feishu_task/tools/update_task.py new file mode 100644 index 0000000000..4a48cd283a --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/tools/update_task.py @@ -0,0 +1,23 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class UpdateTaskTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + task_guid = tool_parameters.get("task_guid") + summary = tool_parameters.get("summary") + start_time = tool_parameters.get("start_time") + end_time = tool_parameters.get("end_time") + completed_time = tool_parameters.get("completed_time") + description = tool_parameters.get("description") + + res = client.update_task(task_guid, summary, start_time, end_time, completed_time, description) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_task/tools/update_task.yaml b/api/core/tools/provider/builtin/feishu_task/tools/update_task.yaml new file mode 100644 index 0000000000..83c9bcb1c4 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_task/tools/update_task.yaml @@ -0,0 +1,89 @@ +identity: + name: update_task + author: Doug Lea + label: + en_US: Update Task + zh_Hans: 更新飞书任务 +description: + human: + en_US: Update Feishu Task + zh_Hans: 更新飞书任务 + llm: A tool for updating tasks in Feishu.(更新飞书任务) +parameters: + - name: task_guid + type: string + required: true + label: + en_US: Task GUID + zh_Hans: 任务 GUID + human_description: + en_US: | + The task ID, supports inputting either the Task ID or the Task link URL. Example of Task ID: 42cad8a0-f8c8-4344-9be2-d1d7e8e91b64; Example of Task link URL: https://applink.feishu-pre.net/client/todo/detail?guid=42cad8a0-f8c8-4344-9be2-d1d7e8e91b64&suite_entity_num=t21700217 + zh_Hans: | + 任务ID,支持传入任务 ID 和任务链接 URL。任务 ID 示例: 42cad8a0-f8c8-4344-9be2-d1d7e8e91b64;任务链接 URL 示例: https://applink.feishu-pre.net/client/todo/detail?guid=42cad8a0-f8c8-4344-9be2-d1d7e8e91b64&suite_entity_num=t21700217 + llm_description: | + 任务ID,支持传入任务 ID 和任务链接 URL。任务 ID 示例: 42cad8a0-f8c8-4344-9be2-d1d7e8e91b64;任务链接 URL 示例: https://applink.feishu-pre.net/client/todo/detail?guid=42cad8a0-f8c8-4344-9be2-d1d7e8e91b64&suite_entity_num=t21700217 + form: llm + + - name: summary + type: string + required: true + label: + en_US: Task Title + zh_Hans: 任务标题 + human_description: + en_US: The title of the task. 
+ zh_Hans: 任务标题 + llm_description: 任务标题 + form: llm + + - name: description + type: string + required: false + label: + en_US: Task Description + zh_Hans: 任务备注 + human_description: + en_US: The description or notes for the task. + zh_Hans: 任务备注 + llm_description: 任务备注 + form: llm + + - name: start_time + type: string + required: false + label: + en_US: Start Time + zh_Hans: 任务开始时间 + human_description: + en_US: | + The start time of the task, in the format: 2006-01-02 15:04:05 + zh_Hans: 任务开始时间,格式为:2006-01-02 15:04:05 + llm_description: 任务开始时间,格式为:2006-01-02 15:04:05 + form: llm + + - name: end_time + type: string + required: false + label: + en_US: End Time + zh_Hans: 任务结束时间 + human_description: + en_US: | + The end time of the task, in the format: 2006-01-02 15:04:05 + zh_Hans: 任务结束时间,格式为:2006-01-02 15:04:05 + llm_description: 任务结束时间,格式为:2006-01-02 15:04:05 + form: llm + + - name: completed_time + type: string + required: false + label: + en_US: Completed Time + zh_Hans: 任务完成时间 + human_description: + en_US: | + The completion time of the task, in the format: 2006-01-02 15:04:05 + zh_Hans: 任务完成时间,格式为:2006-01-02 15:04:05 + llm_description: 任务完成时间,格式为:2006-01-02 15:04:05 + form: llm diff --git a/api/core/tools/provider/builtin/feishu_wiki/_assets/icon.png b/api/core/tools/provider/builtin/feishu_wiki/_assets/icon.png new file mode 100644 index 0000000000..878672c9ae Binary files /dev/null and b/api/core/tools/provider/builtin/feishu_wiki/_assets/icon.png differ diff --git a/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.py b/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.py new file mode 100644 index 0000000000..6c5fccb1a3 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.py @@ -0,0 +1,7 @@ +from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController +from core.tools.utils.feishu_api_utils import auth + + +class FeishuWikiProvider(BuiltinToolProviderController): + def _validate_credentials(self, credentials: dict) -> None: + auth(credentials) diff --git a/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.yaml b/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.yaml new file mode 100644 index 0000000000..1fb5f71cbc --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_wiki/feishu_wiki.yaml @@ -0,0 +1,36 @@ +identity: + author: Doug Lea + name: feishu_wiki + label: + en_US: Feishu Wiki + zh_Hans: 飞书知识库 + description: + en_US: | + Feishu Wiki, requires the following permissions: wiki:wiki:readonly. 
+ zh_Hans: | + 飞书知识库,需要开通以下权限: wiki:wiki:readonly。 + icon: icon.png + tags: + - social + - productivity +credentials_for_provider: + app_id: + type: text-input + required: true + label: + en_US: APP ID + placeholder: + en_US: Please input your feishu app id + zh_Hans: 请输入你的飞书 app id + help: + en_US: Get your app_id and app_secret from Feishu + zh_Hans: 从飞书获取您的 app_id 和 app_secret + url: https://open.larkoffice.com/app + app_secret: + type: secret-input + required: true + label: + en_US: APP Secret + placeholder: + en_US: Please input your app secret + zh_Hans: 请输入你的飞书 app secret diff --git a/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.py b/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.py new file mode 100644 index 0000000000..374b4c9a7d --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.py @@ -0,0 +1,21 @@ +from typing import Any + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.tool.builtin_tool import BuiltinTool +from core.tools.utils.feishu_api_utils import FeishuRequest + + +class GetWikiNodesTool(BuiltinTool): + def _invoke(self, user_id: str, tool_parameters: dict[str, Any]) -> ToolInvokeMessage: + app_id = self.runtime.credentials.get("app_id") + app_secret = self.runtime.credentials.get("app_secret") + client = FeishuRequest(app_id, app_secret) + + space_id = tool_parameters.get("space_id") + parent_node_token = tool_parameters.get("parent_node_token") + page_token = tool_parameters.get("page_token") + page_size = tool_parameters.get("page_size") + + res = client.get_wiki_nodes(space_id, parent_node_token, page_token, page_size) + + return self.create_json_message(res) diff --git a/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.yaml b/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.yaml new file mode 100644 index 0000000000..7d6ac3c824 --- /dev/null +++ b/api/core/tools/provider/builtin/feishu_wiki/tools/get_wiki_nodes.yaml @@ -0,0 +1,63 @@ +identity: + name: get_wiki_nodes + author: Doug Lea + label: + en_US: Get Wiki Nodes + zh_Hans: 获取知识空间子节点列表 +description: + human: + en_US: | + Get the list of child nodes in Wiki, make sure the app/bot is a member of the wiki space. See How to add an app as a wiki base administrator (member). https://open.feishu.cn/document/server-docs/docs/wiki-v2/wiki-qa + zh_Hans: | + 获取知识库全部子节点列表,请确保应用/机器人为知识空间成员。参阅如何将应用添加为知识库管理员(成员)。https://open.feishu.cn/document/server-docs/docs/wiki-v2/wiki-qa + llm: A tool for getting all sub-nodes of a knowledge base.(获取知识空间子节点列表) +parameters: + - name: space_id + type: string + required: true + label: + en_US: Space Id + zh_Hans: 知识空间 ID + human_description: + en_US: | + The ID of the knowledge space. Supports space link URL, for example: https://svi136aogf123.feishu.cn/wiki/settings/7166950623940706332 + zh_Hans: 知识空间 ID,支持空间链接 URL,例如:https://svi136aogf123.feishu.cn/wiki/settings/7166950623940706332 + llm_description: 知识空间 ID,支持空间链接 URL,例如:https://svi136aogf123.feishu.cn/wiki/settings/7166950623940706332 + form: llm + + - name: page_size + type: number + required: false + default: 10 + label: + en_US: Page Size + zh_Hans: 分页大小 + human_description: + en_US: The size of each page, with a maximum value of 50. + zh_Hans: 分页大小,最大值 50。 + llm_description: 分页大小,最大值 50。 + form: llm + + - name: page_token + type: string + required: false + label: + en_US: Page Token + zh_Hans: 分页标记 + human_description: + en_US: The pagination token. 
Leave empty for the first request to start from the beginning; if the paginated query result has more items, a new page_token will be returned, which can be used to get the next set of results. + zh_Hans: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + llm_description: 分页标记,第一次请求不填,表示从头开始遍历;分页查询结果还有更多项时会同时返回新的 page_token,下次遍历可采用该 page_token 获取查询结果。 + form: llm + + - name: parent_node_token + type: string + required: false + label: + en_US: Parent Node Token + zh_Hans: 父节点 token + human_description: + en_US: The token of the parent node. + zh_Hans: 父节点 token + llm_description: 父节点 token + form: llm diff --git a/api/core/tools/provider/builtin/jina/jina.yaml b/api/core/tools/provider/builtin/jina/jina.yaml index 346175c41f..af3ca23ffa 100644 --- a/api/core/tools/provider/builtin/jina/jina.yaml +++ b/api/core/tools/provider/builtin/jina/jina.yaml @@ -6,9 +6,9 @@ identity: zh_Hans: Jina AI pt_BR: Jina AI description: - en_US: Convert any URL to an LLM-friendly input or perform searches on the web for grounding information. Experience improved output for your agent and RAG systems at no cost. - zh_Hans: 将任何URL转换为LLM易读的输入或在网页上搜索引擎上搜索引擎。 - pt_BR: Converte qualquer URL em uma entrada LLm-fácil de ler ou realize pesquisas na web para obter informação de grounding. Tenha uma experiência melhor para seu agente e sistemas RAG sem custo. + en_US: Your Search Foundation, Supercharged! + zh_Hans: 您的搜索底座,从此不同! + pt_BR: Your Search Foundation, Supercharged! icon: icon.svg tags: - search diff --git a/api/core/tools/provider/builtin/searchapi/tools/google.yaml b/api/core/tools/provider/builtin/searchapi/tools/google.yaml index b69a0e1d3e..0dc1b66724 100644 --- a/api/core/tools/provider/builtin/searchapi/tools/google.yaml +++ b/api/core/tools/provider/builtin/searchapi/tools/google.yaml @@ -65,206 +65,1206 @@ parameters: form: form default: US options: + - value: AF + label: + en_US: Afghanistan + zh_Hans: 阿富汗 + pt_BR: Afeganistão + - value: AL + label: + en_US: Albania + zh_Hans: 阿尔巴尼亚 + pt_BR: Albânia + - value: DZ + label: + en_US: Algeria + zh_Hans: 阿尔及利亚 + pt_BR: Argélia + - value: AS + label: + en_US: American Samoa + zh_Hans: 美属萨摩亚 + pt_BR: Samoa Americana + - value: AD + label: + en_US: Andorra + zh_Hans: 安道尔 + pt_BR: Andorra + - value: AO + label: + en_US: Angola + zh_Hans: 安哥拉 + pt_BR: Angola + - value: AI + label: + en_US: Anguilla + zh_Hans: 安圭拉 + pt_BR: Anguilla + - value: AQ + label: + en_US: Antarctica + zh_Hans: 南极洲 + pt_BR: Antártica + - value: AG + label: + en_US: Antigua and Barbuda + zh_Hans: 安提瓜和巴布达 + pt_BR: Antígua e Barbuda - value: AR label: en_US: Argentina zh_Hans: 阿根廷 pt_BR: Argentina + - value: AM + label: + en_US: Armenia + zh_Hans: 亚美尼亚 + pt_BR: Armênia + - value: AW + label: + en_US: Aruba + zh_Hans: 阿鲁巴 + pt_BR: Aruba - value: AU label: en_US: Australia zh_Hans: 澳大利亚 - pt_BR: Australia + pt_BR: Austrália - value: AT label: en_US: Austria zh_Hans: 奥地利 - pt_BR: Austria + pt_BR: Áustria + - value: AZ + label: + en_US: Azerbaijan + zh_Hans: 阿塞拜疆 + pt_BR: Azerbaijão + - value: BS + label: + en_US: Bahamas + zh_Hans: 巴哈马 + pt_BR: Bahamas + - value: BH + label: + en_US: Bahrain + zh_Hans: 巴林 + pt_BR: Bahrein + - value: BD + label: + en_US: Bangladesh + zh_Hans: 孟加拉国 + pt_BR: Bangladesh + - value: BB + label: + en_US: Barbados + zh_Hans: 巴巴多斯 + pt_BR: Barbados + - value: BY + label: + en_US: Belarus + zh_Hans: 白俄罗斯 + pt_BR: Bielorrússia - value: BE label: en_US: Belgium zh_Hans: 比利时 - pt_BR: Belgium + pt_BR: Bélgica + - value: BZ + label: + en_US: 
Belize + zh_Hans: 伯利兹 + pt_BR: Belize + - value: BJ + label: + en_US: Benin + zh_Hans: 贝宁 + pt_BR: Benim + - value: BM + label: + en_US: Bermuda + zh_Hans: 百慕大 + pt_BR: Bermudas + - value: BT + label: + en_US: Bhutan + zh_Hans: 不丹 + pt_BR: Butão + - value: BO + label: + en_US: Bolivia + zh_Hans: 玻利维亚 + pt_BR: Bolívia + - value: BA + label: + en_US: Bosnia and Herzegovina + zh_Hans: 波斯尼亚和黑塞哥维那 + pt_BR: Bósnia e Herzegovina + - value: BW + label: + en_US: Botswana + zh_Hans: 博茨瓦纳 + pt_BR: Botsuana + - value: BV + label: + en_US: Bouvet Island + zh_Hans: 布韦岛 + pt_BR: Ilha Bouvet - value: BR label: en_US: Brazil zh_Hans: 巴西 - pt_BR: Brazil + pt_BR: Brasil + - value: IO + label: + en_US: British Indian Ocean Territory + zh_Hans: 英属印度洋领地 + pt_BR: Território Britânico do Oceano Índico + - value: BN + label: + en_US: Brunei Darussalam + zh_Hans: 文莱 + pt_BR: Brunei Darussalam + - value: BG + label: + en_US: Bulgaria + zh_Hans: 保加利亚 + pt_BR: Bulgária + - value: BF + label: + en_US: Burkina Faso + zh_Hans: 布基纳法索 + pt_BR: Burkina Faso + - value: BI + label: + en_US: Burundi + zh_Hans: 布隆迪 + pt_BR: Burundi + - value: KH + label: + en_US: Cambodia + zh_Hans: 柬埔寨 + pt_BR: Camboja + - value: CM + label: + en_US: Cameroon + zh_Hans: 喀麦隆 + pt_BR: Camarões - value: CA label: en_US: Canada zh_Hans: 加拿大 - pt_BR: Canada + pt_BR: Canadá + - value: CV + label: + en_US: Cape Verde + zh_Hans: 佛得角 + pt_BR: Cabo Verde + - value: KY + label: + en_US: Cayman Islands + zh_Hans: 开曼群岛 + pt_BR: Ilhas Cayman + - value: CF + label: + en_US: Central African Republic + zh_Hans: 中非共和国 + pt_BR: República Centro-Africana + - value: TD + label: + en_US: Chad + zh_Hans: 乍得 + pt_BR: Chade - value: CL label: en_US: Chile zh_Hans: 智利 pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - value: CN label: en_US: China zh_Hans: 中国 pt_BR: China + - value: CX + label: + en_US: Christmas Island + zh_Hans: 圣诞岛 + pt_BR: Ilha do Natal + - value: CC + label: + en_US: Cocos (Keeling) Islands + zh_Hans: 科科斯(基林)群岛 + pt_BR: Ilhas Cocos (Keeling) + - value: CO + label: + en_US: Colombia + zh_Hans: 哥伦比亚 + pt_BR: Colômbia + - value: KM + label: + en_US: Comoros + zh_Hans: 科摩罗 + pt_BR: Comores + - value: CG + label: + en_US: Congo + zh_Hans: 刚果 + pt_BR: Congo + - value: CD + label: + en_US: Congo, the Democratic Republic of the + zh_Hans: 刚果民主共和国 + pt_BR: Congo, República Democrática do + - value: CK + label: + en_US: Cook Islands + zh_Hans: 库克群岛 + pt_BR: Ilhas Cook + - value: CR + label: + en_US: Costa Rica + zh_Hans: 哥斯达黎加 + pt_BR: Costa Rica + - value: CI + label: + en_US: Cote D'ivoire + zh_Hans: 科特迪瓦 + pt_BR: Costa do Marfim + - value: HR + label: + en_US: Croatia + zh_Hans: 克罗地亚 + pt_BR: Croácia + - value: CU + label: + en_US: Cuba + zh_Hans: 古巴 + pt_BR: Cuba + - value: CY + label: + en_US: Cyprus + zh_Hans: 塞浦路斯 + pt_BR: Chipre - value: CZ label: en_US: Czech Republic zh_Hans: 捷克共和国 - pt_BR: Czech Republic + pt_BR: República Tcheca - value: DK label: en_US: Denmark zh_Hans: 丹麦 - pt_BR: Denmark + pt_BR: Dinamarca + - value: DJ + label: + en_US: Djibouti + zh_Hans: 吉布提 + pt_BR: Djibuti + - value: DM + label: + en_US: Dominica + zh_Hans: 多米尼克 + pt_BR: Dominica + - value: DO + label: + en_US: Dominican Republic + zh_Hans: 多米尼加共和国 + pt_BR: República Dominicana + - value: EC + label: + en_US: Ecuador + zh_Hans: 厄瓜多尔 + pt_BR: Equador + - value: EG + label: + en_US: Egypt + zh_Hans: 埃及 + pt_BR: Egito + - value: SV + label: + en_US: El Salvador + zh_Hans: 萨尔瓦多 + pt_BR: El Salvador + - value: GQ + label: + en_US: 
Equatorial Guinea + zh_Hans: 赤道几内亚 + pt_BR: Guiné Equatorial + - value: ER + label: + en_US: Eritrea + zh_Hans: 厄立特里亚 + pt_BR: Eritreia + - value: EE + label: + en_US: Estonia + zh_Hans: 爱沙尼亚 + pt_BR: Estônia + - value: ET + label: + en_US: Ethiopia + zh_Hans: 埃塞俄比亚 + pt_BR: Etiópia + - value: FK + label: + en_US: Falkland Islands (Malvinas) + zh_Hans: 福克兰群岛(马尔维纳斯) + pt_BR: Ilhas Falkland (Malvinas) + - value: FO + label: + en_US: Faroe Islands + zh_Hans: 法罗群岛 + pt_BR: Ilhas Faroe + - value: FJ + label: + en_US: Fiji + zh_Hans: 斐济 + pt_BR: Fiji - value: FI label: en_US: Finland zh_Hans: 芬兰 - pt_BR: Finland + pt_BR: Finlândia - value: FR label: en_US: France zh_Hans: 法国 - pt_BR: France + pt_BR: França + - value: GF + label: + en_US: French Guiana + zh_Hans: 法属圭亚那 + pt_BR: Guiana Francesa + - value: PF + label: + en_US: French Polynesia + zh_Hans: 法属波利尼西亚 + pt_BR: Polinésia Francesa + - value: TF + label: + en_US: French Southern Territories + zh_Hans: 法属南部领地 + pt_BR: Territórios Franceses do Sul + - value: GA + label: + en_US: Gabon + zh_Hans: 加蓬 + pt_BR: Gabão + - value: GM + label: + en_US: Gambia + zh_Hans: 冈比亚 + pt_BR: Gâmbia + - value: GE + label: + en_US: Georgia + zh_Hans: 格鲁吉亚 + pt_BR: Geórgia - value: DE label: en_US: Germany zh_Hans: 德国 - pt_BR: Germany + pt_BR: Alemanha + - value: GH + label: + en_US: Ghana + zh_Hans: 加纳 + pt_BR: Gana + - value: GI + label: + en_US: Gibraltar + zh_Hans: 直布罗陀 + pt_BR: Gibraltar + - value: GR + label: + en_US: Greece + zh_Hans: 希腊 + pt_BR: Grécia + - value: GL + label: + en_US: Greenland + zh_Hans: 格陵兰 + pt_BR: Groenlândia + - value: GD + label: + en_US: Grenada + zh_Hans: 格林纳达 + pt_BR: Granada + - value: GP + label: + en_US: Guadeloupe + zh_Hans: 瓜德罗普 + pt_BR: Guadalupe + - value: GU + label: + en_US: Guam + zh_Hans: 关岛 + pt_BR: Guam + - value: GT + label: + en_US: Guatemala + zh_Hans: 危地马拉 + pt_BR: Guatemala + - value: GN + label: + en_US: Guinea + zh_Hans: 几内亚 + pt_BR: Guiné + - value: GW + label: + en_US: Guinea-Bissau + zh_Hans: 几内亚比绍 + pt_BR: Guiné-Bissau + - value: GY + label: + en_US: Guyana + zh_Hans: 圭亚那 + pt_BR: Guiana + - value: HT + label: + en_US: Haiti + zh_Hans: 海地 + pt_BR: Haiti + - value: HM + label: + en_US: Heard Island and McDonald Islands + zh_Hans: 赫德岛和麦克唐纳群岛 + pt_BR: Ilha Heard e Ilhas McDonald + - value: VA + label: + en_US: Holy See (Vatican City State) + zh_Hans: 教廷(梵蒂冈城国) + pt_BR: Santa Sé (Estado da Cidade do Vaticano) + - value: HN + label: + en_US: Honduras + zh_Hans: 洪都拉斯 + pt_BR: Honduras - value: HK label: en_US: Hong Kong zh_Hans: 香港 pt_BR: Hong Kong + - value: HU + label: + en_US: Hungary + zh_Hans: 匈牙利 + pt_BR: Hungria + - value: IS + label: + en_US: Iceland + zh_Hans: 冰岛 + pt_BR: Islândia - value: IN label: en_US: India zh_Hans: 印度 - pt_BR: India + pt_BR: Índia - value: ID label: en_US: Indonesia zh_Hans: 印度尼西亚 - pt_BR: Indonesia + pt_BR: Indonésia + - value: IR + label: + en_US: Iran, Islamic Republic of + zh_Hans: 伊朗 + pt_BR: Irã + - value: IQ + label: + en_US: Iraq + zh_Hans: 伊拉克 + pt_BR: Iraque + - value: IE + label: + en_US: Ireland + zh_Hans: 爱尔兰 + pt_BR: Irlanda + - value: IL + label: + en_US: Israel + zh_Hans: 以色列 + pt_BR: Israel - value: IT label: en_US: Italy zh_Hans: 意大利 - pt_BR: Italy + pt_BR: Itália + - value: JM + label: + en_US: Jamaica + zh_Hans: 牙买加 + pt_BR: Jamaica - value: JP label: en_US: Japan zh_Hans: 日本 - pt_BR: Japan + pt_BR: Japão + - value: JO + label: + en_US: Jordan + zh_Hans: 约旦 + pt_BR: Jordânia + - value: KZ + label: + en_US: Kazakhstan + zh_Hans: 哈萨克斯坦 + pt_BR: Cazaquistão + - 
value: KE
+        label:
+          en_US: Kenya
+          zh_Hans: 肯尼亚
+          pt_BR: Quênia
+      - value: KI
+        label:
+          en_US: Kiribati
+          zh_Hans: 基里巴斯
+          pt_BR: Kiribati
+      - value: KP
+        label:
+          en_US: Korea, Democratic People's Republic of
+          zh_Hans: 朝鲜
+          pt_BR: Coreia, República Democrática Popular da
       - value: KR
         label:
-          en_US: Korea
+          en_US: Korea, Republic of
           zh_Hans: 韩国
-          pt_BR: Korea
+          pt_BR: Coreia, República da
+      - value: KW
+        label:
+          en_US: Kuwait
+          zh_Hans: 科威特
+          pt_BR: Kuwait
+      - value: KG
+        label:
+          en_US: Kyrgyzstan
+          zh_Hans: 吉尔吉斯斯坦
+          pt_BR: Quirguistão
+      - value: LA
+        label:
+          en_US: Lao People's Democratic Republic
+          zh_Hans: 老挝
+          pt_BR: República Democrática Popular do Laos
+      - value: LV
+        label:
+          en_US: Latvia
+          zh_Hans: 拉脱维亚
+          pt_BR: Letônia
+      - value: LB
+        label:
+          en_US: Lebanon
+          zh_Hans: 黎巴嫩
+          pt_BR: Líbano
+      - value: LS
+        label:
+          en_US: Lesotho
+          zh_Hans: 莱索托
+          pt_BR: Lesoto
+      - value: LR
+        label:
+          en_US: Liberia
+          zh_Hans: 利比里亚
+          pt_BR: Libéria
+      - value: LY
+        label:
+          en_US: Libyan Arab Jamahiriya
+          zh_Hans: 利比亚
+          pt_BR: Líbia
+      - value: LI
+        label:
+          en_US: Liechtenstein
+          zh_Hans: 列支敦士登
+          pt_BR: Liechtenstein
+      - value: LT
+        label:
+          en_US: Lithuania
+          zh_Hans: 立陶宛
+          pt_BR: Lituânia
+      - value: LU
+        label:
+          en_US: Luxembourg
+          zh_Hans: 卢森堡
+          pt_BR: Luxemburgo
+      - value: MO
+        label:
+          en_US: Macao
+          zh_Hans: 澳门
+          pt_BR: Macau
+      - value: MK
+        label:
+          en_US: Macedonia, the Former Yugoslav Republic of
+          zh_Hans: 前南斯拉夫马其顿共和国
+          pt_BR: Macedônia, Ex-República Iugoslava da
+      - value: MG
+        label:
+          en_US: Madagascar
+          zh_Hans: 马达加斯加
+          pt_BR: Madagascar
+      - value: MW
+        label:
+          en_US: Malawi
+          zh_Hans: 马拉维
+          pt_BR: Malaui
       - value: MY
         label:
           en_US: Malaysia
           zh_Hans: 马来西亚
-          pt_BR: Malaysia
+          pt_BR: Malásia
+      - value: MV
+        label:
+          en_US: Maldives
+          zh_Hans: 马尔代夫
+          pt_BR: Maldivas
+      - value: ML
+        label:
+          en_US: Mali
+          zh_Hans: 马里
+          pt_BR: Mali
+      - value: MT
+        label:
+          en_US: Malta
+          zh_Hans: 马耳他
+          pt_BR: Malta
+      - value: MH
+        label:
+          en_US: Marshall Islands
+          zh_Hans: 马绍尔群岛
+          pt_BR: Ilhas Marshall
+      - value: MQ
+        label:
+          en_US: Martinique
+          zh_Hans: 马提尼克
+          pt_BR: Martinica
+      - value: MR
+        label:
+          en_US: Mauritania
+          zh_Hans: 毛里塔尼亚
+          pt_BR: Mauritânia
+      - value: MU
+        label:
+          en_US: Mauritius
+          zh_Hans: 毛里求斯
+          pt_BR: Maurício
+      - value: YT
+        label:
+          en_US: Mayotte
+          zh_Hans: 马约特
+          pt_BR: Mayotte
       - value: MX
         label:
           en_US: Mexico
           zh_Hans: 墨西哥
-          pt_BR: Mexico
+          pt_BR: México
+      - value: FM
+        label:
+          en_US: Micronesia, Federated States of
+          zh_Hans: 密克罗尼西亚联邦
+          pt_BR: Micronésia, Estados Federados da
+      - value: MD
+        label:
+          en_US: Moldova, Republic of
+          zh_Hans: 摩尔多瓦共和国
+          pt_BR: Moldávia, República da
+      - value: MC
+        label:
+          en_US: Monaco
+          zh_Hans: 摩纳哥
+          pt_BR: Mônaco
+      - value: MN
+        label:
+          en_US: Mongolia
+          zh_Hans: 蒙古
+          pt_BR: Mongólia
+      - value: MS
+        label:
+          en_US: Montserrat
+          zh_Hans: 蒙特塞拉特
+          pt_BR: Montserrat
+      - value: MA
+        label:
+          en_US: Morocco
+          zh_Hans: 摩洛哥
+          pt_BR: Marrocos
+      - value: MZ
+        label:
+          en_US: Mozambique
+          zh_Hans: 莫桑比克
+          pt_BR: Moçambique
+      - value: MM
+        label:
+          en_US: Myanmar
+          zh_Hans: 缅甸
+          pt_BR: Mianmar
+      - value: NA
+        label:
+          en_US: Namibia
+          zh_Hans: 纳米比亚
+          pt_BR: Namíbia
+      - value: NR
+        label:
+          en_US: Nauru
+          zh_Hans: 瑙鲁
+          pt_BR: Nauru
+      - value: NP
+        label:
+          en_US: Nepal
+          zh_Hans: 尼泊尔
+          pt_BR: Nepal
       - value: NL
         label:
           en_US: Netherlands
           zh_Hans: 荷兰
-          pt_BR: Netherlands
+          pt_BR: Países Baixos
+      - value: AN
+        label:
+          en_US: Netherlands Antilles
+          zh_Hans: 荷属安的列斯
+          pt_BR: Antilhas Holandesas
+      - value: NC
+        label:
+          en_US: New Caledonia
+          zh_Hans: 新喀里多尼亚
+
pt_BR: Nova Caledônia - value: NZ label: en_US: New Zealand zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' + pt_BR: Nova Zelândia + - value: NI + label: + en_US: Nicaragua + zh_Hans: 尼加拉瓜 + pt_BR: Nicarágua + - value: NE + label: + en_US: Niger + zh_Hans: 尼日尔 + pt_BR: Níger + - value: NG + label: + en_US: Nigeria + zh_Hans: 尼日利亚 + pt_BR: Nigéria + - value: NU + label: + en_US: Niue + zh_Hans: 纽埃 + pt_BR: Niue + - value: NF + label: + en_US: Norfolk Island + zh_Hans: 诺福克岛 + pt_BR: Ilha Norfolk + - value: MP + label: + en_US: Northern Mariana Islands + zh_Hans: 北马里亚纳群岛 + pt_BR: Ilhas Marianas do Norte + - value: "NO" label: en_US: Norway zh_Hans: 挪威 - pt_BR: Norway + pt_BR: Noruega + - value: OM + label: + en_US: Oman + zh_Hans: 阿曼 + pt_BR: Omã + - value: PK + label: + en_US: Pakistan + zh_Hans: 巴基斯坦 + pt_BR: Paquistão + - value: PW + label: + en_US: Palau + zh_Hans: 帕劳 + pt_BR: Palau + - value: PS + label: + en_US: Palestinian Territory, Occupied + zh_Hans: 巴勒斯坦领土 + pt_BR: Palestina, Território Ocupado + - value: PA + label: + en_US: Panama + zh_Hans: 巴拿马 + pt_BR: Panamá + - value: PG + label: + en_US: Papua New Guinea + zh_Hans: 巴布亚新几内亚 + pt_BR: Papua Nova Guiné + - value: PY + label: + en_US: Paraguay + zh_Hans: 巴拉圭 + pt_BR: Paraguai + - value: PE + label: + en_US: Peru + zh_Hans: 秘鲁 + pt_BR: Peru - value: PH label: en_US: Philippines zh_Hans: 菲律宾 - pt_BR: Philippines + pt_BR: Filipinas + - value: PN + label: + en_US: Pitcairn + zh_Hans: 皮特凯恩岛 + pt_BR: Pitcairn - value: PL label: en_US: Poland zh_Hans: 波兰 - pt_BR: Poland + pt_BR: Polônia - value: PT label: en_US: Portugal zh_Hans: 葡萄牙 pt_BR: Portugal + - value: PR + label: + en_US: Puerto Rico + zh_Hans: 波多黎各 + pt_BR: Porto Rico + - value: QA + label: + en_US: Qatar + zh_Hans: 卡塔尔 + pt_BR: Catar + - value: RE + label: + en_US: Reunion + zh_Hans: 留尼旺 + pt_BR: Reunião + - value: RO + label: + en_US: Romania + zh_Hans: 罗马尼亚 + pt_BR: Romênia - value: RU label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia + en_US: Russian Federation + zh_Hans: 俄罗斯联邦 + pt_BR: Rússia + - value: RW + label: + en_US: Rwanda + zh_Hans: 卢旺达 + pt_BR: Ruanda + - value: SH + label: + en_US: Saint Helena + zh_Hans: 圣赫勒拿 + pt_BR: Santa Helena + - value: KN + label: + en_US: Saint Kitts and Nevis + zh_Hans: 圣基茨和尼维斯 + pt_BR: São Cristóvão e Nevis + - value: LC + label: + en_US: Saint Lucia + zh_Hans: 圣卢西亚 + pt_BR: Santa Lúcia + - value: PM + label: + en_US: Saint Pierre and Miquelon + zh_Hans: 圣皮埃尔和密克隆 + pt_BR: São Pedro e Miquelon + - value: VC + label: + en_US: Saint Vincent and the Grenadines + zh_Hans: 圣文森特和格林纳丁斯 + pt_BR: São Vicente e Granadinas + - value: WS + label: + en_US: Samoa + zh_Hans: 萨摩亚 + pt_BR: Samoa + - value: SM + label: + en_US: San Marino + zh_Hans: 圣马力诺 + pt_BR: San Marino + - value: ST + label: + en_US: Sao Tome and Principe + zh_Hans: 圣多美和普林西比 + pt_BR: São Tomé e Príncipe - value: SA label: en_US: Saudi Arabia zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia + pt_BR: Arábia Saudita + - value: SN + label: + en_US: Senegal + zh_Hans: 塞内加尔 + pt_BR: Senegal + - value: RS + label: + en_US: Serbia and Montenegro + zh_Hans: 塞尔维亚和黑山 + pt_BR: Sérvia e Montenegro + - value: SC + label: + en_US: Seychelles + zh_Hans: 塞舌尔 + pt_BR: Seicheles + - value: SL + label: + en_US: Sierra Leone + zh_Hans: 塞拉利昂 + pt_BR: Serra Leoa - value: SG label: en_US: Singapore zh_Hans: 新加坡 - pt_BR: Singapore + pt_BR: Singapura + - value: SK + label: + en_US: Slovakia + zh_Hans: 斯洛伐克 + pt_BR: Eslováquia + - value: SI + label: + en_US: Slovenia + zh_Hans: 斯洛文尼亚 + pt_BR: Eslovênia + - value: 
SB + label: + en_US: Solomon Islands + zh_Hans: 所罗门群岛 + pt_BR: Ilhas Salomão + - value: SO + label: + en_US: Somalia + zh_Hans: 索马里 + pt_BR: Somália - value: ZA label: en_US: South Africa zh_Hans: 南非 - pt_BR: South Africa + pt_BR: África do Sul + - value: GS + label: + en_US: South Georgia and the South Sandwich Islands + zh_Hans: 南乔治亚和南桑威奇群岛 + pt_BR: Geórgia do Sul e Ilhas Sandwich do Sul - value: ES label: en_US: Spain zh_Hans: 西班牙 - pt_BR: Spain + pt_BR: Espanha + - value: LK + label: + en_US: Sri Lanka + zh_Hans: 斯里兰卡 + pt_BR: Sri Lanka + - value: SD + label: + en_US: Sudan + zh_Hans: 苏丹 + pt_BR: Sudão + - value: SR + label: + en_US: Suriname + zh_Hans: 苏里南 + pt_BR: Suriname + - value: SJ + label: + en_US: Svalbard and Jan Mayen + zh_Hans: 斯瓦尔巴特和扬马延岛 + pt_BR: Svalbard e Jan Mayen + - value: SZ + label: + en_US: Swaziland + zh_Hans: 斯威士兰 + pt_BR: Essuatíni - value: SE label: en_US: Sweden zh_Hans: 瑞典 - pt_BR: Sweden + pt_BR: Suécia - value: CH label: en_US: Switzerland zh_Hans: 瑞士 - pt_BR: Switzerland + pt_BR: Suíça + - value: SY + label: + en_US: Syrian Arab Republic + zh_Hans: 叙利亚 + pt_BR: Síria - value: TW label: - en_US: Taiwan + en_US: Taiwan, Province of China zh_Hans: 台湾 pt_BR: Taiwan + - value: TJ + label: + en_US: Tajikistan + zh_Hans: 塔吉克斯坦 + pt_BR: Tajiquistão + - value: TZ + label: + en_US: Tanzania, United Republic of + zh_Hans: 坦桑尼亚联合共和国 + pt_BR: Tanzânia - value: TH label: en_US: Thailand zh_Hans: 泰国 - pt_BR: Thailand + pt_BR: Tailândia + - value: TL + label: + en_US: Timor-Leste + zh_Hans: 东帝汶 + pt_BR: Timor-Leste + - value: TG + label: + en_US: Togo + zh_Hans: 多哥 + pt_BR: Togo + - value: TK + label: + en_US: Tokelau + zh_Hans: 托克劳 + pt_BR: Toquelau + - value: TO + label: + en_US: Tonga + zh_Hans: 汤加 + pt_BR: Tonga + - value: TT + label: + en_US: Trinidad and Tobago + zh_Hans: 特立尼达和多巴哥 + pt_BR: Trindade e Tobago + - value: TN + label: + en_US: Tunisia + zh_Hans: 突尼斯 + pt_BR: Tunísia - value: TR label: en_US: Turkey zh_Hans: 土耳其 - pt_BR: Turkey + pt_BR: Turquia + - value: TM + label: + en_US: Turkmenistan + zh_Hans: 土库曼斯坦 + pt_BR: Turcomenistão + - value: TC + label: + en_US: Turks and Caicos Islands + zh_Hans: 特克斯和凯科斯群岛 + pt_BR: Ilhas Turks e Caicos + - value: TV + label: + en_US: Tuvalu + zh_Hans: 图瓦卢 + pt_BR: Tuvalu + - value: UG + label: + en_US: Uganda + zh_Hans: 乌干达 + pt_BR: Uganda + - value: UA + label: + en_US: Ukraine + zh_Hans: 乌克兰 + pt_BR: Ucrânia + - value: AE + label: + en_US: United Arab Emirates + zh_Hans: 阿联酋 + pt_BR: Emirados Árabes Unidos + - value: UK + label: + en_US: United Kingdom + zh_Hans: 英国 + pt_BR: Reino Unido - value: GB label: en_US: United Kingdom zh_Hans: 英国 - pt_BR: United Kingdom + pt_BR: Reino Unido - value: US label: en_US: United States zh_Hans: 美国 - pt_BR: United States + pt_BR: Estados Unidos + - value: UM + label: + en_US: United States Minor Outlying Islands + zh_Hans: 美国本土外小岛屿 + pt_BR: Ilhas Menores Distantes dos Estados Unidos + - value: UY + label: + en_US: Uruguay + zh_Hans: 乌拉圭 + pt_BR: Uruguai + - value: UZ + label: + en_US: Uzbekistan + zh_Hans: 乌兹别克斯坦 + pt_BR: Uzbequistão + - value: VU + label: + en_US: Vanuatu + zh_Hans: 瓦努阿图 + pt_BR: Vanuatu + - value: VE + label: + en_US: Venezuela + zh_Hans: 委内瑞拉 + pt_BR: Venezuela + - value: VN + label: + en_US: Viet Nam + zh_Hans: 越南 + pt_BR: Vietnã + - value: VG + label: + en_US: Virgin Islands, British + zh_Hans: 英属维尔京群岛 + pt_BR: Ilhas Virgens Britânicas + - value: VI + label: + en_US: Virgin Islands, U.S. 
+ zh_Hans: 美属维尔京群岛 + pt_BR: Ilhas Virgens dos EUA + - value: WF + label: + en_US: Wallis and Futuna + zh_Hans: 瓦利斯和富图纳群岛 + pt_BR: Wallis e Futuna + - value: EH + label: + en_US: Western Sahara + zh_Hans: 西撒哈拉 + pt_BR: Saara Ocidental + - value: YE + label: + en_US: Yemen + zh_Hans: 也门 + pt_BR: Iémen + - value: ZM + label: + en_US: Zambia + zh_Hans: 赞比亚 + pt_BR: Zâmbia + - value: ZW + label: + en_US: Zimbabwe + zh_Hans: 津巴布韦 + pt_BR: Zimbábue - name: hl type: select label: @@ -277,18 +1277,94 @@ parameters: default: en form: form options: + - value: af + label: + en_US: Afrikaans + zh_Hans: 南非语 + - value: ak + label: + en_US: Akan + zh_Hans: 阿坎语 + - value: sq + label: + en_US: Albanian + zh_Hans: 阿尔巴尼亚语 + - value: ws + label: + en_US: Samoa + zh_Hans: 萨摩亚语 + - value: am + label: + en_US: Amharic + zh_Hans: 阿姆哈拉语 - value: ar label: en_US: Arabic zh_Hans: 阿拉伯语 + - value: hy + label: + en_US: Armenian + zh_Hans: 亚美尼亚语 + - value: az + label: + en_US: Azerbaijani + zh_Hans: 阿塞拜疆语 + - value: eu + label: + en_US: Basque + zh_Hans: 巴斯克语 + - value: be + label: + en_US: Belarusian + zh_Hans: 白俄罗斯语 + - value: bem + label: + en_US: Bemba + zh_Hans: 班巴语 + - value: bn + label: + en_US: Bengali + zh_Hans: 孟加拉语 + - value: bh + label: + en_US: Bihari + zh_Hans: 比哈尔语 + - value: xx-bork + label: + en_US: Bork, bork, bork! + zh_Hans: 博克语 + - value: bs + label: + en_US: Bosnian + zh_Hans: 波斯尼亚语 + - value: br + label: + en_US: Breton + zh_Hans: 布列塔尼语 - value: bg label: en_US: Bulgarian zh_Hans: 保加利亚语 + - value: bt + label: + en_US: Bhutanese + zh_Hans: 不丹语 + - value: km + label: + en_US: Cambodian + zh_Hans: 高棉语 - value: ca label: en_US: Catalan zh_Hans: 加泰罗尼亚语 + - value: chr + label: + en_US: Cherokee + zh_Hans: 切罗基语 + - value: ny + label: + en_US: Chichewa + zh_Hans: 齐切瓦语 - value: zh-cn label: en_US: Chinese (Simplified) @@ -297,6 +1373,14 @@ parameters: label: en_US: Chinese (Traditional) zh_Hans: 中文(繁体) + - value: co + label: + en_US: Corsican + zh_Hans: 科西嘉语 + - value: hr + label: + en_US: Croatian + zh_Hans: 克罗地亚语 - value: cs label: en_US: Czech @@ -309,14 +1393,34 @@ parameters: label: en_US: Dutch zh_Hans: 荷兰语 + - value: xx-elmer + label: + en_US: Elmer Fudd + zh_Hans: 艾尔默福德语 - value: en label: en_US: English zh_Hans: 英语 + - value: eo + label: + en_US: Esperanto + zh_Hans: 世界语 - value: et label: en_US: Estonian zh_Hans: 爱沙尼亚语 + - value: ee + label: + en_US: Ewe + zh_Hans: 埃维语 + - value: fo + label: + en_US: Faroese + zh_Hans: 法罗语 + - value: tl + label: + en_US: Filipino + zh_Hans: 菲律宾语 - value: fi label: en_US: Finnish @@ -325,6 +1429,22 @@ parameters: label: en_US: French zh_Hans: 法语 + - value: fy + label: + en_US: Frisian + zh_Hans: 弗里西亚语 + - value: gaa + label: + en_US: Ga + zh_Hans: 加语 + - value: gl + label: + en_US: Galician + zh_Hans: 加利西亚语 + - value: ka + label: + en_US: Georgian + zh_Hans: 格鲁吉亚语 - value: de label: en_US: German @@ -333,6 +1453,34 @@ parameters: label: en_US: Greek zh_Hans: 希腊语 + - value: kl + label: + en_US: Greenlandic + zh_Hans: 格陵兰语 + - value: gn + label: + en_US: Guarani + zh_Hans: 瓜拉尼语 + - value: gu + label: + en_US: Gujarati + zh_Hans: 古吉拉特语 + - value: xx-hacker + label: + en_US: Hacker + zh_Hans: 黑客语 + - value: ht + label: + en_US: Haitian Creole + zh_Hans: 海地克里奥尔语 + - value: ha + label: + en_US: Hausa + zh_Hans: 豪萨语 + - value: haw + label: + en_US: Hawaiian + zh_Hans: 夏威夷语 - value: iw label: en_US: Hebrew @@ -345,10 +1493,26 @@ parameters: label: en_US: Hungarian zh_Hans: 匈牙利语 + - value: is + label: + en_US: Icelandic + zh_Hans: 冰岛语 + - value: ig + label: + en_US: Igbo + 
zh_Hans: 伊博语 - value: id label: en_US: Indonesian zh_Hans: 印尼语 + - value: ia + label: + en_US: Interlingua + zh_Hans: 国际语 + - value: ga + label: + en_US: Irish + zh_Hans: 爱尔兰语 - value: it label: en_US: Italian @@ -357,22 +1521,94 @@ parameters: label: en_US: Japanese zh_Hans: 日语 + - value: jw + label: + en_US: Javanese + zh_Hans: 爪哇语 - value: kn label: en_US: Kannada zh_Hans: 卡纳达语 + - value: kk + label: + en_US: Kazakh + zh_Hans: 哈萨克语 + - value: rw + label: + en_US: Kinyarwanda + zh_Hans: 基尼亚卢旺达语 + - value: rn + label: + en_US: Kirundi + zh_Hans: 基隆迪语 + - value: xx-klingon + label: + en_US: Klingon + zh_Hans: 克林贡语 + - value: kg + label: + en_US: Kongo + zh_Hans: 刚果语 - value: ko label: en_US: Korean zh_Hans: 韩语 + - value: kri + label: + en_US: Krio (Sierra Leone) + zh_Hans: 塞拉利昂克里奥尔语 + - value: ku + label: + en_US: Kurdish + zh_Hans: 库尔德语 + - value: ckb + label: + en_US: Kurdish (Soranî) + zh_Hans: 库尔德语(索拉尼) + - value: ky + label: + en_US: Kyrgyz + zh_Hans: 吉尔吉斯语 + - value: lo + label: + en_US: Laothian + zh_Hans: 老挝语 + - value: la + label: + en_US: Latin + zh_Hans: 拉丁语 - value: lv label: en_US: Latvian zh_Hans: 拉脱维亚语 + - value: ln + label: + en_US: Lingala + zh_Hans: 林加拉语 - value: lt label: en_US: Lithuanian zh_Hans: 立陶宛语 + - value: loz + label: + en_US: Lozi + zh_Hans: 洛齐语 + - value: lg + label: + en_US: Luganda + zh_Hans: 卢干达语 + - value: ach + label: + en_US: Luo + zh_Hans: 卢奥语 + - value: mk + label: + en_US: Macedonian + zh_Hans: 马其顿语 + - value: mg + label: + en_US: Malagasy + zh_Hans: 马尔加什语 - value: my label: en_US: Malay @@ -381,18 +1617,90 @@ parameters: label: en_US: Malayalam zh_Hans: 马拉雅拉姆语 + - value: mt + label: + en_US: Maltese + zh_Hans: 马耳他语 + - value: mv + label: + en_US: Maldives + zh_Hans: 马尔代夫语 + - value: mi + label: + en_US: Maori + zh_Hans: 毛利语 - value: mr label: en_US: Marathi zh_Hans: 马拉地语 + - value: mfe + label: + en_US: Mauritian Creole + zh_Hans: 毛里求斯克里奥尔语 + - value: mo + label: + en_US: Moldavian + zh_Hans: 摩尔达维亚语 + - value: mn + label: + en_US: Mongolian + zh_Hans: 蒙古语 + - value: sr-me + label: + en_US: Montenegrin + zh_Hans: 黑山语 + - value: ne + label: + en_US: Nepali + zh_Hans: 尼泊尔语 + - value: pcm + label: + en_US: Nigerian Pidgin + zh_Hans: 尼日利亚皮钦语 + - value: nso + label: + en_US: Northern Sotho + zh_Hans: 北索托语 - value: "no" label: en_US: Norwegian zh_Hans: 挪威语 + - value: nn + label: + en_US: Norwegian (Nynorsk) + zh_Hans: 挪威语(尼诺斯克语) + - value: oc + label: + en_US: Occitan + zh_Hans: 奥克语 + - value: or + label: + en_US: Oriya + zh_Hans: 奥里亚语 + - value: om + label: + en_US: Oromo + zh_Hans: 奥罗莫语 + - value: ps + label: + en_US: Pashto + zh_Hans: 普什图语 + - value: fa + label: + en_US: Persian + zh_Hans: 波斯语 + - value: xx-pirate + label: + en_US: Pirate + zh_Hans: 海盗语 - value: pl label: en_US: Polish zh_Hans: 波兰语 + - value: pt + label: + en_US: Portuguese + zh_Hans: 葡萄牙语 - value: pt-br label: en_US: Portuguese (Brazil) @@ -405,18 +1713,62 @@ parameters: label: en_US: Punjabi zh_Hans: 旁遮普语 + - value: qu + label: + en_US: Quechua + zh_Hans: 克丘亚语 - value: ro label: en_US: Romanian zh_Hans: 罗马尼亚语 + - value: rm + label: + en_US: Romansh + zh_Hans: 罗曼什语 + - value: nyn + label: + en_US: Runyakitara + zh_Hans: 卢尼亚基塔拉语 - value: ru label: en_US: Russian zh_Hans: 俄语 + - value: gd + label: + en_US: Scots Gaelic + zh_Hans: 苏格兰盖尔语 - value: sr label: en_US: Serbian zh_Hans: 塞尔维亚语 + - value: sh + label: + en_US: Serbo-Croatian + zh_Hans: 塞尔维亚-克罗地亚语 + - value: st + label: + en_US: Sesotho + zh_Hans: 塞索托语 + - value: tn + label: + en_US: Setswana + zh_Hans: 塞茨瓦纳语 + - value: crs + label: 
+ en_US: Seychellois Creole + zh_Hans: 塞舌尔克里奥尔语 + - value: sn + label: + en_US: Shona + zh_Hans: 绍纳语 + - value: sd + label: + en_US: Sindhi + zh_Hans: 信德语 + - value: si + label: + en_US: Sinhalese + zh_Hans: 僧伽罗语 - value: sk label: en_US: Slovak @@ -425,18 +1777,42 @@ parameters: label: en_US: Slovenian zh_Hans: 斯洛文尼亚语 + - value: so + label: + en_US: Somali + zh_Hans: 索马里语 - value: es label: en_US: Spanish zh_Hans: 西班牙语 + - value: es-419 + label: + en_US: Spanish (Latin American) + zh_Hans: 西班牙语(拉丁美洲) + - value: su + label: + en_US: Sundanese + zh_Hans: 巽他语 + - value: sw + label: + en_US: Swahili + zh_Hans: 斯瓦希里语 - value: sv label: en_US: Swedish zh_Hans: 瑞典语 + - value: tg + label: + en_US: Tajik + zh_Hans: 塔吉克语 - value: ta label: en_US: Tamil zh_Hans: 泰米尔语 + - value: tt + label: + en_US: Tatar + zh_Hans: 鞑靼语 - value: te label: en_US: Telugu @@ -445,18 +1821,82 @@ parameters: label: en_US: Thai zh_Hans: 泰语 + - value: ti + label: + en_US: Tigrinya + zh_Hans: 提格利尼亚语 + - value: to + label: + en_US: Tonga + zh_Hans: 汤加语 + - value: lua + label: + en_US: Tshiluba + zh_Hans: 卢巴语 + - value: tum + label: + en_US: Tumbuka + zh_Hans: 图布卡语 - value: tr label: en_US: Turkish zh_Hans: 土耳其语 + - value: tk + label: + en_US: Turkmen + zh_Hans: 土库曼语 + - value: tw + label: + en_US: Twi + zh_Hans: 契维语 + - value: ug + label: + en_US: Uighur + zh_Hans: 维吾尔语 - value: uk label: en_US: Ukrainian zh_Hans: 乌克兰语 + - value: ur + label: + en_US: Urdu + zh_Hans: 乌尔都语 + - value: uz + label: + en_US: Uzbek + zh_Hans: 乌兹别克语 + - value: vu + label: + en_US: Vanuatu + zh_Hans: 瓦努阿图语 - value: vi label: en_US: Vietnamese zh_Hans: 越南语 + - value: cy + label: + en_US: Welsh + zh_Hans: 威尔士语 + - value: wo + label: + en_US: Wolof + zh_Hans: 沃洛夫语 + - value: xh + label: + en_US: Xhosa + zh_Hans: 科萨语 + - value: yi + label: + en_US: Yiddish + zh_Hans: 意第绪语 + - value: yo + label: + en_US: Yoruba + zh_Hans: 约鲁巴语 + - value: zu + label: + en_US: Zulu + zh_Hans: 祖鲁语 - name: google_domain type: string required: false diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml b/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml index 9033bc0f87..3e00e20fbd 100644 --- a/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml +++ b/api/core/tools/provider/builtin/searchapi/tools/google_jobs.yaml @@ -65,36 +65,141 @@ parameters: form: form default: US options: - - value: AR + - value: DZ label: - en_US: Argentina - zh_Hans: 阿根廷 - pt_BR: Argentina - - value: AU + en_US: Algeria + zh_Hans: 阿尔及利亚 + pt_BR: Algeria + - value: AS label: - en_US: Australia - zh_Hans: 澳大利亚 - pt_BR: Australia + en_US: American Samoa + zh_Hans: 美属萨摩亚 + pt_BR: American Samoa + - value: AO + label: + en_US: Angola + zh_Hans: 安哥拉 + pt_BR: Angola + - value: AI + label: + en_US: Anguilla + zh_Hans: 安圭拉 + pt_BR: Anguilla + - value: AG + label: + en_US: Antigua and Barbuda + zh_Hans: 安提瓜和巴布达 + pt_BR: Antigua and Barbuda + - value: AW + label: + en_US: Aruba + zh_Hans: 阿鲁巴 + pt_BR: Aruba - value: AT label: en_US: Austria zh_Hans: 奥地利 pt_BR: Austria + - value: BS + label: + en_US: Bahamas + zh_Hans: 巴哈马 + pt_BR: Bahamas + - value: BH + label: + en_US: Bahrain + zh_Hans: 巴林 + pt_BR: Bahrain + - value: BD + label: + en_US: Bangladesh + zh_Hans: 孟加拉国 + pt_BR: Bangladesh + - value: BY + label: + en_US: Belarus + zh_Hans: 白俄罗斯 + pt_BR: Belarus - value: BE label: en_US: Belgium zh_Hans: 比利时 pt_BR: Belgium + - value: BZ + label: + en_US: Belize + zh_Hans: 伯利兹 + pt_BR: Belize + - value: BJ + label: + en_US: Benin + zh_Hans: 贝宁 + pt_BR: Benin + - 
value: BM + label: + en_US: Bermuda + zh_Hans: 百慕大 + pt_BR: Bermuda + - value: BO + label: + en_US: Bolivia + zh_Hans: 玻利维亚 + pt_BR: Bolivia + - value: BW + label: + en_US: Botswana + zh_Hans: 博茨瓦纳 + pt_BR: Botswana - value: BR label: en_US: Brazil zh_Hans: 巴西 pt_BR: Brazil + - value: IO + label: + en_US: British Indian Ocean Territory + zh_Hans: 英属印度洋领地 + pt_BR: British Indian Ocean Territory + - value: BF + label: + en_US: Burkina Faso + zh_Hans: 布基纳法索 + pt_BR: Burkina Faso + - value: BI + label: + en_US: Burundi + zh_Hans: 布隆迪 + pt_BR: Burundi + - value: CM + label: + en_US: Cameroon + zh_Hans: 喀麦隆 + pt_BR: Cameroon - value: CA label: en_US: Canada zh_Hans: 加拿大 pt_BR: Canada + - value: CV + label: + en_US: Cape Verde + zh_Hans: 佛得角 + pt_BR: Cape Verde + - value: KY + label: + en_US: Cayman Islands + zh_Hans: 开曼群岛 + pt_BR: Cayman Islands + - value: CF + label: + en_US: Central African Republic + zh_Hans: 中非共和国 + pt_BR: Central African Republic + - value: TD + label: + en_US: Chad + zh_Hans: 乍得 + pt_BR: Chad - value: CL label: en_US: Chile @@ -105,36 +210,141 @@ parameters: en_US: Colombia zh_Hans: 哥伦比亚 pt_BR: Colombia - - value: CN + - value: CD label: - en_US: China - zh_Hans: 中国 - pt_BR: China - - value: CZ + en_US: Congo, the Democratic Republic of the + zh_Hans: 刚果民主共和国 + pt_BR: Congo, the Democratic Republic of the + - value: CR label: - en_US: Czech Republic - zh_Hans: 捷克共和国 - pt_BR: Czech Republic + en_US: Costa Rica + zh_Hans: 哥斯达黎加 + pt_BR: Costa Rica + - value: CI + label: + en_US: Cote D'ivoire + zh_Hans: 科特迪瓦 + pt_BR: Cote D'ivoire + - value: CU + label: + en_US: Cuba + zh_Hans: 古巴 + pt_BR: Cuba - value: DK label: en_US: Denmark zh_Hans: 丹麦 pt_BR: Denmark - - value: FI + - value: DJ label: - en_US: Finland - zh_Hans: 芬兰 - pt_BR: Finland + en_US: Djibouti + zh_Hans: 吉布提 + pt_BR: Djibouti + - value: DM + label: + en_US: Dominica + zh_Hans: 多米尼克 + pt_BR: Dominica + - value: DO + label: + en_US: Dominican Republic + zh_Hans: 多米尼加共和国 + pt_BR: Dominican Republic + - value: EC + label: + en_US: Ecuador + zh_Hans: 厄瓜多尔 + pt_BR: Ecuador + - value: EG + label: + en_US: Egypt + zh_Hans: 埃及 + pt_BR: Egypt + - value: SV + label: + en_US: El Salvador + zh_Hans: 萨尔瓦多 + pt_BR: El Salvador + - value: ET + label: + en_US: Ethiopia + zh_Hans: 埃塞俄比亚 + pt_BR: Ethiopia + - value: FK + label: + en_US: Falkland Islands (Malvinas) + zh_Hans: 福克兰群岛(马尔维纳斯) + pt_BR: Falkland Islands (Malvinas) - value: FR label: en_US: France zh_Hans: 法国 pt_BR: France + - value: GF + label: + en_US: French Guiana + zh_Hans: 法属圭亚那 + pt_BR: French Guiana + - value: PF + label: + en_US: French Polynesia + zh_Hans: 法属波利尼西亚 + pt_BR: French Polynesia + - value: TF + label: + en_US: French Southern Territories + zh_Hans: 法属南部领地 + pt_BR: French Southern Territories + - value: GA + label: + en_US: Gabon + zh_Hans: 加蓬 + pt_BR: Gabon + - value: GM + label: + en_US: Gambia + zh_Hans: 冈比亚 + pt_BR: Gambia - value: DE label: en_US: Germany zh_Hans: 德国 pt_BR: Germany + - value: GH + label: + en_US: Ghana + zh_Hans: 加纳 + pt_BR: Ghana + - value: GR + label: + en_US: Greece + zh_Hans: 希腊 + pt_BR: Greece + - value: GP + label: + en_US: Guadeloupe + zh_Hans: 瓜德罗普 + pt_BR: Guadeloupe + - value: GT + label: + en_US: Guatemala + zh_Hans: 危地马拉 + pt_BR: Guatemala + - value: GY + label: + en_US: Guyana + zh_Hans: 圭亚那 + pt_BR: Guyana + - value: HT + label: + en_US: Haiti + zh_Hans: 海地 + pt_BR: Haiti + - value: HN + label: + en_US: Honduras + zh_Hans: 洪都拉斯 + pt_BR: Honduras - value: HK label: en_US: Hong Kong @@ -150,91 +360,291 @@ parameters: 
en_US: Indonesia zh_Hans: 印度尼西亚 pt_BR: Indonesia + - value: IQ + label: + en_US: Iraq + zh_Hans: 伊拉克 + pt_BR: Iraq - value: IT label: en_US: Italy zh_Hans: 意大利 pt_BR: Italy + - value: JM + label: + en_US: Jamaica + zh_Hans: 牙买加 + pt_BR: Jamaica - value: JP label: en_US: Japan zh_Hans: 日本 pt_BR: Japan - - value: KR + - value: JO label: - en_US: Korea - zh_Hans: 韩国 - pt_BR: Korea + en_US: Jordan + zh_Hans: 约旦 + pt_BR: Jordan + - value: KZ + label: + en_US: Kazakhstan + zh_Hans: 哈萨克斯坦 + pt_BR: Kazakhstan + - value: KE + label: + en_US: Kenya + zh_Hans: 肯尼亚 + pt_BR: Kenya + - value: KW + label: + en_US: Kuwait + zh_Hans: 科威特 + pt_BR: Kuwait + - value: KG + label: + en_US: Kyrgyzstan + zh_Hans: 吉尔吉斯斯坦 + pt_BR: Kyrgyzstan + - value: LB + label: + en_US: Lebanon + zh_Hans: 黎巴嫩 + pt_BR: Lebanon + - value: LS + label: + en_US: Lesotho + zh_Hans: 莱索托 + pt_BR: Lesotho + - value: LY + label: + en_US: Libyan Arab Jamahiriya + zh_Hans: 利比亚 + pt_BR: Libyan Arab Jamahiriya + - value: MG + label: + en_US: Madagascar + zh_Hans: 马达加斯加 + pt_BR: Madagascar + - value: MW + label: + en_US: Malawi + zh_Hans: 马拉维 + pt_BR: Malawi - value: MY label: en_US: Malaysia zh_Hans: 马来西亚 pt_BR: Malaysia + - value: ML + label: + en_US: Mali + zh_Hans: 马里 + pt_BR: Mali + - value: MQ + label: + en_US: Martinique + zh_Hans: 马提尼克 + pt_BR: Martinique + - value: MU + label: + en_US: Mauritius + zh_Hans: 毛里求斯 + pt_BR: Mauritius + - value: YT + label: + en_US: Mayotte + zh_Hans: 马约特 + pt_BR: Mayotte - value: MX label: en_US: Mexico zh_Hans: 墨西哥 pt_BR: Mexico + - value: MS + label: + en_US: Montserrat + zh_Hans: 蒙特塞拉特 + pt_BR: Montserrat + - value: MA + label: + en_US: Morocco + zh_Hans: 摩洛哥 + pt_BR: Morocco + - value: MZ + label: + en_US: Mozambique + zh_Hans: 莫桑比克 + pt_BR: Mozambique + - value: NA + label: + en_US: Namibia + zh_Hans: 纳米比亚 + pt_BR: Namibia - value: NL label: en_US: Netherlands zh_Hans: 荷兰 pt_BR: Netherlands - - value: NZ + - value: NC label: - en_US: New Zealand - zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' + en_US: New Caledonia + zh_Hans: 新喀里多尼亚 + pt_BR: New Caledonia + - value: NI label: - en_US: Norway - zh_Hans: 挪威 - pt_BR: Norway + en_US: Nicaragua + zh_Hans: 尼加拉瓜 + pt_BR: Nicaragua + - value: NE + label: + en_US: Niger + zh_Hans: 尼日尔 + pt_BR: Niger + - value: NG + label: + en_US: Nigeria + zh_Hans: 尼日利亚 + pt_BR: Nigeria + - value: OM + label: + en_US: Oman + zh_Hans: 阿曼 + pt_BR: Oman + - value: PK + label: + en_US: Pakistan + zh_Hans: 巴基斯坦 + pt_BR: Pakistan + - value: PS + label: + en_US: Palestinian Territory, Occupied + zh_Hans: 巴勒斯坦领土 + pt_BR: Palestinian Territory, Occupied + - value: PA + label: + en_US: Panama + zh_Hans: 巴拿马 + pt_BR: Panama + - value: PY + label: + en_US: Paraguay + zh_Hans: 巴拉圭 + pt_BR: Paraguay + - value: PE + label: + en_US: Peru + zh_Hans: 秘鲁 + pt_BR: Peru - value: PH label: en_US: Philippines zh_Hans: 菲律宾 pt_BR: Philippines - - value: PL - label: - en_US: Poland - zh_Hans: 波兰 - pt_BR: Poland - value: PT label: en_US: Portugal zh_Hans: 葡萄牙 pt_BR: Portugal + - value: PR + label: + en_US: Puerto Rico + zh_Hans: 波多黎各 + pt_BR: Puerto Rico + - value: QA + label: + en_US: Qatar + zh_Hans: 卡塔尔 + pt_BR: Qatar + - value: RE + label: + en_US: Reunion + zh_Hans: 留尼旺 + pt_BR: Reunion - value: RU label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia + en_US: Russian Federation + zh_Hans: 俄罗斯联邦 + pt_BR: Russian Federation + - value: RW + label: + en_US: Rwanda + zh_Hans: 卢旺达 + pt_BR: Rwanda + - value: SH + label: + en_US: Saint Helena + zh_Hans: 圣赫勒拿 + pt_BR: Saint Helena + - value: PM + 
label: + en_US: Saint Pierre and Miquelon + zh_Hans: 圣皮埃尔和密克隆 + pt_BR: Saint Pierre and Miquelon + - value: VC + label: + en_US: Saint Vincent and the Grenadines + zh_Hans: 圣文森特和格林纳丁斯 + pt_BR: Saint Vincent and the Grenadines + - value: ST + label: + en_US: Sao Tome and Principe + zh_Hans: 圣多美和普林西比 + pt_BR: Sao Tome and Principe - value: SA label: en_US: Saudi Arabia zh_Hans: 沙特阿拉伯 pt_BR: Saudi Arabia + - value: SN + label: + en_US: Senegal + zh_Hans: 塞内加尔 + pt_BR: Senegal + - value: SC + label: + en_US: Seychelles + zh_Hans: 塞舌尔 + pt_BR: Seychelles + - value: SL + label: + en_US: Sierra Leone + zh_Hans: 塞拉利昂 + pt_BR: Sierra Leone - value: SG label: en_US: Singapore zh_Hans: 新加坡 pt_BR: Singapore + - value: SO + label: + en_US: Somalia + zh_Hans: 索马里 + pt_BR: Somalia - value: ZA label: en_US: South Africa zh_Hans: 南非 pt_BR: South Africa + - value: GS + label: + en_US: South Georgia and the South Sandwich Islands + zh_Hans: 南乔治亚和南桑威奇群岛 + pt_BR: South Georgia and the South Sandwich Islands - value: ES label: en_US: Spain zh_Hans: 西班牙 pt_BR: Spain - - value: SE + - value: LK label: - en_US: Sweden - zh_Hans: 瑞典 - pt_BR: Sweden + en_US: Sri Lanka + zh_Hans: 斯里兰卡 + pt_BR: Sri Lanka + - value: SR + label: + en_US: Suriname + zh_Hans: 苏里南 + pt_BR: Suriname - value: CH label: en_US: Switzerland @@ -242,19 +652,54 @@ parameters: pt_BR: Switzerland - value: TW label: - en_US: Taiwan - zh_Hans: 台湾 - pt_BR: Taiwan + en_US: Taiwan, Province of China + zh_Hans: 中国台湾省 + pt_BR: Taiwan, Province of China + - value: TZ + label: + en_US: Tanzania, United Republic of + zh_Hans: 坦桑尼亚联合共和国 + pt_BR: Tanzania, United Republic of - value: TH label: en_US: Thailand zh_Hans: 泰国 pt_BR: Thailand - - value: TR + - value: TG label: - en_US: Turkey - zh_Hans: 土耳其 - pt_BR: Turkey + en_US: Togo + zh_Hans: 多哥 + pt_BR: Togo + - value: TT + label: + en_US: Trinidad and Tobago + zh_Hans: 特立尼达和多巴哥 + pt_BR: Trinidad and Tobago + - value: TN + label: + en_US: Tunisia + zh_Hans: 突尼斯 + pt_BR: Tunisia + - value: TC + label: + en_US: Turks and Caicos Islands + zh_Hans: 特克斯和凯科斯群岛 + pt_BR: Turks and Caicos Islands + - value: UG + label: + en_US: Uganda + zh_Hans: 乌干达 + pt_BR: Uganda + - value: AE + label: + en_US: United Arab Emirates + zh_Hans: 阿联酋 + pt_BR: United Arab Emirates + - value: UK + label: + en_US: United Kingdom + zh_Hans: 英国 + pt_BR: United Kingdom - value: GB label: en_US: United Kingdom @@ -265,6 +710,46 @@ parameters: en_US: United States zh_Hans: 美国 pt_BR: United States + - value: UY + label: + en_US: Uruguay + zh_Hans: 乌拉圭 + pt_BR: Uruguay + - value: UZ + label: + en_US: Uzbekistan + zh_Hans: 乌兹别克斯坦 + pt_BR: Uzbekistan + - value: VE + label: + en_US: Venezuela + zh_Hans: 委内瑞拉 + pt_BR: Venezuela + - value: VN + label: + en_US: Viet Nam + zh_Hans: 越南 + pt_BR: Viet Nam + - value: VG + label: + en_US: Virgin Islands, British + zh_Hans: 英属维尔京群岛 + pt_BR: Virgin Islands, British + - value: VI + label: + en_US: Virgin Islands, U.S. + zh_Hans: 美属维尔京群岛 + pt_BR: Virgin Islands, U.S. 
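Aside on validating these values: most of the `gl` codes added in this file are ISO 3166-1 alpha-2, but the list deliberately carries `UK` alongside `GB` for the United Kingdom, and `UK` is not an ISO code, so any sanity check needs an explicit alias set. (Relatedly, `NO` is quoted in the YAML because an unquoted `NO` would parse as a boolean.) A rough test-side check using the third-party `pycountry` package — an assumption for illustration, not a dependency introduced by this diff:

```python
import pycountry

# Aliases accepted by search engines even though they are not ISO 3166-1
# alpha-2 codes; "UK" appears in the option lists above alongside "GB".
NON_ISO_ALIASES = {"UK"}

def is_valid_gl(code: str) -> bool:
    """Return True if a gl option is an ISO 3166-1 alpha-2 code or a known alias."""
    if code.upper() in NON_ISO_ALIASES:
        return True
    return pycountry.countries.get(alpha_2=code.upper()) is not None
```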
+ - value: ZM + label: + en_US: Zambia + zh_Hans: 赞比亚 + pt_BR: Zambia + - value: ZW + label: + en_US: Zimbabwe + zh_Hans: 津巴布韦 + pt_BR: Zimbabwe - name: hl type: select label: @@ -277,18 +762,94 @@ parameters: default: en form: form options: + - value: af + label: + en_US: Afrikaans + zh_Hans: 南非语 + - value: ak + label: + en_US: Akan + zh_Hans: 阿坎语 + - value: sq + label: + en_US: Albanian + zh_Hans: 阿尔巴尼亚语 + - value: ws + label: + en_US: Samoa + zh_Hans: 萨摩亚语 + - value: am + label: + en_US: Amharic + zh_Hans: 阿姆哈拉语 - value: ar label: en_US: Arabic zh_Hans: 阿拉伯语 + - value: hy + label: + en_US: Armenian + zh_Hans: 亚美尼亚语 + - value: az + label: + en_US: Azerbaijani + zh_Hans: 阿塞拜疆语 + - value: eu + label: + en_US: Basque + zh_Hans: 巴斯克语 + - value: be + label: + en_US: Belarusian + zh_Hans: 白俄罗斯语 + - value: bem + label: + en_US: Bemba + zh_Hans: 班巴语 + - value: bn + label: + en_US: Bengali + zh_Hans: 孟加拉语 + - value: bh + label: + en_US: Bihari + zh_Hans: 比哈尔语 + - value: xx-bork + label: + en_US: Bork, bork, bork! + zh_Hans: 博克语 + - value: bs + label: + en_US: Bosnian + zh_Hans: 波斯尼亚语 + - value: br + label: + en_US: Breton + zh_Hans: 布列塔尼语 - value: bg label: en_US: Bulgarian zh_Hans: 保加利亚语 + - value: bt + label: + en_US: Bhutanese + zh_Hans: 不丹语 + - value: km + label: + en_US: Cambodian + zh_Hans: 高棉语 - value: ca label: en_US: Catalan zh_Hans: 加泰罗尼亚语 + - value: chr + label: + en_US: Cherokee + zh_Hans: 切罗基语 + - value: ny + label: + en_US: Chichewa + zh_Hans: 齐切瓦语 - value: zh-cn label: en_US: Chinese (Simplified) @@ -297,6 +858,14 @@ parameters: label: en_US: Chinese (Traditional) zh_Hans: 中文(繁体) + - value: co + label: + en_US: Corsican + zh_Hans: 科西嘉语 + - value: hr + label: + en_US: Croatian + zh_Hans: 克罗地亚语 - value: cs label: en_US: Czech @@ -309,14 +878,34 @@ parameters: label: en_US: Dutch zh_Hans: 荷兰语 + - value: xx-elmer + label: + en_US: Elmer Fudd + zh_Hans: 艾尔默福德语 - value: en label: en_US: English zh_Hans: 英语 + - value: eo + label: + en_US: Esperanto + zh_Hans: 世界语 - value: et label: en_US: Estonian zh_Hans: 爱沙尼亚语 + - value: ee + label: + en_US: Ewe + zh_Hans: 埃维语 + - value: fo + label: + en_US: Faroese + zh_Hans: 法罗语 + - value: tl + label: + en_US: Filipino + zh_Hans: 菲律宾语 - value: fi label: en_US: Finnish @@ -325,6 +914,22 @@ parameters: label: en_US: French zh_Hans: 法语 + - value: fy + label: + en_US: Frisian + zh_Hans: 弗里西亚语 + - value: gaa + label: + en_US: Ga + zh_Hans: 加语 + - value: gl + label: + en_US: Galician + zh_Hans: 加利西亚语 + - value: ka + label: + en_US: Georgian + zh_Hans: 格鲁吉亚语 - value: de label: en_US: German @@ -333,6 +938,34 @@ parameters: label: en_US: Greek zh_Hans: 希腊语 + - value: kl + label: + en_US: Greenlandic + zh_Hans: 格陵兰语 + - value: gn + label: + en_US: Guarani + zh_Hans: 瓜拉尼语 + - value: gu + label: + en_US: Gujarati + zh_Hans: 古吉拉特语 + - value: xx-hacker + label: + en_US: Hacker + zh_Hans: 黑客语 + - value: ht + label: + en_US: Haitian Creole + zh_Hans: 海地克里奥尔语 + - value: ha + label: + en_US: Hausa + zh_Hans: 豪萨语 + - value: haw + label: + en_US: Hawaiian + zh_Hans: 夏威夷语 - value: iw label: en_US: Hebrew @@ -345,10 +978,26 @@ parameters: label: en_US: Hungarian zh_Hans: 匈牙利语 + - value: is + label: + en_US: Icelandic + zh_Hans: 冰岛语 + - value: ig + label: + en_US: Igbo + zh_Hans: 伊博语 - value: id label: en_US: Indonesian zh_Hans: 印尼语 + - value: ia + label: + en_US: Interlingua + zh_Hans: 国际语 + - value: ga + label: + en_US: Irish + zh_Hans: 爱尔兰语 - value: it label: en_US: Italian @@ -357,22 +1006,94 @@ parameters: label: en_US: Japanese zh_Hans: 日语 + - value: jw + label: + 
en_US: Javanese + zh_Hans: 爪哇语 - value: kn label: en_US: Kannada zh_Hans: 卡纳达语 + - value: kk + label: + en_US: Kazakh + zh_Hans: 哈萨克语 + - value: rw + label: + en_US: Kinyarwanda + zh_Hans: 基尼亚卢旺达语 + - value: rn + label: + en_US: Kirundi + zh_Hans: 基隆迪语 + - value: xx-klingon + label: + en_US: Klingon + zh_Hans: 克林贡语 + - value: kg + label: + en_US: Kongo + zh_Hans: 刚果语 - value: ko label: en_US: Korean zh_Hans: 韩语 + - value: kri + label: + en_US: Krio (Sierra Leone) + zh_Hans: 塞拉利昂克里奥尔语 + - value: ku + label: + en_US: Kurdish + zh_Hans: 库尔德语 + - value: ckb + label: + en_US: Kurdish (Soranî) + zh_Hans: 库尔德语(索拉尼) + - value: ky + label: + en_US: Kyrgyz + zh_Hans: 吉尔吉斯语 + - value: lo + label: + en_US: Laothian + zh_Hans: 老挝语 + - value: la + label: + en_US: Latin + zh_Hans: 拉丁语 - value: lv label: en_US: Latvian zh_Hans: 拉脱维亚语 + - value: ln + label: + en_US: Lingala + zh_Hans: 林加拉语 - value: lt label: en_US: Lithuanian zh_Hans: 立陶宛语 + - value: loz + label: + en_US: Lozi + zh_Hans: 洛齐语 + - value: lg + label: + en_US: Luganda + zh_Hans: 卢干达语 + - value: ach + label: + en_US: Luo + zh_Hans: 卢奥语 + - value: mk + label: + en_US: Macedonian + zh_Hans: 马其顿语 + - value: mg + label: + en_US: Malagasy + zh_Hans: 马尔加什语 - value: my label: en_US: Malay @@ -381,18 +1102,90 @@ parameters: label: en_US: Malayalam zh_Hans: 马拉雅拉姆语 + - value: mt + label: + en_US: Maltese + zh_Hans: 马耳他语 + - value: mv + label: + en_US: Maldives + zh_Hans: 马尔代夫语 + - value: mi + label: + en_US: Maori + zh_Hans: 毛利语 - value: mr label: en_US: Marathi zh_Hans: 马拉地语 + - value: mfe + label: + en_US: Mauritian Creole + zh_Hans: 毛里求斯克里奥尔语 + - value: mo + label: + en_US: Moldavian + zh_Hans: 摩尔达维亚语 + - value: mn + label: + en_US: Mongolian + zh_Hans: 蒙古语 + - value: sr-me + label: + en_US: Montenegrin + zh_Hans: 黑山语 + - value: ne + label: + en_US: Nepali + zh_Hans: 尼泊尔语 + - value: pcm + label: + en_US: Nigerian Pidgin + zh_Hans: 尼日利亚皮钦语 + - value: nso + label: + en_US: Northern Sotho + zh_Hans: 北索托语 - value: "no" label: en_US: Norwegian zh_Hans: 挪威语 + - value: nn + label: + en_US: Norwegian (Nynorsk) + zh_Hans: 挪威语(尼诺斯克语) + - value: oc + label: + en_US: Occitan + zh_Hans: 奥克语 + - value: or + label: + en_US: Oriya + zh_Hans: 奥里亚语 + - value: om + label: + en_US: Oromo + zh_Hans: 奥罗莫语 + - value: ps + label: + en_US: Pashto + zh_Hans: 普什图语 + - value: fa + label: + en_US: Persian + zh_Hans: 波斯语 + - value: xx-pirate + label: + en_US: Pirate + zh_Hans: 海盗语 - value: pl label: en_US: Polish zh_Hans: 波兰语 + - value: pt + label: + en_US: Portuguese + zh_Hans: 葡萄牙语 - value: pt-br label: en_US: Portuguese (Brazil) @@ -405,18 +1198,62 @@ parameters: label: en_US: Punjabi zh_Hans: 旁遮普语 + - value: qu + label: + en_US: Quechua + zh_Hans: 克丘亚语 - value: ro label: en_US: Romanian zh_Hans: 罗马尼亚语 + - value: rm + label: + en_US: Romansh + zh_Hans: 罗曼什语 + - value: nyn + label: + en_US: Runyakitara + zh_Hans: 卢尼亚基塔拉语 - value: ru label: en_US: Russian zh_Hans: 俄语 + - value: gd + label: + en_US: Scots Gaelic + zh_Hans: 苏格兰盖尔语 - value: sr label: en_US: Serbian zh_Hans: 塞尔维亚语 + - value: sh + label: + en_US: Serbo-Croatian + zh_Hans: 塞尔维亚-克罗地亚语 + - value: st + label: + en_US: Sesotho + zh_Hans: 塞索托语 + - value: tn + label: + en_US: Setswana + zh_Hans: 塞茨瓦纳语 + - value: crs + label: + en_US: Seychellois Creole + zh_Hans: 塞舌尔克里奥尔语 + - value: sn + label: + en_US: Shona + zh_Hans: 绍纳语 + - value: sd + label: + en_US: Sindhi + zh_Hans: 信德语 + - value: si + label: + en_US: Sinhalese + zh_Hans: 僧伽罗语 - value: sk label: en_US: Slovak @@ -425,18 +1262,42 @@ parameters: label: en_US: Slovenian 
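Note on the `hl` list being extended here: it is looser than the country list — values such as `zh-cn`, `pt-br`, `sr-me`, and the novelty locales `xx-bork`, `xx-hacker`, `xx-klingon`, and `xx-pirate` are Google interface-language codes rather than ISO 639-1 codes, so validation should check membership in the declared options instead of an ISO table. A sketch of such a check, assuming the YAML layout shown in this diff (the file path is illustrative):

```python
import yaml

def load_hl_values(yaml_path: str) -> set[str]:
    """Collect the declared hl option values from a tool YAML like the one in
    this diff, so user input can be validated against the actual whitelist."""
    with open(yaml_path, encoding="utf-8") as f:
        spec = yaml.safe_load(f)
    for param in spec.get("parameters", []):
        if param.get("name") == "hl":
            return {opt["value"] for opt in param.get("options", [])}
    return set()

# Usage sketch: "xx-pirate" passes this check even though it is not ISO 639-1.
# allowed = load_hl_values("google.yaml"); assert "xx-pirate" in allowed
```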
zh_Hans: 斯洛文尼亚语 + - value: so + label: + en_US: Somali + zh_Hans: 索马里语 - value: es label: en_US: Spanish zh_Hans: 西班牙语 + - value: es-419 + label: + en_US: Spanish (Latin American) + zh_Hans: 西班牙语(拉丁美洲) + - value: su + label: + en_US: Sundanese + zh_Hans: 巽他语 + - value: sw + label: + en_US: Swahili + zh_Hans: 斯瓦希里语 - value: sv label: en_US: Swedish zh_Hans: 瑞典语 + - value: tg + label: + en_US: Tajik + zh_Hans: 塔吉克语 - value: ta label: en_US: Tamil zh_Hans: 泰米尔语 + - value: tt + label: + en_US: Tatar + zh_Hans: 鞑靼语 - value: te label: en_US: Telugu @@ -445,18 +1306,82 @@ parameters: label: en_US: Thai zh_Hans: 泰语 + - value: ti + label: + en_US: Tigrinya + zh_Hans: 提格利尼亚语 + - value: to + label: + en_US: Tonga + zh_Hans: 汤加语 + - value: lua + label: + en_US: Tshiluba + zh_Hans: 卢巴语 + - value: tum + label: + en_US: Tumbuka + zh_Hans: 图布卡语 - value: tr label: en_US: Turkish zh_Hans: 土耳其语 + - value: tk + label: + en_US: Turkmen + zh_Hans: 土库曼语 + - value: tw + label: + en_US: Twi + zh_Hans: 契维语 + - value: ug + label: + en_US: Uighur + zh_Hans: 维吾尔语 - value: uk label: en_US: Ukrainian zh_Hans: 乌克兰语 + - value: ur + label: + en_US: Urdu + zh_Hans: 乌尔都语 + - value: uz + label: + en_US: Uzbek + zh_Hans: 乌兹别克语 + - value: vu + label: + en_US: Vanuatu + zh_Hans: 瓦努阿图语 - value: vi label: en_US: Vietnamese zh_Hans: 越南语 + - value: cy + label: + en_US: Welsh + zh_Hans: 威尔士语 + - value: wo + label: + en_US: Wolof + zh_Hans: 沃洛夫语 + - value: xh + label: + en_US: Xhosa + zh_Hans: 科萨语 + - value: yi + label: + en_US: Yiddish + zh_Hans: 意第绪语 + - value: yo + label: + en_US: Yoruba + zh_Hans: 约鲁巴语 + - value: zu + label: + en_US: Zulu + zh_Hans: 祖鲁语 - name: is_remote type: select label: diff --git a/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml b/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml index cbb0edf982..ff34af34cc 100644 --- a/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml +++ b/api/core/tools/provider/builtin/searchapi/tools/google_news.yaml @@ -65,206 +65,1206 @@ parameters: form: form default: US options: + - value: AF + label: + en_US: Afghanistan + zh_Hans: 阿富汗 + pt_BR: Afeganistão + - value: AL + label: + en_US: Albania + zh_Hans: 阿尔巴尼亚 + pt_BR: Albânia + - value: DZ + label: + en_US: Algeria + zh_Hans: 阿尔及利亚 + pt_BR: Argélia + - value: AS + label: + en_US: American Samoa + zh_Hans: 美属萨摩亚 + pt_BR: Samoa Americana + - value: AD + label: + en_US: Andorra + zh_Hans: 安道尔 + pt_BR: Andorra + - value: AO + label: + en_US: Angola + zh_Hans: 安哥拉 + pt_BR: Angola + - value: AI + label: + en_US: Anguilla + zh_Hans: 安圭拉 + pt_BR: Anguilla + - value: AQ + label: + en_US: Antarctica + zh_Hans: 南极洲 + pt_BR: Antártica + - value: AG + label: + en_US: Antigua and Barbuda + zh_Hans: 安提瓜和巴布达 + pt_BR: Antígua e Barbuda - value: AR label: en_US: Argentina zh_Hans: 阿根廷 pt_BR: Argentina + - value: AM + label: + en_US: Armenia + zh_Hans: 亚美尼亚 + pt_BR: Armênia + - value: AW + label: + en_US: Aruba + zh_Hans: 阿鲁巴 + pt_BR: Aruba - value: AU label: en_US: Australia zh_Hans: 澳大利亚 - pt_BR: Australia + pt_BR: Austrália - value: AT label: en_US: Austria zh_Hans: 奥地利 - pt_BR: Austria + pt_BR: Áustria + - value: AZ + label: + en_US: Azerbaijan + zh_Hans: 阿塞拜疆 + pt_BR: Azerbaijão + - value: BS + label: + en_US: Bahamas + zh_Hans: 巴哈马 + pt_BR: Bahamas + - value: BH + label: + en_US: Bahrain + zh_Hans: 巴林 + pt_BR: Bahrein + - value: BD + label: + en_US: Bangladesh + zh_Hans: 孟加拉国 + pt_BR: Bangladesh + - value: BB + label: + en_US: Barbados + zh_Hans: 巴巴多斯 + pt_BR: Barbados + - value: BY + label: + 
en_US: Belarus + zh_Hans: 白俄罗斯 + pt_BR: Bielorrússia - value: BE label: en_US: Belgium zh_Hans: 比利时 - pt_BR: Belgium + pt_BR: Bélgica + - value: BZ + label: + en_US: Belize + zh_Hans: 伯利兹 + pt_BR: Belize + - value: BJ + label: + en_US: Benin + zh_Hans: 贝宁 + pt_BR: Benim + - value: BM + label: + en_US: Bermuda + zh_Hans: 百慕大 + pt_BR: Bermudas + - value: BT + label: + en_US: Bhutan + zh_Hans: 不丹 + pt_BR: Butão + - value: BO + label: + en_US: Bolivia + zh_Hans: 玻利维亚 + pt_BR: Bolívia + - value: BA + label: + en_US: Bosnia and Herzegovina + zh_Hans: 波斯尼亚和黑塞哥维那 + pt_BR: Bósnia e Herzegovina + - value: BW + label: + en_US: Botswana + zh_Hans: 博茨瓦纳 + pt_BR: Botsuana + - value: BV + label: + en_US: Bouvet Island + zh_Hans: 布韦岛 + pt_BR: Ilha Bouvet - value: BR label: en_US: Brazil zh_Hans: 巴西 - pt_BR: Brazil + pt_BR: Brasil + - value: IO + label: + en_US: British Indian Ocean Territory + zh_Hans: 英属印度洋领地 + pt_BR: Território Britânico do Oceano Índico + - value: BN + label: + en_US: Brunei Darussalam + zh_Hans: 文莱 + pt_BR: Brunei Darussalam + - value: BG + label: + en_US: Bulgaria + zh_Hans: 保加利亚 + pt_BR: Bulgária + - value: BF + label: + en_US: Burkina Faso + zh_Hans: 布基纳法索 + pt_BR: Burkina Faso + - value: BI + label: + en_US: Burundi + zh_Hans: 布隆迪 + pt_BR: Burundi + - value: KH + label: + en_US: Cambodia + zh_Hans: 柬埔寨 + pt_BR: Camboja + - value: CM + label: + en_US: Cameroon + zh_Hans: 喀麦隆 + pt_BR: Camarões - value: CA label: en_US: Canada zh_Hans: 加拿大 - pt_BR: Canada + pt_BR: Canadá + - value: CV + label: + en_US: Cape Verde + zh_Hans: 佛得角 + pt_BR: Cabo Verde + - value: KY + label: + en_US: Cayman Islands + zh_Hans: 开曼群岛 + pt_BR: Ilhas Cayman + - value: CF + label: + en_US: Central African Republic + zh_Hans: 中非共和国 + pt_BR: República Centro-Africana + - value: TD + label: + en_US: Chad + zh_Hans: 乍得 + pt_BR: Chade - value: CL label: en_US: Chile zh_Hans: 智利 pt_BR: Chile - - value: CO - label: - en_US: Colombia - zh_Hans: 哥伦比亚 - pt_BR: Colombia - value: CN label: en_US: China zh_Hans: 中国 pt_BR: China + - value: CX + label: + en_US: Christmas Island + zh_Hans: 圣诞岛 + pt_BR: Ilha do Natal + - value: CC + label: + en_US: Cocos (Keeling) Islands + zh_Hans: 科科斯(基林)群岛 + pt_BR: Ilhas Cocos (Keeling) + - value: CO + label: + en_US: Colombia + zh_Hans: 哥伦比亚 + pt_BR: Colômbia + - value: KM + label: + en_US: Comoros + zh_Hans: 科摩罗 + pt_BR: Comores + - value: CG + label: + en_US: Congo + zh_Hans: 刚果 + pt_BR: Congo + - value: CD + label: + en_US: Congo, the Democratic Republic of the + zh_Hans: 刚果民主共和国 + pt_BR: Congo, República Democrática do + - value: CK + label: + en_US: Cook Islands + zh_Hans: 库克群岛 + pt_BR: Ilhas Cook + - value: CR + label: + en_US: Costa Rica + zh_Hans: 哥斯达黎加 + pt_BR: Costa Rica + - value: CI + label: + en_US: Cote D'ivoire + zh_Hans: 科特迪瓦 + pt_BR: Costa do Marfim + - value: HR + label: + en_US: Croatia + zh_Hans: 克罗地亚 + pt_BR: Croácia + - value: CU + label: + en_US: Cuba + zh_Hans: 古巴 + pt_BR: Cuba + - value: CY + label: + en_US: Cyprus + zh_Hans: 塞浦路斯 + pt_BR: Chipre - value: CZ label: en_US: Czech Republic zh_Hans: 捷克共和国 - pt_BR: Czech Republic + pt_BR: República Tcheca - value: DK label: en_US: Denmark zh_Hans: 丹麦 - pt_BR: Denmark + pt_BR: Dinamarca + - value: DJ + label: + en_US: Djibouti + zh_Hans: 吉布提 + pt_BR: Djibuti + - value: DM + label: + en_US: Dominica + zh_Hans: 多米尼克 + pt_BR: Dominica + - value: DO + label: + en_US: Dominican Republic + zh_Hans: 多米尼加共和国 + pt_BR: República Dominicana + - value: EC + label: + en_US: Ecuador + zh_Hans: 厄瓜多尔 + pt_BR: Equador + - value: EG + 
label: + en_US: Egypt + zh_Hans: 埃及 + pt_BR: Egito + - value: SV + label: + en_US: El Salvador + zh_Hans: 萨尔瓦多 + pt_BR: El Salvador + - value: GQ + label: + en_US: Equatorial Guinea + zh_Hans: 赤道几内亚 + pt_BR: Guiné Equatorial + - value: ER + label: + en_US: Eritrea + zh_Hans: 厄立特里亚 + pt_BR: Eritreia + - value: EE + label: + en_US: Estonia + zh_Hans: 爱沙尼亚 + pt_BR: Estônia + - value: ET + label: + en_US: Ethiopia + zh_Hans: 埃塞俄比亚 + pt_BR: Etiópia + - value: FK + label: + en_US: Falkland Islands (Malvinas) + zh_Hans: 福克兰群岛(马尔维纳斯) + pt_BR: Ilhas Falkland (Malvinas) + - value: FO + label: + en_US: Faroe Islands + zh_Hans: 法罗群岛 + pt_BR: Ilhas Faroe + - value: FJ + label: + en_US: Fiji + zh_Hans: 斐济 + pt_BR: Fiji - value: FI label: en_US: Finland zh_Hans: 芬兰 - pt_BR: Finland + pt_BR: Finlândia - value: FR label: en_US: France zh_Hans: 法国 - pt_BR: France + pt_BR: França + - value: GF + label: + en_US: French Guiana + zh_Hans: 法属圭亚那 + pt_BR: Guiana Francesa + - value: PF + label: + en_US: French Polynesia + zh_Hans: 法属波利尼西亚 + pt_BR: Polinésia Francesa + - value: TF + label: + en_US: French Southern Territories + zh_Hans: 法属南部领地 + pt_BR: Territórios Franceses do Sul + - value: GA + label: + en_US: Gabon + zh_Hans: 加蓬 + pt_BR: Gabão + - value: GM + label: + en_US: Gambia + zh_Hans: 冈比亚 + pt_BR: Gâmbia + - value: GE + label: + en_US: Georgia + zh_Hans: 格鲁吉亚 + pt_BR: Geórgia - value: DE label: en_US: Germany zh_Hans: 德国 - pt_BR: Germany + pt_BR: Alemanha + - value: GH + label: + en_US: Ghana + zh_Hans: 加纳 + pt_BR: Gana + - value: GI + label: + en_US: Gibraltar + zh_Hans: 直布罗陀 + pt_BR: Gibraltar + - value: GR + label: + en_US: Greece + zh_Hans: 希腊 + pt_BR: Grécia + - value: GL + label: + en_US: Greenland + zh_Hans: 格陵兰 + pt_BR: Groenlândia + - value: GD + label: + en_US: Grenada + zh_Hans: 格林纳达 + pt_BR: Granada + - value: GP + label: + en_US: Guadeloupe + zh_Hans: 瓜德罗普 + pt_BR: Guadalupe + - value: GU + label: + en_US: Guam + zh_Hans: 关岛 + pt_BR: Guam + - value: GT + label: + en_US: Guatemala + zh_Hans: 危地马拉 + pt_BR: Guatemala + - value: GN + label: + en_US: Guinea + zh_Hans: 几内亚 + pt_BR: Guiné + - value: GW + label: + en_US: Guinea-Bissau + zh_Hans: 几内亚比绍 + pt_BR: Guiné-Bissau + - value: GY + label: + en_US: Guyana + zh_Hans: 圭亚那 + pt_BR: Guiana + - value: HT + label: + en_US: Haiti + zh_Hans: 海地 + pt_BR: Haiti + - value: HM + label: + en_US: Heard Island and McDonald Islands + zh_Hans: 赫德岛和麦克唐纳群岛 + pt_BR: Ilha Heard e Ilhas McDonald + - value: VA + label: + en_US: Holy See (Vatican City State) + zh_Hans: 教廷(梵蒂冈城国) + pt_BR: Santa Sé (Estado da Cidade do Vaticano) + - value: HN + label: + en_US: Honduras + zh_Hans: 洪都拉斯 + pt_BR: Honduras - value: HK label: en_US: Hong Kong zh_Hans: 香港 pt_BR: Hong Kong + - value: HU + label: + en_US: Hungary + zh_Hans: 匈牙利 + pt_BR: Hungria + - value: IS + label: + en_US: Iceland + zh_Hans: 冰岛 + pt_BR: Islândia - value: IN label: en_US: India zh_Hans: 印度 - pt_BR: India + pt_BR: Índia - value: ID label: en_US: Indonesia zh_Hans: 印度尼西亚 - pt_BR: Indonesia + pt_BR: Indonésia + - value: IR + label: + en_US: Iran, Islamic Republic of + zh_Hans: 伊朗 + pt_BR: Irã + - value: IQ + label: + en_US: Iraq + zh_Hans: 伊拉克 + pt_BR: Iraque + - value: IE + label: + en_US: Ireland + zh_Hans: 爱尔兰 + pt_BR: Irlanda + - value: IL + label: + en_US: Israel + zh_Hans: 以色列 + pt_BR: Israel - value: IT label: en_US: Italy zh_Hans: 意大利 - pt_BR: Italy + pt_BR: Itália + - value: JM + label: + en_US: Jamaica + zh_Hans: 牙买加 + pt_BR: Jamaica - value: JP label: en_US: Japan zh_Hans: 日本 - pt_BR: Japan + pt_BR: 
Japão + - value: JO + label: + en_US: Jordan + zh_Hans: 约旦 + pt_BR: Jordânia + - value: KZ + label: + en_US: Kazakhstan + zh_Hans: 哈萨克斯坦 + pt_BR: Cazaquistão + - value: KE + label: + en_US: Kenya + zh_Hans: 肯尼亚 + pt_BR: Quênia + - value: KI + label: + en_US: Kiribati + zh_Hans: 基里巴斯 + pt_BR: Kiribati + - value: KP + label: + en_US: Korea, Democratic People's Republic of + zh_Hans: 朝鲜 + pt_BR: Coreia, República Democrática Popular da - value: KR label: - en_US: Korea + en_US: Korea, Republic of zh_Hans: 韩国 - pt_BR: Korea + pt_BR: Coreia, República da + - value: KW + label: + en_US: Kuwait + zh_Hans: 科威特 + pt_BR: Kuwait + - value: KG + label: + en_US: Kyrgyzstan + zh_Hans: 吉尔吉斯斯坦 + pt_BR: Quirguistão + - value: LA + label: + en_US: Lao People's Democratic Republic + zh_Hans: 老挝 + pt_BR: República Democrática Popular do Laos + - value: LV + label: + en_US: Latvia + zh_Hans: 拉脱维亚 + pt_BR: Letônia + - value: LB + label: + en_US: Lebanon + zh_Hans: 黎巴嫩 + pt_BR: Líbano + - value: LS + label: + en_US: Lesotho + zh_Hans: 莱索托 + pt_BR: Lesoto + - value: LR + label: + en_US: Liberia + zh_Hans: 利比里亚 + pt_BR: Libéria + - value: LY + label: + en_US: Libyan Arab Jamahiriya + zh_Hans: 利比亚 + pt_BR: Líbia + - value: LI + label: + en_US: Liechtenstein + zh_Hans: 列支敦士登 + pt_BR: Liechtenstein + - value: LT + label: + en_US: Lithuania + zh_Hans: 立陶宛 + pt_BR: Lituânia + - value: LU + label: + en_US: Luxembourg + zh_Hans: 卢森堡 + pt_BR: Luxemburgo + - value: MO + label: + en_US: Macao + zh_Hans: 澳门 + pt_BR: Macau + - value: MK + label: + en_US: Macedonia, the Former Yugosalv Republic of + zh_Hans: 前南斯拉夫马其顿共和国 + pt_BR: Macedônia, Ex-República Iugoslava da + - value: MG + label: + en_US: Madagascar + zh_Hans: 马达加斯加 + pt_BR: Madagascar + - value: MW + label: + en_US: Malawi + zh_Hans: 马拉维 + pt_BR: Malaui - value: MY label: en_US: Malaysia zh_Hans: 马来西亚 - pt_BR: Malaysia + pt_BR: Malásia + - value: MV + label: + en_US: Maldives + zh_Hans: 马尔代夫 + pt_BR: Maldivas + - value: ML + label: + en_US: Mali + zh_Hans: 马里 + pt_BR: Mali + - value: MT + label: + en_US: Malta + zh_Hans: 马耳他 + pt_BR: Malta + - value: MH + label: + en_US: Marshall Islands + zh_Hans: 马绍尔群岛 + pt_BR: Ilhas Marshall + - value: MQ + label: + en_US: Martinique + zh_Hans: 马提尼克 + pt_BR: Martinica + - value: MR + label: + en_US: Mauritania + zh_Hans: 毛里塔尼亚 + pt_BR: Mauritânia + - value: MU + label: + en_US: Mauritius + zh_Hans: 毛里求斯 + pt_BR: Maurício + - value: YT + label: + en_US: Mayotte + zh_Hans: 马约特 + pt_BR: Mayotte - value: MX label: en_US: Mexico zh_Hans: 墨西哥 - pt_BR: Mexico + pt_BR: México + - value: FM + label: + en_US: Micronesia, Federated States of + zh_Hans: 密克罗尼西亚联邦 + pt_BR: Micronésia, Estados Federados da + - value: MD + label: + en_US: Moldova, Republic of + zh_Hans: 摩尔多瓦共和国 + pt_BR: Moldávia, República da + - value: MC + label: + en_US: Monaco + zh_Hans: 摩纳哥 + pt_BR: Mônaco + - value: MN + label: + en_US: Mongolia + zh_Hans: 蒙古 + pt_BR: Mongólia + - value: MS + label: + en_US: Montserrat + zh_Hans: 蒙特塞拉特 + pt_BR: Montserrat + - value: MA + label: + en_US: Morocco + zh_Hans: 摩洛哥 + pt_BR: Marrocos + - value: MZ + label: + en_US: Mozambique + zh_Hans: 莫桑比克 + pt_BR: Moçambique + - value: MM + label: + en_US: Myanmar + zh_Hans: 缅甸 + pt_BR: Mianmar + - value: NA + label: + en_US: Namibia + zh_Hans: 纳米比亚 + pt_BR: Namíbia + - value: NR + label: + en_US: Nauru + zh_Hans: 瑙鲁 + pt_BR: Nauru + - value: NP + label: + en_US: Nepal + zh_Hans: 尼泊尔 + pt_BR: Nepal - value: NL label: en_US: Netherlands zh_Hans: 荷兰 - pt_BR: Netherlands + pt_BR: Países Baixos + - 
value: AN + label: + en_US: Netherlands Antilles + zh_Hans: 荷属安的列斯 + pt_BR: Antilhas Holandesas + - value: NC + label: + en_US: New Caledonia + zh_Hans: 新喀里多尼亚 + pt_BR: Nova Caledônia - value: NZ label: en_US: New Zealand zh_Hans: 新西兰 - pt_BR: New Zealand - - value: 'NO' + pt_BR: Nova Zelândia + - value: NI + label: + en_US: Nicaragua + zh_Hans: 尼加拉瓜 + pt_BR: Nicarágua + - value: NE + label: + en_US: Niger + zh_Hans: 尼日尔 + pt_BR: Níger + - value: NG + label: + en_US: Nigeria + zh_Hans: 尼日利亚 + pt_BR: Nigéria + - value: NU + label: + en_US: Niue + zh_Hans: 纽埃 + pt_BR: Niue + - value: NF + label: + en_US: Norfolk Island + zh_Hans: 诺福克岛 + pt_BR: Ilha Norfolk + - value: MP + label: + en_US: Northern Mariana Islands + zh_Hans: 北马里亚纳群岛 + pt_BR: Ilhas Marianas do Norte + - value: "NO" label: en_US: Norway zh_Hans: 挪威 - pt_BR: Norway + pt_BR: Noruega + - value: OM + label: + en_US: Oman + zh_Hans: 阿曼 + pt_BR: Omã + - value: PK + label: + en_US: Pakistan + zh_Hans: 巴基斯坦 + pt_BR: Paquistão + - value: PW + label: + en_US: Palau + zh_Hans: 帕劳 + pt_BR: Palau + - value: PS + label: + en_US: Palestinian Territory, Occupied + zh_Hans: 巴勒斯坦领土 + pt_BR: Palestina, Território Ocupado + - value: PA + label: + en_US: Panama + zh_Hans: 巴拿马 + pt_BR: Panamá + - value: PG + label: + en_US: Papua New Guinea + zh_Hans: 巴布亚新几内亚 + pt_BR: Papua Nova Guiné + - value: PY + label: + en_US: Paraguay + zh_Hans: 巴拉圭 + pt_BR: Paraguai + - value: PE + label: + en_US: Peru + zh_Hans: 秘鲁 + pt_BR: Peru - value: PH label: en_US: Philippines zh_Hans: 菲律宾 - pt_BR: Philippines + pt_BR: Filipinas + - value: PN + label: + en_US: Pitcairn + zh_Hans: 皮特凯恩岛 + pt_BR: Pitcairn - value: PL label: en_US: Poland zh_Hans: 波兰 - pt_BR: Poland + pt_BR: Polônia - value: PT label: en_US: Portugal zh_Hans: 葡萄牙 pt_BR: Portugal + - value: PR + label: + en_US: Puerto Rico + zh_Hans: 波多黎各 + pt_BR: Porto Rico + - value: QA + label: + en_US: Qatar + zh_Hans: 卡塔尔 + pt_BR: Catar + - value: RE + label: + en_US: Reunion + zh_Hans: 留尼旺 + pt_BR: Reunião + - value: RO + label: + en_US: Romania + zh_Hans: 罗马尼亚 + pt_BR: Romênia - value: RU label: - en_US: Russia - zh_Hans: 俄罗斯 - pt_BR: Russia + en_US: Russian Federation + zh_Hans: 俄罗斯联邦 + pt_BR: Rússia + - value: RW + label: + en_US: Rwanda + zh_Hans: 卢旺达 + pt_BR: Ruanda + - value: SH + label: + en_US: Saint Helena + zh_Hans: 圣赫勒拿 + pt_BR: Santa Helena + - value: KN + label: + en_US: Saint Kitts and Nevis + zh_Hans: 圣基茨和尼维斯 + pt_BR: São Cristóvão e Nevis + - value: LC + label: + en_US: Saint Lucia + zh_Hans: 圣卢西亚 + pt_BR: Santa Lúcia + - value: PM + label: + en_US: Saint Pierre and Miquelon + zh_Hans: 圣皮埃尔和密克隆 + pt_BR: São Pedro e Miquelon + - value: VC + label: + en_US: Saint Vincent and the Grenadines + zh_Hans: 圣文森特和格林纳丁斯 + pt_BR: São Vicente e Granadinas + - value: WS + label: + en_US: Samoa + zh_Hans: 萨摩亚 + pt_BR: Samoa + - value: SM + label: + en_US: San Marino + zh_Hans: 圣马力诺 + pt_BR: San Marino + - value: ST + label: + en_US: Sao Tome and Principe + zh_Hans: 圣多美和普林西比 + pt_BR: São Tomé e Príncipe - value: SA label: en_US: Saudi Arabia zh_Hans: 沙特阿拉伯 - pt_BR: Saudi Arabia + pt_BR: Arábia Saudita + - value: SN + label: + en_US: Senegal + zh_Hans: 塞内加尔 + pt_BR: Senegal + - value: RS + label: + en_US: Serbia and Montenegro + zh_Hans: 塞尔维亚和黑山 + pt_BR: Sérvia e Montenegro + - value: SC + label: + en_US: Seychelles + zh_Hans: 塞舌尔 + pt_BR: Seicheles + - value: SL + label: + en_US: Sierra Leone + zh_Hans: 塞拉利昂 + pt_BR: Serra Leoa - value: SG label: en_US: Singapore zh_Hans: 新加坡 - pt_BR: Singapore + pt_BR: Singapura + - 
value: SK + label: + en_US: Slovakia + zh_Hans: 斯洛伐克 + pt_BR: Eslováquia + - value: SI + label: + en_US: Slovenia + zh_Hans: 斯洛文尼亚 + pt_BR: Eslovênia + - value: SB + label: + en_US: Solomon Islands + zh_Hans: 所罗门群岛 + pt_BR: Ilhas Salomão + - value: SO + label: + en_US: Somalia + zh_Hans: 索马里 + pt_BR: Somália - value: ZA label: en_US: South Africa zh_Hans: 南非 - pt_BR: South Africa + pt_BR: África do Sul + - value: GS + label: + en_US: South Georgia and the South Sandwich Islands + zh_Hans: 南乔治亚和南桑威奇群岛 + pt_BR: Geórgia do Sul e Ilhas Sandwich do Sul - value: ES label: en_US: Spain zh_Hans: 西班牙 - pt_BR: Spain + pt_BR: Espanha + - value: LK + label: + en_US: Sri Lanka + zh_Hans: 斯里兰卡 + pt_BR: Sri Lanka + - value: SD + label: + en_US: Sudan + zh_Hans: 苏丹 + pt_BR: Sudão + - value: SR + label: + en_US: Suriname + zh_Hans: 苏里南 + pt_BR: Suriname + - value: SJ + label: + en_US: Svalbard and Jan Mayen + zh_Hans: 斯瓦尔巴特和扬马延岛 + pt_BR: Svalbard e Jan Mayen + - value: SZ + label: + en_US: Swaziland + zh_Hans: 斯威士兰 + pt_BR: Essuatíni - value: SE label: en_US: Sweden zh_Hans: 瑞典 - pt_BR: Sweden + pt_BR: Suécia - value: CH label: en_US: Switzerland zh_Hans: 瑞士 - pt_BR: Switzerland + pt_BR: Suíça + - value: SY + label: + en_US: Syrian Arab Republic + zh_Hans: 叙利亚 + pt_BR: Síria - value: TW label: - en_US: Taiwan + en_US: Taiwan, Province of China zh_Hans: 台湾 pt_BR: Taiwan + - value: TJ + label: + en_US: Tajikistan + zh_Hans: 塔吉克斯坦 + pt_BR: Tajiquistão + - value: TZ + label: + en_US: Tanzania, United Republic of + zh_Hans: 坦桑尼亚联合共和国 + pt_BR: Tanzânia - value: TH label: en_US: Thailand zh_Hans: 泰国 - pt_BR: Thailand + pt_BR: Tailândia + - value: TL + label: + en_US: Timor-Leste + zh_Hans: 东帝汶 + pt_BR: Timor-Leste + - value: TG + label: + en_US: Togo + zh_Hans: 多哥 + pt_BR: Togo + - value: TK + label: + en_US: Tokelau + zh_Hans: 托克劳 + pt_BR: Toquelau + - value: TO + label: + en_US: Tonga + zh_Hans: 汤加 + pt_BR: Tonga + - value: TT + label: + en_US: Trinidad and Tobago + zh_Hans: 特立尼达和多巴哥 + pt_BR: Trindade e Tobago + - value: TN + label: + en_US: Tunisia + zh_Hans: 突尼斯 + pt_BR: Tunísia - value: TR label: en_US: Turkey zh_Hans: 土耳其 - pt_BR: Turkey + pt_BR: Turquia + - value: TM + label: + en_US: Turkmenistan + zh_Hans: 土库曼斯坦 + pt_BR: Turcomenistão + - value: TC + label: + en_US: Turks and Caicos Islands + zh_Hans: 特克斯和凯科斯群岛 + pt_BR: Ilhas Turks e Caicos + - value: TV + label: + en_US: Tuvalu + zh_Hans: 图瓦卢 + pt_BR: Tuvalu + - value: UG + label: + en_US: Uganda + zh_Hans: 乌干达 + pt_BR: Uganda + - value: UA + label: + en_US: Ukraine + zh_Hans: 乌克兰 + pt_BR: Ucrânia + - value: AE + label: + en_US: United Arab Emirates + zh_Hans: 阿联酋 + pt_BR: Emirados Árabes Unidos + - value: UK + label: + en_US: United Kingdom + zh_Hans: 英国 + pt_BR: Reino Unido - value: GB label: en_US: United Kingdom zh_Hans: 英国 - pt_BR: United Kingdom + pt_BR: Reino Unido - value: US label: en_US: United States zh_Hans: 美国 - pt_BR: United States + pt_BR: Estados Unidos + - value: UM + label: + en_US: United States Minor Outlying Islands + zh_Hans: 美国本土外小岛屿 + pt_BR: Ilhas Menores Distantes dos Estados Unidos + - value: UY + label: + en_US: Uruguay + zh_Hans: 乌拉圭 + pt_BR: Uruguai + - value: UZ + label: + en_US: Uzbekistan + zh_Hans: 乌兹别克斯坦 + pt_BR: Uzbequistão + - value: VU + label: + en_US: Vanuatu + zh_Hans: 瓦努阿图 + pt_BR: Vanuatu + - value: VE + label: + en_US: Venezuela + zh_Hans: 委内瑞拉 + pt_BR: Venezuela + - value: VN + label: + en_US: Viet Nam + zh_Hans: 越南 + pt_BR: Vietnã + - value: VG + label: + en_US: Virgin Islands, British + zh_Hans: 英属维尔京群岛 + 
pt_BR: Ilhas Virgens Britânicas + - value: VI + label: + en_US: Virgin Islands, U.S. + zh_Hans: 美属维尔京群岛 + pt_BR: Ilhas Virgens dos EUA + - value: WF + label: + en_US: Wallis and Futuna + zh_Hans: 瓦利斯和富图纳群岛 + pt_BR: Wallis e Futuna + - value: EH + label: + en_US: Western Sahara + zh_Hans: 西撒哈拉 + pt_BR: Saara Ocidental + - value: YE + label: + en_US: Yemen + zh_Hans: 也门 + pt_BR: Iémen + - value: ZM + label: + en_US: Zambia + zh_Hans: 赞比亚 + pt_BR: Zâmbia + - value: ZW + label: + en_US: Zimbabwe + zh_Hans: 津巴布韦 + pt_BR: Zimbábue - name: hl type: select label: @@ -277,18 +1277,94 @@ parameters: default: en form: form options: + - value: af + label: + en_US: Afrikaans + zh_Hans: 南非语 + - value: ak + label: + en_US: Akan + zh_Hans: 阿坎语 + - value: sq + label: + en_US: Albanian + zh_Hans: 阿尔巴尼亚语 + - value: ws + label: + en_US: Samoa + zh_Hans: 萨摩亚语 + - value: am + label: + en_US: Amharic + zh_Hans: 阿姆哈拉语 - value: ar label: en_US: Arabic zh_Hans: 阿拉伯语 + - value: hy + label: + en_US: Armenian + zh_Hans: 亚美尼亚语 + - value: az + label: + en_US: Azerbaijani + zh_Hans: 阿塞拜疆语 + - value: eu + label: + en_US: Basque + zh_Hans: 巴斯克语 + - value: be + label: + en_US: Belarusian + zh_Hans: 白俄罗斯语 + - value: bem + label: + en_US: Bemba + zh_Hans: 班巴语 + - value: bn + label: + en_US: Bengali + zh_Hans: 孟加拉语 + - value: bh + label: + en_US: Bihari + zh_Hans: 比哈尔语 + - value: xx-bork + label: + en_US: Bork, bork, bork! + zh_Hans: 博克语 + - value: bs + label: + en_US: Bosnian + zh_Hans: 波斯尼亚语 + - value: br + label: + en_US: Breton + zh_Hans: 布列塔尼语 - value: bg label: en_US: Bulgarian zh_Hans: 保加利亚语 + - value: bt + label: + en_US: Bhutanese + zh_Hans: 不丹语 + - value: km + label: + en_US: Cambodian + zh_Hans: 高棉语 - value: ca label: en_US: Catalan zh_Hans: 加泰罗尼亚语 + - value: chr + label: + en_US: Cherokee + zh_Hans: 切罗基语 + - value: ny + label: + en_US: Chichewa + zh_Hans: 齐切瓦语 - value: zh-cn label: en_US: Chinese (Simplified) @@ -297,6 +1373,14 @@ parameters: label: en_US: Chinese (Traditional) zh_Hans: 中文(繁体) + - value: co + label: + en_US: Corsican + zh_Hans: 科西嘉语 + - value: hr + label: + en_US: Croatian + zh_Hans: 克罗地亚语 - value: cs label: en_US: Czech @@ -309,14 +1393,34 @@ parameters: label: en_US: Dutch zh_Hans: 荷兰语 + - value: xx-elmer + label: + en_US: Elmer Fudd + zh_Hans: 艾尔默福德语 - value: en label: en_US: English zh_Hans: 英语 + - value: eo + label: + en_US: Esperanto + zh_Hans: 世界语 - value: et label: en_US: Estonian zh_Hans: 爱沙尼亚语 + - value: ee + label: + en_US: Ewe + zh_Hans: 埃维语 + - value: fo + label: + en_US: Faroese + zh_Hans: 法罗语 + - value: tl + label: + en_US: Filipino + zh_Hans: 菲律宾语 - value: fi label: en_US: Finnish @@ -325,6 +1429,22 @@ parameters: label: en_US: French zh_Hans: 法语 + - value: fy + label: + en_US: Frisian + zh_Hans: 弗里西亚语 + - value: gaa + label: + en_US: Ga + zh_Hans: 加语 + - value: gl + label: + en_US: Galician + zh_Hans: 加利西亚语 + - value: ka + label: + en_US: Georgian + zh_Hans: 格鲁吉亚语 - value: de label: en_US: German @@ -333,6 +1453,34 @@ parameters: label: en_US: Greek zh_Hans: 希腊语 + - value: kl + label: + en_US: Greenlandic + zh_Hans: 格陵兰语 + - value: gn + label: + en_US: Guarani + zh_Hans: 瓜拉尼语 + - value: gu + label: + en_US: Gujarati + zh_Hans: 古吉拉特语 + - value: xx-hacker + label: + en_US: Hacker + zh_Hans: 黑客语 + - value: ht + label: + en_US: Haitian Creole + zh_Hans: 海地克里奥尔语 + - value: ha + label: + en_US: Hausa + zh_Hans: 豪萨语 + - value: haw + label: + en_US: Hawaiian + zh_Hans: 夏威夷语 - value: iw label: en_US: Hebrew @@ -345,10 +1493,26 @@ parameters: label: en_US: Hungarian zh_Hans: 匈牙利语 + - value: 
is + label: + en_US: Icelandic + zh_Hans: 冰岛语 + - value: ig + label: + en_US: Igbo + zh_Hans: 伊博语 - value: id label: en_US: Indonesian zh_Hans: 印尼语 + - value: ia + label: + en_US: Interlingua + zh_Hans: 国际语 + - value: ga + label: + en_US: Irish + zh_Hans: 爱尔兰语 - value: it label: en_US: Italian @@ -357,22 +1521,94 @@ parameters: label: en_US: Japanese zh_Hans: 日语 + - value: jw + label: + en_US: Javanese + zh_Hans: 爪哇语 - value: kn label: en_US: Kannada zh_Hans: 卡纳达语 + - value: kk + label: + en_US: Kazakh + zh_Hans: 哈萨克语 + - value: rw + label: + en_US: Kinyarwanda + zh_Hans: 基尼亚卢旺达语 + - value: rn + label: + en_US: Kirundi + zh_Hans: 基隆迪语 + - value: xx-klingon + label: + en_US: Klingon + zh_Hans: 克林贡语 + - value: kg + label: + en_US: Kongo + zh_Hans: 刚果语 - value: ko label: en_US: Korean zh_Hans: 韩语 + - value: kri + label: + en_US: Krio (Sierra Leone) + zh_Hans: 塞拉利昂克里奥尔语 + - value: ku + label: + en_US: Kurdish + zh_Hans: 库尔德语 + - value: ckb + label: + en_US: Kurdish (Soranî) + zh_Hans: 库尔德语(索拉尼) + - value: ky + label: + en_US: Kyrgyz + zh_Hans: 吉尔吉斯语 + - value: lo + label: + en_US: Laothian + zh_Hans: 老挝语 + - value: la + label: + en_US: Latin + zh_Hans: 拉丁语 - value: lv label: en_US: Latvian zh_Hans: 拉脱维亚语 + - value: ln + label: + en_US: Lingala + zh_Hans: 林加拉语 - value: lt label: en_US: Lithuanian zh_Hans: 立陶宛语 + - value: loz + label: + en_US: Lozi + zh_Hans: 洛齐语 + - value: lg + label: + en_US: Luganda + zh_Hans: 卢干达语 + - value: ach + label: + en_US: Luo + zh_Hans: 卢奥语 + - value: mk + label: + en_US: Macedonian + zh_Hans: 马其顿语 + - value: mg + label: + en_US: Malagasy + zh_Hans: 马尔加什语 - value: my label: en_US: Malay @@ -381,18 +1617,90 @@ parameters: label: en_US: Malayalam zh_Hans: 马拉雅拉姆语 + - value: mt + label: + en_US: Maltese + zh_Hans: 马耳他语 + - value: mv + label: + en_US: Maldives + zh_Hans: 马尔代夫语 + - value: mi + label: + en_US: Maori + zh_Hans: 毛利语 - value: mr label: en_US: Marathi zh_Hans: 马拉地语 + - value: mfe + label: + en_US: Mauritian Creole + zh_Hans: 毛里求斯克里奥尔语 + - value: mo + label: + en_US: Moldavian + zh_Hans: 摩尔达维亚语 + - value: mn + label: + en_US: Mongolian + zh_Hans: 蒙古语 + - value: sr-me + label: + en_US: Montenegrin + zh_Hans: 黑山语 + - value: ne + label: + en_US: Nepali + zh_Hans: 尼泊尔语 + - value: pcm + label: + en_US: Nigerian Pidgin + zh_Hans: 尼日利亚皮钦语 + - value: nso + label: + en_US: Northern Sotho + zh_Hans: 北索托语 - value: "no" label: en_US: Norwegian zh_Hans: 挪威语 + - value: nn + label: + en_US: Norwegian (Nynorsk) + zh_Hans: 挪威语(尼诺斯克语) + - value: oc + label: + en_US: Occitan + zh_Hans: 奥克语 + - value: or + label: + en_US: Oriya + zh_Hans: 奥里亚语 + - value: om + label: + en_US: Oromo + zh_Hans: 奥罗莫语 + - value: ps + label: + en_US: Pashto + zh_Hans: 普什图语 + - value: fa + label: + en_US: Persian + zh_Hans: 波斯语 + - value: xx-pirate + label: + en_US: Pirate + zh_Hans: 海盗语 - value: pl label: en_US: Polish zh_Hans: 波兰语 + - value: pt + label: + en_US: Portuguese + zh_Hans: 葡萄牙语 - value: pt-br label: en_US: Portuguese (Brazil) @@ -405,18 +1713,62 @@ parameters: label: en_US: Punjabi zh_Hans: 旁遮普语 + - value: qu + label: + en_US: Quechua + zh_Hans: 克丘亚语 - value: ro label: en_US: Romanian zh_Hans: 罗马尼亚语 + - value: rm + label: + en_US: Romansh + zh_Hans: 罗曼什语 + - value: nyn + label: + en_US: Runyakitara + zh_Hans: 卢尼亚基塔拉语 - value: ru label: en_US: Russian zh_Hans: 俄语 + - value: gd + label: + en_US: Scots Gaelic + zh_Hans: 苏格兰盖尔语 - value: sr label: en_US: Serbian zh_Hans: 塞尔维亚语 + - value: sh + label: + en_US: Serbo-Croatian + zh_Hans: 塞尔维亚-克罗地亚语 + - value: st + label: + en_US: Sesotho + zh_Hans: 
塞索托语 + - value: tn + label: + en_US: Setswana + zh_Hans: 塞茨瓦纳语 + - value: crs + label: + en_US: Seychellois Creole + zh_Hans: 塞舌尔克里奥尔语 + - value: sn + label: + en_US: Shona + zh_Hans: 绍纳语 + - value: sd + label: + en_US: Sindhi + zh_Hans: 信德语 + - value: si + label: + en_US: Sinhalese + zh_Hans: 僧伽罗语 - value: sk label: en_US: Slovak @@ -425,18 +1777,42 @@ parameters: label: en_US: Slovenian zh_Hans: 斯洛文尼亚语 + - value: so + label: + en_US: Somali + zh_Hans: 索马里语 - value: es label: en_US: Spanish zh_Hans: 西班牙语 + - value: es-419 + label: + en_US: Spanish (Latin American) + zh_Hans: 西班牙语(拉丁美洲) + - value: su + label: + en_US: Sundanese + zh_Hans: 巽他语 + - value: sw + label: + en_US: Swahili + zh_Hans: 斯瓦希里语 - value: sv label: en_US: Swedish zh_Hans: 瑞典语 + - value: tg + label: + en_US: Tajik + zh_Hans: 塔吉克语 - value: ta label: en_US: Tamil zh_Hans: 泰米尔语 + - value: tt + label: + en_US: Tatar + zh_Hans: 鞑靼语 - value: te label: en_US: Telugu @@ -445,18 +1821,82 @@ parameters: label: en_US: Thai zh_Hans: 泰语 + - value: ti + label: + en_US: Tigrinya + zh_Hans: 提格利尼亚语 + - value: to + label: + en_US: Tonga + zh_Hans: 汤加语 + - value: lua + label: + en_US: Tshiluba + zh_Hans: 卢巴语 + - value: tum + label: + en_US: Tumbuka + zh_Hans: 图布卡语 - value: tr label: en_US: Turkish zh_Hans: 土耳其语 + - value: tk + label: + en_US: Turkmen + zh_Hans: 土库曼语 + - value: tw + label: + en_US: Twi + zh_Hans: 契维语 + - value: ug + label: + en_US: Uighur + zh_Hans: 维吾尔语 - value: uk label: en_US: Ukrainian zh_Hans: 乌克兰语 + - value: ur + label: + en_US: Urdu + zh_Hans: 乌尔都语 + - value: uz + label: + en_US: Uzbek + zh_Hans: 乌兹别克语 + - value: vu + label: + en_US: Vanuatu + zh_Hans: 瓦努阿图语 - value: vi label: en_US: Vietnamese zh_Hans: 越南语 + - value: cy + label: + en_US: Welsh + zh_Hans: 威尔士语 + - value: wo + label: + en_US: Wolof + zh_Hans: 沃洛夫语 + - value: xh + label: + en_US: Xhosa + zh_Hans: 科萨语 + - value: yi + label: + en_US: Yiddish + zh_Hans: 意第绪语 + - value: yo + label: + en_US: Yoruba + zh_Hans: 约鲁巴语 + - value: zu + label: + en_US: Zulu + zh_Hans: 祖鲁语 - name: google_domain type: string required: false diff --git a/api/core/tools/provider/builtin/stepfun/tools/image.py b/api/core/tools/provider/builtin/stepfun/tools/image.py index eb55dae518..61cc14fac6 100644 --- a/api/core/tools/provider/builtin/stepfun/tools/image.py +++ b/api/core/tools/provider/builtin/stepfun/tools/image.py @@ -32,16 +32,17 @@ class StepfunTool(BuiltinTool): prompt = tool_parameters.get("prompt", "") if not prompt: return self.create_text_message("Please input prompt") - + if len(prompt) > 1024: + return self.create_text_message("The prompt length should less than 1024") seed = tool_parameters.get("seed", 0) if seed > 0: extra_body["seed"] = seed - steps = tool_parameters.get("steps", 0) + steps = tool_parameters.get("steps", 50) if steps > 0: extra_body["steps"] = steps - negative_prompt = tool_parameters.get("negative_prompt", "") - if negative_prompt: - extra_body["negative_prompt"] = negative_prompt + cfg_scale = tool_parameters.get("cfg_scale", 7.5) + if cfg_scale > 0: + extra_body["cfg_scale"] = cfg_scale # call openapi stepfun model response = client.images.generate( @@ -51,7 +52,6 @@ class StepfunTool(BuiltinTool): n=tool_parameters.get("n", 1), extra_body=extra_body, ) - print(response) result = [] for image in response.data: diff --git a/api/core/tools/provider/builtin/stepfun/tools/image.yaml b/api/core/tools/provider/builtin/stepfun/tools/image.yaml index 8d7c9b6586..dfda6ed191 100644 --- a/api/core/tools/provider/builtin/stepfun/tools/image.yaml +++ 
b/api/core/tools/provider/builtin/stepfun/tools/image.yaml @@ -33,9 +33,9 @@ parameters: type: select required: false human_description: - en_US: used for selecting the image size - zh_Hans: 用于选择图像大小 - pt_BR: used for selecting the image size + en_US: The size of the generated image + zh_Hans: 生成的图片大小 + pt_BR: The size of the generated image label: en_US: Image size zh_Hans: 图像大小 @@ -77,17 +77,17 @@ parameters: type: number required: true human_description: - en_US: used for selecting the number of images - zh_Hans: 用于选择图像数量 - pt_BR: used for selecting the number of images + en_US: Number of generated images, now only one image can be generated at a time + zh_Hans: 生成的图像数量,当前仅支持每次生成一张图片 + pt_BR: Number of generated images, now only one image can be generated at a time label: - en_US: Number of images - zh_Hans: 图像数量 - pt_BR: Number of images + en_US: Number of generated images + zh_Hans: 生成的图像数量 + pt_BR: Number of generated images form: form default: 1 min: 1 - max: 10 + max: 1 - name: seed type: number required: false @@ -109,21 +109,25 @@ parameters: zh_Hans: Steps pt_BR: Steps human_description: - en_US: Steps - zh_Hans: Steps - pt_BR: Steps + en_US: Steps, now support integers between 1 and 100 + zh_Hans: Steps, 当前支持 1~100 之间整数 + pt_BR: Steps, now support integers between 1 and 100 form: form - default: 10 - - name: negative_prompt - type: string + default: 50 + min: 1 + max: 100 + - name: cfg_scale + type: number required: false label: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt + en_US: classifier-free guidance scale + zh_Hans: classifier-free guidance scale + pt_BR: classifier-free guidance scale human_description: - en_US: Negative prompt - zh_Hans: Negative prompt - pt_BR: Negative prompt + en_US: classifier-free guidance scale + zh_Hans: classifier-free guidance scale + pt_BR: classifier-free guidance scale form: form - default: (worst quality:1.3), (nsfw), low quality + default: 7.5 + min: 1 + max: 10 diff --git a/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.py b/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.py new file mode 100644 index 0000000000..e16b732d02 --- /dev/null +++ b/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.py @@ -0,0 +1,44 @@ +from datetime import datetime +from typing import Any, Union + +import pytz + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.errors import ToolInvokeError +from core.tools.tool.builtin_tool import BuiltinTool + + +class LocaltimeToTimestampTool(BuiltinTool): + def _invoke( + self, + user_id: str, + tool_parameters: dict[str, Any], + ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: + """ + Convert localtime to timestamp + """ + localtime = tool_parameters.get("localtime") + timezone = tool_parameters.get("timezone", "Asia/Shanghai") + if not timezone: + timezone = None + time_format = "%Y-%m-%d %H:%M:%S" + + timestamp = self.localtime_to_timestamp(localtime, time_format, timezone) + if not timestamp: + return self.create_text_message(f"Invalid localtime: {localtime}") + + return self.create_text_message(f"{timestamp}") + + @staticmethod + def localtime_to_timestamp(localtime: str, time_format: str, local_tz=None) -> int | None: + try: + if local_tz is None: + local_tz = datetime.now().astimezone().tzinfo + if isinstance(local_tz, str): + local_tz = pytz.timezone(local_tz) + local_time = datetime.strptime(localtime, time_format) + localtime = local_tz.localize(local_time) + timestamp = 
int(localtime.timestamp()) + return timestamp + except Exception as e: + raise ToolInvokeError(str(e)) diff --git a/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.yaml b/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.yaml new file mode 100644 index 0000000000..6a3b90595f --- /dev/null +++ b/api/core/tools/provider/builtin/time/tools/localtime_to_timestamp.yaml @@ -0,0 +1,33 @@ +identity: + name: localtime_to_timestamp + author: zhuhao + label: + en_US: localtime to timestamp + zh_Hans: 获取时间戳 +description: + human: + en_US: A tool for localtime convert to timestamp + zh_Hans: 获取时间戳 + llm: A tool for localtime convert to timestamp +parameters: + - name: localtime + type: string + required: true + form: llm + label: + en_US: localtime + zh_Hans: 本地时间 + human_description: + en_US: localtime, such as 2024-1-1 0:0:0 + zh_Hans: 本地时间, 比如2024-1-1 0:0:0 + - name: timezone + type: string + required: false + form: llm + label: + en_US: Timezone + zh_Hans: 时区 + human_description: + en_US: Timezone, such as Asia/Shanghai + zh_Hans: 时区, 比如Asia/Shanghai + default: Asia/Shanghai diff --git a/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.py b/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.py new file mode 100644 index 0000000000..bcdd34fd4e --- /dev/null +++ b/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.py @@ -0,0 +1,44 @@ +from datetime import datetime +from typing import Any, Union + +import pytz + +from core.tools.entities.tool_entities import ToolInvokeMessage +from core.tools.errors import ToolInvokeError +from core.tools.tool.builtin_tool import BuiltinTool + + +class TimestampToLocaltimeTool(BuiltinTool): + def _invoke( + self, + user_id: str, + tool_parameters: dict[str, Any], + ) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]: + """ + Convert timestamp to localtime + """ + timestamp = tool_parameters.get("timestamp") + timezone = tool_parameters.get("timezone", "Asia/Shanghai") + if not timezone: + timezone = None + time_format = "%Y-%m-%d %H:%M:%S" + + locatime = self.timestamp_to_localtime(timestamp, timezone) + if not locatime: + return self.create_text_message(f"Invalid timestamp: {timestamp}") + + localtime_format = locatime.strftime(time_format) + + return self.create_text_message(f"{localtime_format}") + + @staticmethod + def timestamp_to_localtime(timestamp: int, local_tz=None) -> datetime | None: + try: + if local_tz is None: + local_tz = datetime.now().astimezone().tzinfo + if isinstance(local_tz, str): + local_tz = pytz.timezone(local_tz) + local_time = datetime.fromtimestamp(timestamp, local_tz) + return local_time + except Exception as e: + raise ToolInvokeError(str(e)) diff --git a/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.yaml b/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.yaml new file mode 100644 index 0000000000..3794e717b4 --- /dev/null +++ b/api/core/tools/provider/builtin/time/tools/timestamp_to_localtime.yaml @@ -0,0 +1,33 @@ +identity: + name: timestamp_to_localtime + author: zhuhao + label: + en_US: Timestamp to localtime + zh_Hans: 时间戳转换 +description: + human: + en_US: A tool for timestamp convert to localtime + zh_Hans: 时间戳转换 + llm: A tool for timestamp convert to localtime +parameters: + - name: timestamp + type: number + required: true + form: llm + label: + en_US: Timestamp + zh_Hans: 时间戳 + human_description: + en_US: Timestamp + zh_Hans: 时间戳 + - name: timezone + type: string + required: false + form: llm + label: + en_US: 
Timezone + zh_Hans: 时区 + human_description: + en_US: Timezone, such as Asia/Shanghai + zh_Hans: 时区, 比如Asia/Shanghai + default: Asia/Shanghai diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.py b/api/core/tools/provider/builtin/vanna/tools/vanna.py index c90d766e48..2443991d57 100644 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.py +++ b/api/core/tools/provider/builtin/vanna/tools/vanna.py @@ -111,9 +111,10 @@ class VannaTool(BuiltinTool): # with "visualize" set to True (default behavior) leads to remote code execution. # Affected versions: <= 0.5.5 ######################################################################################### - generate_chart = False - # generate_chart = tool_parameters.get("generate_chart", True) - res = vn.ask(prompt, False, True, generate_chart) + allow_llm_to_see_data = tool_parameters.get("allow_llm_to_see_data", False) + res = vn.ask( + prompt, print_results=False, auto_train=True, visualize=False, allow_llm_to_see_data=allow_llm_to_see_data + ) result = [] diff --git a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml b/api/core/tools/provider/builtin/vanna/tools/vanna.yaml index ae2eae94c4..12ca8a862e 100644 --- a/api/core/tools/provider/builtin/vanna/tools/vanna.yaml +++ b/api/core/tools/provider/builtin/vanna/tools/vanna.yaml @@ -200,14 +200,14 @@ parameters: en_US: If enabled, it will attempt to train on the metadata of that database zh_Hans: 是否自动从数据库获取元数据来训练 form: form - - name: generate_chart + - name: allow_llm_to_see_data type: boolean required: false - default: True + default: false label: - en_US: Generate Charts - zh_Hans: 生成图表 + en_US: Whether to allow the LLM to see the data + zh_Hans: 是否允许LLM查看数据 human_description: - en_US: Generate Charts - zh_Hans: 是否生成图表 + en_US: Whether to allow the LLM to see the data + zh_Hans: 是否允许LLM查看数据 form: form diff --git a/api/core/tools/provider/builtin/vanna/vanna.yaml b/api/core/tools/provider/builtin/vanna/vanna.yaml index b29fa103e1..7f953be172 100644 --- a/api/core/tools/provider/builtin/vanna/vanna.yaml +++ b/api/core/tools/provider/builtin/vanna/vanna.yaml @@ -8,6 +8,9 @@ identity: en_US: The fastest way to get actionable insights from your database just by asking questions. 
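The Python change above is the security-relevant part of the vanna tool: `vn.ask` is now called with `visualize=False` pinned (the chart-generation path in vanna <= 0.5.5 is the remote-code-execution vector the in-code comment references), and data visibility becomes opt-in via the new `allow_llm_to_see_data` flag. A minimal sketch of the resulting call pattern, assuming the keyword signature used in the diff:

```python
# Sketch of the hardened call pattern from the diff above; `vn` stands in
# for a configured vanna client and is an assumption for illustration.
def ask_safely(vn, prompt: str, allow_llm_to_see_data: bool = False):
    # visualize stays False unconditionally: chart generation is the
    # code-execution vector in affected vanna versions (<= 0.5.5).
    return vn.ask(
        prompt,
        print_results=False,
        auto_train=True,
        visualize=False,
        allow_llm_to_see_data=allow_llm_to_see_data,  # opt-in, defaults off
    )
```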
zh_Hans: 一个基于大模型和RAG的Text2SQL工具。 icon: icon.png + tags: + - utilities + - productivity credentials_for_provider: api_key: type: secret-input diff --git a/api/core/tools/provider/builtin/xinference/tools/stable_diffusion.py b/api/core/tools/provider/builtin/xinference/tools/stable_diffusion.py index 847f2730f2..a44d3b730a 100644 --- a/api/core/tools/provider/builtin/xinference/tools/stable_diffusion.py +++ b/api/core/tools/provider/builtin/xinference/tools/stable_diffusion.py @@ -104,14 +104,15 @@ class StableDiffusionTool(BuiltinTool): model = self.runtime.credentials.get("model", None) if not model: return self.create_text_message("Please input model") - + api_key = self.runtime.credentials.get("api_key") or "abc" + headers = {"Authorization": f"Bearer {api_key}"} # set model try: url = str(URL(base_url) / "sdapi" / "v1" / "options") response = post( url, json={"sd_model_checkpoint": model}, - headers={"Authorization": f"Bearer {self.runtime.credentials['api_key']}"}, + headers=headers, ) if response.status_code != 200: raise ToolProviderCredentialValidationError("Failed to set model, please tell user to set model") @@ -257,14 +258,15 @@ class StableDiffusionTool(BuiltinTool): draw_options["prompt"] = f"{lora},{prompt}" else: draw_options["prompt"] = prompt - + api_key = self.runtime.credentials.get("api_key") or "abc" + headers = {"Authorization": f"Bearer {api_key}"} try: url = str(URL(base_url) / "sdapi" / "v1" / "img2img") response = post( url, json=draw_options, timeout=120, - headers={"Authorization": f"Bearer {self.runtime.credentials['api_key']}"}, + headers=headers, ) if response.status_code != 200: return self.create_text_message("Failed to generate image") @@ -298,14 +300,15 @@ class StableDiffusionTool(BuiltinTool): else: draw_options["prompt"] = prompt draw_options["override_settings"]["sd_model_checkpoint"] = model - + api_key = self.runtime.credentials.get("api_key") or "abc" + headers = {"Authorization": f"Bearer {api_key}"} try: url = str(URL(base_url) / "sdapi" / "v1" / "txt2img") response = post( url, json=draw_options, timeout=120, - headers={"Authorization": f"Bearer {self.runtime.credentials['api_key']}"}, + headers=headers, ) if response.status_code != 200: return self.create_text_message("Failed to generate image") diff --git a/api/core/tools/provider/builtin/xinference/xinference.py b/api/core/tools/provider/builtin/xinference/xinference.py index 7c2428cc00..9692e4060e 100644 --- a/api/core/tools/provider/builtin/xinference/xinference.py +++ b/api/core/tools/provider/builtin/xinference/xinference.py @@ -6,12 +6,18 @@ from core.tools.provider.builtin_tool_provider import BuiltinToolProviderControl class XinferenceProvider(BuiltinToolProviderController): def _validate_credentials(self, credentials: dict) -> None: - base_url = credentials.get("base_url") - api_key = credentials.get("api_key") - model = credentials.get("model") + base_url = credentials.get("base_url", "").removesuffix("/") + api_key = credentials.get("api_key", "") + if not api_key: + api_key = "abc" + credentials["api_key"] = api_key + model = credentials.get("model", "") + if not base_url or not model: + raise ToolProviderCredentialValidationError("Xinference base_url and model is required") + headers = {"Authorization": f"Bearer {api_key}"} res = requests.post( f"{base_url}/sdapi/v1/options", - headers={"Authorization": f"Bearer {api_key}"}, + headers=headers, json={"sd_model_checkpoint": model}, ) if res.status_code != 200: diff --git a/api/core/tools/provider/builtin/xinference/xinference.yaml 
b/api/core/tools/provider/builtin/xinference/xinference.yaml index 19aaf5cbd1..b0c02b9cbc 100644 --- a/api/core/tools/provider/builtin/xinference/xinference.yaml +++ b/api/core/tools/provider/builtin/xinference/xinference.yaml @@ -31,7 +31,7 @@ credentials_for_provider: zh_Hans: 请输入你的模型名称 api_key: type: secret-input - required: true + required: false label: en_US: API Key zh_Hans: Xinference 服务器的 API Key diff --git a/api/core/tools/tool/builtin_tool.py b/api/core/tools/tool/builtin_tool.py index 8edaf7c0e6..e2a81ed0a3 100644 --- a/api/core/tools/tool/builtin_tool.py +++ b/api/core/tools/tool/builtin_tool.py @@ -1,3 +1,5 @@ +from typing import Optional + from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.message_entities import PromptMessage, SystemPromptMessage, UserPromptMessage from core.tools.entities.tool_entities import ToolProviderType @@ -124,7 +126,7 @@ class BuiltinTool(Tool): return result - def get_url(self, url: str, user_agent: str = None) -> str: + def get_url(self, url: str, user_agent: Optional[str] = None) -> str: """ get url """ diff --git a/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py index 8dc60408c9..987f94a350 100644 --- a/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/tool/dataset_retriever/dataset_retriever_tool.py @@ -1,10 +1,12 @@ from pydantic import BaseModel, Field from core.rag.datasource.retrieval_service import RetrievalService +from core.rag.models.document import Document as RetrievalDocument from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.tools.tool.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool from extensions.ext_database import db from models.dataset import Dataset, Document, DocumentSegment +from services.external_knowledge_service import ExternalDatasetService default_retrieval_model = { "search_method": RetrievalMethod.SEMANTIC_SEARCH.value, @@ -53,97 +55,137 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): for hit_callback in self.hit_callbacks: hit_callback.on_query(query, dataset.id) - - # get retrieval model , if the model is not setting , using default - retrieval_model = dataset.retrieval_model or default_retrieval_model - if dataset.indexing_technique == "economy": - # use keyword table query - documents = RetrievalService.retrieve( - retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=self.top_k + if dataset.provider == "external": + results = [] + external_documents = ExternalDatasetService.fetch_external_knowledge_retrieval( + tenant_id=dataset.tenant_id, + dataset_id=dataset.id, + query=query, + external_retrieval_parameters=dataset.retrieval_model, ) - return str("\n".join([document.page_content for document in documents])) - else: - if self.top_k > 0: - # retrieval source - documents = RetrievalService.retrieve( - retrieval_method=retrieval_model.get("search_method", "semantic_search"), - dataset_id=dataset.id, - query=query, - top_k=self.top_k, - score_threshold=retrieval_model.get("score_threshold", 0.0) - if retrieval_model["score_threshold_enabled"] - else 0.0, - reranking_model=retrieval_model.get("reranking_model", None) - if retrieval_model["reranking_enable"] - else None, - reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model", - weights=retrieval_model.get("weights", None), + for external_document in external_documents: + document = RetrievalDocument( + 
page_content=external_document.get("content"),
+                        metadata=external_document.get("metadata"),
+                        provider="external",
                     )
-            else:
-                documents = []
-
+                    document.metadata["score"] = external_document.get("score")
+                    document.metadata["title"] = external_document.get("title")
+                    document.metadata["dataset_id"] = dataset.id
+                    document.metadata["dataset_name"] = dataset.name
+                    results.append(document)
+            # deal with external documents
+            context_list = []
+            for position, item in enumerate(results, start=1):
+                source = {
+                    "position": position,
+                    "dataset_id": item.metadata.get("dataset_id"),
+                    "dataset_name": item.metadata.get("dataset_name"),
+                    "document_name": item.metadata.get("title"),
+                    "data_source_type": "external",
+                    "retriever_from": self.retriever_from,
+                    "score": item.metadata.get("score"),
+                    "title": item.metadata.get("title"),
+                    "content": item.page_content,
+                }
+                context_list.append(source)
             for hit_callback in self.hit_callbacks:
-                hit_callback.on_tool_end(documents)
-            document_score_list = {}
-            if dataset.indexing_technique != "economy":
-                for item in documents:
-                    if item.metadata.get("score"):
-                        document_score_list[item.metadata["doc_id"]] = item.metadata["score"]
-            document_context_list = []
-            index_node_ids = [document.metadata["doc_id"] for document in documents]
-            segments = DocumentSegment.query.filter(
-                DocumentSegment.dataset_id == self.dataset_id,
-                DocumentSegment.completed_at.isnot(None),
-                DocumentSegment.status == "completed",
-                DocumentSegment.enabled == True,
-                DocumentSegment.index_node_id.in_(index_node_ids),
-            ).all()
+                hit_callback.return_retriever_resource_info(context_list)
 
-            if segments:
-                index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)}
-                sorted_segments = sorted(
-                    segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf"))
+            return str("\n".join([item.page_content for item in results]))
+        else:
+            # get the retrieval model; fall back to the default if none is set
+            retrieval_model = dataset.retrieval_model or default_retrieval_model
+            if dataset.indexing_technique == "economy":
+                # use keyword table query
+                documents = RetrievalService.retrieve(
+                    retrieval_method="keyword_search", dataset_id=dataset.id, query=query, top_k=self.top_k
                 )
-                for segment in sorted_segments:
-                    if segment.answer:
-                        document_context_list.append(f"question:{segment.get_sign_content()} answer:{segment.answer}")
-                    else:
-                        document_context_list.append(segment.get_sign_content())
-                if self.return_resource:
-                    context_list = []
-                    resource_number = 1
+                return str("\n".join([document.page_content for document in documents]))
+            else:
+                if self.top_k > 0:
+                    # retrieval source
+                    documents = RetrievalService.retrieve(
+                        retrieval_method=retrieval_model.get("search_method", "semantic_search"),
+                        dataset_id=dataset.id,
+                        query=query,
+                        top_k=self.top_k,
+                        score_threshold=retrieval_model.get("score_threshold", 0.0)
+                        if retrieval_model["score_threshold_enabled"]
+                        else 0.0,
+                        reranking_model=retrieval_model.get("reranking_model", None)
+                        if retrieval_model["reranking_enable"]
+                        else None,
+                        reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model",
+                        weights=retrieval_model.get("weights", None),
+                    )
+                else:
+                    documents = []
+
+                for hit_callback in self.hit_callbacks:
+                    hit_callback.on_tool_end(documents)
+                document_score_list = {}
+                if dataset.indexing_technique != "economy":
+                    for item in documents:
+                        if item.metadata.get("score"):
+                            document_score_list[item.metadata["doc_id"]] = item.metadata["score"]
+                    
document_context_list = [] + index_node_ids = [document.metadata["doc_id"] for document in documents] + segments = DocumentSegment.query.filter( + DocumentSegment.dataset_id == self.dataset_id, + DocumentSegment.completed_at.isnot(None), + DocumentSegment.status == "completed", + DocumentSegment.enabled == True, + DocumentSegment.index_node_id.in_(index_node_ids), + ).all() + + if segments: + index_node_id_to_position = {id: position for position, id in enumerate(index_node_ids)} + sorted_segments = sorted( + segments, key=lambda segment: index_node_id_to_position.get(segment.index_node_id, float("inf")) + ) for segment in sorted_segments: - context = {} - document = Document.query.filter( - Document.id == segment.document_id, - Document.enabled == True, - Document.archived == False, - ).first() - if dataset and document: - source = { - "position": resource_number, - "dataset_id": dataset.id, - "dataset_name": dataset.name, - "document_id": document.id, - "document_name": document.name, - "data_source_type": document.data_source_type, - "segment_id": segment.id, - "retriever_from": self.retriever_from, - "score": document_score_list.get(segment.index_node_id, None), - } - if self.retriever_from == "dev": - source["hit_count"] = segment.hit_count - source["word_count"] = segment.word_count - source["segment_position"] = segment.position - source["index_node_hash"] = segment.index_node_hash - if segment.answer: - source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" - else: - source["content"] = segment.content - context_list.append(source) - resource_number += 1 + if segment.answer: + document_context_list.append( + f"question:{segment.get_sign_content()} answer:{segment.answer}" + ) + else: + document_context_list.append(segment.get_sign_content()) + if self.return_resource: + context_list = [] + resource_number = 1 + for segment in sorted_segments: + context = {} + document = Document.query.filter( + Document.id == segment.document_id, + Document.enabled == True, + Document.archived == False, + ).first() + if dataset and document: + source = { + "position": resource_number, + "dataset_id": dataset.id, + "dataset_name": dataset.name, + "document_id": document.id, + "document_name": document.name, + "data_source_type": document.data_source_type, + "segment_id": segment.id, + "retriever_from": self.retriever_from, + "score": document_score_list.get(segment.index_node_id, None), + } + if self.retriever_from == "dev": + source["hit_count"] = segment.hit_count + source["word_count"] = segment.word_count + source["segment_position"] = segment.position + source["index_node_hash"] = segment.index_node_hash + if segment.answer: + source["content"] = f"question:{segment.content} \nanswer:{segment.answer}" + else: + source["content"] = segment.content + context_list.append(source) + resource_number += 1 - for hit_callback in self.hit_callbacks: - hit_callback.return_retriever_resource_info(context_list) + for hit_callback in self.hit_callbacks: + hit_callback.return_retriever_resource_info(context_list) - return str("\n".join(document_context_list)) + return str("\n".join(document_context_list)) diff --git a/api/core/tools/tool/tool.py b/api/core/tools/tool/tool.py index d9e9a0faad..cb4ab51ceb 100644 --- a/api/core/tools/tool/tool.py +++ b/api/core/tools/tool/tool.py @@ -318,7 +318,7 @@ class Tool(BaseModel, ABC): """ return ToolInvokeMessage(type=ToolInvokeMessage.MessageType.TEXT, message=text, save_as=save_as) - def create_blob_message(self, blob: bytes, meta: dict = None, 
save_as: str = "") -> ToolInvokeMessage: + def create_blob_message(self, blob: bytes, meta: Optional[dict] = None, save_as: str = "") -> ToolInvokeMessage: """ create a blob message diff --git a/api/core/tools/tool/workflow_tool.py b/api/core/tools/tool/workflow_tool.py index ad0c7fc631..a885b8784f 100644 --- a/api/core/tools/tool/workflow_tool.py +++ b/api/core/tools/tool/workflow_tool.py @@ -68,10 +68,13 @@ class WorkflowTool(Tool): result = [] - outputs = data.get("outputs", {}) - outputs, files = self._extract_files(outputs) - for file in files: - result.append(self.create_file_var_message(file)) + outputs = data.get("outputs") + if outputs == None: + outputs = {} + else: + outputs, files = self._extract_files(outputs) + for file in files: + result.append(self.create_file_var_message(file)) result.append(self.create_text_message(json.dumps(outputs, ensure_ascii=False))) result.append(self.create_json_message(outputs)) diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index a3303797e1..ed66dd1357 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -4,7 +4,7 @@ import mimetypes from collections.abc import Generator from os import listdir, path from threading import Lock -from typing import Any, Union +from typing import Any, Optional, Union from configs import dify_config from core.agent.entities import AgentToolEntity @@ -72,7 +72,7 @@ class ToolManager: @classmethod def get_tool( - cls, provider_type: str, provider_id: str, tool_name: str, tenant_id: str = None + cls, provider_type: str, provider_id: str, tool_name: str, tenant_id: Optional[str] = None ) -> Union[BuiltinTool, ApiTool]: """ get the tool diff --git a/api/core/tools/utils/feishu_api_utils.py b/api/core/tools/utils/feishu_api_utils.py index ffdb06498f..245b296d18 100644 --- a/api/core/tools/utils/feishu_api_utils.py +++ b/api/core/tools/utils/feishu_api_utils.py @@ -1,9 +1,25 @@ +from typing import Optional + import httpx +from core.tools.errors import ToolProviderCredentialValidationError from extensions.ext_redis import redis_client +def auth(credentials): + app_id = credentials.get("app_id") + app_secret = credentials.get("app_secret") + if not app_id or not app_secret: + raise ToolProviderCredentialValidationError("app_id and app_secret is required") + try: + assert FeishuRequest(app_id, app_secret).tenant_access_token is not None + except Exception as e: + raise ToolProviderCredentialValidationError(str(e)) + + class FeishuRequest: + API_BASE_URL = "https://lark-plugin-api.solutionsuite.cn/lark-plugin" + def __init__(self, app_id: str, app_secret: str): self.app_id = app_id self.app_secret = app_secret @@ -18,7 +34,12 @@ class FeishuRequest: return res.get("tenant_access_token") def _send_request( - self, url: str, method: str = "post", require_token: bool = True, payload: dict = None, params: dict = None + self, + url: str, + method: str = "post", + require_token: bool = True, + payload: Optional[dict] = None, + params: Optional[dict] = None, ): headers = { "Content-Type": "application/json", @@ -42,7 +63,7 @@ class FeishuRequest: "expire": 7200 } """ - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/access_token/get_tenant_access_token" + url = f"{self.API_BASE_URL}/access_token/get_tenant_access_token" payload = {"app_id": app_id, "app_secret": app_secret} res = self._send_request(url, require_token=False, payload=payload) return res @@ -63,7 +84,7 @@ class FeishuRequest: "msg": "创建飞书文档成功,请查看" } """ - url = 
"https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/create_document" + url = f"{self.API_BASE_URL}/document/create_document" payload = { "title": title, "content": content, @@ -72,13 +93,13 @@ class FeishuRequest: res = self._send_request(url, payload=payload) return res.get("data") - def write_document(self, document_id: str, content: str, position: str = "start") -> dict: - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/write_document" + def write_document(self, document_id: str, content: str, position: str = "end") -> dict: + url = f"{self.API_BASE_URL}/document/write_document" payload = {"document_id": document_id, "content": content, "position": position} res = self._send_request(url, payload=payload) return res - def get_document_content(self, document_id: str, mode: str, lang: int = 0) -> dict: + def get_document_content(self, document_id: str, mode: str = "markdown", lang: str = "0") -> dict: """ API url: https://open.larkoffice.com/document/server-docs/docs/docs/docx-v1/document/raw_content Example Response: @@ -95,45 +116,404 @@ class FeishuRequest: "mode": mode, "lang": lang, } - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/get_document_content" - res = self._send_request(url, method="get", params=params) + url = f"{self.API_BASE_URL}/document/get_document_content" + res = self._send_request(url, method="GET", params=params) return res.get("data").get("content") - def list_document_blocks(self, document_id: str, page_token: str, page_size: int = 500) -> dict: + def list_document_blocks( + self, document_id: str, page_token: str, user_id_type: str = "open_id", page_size: int = 500 + ) -> dict: """ API url: https://open.larkoffice.com/document/server-docs/docs/docs/docx-v1/document/list """ - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/document/list_document_blocks" params = { + "user_id_type": user_id_type, "document_id": document_id, "page_size": page_size, "page_token": page_token, } - res = self._send_request(url, method="get", params=params) + url = f"{self.API_BASE_URL}/document/list_document_blocks" + res = self._send_request(url, method="GET", params=params) return res.get("data") def send_bot_message(self, receive_id_type: str, receive_id: str, msg_type: str, content: str) -> dict: """ API url: https://open.larkoffice.com/document/server-docs/im-v1/message/create """ - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/message/send_bot_message" + url = f"{self.API_BASE_URL}/message/send_bot_message" params = { "receive_id_type": receive_id_type, } payload = { "receive_id": receive_id, "msg_type": msg_type, - "content": content, + "content": content.strip('"').replace(r"\"", '"').replace(r"\\", "\\"), } res = self._send_request(url, params=params, payload=payload) return res.get("data") def send_webhook_message(self, webhook: str, msg_type: str, content: str) -> dict: - url = "https://lark-plugin-api.solutionsuite.cn/lark-plugin/message/send_webhook_message" + url = f"{self.API_BASE_URL}/message/send_webhook_message" payload = { "webhook": webhook, "msg_type": msg_type, - "content": content, + "content": content.strip('"').replace(r"\"", '"').replace(r"\\", "\\"), } res = self._send_request(url, require_token=False, payload=payload) return res + + def get_chat_messages( + self, + container_id: str, + start_time: str, + end_time: str, + page_token: str, + sort_type: str = "ByCreateTimeAsc", + page_size: int = 20, + ) -> dict: + """ + API url: 
https://open.larkoffice.com/document/server-docs/im-v1/message/list
+        """
+        url = f"{self.API_BASE_URL}/message/get_chat_messages"
+        params = {
+            "container_id": container_id,
+            "start_time": start_time,
+            "end_time": end_time,
+            "sort_type": sort_type,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def get_thread_messages(
+        self, container_id: str, page_token: str, sort_type: str = "ByCreateTimeAsc", page_size: int = 20
+    ) -> dict:
+        """
+        API url: https://open.larkoffice.com/document/server-docs/im-v1/message/list
+        """
+        url = f"{self.API_BASE_URL}/message/get_thread_messages"
+        params = {
+            "container_id": container_id,
+            "sort_type": sort_type,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def create_task(self, summary: str, start_time: str, end_time: str, completed_time: str, description: str) -> dict:
+        # create a task
+        url = f"{self.API_BASE_URL}/task/create_task"
+        payload = {
+            "summary": summary,
+            "start_time": start_time,
+            "end_time": end_time,
+            "completed_at": completed_time,
+            "description": description,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def update_task(
+        self, task_guid: str, summary: str, start_time: str, end_time: str, completed_time: str, description: str
+    ) -> dict:
+        # update a task
+        url = f"{self.API_BASE_URL}/task/update_task"
+        payload = {
+            "task_guid": task_guid,
+            "summary": summary,
+            "start_time": start_time,
+            "end_time": end_time,
+            "completed_time": completed_time,
+            "description": description,
+        }
+        res = self._send_request(url, method="PATCH", payload=payload)
+        return res.get("data")
+
+    def delete_task(self, task_guid: str) -> dict:
+        # delete a task
+        url = f"{self.API_BASE_URL}/task/delete_task"
+        payload = {
+            "task_guid": task_guid,
+        }
+        res = self._send_request(url, method="DELETE", payload=payload)
+        return res
+
+    def add_members(self, task_guid: str, member_phone_or_email: str, member_role: str) -> dict:
+        # add members to a task
+        url = f"{self.API_BASE_URL}/task/add_members"
+        payload = {
+            "task_guid": task_guid,
+            "member_phone_or_email": member_phone_or_email,
+            "member_role": member_role,
+        }
+        res = self._send_request(url, payload=payload)
+        return res
+
+    def get_wiki_nodes(self, space_id: str, parent_node_token: str, page_token: str, page_size: int = 20) -> dict:
+        # list all child nodes of a wiki space
+        url = f"{self.API_BASE_URL}/wiki/get_wiki_nodes"
+        payload = {
+            "space_id": space_id,
+            "parent_node_token": parent_node_token,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def get_primary_calendar(self, user_id_type: str = "open_id") -> dict:
+        url = f"{self.API_BASE_URL}/calendar/get_primary_calendar"
+        params = {
+            "user_id_type": user_id_type,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def create_event(
+        self,
+        summary: str,
+        description: str,
+        start_time: str,
+        end_time: str,
+        attendee_ability: str,
+        need_notification: bool = True,
+        auto_record: bool = False,
+    ) -> dict:
+        url = f"{self.API_BASE_URL}/calendar/create_event"
+        payload = {
+            "summary": summary,
+            "description": description,
+            "need_notification": need_notification,
+            "start_time": start_time,
+            "end_time": end_time,
+            "auto_record": auto_record,
+            "attendee_ability": attendee_ability,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def update_event(
+        self,
+        event_id: str,
+        summary: str,
+        description: str,
+        need_notification: bool,
+        start_time: str,
+        end_time: str,
+        auto_record: bool,
+    ) -> dict:
+        url = f"{self.API_BASE_URL}/calendar/update_event/{event_id}"
+        payload = {}
+        if summary:
+            payload["summary"] = summary
+        if description:
+            payload["description"] = description
+        if start_time:
+            payload["start_time"] = start_time
+        if end_time:
+            payload["end_time"] = end_time
+        if need_notification:
+            payload["need_notification"] = need_notification
+        if auto_record:
+            payload["auto_record"] = auto_record
+        res = self._send_request(url, method="PATCH", payload=payload)
+        return res
+
+    def delete_event(self, event_id: str, need_notification: bool = True) -> dict:
+        url = f"{self.API_BASE_URL}/calendar/delete_event/{event_id}"
+        params = {
+            "need_notification": need_notification,
+        }
+        res = self._send_request(url, method="DELETE", params=params)
+        return res
+
+    def list_events(self, start_time: str, end_time: str, page_token: str, page_size: int = 50) -> dict:
+        url = f"{self.API_BASE_URL}/calendar/list_events"
+        params = {
+            "start_time": start_time,
+            "end_time": end_time,
+            "page_token": page_token,
+            "page_size": page_size,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def search_events(
+        self,
+        query: str,
+        start_time: str,
+        end_time: str,
+        page_token: str,
+        user_id_type: str = "open_id",
+        page_size: int = 20,
+    ) -> dict:
+        url = f"{self.API_BASE_URL}/calendar/search_events"
+        payload = {
+            "query": query,
+            "start_time": start_time,
+            "end_time": end_time,
+            "page_token": page_token,
+            "user_id_type": user_id_type,
+            "page_size": page_size,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def add_event_attendees(self, event_id: str, attendee_phone_or_email: str, need_notification: bool = True) -> dict:
+        # add attendees to an event
+        url = f"{self.API_BASE_URL}/calendar/add_event_attendees"
+        payload = {
+            "event_id": event_id,
+            "attendee_phone_or_email": attendee_phone_or_email,
+            "need_notification": need_notification,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def create_spreadsheet(
+        self,
+        title: str,
+        folder_token: str,
+    ) -> dict:
+        # create a spreadsheet
+        url = f"{self.API_BASE_URL}/spreadsheet/create_spreadsheet"
+        payload = {
+            "title": title,
+            "folder_token": folder_token,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def get_spreadsheet(
+        self,
+        spreadsheet_token: str,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # get spreadsheet information
+        url = f"{self.API_BASE_URL}/spreadsheet/get_spreadsheet"
+        params = {
+            "spreadsheet_token": spreadsheet_token,
+            "user_id_type": user_id_type,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def list_spreadsheet_sheets(
+        self,
+        spreadsheet_token: str,
+    ) -> dict:
+        # list all sheets in a spreadsheet
+        url = f"{self.API_BASE_URL}/spreadsheet/list_spreadsheet_sheets"
+        params = {
+            "spreadsheet_token": spreadsheet_token,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def add_rows(
+        self,
+        spreadsheet_token: str,
+        sheet_id: str,
+        sheet_name: str,
+        length: int,
+        values: str,
+    ) -> dict:
+        # add rows, appended at the end of the sheet
+        url = f"{self.API_BASE_URL}/spreadsheet/add_rows"
+        payload = {
+            "spreadsheet_token": spreadsheet_token,
+            "sheet_id": sheet_id,
+            "sheet_name": sheet_name,
+            "length": length,
+            "values": values,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def add_cols(
+        self,
+        spreadsheet_token: str,
+        sheet_id: str,
+        sheet_name: str,
+        length: int,
+        values: str,
+    ) -> dict:
+        # add columns, appended at the end of the sheet
+        url = f"{self.API_BASE_URL}/spreadsheet/add_cols"
+        payload = {
+            "spreadsheet_token": spreadsheet_token,
+            "sheet_id": sheet_id,
+            "sheet_name": sheet_name,
+            "length": length,
+            "values": values,
+        }
+        res = self._send_request(url, payload=payload)
+        return res.get("data")
+
+    def read_rows(
+        self,
+        spreadsheet_token: str,
+        sheet_id: str,
+        sheet_name: str,
+        start_row: int,
+        num_rows: int,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # read row data from the sheet
+        url = f"{self.API_BASE_URL}/spreadsheet/read_rows"
+        params = {
+            "spreadsheet_token": spreadsheet_token,
+            "sheet_id": sheet_id,
+            "sheet_name": sheet_name,
+            "start_row": start_row,
+            "num_rows": num_rows,
+            "user_id_type": user_id_type,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def read_cols(
+        self,
+        spreadsheet_token: str,
+        sheet_id: str,
+        sheet_name: str,
+        start_col: int,
+        num_cols: int,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # read column data from the sheet
+        url = f"{self.API_BASE_URL}/spreadsheet/read_cols"
+        params = {
+            "spreadsheet_token": spreadsheet_token,
+            "sheet_id": sheet_id,
+            "sheet_name": sheet_name,
+            "start_col": start_col,
+            "num_cols": num_cols,
+            "user_id_type": user_id_type,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
+
+    def read_table(
+        self,
+        spreadsheet_token: str,
+        sheet_id: str,
+        sheet_name: str,
+        num_range: str,
+        query: str,
+        user_id_type: str = "open_id",
+    ) -> dict:
+        # read a custom range of rows and columns
+        url = f"{self.API_BASE_URL}/spreadsheet/read_table"
+        params = {
+            "spreadsheet_token": spreadsheet_token,
+            "sheet_id": sheet_id,
+            "sheet_name": sheet_name,
+            "range": num_range,
+            "query": query,
+            "user_id_type": user_id_type,
+        }
+        res = self._send_request(url, method="GET", params=params)
+        return res.get("data")
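
The `FeishuRequest` methods added above all follow one pattern: build a URL from `API_BASE_URL`, collect `params` or `payload`, and delegate to `_send_request`, which injects the Redis-cached `tenant_access_token` unless `require_token=False` is passed. A minimal usage sketch, assuming placeholder credentials and document IDs (the response shapes follow the docstrings above rather than a verified API contract):

```python
# Sketch only: the app credentials and document ID below are placeholders.
from core.tools.utils.feishu_api_utils import FeishuRequest

client = FeishuRequest(app_id="cli_xxx", app_secret="***")

# write_document now defaults to position="end" (previously "start"),
# so appending to a document needs no explicit position argument.
client.write_document(document_id="doxcn_xxx", content="- new item")

# get_document_content defaults to mode="markdown" and returns the document text.
text = client.get_document_content(document_id="doxcn_xxx")
```
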
diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py
index 9ead4f8e5c..5867a11bb3 100644
--- a/api/core/tools/utils/parser.py
+++ b/api/core/tools/utils/parser.py
@@ -3,6 +3,7 @@ import uuid
 from json import dumps as json_dumps
 from json import loads as json_loads
 from json.decoder import JSONDecodeError
+from typing import Optional
 
 from requests import get
 from yaml import YAMLError, safe_load
@@ -16,7 +17,7 @@ from core.tools.errors import ToolApiSchemaError, ToolNotSupportedError, ToolPro
 class ApiBasedToolSchemaParser:
     @staticmethod
     def parse_openapi_to_tool_bundle(
-        openapi: dict, extra_info: dict = None, warning: dict = None
+        openapi: dict, extra_info: Optional[dict], warning: Optional[dict]
     ) -> list[ApiToolBundle]:
         warning = warning if warning is not None else {}
         extra_info = extra_info if extra_info is not None else {}
@@ -174,7 +175,7 @@ class ApiBasedToolSchemaParser:
 
     @staticmethod
     def parse_openapi_yaml_to_tool_bundle(
-        yaml: str, extra_info: dict = None, warning: dict = None
+        yaml: str, extra_info: Optional[dict], warning: Optional[dict]
     ) -> list[ApiToolBundle]:
         """
         parse openapi yaml to tool bundle
@@ -191,7 +192,7 @@ class ApiBasedToolSchemaParser:
         return ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi, extra_info=extra_info, warning=warning)
 
     @staticmethod
-    def parse_swagger_to_openapi(swagger: dict, extra_info: dict = None, warning: dict = None) -> dict:
+    def parse_swagger_to_openapi(swagger: dict, extra_info: Optional[dict], warning: 
Optional[dict]) -> dict: """ parse swagger to openapi @@ -253,7 +254,7 @@ class ApiBasedToolSchemaParser: @staticmethod def parse_openai_plugin_json_to_tool_bundle( - json: str, extra_info: dict = None, warning: dict = None + json: str, extra_info: Optional[dict], warning: Optional[dict] ) -> list[ApiToolBundle]: """ parse openapi plugin yaml to tool bundle @@ -287,7 +288,7 @@ class ApiBasedToolSchemaParser: @staticmethod def auto_parse_to_tool_bundle( - content: str, extra_info: dict = None, warning: dict = None + content: str, extra_info: Optional[dict] = None, warning: Optional[dict] = None ) -> tuple[list[ApiToolBundle], str]: """ auto parse to tool bundle diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py index 1ced7d0488..5807d61b94 100644 --- a/api/core/tools/utils/web_reader_tool.py +++ b/api/core/tools/utils/web_reader_tool.py @@ -9,6 +9,7 @@ import tempfile import unicodedata from contextlib import contextmanager from pathlib import Path +from typing import Optional from urllib.parse import unquote import chardet @@ -36,7 +37,7 @@ def page_result(text: str, cursor: int, max_length: int) -> str: return text[cursor : cursor + max_length] -def get_url(url: str, user_agent: str = None) -> str: +def get_url(url: str, user_agent: Optional[str] = None) -> str: """Fetch URL and return the contents as a string.""" headers = { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)" diff --git a/api/core/workflow/enums.py b/api/core/workflow/enums.py index da65f6b1fb..213ed57f57 100644 --- a/api/core/workflow/enums.py +++ b/api/core/workflow/enums.py @@ -11,3 +11,6 @@ class SystemVariableKey(str, Enum): CONVERSATION_ID = "conversation_id" USER_ID = "user_id" DIALOGUE_COUNT = "dialogue_count" + APP_ID = "app_id" + WORKFLOW_ID = "workflow_id" + WORKFLOW_RUN_ID = "workflow_run_id" diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index af55688a52..0b3e9bd6a8 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -79,8 +79,9 @@ class KnowledgeRetrievalNode(BaseNode): results = ( db.session.query(Dataset) - .join(subquery, Dataset.id == subquery.c.dataset_id) + .outerjoin(subquery, Dataset.id == subquery.c.dataset_id) .filter(Dataset.tenant_id == self.tenant_id, Dataset.id.in_(dataset_ids)) + .filter((subquery.c.available_document_count > 0) | (Dataset.provider == "external")) .all() ) @@ -121,10 +122,13 @@ class KnowledgeRetrievalNode(BaseNode): ) elif node_data.retrieval_mode == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE.value: if node_data.multiple_retrieval_config.reranking_mode == "reranking_model": - reranking_model = { - "reranking_provider_name": node_data.multiple_retrieval_config.reranking_model.provider, - "reranking_model_name": node_data.multiple_retrieval_config.reranking_model.model, - } + if node_data.multiple_retrieval_config.reranking_model: + reranking_model = { + "reranking_provider_name": node_data.multiple_retrieval_config.reranking_model.provider, + "reranking_model_name": node_data.multiple_retrieval_config.reranking_model.model, + } + else: + reranking_model = None weights = None elif node_data.multiple_retrieval_config.reranking_mode == "weighted_score": reranking_model = None @@ -156,16 +160,34 @@ class KnowledgeRetrievalNode(BaseNode): weights, 
node_data.multiple_retrieval_config.reranking_enable, ) - - context_list = [] - if all_documents: + dify_documents = [item for item in all_documents if item.provider == "dify"] + external_documents = [item for item in all_documents if item.provider == "external"] + retrieval_resource_list = [] + # deal with external documents + for item in external_documents: + source = { + "metadata": { + "_source": "knowledge", + "dataset_id": item.metadata.get("dataset_id"), + "dataset_name": item.metadata.get("dataset_name"), + "document_name": item.metadata.get("title"), + "data_source_type": "external", + "retriever_from": "workflow", + "score": item.metadata.get("score"), + }, + "title": item.metadata.get("title"), + "content": item.page_content, + } + retrieval_resource_list.append(source) + document_score_list = {} + # deal with dify documents + if dify_documents: document_score_list = {} - page_number_list = {} - for item in all_documents: + for item in dify_documents: if item.metadata.get("score"): document_score_list[item.metadata["doc_id"]] = item.metadata["score"] - index_node_ids = [document.metadata["doc_id"] for document in all_documents] + index_node_ids = [document.metadata["doc_id"] for document in dify_documents] segments = DocumentSegment.query.filter( DocumentSegment.dataset_id.in_(dataset_ids), DocumentSegment.completed_at.isnot(None), @@ -186,13 +208,10 @@ class KnowledgeRetrievalNode(BaseNode): Document.enabled == True, Document.archived == False, ).first() - - resource_number = 1 if dataset and document: source = { "metadata": { "_source": "knowledge", - "position": resource_number, "dataset_id": dataset.id, "dataset_name": dataset.name, "document_id": document.id, @@ -212,9 +231,16 @@ class KnowledgeRetrievalNode(BaseNode): source["content"] = f"question:{segment.get_sign_content()} \nanswer:{segment.answer}" else: source["content"] = segment.get_sign_content() - context_list.append(source) - resource_number += 1 - return context_list + retrieval_resource_list.append(source) + if retrieval_resource_list: + retrieval_resource_list = sorted( + retrieval_resource_list, key=lambda x: x.get("metadata").get("score"), reverse=True + ) + position = 1 + for item in retrieval_resource_list: + item["metadata"]["position"] = position + position += 1 + return retrieval_resource_list @classmethod def _extract_variable_selector_to_variable_mapping( diff --git a/api/events/event_handlers/create_document_index.py b/api/events/event_handlers/create_document_index.py index 54f6a76e16..5af45e1e50 100644 --- a/api/events/event_handlers/create_document_index.py +++ b/api/events/event_handlers/create_document_index.py @@ -14,7 +14,7 @@ from models.dataset import Document @document_index_created.connect def handle(sender, **kwargs): dataset_id = sender - document_ids = kwargs.get("document_ids", None) + document_ids = kwargs.get("document_ids") documents = [] start_at = time.perf_counter() for document_id in document_ids: diff --git a/api/extensions/ext_proxy_fix.py b/api/extensions/ext_proxy_fix.py new file mode 100644 index 0000000000..c106a4384a --- /dev/null +++ b/api/extensions/ext_proxy_fix.py @@ -0,0 +1,10 @@ +from flask import Flask + +from configs import dify_config + + +def init_app(app: Flask): + if dify_config.RESPECT_XFORWARD_HEADERS_ENABLED: + from werkzeug.middleware.proxy_fix import ProxyFix + + app.wsgi_app = ProxyFix(app.wsgi_app) diff --git a/api/extensions/ext_storage.py b/api/extensions/ext_storage.py index 1e6530f6f4..f90629262d 100644 --- a/api/extensions/ext_storage.py +++ 
b/api/extensions/ext_storage.py @@ -4,15 +4,9 @@ from typing import Union from flask import Flask -from extensions.storage.aliyun_storage import AliyunStorage -from extensions.storage.azure_storage import AzureStorage -from extensions.storage.google_storage import GoogleStorage -from extensions.storage.huawei_storage import HuaweiStorage -from extensions.storage.local_storage import LocalStorage -from extensions.storage.oci_storage import OCIStorage -from extensions.storage.s3_storage import S3Storage -from extensions.storage.tencent_storage import TencentStorage -from extensions.storage.volcengine_storage import VolcengineStorage +from configs import dify_config +from extensions.storage.base_storage import BaseStorage +from extensions.storage.storage_type import StorageType class Storage: @@ -20,25 +14,56 @@ class Storage: self.storage_runner = None def init_app(self, app: Flask): - storage_type = app.config.get("STORAGE_TYPE") - if storage_type == "s3": - self.storage_runner = S3Storage(app=app) - elif storage_type == "azure-blob": - self.storage_runner = AzureStorage(app=app) - elif storage_type == "aliyun-oss": - self.storage_runner = AliyunStorage(app=app) - elif storage_type == "google-storage": - self.storage_runner = GoogleStorage(app=app) - elif storage_type == "tencent-cos": - self.storage_runner = TencentStorage(app=app) - elif storage_type == "oci-storage": - self.storage_runner = OCIStorage(app=app) - elif storage_type == "huawei-obs": - self.storage_runner = HuaweiStorage(app=app) - elif storage_type == "volcengine-tos": - self.storage_runner = VolcengineStorage(app=app) - else: - self.storage_runner = LocalStorage(app=app) + storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE) + self.storage_runner = storage_factory(app=app) + + @staticmethod + def get_storage_factory(storage_type: str) -> type[BaseStorage]: + match storage_type: + case StorageType.S3: + from extensions.storage.aws_s3_storage import AwsS3Storage + + return AwsS3Storage + case StorageType.AZURE_BLOB: + from extensions.storage.azure_blob_storage import AzureBlobStorage + + return AzureBlobStorage + case StorageType.ALIYUN_OSS: + from extensions.storage.aliyun_oss_storage import AliyunOssStorage + + return AliyunOssStorage + case StorageType.GOOGLE_STORAGE: + from extensions.storage.google_cloud_storage import GoogleCloudStorage + + return GoogleCloudStorage + case StorageType.TENCENT_COS: + from extensions.storage.tencent_cos_storage import TencentCosStorage + + return TencentCosStorage + case StorageType.OCI_STORAGE: + from extensions.storage.oracle_oci_storage import OracleOCIStorage + + return OracleOCIStorage + case StorageType.HUAWEI_OBS: + from extensions.storage.huawei_obs_storage import HuaweiObsStorage + + return HuaweiObsStorage + case StorageType.BAIDU_OBS: + from extensions.storage.baidu_obs_storage import BaiduObsStorage + + return BaiduObsStorage + case StorageType.VOLCENGINE_TOS: + from extensions.storage.volcengine_tos_storage import VolcengineTosStorage + + return VolcengineTosStorage + case StorageType.SUPBASE: + from extensions.storage.supabase_storage import SupabaseStorage + + return SupabaseStorage + case StorageType.LOCAL | _: + from extensions.storage.local_fs_storage import LocalFsStorage + + return LocalFsStorage def save(self, filename, data): try: diff --git a/api/extensions/storage/aliyun_storage.py b/api/extensions/storage/aliyun_oss_storage.py similarity index 96% rename from api/extensions/storage/aliyun_storage.py rename to 
api/extensions/storage/aliyun_oss_storage.py index 2677912aa9..53bd399d6d 100644 --- a/api/extensions/storage/aliyun_storage.py +++ b/api/extensions/storage/aliyun_oss_storage.py @@ -7,8 +7,8 @@ from flask import Flask from extensions.storage.base_storage import BaseStorage -class AliyunStorage(BaseStorage): - """Implementation for aliyun storage.""" +class AliyunOssStorage(BaseStorage): + """Implementation for Aliyun OSS storage.""" def __init__(self, app: Flask): super().__init__(app) diff --git a/api/extensions/storage/s3_storage.py b/api/extensions/storage/aws_s3_storage.py similarity index 88% rename from api/extensions/storage/s3_storage.py rename to api/extensions/storage/aws_s3_storage.py index 0858be3af6..38f823763f 100644 --- a/api/extensions/storage/s3_storage.py +++ b/api/extensions/storage/aws_s3_storage.py @@ -1,3 +1,4 @@ +import logging from collections.abc import Generator from contextlib import closing @@ -8,18 +9,25 @@ from flask import Flask from extensions.storage.base_storage import BaseStorage +logger = logging.getLogger(__name__) -class S3Storage(BaseStorage): - """Implementation for s3 storage.""" + +class AwsS3Storage(BaseStorage): + """Implementation for Amazon Web Services S3 storage.""" def __init__(self, app: Flask): super().__init__(app) app_config = self.app.config self.bucket_name = app_config.get("S3_BUCKET_NAME") if app_config.get("S3_USE_AWS_MANAGED_IAM"): + logger.info("Using AWS managed IAM role for S3") + session = boto3.Session() - self.client = session.client("s3") + region_name = app_config.get("S3_REGION") + self.client = session.client(service_name="s3", region_name=region_name) else: + logger.info("Using ak and sk for S3") + self.client = boto3.client( "s3", aws_secret_access_key=app_config.get("S3_SECRET_KEY"), diff --git a/api/extensions/storage/azure_storage.py b/api/extensions/storage/azure_blob_storage.py similarity index 97% rename from api/extensions/storage/azure_storage.py rename to api/extensions/storage/azure_blob_storage.py index ca8cbb9188..daea660a49 100644 --- a/api/extensions/storage/azure_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -8,8 +8,8 @@ from extensions.ext_redis import redis_client from extensions.storage.base_storage import BaseStorage -class AzureStorage(BaseStorage): - """Implementation for azure storage.""" +class AzureBlobStorage(BaseStorage): + """Implementation for Azure Blob storage.""" def __init__(self, app: Flask): super().__init__(app) diff --git a/api/extensions/storage/baidu_obs_storage.py b/api/extensions/storage/baidu_obs_storage.py new file mode 100644 index 0000000000..c5acff4a9d --- /dev/null +++ b/api/extensions/storage/baidu_obs_storage.py @@ -0,0 +1,60 @@ +import base64 +import hashlib +from collections.abc import Generator + +from baidubce.auth.bce_credentials import BceCredentials +from baidubce.bce_client_configuration import BceClientConfiguration +from baidubce.services.bos.bos_client import BosClient +from flask import Flask + +from extensions.storage.base_storage import BaseStorage + + +class BaiduObsStorage(BaseStorage): + """Implementation for Baidu OBS storage.""" + + def __init__(self, app: Flask): + super().__init__(app) + app_config = self.app.config + self.bucket_name = app_config.get("BAIDU_OBS_BUCKET_NAME") + client_config = BceClientConfiguration( + credentials=BceCredentials( + access_key_id=app_config.get("BAIDU_OBS_ACCESS_KEY"), + secret_access_key=app_config.get("BAIDU_OBS_SECRET_KEY"), + ), + endpoint=app_config.get("BAIDU_OBS_ENDPOINT"), + ) + + self.client 
= BosClient(config=client_config) + + def save(self, filename, data): + md5 = hashlib.md5() + md5.update(data) + content_md5 = base64.standard_b64encode(md5.digest()) + self.client.put_object( + bucket_name=self.bucket_name, key=filename, data=data, content_length=len(data), content_md5=content_md5 + ) + + def load_once(self, filename: str) -> bytes: + response = self.client.get_object(bucket_name=self.bucket_name, key=filename) + return response.data.read() + + def load_stream(self, filename: str) -> Generator: + def generate(filename: str = filename) -> Generator: + response = self.client.get_object(bucket_name=self.bucket_name, key=filename).data + while chunk := response.read(4096): + yield chunk + + return generate() + + def download(self, filename, target_filepath): + self.client.get_object_to_file(bucket_name=self.bucket_name, key=filename, file_name=target_filepath) + + def exists(self, filename): + res = self.client.get_object_meta_data(bucket_name=self.bucket_name, key=filename) + if res is None: + return False + return True + + def delete(self, filename): + self.client.delete_object(bucket_name=self.bucket_name, key=filename) diff --git a/api/extensions/storage/google_storage.py b/api/extensions/storage/google_cloud_storage.py similarity index 96% rename from api/extensions/storage/google_storage.py rename to api/extensions/storage/google_cloud_storage.py index c42f946fa8..d9c74b8d40 100644 --- a/api/extensions/storage/google_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -10,8 +10,8 @@ from google.cloud import storage as google_cloud_storage from extensions.storage.base_storage import BaseStorage -class GoogleStorage(BaseStorage): - """Implementation for google storage.""" +class GoogleCloudStorage(BaseStorage): + """Implementation for Google Cloud storage.""" def __init__(self, app: Flask): super().__init__(app) diff --git a/api/extensions/storage/huawei_storage.py b/api/extensions/storage/huawei_obs_storage.py similarity index 91% rename from api/extensions/storage/huawei_storage.py rename to api/extensions/storage/huawei_obs_storage.py index 269a008fba..dd243d4001 100644 --- a/api/extensions/storage/huawei_storage.py +++ b/api/extensions/storage/huawei_obs_storage.py @@ -6,8 +6,8 @@ from obs import ObsClient from extensions.storage.base_storage import BaseStorage -class HuaweiStorage(BaseStorage): - """Implementation for huawei obs storage.""" +class HuaweiObsStorage(BaseStorage): + """Implementation for Huawei OBS storage.""" def __init__(self, app: Flask): super().__init__(app) @@ -29,7 +29,8 @@ class HuaweiStorage(BaseStorage): def load_stream(self, filename: str) -> Generator: def generate(filename: str = filename) -> Generator: response = self.client.getObject(bucketName=self.bucket_name, objectKey=filename)["body"].response - yield from response.read(4096) + while chunk := response.read(4096): + yield chunk return generate() diff --git a/api/extensions/storage/local_storage.py b/api/extensions/storage/local_fs_storage.py similarity index 96% rename from api/extensions/storage/local_storage.py rename to api/extensions/storage/local_fs_storage.py index f833ae85dc..9308c4d180 100644 --- a/api/extensions/storage/local_storage.py +++ b/api/extensions/storage/local_fs_storage.py @@ -8,8 +8,8 @@ from flask import Flask from extensions.storage.base_storage import BaseStorage -class LocalStorage(BaseStorage): - """Implementation for local storage.""" +class LocalFsStorage(BaseStorage): + """Implementation for local filesystem storage.""" def __init__(self, 
app: Flask):
         super().__init__(app)
diff --git a/api/extensions/storage/oci_storage.py b/api/extensions/storage/oracle_oci_storage.py
similarity index 96%
rename from api/extensions/storage/oci_storage.py
rename to api/extensions/storage/oracle_oci_storage.py
index e32fa0a0ae..6934583567 100644
--- a/api/extensions/storage/oci_storage.py
+++ b/api/extensions/storage/oracle_oci_storage.py
@@ -8,7 +8,9 @@ from flask import Flask
 from extensions.storage.base_storage import BaseStorage
 
 
-class OCIStorage(BaseStorage):
+class OracleOCIStorage(BaseStorage):
+    """Implementation for Oracle OCI storage."""
+
     def __init__(self, app: Flask):
         super().__init__(app)
         app_config = self.app.config
diff --git a/api/extensions/storage/storage_type.py b/api/extensions/storage/storage_type.py
new file mode 100644
index 0000000000..415bf251f6
--- /dev/null
+++ b/api/extensions/storage/storage_type.py
@@ -0,0 +1,15 @@
+from enum import Enum
+
+
+class StorageType(str, Enum):
+    ALIYUN_OSS = "aliyun-oss"
+    AZURE_BLOB = "azure-blob"
+    BAIDU_OBS = "baidu-obs"
+    GOOGLE_STORAGE = "google-storage"
+    HUAWEI_OBS = "huawei-obs"
+    LOCAL = "local"
+    OCI_STORAGE = "oci-storage"
+    S3 = "s3"
+    TENCENT_COS = "tencent-cos"
+    VOLCENGINE_TOS = "volcengine-tos"
+    SUPBASE = "supabase"
diff --git a/api/extensions/storage/supabase_storage.py b/api/extensions/storage/supabase_storage.py
new file mode 100644
index 0000000000..1e399f87c8
--- /dev/null
+++ b/api/extensions/storage/supabase_storage.py
@@ -0,0 +1,60 @@
+import io
+from collections.abc import Generator
+from pathlib import Path
+
+from flask import Flask
+from supabase import Client
+
+from extensions.storage.base_storage import BaseStorage
+
+
+class SupabaseStorage(BaseStorage):
+    """Implementation for Supabase storage."""
+
+    def __init__(self, app: Flask):
+        super().__init__(app)
+        app_config = self.app.config
+        self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME")
+        self.client = Client(
+            supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY")
+        )
+        self.create_bucket(
+            id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME")
+        )
+
+    def create_bucket(self, id, bucket_name):
+        if not self.bucket_exists():
+            self.client.storage.create_bucket(id=id, name=bucket_name)
+
+    def save(self, filename, data):
+        self.client.storage.from_(self.bucket_name).upload(filename, data)
+
+    def load_once(self, filename: str) -> bytes:
+        content = self.client.storage.from_(self.bucket_name).download(filename)
+        return content
+
+    def load_stream(self, filename: str) -> Generator:
+        def generate(filename: str = filename) -> Generator:
+            result = self.client.storage.from_(self.bucket_name).download(filename)
+            byte_stream = io.BytesIO(result)
+            while chunk := byte_stream.read(4096):  # Read in chunks of 4KB
+                yield chunk
+
+        return generate()
+
+    def download(self, filename, target_filepath):
+        result = self.client.storage.from_(self.bucket_name).download(filename)
+        Path(target_filepath).write_bytes(result)
+
+    def exists(self, filename):
+        result = self.client.storage.from_(self.bucket_name).list(filename)
+        if len(result) > 0:
+            return True
+        return False
+
+    def delete(self, filename):
+        self.client.storage.from_(self.bucket_name).remove(filename)
+
+    def bucket_exists(self):
+        buckets = self.client.storage.list_buckets()
+        return any(bucket.name == self.bucket_name for bucket in buckets)
diff --git a/api/extensions/storage/tencent_storage.py b/api/extensions/storage/tencent_cos_storage.py
similarity index 94%
rename from api/extensions/storage/tencent_storage.py rename to api/extensions/storage/tencent_cos_storage.py index 1d499cd3bc..c529dce7ad 100644 --- a/api/extensions/storage/tencent_storage.py +++ b/api/extensions/storage/tencent_cos_storage.py @@ -6,8 +6,8 @@ from qcloud_cos import CosConfig, CosS3Client from extensions.storage.base_storage import BaseStorage -class TencentStorage(BaseStorage): - """Implementation for tencent cos storage.""" +class TencentCosStorage(BaseStorage): + """Implementation for Tencent Cloud COS storage.""" def __init__(self, app: Flask): super().__init__(app) diff --git a/api/extensions/storage/volcengine_storage.py b/api/extensions/storage/volcengine_tos_storage.py similarity index 97% rename from api/extensions/storage/volcengine_storage.py rename to api/extensions/storage/volcengine_tos_storage.py index f74ad2ee6d..1bedcf24c2 100644 --- a/api/extensions/storage/volcengine_storage.py +++ b/api/extensions/storage/volcengine_tos_storage.py @@ -6,7 +6,7 @@ from flask import Flask from extensions.storage.base_storage import BaseStorage -class VolcengineStorage(BaseStorage): +class VolcengineTosStorage(BaseStorage): """Implementation for Volcengine TOS storage.""" def __init__(self, app: Flask): diff --git a/api/fields/dataset_fields.py b/api/fields/dataset_fields.py index 9cf8da7acd..b32423f10c 100644 --- a/api/fields/dataset_fields.py +++ b/api/fields/dataset_fields.py @@ -38,9 +38,20 @@ dataset_retrieval_model_fields = { "score_threshold_enabled": fields.Boolean, "score_threshold": fields.Float, } +external_retrieval_model_fields = { + "top_k": fields.Integer, + "score_threshold": fields.Float, +} tag_fields = {"id": fields.String, "name": fields.String, "type": fields.String} +external_knowledge_info_fields = { + "external_knowledge_id": fields.String, + "external_knowledge_api_id": fields.String, + "external_knowledge_api_name": fields.String, + "external_knowledge_api_endpoint": fields.String, +} + dataset_detail_fields = { "id": fields.String, "name": fields.String, @@ -61,6 +72,8 @@ dataset_detail_fields = { "embedding_available": fields.Boolean, "retrieval_model_dict": fields.Nested(dataset_retrieval_model_fields), "tags": fields.List(fields.Nested(tag_fields)), + "external_knowledge_info": fields.Nested(external_knowledge_info_fields), + "external_retrieval_model": fields.Nested(external_retrieval_model_fields, allow_null=True), } dataset_query_detail_fields = { diff --git a/api/fields/external_dataset_fields.py b/api/fields/external_dataset_fields.py new file mode 100644 index 0000000000..2281460fe2 --- /dev/null +++ b/api/fields/external_dataset_fields.py @@ -0,0 +1,11 @@ +from flask_restful import fields + +from libs.helper import TimestampField + +external_knowledge_api_query_detail_fields = { + "id": fields.String, + "name": fields.String, + "setting": fields.String, + "created_by": fields.String, + "created_at": TimestampField, +} diff --git a/api/libs/helper.py b/api/libs/helper.py index d664ef1ae7..d8a8e7f411 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -162,7 +162,7 @@ def generate_string(n): return result -def get_remote_ip(request) -> str: +def extract_remote_ip(request) -> str: if request.headers.get("CF-Connecting-IP"): return request.headers.get("Cf-Connecting-Ip") elif request.headers.getlist("X-Forwarded-For"): @@ -189,7 +189,7 @@ def compact_generate_response(response: Union[dict, RateLimitGenerator]) -> Resp class TokenManager: @classmethod - def generate_token(cls, account: Account, token_type: str, additional_data: dict 
= None) -> str:
+    def generate_token(cls, account: Account, token_type: str, additional_data: Optional[dict] = None) -> str:
         old_token = cls._get_current_token_for_account(account.id, token_type)
         if old_token:
             if isinstance(old_token, bytes):
diff --git a/api/libs/json_in_md_parser.py b/api/libs/json_in_md_parser.py
index 185ff3f95e..9131408817 100644
--- a/api/libs/json_in_md_parser.py
+++ b/api/libs/json_in_md_parser.py
@@ -4,25 +4,27 @@ from core.llm_generator.output_parser.errors import OutputParserError
 
 
 def parse_json_markdown(json_string: str) -> dict:
-    # Remove the triple backticks if present
+    # Get json from the backticks/braces
     json_string = json_string.strip()
-    start_index = json_string.find("```json")
-    end_index = json_string.find("```", start_index + len("```json"))
-
-    if start_index != -1 and end_index != -1:
-        extracted_content = json_string[start_index + len("```json") : end_index].strip()
-
-        # Parse the JSON string into a Python dictionary
+    starts = ["```json", "```", "``", "`", "{"]
+    ends = ["```", "``", "`", "}"]
+    end_index = -1
+    for s in starts:
+        start_index = json_string.find(s)
+        if start_index != -1:
+            if json_string[start_index] != "{":
+                start_index += len(s)
+            break
+    if start_index != -1:
+        for e in ends:
+            end_index = json_string.rfind(e, start_index)
+            if end_index != -1:
+                if json_string[end_index] == "}":
+                    end_index += 1
+                break
+    if start_index != -1 and end_index != -1 and start_index < end_index:
+        extracted_content = json_string[start_index:end_index].strip()
         parsed = json.loads(extracted_content)
-    elif start_index != -1 and end_index == -1 and json_string.endswith("``"):
-        end_index = json_string.find("``", start_index + len("```json"))
-        extracted_content = json_string[start_index + len("```json") : end_index].strip()
-
-        # Parse the JSON string into a Python dictionary
-        parsed = json.loads(extracted_content)
-    elif json_string.startswith("{"):
-        # Parse the JSON string into a Python dictionary
-        parsed = json.loads(json_string)
     else:
         raise Exception("Could not find JSON block in the output.")
diff --git a/api/libs/oauth_data_source.py b/api/libs/oauth_data_source.py
index 05a73b09b7..e747ea97ad 100644
--- a/api/libs/oauth_data_source.py
+++ b/api/libs/oauth_data_source.py
@@ -1,3 +1,4 @@
+import datetime
 import urllib.parse
 
 import requests
@@ -69,6 +70,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
+            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -104,6 +106,7 @@ class NotionOAuth(OAuthDataSource):
         if data_source_binding:
             data_source_binding.source_info = source_info
             data_source_binding.disabled = False
+            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             new_data_source_binding = DataSourceOauthBinding(
@@ -138,6 +141,7 @@ class NotionOAuth(OAuthDataSource):
             }
             data_source_binding.source_info = new_source_info
             data_source_binding.disabled = False
+            data_source_binding.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
             db.session.commit()
         else:
             raise ValueError("Data source binding not found")
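
The rewritten `parse_json_markdown` above generalizes the old triple-backtick-only fast path: it scans a list of candidate start markers (a ```json fence, shorter backtick runs, or a bare opening brace) and matching end markers, then parses whatever sits between the outermost pair. A rough sketch of inputs it should now accept, using invented strings rather than fixtures from the repository:

```python
# Illustrative calls only; the inputs are made-up examples.
from libs.json_in_md_parser import parse_json_markdown

parse_json_markdown('```json\n{"a": 1}\n```')  # fenced code block
parse_json_markdown('`{"a": 1}`')              # wrapped in backticks
parse_json_markdown('{"a": 1}')                # bare JSON object
# Input with no recognizable markers still raises:
# Exception("Could not find JSON block in the output.")
```
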
diff --git a/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
new file mode 100644
index 0000000000..5337b340db
--- /dev/null
+++ b/api/migrations/versions/2024_09_24_0922-6af6a521a53e_update_retrieval_resource.py
@@ -0,0 +1,48 @@
+"""update-retrieval-resource
+
+Revision ID: 6af6a521a53e
+Revises: d57ba9ebb251
+Create Date: 2024-09-24 09:22:43.570120
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '6af6a521a53e'
+down_revision = 'd57ba9ebb251'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+        batch_op.alter_column('document_id',
+               existing_type=sa.UUID(),
+               nullable=True)
+        batch_op.alter_column('data_source_type',
+               existing_type=sa.TEXT(),
+               nullable=True)
+        batch_op.alter_column('segment_id',
+               existing_type=sa.UUID(),
+               nullable=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('dataset_retriever_resources', schema=None) as batch_op:
+        batch_op.alter_column('segment_id',
+               existing_type=sa.UUID(),
+               nullable=False)
+        batch_op.alter_column('data_source_type',
+               existing_type=sa.TEXT(),
+               nullable=False)
+        batch_op.alter_column('document_id',
+               existing_type=sa.UUID(),
+               nullable=False)
+
+    # ### end Alembic commands ###
diff --git a/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py b/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py
new file mode 100644
index 0000000000..3cb76e72c1
--- /dev/null
+++ b/api/migrations/versions/2024_09_25_0434-33f5fac87f29_external_knowledge_api.py
@@ -0,0 +1,73 @@
+"""external_knowledge_api
+
+Revision ID: 33f5fac87f29
+Revises: 6af6a521a53e
+Create Date: 2024-09-25 04:34:57.249436
+
+"""
+from alembic import op
+import models as models
+import sqlalchemy as sa
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = '33f5fac87f29'
+down_revision = '6af6a521a53e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('external_knowledge_apis', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.String(length=255), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('settings', sa.Text(), nullable=True), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='external_knowledge_apis_pkey') + ) + with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op: + batch_op.create_index('external_knowledge_apis_name_idx', ['name'], unique=False) + batch_op.create_index('external_knowledge_apis_tenant_idx', ['tenant_id'], unique=False) + + op.create_table('external_knowledge_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('external_knowledge_api_id', models.types.StringUUID(), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('external_knowledge_id', sa.Text(), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.PrimaryKeyConstraint('id', name='external_knowledge_bindings_pkey') + ) + with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op: + batch_op.create_index('external_knowledge_bindings_dataset_idx', ['dataset_id'], unique=False) + batch_op.create_index('external_knowledge_bindings_external_knowledge_api_idx', ['external_knowledge_api_id'], unique=False) + batch_op.create_index('external_knowledge_bindings_external_knowledge_idx', ['external_knowledge_id'], unique=False) + batch_op.create_index('external_knowledge_bindings_tenant_idx', ['tenant_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('external_knowledge_bindings', schema=None) as batch_op: + batch_op.drop_index('external_knowledge_bindings_tenant_idx') + batch_op.drop_index('external_knowledge_bindings_external_knowledge_idx') + batch_op.drop_index('external_knowledge_bindings_external_knowledge_api_idx') + batch_op.drop_index('external_knowledge_bindings_dataset_idx') + + op.drop_table('external_knowledge_bindings') + with op.batch_alter_table('external_knowledge_apis', schema=None) as batch_op: + batch_op.drop_index('external_knowledge_apis_tenant_idx') + batch_op.drop_index('external_knowledge_apis_name_idx') + + op.drop_table('external_knowledge_apis') + # ### end Alembic commands ### diff --git a/api/migrations/versions/2024_10_09_1329-d8e744d88ed6_fix_wrong_service_api_history.py b/api/migrations/versions/2024_10_09_1329-d8e744d88ed6_fix_wrong_service_api_history.py new file mode 100644 index 0000000000..b3b8dfa7d4 --- /dev/null +++ b/api/migrations/versions/2024_10_09_1329-d8e744d88ed6_fix_wrong_service_api_history.py @@ -0,0 +1,48 @@ +"""fix wrong service-api history + +Revision ID: d8e744d88ed6 +Revises: 33f5fac87f29 +Create Date: 2024-10-09 13:29:23.548498 + +""" +from alembic import op +from constants import UUID_NIL +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = 'd8e744d88ed6' +down_revision = '33f5fac87f29' +branch_labels = None +depends_on = None + +# (UTC) release date of v0.9.0 +v0_9_0_release_date = '2024-09-29 12:00:00' + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + sql = f"""UPDATE + public.messages +SET + parent_message_id = '{UUID_NIL}' +WHERE + invoke_from = 'service-api' + AND parent_message_id IS NULL + AND created_at >= '{v0_9_0_release_date}';""" + op.execute(sql) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + sql = f"""UPDATE + public.messages +SET + parent_message_id = NULL +WHERE + invoke_from = 'service-api' + AND parent_message_id = '{UUID_NIL}' + AND created_at >= '{v0_9_0_release_date}';""" + op.execute(sql) + # ### end Alembic commands ### diff --git a/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py b/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py index 1f8250c3eb..52495be60a 100644 --- a/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py +++ b/api/migrations/versions/fca025d3b60f_add_dataset_retrival_model.py @@ -1,4 +1,4 @@ -"""add-dataset-retrival-model +"""add-dataset-retrieval-model Revision ID: fca025d3b60f Revises: b3a09c049e8e diff --git a/api/models/dataset.py b/api/models/dataset.py index a2d2a3454d..4224ee5e9c 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -38,6 +38,7 @@ class Dataset(db.Model): ) INDEXING_TECHNIQUE_LIST = ["high_quality", "economy", None] + PROVIDER_LIST = ["vendor", "external", None] id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) tenant_id = db.Column(StringUUID, nullable=False) @@ -71,6 +72,14 @@ class Dataset(db.Model): def index_struct_dict(self): return json.loads(self.index_struct) if self.index_struct else None + @property + def external_retrieval_model(self): + default_retrieval_model = { + "top_k": 2, + "score_threshold": 0.0, + } + return self.retrieval_model or default_retrieval_model + @property def created_by_account(self): return db.session.get(Account, self.created_by) @@ -162,6 +171,29 @@ class Dataset(db.Model): return tags or [] + @property + def external_knowledge_info(self): + if self.provider != "external": + return None + external_knowledge_binding = ( + db.session.query(ExternalKnowledgeBindings).filter(ExternalKnowledgeBindings.dataset_id == self.id).first() + ) + if not external_knowledge_binding: + return None + external_knowledge_api = ( + db.session.query(ExternalKnowledgeApis) + .filter(ExternalKnowledgeApis.id == external_knowledge_binding.external_knowledge_api_id) + .first() + ) + if not external_knowledge_api: + return None + return { + "external_knowledge_id": external_knowledge_binding.external_knowledge_id, + "external_knowledge_api_id": external_knowledge_api.id, + "external_knowledge_api_name": external_knowledge_api.name, + "external_knowledge_api_endpoint": json.loads(external_knowledge_api.settings).get("endpoint", ""), + } + @staticmethod def gen_collection_name_by_id(dataset_id: str) -> str: normalized_dataset_id = dataset_id.replace("-", "_") @@ -687,3 +719,77 @@ class DatasetPermission(db.Model): tenant_id = db.Column(StringUUID, nullable=False) has_permission = db.Column(db.Boolean, nullable=False, server_default=db.text("true")) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + +class ExternalKnowledgeApis(db.Model): + __tablename__ = "external_knowledge_apis" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="external_knowledge_apis_pkey"), + db.Index("external_knowledge_apis_tenant_idx", "tenant_id"), + db.Index("external_knowledge_apis_name_idx", "name"), + ) + + id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + name = db.Column(db.String(255), nullable=False) + description = db.Column(db.String(255), nullable=False) + tenant_id = db.Column(StringUUID, nullable=False) + settings = db.Column(db.Text, nullable=True) + created_by = db.Column(StringUUID, nullable=False) + created_at = db.Column(db.DateTime, 
nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_by = db.Column(StringUUID, nullable=True) + updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + def to_dict(self): + return { + "id": self.id, + "tenant_id": self.tenant_id, + "name": self.name, + "description": self.description, + "settings": self.settings_dict, + "dataset_bindings": self.dataset_bindings, + "created_by": self.created_by, + "created_at": self.created_at.isoformat(), + } + + @property + def settings_dict(self): + try: + return json.loads(self.settings) if self.settings else None + except JSONDecodeError: + return None + + @property + def dataset_bindings(self): + external_knowledge_bindings = ( + db.session.query(ExternalKnowledgeBindings) + .filter(ExternalKnowledgeBindings.external_knowledge_api_id == self.id) + .all() + ) + dataset_ids = [binding.dataset_id for binding in external_knowledge_bindings] + datasets = db.session.query(Dataset).filter(Dataset.id.in_(dataset_ids)).all() + dataset_bindings = [] + for dataset in datasets: + dataset_bindings.append({"id": dataset.id, "name": dataset.name}) + + return dataset_bindings + + +class ExternalKnowledgeBindings(db.Model): + __tablename__ = "external_knowledge_bindings" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="external_knowledge_bindings_pkey"), + db.Index("external_knowledge_bindings_tenant_idx", "tenant_id"), + db.Index("external_knowledge_bindings_dataset_idx", "dataset_id"), + db.Index("external_knowledge_bindings_external_knowledge_idx", "external_knowledge_id"), + db.Index("external_knowledge_bindings_external_knowledge_api_idx", "external_knowledge_api_id"), + ) + + id = db.Column(StringUUID, nullable=False, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=False) + external_knowledge_api_id = db.Column(StringUUID, nullable=False) + dataset_id = db.Column(StringUUID, nullable=False) + external_knowledge_id = db.Column(db.Text, nullable=False) + created_by = db.Column(StringUUID, nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_by = db.Column(StringUUID, nullable=True) + updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) diff --git a/api/models/model.py b/api/models/model.py index 53940a5a16..0ac9334321 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1423,10 +1423,10 @@ class DatasetRetrieverResource(db.Model): position = db.Column(db.Integer, nullable=False) dataset_id = db.Column(StringUUID, nullable=False) dataset_name = db.Column(db.Text, nullable=False) - document_id = db.Column(StringUUID, nullable=False) + document_id = db.Column(StringUUID, nullable=True) document_name = db.Column(db.Text, nullable=False) - data_source_type = db.Column(db.Text, nullable=False) - segment_id = db.Column(StringUUID, nullable=False) + data_source_type = db.Column(db.Text, nullable=True) + segment_id = db.Column(StringUUID, nullable=True) score = db.Column(db.Float, nullable=True) content = db.Column(db.Text, nullable=False) hit_count = db.Column(db.Integer, nullable=True) diff --git a/api/poetry.lock b/api/poetry.lock index bce21fb547..efefedfb21 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "aiohappyeyeballs" -version = "2.4.0" +version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = 
"aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"}, - {file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"}, + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, ] [[package]] @@ -153,13 +153,13 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.13.2" +version = "1.13.3" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.2-py3-none-any.whl", hash = "sha256:6b8733129a6224a9a711e17c99b08462dbf7cc9670ba8f2e2ae9af860ceb1953"}, - {file = "alembic-1.13.2.tar.gz", hash = "sha256:1ff0ae32975f4fd96028c39ed9bb3c867fe3af956bd7bb37343b54c9fe7445ef"}, + {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"}, + {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"}, ] [package.dependencies] @@ -293,13 +293,13 @@ alibabacloud-tea = "*" [[package]] name = "alibabacloud-tea" -version = "0.3.9" +version = "0.4.0" description = "The tea module of alibabaCloud Python SDK." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "alibabacloud-tea-0.3.9.tar.gz", hash = "sha256:a9689770003fa9313d1995812f9fe36a2be315e5cdfc8d58de0d96808219ced9"}, - {file = "alibabacloud_tea-0.3.9-py3-none-any.whl", hash = "sha256:402fd2a92e6729f228d8c0300b182f80019edce19d83afa497aeb15fd7947f9a"}, + {file = "alibabacloud-tea-0.4.0.tar.gz", hash = "sha256:bdf72d747723bab190331b3c8593109fe2807504469bc0147f78c8c4945ed396"}, + {file = "alibabacloud_tea-0.4.0-py3-none-any.whl", hash = "sha256:59fae5765e6654f884e130233df6fb61ca0fbe01a29ed0755a1cf099a3d4d863"}, ] [package.dependencies] @@ -321,17 +321,17 @@ alibabacloud-tea = ">=0.0.1" [[package]] name = "alibabacloud-tea-openapi" -version = "0.3.11" +version = "0.3.12" description = "Alibaba Cloud openapi SDK Library for Python" optional = false python-versions = ">=3.6" files = [ - {file = "alibabacloud_tea_openapi-0.3.11.tar.gz", hash = "sha256:3f5cace1b1aeb8a64587574097403cfd066b86ee4c3c9abde587f9abfcad38de"}, + {file = "alibabacloud_tea_openapi-0.3.12.tar.gz", hash = "sha256:2e14809f357438e62c1ef4976a7655110dd54a75bbfa7d905fa3798355cfd974"}, ] [package.dependencies] -alibabacloud_credentials = ">=0.3.1,<1.0.0" -alibabacloud_gateway_spi = ">=0.0.1,<1.0.0" +alibabacloud_credentials = ">=0.3.5,<1.0.0" +alibabacloud_gateway_spi = ">=0.0.2,<1.0.0" alibabacloud_openapi_util = ">=0.2.1,<1.0.0" alibabacloud_tea_util = ">=0.3.13,<1.0.0" alibabacloud_tea_xml = ">=0.0.2,<1.0.0" @@ -364,12 +364,12 @@ alibabacloud-tea = ">=0.0.1" [[package]] name = "aliyun-python-sdk-core" -version = "2.15.2" +version = "2.16.0" description = "The core module of Aliyun Python SDK." 
optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "aliyun-python-sdk-core-2.15.2.tar.gz", hash = "sha256:54f66a53e193c61c5e16ea4505a0cab43543f8ad2ef22833f69c4d5e5151c17d"}, + {file = "aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9"}, ] [package.dependencies] @@ -455,13 +455,13 @@ vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" -version = "4.4.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -471,9 +471,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "arxiv" @@ -698,23 +698,23 @@ msrest = ">=0.6.21" [[package]] name = "azure-storage-file-share" -version = "12.17.0" +version = "12.19.0" description = "Microsoft Azure Azure File Share Storage Client Library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "azure-storage-file-share-12.17.0.tar.gz", hash = "sha256:f7b2c6cfc1b7cb80097a53b1ed2efa9e545b49a291430d369cdb49fafbc841d6"}, - {file = "azure_storage_file_share-12.17.0-py3-none-any.whl", hash = "sha256:c4652759a9d529bf08881bb53275bf38774bb643746b849d27c47118f9cf923d"}, + {file = "azure_storage_file_share-12.19.0-py3-none-any.whl", hash = "sha256:eac6cf1a454aba58af4e6ba450b36d16aa1d0c49679fb64ea8756bb896698c5b"}, + {file = "azure_storage_file_share-12.19.0.tar.gz", hash = "sha256:ea7a4174dc6c52f50ac8c30f228159fcc3675d1f8ba771b8d0efcbc310740278"}, ] [package.dependencies] -azure-core = ">=1.28.0" +azure-core = ">=1.30.0" cryptography = ">=2.1.4" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" [package.extras] -aio = ["azure-core[aio] (>=1.28.0)"] +aio = ["azure-core[aio] (>=1.30.0)"] [[package]] name = "backoff" @@ -727,6 +727,22 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "bce-python-sdk" +version = "0.9.23" +description = "BCE SDK for python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7" +files = [ + {file = "bce_python_sdk-0.9.23-py3-none-any.whl", hash = "sha256:8debe21a040e00060f6044877d594765ed7b18bc765c6bf16b878bca864140a3"}, + {file = 
"bce_python_sdk-0.9.23.tar.gz", hash = "sha256:19739fed5cd0725356fc5ffa2acbdd8fb23f2a81edb91db21a03174551d0cf41"}, +] + +[package.dependencies] +future = ">=0.6.0" +pycryptodome = ">=3.8.0" +six = ">=1.4.0" + [[package]] name = "bcrypt" version = "4.2.0" @@ -787,13 +803,13 @@ lxml = ["lxml"] [[package]] name = "billiard" -version = "4.2.0" +version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, + {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, + {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, ] [[package]] @@ -828,13 +844,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.19" +version = "1.35.38" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.19-py3-none-any.whl", hash = "sha256:c83f7f0cacfe7c19b109b363ebfa8736e570d24922f16ed371681f58ebab44a9"}, - {file = "botocore-1.35.19.tar.gz", hash = "sha256:42d6d8db7250cbd7899f786f9861e02cab17dc238f64d6acb976098ed9809625"}, + {file = "botocore-1.35.38-py3-none-any.whl", hash = "sha256:2eb17d32fa2d3bb5d475132a83564d28e3acc2161534f24b75a54418a1d51359"}, + {file = "botocore-1.35.38.tar.gz", hash = "sha256:55d9305c44e5ba29476df456120fa4fb919f03f066afa82f2ae400485e7465f4"}, ] [package.dependencies] @@ -843,7 +859,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] -crt = ["awscrt (==0.21.5)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "bottleneck" @@ -1049,13 +1065,13 @@ beautifulsoup4 = "*" [[package]] name = "build" -version = "1.2.2" +version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = false python-versions = ">=3.8" files = [ - {file = "build-1.2.2-py3-none-any.whl", hash = "sha256:277ccc71619d98afdd841a0e96ac9fe1593b823af481d3b0cea748e8894e0613"}, - {file = "build-1.2.2.tar.gz", hash = "sha256:119b2fb462adef986483438377a13b2f42064a2a3a4161f24a0cca698a07ac8c"}, + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, ] [package.dependencies] @@ -1241,101 +1257,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -1597,128 +1628,6 @@ pandas = ["pandas"] sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] tzlocal = ["tzlocal (>=4.0)"] -[[package]] -name = "clickhouse-driver" -version = "0.2.9" -description = "Python driver with native interface for ClickHouse" -optional = false -python-versions = "<4,>=3.7" -files = [ - {file = "clickhouse-driver-0.2.9.tar.gz", hash = "sha256:050ea4870ead993910b39e7fae965dc1c347b2e8191dcd977cd4b385f9e19f87"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ce04e9d0d0f39561f312d1ac1a8147bc9206e4267e1a23e20e0423ebac95534"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7ae5c8931bf290b9d85582e7955b9aad7f19ff9954e48caa4f9a180ea4d01078"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e51792f3bd12c32cb15a907f12de3c9d264843f0bb33dce400e3966c9f09a3f"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42fc546c31e4a04c97b749769335a679c9044dc693fa7a93e38c97fd6727173d"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a383a403d185185c64e49edd6a19b2ec973c5adcb8ebff7ed2fc539a2cc65a5"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f05321a97e816afc75b3e4f9eda989848fecf14ecf1a91d0f22c04258123d1f7"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be47e793846aac28442b6b1c6554e0731b848a5a7759a54aa2489997354efe4a"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:780e42a215d1ae2f6d695d74dd6f087781fb2fa51c508b58f79e68c24c5364e0"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9e28f1fe850675e173db586e9f1ac790e8f7edd507a4227cd54cd7445f8e75b6"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:125aae7f1308d3083dadbb3c78f828ae492e060f13e4007a0cf53a8169ed7b39"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2f3c4fbb61e75c62a1ab93a1070d362de4cb5682f82833b2c12deccb3bae888d"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dc03196a84e32d23b88b665be69afae98f57426f5fdf203e16715b756757961"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-win32.whl", hash = "sha256:25695d78a1d7ad6e221e800612eac08559f6182bf6dee0a220d08de7b612d993"}, - {file = "clickhouse_driver-0.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:367acac95398d721a0a2a6cf87e93638c5588b79498a9848676ce7f182540a6c"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a7353a7a08eee3aa0001d8a5d771cb1f37e2acae1b48178002431f23892121a"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:6af1c6cbc3481205503ab72a34aa76d6519249c904aa3f7a84b31e7b435555be"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48033803abd1100bfff6b9a1769d831b672cd3cda5147e0323b956fd1416d38d"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f202a58a540c85e47c31dabc8f84b6fe79dca5315c866450a538d58d6fa0571"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4df50fd84bfa4aa1eb7b52d48136066bfb64fabb7ceb62d4c318b45a296200b"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433a650571a0d7766eb6f402e8f5930222997686c2ee01ded22f1d8fd46af9d4"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232ee260475611cbf7adb554b81db6b5790b36e634fe2164f4ffcd2ca3e63a71"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:09049f7e71f15c9c9a03f597f77fc1f7b61ababd155c06c0d9e64d1453d945d7"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:424153d1d5f5a807f596a48cc88119f9fb3213ca7e38f57b8d15dcc964dd91f7"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4f078fd1cf19c4ca63b8d1e0803df665310c8d5b644c5b02bf2465e8d6ef8f55"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f138d939e26e767537f891170b69a55a88038919f5c10d8865b67b8777fe4848"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9aafabc7e32942f85dcb46f007f447ab69024831575df97cae28c6ed127654d1"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-win32.whl", hash = "sha256:935e16ebf1a1998d8493979d858821a755503c9b8af572d9c450173d4b88868c"}, - {file = "clickhouse_driver-0.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:306b3102cba278b5dfec6f5f7dc8b78416c403901510475c74913345b56c9e42"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fcb2fd00e58650ae206a6d5dbc83117240e622471aa5124733fbf2805eb8bda0"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a3e6b0a1eb218e3d870a94c76daaf65da46dca8f6888ea6542f94905c24d88"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d8e2888a857d8db3d98765a5ad23ab561241feaef68bbffc5a0bd9c142342"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85d50c011467f5ff6772c4059345968b854b72e07a0219030b7c3f68419eb7f7"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93b395c1370629ccce8fb3e14cd5be2646d227bd32018c21f753c543e9a7e96b"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dbcee870c60d9835e5dce1456ab6b9d807e6669246357f4b321ef747b90fa43"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fffa5a5f317b1ec92e406a30a008929054cf3164d2324a3c465d0a0330273bf8"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:476702740a279744badbd177ae1c4a2d089ec128bd676861219d1f92078e4530"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:5cd6d95fab5ff80e9dc9baedc9a926f62f74072d42d5804388d63b63bec0bb63"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:05027d32d7cf3e46cb8d04f8c984745ae01bd1bc7b3579f9dadf9b3cca735697"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:3d11831842250b4c1b26503a6e9c511fc03db096608b7c6af743818c421a3032"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:81b4b671b785ebb0b8aeabf2432e47072413d81db959eb8cfd8b6ab58c5799c6"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-win32.whl", hash = "sha256:e893bd4e014877174a59e032b0e99809c95ec61328a0e6bd9352c74a2f6111a8"}, - {file = "clickhouse_driver-0.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:de6624e28eeffd01668803d28ae89e3d4e359b1bff8b60e4933e1cb3c6f86f18"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:909205324089a9ee59bee7ecbfa94595435118cca310fd62efdf13f225aa2965"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f31d6e47dc2b0f367f598f5629147ed056d7216c1788e25190fcfbfa02e749"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed84179914b2b7bb434c2322a6e7fd83daa681c97a050450511b66d917a129bb"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67d1bf63efb4ba14ae6c6da99622e4a549e68fc3ee14d859bf611d8e6a61b3fa"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eed23ea41dd582d76f7a2ec7e09cbe5e9fec008f11a4799fa35ce44a3ebd283"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a654291132766efa2703058317749d7c69b69f02d89bac75703eaf7f775e20da"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c26c5ef16d0ef3cabc5bc03e827e01b0a4afb5b4eaf8850b7cf740cee04a1d4"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b57e83d7986d3cbda6096974a9510eb53cb33ad9072288c87c820ba5eee3370e"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:153cc03b36f22cbde55aa6a5bbe99072a025567a54c48b262eb0da15d8cd7c83"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:83a857d99192936091f495826ae97497cd1873af213b1e069d56369fb182ab8e"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb05a9bb22cbe9ad187ad268f86adf7e60df6083331fe59c01571b7b725212dd"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-win32.whl", hash = "sha256:3e282c5c25e32d96ed151e5460d2bf4ecb805ea64449197dd918e84e768016df"}, - {file = "clickhouse_driver-0.2.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c46dccfb04a9afd61a1b0e60bfefceff917f76da2c863f9b36b39248496d5c77"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:612ca9028c718f362c97f552e63d313cf1a70a616ef8532ddb0effdaf12ebef9"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471b884d318e012f68d858476052742048918854f7dfe87d78e819f87a848ffb"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ee63c35e99da887eb035c8d6d9e64fd298a0efc1460395297dd5cc281a6912"}, - {file = 
"clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0819bb63d2c5025a1fb9589f57ef82602687cef11081d6dfa6f2ce44606a1772"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6680ee18870bca1fbab1736c8203a965efaec119ab4c37821ad99add248ee08"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:713c498741b54debd3a10a5529e70b6ed85ca33c3e8629e24ae5cd8160b5a5f2"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:730837b8f63941065c9c955c44286aef0987fb084ffb3f55bf1e4fe07df62269"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9f4e38b2ea09214c8e7848a19391009a18c56a3640e1ba1a606b9e57aeb63404"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:457f1d6639e0345b717ae603c79bd087a35361ce68c1c308d154b80b841e5e7d"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:49a55aeb8ea625a87965a96e361bbb1ad67d0931bfb2a575f899c1064e70c2da"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9230058d8c9b1a04079afae4650fb67745f0f1c39db335728f64d48bd2c19246"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8798258bd556542dd9c6b8ebe62f9c5110c9dcdf97c57fb077e7b8b6d6da0826"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-win32.whl", hash = "sha256:ce8e3f4be46bcc63555863f70ab0035202b082b37e6f16876ef50e7bc4b47056"}, - {file = "clickhouse_driver-0.2.9-cp38-cp38-win_amd64.whl", hash = "sha256:2d982959ff628255808d895a67493f2dab0c3a9bfc65eeda0f00c8ae9962a1b3"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a46b227fab4420566ed24ee70d90076226d16fcf09c6ad4d428717efcf536446"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eaa2ce5ea08cf5fddebb8c274c450e102f329f9e6966b6cd85aa671c48e5552"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97f0083194d6e23b5ef6156ed0d5388c37847b298118199d7937ba26412a9e2"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6cab5cdbb0f8ee51d879d977b78f07068b585225ac656f3c081896c362e8f83"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cdb1b011a53ee71539e9dc655f268b111bac484db300da92829ed59e910a8fd0"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf51bb761b281d20910b4b689c699ef98027845467daa5bb5dfdb53bd6ee404"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8ea462e3cebb121ff55002e9c8a9a0a3fd9b5bbbf688b4960f0a83c0172fb31"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:70bee21c245226ad0d637bf470472e2d487b86911b6d673a862127b934336ff4"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:253a3c223b944d691bf0abbd599f592ea3b36f0a71d2526833b1718f37eca5c2"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a6549b53fc5c403dc556cb39b2ae94d73f9b113daa00438a660bb1dd5380ae4d"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:1c685cd4abe61af1c26279ff04b9f567eb4d6c1ec7fb265af7481b1f153043aa"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e25144219577491929d032a6c3ddd63c6cd7fa764af829a5637f798190d9b26"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-win32.whl", hash = "sha256:0b9925610d25405a8e6d83ff4f54fc2456a121adb0155999972f5edd6ba3efc8"}, - {file = "clickhouse_driver-0.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:b243de483cfa02716053b0148d73558f4694f3c27b97fc1eaa97d7079563a14d"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:45a3d5b1d06750fd6a18c29b871494a2635670099ec7693e756a5885a4a70dbf"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8415ffebd6ca9eef3024763abc450f8659f1716d015bd563c537d01c7fbc3569"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace48db993aa4bd31c42de0fa8d38c94ad47405916d6b61f7a7168a48fb52ac1"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b07123334fe143bfe6fa4e3d4b732d647d5fd2cfb9ec7f2f76104b46fe9d20c6"}, - {file = "clickhouse_driver-0.2.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2af3efa73d296420ce6362789f5b1febf75d4aa159a479393f01549115509d5"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:baf57eede88d07a1eb04352d26fc58a4d97991ca3d8840f7c5d48691dec9f251"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:275d0ccdab9c3571bdb3e9acfab4497930aa584ff2766b035bb2f854deaf8b82"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:293da77bfcac3168fb35b27c242f97c1a05502435c0686ecbb8e2e4abcb3de26"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d6c2e5830705e4eeef33070ca4d5a24dfa221f28f2f540e5e6842c26e70b10b"}, - {file = "clickhouse_driver-0.2.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:11934bd78d97dd7e1a23a6222b5edd1e1b4d34e1ead5c846dc2b5c56fdc35ff5"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b802b6f0fbdcc3ab81b87f09b694dde91ab049f44d1d2c08c3dc8ea9a5950cfa"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af871c5315eb829ecf4533c790461ea8f73b3bfd5f533b0467e479fdf6ddcfd"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d577dd4867b9e26cf60590e1f500990c8701a6e3cfbb9e644f4d0c0fb607028"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ed3dea2d1eca85fef5b8564ddd76dedb15a610c77d55d555b49d9f7c896b64b"}, - {file = "clickhouse_driver-0.2.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:91ec96f2c48e5bdeac9eea43a9bc9cc19acb2d2c59df0a13d5520dfc32457605"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7667ab423452754f36ba8fb41e006a46baace9c94e2aca2a745689b9f2753dfb"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:653583b1f3b088d106f180d6f02c90917ecd669ec956b62903a05df4a7f44863"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef3dd0cbdf2f0171caab90389af0ede068ec802bf46c6a77f14e6edc86671bc"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11b1833ee8ff8d5df39a34a895e060b57bd81e05ea68822bc60476daff4ce1c8"}, - {file = "clickhouse_driver-0.2.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a3195639e6393b9d4aafe736036881ff86b6be5855d4bf7d9f5c31637181ec3"}, -] - -[package.dependencies] -pytz = "*" -tzlocal = "*" - -[package.extras] -lz4 = ["clickhouse-cityhash (>=1.0.2.1)", "lz4", "lz4 (<=3.0.1)"] -numpy = ["numpy (>=1.12.0)", "pandas (>=0.24.0)"] -zstd = ["clickhouse-cityhash (>=1.0.2.1)", "zstd"] - [[package]] name = "cloudpickle" version = "2.2.1" @@ -1979,43 +1888,38 @@ files = [ [[package]] name = "cryptography" -version = "42.0.8" +version = "43.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, - {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, - {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, - {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, - {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, - {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, - {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, - {file = 
"cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, - {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, - {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, - {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, - {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, - {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, - {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, - {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = 
"cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] @@ -2028,7 +1932,7 @@ nox = ["nox"] pep8test = ["check-sdist", "click", "mypy", "ruff"] sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", 
"pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] [[package]] @@ -2077,13 +1981,13 @@ tokenizer = ["tiktoken"] [[package]] name = "dataclass-wizard" -version = "0.22.3" +version = "0.23.0" description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input." optional = false python-versions = "*" files = [ - {file = "dataclass-wizard-0.22.3.tar.gz", hash = "sha256:4c46591782265058f1148cfd1f54a3a91221e63986fdd04c9d59f4ced61f4424"}, - {file = "dataclass_wizard-0.22.3-py2.py3-none-any.whl", hash = "sha256:63751203e54b9b9349212cc185331da73c1adc99c51312575eb73bb5c00c1962"}, + {file = "dataclass-wizard-0.23.0.tar.gz", hash = "sha256:da29ec19846d46a1eef0692ba7c59c8a86ecd3a9eaddc0511cfc7485ad6d9c50"}, + {file = "dataclass_wizard-0.23.0-py2.py3-none-any.whl", hash = "sha256:50207dec6d36494421366b49b7a9ba6a4d831e2650c0af25cb4c057103d4a97c"}, ] [package.extras] @@ -2134,6 +2038,17 @@ packaging = ">=17.0" pandas = ">=0.24.2" pyarrow = ">=3.0.0" +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "defusedxml" version = "0.7.1" @@ -2162,15 +2077,29 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -2241,87 +2170,104 @@ typing_extensions = ">=4.0,<5.0" [[package]] name = "duckdb" -version = "1.1.0" +version = "1.1.1" description = "DuckDB in-process database" optional = false python-versions = ">=3.7.0" files = [ - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5e4cbc408e6e41146dea89b9044dae7356e353db0c96b183e5583ee02bc6ae5d"}, - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:6370ae27ec8167ccfbefb94f58ad9fdc7bac142399960549d6d367f233189868"}, - {file = "duckdb-1.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4e1c3414f7fd01f4810dc8b335deffc91933a159282d65fef11c1286bc0ded04"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c6bc2a58689adf5520303c5f68b065b9f980bd31f1366c541b8c7490abaf55cd"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d02be208d2885ca085d4c852b911493b8cdac9d6eae893259da32bd72a437c25"}, - {file = "duckdb-1.1.0-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:655df442ceebfc6f3fd6c8766e04b60d44dddedfa90275d794f9fab2d3180879"}, - {file = "duckdb-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6e183729bb64be7798ccbfda6283ebf423c869268c25af2b56929e48f763be2f"}, - {file = "duckdb-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:61fb838da51e07ceb0222c4406b059b90e10efcc453c19a3650b73c0112138c4"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:7807e2f0d3344668e433f0dc1f54bfaddd410589611393e9a7ed56f8dec9514f"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:3da30b7b466f710d52caa1fdc3ef0bf4176ad7f115953cd9f8b0fbf0f723778f"}, - {file = "duckdb-1.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:b9b6a77ef0183f561b1fc2945fcc762a71570ffd33fea4e3a855d413ed596fe4"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16243e66a9fd0e64ee265f2634d137adc6593f54ddf3ef55cb8a29e1decf6e54"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42b910a149e00f40a1766dc74fa309d4255b912a5d2fdcc387287658048650f6"}, - {file = "duckdb-1.1.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:47849d546dc4238c0f20e95fe53b621aa5b08684e68fff91fd84a7092be91a17"}, - {file = "duckdb-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11ec967b67159361ceade34095796a8d19368ea5c30cad988f44896b082b0816"}, - {file = "duckdb-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:510b5885ed6c267b9c0e1e7c6138fdffc2dd6f934a5a95b76da85da127213338"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:657bc7ac64d5faf069a782ae73afac51ef30ae2e5d0e09ce6a09d03db84ab35e"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:89f3de8cba57d19b41cd3c47dd06d979bd2a2ffead115480e37afbe72b02896d"}, - {file = "duckdb-1.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f6486323ab20656d22ffa8f3c6e109dde30d0b327b7c831f22ebcfe747f97fb0"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78a4510f82431ee3f14db689fe8727a4a9062c8f2fbb3bcfe3bfad3c1a198004"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64bf2a6e23840d662bd2ac09206a9bd4fa657418884d69e5c352d4456dc70b3c"}, - {file = "duckdb-1.1.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23fc9aa0af74e3803ed90c8d98280fd5bcac8c940592bf6288e8fd60fb051d00"}, - {file = "duckdb-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f3aea31341ce400640dd522e4399b941f66df17e39884f446638fe958d6117c"}, - {file = "duckdb-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:3db4ab31c20de4edaef152930836b38e7662cd71370748fdf2c38ba9cf854dc4"}, - {file = "duckdb-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3b6b4fe1edfe35f64f403a9f0ab75258cee35abd964356893ee37424174b7e4"}, - {file = "duckdb-1.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad02f50d5a2020822d1638fc1a9bcf082056f11d2e15ccfc1c1ed4d0f85a3be"}, - {file = 
"duckdb-1.1.0-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb66e9e7391801928ea134dcab12d2e4c97f2ce0391c603a3e480bbb15830bc8"}, - {file = "duckdb-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:069fb7bca459e31edb32a61f0eea95d7a8a766bef7b8318072563abf8e939593"}, - {file = "duckdb-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e39f9b7b62e64e10d421ff04480290a70129c38067d1a4f600e9212b10542c5a"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:55ef98bcc7ba745752607f1b926e8d9b7ce32c42c423bbad10c44820aefe23a7"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:e2a08175e43b865c1e9611efd18cacd29ddd69093de442b1ebdf312071df7719"}, - {file = "duckdb-1.1.0-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:0e3644b1f034012d82b9baa12a7ea306fe71dc6623731b28c753c4a617ff9499"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:211a33c1ddb5cc609f75eb43772b0b03b45d2fa89bec107e4715267ca907806a"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e74b6f8a5145abbf7e6c1a2a61f0adbcd493c19b358f524ec9a3cebdf362abb"}, - {file = "duckdb-1.1.0-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:58f1633dd2c5af5088ae2d119418e200855d0699d84f2fae9d46d30f404bcead"}, - {file = "duckdb-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d18caea926b1e301c29b140418fca697aad728129e269b4f82c2795a184549e1"}, - {file = "duckdb-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd9fb1408942411ad360f8414bc3fbf0091c396ca903d947a10f2e31324d5cbd"}, - {file = "duckdb-1.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bd11bc899cebf5ff936d1276a2dfb7b7db08aba3bcc42924afeafc2163bddb43"}, - {file = "duckdb-1.1.0-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:53825a63193c582a78c152ea53de8d145744ddbeea18f452625a82ebc33eb14a"}, - {file = "duckdb-1.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:29dc18087de47563b3859a6b98bbed96e1c96ce5db829646dc3b16a916997e7d"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb19319883564237a7a03a104dbe7f445e73519bb67108fcab3d19b6b91fe30"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aac2fcabe2d5072c252d0b3087365f431de812d8199705089fb073e4d039d19c"}, - {file = "duckdb-1.1.0-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d89eaaa5df8a57e7d2bc1f4c46493bb1fee319a00155f2015810ad2ace6570ae"}, - {file = "duckdb-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d86a6926313913cd2cc7e08816d3e7f72ba340adf2959279b1a80058be6526d9"}, - {file = "duckdb-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8333f3e85fa2a0f1c222b752c2bd42ea875235ff88492f7bcbb6867d0f644eb"}, - {file = "duckdb-1.1.0.tar.gz", hash = "sha256:b4d4c12b1f98732151bd31377753e0da1a20f6423016d2d097d2e31953ec7c23"}, + {file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e310610b692d30aa7f1f40d7878b26978a5b191f23fa8fa082bd17092c67c2fd"}, + {file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7acc97c3cc995850a4fa59dfa6ce713d7ea187c9696632161aa09d898f001a2b"}, + {file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:c0a09d78daea0de7ddf3d6d1113e80ceed8c15537e93f8efaad53024ffbde245"}, + {file = "duckdb-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:50c3b1667b0c73cb076b1b1f8fa0fd88fcef5c2bbb2b9acdef79e2eae429c248"}, + {file = "duckdb-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1499a9b159d4675ea46786b7ebdbabd8287c62b6b116ccfd529112318d47184e"}, + {file = "duckdb-1.1.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:876deda2ce97f4a9005a9ac862f0ebee9e5956d51d589a24955802ca91726d49"}, + {file = "duckdb-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:40be901b38c709076f699b0c2f42a0c5663a496647eba350530e3a77f46a239b"}, + {file = "duckdb-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb7642c5b21b8165b60029c274fc931c7c29cae3124b9a95ed73d050dd23584"}, + {file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:959716b65cf1c94fc117ac9c9692eea0bd64ae53bc8ab6538d459087b474dbeb"}, + {file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:6ff3c52ce0f8d25478155eb01de043ad0a25badbd10e684a2cd74363f1b86cde"}, + {file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:430294cf11ce866d3b726cf4530462316e20b773fed3cf2de3cf63eb89650da6"}, + {file = "duckdb-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc9d48f772fafeea52568a0568cd11314cd79a10214069f3700dbcb31ebdf511"}, + {file = "duckdb-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:572095739024d9a5aa2dd8336c289af6a624c203004213e49b7e2469275e940f"}, + {file = "duckdb-1.1.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:660d9baf637b9a15e1ba74bbe02d3b4a20d82e8cbbd7d0712e0d59e3e9d6efea"}, + {file = "duckdb-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b91973605c8a30a38c4381a27895e7768cb3caa6700b2534ab76cc6b72cac390"}, + {file = "duckdb-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:f57c9e070cecf42d379145a75f325ec57fb1d410d6ff6592b5a28c2ff2b5792c"}, + {file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:926a99b81c50b9a4a43ca26dcb781f934d35e773d22913548396601ab8d44c12"}, + {file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:55a2632d27b5a965f1d9fc74b03383e80a3f8e3dc9596807dfb02c8db08cfcb7"}, + {file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:8d8174fe47caf48d830dc477a45cedc8c970722df09dc1456bddc760ff6ccf68"}, + {file = "duckdb-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ad84023399002222fa8d5264a8dc2083053027910df728da92cabb07494a489"}, + {file = "duckdb-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8adbc8b37444424c72043288f1521c860555a4f151ee4b744e6125f5d05729"}, + {file = "duckdb-1.1.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:550524c1b423eeb7ca0fdf1c2e6d29e723d7ec7cfab3050b9feb55a620ae927f"}, + {file = "duckdb-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4064243e4d3f445975b78773677de0ccbe924f9c7058a7c2cfedb24bba2ba939"}, + {file = "duckdb-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:4f64516dc62dd0fcbb9785c5bc7532a4fca3e6016bbcc92a2b235aa972c631f6"}, + {file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4bf75a64c927470b6618496adcfbf0f316ef09d46a44cfe8e38b78e9ff40c8a0"}, + {file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:5c8cd6fd7107299b9a243836cd8163e4c08d6228f18cbee4ed9f535f53300096"}, + {file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = 
"sha256:fc81c02b4d73533a438a9bbae19499531d85b752233c905facc4df41bbde043c"}, + {file = "duckdb-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baff4014caf6553b624a296e4db2926602670bd9be6e0fc75f3e970b085631b0"}, + {file = "duckdb-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e21b75a9a60f10b5b5033138c317d929018c92f355fadae5949b310a9179e0a7"}, + {file = "duckdb-1.1.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8220f039c5ea06dc126232464ab9b77197f80ae53d4611b0a41f73c54f6f3931"}, + {file = "duckdb-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:07384414ceae585d4106a7dc154331ae42f45390ed675ec81e3d01f2252a6b01"}, + {file = "duckdb-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:82776b3999e71a962db0bdc3f0258407ef41453f63eb47c33da29b644f8eb530"}, + {file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35d4323655be4053fb90d47e85222c93fd56aea0e8ab0ac44bd8f7249ba85697"}, + {file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990d0799e0f543a4369413dc6caf7782cbbab49955c08c28ac56d5dab5ccef11"}, + {file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ef3ba36b317abe000f502702eaaefdd8c3651a25aa0ad409f9487b286e2fb28"}, + {file = "duckdb-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c6e513a572967cd2bab0f20ce265f8eaf95ea7b554eecf1c233717c38569abc"}, + {file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:567471cb964a0e54a7874c578e81af7b6ab474676ae6469ae1c33c2353f76fb1"}, + {file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:a41d8eb4dc538d17660b78f2f4ecd0ba29666a396453bb71d6f4972bf2b3959e"}, + {file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:31be0b9bc1909fb60abda7cd30615fe0224d1e451160d79e8e0313d6205417b0"}, + {file = "duckdb-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541fb49da108e080d4f2984d2fdabaee36d65967a33642f8bce03373b29952f0"}, + {file = "duckdb-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c54f836dac5eddbe369fa654811e979bb07688638a52d1c006172feb5b75a5"}, + {file = "duckdb-1.1.1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afb97970ee72e554b507c6f2e40b356bdbf8fc1f466e7c4d1797183eb66c0809"}, + {file = "duckdb-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:a2cdcb68247f02017a35a0b617ceb1d36a02a7c0588d7e2ed91c9a4e9f14c3f6"}, + {file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:36d71969cb98d10dc2391d8755921258d197995cc8c69e6c82fc377c2f71940a"}, + {file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:3693f464409379a21aff4e35b5f67eb6c96fc402649d9ffddbda4ee9ee9ba9b6"}, + {file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:06ca7f4ca785cc86e9f9aa23d16b67b82dc454b14c396b2e0ff4c09698c7838e"}, + {file = "duckdb-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ed92f3229bf70897a742e7648f648aa8b0c81a7489072aec5515c5635f3303c"}, + {file = "duckdb-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80ebf52c03f81265b67720abc06a5c7770d08df82b30cabbe266012bd526229"}, + {file = "duckdb-1.1.1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:402a42b992227ebb371a48681ce71b6d1c0661385454b269e6aa379f77a8a83a"}, + {file = 
"duckdb-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a182d3cbf2e352aaddf392887331bbac460c473cbd55c65d6b6121ef7b43f174"}, + {file = "duckdb-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:fafc7d1ec4401787597a5f983d4ef8a9b0638f31e1674a458c57383911166f27"}, + {file = "duckdb-1.1.1.tar.gz", hash = "sha256:74fb07c1334a73e0ead1b0a03646d349921dac655762d916c8e45194c8218d30"}, ] [[package]] name = "duckduckgo-search" -version = "6.2.12" +version = "6.3.0" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-6.2.12-py3-none-any.whl", hash = "sha256:0d379c1f845b632a41553efb13d571788f19ad289229e641a27b5710d92097a6"}, - {file = "duckduckgo_search-6.2.12.tar.gz", hash = "sha256:04f9f1459763668d268344c7a32d943173d0e060dad53a5c2df4b4d3ca9a74cf"}, + {file = "duckduckgo_search-6.3.0-py3-none-any.whl", hash = "sha256:9a231a7b325226811cf7d35a240f3f501e718ae10a1aa0a638cabc80e129dfe7"}, + {file = "duckduckgo_search-6.3.0.tar.gz", hash = "sha256:e9f56955569325a7d9cacda2488ca78bf6629a459e74415892bee560b664f5eb"}, ] [package.dependencies] click = ">=8.1.7" -primp = ">=0.6.2" +primp = ">=0.6.3" [package.extras] dev = ["mypy (>=1.11.1)", "pytest (>=8.3.1)", "pytest-asyncio (>=0.23.8)", "ruff (>=0.6.1)"] lxml = ["lxml (>=5.2.2)"] +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + [[package]] name = "elastic-transport" -version = "8.15.0" +version = "8.15.1" description = "Transport classes and utilities shared among Python Elastic client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "elastic_transport-8.15.0-py3-none-any.whl", hash = "sha256:d7080d1dada2b4eee69e7574f9c17a76b42f2895eff428e562f94b0360e158c0"}, - {file = "elastic_transport-8.15.0.tar.gz", hash = "sha256:85d62558f9baafb0868c801233a59b235e61d7b4804c28c2fadaa866b6766233"}, + {file = "elastic_transport-8.15.1-py3-none-any.whl", hash = "sha256:b5e82ff1679d8c7705a03fd85c7f6ef85d6689721762d41228dd312e34f331fc"}, + {file = "elastic_transport-8.15.1.tar.gz", hash = "sha256:9cac4ab5cf9402668cf305ae0b7d93ddc0c7b61461d6d1027850db6da9cc5742"}, ] [package.dependencies] @@ -2329,17 +2275,17 @@ certifi = "*" urllib3 = ">=1.26.2,<3" [package.extras] -develop = ["aiohttp", "furo", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] +develop = ["aiohttp", "furo", "httpcore (<1.0.6)", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] [[package]] name = "elasticsearch" -version = "8.15.1" +version = "8.14.0" description = "Python client for Elasticsearch" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "elasticsearch-8.15.1-py3-none-any.whl", hash = "sha256:02a0476e98768a30d7926335fc0d305c04fdb928eea1354c6e6040d8c2814569"}, - {file = 
"elasticsearch-8.15.1.tar.gz", hash = "sha256:40c0d312f8adf8bdc81795bc16a0b546ddf544cb1f90e829a244e4780c4dbfd8"}, + {file = "elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130"}, + {file = "elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b"}, ] [package.dependencies] @@ -2347,27 +2293,21 @@ elastic-transport = ">=8.13,<9" [package.extras] async = ["aiohttp (>=3,<4)"] -dev = ["aiohttp", "black", "build", "coverage", "isort", "jinja2", "mapbox-vector-tile", "nox", "numpy", "orjson", "pandas", "pyarrow", "pytest", "pytest-asyncio", "pytest-cov", "python-dateutil", "pyyaml (>=5.4)", "requests (>=2,<3)", "simsimd", "twine", "unasync"] -docs = ["sphinx", "sphinx-autodoc-typehints", "sphinx-rtd-theme (>=2.0)"] orjson = ["orjson (>=3)"] -pyarrow = ["pyarrow (>=1)"] requests = ["requests (>=2.4.0,!=2.32.2,<3.0.0)"] vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] [[package]] name = "emoji" -version = "2.12.1" +version = "2.14.0" description = "Emoji for Python" optional = false python-versions = ">=3.7" files = [ - {file = "emoji-2.12.1-py3-none-any.whl", hash = "sha256:a00d62173bdadc2510967a381810101624a2f0986145b8da0cffa42e29430235"}, - {file = "emoji-2.12.1.tar.gz", hash = "sha256:4aa0488817691aa58d83764b6c209f8a27c0b3ab3f89d1b8dceca1a62e4973eb"}, + {file = "emoji-2.14.0-py3-none-any.whl", hash = "sha256:fcc936bf374b1aec67dda5303ae99710ba88cc9cdce2d1a71c5f2204e6d78799"}, + {file = "emoji-2.14.0.tar.gz", hash = "sha256:f68ac28915a2221667cddb3e6c589303c3c6954c6c5af6fefaec7f9bdf72fdca"}, ] -[package.dependencies] -typing-extensions = ">=4.7.0" - [package.extras] dev = ["coverage", "pytest (>=7.4.4)"] @@ -2432,13 +2372,13 @@ test = ["pytest (>=6)"] [[package]] name = "fastapi" -version = "0.114.2" +version = "0.115.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = false python-versions = ">=3.8" files = [ - {file = "fastapi-0.114.2-py3-none-any.whl", hash = "sha256:44474a22913057b1acb973ab90f4b671ba5200482e7622816d79105dcece1ac5"}, - {file = "fastapi-0.114.2.tar.gz", hash = "sha256:0adb148b62edb09e8c6eeefa3ea934e8f276dabc038c5a82989ea6346050c3da"}, + {file = "fastapi-0.115.0-py3-none-any.whl", hash = "sha256:17ea427674467486e997206a5ab25760f6b09e069f099b96f5b55a32fb6f1631"}, + {file = "fastapi-0.115.0.tar.gz", hash = "sha256:f93b4ca3529a8ebc6fc3fcf710e5efa8de3df9b41570958abf1d97d843138004"}, ] [package.dependencies] @@ -2527,18 +2467,18 @@ sgmllib3k = "*" [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] @@ -2552,6 +2492,24 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "flasgger" +version = "0.9.7.1" +description = "Extract swagger specs from your flask project" +optional = false +python-versions = "*" +files = [ + {file = "flasgger-0.9.7.1.tar.gz", hash = "sha256:ca098e10bfbb12f047acc6299cc70a33851943a746e550d86e65e60d4df245fb"}, +] + +[package.dependencies] +Flask = ">=0.10" +jsonschema = ">=3.0.1" +mistune = "*" +packaging = "*" +PyYAML = ">=3.0" +six = ">=1.10.0" + [[package]] name = "flask" version = "3.0.3" @@ -2701,53 +2659,59 @@ files = [ [[package]] name = "fonttools" -version = "4.53.1" +version = "4.54.1" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, - {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, - {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, - {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, - {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, - {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, - {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, - {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, - {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, - {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, - {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, - {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, - {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, - {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, + {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, + {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, + {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, + {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, + {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, + {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, + {file 
= "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, + {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, + {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, + {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, + {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, + {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, + {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, + {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, + {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, + {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, + {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, + {file = 
"fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, + {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, + {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, + {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, + {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, ] [package.extras] @@ -2766,44 +2730,49 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "frozendict" -version = "2.4.4" +version = "2.4.5" description = "A simple immutable dictionary" optional = false python-versions = ">=3.6" files = [ - {file = "frozendict-2.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4a59578d47b3949437519b5c39a016a6116b9e787bb19289e333faae81462e59"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12a342e439aef28ccec533f0253ea53d75fe9102bd6ea928ff530e76eac38906"}, - {file = "frozendict-2.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f79c26dff10ce11dad3b3627c89bb2e87b9dd5958c2b24325f16a23019b8b94"}, - {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2bd009cf4fc47972838a91e9b83654dc9a095dc4f2bb3a37c3f3124c8a364543"}, - {file = "frozendict-2.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:87ebcde21565a14fe039672c25550060d6f6d88cf1f339beac094c3b10004eb0"}, - {file = "frozendict-2.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:fefeb700bc7eb8b4c2dc48704e4221860d254c8989fb53488540bc44e44a1ac2"}, - {file = "frozendict-2.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:4297d694eb600efa429769125a6f910ec02b85606f22f178bafbee309e7d3ec7"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:812ab17522ba13637826e65454115a914c2da538356e85f43ecea069813e4b33"}, - {file = "frozendict-2.4.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fee9420475bb6ff357000092aa9990c2f6182b2bab15764330f4ad7de2eae49"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3148062675536724502c6344d7c485dd4667fdf7980ca9bd05e338ccc0c4471e"}, - {file = "frozendict-2.4.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:78c94991944dd33c5376f720228e5b252ee67faf3bac50ef381adc9e51e90d9d"}, - {file = "frozendict-2.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:1697793b5f62b416c0fc1d94638ec91ed3aa4ab277f6affa3a95216ecb3af170"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199a4d32194f3afed6258de7e317054155bc9519252b568d9cfffde7e4d834e5"}, - {file = "frozendict-2.4.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85375ec6e979e6373bffb4f54576a68bf7497c350861d20686ccae38aab69c0a"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2d8536e068d6bf281f23fa835ac07747fb0f8851879dd189e9709f9567408b4d"}, - {file = "frozendict-2.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:259528ba6b56fa051bc996f1c4d8b57e30d6dd3bc2f27441891b04babc4b5e73"}, - {file = "frozendict-2.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:07c3a5dee8bbb84cba770e273cdbf2c87c8e035903af8f781292d72583416801"}, - {file = "frozendict-2.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6874fec816b37b6eb5795b00e0574cba261bf59723e2de607a195d5edaff0786"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f92425686323a950337da4b75b4c17a3327b831df8c881df24038d560640d4"}, - {file = "frozendict-2.4.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d58d9a8d9e49662c6dafbea5e641f97decdb3d6ccd76e55e79818415362ba25"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93a7b19afb429cbf99d56faf436b45ef2fa8fe9aca89c49eb1610c3bd85f1760"}, - {file = "frozendict-2.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b70b431e3a72d410a2cdf1497b3aba2f553635e0c0f657ce311d841bf8273b6"}, - {file = "frozendict-2.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:e1b941132d79ce72d562a13341d38fc217bc1ee24d8c35a20d754e79ff99e038"}, - {file = "frozendict-2.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc2228874eacae390e63fd4f2bb513b3144066a977dc192163c9f6c7f6de6474"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63aa49f1919af7d45fb8fd5dec4c0859bc09f46880bd6297c79bb2db2969b63d"}, - {file = "frozendict-2.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6bf9260018d653f3cab9bd147bd8592bf98a5c6e338be0491ced3c196c034a3"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6eb716e6a6d693c03b1d53280a1947716129f5ef9bcdd061db5c17dea44b80fe"}, - {file = "frozendict-2.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d13b4310db337f4d2103867c5a05090b22bc4d50ca842093779ef541ea9c9eea"}, - {file = "frozendict-2.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:b3b967d5065872e27b06f785a80c0ed0a45d1f7c9b85223da05358e734d858ca"}, - {file = "frozendict-2.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:4ae8d05c8d0b6134bfb6bfb369d5fa0c4df21eabb5ca7f645af95fdc6689678e"}, - {file = "frozendict-2.4.4-py311-none-any.whl", hash = "sha256:705efca8d74d3facbb6ace80ab3afdd28eb8a237bfb4063ed89996b024bc443d"}, - {file = "frozendict-2.4.4-py312-none-any.whl", hash = "sha256:d9647563e76adb05b7cde2172403123380871360a114f546b4ae1704510801e5"}, - {file = "frozendict-2.4.4.tar.gz", hash = "sha256:3f7c031b26e4ee6a3f786ceb5e3abf1181c4ade92dce1f847da26ea2c96008c7"}, + {file = "frozendict-2.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f364cfe5ef97523a4434e9f458bb4821594d3531d898621e5acae43463dcb5e"}, + {file = 
"frozendict-2.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4f073c926b1f88fa85ed85222101f61f6c4b2180c95d1528ca6ecc7cef835442"}, + {file = "frozendict-2.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8582f26ca862bb1e40ed790d934adf6afcfc904b0425dcfe01aed2103bed27fb"}, + {file = "frozendict-2.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b3eea1678607174c468fe4e3b903625bccfb3215ae2b0138220dc1f6ef71f37"}, + {file = "frozendict-2.4.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3af1b09bad761d5500b096b757c346591b5dfdc8443aa9642c2c02b4885dc09e"}, + {file = "frozendict-2.4.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76c7af5f9db9ae01531edaf38fd3e8faae1bb6b94abbf8fa5bd0326f52e777df"}, + {file = "frozendict-2.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:a5448639058157ccd7f26ba83e441066c6ae515beb68b9b99d5b2dbb0bb36b19"}, + {file = "frozendict-2.4.5-cp310-cp310-win_arm64.whl", hash = "sha256:24953a1cfe344415e7557413e493e21fa9c4cecbc13284b6f334837aa5601c9f"}, + {file = "frozendict-2.4.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:754924d53b1fd2dae7b15f9c86b0610334a840ae728cc717f24aa040fc94136f"}, + {file = "frozendict-2.4.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95bf788d65e3a50d9deef5c68e568c59acf473fea8a7a4d7e405a7e63ac85cdd"}, + {file = "frozendict-2.4.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35730ff269b8a6eb07c3d91d5fd6a7f5c32e08bef665fe1ef51012bdb2702196"}, + {file = "frozendict-2.4.5-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:eb58e0c41bc6bb60a8b0852d50da8836380fb78311608dc26791e3e9aa25bcfe"}, + {file = "frozendict-2.4.5-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:0483776fa25f0c3f9ff75a89b5276a582231f83baea85b091dfefde99b95ac5c"}, + {file = "frozendict-2.4.5-cp36-cp36m-win_amd64.whl", hash = "sha256:a967730a115cb8b5d028e0d07cd680c04324e913ee3d1caef9e039ca740343c4"}, + {file = "frozendict-2.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d10ad91ded20fae8737fddcfed1b6358207f95292a335f96c4cd365d97f5e30"}, + {file = "frozendict-2.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3702438d81deab127963ab41dca1738a0fbf11038a50a25c5f814943a2d25de5"}, + {file = "frozendict-2.4.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de1126d6dd39d7939be388161b973ffadb9c6e97f8f9fdcb52ba790828621191"}, + {file = "frozendict-2.4.5-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:f8d677a6066ca3289181028fccabf77413addddaa7a6629485d52aeaa4f4899c"}, + {file = "frozendict-2.4.5-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f2ff03ed21657cde7ea04fffc76f0c1736100a03671e4682722685e63486cd96"}, + {file = "frozendict-2.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:14755127b988b428f95f11fa626374e247f8a40316697218f1e2f44c107c5027"}, + {file = "frozendict-2.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bbf9dcb69a44519308c775bcf72b8417c3102f415f77df19b8eddb97b8f88d78"}, + {file = "frozendict-2.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee3e6f4013446d17c580ce0bac41a1f81371c21dd6e34a85c0f26bae94fdfd9b"}, + {file = "frozendict-2.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b51e0aae859fc8d56138eac4be121a76647fa66ea620af8a62d9d6938729441"}, + {file = "frozendict-2.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4bdffecff350a68f1a966dec84459d3ea8e2606c14fb3ebe937b8c6c9e9129"}, + 
{file = "frozendict-2.4.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1385852cb05a31ae76c972caa3a07679414b01a664848bde2bae71c9910f045"}, + {file = "frozendict-2.4.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e591124818f2d5d8f2d284a7b78604f4180e73d770c33252370edccff5969293"}, + {file = "frozendict-2.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:05786d9cff61ee95149c15cdfc10cf4b4cb10e1eab1d4648b0c99ed58fedb86d"}, + {file = "frozendict-2.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98b1483980c47bb74ef777ab748031a69eb41c43ef44494e9db79111638fdb84"}, + {file = "frozendict-2.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:08e8d4ebc950916b5cdeedcb697cf893c5903d9cb8ab27dffe41072a08e1cfc1"}, + {file = "frozendict-2.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73ff80ce3f95c3ec60b38ef4ddf5cb8e20ea3edce2f56e89af7c4c45013684bf"}, + {file = "frozendict-2.4.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee64c44e1a146171bdd2bf4501210f273a5ab73cc2e924a01f81b14e4612f94"}, + {file = "frozendict-2.4.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a81d80ded07f0e34a3fa2b78235a48259b886ad20a96da5526a9404d192c6eb3"}, + {file = "frozendict-2.4.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:043bce9f5e7e8df8313d0c961b2da3346c0b002311d88c74cadae2b1f8fd2904"}, + {file = "frozendict-2.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:d2b2a9c809ebb54c8c0fe2acc38d45299f3e9a9d90061773468896221cab1eaa"}, + {file = "frozendict-2.4.5-cp39-cp39-win_arm64.whl", hash = "sha256:20cf1db30ca0c8f76a05ec1256132f505cf6f93eb382a5961c738377037b5b9d"}, + {file = "frozendict-2.4.5-py311-none-any.whl", hash = "sha256:6be054ba76e8a49c6846a7369db3eaaa0593c29a644026c25923f13fdea06483"}, + {file = "frozendict-2.4.5-py312-none-any.whl", hash = "sha256:b8481d83a7219e9e14c719113ea2d8321d7840af35af58c72b3864b7123e7de3"}, + {file = "frozendict-2.4.5.tar.gz", hash = "sha256:fd7add309789595c044c0155a0bddfa9d20c77f65de1e33a14aa3033b936ef63"}, ] [[package]] @@ -2931,6 +2900,17 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe, test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] +[[package]] +name = "future" +version = "1.0.0" +description = "Clean single-source support for Python 3 and 2" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, +] + [[package]] name = "gevent" version = "23.9.1" @@ -3058,6 +3038,20 @@ files = [ docs = ["sphinx (>=4)", "sphinx-rtd-theme (>=1)"] tests = ["cython", "hypothesis", "mpmath", "pytest", "setuptools"] +[[package]] +name = "google" +version = "3.0.0" +description = "Python bindings to the Google search engine." 
+optional = false +python-versions = "*" +files = [ + {file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"}, + {file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"}, +] + +[package.dependencies] +beautifulsoup4 = "*" + [[package]] name = "google-ai-generativelanguage" version = "0.6.9" @@ -3211,30 +3205,30 @@ xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] name = "google-cloud-bigquery" -version = "3.25.0" +version = "3.26.0" description = "Google BigQuery API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-bigquery-3.25.0.tar.gz", hash = "sha256:5b2aff3205a854481117436836ae1403f11f2594e6810a98886afd57eda28509"}, - {file = "google_cloud_bigquery-3.25.0-py2.py3-none-any.whl", hash = "sha256:7f0c371bc74d2a7fb74dacbc00ac0f90c8c2bec2289b51dd6685a275873b1ce9"}, + {file = "google_cloud_bigquery-3.26.0-py2.py3-none-any.whl", hash = "sha256:e0e9ad28afa67a18696e624cbccab284bf2c0a3f6eeb9eeb0426c69b943793a8"}, + {file = "google_cloud_bigquery-3.26.0.tar.gz", hash = "sha256:edbdc788beea659e04c0af7fe4dcd6d9155344b98951a0d5055bd2f15da4ba23"}, ] [package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" +python-dateutil = ">=2.7.3,<3.0dev" requests = ">=2.21.0,<3.0.0dev" [package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "bigquery-magics (>=0.1.0)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = 
["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +ipython = ["bigquery-magics (>=0.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] @@ -3411,79 +3405,101 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +[[package]] +name = "gotrue" +version = "2.9.2" +description = "Python Client Library for Supabase Auth" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "gotrue-2.9.2-py3-none-any.whl", hash = "sha256:fcd5279e8f1cc630f3ac35af5485fe39f8030b23906776920d2c32a4e308cff4"}, + {file = "gotrue-2.9.2.tar.gz", hash = "sha256:57b3245e916c5efbf19a21b1181011a903c1276bb1df2d847558f2f24f29abb2"}, +] + +[package.dependencies] +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +pydantic = ">=1.10,<3" + [[package]] name = "greenlet" -version = "3.1.0" +version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a814dc3100e8a046ff48faeaa909e80cdb358411a3d6dd5293158425c684eda8"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a771dc64fa44ebe58d65768d869fcfb9060169d203446c1d446e844b62bdfdca"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e49a65d25d7350cca2da15aac31b6f67a43d867448babf997fe83c7505f57bc"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cd8518eade968bc52262d8c46727cfc0826ff4d552cf0430b8d65aaf50bb91d"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dc19e660baea5c38e949455c1181bc018893f25372d10ffe24b3ed7341fb25"}, - {file = "greenlet-3.1.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0a5b1c22c82831f56f2f7ad9bbe4948879762fe0d59833a4a71f16e5fa0f682"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2651dfb006f391bcb240635079a68a261b227a10a08af6349cba834a2141efa1"}, - {file = "greenlet-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3e7e6ef1737a819819b1163116ad4b48d06cfdd40352d813bb14436024fcda99"}, - {file = "greenlet-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:ffb08f2a1e59d38c7b8b9ac8083c9c8b9875f0955b1e9b9b9a965607a51f8e54"}, - {file = "greenlet-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9730929375021ec90f6447bff4f7f5508faef1c02f399a1953870cdb78e0c345"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713d450cf8e61854de9420fb7eea8ad228df4e27e7d4ed465de98c955d2b3fa6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c3446937be153718250fe421da548f973124189f18fe4575a0510b5c928f0cc"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ddc7bcedeb47187be74208bc652d63d6b20cb24f4e596bd356092d8000da6d6"}, - {file = "greenlet-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44151d7b81b9391ed759a2f2865bbe623ef00d648fed59363be2bbbd5154656f"}, - {file = 
"greenlet-3.1.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cea1cca3be76c9483282dc7760ea1cc08a6ecec1f0b6ca0a94ea0d17432da19"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:619935a44f414274a2c08c9e74611965650b730eb4efe4b2270f91df5e4adf9a"}, - {file = "greenlet-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:221169d31cada333a0c7fd087b957c8f431c1dba202c3a58cf5a3583ed973e9b"}, - {file = "greenlet-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:01059afb9b178606b4b6e92c3e710ea1635597c3537e44da69f4531e111dd5e9"}, - {file = "greenlet-3.1.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:24fc216ec7c8be9becba8b64a98a78f9cd057fd2dc75ae952ca94ed8a893bf27"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d07c28b85b350564bdff9f51c1c5007dfb2f389385d1bc23288de51134ca303"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:243a223c96a4246f8a30ea470c440fe9db1f5e444941ee3c3cd79df119b8eebf"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26811df4dc81271033a7836bc20d12cd30938e6bd2e9437f56fa03da81b0f8fc"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d86401550b09a55410f32ceb5fe7efcd998bd2dad9e82521713cb148a4a15f"}, - {file = "greenlet-3.1.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26d9c1c4f1748ccac0bae1dbb465fb1a795a75aba8af8ca871503019f4285e2a"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cd468ec62257bb4544989402b19d795d2305eccb06cde5da0eb739b63dc04665"}, - {file = "greenlet-3.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a53dfe8f82b715319e9953330fa5c8708b610d48b5c59f1316337302af5c0811"}, - {file = "greenlet-3.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:28fe80a3eb673b2d5cc3b12eea468a5e5f4603c26aa34d88bf61bba82ceb2f9b"}, - {file = "greenlet-3.1.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:76b3e3976d2a452cba7aa9e453498ac72240d43030fdc6d538a72b87eaff52fd"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655b21ffd37a96b1e78cc48bf254f5ea4b5b85efaf9e9e2a526b3c9309d660ca"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6f4c2027689093775fd58ca2388d58789009116844432d920e9147f91acbe64"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76e5064fd8e94c3f74d9fd69b02d99e3cdb8fc286ed49a1f10b256e59d0d3a0b"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4bf607f690f7987ab3291406e012cd8591a4f77aa54f29b890f9c331e84989"}, - {file = "greenlet-3.1.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:037d9ac99540ace9424cb9ea89f0accfaff4316f149520b4ae293eebc5bded17"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:90b5bbf05fe3d3ef697103850c2ce3374558f6fe40fd57c9fac1bf14903f50a5"}, - {file = "greenlet-3.1.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:726377bd60081172685c0ff46afbc600d064f01053190e4450857483c4d44484"}, - {file = "greenlet-3.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:d46d5069e2eeda111d6f71970e341f4bd9aeeee92074e649ae263b834286ecc0"}, - {file = 
"greenlet-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81eeec4403a7d7684b5812a8aaa626fa23b7d0848edb3a28d2eb3220daddcbd0"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3dae7492d16e85ea6045fd11cb8e782b63eac8c8d520c3a92c02ac4573b0a6"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5ea3664eed571779403858d7cd0a9b0ebf50d57d2cdeafc7748e09ef8cd81a"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22f4e26400f7f48faef2d69c20dc055a1f3043d330923f9abe08ea0aecc44df"}, - {file = "greenlet-3.1.0-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13ff8c8e54a10472ce3b2a2da007f915175192f18e6495bad50486e87c7f6637"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9671e7282d8c6fcabc32c0fb8d7c0ea8894ae85cee89c9aadc2d7129e1a9954"}, - {file = "greenlet-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:184258372ae9e1e9bddce6f187967f2e08ecd16906557c4320e3ba88a93438c3"}, - {file = "greenlet-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:a0409bc18a9f85321399c29baf93545152d74a49d92f2f55302f122007cfda00"}, - {file = "greenlet-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9eb4a1d7399b9f3c7ac68ae6baa6be5f9195d1d08c9ddc45ad559aa6b556bce6"}, - {file = "greenlet-3.1.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a8870983af660798dc1b529e1fd6f1cefd94e45135a32e58bd70edd694540f33"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfcfb73aed40f550a57ea904629bdaf2e562c68fa1164fa4588e752af6efdc3f"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9482c2ed414781c0af0b35d9d575226da6b728bd1a720668fa05837184965b7"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d58ec349e0c2c0bc6669bf2cd4982d2f93bf067860d23a0ea1fe677b0f0b1e09"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd65695a8df1233309b701dec2539cc4b11e97d4fcc0f4185b4a12ce54db0491"}, - {file = "greenlet-3.1.0-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:665b21e95bc0fce5cab03b2e1d90ba9c66c510f1bb5fdc864f3a377d0f553f6b"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3c59a06c2c28a81a026ff11fbf012081ea34fb9b7052f2ed0366e14896f0a1d"}, - {file = "greenlet-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415b9494ff6240b09af06b91a375731febe0090218e2898d2b85f9b92abcda0"}, - {file = "greenlet-3.1.0-cp38-cp38-win32.whl", hash = "sha256:1544b8dd090b494c55e60c4ff46e238be44fdc472d2589e943c241e0169bcea2"}, - {file = "greenlet-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:7f346d24d74c00b6730440f5eb8ec3fe5774ca8d1c9574e8e57c8671bb51b910"}, - {file = "greenlet-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db1b3ccb93488328c74e97ff888604a8b95ae4f35f4f56677ca57a4fc3a4220b"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44cd313629ded43bb3b98737bba2f3e2c2c8679b55ea29ed73daea6b755fe8e7"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fad7a051e07f64e297e6e8399b4d6a3bdcad3d7297409e9a06ef8cbccff4f501"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c3967dcc1cd2ea61b08b0b276659242cbce5caca39e7cbc02408222fb9e6ff39"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d45b75b0f3fd8d99f62eb7908cfa6d727b7ed190737dec7fe46d993da550b81a"}, - {file = "greenlet-3.1.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2d004db911ed7b6218ec5c5bfe4cf70ae8aa2223dffbb5b3c69e342bb253cb28"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9505a0c8579899057cbefd4ec34d865ab99852baf1ff33a9481eb3924e2da0b"}, - {file = "greenlet-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fd6e94593f6f9714dbad1aaba734b5ec04593374fa6638df61592055868f8b8"}, - {file = "greenlet-3.1.0-cp39-cp39-win32.whl", hash = "sha256:d0dd943282231480aad5f50f89bdf26690c995e8ff555f26d8a5b9887b559bcc"}, - {file = "greenlet-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:ac0adfdb3a21dc2a24ed728b61e72440d297d0fd3a577389df566651fcd08f97"}, - {file = "greenlet-3.1.0.tar.gz", hash = "sha256:b395121e9bbe8d02a750886f108d540abe66075e61e22f7353d9acb0b81be0f0"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = 
"greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = 
"greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = 
"greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -3508,61 +3524,70 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [[package]] name = "grpcio" -version = "1.66.1" +version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, - {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, - {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, - 
{file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, - {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, - {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, - {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, - {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, - {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, - {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, - {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, - {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, - {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, - {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, - {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, - {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, - {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, - {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, - {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, - {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, - {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, + {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = 
"grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.1)"] +protobuf = ["grpcio-tools (>=1.66.2)"] [[package]] name = "grpcio-status" @@ -3826,13 +3851,13 @@ lxml = ["lxml"] [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -3843,7 +3868,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httplib2" @@ -3993,13 +4018,13 @@ files = [ [[package]] name = "idna" -version = "3.9" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.9-py3-none-any.whl", hash = "sha256:69297d5da0cc9281c77efffb4e730254dd45943f45bbfb461de5991713989b1e"}, - {file = "idna-3.9.tar.gz", hash = "sha256:e5c5dafde284f26e9e0f28f6ea2d6400abd5ca099864a67f576f3981c6476124"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] @@ -4056,18 +4081,15 @@ files = [ [[package]] name = "isodate" -version = "0.6.1" +version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] -[package.dependencies] -six = "*" - [[package]] name = "itsdangerous" version = "2.2.0" @@ -4189,13 +4211,13 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] @@ -4341,17 +4363,18 @@ files = [ [[package]] name = "kombu" -version = "5.4.1" +version = "5.4.2" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.4.1-py3-none-any.whl", hash = "sha256:621d365f234e4c089596f3a2510f1ade07026efc28caca426161d8f458786cab"}, - {file = "kombu-5.4.1.tar.gz", hash = "sha256:1c05178826dab811f8cab5b0a154d42a7a33d8bcdde9fa3d7b4582e43c3c03db"}, + {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, + {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, ] [package.dependencies] amqp = ">=5.1.1,<6.0.0" +tzdata = {version = "*", markers = "python_version >= \"3.9\""} vine = "5.1.0" [package.extras] @@ -4373,17 +4396,18 @@ zookeeper = ["kazoo (>=2.8.0)"] [[package]] name = "kubernetes" -version = "30.1.0" +version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-30.1.0-py2.py3-none-any.whl", hash = "sha256:e212e8b7579031dd2e512168b617373bc1e03888d41ac4e04039240a292d478d"}, - {file = "kubernetes-30.1.0.tar.gz", hash = "sha256:41e4c77af9f28e7a6c314e3bd06a8c6229ddd787cad684e0ab9f69b498e98ebc"}, + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, ] [package.dependencies] certifi = ">=14.05.14" +durationpy = ">=0.7" google-auth = ">=1.0.1" oauthlib = ">=3.2.2" python-dateutil = ">=2.5.3" @@ -4413,13 +4437,13 @@ six = "*" [[package]] name = "langfuse" -version = "2.48.1" +version = "2.51.5" description = "A client library for accessing langfuse" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langfuse-2.48.1-py3-none-any.whl", hash = "sha256:8661070b6d94ba1d7da92c054f3110b6ecf4489d6e8204a4080f934f3f49ebf2"}, - {file = "langfuse-2.48.1.tar.gz", hash = "sha256:b8117d90babec6be1bc3303b42e0b71848531eae44118e6e0123d03e7961d0fc"}, + {file = "langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb"}, + {file = "langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b"}, ] [package.dependencies] @@ -4438,13 +4462,13 @@ openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.120" +version = "0.1.134" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.120-py3-none-any.whl", hash = "sha256:54d2785e301646c0988e0a69ebe4d976488c87b41928b358cb153b6ddd8db62b"}, - {file = "langsmith-0.1.120.tar.gz", hash = "sha256:25499ca187b41bd89d784b272b97a8d76f60e0e21bdf20336e8a2aa6a9b23ac9"}, + {file = "langsmith-0.1.134-py3-none-any.whl", hash = "sha256:ada98ad80ef38807725f32441a472da3dd28394010877751f48f458d3289da04"}, + {file = "langsmith-0.1.134.tar.gz", hash = "sha256:23abee3b508875a0e63c602afafffc02442a19cfd88f9daae05b3e9054fd6b61"}, ] [package.dependencies] @@ -4455,6 +4479,7 @@ pydantic = [ {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, ] requests = ">=2,<3" +requests-toolbelt = ">=1.0.0,<2.0.0" [[package]] name = "llvmlite" @@ -4785,71 +4810,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.1" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"}, + {file = 
"MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"}, + {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"}, + {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"}, + {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"}, + {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"}, + {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"}, + {file = 
"MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"}, + {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"}, + {file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"}, ] [[package]] @@ -4946,97 +4972,129 @@ files = [ [package.dependencies] tqdm = "*" +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + [[package]] name = "mmh3" -version = "4.1.0" +version = "5.0.1" description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, - {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, - {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, - {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, - {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, - {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, - {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, - {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, - {file = 
"mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, - {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, - {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, - {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, - {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, - {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, - {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, - {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, - {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, - {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, - {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, - {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, - {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, - {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, - {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, - {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, - {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, - {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, - {file = 
"mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, - {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, - {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, - {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = "sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, - {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, - {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, - {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a2583b5521ca49756d8d8bceba80627a9cc295f255dcab4e3df7ccc2f09679a"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081a8423fe53c1ac94f87165f3e4c500125d343410c1a0c5f1703e898a3ef038"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b4d72713799755dc8954a7d36d5c20a6c8de7b233c82404d122c7c7c1707cc"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389a6fd51efc76d3182d36ec306448559c1244f11227d2bb771bdd0e6cc91321"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39f4128edaa074bff721b1d31a72508cba4d2887ee7867f22082e1fe9d4edea0"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5d23a94d91aabba3386b3769048d5f4210fdfef80393fece2f34ba5a7b466c"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16347d038361f8b8f24fd2b7ef378c9b68ddee9f7706e46269b6e0d322814713"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e299408565af7d61f2d20a5ffdd77cf2ed902460fe4e6726839d59ba4b72316"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42050af21ddfc5445ee5a66e73a8fc758c71790305e3ee9e4a85a8e69e810f94"}, + {file = 
"mmh3-5.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2ae9b1f5ef27ec54659920f0404b7ceb39966e28867c461bfe83a05e8d18ddb0"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:50c2495a02045f3047d71d4ae9cdd7a15efc0bcbb7ff17a18346834a8e2d1d19"}, + {file = "mmh3-5.0.1-cp310-cp310-win32.whl", hash = "sha256:c028fa77cddf351ca13b4a56d43c1775652cde0764cadb39120b68f02a23ecf6"}, + {file = "mmh3-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c5e741e421ec14400c4aae30890515c201f518403bdef29ae1e00d375bb4bbb5"}, + {file = "mmh3-5.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:b17156d56fabc73dbf41bca677ceb6faed435cc8544f6566d72ea77d8a17e9d0"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a6d5a9b1b923f1643559ba1fc0bf7a5076c90cbb558878d3bf3641ce458f25d"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3349b968be555f7334bbcce839da98f50e1e80b1c615d8e2aa847ea4a964a012"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bd3c94b110e55db02ab9b605029f48a2f7f677c6e58c09d44e42402d438b7e1"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ba84d48608f79adbb10bb09986b6dc33eeda5c2d1bd75d00820081b73bde9"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0217987a8b8525c8d9170f66d036dec4ab45cfbd53d47e8d76125791ceb155e"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2797063a34e78d1b61639a98b0edec1c856fa86ab80c7ec859f1796d10ba429"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bba16340adcbd47853a2fbe5afdb397549e8f2e79324ff1dced69a3f8afe7c3"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:282797957c9f60b51b9d768a602c25f579420cc9af46feb77d457a27823d270a"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4fb670c29e63f954f9e7a2cdcd57b36a854c2538f579ef62681ccbaa1de2b69"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ee7d85438dc6aff328e19ab052086a3c29e8a9b632998a49e5c4b0034e9e8d6"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7fb5db231f3092444bc13901e6a8d299667126b00636ffbad4a7b45e1051e2f"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c100dd441703da5ec136b1d9003ed4a041d8a1136234c9acd887499796df6ad8"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71f3b765138260fd7a7a2dba0ea5727dabcd18c1f80323c9cfef97a7e86e01d0"}, + {file = "mmh3-5.0.1-cp311-cp311-win32.whl", hash = "sha256:9a76518336247fd17689ce3ae5b16883fd86a490947d46a0193d47fb913e26e3"}, + {file = "mmh3-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:336bc4df2e44271f1c302d289cc3d78bd52d3eed8d306c7e4bff8361a12bf148"}, + {file = "mmh3-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:af6522722fbbc5999aa66f7244d0986767a46f1fb05accc5200f75b72428a508"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f2730bb263ed9c388e8860438b057a53e3cc701134a6ea140f90443c4c11aa40"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6246927bc293f6d56724536400b85fb85f5be26101fa77d5f97dd5e2a4c69bf2"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbca322519a6e6e25b6abf43e940e1667cf8ea12510e07fb4919b48a0cd1c411"}, + 
{file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae8c19903ed8a1724ad9e67e86f15d198a7a1271a4f9be83d47e38f312ed672"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09fd6cc72c07c0c07c3357714234b646d78052487c4a3bd5f7f6e08408cff60"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ff8551fee7ae3b11c5d986b6347ade0dccaadd4670ffdb2b944dee120ffcc84"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39694c73a5a20c8bf36dfd8676ed351e5234d55751ba4f7562d85449b21ef3f"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba6001989a92f72a89c7cf382fda831678bd780707a66b4f8ca90239fdf2123"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0771f90c9911811cc606a5c7b7b58f33501c9ee896ed68a6ac22c7d55878ecc0"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:09b31ed0c0c0920363e96641fac4efde65b1ab62b8df86293142f35a254e72b4"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cf4a8deda0235312db12075331cb417c4ba163770edfe789bde71d08a24b692"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41f7090a95185ef20ac018581a99337f0cbc84a2135171ee3290a9c0d9519585"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b97b5b368fb7ff22194ec5854f5b12d8de9ab67a0f304728c7f16e5d12135b76"}, + {file = "mmh3-5.0.1-cp312-cp312-win32.whl", hash = "sha256:842516acf04da546f94fad52db125ee619ccbdcada179da51c326a22c4578cb9"}, + {file = "mmh3-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:d963be0dbfd9fca209c17172f6110787ebf78934af25e3694fe2ba40e55c1e2b"}, + {file = "mmh3-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:a5da292ceeed8ce8e32b68847261a462d30fd7b478c3f55daae841404f433c15"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:673e3f1c8d4231d6fb0271484ee34cb7146a6499fc0df80788adb56fd76842da"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f795a306bd16a52ad578b663462cc8e95500b3925d64118ae63453485d67282b"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5ed57a5e28e502a1d60436cc25c76c3a5ba57545f250f2969af231dc1221e0a5"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632c28e7612e909dbb6cbe2fe496201ada4695b7715584005689c5dc038e59ad"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53fd6bd525a5985e391c43384672d9d6b317fcb36726447347c7fc75bfed34ec"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dceacf6b0b961a0e499836af3aa62d60633265607aef551b2a3e3c48cdaa5edd"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0738d478fdfb5d920f6aff5452c78f2c35b0eff72caa2a97dfe38e82f93da2"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e70285e7391ab88b872e5bef632bad16b9d99a6d3ca0590656a4753d55988af"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27e5fc6360aa6b828546a4318da1a7da6bf6e5474ccb053c3a6aa8ef19ff97bd"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", 
hash = "sha256:7989530c3c1e2c17bf5a0ec2bba09fd19819078ba90beedabb1c3885f5040b0d"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cdad7bee649950da7ecd3cbbbd12fb81f1161072ecbdb5acfa0018338c5cb9cf"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e143b8f184c1bb58cecd85ab4a4fd6dc65a2d71aee74157392c3fddac2a4a331"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5eb12e886f3646dd636f16b76eb23fc0c27e8ff3c1ae73d4391e50ef60b40f6"}, + {file = "mmh3-5.0.1-cp313-cp313-win32.whl", hash = "sha256:16e6dddfa98e1c2d021268e72c78951234186deb4df6630e984ac82df63d0a5d"}, + {file = "mmh3-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d3ffb792d70b8c4a2382af3598dad6ae0c5bd9cee5b7ffcc99aa2f5fd2c1bf70"}, + {file = "mmh3-5.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:122fa9ec148383f9124292962bda745f192b47bfd470b2af5fe7bb3982b17896"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b12bad8c75e6ff5d67319794fb6a5e8c713826c818d47f850ad08b4aa06960c6"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e5bbb066538c1048d542246fc347bb7994bdda29a3aea61c22f9f8b57111ce69"}, + {file = "mmh3-5.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eee6134273f64e2a106827cc8fd77e70cc7239a285006fc6ab4977d59b015af2"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d04d9aa19d48e4c7bbec9cabc2c4dccc6ff3b2402f856d5bf0de03e10f167b5b"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79f37da1eed034d06567a69a7988456345c7f29e49192831c3975b464493b16e"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:242f77666743337aa828a2bf2da71b6ba79623ee7f93edb11e009f69237c8561"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffd943fff690463945f6441a2465555b3146deaadf6a5e88f2590d14c655d71b"}, + {file = "mmh3-5.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565b15f8d7df43acb791ff5a360795c20bfa68bca8b352509e0fbabd06cc48cd"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc6aafb867c2030df98ac7760ff76b500359252867985f357bd387739f3d5287"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:32898170644d45aa27c974ab0d067809c066205110f5c6d09f47d9ece6978bfe"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:42865567838d2193eb64e0ef571f678bf361a254fcdef0c5c8e73243217829bd"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5ff5c1f301c4a8b6916498969c0fcc7e3dbc56b4bfce5cfe3fe31f3f4609e5ae"}, + {file = "mmh3-5.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:be74c2dda8a6f44a504450aa2c3507f8067a159201586fc01dd41ab80efc350f"}, + {file = "mmh3-5.0.1-cp38-cp38-win32.whl", hash = "sha256:5610a842621ff76c04b20b29cf5f809b131f241a19d4937971ba77dc99a7f330"}, + {file = "mmh3-5.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:de15739ac50776fe8aa1ef13f1be46a6ee1fbd45f6d0651084097eb2be0a5aa4"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:48e84cf3cc7e8c41bc07de72299a73b92d9e3cde51d97851420055b1484995f7"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd9dc28c2d168c49928195c2e29b96f9582a5d07bd690a28aede4cc07b0e696"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:2771a1c56a3d4bdad990309cff5d0a8051f29c8ec752d001f97d6392194ae880"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5ff2a8322ba40951a84411550352fba1073ce1c1d1213bb7530f09aed7f8caf"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a16bd3ec90682c9e0a343e6bd4c778c09947c8c5395cdb9e5d9b82b2559efbca"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d45733a78d68b5b05ff4a823aea51fa664df1d3bf4929b152ff4fd6dea2dd69b"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:904285e83cedebc8873b0838ed54c20f7344120be26e2ca5a907ab007a18a7a0"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac4aeb1784e43df728034d0ed72e4b2648db1a69fef48fa58e810e13230ae5ff"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cb3d4f751a0b8b4c8d06ef1c085216c8fddcc8b8c8d72445976b5167a40c6d1e"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8021851935600e60c42122ed1176399d7692df338d606195cd599d228a04c1c6"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6182d5924a5efc451900f864cbb021d7e8ad5d524816ca17304a0f663bc09bb5"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:5f30b834552a4f79c92e3d266336fb87fd92ce1d36dc6813d3e151035890abbd"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cd4383f35e915e06d077df27e04ffd3be7513ec6a9de2d31f430393f67e192a7"}, + {file = "mmh3-5.0.1-cp39-cp39-win32.whl", hash = "sha256:1455fb6b42665a97db8fc66e89a861e52b567bce27ed054c47877183f86ea6e3"}, + {file = "mmh3-5.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:9e26a0f4eb9855a143f5938a53592fa14c2d3b25801c2106886ab6c173982780"}, + {file = "mmh3-5.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:0d0a35a69abdad7549c4030a714bb4ad07902edb3bbe61e1bbc403ded5d678be"}, + {file = "mmh3-5.0.1.tar.gz", hash = "sha256:7dab080061aeb31a6069a181f27c473a1f67933854e36a3464931f2716508896"}, ] [package.extras] -test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] +benchmark = ["pymmh3 (==0.0.5)", "pyperf (==2.7.0)", "xxhash (==3.5.0)"] +docs = ["myst-parser (==4.0.0)", "shibuya (==2024.8.30)", "sphinx (==8.0.2)", "sphinx-copybutton (==0.5.2)"] +lint = ["black (==24.8.0)", "clang-format (==18.1.8)", "isort (==5.13.2)", "pylint (==3.2.7)"] +plot = ["matplotlib (==3.9.2)", "pandas (==2.2.2)"] +test = ["pytest (==8.3.3)", "pytest-sugar (==1.0.0)"] +type = ["mypy (==1.11.2)"] [[package]] name = "mock" @@ -5260,27 +5318,31 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "multiprocess" -version = "0.70.16" +version = "0.70.17" description = "better multiprocessing and multithreading in Python" optional = false python-versions = ">=3.8" files = [ - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, - {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, - {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, - {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, - {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, - {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, - {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, + {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ddb24e5bcdb64e90ec5543a1f05a39463068b6d3b804aa3f2a4e16ec28562d6"}, + {file = "multiprocess-0.70.17-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d729f55198a3579f6879766a6d9b72b42d4b320c0dcb7844afb774d75b573c62"}, + {file = "multiprocess-0.70.17-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2c82d0375baed8d8dd0d8c38eb87c5ae9c471f8e384ad203a36f095ee860f67"}, + {file = "multiprocess-0.70.17-pp38-pypy38_pp73-macosx_10_9_arm64.whl", hash = "sha256:a22a6b1a482b80eab53078418bb0f7025e4f7d93cc8e1f36481477a023884861"}, + {file = "multiprocess-0.70.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:349525099a0c9ac5936f0488b5ee73199098dac3ac899d81d326d238f9fd3ccd"}, + {file = "multiprocess-0.70.17-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:27b8409c02b5dd89d336107c101dfbd1530a2cd4fd425fc27dcb7adb6e0b47bf"}, + {file = "multiprocess-0.70.17-pp39-pypy39_pp73-macosx_10_13_arm64.whl", hash = "sha256:2ea0939b0f4760a16a548942c65c76ff5afd81fbf1083c56ae75e21faf92e426"}, + {file = "multiprocess-0.70.17-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:2b12e081df87ab755190e227341b2c3b17ee6587e9c82fecddcbe6aa812cd7f7"}, + {file = "multiprocess-0.70.17-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a0f01cd9d079af7a8296f521dc03859d1a414d14c1e2b6e676ef789333421c95"}, + {file = "multiprocess-0.70.17-py310-none-any.whl", hash = "sha256:38357ca266b51a2e22841b755d9a91e4bb7b937979a54d411677111716c32744"}, + {file = "multiprocess-0.70.17-py311-none-any.whl", hash = "sha256:2884701445d0177aec5bd5f6ee0df296773e4fb65b11903b94c613fb46cfb7d1"}, + {file = "multiprocess-0.70.17-py312-none-any.whl", hash = "sha256:2818af14c52446b9617d1b0755fa70ca2f77c28b25ed97bdaa2c69a22c47b46c"}, + {file = "multiprocess-0.70.17-py313-none-any.whl", hash = "sha256:20c28ca19079a6c879258103a6d60b94d4ffe2d9da07dda93fb1c8bc6243f522"}, + {file = "multiprocess-0.70.17-py38-none-any.whl", hash = "sha256:1d52f068357acd1e5bbc670b273ef8f81d57863235d9fbf9314751886e141968"}, + {file = "multiprocess-0.70.17-py39-none-any.whl", hash = "sha256:c3feb874ba574fbccfb335980020c1ac631fbf2a3f7bee4e2042ede62558a021"}, + {file = "multiprocess-0.70.17.tar.gz", hash = 
"sha256:4ae2f11a3416809ebc9a48abfc8b14ecce0652a0944731a1493a3c1ba44ff57a"}, ] [package.dependencies] -dill = ">=0.3.8" +dill = ">=0.3.9" [[package]] name = "multitasking" @@ -5539,19 +5601,19 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "oci" -version = "2.133.0" +version = "2.135.2" description = "Oracle Cloud Infrastructure Python SDK" optional = false python-versions = "*" files = [ - {file = "oci-2.133.0-py3-none-any.whl", hash = "sha256:9706365481ca538c89b3a15e6b5c246801eccb06be831a7f21c40f2a2ee310a7"}, - {file = "oci-2.133.0.tar.gz", hash = "sha256:800418025bb98f587c65bbf89c6b6d61ef0f2249e0698d73439baf3251640b7f"}, + {file = "oci-2.135.2-py3-none-any.whl", hash = "sha256:5213319244e1c7f108bcb417322f33f01f043fd9636d4063574039f5fdf4e4f7"}, + {file = "oci-2.135.2.tar.gz", hash = "sha256:520f78983c5246eae80dd5ecfd05e3a565c8b98d02ef0c1b11ba1f61bcccb61d"}, ] [package.dependencies] certifi = "*" circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""} -cryptography = ">=3.2.1,<43.0.0" +cryptography = ">=3.2.1,<46.0.0" pyOpenSSL = ">=17.5.0,<25.0.0" python-dateutil = ">=2.5.3,<3.0.0" pytz = ">=2016.10" @@ -6059,40 +6121,53 @@ files = [ [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = 
"pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = 
"pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = 
"pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -6141,20 +6216,20 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pathos" -version = "0.3.2" +version = "0.3.3" description = "parallel graph management and execution in heterogeneous computing" optional = false python-versions = ">=3.8" files = [ - {file = "pathos-0.3.2-py3-none-any.whl", hash = "sha256:d669275e6eb4b3fbcd2846d7a6d1bba315fe23add0c614445ba1408d8b38bafe"}, - {file = "pathos-0.3.2.tar.gz", hash = "sha256:4f2a42bc1e10ccf0fe71961e7145fc1437018b6b21bd93b2446abc3983e49a7a"}, + {file = "pathos-0.3.3-py3-none-any.whl", hash = "sha256:e04616c6448608ad1f809360be22e3f2078d949a36a81e6991da6c2dd1f82513"}, + {file = "pathos-0.3.3.tar.gz", hash = "sha256:dcb2a5f321aa34ca541c1c1861011ea49df357bb908379c21dd5741f666e0a58"}, ] [package.dependencies] -dill = ">=0.3.8" -multiprocess = ">=0.70.16" -pox = ">=0.3.4" -ppft = ">=1.7.6.8" +dill = ">=0.3.9" +multiprocess = ">=0.70.17" +pox = ">=0.3.5" +ppft = ">=1.7.6.9" [[package]] name = "peewee" @@ -6168,13 +6243,13 @@ files = [ [[package]] name = "pgvecto-rs" -version = "0.2.1" +version = "0.2.2" description = "Python binding for pgvecto.rs" optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "pgvecto_rs-0.2.1-py3-none-any.whl", hash = "sha256:b3ee2c465219469ad537b3efea2916477c6c576b3d6fd4298980d0733d12bb27"}, - {file = "pgvecto_rs-0.2.1.tar.gz", hash = "sha256:07046eaad2c4f75745f76de9ba483541909f1c595aced8d3434224a4f933daca"}, + {file = "pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5"}, + {file = "pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b"}, ] [package.dependencies] @@ -6300,13 +6375,13 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.3.3" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.3-py3-none-any.whl", hash = "sha256:50a5450e2e84f44539718293cbb1da0a0885c9d14adf21b77bae4e66fc99d9b5"}, - {file = "platformdirs-4.3.3.tar.gz", hash = "sha256:d4e0b7d8ec176b341fb03cb11ca12d0276faa8c485f9cd218f613840463fc2c0"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] @@ -6374,15 +6449,32 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "postgrest" +version = "0.17.1" +description = "PostgREST client for Python. This library provides an ORM interface to PostgREST." +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "postgrest-0.17.1-py3-none-any.whl", hash = "sha256:ec1d00dc8532fe5ffb342cfc7c4e610a1e0e2272eb14f78f9b2b61094f9be510"}, + {file = "postgrest-0.17.1.tar.gz", hash = "sha256:e31d9977dbb80dc5f9fdd4d444014686606692dc4ddb9adc85639e56c6d54c92"}, +] + +[package.dependencies] +deprecation = ">=2.1.0,<3.0.0" +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +pydantic = ">=1.9,<3.0" +strenum = ">=0.4.9,<0.5.0" + [[package]] name = "posthog" -version = "3.6.5" +version = "3.7.0" description = "Integrate PostHog into any python application." optional = false python-versions = "*" files = [ - {file = "posthog-3.6.5-py2.py3-none-any.whl", hash = "sha256:f8b7c573826b061a1d22c9495169c38ebe83a1df2729f49c7129a9c23a02acf6"}, - {file = "posthog-3.6.5.tar.gz", hash = "sha256:7fd3ca809e15476c35f75d18cd6bba31395daf0a17b75242965c469fb6292510"}, + {file = "posthog-3.7.0-py2.py3-none-any.whl", hash = "sha256:3555161c3a9557b5666f96d8e1f17f410ea0f07db56e399e336a1656d4e5c722"}, + {file = "posthog-3.7.0.tar.gz", hash = "sha256:b095d4354ba23f8b346ab5daed8ecfc5108772f922006982dfe8b2d29ebc6e0e"}, ] [package.dependencies] @@ -6399,44 +6491,44 @@ test = ["coverage", "django", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)" [[package]] name = "pox" -version = "0.3.4" +version = "0.3.5" description = "utilities for filesystem exploration and automated builds" optional = false python-versions = ">=3.8" files = [ - {file = "pox-0.3.4-py3-none-any.whl", hash = "sha256:651b8ae8a7b341b7bfd267f67f63106daeb9805f1ac11f323d5280d2da93fdb6"}, - {file = "pox-0.3.4.tar.gz", hash = "sha256:16e6eca84f1bec3828210b06b052adf04cf2ab20c22fd6fbef5f78320c9a6fed"}, + {file = "pox-0.3.5-py3-none-any.whl", hash = "sha256:9e82bcc9e578b43e80a99cad80f0d8f44f4d424f0ee4ee8d4db27260a6aa365a"}, + {file = "pox-0.3.5.tar.gz", hash = "sha256:8120ee4c94e950e6e0483e050a4f0e56076e590ba0a9add19524c254bd23c2d1"}, ] [[package]] name = "ppft" -version = "1.7.6.8" +version = "1.7.6.9" description = "distributed and parallel Python" optional = false python-versions = ">=3.8" files = [ - {file = "ppft-1.7.6.8-py3-none-any.whl", hash = "sha256:de2dd4b1b080923dd9627fbdea52649fd741c752fce4f3cf37e26f785df23d9b"}, - {file = "ppft-1.7.6.8.tar.gz", hash = "sha256:76a429a7d7b74c4d743f6dba8351e58d62b6432ed65df9fe204790160dab996d"}, + {file = "ppft-1.7.6.9-py3-none-any.whl", hash = "sha256:dab36548db5ca3055067fbe6b1a17db5fee29f3c366c579a9a27cebb52ed96f0"}, + {file = "ppft-1.7.6.9.tar.gz", hash = 
"sha256:73161c67474ea9d81d04bcdad166d399cff3f084d5d2dc21ebdd46c075bbc265"}, ] [package.extras] -dill = ["dill (>=0.3.8)"] +dill = ["dill (>=0.3.9)"] [[package]] name = "primp" -version = "0.6.2" +version = "0.6.3" description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" optional = false python-versions = ">=3.8" files = [ - {file = "primp-0.6.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4a35d441462a55d9a9525bf170e2ffd2fcb3db6039b23e802859fa22c18cdd51"}, - {file = "primp-0.6.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:f67ccade95bdbca3cf9b96b93aa53f9617d85ddbf988da4e9c523aa785fd2d54"}, - {file = "primp-0.6.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8074b93befaf36567e4cf3d4a1a8cd6ab9cc6e4dd4ff710650678daa405aee71"}, - {file = "primp-0.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7d3e2a3f8c6262e9b883651b79c4ff2b7677a76f47293a139f541c9ea333ce3b"}, - {file = "primp-0.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a460ea389371c6d04839b4b50b5805d99da8ebe281a2e8b534d27377c6d44f0e"}, - {file = "primp-0.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b6b27e89d3c05c811aff0e4fde7a36d6957b15b3112f4ce28b6b99e8ca1e725"}, - {file = "primp-0.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:1006a40a85f88a4c5222094813a1ebc01f85a63e9a33d2c443288c0720bed321"}, - {file = "primp-0.6.2.tar.gz", hash = "sha256:5a96a6b65195a8a989157e67d23bd171c49be238654e02bdf1b1fda36cbcc068"}, + {file = "primp-0.6.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bdbe6a7cdaaf5c9ed863432a941f4a75bd4c6ff626cbc8d32fc232793c70ba06"}, + {file = "primp-0.6.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:eeb53eb987bdcbcd85740633470255cab887d921df713ffa12a36a13366c9cdb"}, + {file = "primp-0.6.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78da53d3c92a8e3f05bd3286ac76c291f1b6fe5e08ea63b7ba92b0f9141800bb"}, + {file = "primp-0.6.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:86337b44deecdac752bd8112909987fc9fa9b894f30191c80a164dc8f895da53"}, + {file = "primp-0.6.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d3cd9a22b97f3eae42b2a5fb99f00480daf4cd6d9b139e05b0ffb03f7cc037f3"}, + {file = "primp-0.6.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7732bec917e2d3c48a31cdb92e1250f4ad6203a1aa4f802bd9abd84f2286a1e0"}, + {file = "primp-0.6.3-cp38-abi3-win_amd64.whl", hash = "sha256:1e4113c34b86c676ae321af185f03a372caef3ee009f1682c2d62e30ec87348c"}, + {file = "primp-0.6.3.tar.gz", hash = "sha256:17d30ebe26864defad5232dbbe1372e80483940012356e1f68846bb182282039"}, ] [package.extras] @@ -6444,13 +6536,13 @@ dev = ["certifi", "pytest (>=8.1.1)"] [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] @@ -6475,22 +6567,22 @@ testing = ["google-api-core (>=1.31.5)"] 
[[package]] name = "protobuf" -version = "4.25.4" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] @@ -6603,6 +6695,17 @@ files = [ {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files 
= [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + [[package]] name = "py-cpuinfo" version = "9.0.0" @@ -6957,13 +7060,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymilvus" -version = "2.4.6" +version = "2.4.7" description = "Python Sdk for Milvus" optional = false python-versions = ">=3.8" files = [ - {file = "pymilvus-2.4.6-py3-none-any.whl", hash = "sha256:b4c43472edc313b845d313be50610e19054e6954b2c5c3b515565c596c2d3d97"}, - {file = "pymilvus-2.4.6.tar.gz", hash = "sha256:6ac3eb91c92cc01bbe444fe83f895f02d7b2546d96ac67998630bf31ac074d66"}, + {file = "pymilvus-2.4.7-py3-none-any.whl", hash = "sha256:1e5d377bd40fa7eb459d3958dbd96201758f5cf997d41eb3d2d169d0b7fa462e"}, + {file = "pymilvus-2.4.7.tar.gz", hash = "sha256:9ef460b940782a42e1b7b8ae0da03d8cc02d9d80044d13f4b689a7c935ec7aa7"}, ] [package.dependencies] @@ -6980,6 +7083,22 @@ bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "r dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] model = ["milvus-model (>=0.1.0)"] +[[package]] +name = "pymochow" +version = "1.3.1" +description = "Python SDK for mochow" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327"}, + {file = "pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba"}, +] + +[package.dependencies] +future = "*" +orjson = "*" +requests = "*" + [[package]] name = "pymysql" version = "1.1.1" @@ -7015,13 +7134,13 @@ test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pypandoc" -version = "1.13" +version = "1.14" description = "Thin wrapper for pandoc." optional = false python-versions = ">=3.6" files = [ - {file = "pypandoc-1.13-py3-none-any.whl", hash = "sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681"}, - {file = "pypandoc-1.13.tar.gz", hash = "sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e"}, + {file = "pypandoc-1.14-py3-none-any.whl", hash = "sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22"}, + {file = "pypandoc-1.14.tar.gz", hash = "sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197"}, ] [[package]] @@ -7082,28 +7201,28 @@ files = [ [[package]] name = "pyproject-hooks" -version = "1.1.0" +version = "1.2.0" description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false python-versions = ">=3.7" files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, ] [[package]] name = "pyreadline3" -version = "3.5.2" +version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" files = [ - {file = "pyreadline3-3.5.2-py3-none-any.whl", hash = "sha256:a87d56791e2965b2b187e2ea33dcf664600842c997c0623c95cf8ef07db83de9"}, - {file = "pyreadline3-3.5.2.tar.gz", hash = "sha256:ba82292e52c5a3bb256b291af0c40b457c1e8699cac9a873abbcaac8aef3a1bb"}, + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, ] [package.extras] -dev = ["build", "flake8", "pytest", "twine"] +dev = ["build", "flake8", "mypy", "pytest", "twine"] [[package]] name = "pytest" @@ -7149,21 +7268,21 @@ histogram = ["pygal", "pygaljs"] [[package]] name = "pytest-env" -version = "1.1.4" +version = "1.1.5" description = "pytest plugin that allows you to add environment variables." optional = false python-versions = ">=3.8" files = [ - {file = "pytest_env-1.1.4-py3-none-any.whl", hash = "sha256:a4212056d4d440febef311a98fdca56c31256d58fb453d103cba4e8a532b721d"}, - {file = "pytest_env-1.1.4.tar.gz", hash = "sha256:86653658da8f11c6844975db955746c458a9c09f1e64957603161e2ff93f5133"}, + {file = "pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30"}, + {file = "pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf"}, ] [package.dependencies] -pytest = ">=8.3.2" +pytest = ">=8.3.3" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -test = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "pytest-mock (>=3.14)"] [[package]] name = "pytest-mock" @@ -7388,25 +7507,29 @@ files = [ [[package]] name = "pywin32" -version = "306" +version = "307" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = 
"sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-307-cp310-cp310-win32.whl", hash = "sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b"}, + {file = "pywin32-307-cp310-cp310-win_amd64.whl", hash = "sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d"}, + {file = "pywin32-307-cp310-cp310-win_arm64.whl", hash = "sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4"}, + {file = "pywin32-307-cp311-cp311-win32.whl", hash = "sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75"}, + {file = "pywin32-307-cp311-cp311-win_amd64.whl", hash = "sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3"}, + {file = "pywin32-307-cp311-cp311-win_arm64.whl", hash = "sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398"}, + {file = "pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815"}, + {file = "pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347"}, + {file = "pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2"}, + {file = "pywin32-307-cp313-cp313-win32.whl", hash = "sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5"}, + {file = "pywin32-307-cp313-cp313-win_amd64.whl", hash = "sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2"}, + {file = "pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a"}, + {file = "pywin32-307-cp37-cp37m-win32.whl", hash = "sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511"}, + {file = "pywin32-307-cp37-cp37m-win_amd64.whl", hash = "sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba"}, + {file = "pywin32-307-cp38-cp38-win32.whl", hash = "sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01"}, + {file = "pywin32-307-cp38-cp38-win_amd64.whl", hash = "sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b"}, + {file = "pywin32-307-cp39-cp39-win32.whl", hash = "sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6"}, + {file = "pywin32-307-cp39-cp39-win_amd64.whl", hash = "sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87"}, ] [[package]] @@ -7550,123 +7673,103 @@ dev = ["pytest"] [[package]] name = "rapidfuzz" -version = "3.9.7" +version = "3.10.0" description = "rapid fuzzy string matching" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccf68e30b80e903f2309f90a438dbd640dd98e878eeb5ad361a288051ee5b75c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:696a79018ef989bf1c9abd9005841cee18005ccad4748bad8a4c274c47b6241a"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eebf6c93af0ae866c22b403a84747580bb5c10f0d7b51c82a87f25405d4dcb"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0e9125377fa3d21a8abd4fbdbcf1c27be73e8b1850f0b61b5b711364bf3b59db"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c12d180b17a22d107c8747de9c68d0b9c1d15dcda5445ff9bf9f4ccfb67c3e16"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1318d42610c26dcd68bd3279a1bf9e3605377260867c9a8ed22eafc1bd93a7c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5fa6e3c6e0333051c1f3a49f0807b3366f4131c8d6ac8c3e05fd0d0ce3755c"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcf79b686962d7bec458a0babc904cb4fa319808805e036b9d5a531ee6b9b835"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b01153c7466d0bad48fba77a303d5a768e66f24b763853469f47220b3de4661"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:94baaeea0b4f8632a6da69348b1e741043eba18d4e3088d674d3f76586b6223d"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6c5b32875646cb7f60c193ade99b2e4b124f19583492115293cd00f6fb198b17"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:110b6294396bc0a447648627479c9320f095c2034c0537f687592e0f58622638"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win32.whl", hash = "sha256:3445a35c4c8d288f2b2011eb61bce1227c633ce85a3154e727170f37c0266bb2"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:0d1415a732ee75e74a90af12020b77a0b396b36c60afae1bde3208a78cd2c9fc"}, - {file = "rapidfuzz-3.9.7-cp310-cp310-win_arm64.whl", hash = "sha256:836f4d88b8bd0fff2ebe815dcaab8aa6c8d07d1d566a7e21dd137cf6fe11ed5b"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d098ce6162eb5e48fceb0745455bc950af059df6113eec83e916c129fca11408"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:048d55d36c02c6685a2b2741688503c3d15149694506655b6169dcfd3b6c2585"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c33211cfff9aec425bb1bfedaf94afcf337063aa273754f22779d6dadebef4c2"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6d9db2fa4e9be171e9bb31cf2d2575574774966b43f5b951062bb2e67885852"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4e049d5ad61448c9a020d1061eba20944c4887d720c4069724beb6ea1692507"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa74aac64c85898b93d9c80bb935a96bf64985e28d4ee0f1a3d1f3bf11a5106"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965693c2e9efd425b0f059f5be50ef830129f82892fa1858e220e424d9d0160f"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8501000a5eb8037c4b56857724797fe5a8b01853c363de91c8d0d0ad56bef319"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d92c552c6b7577402afdd547dcf5d31ea6c8ae31ad03f78226e055cfa37f3c6"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1ee2086f490cb501d86b7e386c1eb4e3a0ccbb0c99067089efaa8c79012c8952"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1de91e7fd7f525e10ea79a6e62c559d1b0278ec097ad83d9da378b6fab65a265"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash 
= "sha256:a4da514d13f4433e16960a17f05b67e0af30ac771719c9a9fb877e5004f74477"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win32.whl", hash = "sha256:a40184c67db8252593ec518e17fb8a6e86d7259dc9f2d6c0bf4ff4db8cf1ad4b"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:c4f28f1930b09a2c300357d8465b388cecb7e8b2f454a5d5425561710b7fd07f"}, - {file = "rapidfuzz-3.9.7-cp311-cp311-win_arm64.whl", hash = "sha256:675b75412a943bb83f1f53e2e54fd18c80ef15ed642dc6eb0382d1949419d904"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ef6a1a8f0b12f8722f595f15c62950c9a02d5abc64742561299ffd49f6c6944"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32532af1d70c6ec02ea5ac7ee2766dfff7c8ae8c761abfe8da9e527314e634e8"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1a38bade755aa9dd95a81cda949e1bf9cd92b79341ccc5e2189c9e7bdfc5ec"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ee2df41224c87336448d279b5b6a3a75f36e41dd3dcf538c0c9cce36360d8"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be3a1fc3e2ab3bdf93dc0c83c00acca8afd2a80602297d96cf4a0ba028333cdf"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603f48f621272a448ff58bb556feb4371252a02156593303391f5c3281dfaeac"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:268f8e1ca50fc61c0736f3fe9d47891424adf62d96ed30196f30f4bd8216b41f"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f8bf3f0d02935751d8660abda6044821a861f6229f7d359f98bcdcc7e66c39b"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b997ff3b39d4cee9fb025d6c46b0a24bd67595ce5a5b652a97fb3a9d60beb651"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca66676c8ef6557f9b81c5b2b519097817a7c776a6599b8d6fcc3e16edd216fe"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:35d3044cb635ca6b1b2b7b67b3597bd19f34f1753b129eb6d2ae04cf98cd3945"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a93c9e60904cb76e7aefef67afffb8b37c4894f81415ed513db090f29d01101"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win32.whl", hash = "sha256:579d107102c0725f7c79b4e79f16d3cf4d7c9208f29c66b064fa1fd4641d5155"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:953b3780765c8846866faf891ee4290f6a41a6dacf4fbcd3926f78c9de412ca6"}, - {file = "rapidfuzz-3.9.7-cp312-cp312-win_arm64.whl", hash = "sha256:7c20c1474b068c4bd45bf2fd0ad548df284f74e9a14a68b06746c56e3aa8eb70"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fde81b1da9a947f931711febe2e2bee694e891f6d3e6aa6bc02c1884702aea19"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47e92c155a14f44511ea8ebcc6bc1535a1fe8d0a7d67ad3cc47ba61606df7bcf"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8772b745668260c5c4d069c678bbaa68812e6c69830f3771eaad521af7bc17f8"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578302828dd97ee2ba507d2f71d62164e28d2fc7bc73aad0d2d1d2afc021a5d5"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fc3e6081069eea61593f1d6839029da53d00c8c9b205c5534853eaa3f031085c"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b1c2d504eddf97bc0f2eba422c8915576dbf025062ceaca2d68aecd66324ad9"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb76e5a21034f0307c51c5a2fc08856f698c53a4c593b17d291f7d6e9d09ca3"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d4ba2318ef670ce505f42881a5d2af70f948124646947341a3c6ccb33cd70369"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:057bb03f39e285047d7e9412e01ecf31bb2d42b9466a5409d715d587460dd59b"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a8feac9006d5c9758438906f093befffc4290de75663dbb2098461df7c7d28dd"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95b8292383e717e10455f2c917df45032b611141e43d1adf70f71b1566136b11"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e9fbf659537d246086d0297628b3795dc3e4a384101ecc01e5791c827b8d7345"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win32.whl", hash = "sha256:1dc516ac6d32027be2b0196bedf6d977ac26debd09ca182376322ad620460feb"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win_amd64.whl", hash = "sha256:b4f86e09d3064dca0b014cd48688964036a904a2d28048f00c8f4640796d06a8"}, - {file = "rapidfuzz-3.9.7-cp313-cp313-win_arm64.whl", hash = "sha256:19c64d8ddb2940b42a4567b23f1681af77f50a5ff6c9b8e85daba079c210716e"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda3dd68d8b28ccb20ffb6f756fefd9b5ba570a772bedd7643ed441f5793308"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2379e0b2578ad3ac7004f223251550f08bca873ff76c169b09410ec562ad78d8"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d1eff95362f993b0276fd3839aee48625b09aac8938bb0c23b40d219cba5dc5"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd9360e30041690912525a210e48a897b49b230768cc8af1c702e5395690464f"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a93cd834b3c315ab437f0565ee3a2f42dd33768dc885ccbabf9710b131cf70d2"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff196996240db7075f62c7bc4506f40a3c80cd4ae3ab0e79ac6892283a90859"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948dcee7aaa1cd14358b2a7ef08bf0be42bf89049c3a906669874a715fc2c937"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95751f505a301af1aaf086c19f34536056d6c8efa91b2240de532a3db57b543"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:90db86fa196eecf96cb6db09f1083912ea945c50c57188039392d810d0b784e1"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3171653212218a162540a3c8eb8ae7d3dcc8548540b69eaecaf3b47c14d89c90"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:36dd6e820379c37a1ffefc8a52b648758e867cd9d78ee5b5dc0c9a6a10145378"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7b702de95666a1f7d5c6b47eacadfe2d2794af3742d63d2134767d13e5d1c713"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-win32.whl", hash = 
"sha256:9030e7238c0df51aed5c9c5ed8eee2bdd47a2ae788e562c1454af2851c3d1906"}, - {file = "rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:f847fb0fbfb72482b1c05c59cbb275c58a55b73708a7f77a83f8035ee3c86497"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97f2ce529d2a70a60c290f6ab269a2bbf1d3b47b9724dccc84339b85f7afb044"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2957fdad10bb83b1982b02deb3604a3f6911a5e545f518b59c741086f92d152"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d5262383634626eb45c536017204b8163a03bc43bda880cf1bdd7885db9a163"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364587827d7cbd41afa0782adc2d2d19e3f07d355b0750a02a8e33ad27a9c368"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecc24af7f905f3d6efb371a01680116ffea8d64e266618fb9ad1602a9b4f7934"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dc86aa6b29d174713c5f4caac35ffb7f232e3e649113e8d13812b35ab078228"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3dcfbe7266e74a707173a12a7b355a531f2dcfbdb32f09468e664330da14874"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b23806fbdd6b510ba9ac93bb72d503066263b0fba44b71b835be9f063a84025f"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5551d68264c1bb6943f542da83a4dc8940ede52c5847ef158698799cc28d14f5"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13d8675a1fa7e2b19650ca7ef9a6ec01391d4bb12ab9e0793e8eb024538b4a34"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9b6a5de507b9be6de688dae40143b656f7a93b10995fb8bd90deb555e7875c60"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:111a20a3c090cf244d9406e60500b6c34b2375ba3a5009e2b38fd806fe38e337"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win32.whl", hash = "sha256:22589c0b8ccc6c391ce7f776c93a8c92c96ab8d34e1a19f1bd2b12a235332632"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:6f83221db5755b8f34222e40607d87f1176a8d5d4dbda4a55a0f0b67d588a69c"}, - {file = "rapidfuzz-3.9.7-cp39-cp39-win_arm64.whl", hash = "sha256:3665b92e788578c3bb334bd5b5fa7ee1a84bafd68be438e3110861d1578c63a0"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7df9c2194c7ec930b33c991c55dbd0c10951bd25800c0b7a7b571994ebbced5"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68bd888eafd07b09585dcc8bc2716c5ecdb7eed62827470664d25588982b2873"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1230e0f9026851a6a432beaa0ce575dda7b39fe689b576f99a0704fbb81fc9c"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b36e1c61b796ae1777f3e9e11fd39898b09d351c9384baf6e3b7e6191d8ced"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dba13d86806fcf3fe9c9919f58575e0090eadfb89c058bde02bcc7ab24e4548"}, - {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1f1a33e84056b7892c721d84475d3bde49a145126bc4c6efe0d6d0d59cb31c29"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:3492c7a42b7fa9f0051d7fcce9893e95ed91c97c9ec7fb64346f3e070dd318ed"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ece45eb2af8b00f90d10f7419322e8804bd42fb1129026f9bfe712c37508b514"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd14cf4876f04b488f6e54a7abd3e9b31db5f5a6aba0ce90659917aaa8c088"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521c58c72ed8a612b25cda378ff10dee17e6deb4ee99a070b723519a345527b9"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18669bb6cdf7d40738526d37e550df09ba065b5a7560f3d802287988b6cb63cf"}, - {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7abe2dbae81120a64bb4f8d3fcafe9122f328c9f86d7f327f174187a5af4ed86"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a3c0783910911f4f24655826d007c9f4360f08107410952c01ee3df98c713eb2"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:03126f9a040ff21d2a110610bfd6b93b79377ce8b4121edcb791d61b7df6eec5"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591908240f4085e2ade5b685c6e8346e2ed44932cffeaac2fb32ddac95b55c7f"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9012d86c6397edbc9da4ac0132de7f8ee9d6ce857f4194d5684c4ddbcdd1c5c"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df596ddd3db38aa513d4c0995611267b3946e7cbe5a8761b50e9306dfec720ee"}, - {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3ed5adb752f4308fcc8f4fb6f8eb7aa4082f9d12676fda0a74fa5564242a8107"}, - {file = "rapidfuzz-3.9.7.tar.gz", hash = "sha256:f1c7296534c1afb6f495aa95871f14ccdc197c6db42965854e483100df313030"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:884453860de029380dded8f3c1918af2d8eb5adf8010261645c7e5c88c2b5428"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718c9bd369288aca5fa929df6dbf66fdbe9768d90940a940c0b5cdc96ade4309"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a68e3724b7dab761c01816aaa64b0903734d999d5589daf97c14ef5cc0629a8e"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1af60988d47534246d9525f77288fdd9de652608a4842815d9018570b959acc6"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3084161fc3e963056232ef8d937449a2943852e07101f5a136c8f3cfa4119217"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cd67d3d017296d98ff505529104299f78433e4b8af31b55003d901a62bbebe9"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b11a127ac590fc991e8a02c2d7e1ac86e8141c92f78546f18b5c904064a0552c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aadce42147fc09dcef1afa892485311e824c050352e1aa6e47f56b9b27af4cf0"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b54853c2371bf0e38d67da379519deb6fbe70055efb32f6607081641af3dc752"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ce19887268e90ee81a3957eef5e46a70ecc000713796639f83828b950343f49e"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f39a2a5ded23b9b9194ec45740dce57177b80f86c6d8eba953d3ff1a25c97766"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0ec338d5f4ad8d9339a88a08db5c23e7f7a52c2b2a10510c48a0cef1fb3f0ddc"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win32.whl", hash = "sha256:56fd15ea8f4c948864fa5ebd9261c67cf7b89a1c517a0caef4df75446a7af18c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:43dfc5e733808962a822ff6d9c29f3039a3cfb3620706f5953e17cfe4496724c"}, + {file = "rapidfuzz-3.10.0-cp310-cp310-win_arm64.whl", hash = "sha256:ae7966f205b5a7fde93b44ca8fed37c1c8539328d7f179b1197de34eceaceb5f"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb0013795b40db5cf361e6f21ee7cda09627cf294977149b50e217d7fe9a2f03"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:69ef5b363afff7150a1fbe788007e307b9802a2eb6ad92ed51ab94e6ad2674c6"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c582c46b1bb0b19f1a5f4c1312f1b640c21d78c371a6615c34025b16ee56369b"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:288f6f6e7410cacb115fb851f3f18bf0e4231eb3f6cb5bd1cec0e7b25c4d039d"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9e29a13d2fd9be3e7d8c26c7ef4ba60b5bc7efbc9dbdf24454c7e9ebba31768"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea2da0459b951ee461bd4e02b8904890bd1c4263999d291c5cd01e6620177ad4"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:457827ba82261aa2ae6ac06a46d0043ab12ba7216b82d87ae1434ec0f29736d6"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5d350864269d56f51ab81ab750c9259ae5cad3152c0680baef143dcec92206a1"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a9b8f51e08c3f983d857c3889930af9ddecc768453822076683664772d87e374"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7f3a6aa6e70fc27e4ff5c479f13cc9fc26a56347610f5f8b50396a0d344c5f55"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:803f255f10d63420979b1909ef976e7d30dec42025c9b067fc1d2040cc365a7e"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2026651761bf83a0f31495cc0f70840d5c0d54388f41316e3f9cb51bd85e49a5"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win32.whl", hash = "sha256:4df75b3ebbb8cfdb9bf8b213b168620b88fd92d0c16a8bc9f9234630b282db59"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f9f0bbfb6787b97c51516f3ccf97737d504db5d239ad44527673b81f598b84ab"}, + {file = "rapidfuzz-3.10.0-cp311-cp311-win_arm64.whl", hash = "sha256:10fdad800441b9c97d471a937ba7d42625f1b530db05e572f1cb7d401d95c893"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7dc87073ba3a40dd65591a2100aa71602107443bf10770579ff9c8a3242edb94"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a425a0a868cf8e9c6e93e1cda4b758cdfd314bb9a4fc916c5742c934e3613480"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a86d5d1d75e61df060c1e56596b6b0a4422a929dff19cc3dbfd5eee762c86b61"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34f213d59219a9c3ca14e94a825f585811a68ac56b4118b4dc388b5b14afc108"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96ad46f5f56f70fab2be9e5f3165a21be58d633b90bf6e67fc52a856695e4bcf"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9178277f72d144a6c7704d7ae7fa15b7b86f0f0796f0e1049c7b4ef748a662ef"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76a35e9e19a7c883c422ffa378e9a04bc98cb3b29648c5831596401298ee51e6"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a6405d34c394c65e4f73a1d300c001f304f08e529d2ed6413b46ee3037956eb"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bd393683129f446a75d8634306aed7e377627098a1286ff3af2a4f1736742820"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b0445fa9880ead81f5a7d0efc0b9c977a947d8052c43519aceeaf56eabaf6843"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c50bc308fa29767ed8f53a8d33b7633a9e14718ced038ed89d41b886e301da32"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e89605afebbd2d4b045bccfdc12a14b16fe8ccbae05f64b4b4c64a97dad1c891"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win32.whl", hash = "sha256:2db9187f3acf3cd33424ecdbaad75414c298ecd1513470df7bda885dcb68cc15"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:50e3d0c72ea15391ba9531ead7f2068a67c5b18a6a365fef3127583aaadd1725"}, + {file = "rapidfuzz-3.10.0-cp312-cp312-win_arm64.whl", hash = "sha256:9eac95b4278bd53115903d89118a2c908398ee8bdfd977ae844f1bd2b02b917c"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe5231e8afd069c742ac5b4f96344a0fe4aff52df8e53ef87faebf77f827822c"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:886882367dbc985f5736356105798f2ae6e794e671fc605476cbe2e73838a9bb"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b33e13e537e3afd1627d421a142a12bbbe601543558a391a6fae593356842f6e"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094c26116d55bf9c53abd840d08422f20da78ec4c4723e5024322321caedca48"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:545fc04f2d592e4350f59deb0818886c1b444ffba3bec535b4fbb97191aaf769"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:916a6abf3632e592b937c3d04c00a6efadd8fd30539cdcd4e6e4d92be7ca5d90"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6ec40cef63b1922083d33bfef2f91fc0b0bc07b5b09bfee0b0f1717d558292"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c77a7330dd15c7eb5fd3631dc646fc96327f98db8181138766bd14d3e905f0ba"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:949b5e9eeaa4ecb4c7e9c2a4689dddce60929dd1ff9c76a889cdbabe8bbf2171"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5363932a5aab67010ae1a6205c567d1ef256fb333bc23c27582481606be480c"}, + {file = 
"rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5dd6eec15b13329abe66cc241b484002ecb0e17d694491c944a22410a6a9e5e2"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e7f98525b60b3c14524e0a4e1fedf7654657b6e02eb25f1be897ab097706f3"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win32.whl", hash = "sha256:d29d1b9857c65f8cb3a29270732e1591b9bacf89de9d13fa764f79f07d8f1fd2"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:fa9720e56663cc3649d62b4b5f3145e94b8f5611e8a8e1b46507777249d46aad"}, + {file = "rapidfuzz-3.10.0-cp313-cp313-win_arm64.whl", hash = "sha256:eda4c661e68dddd56c8fbfe1ca35e40dd2afd973f7ebb1605f4d151edc63dff8"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cffbc50e0767396ed483900900dd58ce4351bc0d40e64bced8694bd41864cc71"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c038b9939da3035afb6cb2f465f18163e8f070aba0482923ecff9443def67178"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca366c2e2a54e2f663f4529b189fdeb6e14d419b1c78b754ec1744f3c01070d4"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c4c82b1689b23b1b5e6a603164ed2be41b6f6de292a698b98ba2381e889eb9d"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98f6ebe28831a482981ecfeedc8237047878424ad0c1add2c7f366ba44a20452"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd1a7676ee2a4c8e2f7f2550bece994f9f89e58afb96088964145a83af7408b"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec9139baa3f85b65adc700eafa03ed04995ca8533dd56c924f0e458ffec044ab"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:26de93e6495078b6af4c4d93a42ca067b16cc0e95699526c82ab7d1025b4d3bf"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f3a0bda83c18195c361b5500377d0767749f128564ca95b42c8849fd475bb327"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:63e4c175cbce8c3adc22dca5e6154588ae673f6c55374d156f3dac732c88d7de"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4dd3d8443970eaa02ab5ae45ce584b061f2799cd9f7e875190e2617440c1f9d4"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5ddb2388610799fc46abe389600625058f2a73867e63e20107c5ad5ffa57c47"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win32.whl", hash = "sha256:2e9be5d05cd960914024412b5406fb75a82f8562f45912ff86255acbfdbfb78e"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:47aca565a39c9a6067927871973ca827023e8b65ba6c5747f4c228c8d7ddc04f"}, + {file = "rapidfuzz-3.10.0-cp39-cp39-win_arm64.whl", hash = "sha256:b0732343cdc4273b5921268026dd7266f75466eb21873cb7635a200d9d9c3fac"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f744b5eb1469bf92dd143d36570d2bdbbdc88fe5cb0b5405e53dd34f479cbd8a"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b67cc21a14327a0eb0f47bc3d7e59ec08031c7c55220ece672f9476e7a8068d3"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe5783676f0afba4a522c80b15e99dbf4e393c149ab610308a8ef1f04c6bcc8"}, + {file = 
"rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4688862f957c8629d557d084f20b2d803f8738b6c4066802a0b1cc472e088d9"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20bd153aacc244e4c907d772c703fea82754c4db14f8aa64d75ff81b7b8ab92d"}, + {file = "rapidfuzz-3.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:50484d563f8bfa723c74c944b0bb15b9e054db9c889348c8c307abcbee75ab92"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5897242d455461f2c5b82d7397b29341fd11e85bf3608a522177071044784ee8"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:116c71a81e046ba56551d8ab68067ca7034d94b617545316d460a452c5c3c289"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0a547e4350d1fa32624d3eab51eff8cf329f4cae110b4ea0402486b1da8be40"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:399b9b79ccfcf50ca3bad7692bc098bb8eade88d7d5e15773b7f866c91156d0c"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7947a425d1be3e744707ee58c6cb318b93a56e08f080722dcc0347e0b7a1bb9a"}, + {file = "rapidfuzz-3.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:94c48b4a2a4b1d22246f48e2b11cae01ec7d23f0c9123f8bb822839ad79d0a88"}, + {file = "rapidfuzz-3.10.0.tar.gz", hash = "sha256:6b62af27e65bb39276a66533655a2fa3c60a487b03935721c45b7809527979be"}, ] [package.extras] -full = ["numpy"] +all = ["numpy"] [[package]] name = "readabilipy" @@ -7690,6 +7793,23 @@ dev = ["coveralls", "m2r", "pycodestyle", "pyflakes", "pylint", "pytest", "pytes docs = ["m2r", "sphinx"] test = ["coveralls", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-benchmark", "pytest-cov"] +[[package]] +name = "realtime" +version = "2.0.2" +description = "" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "realtime-2.0.2-py3-none-any.whl", hash = "sha256:2634c915bc38807f2013f21e8bcc4d2f79870dfd81460ddb9393883d0489928a"}, + {file = "realtime-2.0.2.tar.gz", hash = "sha256:519da9325b3b8102139d51785013d592f6b2403d81fa21d838a0b0234723ed7d"}, +] + +[package.dependencies] +aiohttp = ">=3.10.2,<4.0.0" +python-dateutil = ">=2.8.1,<3.0.0" +typing-extensions = ">=4.12.2,<5.0.0" +websockets = ">=11,<13" + [[package]] name = "redis" version = "5.0.8" @@ -7928,20 +8048,36 @@ files = [ [package.dependencies] requests = "2.31.0" +[[package]] +name = "retry" +version = "0.9.2" +description = "Easy to use retry decorator." 
+optional = false +python-versions = "*" +files = [ + {file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"}, + {file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"}, +] + +[package.dependencies] +decorator = ">=3.4.2" +py = ">=1.4.26,<2.0.0" + [[package]] name = "rich" -version = "13.8.1" +version = "13.9.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, + {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, + {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -8074,40 +8210,40 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.6.5" +version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.5-py3-none-linux_armv6l.whl", hash = "sha256:7e4e308f16e07c95fc7753fc1aaac690a323b2bb9f4ec5e844a97bb7fbebd748"}, - {file = "ruff-0.6.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:932cd69eefe4daf8c7d92bd6689f7e8182571cb934ea720af218929da7bd7d69"}, - {file = "ruff-0.6.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3a8d42d11fff8d3143ff4da41742a98f8f233bf8890e9fe23077826818f8d680"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a50af6e828ee692fb10ff2dfe53f05caecf077f4210fae9677e06a808275754f"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:794ada3400a0d0b89e3015f1a7e01f4c97320ac665b7bc3ade24b50b54cb2972"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:381413ec47f71ce1d1c614f7779d88886f406f1fd53d289c77e4e533dc6ea200"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:52e75a82bbc9b42e63c08d22ad0ac525117e72aee9729a069d7c4f235fc4d276"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09c72a833fd3551135ceddcba5ebdb68ff89225d30758027280968c9acdc7810"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:800c50371bdcb99b3c1551d5691e14d16d6f07063a518770254227f7f6e8c178"}, - {file = "ruff-0.6.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e25ddd9cd63ba1f3bd51c1f09903904a6adf8429df34f17d728a8fa11174253"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7291e64d7129f24d1b0c947ec3ec4c0076e958d1475c61202497c6aced35dd19"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9ad7dfbd138d09d9a7e6931e6a7e797651ce29becd688be8a0d4d5f8177b4b0c"}, - {file = "ruff-0.6.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:005256d977021790cc52aa23d78f06bb5090dc0bfbd42de46d49c201533982ae"}, 
- {file = "ruff-0.6.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:482c1e6bfeb615eafc5899127b805d28e387bd87db38b2c0c41d271f5e58d8cc"}, - {file = "ruff-0.6.5-py3-none-win32.whl", hash = "sha256:cf4d3fa53644137f6a4a27a2b397381d16454a1566ae5335855c187fbf67e4f5"}, - {file = "ruff-0.6.5-py3-none-win_amd64.whl", hash = "sha256:3e42a57b58e3612051a636bc1ac4e6b838679530235520e8f095f7c44f706ff9"}, - {file = "ruff-0.6.5-py3-none-win_arm64.whl", hash = "sha256:51935067740773afdf97493ba9b8231279e9beef0f2a8079188c4776c25688e0"}, - {file = "ruff-0.6.5.tar.gz", hash = "sha256:4d32d87fab433c0cf285c3683dd4dae63be05fd7a1d65b3f5bf7cdd05a6b96fb"}, + {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, + {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, + {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, + {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, + {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, + {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, + {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, ] [[package]] name = "s3transfer" -version = "0.10.2" +version = "0.10.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.2-py3-none-any.whl", hash = 
"sha256:eca1c20de70a39daee580aef4986996620f365c4e0fda6a86100231d62f1bf69"}, - {file = "s3transfer-0.10.2.tar.gz", hash = "sha256:0711534e9356d3cc692fdde846b4a1e4b0cb6519971860796e6bc4c7aea00ef6"}, + {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, + {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, ] [package.dependencies] @@ -8293,13 +8429,13 @@ test = ["accelerate (>=0.24.1,<=0.27.0)", "apache-airflow (==2.9.3)", "apache-ai [[package]] name = "sagemaker-core" -version = "1.0.4" +version = "1.0.10" description = "An python package for sagemaker core functionalities" optional = false python-versions = ">=3.8" files = [ - {file = "sagemaker_core-1.0.4-py3-none-any.whl", hash = "sha256:bf71d988dbda03a3cd1557524f2fab4f19d89e54bd38fc7f05bbbcf580715f95"}, - {file = "sagemaker_core-1.0.4.tar.gz", hash = "sha256:203f4eb9d0d2a0e6ba80d79ba8c28b8ea27c94d04f6d9ff01c2fd55b95615c78"}, + {file = "sagemaker_core-1.0.10-py3-none-any.whl", hash = "sha256:0bdcf6a467db988919cc6b6d0077f74871ee24c24adf7f759f9cb98460e08953"}, + {file = "sagemaker_core-1.0.10.tar.gz", hash = "sha256:6d34a9b6dc5e17e8bfffd1d0650726865779c92b3b8f1b59fc15d42061a0dd29"}, ] [package.dependencies] @@ -8348,6 +8484,11 @@ files = [ {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, + {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, + {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, + {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, @@ -8473,18 +8614,18 @@ tornado = ["tornado (>=5)"] [[package]] name = "setuptools" -version = "74.1.2" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, - {file = "setuptools-74.1.2.tar.gz", hash = 
"sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] @@ -8572,19 +8713,20 @@ files = [ [[package]] name = "simple-websocket" -version = "1.0.0" +version = "1.1.0" description = "Simple WebSocket server and client for Python" optional = false python-versions = ">=3.6" files = [ - {file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"}, - {file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"}, + {file = "simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c"}, + {file = "simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4"}, ] [package.dependencies] wsproto = "*" [package.extras] +dev = ["flake8", "pytest", "pytest-cov", "tox"] docs = ["sphinx"] [[package]] @@ -8644,60 +8786,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.34" +version = "2.0.35" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"}, - {file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = 
"sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7db3db284a0edaebe87f8f6642c2b2c27ed85c3e70064b84d1c9e4ec06d5d84"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:430093fce0efc7941d911d34f75a70084f12f6ca5c15d19595c18753edb7c33b"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79cb400c360c7c210097b147c16a9e4c14688a6402445ac848f296ade6283bbc"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1b30f31a36c7f3fee848391ff77eebdd3af5750bf95fbf9b8b5323edfdb4ec"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fddde2368e777ea2a4891a3fb4341e910a056be0bb15303bf1b92f073b80c02"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80bd73ea335203b125cf1d8e50fef06be709619eb6ab9e7b891ea34b5baa2287"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win32.whl", hash = "sha256:6daeb8382d0df526372abd9cb795c992e18eed25ef2c43afe518c73f8cccb721"}, - {file = "SQLAlchemy-2.0.34-cp311-cp311-win_amd64.whl", hash = "sha256:5bc08e75ed11693ecb648b7a0a4ed80da6d10845e44be0c98c03f2f880b68ff4"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:53e68b091492c8ed2bd0141e00ad3089bcc6bf0e6ec4142ad6505b4afe64163e"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bcd18441a49499bf5528deaa9dee1f5c01ca491fc2791b13604e8f972877f812"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:165bbe0b376541092bf49542bd9827b048357f4623486096fc9aaa6d4e7c59a2"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3330415cd387d2b88600e8e26b510d0370db9b7eaf984354a43e19c40df2e2b"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97b850f73f8abbffb66ccbab6e55a195a0eb655e5dc74624d15cff4bfb35bd74"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee4c6917857fd6121ed84f56d1dc78eb1d0e87f845ab5a568aba73e78adf83"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"}, - {file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"}, - {file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = 
"sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"}, - {file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"}, - {file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"}, - {file = "SQLAlchemy-2.0.34-py3-none-any.whl", hash = "sha256:7286c353ee6475613d8beff83167374006c6b3e3f0e6491bfe8ca610eb1dec0f"}, - {file = "sqlalchemy-2.0.34.tar.gz", hash = "sha256:10d8f36990dd929690666679b0f42235c159a7051534adb135728ee52828dd22"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8bea573863762bbf45d1e13f87c2d2fd32cee2dbd50d050f83f87429c9e1ea"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f552023710d4b93d8fb29a91fadf97de89c5926c6bd758897875435f2a939f33"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:016b2e665f778f13d3c438651dd4de244214b527a275e0acf1d44c05bc6026a9"}, + {file = 
"SQLAlchemy-2.0.35-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7befc148de64b6060937231cbff8d01ccf0bfd75aa26383ffdf8d82b12ec04ff"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win32.whl", hash = "sha256:22b83aed390e3099584b839b93f80a0f4a95ee7f48270c97c90acd40ee646f0b"}, + {file = "SQLAlchemy-2.0.35-cp310-cp310-win_amd64.whl", hash = "sha256:a29762cd3d116585278ffb2e5b8cc311fb095ea278b96feef28d0b423154858e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e21f66748ab725ade40fa7af8ec8b5019c68ab00b929f6643e1b1af461eddb60"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a6219108a15fc6d24de499d0d515c7235c617b2540d97116b663dade1a54d62"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042622a5306c23b972192283f4e22372da3b8ddf5f7aac1cc5d9c9b222ab3ff6"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:627dee0c280eea91aed87b20a1f849e9ae2fe719d52cbf847c0e0ea34464b3f7"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4fdcd72a789c1c31ed242fd8c1bcd9ea186a98ee8e5408a50e610edfef980d71"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:89b64cd8898a3a6f642db4eb7b26d1b28a497d4022eccd7717ca066823e9fb01"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win32.whl", hash = "sha256:6a93c5a0dfe8d34951e8a6f499a9479ffb9258123551fa007fc708ae2ac2bc5e"}, + {file = "SQLAlchemy-2.0.35-cp311-cp311-win_amd64.whl", hash = "sha256:c68fe3fcde03920c46697585620135b4ecfdfc1ed23e75cc2c2ae9f8502c10b8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eb60b026d8ad0c97917cb81d3662d0b39b8ff1335e3fabb24984c6acd0c900a2"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6921ee01caf375363be5e9ae70d08ce7ca9d7e0e8983183080211a062d299468"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cdf1a0dbe5ced887a9b127da4ffd7354e9c1a3b9bb330dce84df6b70ccb3a8d"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93a71c8601e823236ac0e5d087e4f397874a421017b3318fd92c0b14acf2b6db"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e04b622bb8a88f10e439084486f2f6349bf4d50605ac3e445869c7ea5cf0fa8c"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1b56961e2d31389aaadf4906d453859f35302b4eb818d34a26fab72596076bb8"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win32.whl", hash = "sha256:0f9f3f9a3763b9c4deb8c5d09c4cc52ffe49f9876af41cc1b2ad0138878453cf"}, + {file = "SQLAlchemy-2.0.35-cp312-cp312-win_amd64.whl", hash = "sha256:25b0f63e7fcc2a6290cb5f7f5b4fc4047843504983a28856ce9b35d8f7de03cc"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f021d334f2ca692523aaf7bbf7592ceff70c8594fad853416a81d66b35e3abf9"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c3f58cf91683102f2f0265c0db3bd3892e9eedabe059720492dbaa4f922da1"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:032d979ce77a6c2432653322ba4cbeabf5a6837f704d16fa38b5a05d8e21fa00"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:2e795c2f7d7249b75bb5f479b432a51b59041580d20599d4e112b5f2046437a3"}, + {file = 
"SQLAlchemy-2.0.35-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:cc32b2990fc34380ec2f6195f33a76b6cdaa9eecf09f0c9404b74fc120aef36f"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win32.whl", hash = "sha256:9509c4123491d0e63fb5e16199e09f8e262066e58903e84615c301dde8fa2e87"}, + {file = "SQLAlchemy-2.0.35-cp37-cp37m-win_amd64.whl", hash = "sha256:3655af10ebcc0f1e4e06c5900bb33e080d6a1fa4228f502121f28a3b1753cde5"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4c31943b61ed8fdd63dfd12ccc919f2bf95eefca133767db6fbbd15da62078ec"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a62dd5d7cc8626a3634208df458c5fe4f21200d96a74d122c83bc2015b333bc1"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0630774b0977804fba4b6bbea6852ab56c14965a2b0c7fc7282c5f7d90a1ae72"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d625eddf7efeba2abfd9c014a22c0f6b3796e0ffb48f5d5ab106568ef01ff5a"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ada603db10bb865bbe591939de854faf2c60f43c9b763e90f653224138f910d9"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c41411e192f8d3ea39ea70e0fae48762cd11a2244e03751a98bd3c0ca9a4e936"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win32.whl", hash = "sha256:d299797d75cd747e7797b1b41817111406b8b10a4f88b6e8fe5b5e59598b43b0"}, + {file = "SQLAlchemy-2.0.35-cp38-cp38-win_amd64.whl", hash = "sha256:0375a141e1c0878103eb3d719eb6d5aa444b490c96f3fedab8471c7f6ffe70ee"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccae5de2a0140d8be6838c331604f91d6fafd0735dbdcee1ac78fc8fbaba76b4"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a275a806f73e849e1c309ac11108ea1a14cd7058577aba962cd7190e27c9e3c"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:732e026240cdd1c1b2e3ac515c7a23820430ed94292ce33806a95869c46bd139"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890da8cd1941fa3dab28c5bac3b9da8502e7e366f895b3b8e500896f12f94d11"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0d8326269dbf944b9201911b0d9f3dc524d64779a07518199a58384c3d37a44"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b76d63495b0508ab9fc23f8152bac63205d2a704cd009a2b0722f4c8e0cba8e0"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win32.whl", hash = "sha256:69683e02e8a9de37f17985905a5eca18ad651bf592314b4d3d799029797d0eb3"}, + {file = "SQLAlchemy-2.0.35-cp39-cp39-win_amd64.whl", hash = "sha256:aee110e4ef3c528f3abbc3c2018c121e708938adeeff9006428dd7c8555e9b3f"}, + {file = "SQLAlchemy-2.0.35-py3-none-any.whl", hash = "sha256:2ab3f0336c0387662ce6221ad30ab3a5e6499aab01b9790879b6578fd9b8faa1"}, + {file = "sqlalchemy-2.0.35.tar.gz", hash = "sha256:e11d7ea4d24f0a262bccf9a7cd6284c976c5369dac21db237cff59586045ab9f"}, ] [package.dependencies] @@ -8746,13 +8888,13 @@ doc = ["sphinx"] [[package]] name = "starlette" -version = "0.38.5" +version = "0.38.6" description = "The little ASGI library that shines." 
optional = false python-versions = ">=3.8" files = [ - {file = "starlette-0.38.5-py3-none-any.whl", hash = "sha256:632f420a9d13e3ee2a6f18f437b0a9f1faecb0bc42e1942aa2ea0e379a4c4206"}, - {file = "starlette-0.38.5.tar.gz", hash = "sha256:04a92830a9b6eb1442c766199d62260c3d4dc9c4f9188360626b1e0273cb7077"}, + {file = "starlette-0.38.6-py3-none-any.whl", hash = "sha256:4517a1409e2e73ee4951214ba012052b9e16f60e90d73cfb06192c19203bbb05"}, + {file = "starlette-0.38.6.tar.gz", hash = "sha256:863a1588f5574e70a821dadefb41e4881ea451a47a3cd1b4df359d4ffefe5ead"}, ] [package.dependencies] @@ -8761,6 +8903,38 @@ anyio = ">=3.4.0,<5" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "storage3" +version = "0.8.1" +description = "Supabase Storage client for Python." +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "storage3-0.8.1-py3-none-any.whl", hash = "sha256:0b21205f43eaf0d1dd33bde6c6d0612f88524b7865f017d2ae9827e3f63d9cdc"}, + {file = "storage3-0.8.1.tar.gz", hash = "sha256:ea60b68b2221b3868ccc1a7f1294d57d0d9c51642cdc639d8115fe5d0adc8892"}, +] + +[package.dependencies] +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} +python-dateutil = ">=2.8.2,<3.0.0" +typing-extensions = ">=4.2.0,<5.0.0" + +[[package]] +name = "strenum" +version = "0.4.15" +description = "An Enum that inherits from str." +optional = false +python-versions = "*" +files = [ + {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"}, + {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"}, +] + +[package.extras] +docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"] +release = ["twine"] +test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] + [[package]] name = "strictyaml" version = "1.7.3" @@ -8775,15 +8949,49 @@ files = [ [package.dependencies] python-dateutil = ">=2.6.0" +[[package]] +name = "supabase" +version = "2.8.1" +description = "Supabase client for Python." 
+optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "supabase-2.8.1-py3-none-any.whl", hash = "sha256:dfa8bef89b54129093521d5bba2136ff765baf67cd76d8ad0aa4984d61a7815c"}, + {file = "supabase-2.8.1.tar.gz", hash = "sha256:711c70e6acd9e2ff48ca0dc0b1bb70c01c25378cc5189ec9f5ed9655b30bc41d"}, +] + +[package.dependencies] +gotrue = ">=2.7.0,<3.0.0" +httpx = ">=0.24,<0.28" +postgrest = ">=0.17.0,<0.18.0" +realtime = ">=2.0.0,<3.0.0" +storage3 = ">=0.8.0,<0.9.0" +supafunc = ">=0.6.0,<0.7.0" +typing-extensions = ">=4.12.2,<5.0.0" + +[[package]] +name = "supafunc" +version = "0.6.1" +description = "Library for Supabase Functions" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "supafunc-0.6.1-py3-none-any.whl", hash = "sha256:01aeeeb4bf429977664454a32c86418345140faf6d2e6eb0636d52e4547c5fbb"}, + {file = "supafunc-0.6.1.tar.gz", hash = "sha256:3c8761e3999336ccdb7550498a395fd08afc8469382f55ea56f7f640e5a909aa"}, +] + +[package.dependencies] +httpx = {version = ">=0.26,<0.28", extras = ["http2"]} + [[package]] name = "sympy" -version = "1.13.2" +version = "1.13.3" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.13.2-py3-none-any.whl", hash = "sha256:c51d75517712f1aed280d4ce58506a4a88d635d6b5dd48b39102a7ae1f3fcfe9"}, - {file = "sympy-1.13.2.tar.gz", hash = "sha256:401449d84d07be9d0c7a46a64bd54fe097667d5e7181bfe67ec777be9e01cb13"}, + {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, + {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, ] [package.dependencies] @@ -8849,13 +9057,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"] [[package]] name = "tencentcloud-sdk-python-common" -version = "3.0.1231" +version = "3.0.1247" description = "Tencent Cloud Common SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-common-3.0.1231.tar.gz", hash = "sha256:22aa281ca2eac511e1615b2953da7c4a0bec87cf93a05a7a15dbb61b23a215ee"}, - {file = "tencentcloud_sdk_python_common-3.0.1231-py2.py3-none-any.whl", hash = "sha256:bd0f7c4df4b156ec35c8731afa1f498043c7e1cd5d2feb595ee441fdb45a061e"}, + {file = "tencentcloud-sdk-python-common-3.0.1247.tar.gz", hash = "sha256:1467ac3eaaa5b5d299570ba781903debc4be32dbb3f0f39929a357531ab89170"}, + {file = "tencentcloud_sdk_python_common-3.0.1247-py2.py3-none-any.whl", hash = "sha256:9829d2299c85a2494d6d816247345e98abd2f936cd309e1f67847243f5235091"}, ] [package.dependencies] @@ -8863,17 +9071,17 @@ requests = ">=2.16.0" [[package]] name = "tencentcloud-sdk-python-hunyuan" -version = "3.0.1231" +version = "3.0.1247" description = "Tencent Cloud Hunyuan SDK for Python" optional = false python-versions = "*" files = [ - {file = "tencentcloud-sdk-python-hunyuan-3.0.1231.tar.gz", hash = "sha256:6da12f418f14305b3a6b7bb29b6d95bf4038a6b66b81c0e03b8dafc4f6df99ca"}, - {file = "tencentcloud_sdk_python_hunyuan-3.0.1231-py2.py3-none-any.whl", hash = "sha256:21ba28f69c34c15e20900be3f2c06376fcaf7e58265f939833c55631f2348792"}, + {file = "tencentcloud-sdk-python-hunyuan-3.0.1247.tar.gz", hash = "sha256:85b7332ec55f891a3b4d776e6b30ee2a44cc08c70b689615805aadff6e424fdd"}, + {file = "tencentcloud_sdk_python_hunyuan-3.0.1247-py2.py3-none-any.whl", hash = "sha256:69fdb886616e53ce02e848e5a1a8b36922db731457b07365f230ffb0aa472b5b"}, ] [package.dependencies] -tencentcloud-sdk-python-common = "3.0.1231" 
+tencentcloud-sdk-python-common = "3.0.1247" [[package]] name = "threadpoolctl" @@ -9126,13 +9334,13 @@ files = [ [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] @@ -9316,32 +9524,15 @@ typing-extensions = ">=3.7.4" [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] -[[package]] -name = "tzlocal" -version = "5.2" -description = "tzinfo object for the local timezone" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, - {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, -] - -[package.dependencies] -tzdata = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] - [[package]] name = "ujson" version = "5.10.0" @@ -9542,13 +9733,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" -version = "0.30.6" +version = "0.31.1" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.8" files = [ - {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"}, - {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"}, + {file = "uvicorn-0.31.1-py3-none-any.whl", hash = "sha256:adc42d9cac80cf3e51af97c1851648066841e7cfb6993a4ca8de29ac1548ed41"}, + {file = "uvicorn-0.31.1.tar.gz", hash = "sha256:f5167919867b161b7bcaf32646c6a94cdbd4c3aa2eb5c17d36bb9aa5cfd8c493"}, ] [package.dependencies] @@ -9623,19 +9814,20 @@ files = [ [[package]] name = "vanna" -version = "0.5.5" +version = "0.7.3" description = "Generate SQL queries from natural language" optional = false python-versions = ">=3.9" files = [ - {file = "vanna-0.5.5-py3-none-any.whl", hash = "sha256:e1a308b7127b9e98c2579c0e4178fc1475d891c498e4a0667cffa10df8891e73"}, - {file = "vanna-0.5.5.tar.gz", hash = "sha256:7d9bf188a635bb75e4f8db15f0e6dbe72a426784779485f087b2df0ce175e664"}, + {file = "vanna-0.7.3-py3-none-any.whl", hash = "sha256:82ba39e5d6c503d1c8cca60835ed401d20ec3a3da98d487f529901dcb30061d6"}, + {file = "vanna-0.7.3.tar.gz", hash = "sha256:4590dd94d2fe180b4efc7a83c867b73144ef58794018910dc226857cfb703077"}, ] [package.dependencies] -clickhouse_driver = {version = "*", optional = true, markers = "extra == \"clickhouse\""} +clickhouse_connect = {version = "*", optional = true, markers = "extra == \"clickhouse\""} db-dtypes = {version = "*", optional = true, markers = "extra == \"postgres\""} duckdb = {version = "*", optional = true, markers = "extra == \"duckdb\""} +flasgger = "*" flask = "*" flask-sock = "*" kaleido = "*" @@ -9649,17 +9841,20 @@ sqlparse = "*" tabulate = "*" [package.extras] -all = ["PyMySQL", "anthropic", "chromadb", "db-dtypes", "duckdb", "fastembed", "google-cloud-aiplatform", "google-cloud-bigquery", "google-generativeai", "httpx", "marqo", "mistralai", "ollama", "openai", "opensearch-dsl", "opensearch-py", "pinecone-client", "psycopg2-binary", "qdrant-client", "snowflake-connector-python", "transformers", "zhipuai"] +all = ["PyMySQL", "anthropic", "azure-common", "azure-identity", "azure-search-documents", "chromadb", "db-dtypes", "duckdb", "fastembed", "google-cloud-aiplatform", "google-cloud-bigquery", "google-generativeai", "httpx", "marqo", "mistralai (>=1.0.0)", "ollama", "openai", "opensearch-dsl", "opensearch-py", "pinecone-client", "psycopg2-binary", "pymilvus[model]", "qdrant-client", "qianfan", "snowflake-connector-python", "transformers", "weaviate-client", "zhipuai"] anthropic = ["anthropic"] +azuresearch = ["azure-common", "azure-identity", "azure-search-documents", "fastembed"] +bedrock = ["boto3", "botocore"] bigquery = ["google-cloud-bigquery"] chromadb = ["chromadb"] -clickhouse = ["clickhouse_driver"] +clickhouse = ["clickhouse_connect"] duckdb = ["duckdb"] gemini = ["google-generativeai"] google = ["google-cloud-aiplatform", "google-generativeai"] hf = ["transformers"] marqo = ["marqo"] -mistralai = ["mistralai"] +milvus = ["pymilvus[model]"] +mistralai = ["mistralai (>=1.0.0)"] mysql = ["PyMySQL"] ollama = ["httpx", "ollama"] openai = ["openai"] @@ -9667,9 +9862,11 @@ opensearch = ["opensearch-dsl", "opensearch-py"] pinecone = ["fastembed", "pinecone-client"] postgres = ["db-dtypes", "psycopg2-binary"] qdrant = ["fastembed", "qdrant-client"] +qianfan = ["qianfan"] snowflake = ["snowflake-connector-python"] test = ["tox"] vllm = ["vllm"] +weaviate = ["weaviate-client"] zhipuai = ["zhipuai"] [[package]] @@ -9683,14 
+9880,34 @@ files = [ {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, ] +[[package]] +name = "volcengine-compat" +version = "1.0.156" +description = "Be Compatible with the Volcengine SDK for Python, The version of package dependencies has been modified. like pycryptodome, pytz." +optional = false +python-versions = "*" +files = [ + {file = "volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5"}, + {file = "volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267"}, +] + +[package.dependencies] +google = ">=3.0.0" +protobuf = ">=3.18.3" +pycryptodome = ">=3.9.9" +pytz = ">=2020.5" +requests = ">=2.25.1" +retry = ">=0.9.2" +six = ">=1.0" + [[package]] name = "volcengine-python-sdk" -version = "1.0.101" +version = "1.0.103" description = "Volcengine SDK for Python" optional = false python-versions = "*" files = [ - {file = "volcengine-python-sdk-1.0.101.tar.gz", hash = "sha256:1b76e71a6dcf3d5be1b2c058e7d281359e6cca2cc920ffe2567d3115beea1d02"}, + {file = "volcengine-python-sdk-1.0.103.tar.gz", hash = "sha256:49fa8572802724972e1cb47a7e692b184b055f41b09099358c1a0fad1d146af5"}, ] [package.dependencies] @@ -9860,97 +10077,83 @@ test = ["websockets"] [[package]] name = "websockets" -version = "13.0.1" +version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" files = [ - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1841c9082a3ba4a05ea824cf6d99570a6a2d8849ef0db16e9c826acb28089e8f"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c5870b4a11b77e4caa3937142b650fbbc0914a3e07a0cf3131f35c0587489c1c"}, - {file = "websockets-13.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1d3d1f2eb79fe7b0fb02e599b2bf76a7619c79300fc55f0b5e2d382881d4f7f"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15c7d62ee071fa94a2fc52c2b472fed4af258d43f9030479d9c4a2de885fd543"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6724b554b70d6195ba19650fef5759ef11346f946c07dbbe390e039bcaa7cc3d"}, - {file = "websockets-13.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a952fa2ae57a42ba7951e6b2605e08a24801a4931b5644dfc68939e041bc7f"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17118647c0ea14796364299e942c330d72acc4b248e07e639d34b75067b3cdd8"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a11aae1de4c178fa653b07d90f2fb1a2ed31919a5ea2361a38760192e1858b"}, - {file = "websockets-13.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0617fd0b1d14309c7eab6ba5deae8a7179959861846cbc5cb528a7531c249448"}, - {file = "websockets-13.0.1-cp310-cp310-win32.whl", hash = "sha256:11f9976ecbc530248cf162e359a92f37b7b282de88d1d194f2167b5e7ad80ce3"}, - {file = "websockets-13.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c3c493d0e5141ec055a7d6809a28ac2b88d5b878bb22df8c621ebe79a61123d0"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:699ba9dd6a926f82a277063603fc8d586b89f4cb128efc353b749b641fcddda7"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:cf2fae6d85e5dc384bf846f8243ddaa9197f3a1a70044f59399af001fd1f51d4"}, - {file = "websockets-13.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52aed6ef21a0f1a2a5e310fb5c42d7555e9c5855476bbd7173c3aa3d8a0302f2"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb2b9a318542153674c6e377eb8cb9ca0fc011c04475110d3477862f15d29f0"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5df891c86fe68b2c38da55b7aea7095beca105933c697d719f3f45f4220a5e0e"}, - {file = "websockets-13.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2d146ff30d9dd2fcf917e5d147db037a5c573f0446c564f16f1f94cf87462"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b8ac5b46fd798bbbf2ac6620e0437c36a202b08e1f827832c4bf050da081b501"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:46af561eba6f9b0848b2c9d2427086cabadf14e0abdd9fde9d72d447df268418"}, - {file = "websockets-13.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b5a06d7f60bc2fc378a333978470dfc4e1415ee52f5f0fce4f7853eb10c1e9df"}, - {file = "websockets-13.0.1-cp311-cp311-win32.whl", hash = "sha256:556e70e4f69be1082e6ef26dcb70efcd08d1850f5d6c5f4f2bcb4e397e68f01f"}, - {file = "websockets-13.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:67494e95d6565bf395476e9d040037ff69c8b3fa356a886b21d8422ad86ae075"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f9c9e258e3d5efe199ec23903f5da0eeaad58cf6fccb3547b74fd4750e5ac47a"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6b41a1b3b561f1cba8321fb32987552a024a8f67f0d05f06fcf29f0090a1b956"}, - {file = "websockets-13.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f73e676a46b0fe9426612ce8caeca54c9073191a77c3e9d5c94697aef99296af"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f613289f4a94142f914aafad6c6c87903de78eae1e140fa769a7385fb232fdf"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f52504023b1480d458adf496dc1c9e9811df4ba4752f0bc1f89ae92f4f07d0c"}, - {file = "websockets-13.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:139add0f98206cb74109faf3611b7783ceafc928529c62b389917a037d4cfdf4"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47236c13be337ef36546004ce8c5580f4b1150d9538b27bf8a5ad8edf23ccfab"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c44ca9ade59b2e376612df34e837013e2b273e6c92d7ed6636d0556b6f4db93d"}, - {file = "websockets-13.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9bbc525f4be3e51b89b2a700f5746c2a6907d2e2ef4513a8daafc98198b92237"}, - {file = "websockets-13.0.1-cp312-cp312-win32.whl", hash = "sha256:3624fd8664f2577cf8de996db3250662e259bfbc870dd8ebdcf5d7c6ac0b5185"}, - {file = "websockets-13.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0513c727fb8adffa6d9bf4a4463b2bade0186cbd8c3604ae5540fae18a90cb99"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1ee4cc030a4bdab482a37462dbf3ffb7e09334d01dd37d1063be1136a0d825fa"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:dbb0b697cc0655719522406c059eae233abaa3243821cfdfab1215d02ac10231"}, - {file = "websockets-13.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:acbebec8cb3d4df6e2488fbf34702cbc37fc39ac7abf9449392cefb3305562e9"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63848cdb6fcc0bf09d4a155464c46c64ffdb5807ede4fb251da2c2692559ce75"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:872afa52a9f4c414d6955c365b6588bc4401272c629ff8321a55f44e3f62b553"}, - {file = "websockets-13.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05e70fec7c54aad4d71eae8e8cab50525e899791fc389ec6f77b95312e4e9920"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e82db3756ccb66266504f5a3de05ac6b32f287faacff72462612120074103329"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4e85f46ce287f5c52438bb3703d86162263afccf034a5ef13dbe4318e98d86e7"}, - {file = "websockets-13.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3fea72e4e6edb983908f0db373ae0732b275628901d909c382aae3b592589f2"}, - {file = "websockets-13.0.1-cp313-cp313-win32.whl", hash = "sha256:254ecf35572fca01a9f789a1d0f543898e222f7b69ecd7d5381d8d8047627bdb"}, - {file = "websockets-13.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:ca48914cdd9f2ccd94deab5bcb5ac98025a5ddce98881e5cce762854a5de330b"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b74593e9acf18ea5469c3edaa6b27fa7ecf97b30e9dabd5a94c4c940637ab96e"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:132511bfd42e77d152c919147078460c88a795af16b50e42a0bd14f0ad71ddd2"}, - {file = "websockets-13.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:165bedf13556f985a2aa064309baa01462aa79bf6112fbd068ae38993a0e1f1b"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e801ca2f448850685417d723ec70298feff3ce4ff687c6f20922c7474b4746ae"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30d3a1f041360f029765d8704eae606781e673e8918e6b2c792e0775de51352f"}, - {file = "websockets-13.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67648f5e50231b5a7f6d83b32f9c525e319f0ddc841be0de64f24928cd75a603"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4f0426d51c8f0926a4879390f53c7f5a855e42d68df95fff6032c82c888b5f36"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ef48e4137e8799998a343706531e656fdec6797b80efd029117edacb74b0a10a"}, - {file = "websockets-13.0.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:249aab278810bee585cd0d4de2f08cfd67eed4fc75bde623be163798ed4db2eb"}, - {file = "websockets-13.0.1-cp38-cp38-win32.whl", hash = "sha256:06c0a667e466fcb56a0886d924b5f29a7f0886199102f0a0e1c60a02a3751cb4"}, - {file = "websockets-13.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1f3cf6d6ec1142412d4535adabc6bd72a63f5f148c43fe559f06298bc21953c9"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1fa082ea38d5de51dd409434edc27c0dcbd5fed2b09b9be982deb6f0508d25bc"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4a365bcb7be554e6e1f9f3ed64016e67e2fa03d7b027a33e436aecf194febb63"}, - {file = "websockets-13.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:10a0dc7242215d794fb1918f69c6bb235f1f627aaf19e77f05336d147fce7c37"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59197afd478545b1f73367620407b0083303569c5f2d043afe5363676f2697c9"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d20516990d8ad557b5abeb48127b8b779b0b7e6771a265fa3e91767596d7d97"}, - {file = "websockets-13.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1a2e272d067030048e1fe41aa1ec8cfbbaabce733b3d634304fa2b19e5c897f"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad327ac80ba7ee61da85383ca8822ff808ab5ada0e4a030d66703cc025b021c4"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:518f90e6dd089d34eaade01101fd8a990921c3ba18ebbe9b0165b46ebff947f0"}, - {file = "websockets-13.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68264802399aed6fe9652e89761031acc734fc4c653137a5911c2bfa995d6d6d"}, - {file = "websockets-13.0.1-cp39-cp39-win32.whl", hash = "sha256:a5dc0c42ded1557cc7c3f0240b24129aefbad88af4f09346164349391dea8e58"}, - {file = "websockets-13.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b448a0690ef43db5ef31b3a0d9aea79043882b4632cfc3eaab20105edecf6097"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:faef9ec6354fe4f9a2c0bbb52fb1ff852effc897e2a4501e25eb3a47cb0a4f89"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:03d3f9ba172e0a53e37fa4e636b86cc60c3ab2cfee4935e66ed1d7acaa4625ad"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d450f5a7a35662a9b91a64aefa852f0c0308ee256122f5218a42f1d13577d71e"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f55b36d17ac50aa8a171b771e15fbe1561217510c8768af3d546f56c7576cdc"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b9c006cac63772b31abbcd3e3abb6228233eec966bf062e89e7fa7ae0b7333"}, - {file = "websockets-13.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b79915a1179a91f6c5f04ece1e592e2e8a6bd245a0e45d12fd56b2b59e559a32"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f40de079779acbcdbb6ed4c65af9f018f8b77c5ec4e17a4b737c05c2db554491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80e4ba642fc87fa532bac07e5ed7e19d56940b6af6a8c61d4429be48718a380f"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a02b0161c43cc9e0232711eff846569fad6ec836a7acab16b3cf97b2344c060"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aa74a45d4cdc028561a7d6ab3272c8b3018e23723100b12e58be9dfa5a24491"}, - {file = "websockets-13.0.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00fd961943b6c10ee6f0b1130753e50ac5dcd906130dcd77b0003c3ab797d026"}, - {file = 
"websockets-13.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d93572720d781331fb10d3da9ca1067817d84ad1e7c31466e9f5e59965618096"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:71e6e5a3a3728886caee9ab8752e8113670936a193284be9d6ad2176a137f376"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c4a6343e3b0714e80da0b0893543bf9a5b5fa71b846ae640e56e9abc6fbc4c83"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a678532018e435396e37422a95e3ab87f75028ac79570ad11f5bf23cd2a7d8c"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6716c087e4aa0b9260c4e579bb82e068f84faddb9bfba9906cb87726fa2e870"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e33505534f3f673270dd67f81e73550b11de5b538c56fe04435d63c02c3f26b5"}, - {file = "websockets-13.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:acab3539a027a85d568c2573291e864333ec9d912675107d6efceb7e2be5d980"}, - {file = "websockets-13.0.1-py3-none-any.whl", hash = "sha256:b80f0c51681c517604152eb6a572f5a9378f877763231fddb883ba2f968e8817"}, - {file = "websockets-13.0.1.tar.gz", hash = "sha256:4d6ece65099411cfd9a48d13701d7438d9c34f479046b34c50ff60bb8834e43e"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] @@ -10139,13 +10342,13 @@ files = [ [[package]] name = "xmltodict" -version = "0.13.0" +version = "0.14.1" description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.4" +python-versions = ">=3.6" files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, + {file = "xmltodict-0.14.1-py2.py3-none-any.whl", hash = "sha256:3ef4a7b71c08f19047fcbea572e1d7f4207ab269da1565b5d40e9823d3894e63"}, + {file = "xmltodict-0.14.1.tar.gz", hash = "sha256:338c8431e4fc554517651972d62f06958718f6262b04316917008e8fd677a6b0"}, ] [[package]] @@ -10255,13 +10458,13 @@ multidict = ">=4.0" [[package]] name = "yfinance" -version = "0.2.43" +version = "0.2.44" description = "Download market data from Yahoo! 
Finance API" optional = false python-versions = "*" files = [ - {file = "yfinance-0.2.43-py2.py3-none-any.whl", hash = "sha256:11b4f5515b17450bd3bdcdc26b299aeeaea7ff9cb63d0fa0a865f460c0c7618f"}, - {file = "yfinance-0.2.43.tar.gz", hash = "sha256:32404597f325a2a2c2708aceb8d552088dd26891ac0e6018f6c5f3f2f61055f0"}, + {file = "yfinance-0.2.44-py2.py3-none-any.whl", hash = "sha256:fdc18791662f286539f7a08dccd7e8191b1ca509814f7b0faac264623bebe8a8"}, + {file = "yfinance-0.2.44.tar.gz", hash = "sha256:532ad1644ee9cf4024ec0d9cade0cc073664ec0d140cc6c22a0cce8a9118b523"}, ] [package.dependencies] @@ -10337,54 +10540,57 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.0.3" +version = "7.1.0" description = "Interfaces for Python" optional = false python-versions = ">=3.8" files = [ - {file = "zope.interface-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b"}, - {file = "zope.interface-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58"}, - {file = "zope.interface-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493"}, - {file = "zope.interface-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05"}, - {file = "zope.interface-7.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738"}, - {file = "zope.interface-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e"}, - {file = "zope.interface-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8"}, - {file = "zope.interface-7.0.3.tar.gz", hash = "sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1"}, + {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, + {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, + {file = 
"zope.interface-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91b6c30689cfd87c8f264acb2fc16ad6b3c72caba2aec1bf189314cf1a84ca33"}, + {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b6a4924f5bad9fe21d99f66a07da60d75696a136162427951ec3cb223a5570d"}, + {file = "zope.interface-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a3c00b35f6170be5454b45abe2719ea65919a2f09e8a6e7b1362312a872cd3"}, + {file = "zope.interface-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b936d61dbe29572fd2cfe13e30b925e5383bed1aba867692670f5a2a2eb7b4e9"}, + {file = "zope.interface-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ac20581fc6cd7c754f6dff0ae06fedb060fa0e9ea6309d8be8b2701d9ea51c4"}, + {file = "zope.interface-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:848b6fa92d7c8143646e64124ed46818a0049a24ecc517958c520081fd147685"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec1ef1fdb6f014d5886b97e52b16d0f852364f447d2ab0f0c6027765777b6667"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bcff5c09d0215f42ba64b49205a278e44413d9bf9fa688fd9e42bfe472b5f4f"}, + {file = "zope.interface-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07add15de0cc7e69917f7d286b64d54125c950aeb43efed7a5ea7172f000fbc1"}, + {file = "zope.interface-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:9940d5bc441f887c5f375ec62bcf7e7e495a2d5b1da97de1184a88fb567f06af"}, + {file = "zope.interface-7.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f245d039f72e6f802902375755846f5de1ee1e14c3e8736c078565599bcab621"}, + {file = "zope.interface-7.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6159e767d224d8f18deff634a1d3722e68d27488c357f62ebeb5f3e2f5288b1f"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e956b1fd7f3448dd5e00f273072e73e50dfafcb35e4227e6d5af208075593c9"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff115ef91c0eeac69cd92daeba36a9d8e14daee445b504eeea2b1c0b55821984"}, + {file = "zope.interface-7.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec001798ab62c3fc5447162bf48496ae9fba02edc295a9e10a0b0c639a6452e"}, + {file = "zope.interface-7.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:124149e2d42067b9c6597f4dafdc7a0983d0163868f897b7bb5dc850b14f9a87"}, + {file = "zope.interface-7.1.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:9733a9a0f94ef53d7aa64661811b20875b5bc6039034c6e42fb9732170130573"}, + {file = "zope.interface-7.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5fcf379b875c610b5a41bc8a891841533f98de0520287d7f85e25386cd10d3e9"}, + {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0a45b5af9f72c805ee668d1479480ca85169312211bed6ed18c343e39307d5f"}, + {file = "zope.interface-7.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af4a12b459a273b0b34679a5c3dc5e34c1847c3dd14a628aa0668e19e638ea2"}, + {file = 
"zope.interface-7.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a735f82d2e3ed47ca01a20dfc4c779b966b16352650a8036ab3955aad151ed8a"}, + {file = "zope.interface-7.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:5501e772aff595e3c54266bc1bfc5858e8f38974ce413a8f1044aae0f32a83a3"}, + {file = "zope.interface-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec59fe53db7d32abb96c6d4efeed84aab4a7c38c62d7a901a9b20c09dd936e7a"}, + {file = "zope.interface-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e53c291debef523b09e1fe3dffe5f35dde164f1c603d77f770b88a1da34b7ed6"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:711eebc77f2092c6a8b304bad0b81a6ce3cf5490b25574e7309fbc07d881e3af"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a00ead2e24c76436e1b457a5132d87f83858330f6c923640b7ef82d668525d1"}, + {file = "zope.interface-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e28ea0bc4b084fc93a483877653a033062435317082cdc6388dec3438309faf"}, + {file = "zope.interface-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:27cfb5205d68b12682b6e55ab8424662d96e8ead19550aad0796b08dd2c9a45e"}, + {file = "zope.interface-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e3e48f3dea21c147e1b10c132016cb79af1159facca9736d231694ef5a740a8"}, + {file = "zope.interface-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a99240b1d02dc469f6afbe7da1bf617645e60290c272968f4e53feec18d7dce8"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8a318162123eddbdf22fcc7b751288ce52e4ad096d3766ff1799244352449d"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7b25db127db3e6b597c5f74af60309c4ad65acd826f89609662f0dc33a54728"}, + {file = "zope.interface-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a29ac607e970b5576547f0e3589ec156e04de17af42839eedcf478450687317"}, + {file = "zope.interface-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a14c9decf0eb61e0892631271d500c1e306c7b6901c998c7035e194d9150fdd1"}, + {file = "zope_interface-7.1.0.tar.gz", hash = "sha256:3f005869a1a05e368965adb2075f97f8ee9a26c61898a9e52a9764d93774f237"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] [[package]] name = "zstandard" @@ -10501,4 +10707,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "69b42bb1ff033f14e199fee8335356275099421d72bbd7037b7a991ea65cae08" +content-hash = "edb5e3b0d50e84a239224cc77f3f615fdbdd6b504bce5b1075b29363f3054957" diff --git a/api/pyproject.toml b/api/pyproject.toml index f004865d5f..dff74750f0 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -28,6 +28,7 @@ select = [ "PLR0402", # manual-from-import "PLR1711", # useless-return "PLR1714", # repeated-equality-comparison + 
"RUF013", # implicit-optional "RUF019", # unnecessary-key-check "RUF100", # unused-noqa "RUF101", # redirected-noqa @@ -44,8 +45,6 @@ ignore = [ "E721", # type-comparison "E722", # bare-except "E731", # lambda-assignment - "F403", # undefined-local-with-import-star - "F405", # undefined-local-with-import-star-usage "F821", # undefined-name "F841", # unused-variable "FURB113", # repeated-append @@ -74,8 +73,6 @@ ignore = [ [tool.ruff.lint.per-file-ignores] "app.py" = [ - "F401", # unused-import - "F811", # redefined-while-unused ] "__init__.py" = [ "F401", # unused-import @@ -88,65 +85,40 @@ ignore = [ "N803", # invalid-argument-name ] "tests/*" = [ - "F401", # unused-import "F811", # redefined-while-unused ] +[tool.ruff.lint.pyflakes] +allowed-unused-imports=[ + "_pytest.monkeypatch", + "tests.integration_tests", +] + [tool.ruff.format] exclude = [ ] -[tool.pytest_env] -OPENAI_API_KEY = "sk-IamNotARealKeyJustForMockTestKawaiiiiiiiiii" -UPSTAGE_API_KEY = "up-aaaaaaaaaaaaaaaaaaaa" -FIREWORKS_API_KEY = "fw_aaaaaaaaaaaaaaaaaaaa" -NOMIC_API_KEY = "nk-aaaaaaaaaaaaaaaaaaaa" -AZURE_OPENAI_API_BASE = "https://difyai-openai.openai.azure.com" -AZURE_OPENAI_API_KEY = "xxxxb1707exxxxxxxxxxaaxxxxxf94" -ANTHROPIC_API_KEY = "sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz" -CHATGLM_API_BASE = "http://a.abc.com:11451" -XINFERENCE_SERVER_URL = "http://a.abc.com:11451" -XINFERENCE_GENERATION_MODEL_UID = "generate" -XINFERENCE_CHAT_MODEL_UID = "chat" -XINFERENCE_EMBEDDINGS_MODEL_UID = "embedding" -XINFERENCE_RERANK_MODEL_UID = "rerank" -GOOGLE_API_KEY = "abcdefghijklmnopqrstuvwxyz" -HUGGINGFACE_API_KEY = "hf-awuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwu" -HUGGINGFACE_TEXT_GEN_ENDPOINT_URL = "a" -HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL = "b" -HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL = "c" -MOCK_SWITCH = "true" -CODE_MAX_STRING_LENGTH = "80000" -CODE_EXECUTION_ENDPOINT = "http://127.0.0.1:8194" -CODE_EXECUTION_API_KEY = "dify-sandbox" -FIRECRAWL_API_KEY = "fc-" -TEI_EMBEDDING_SERVER_URL = "http://a.abc.com:11451" -TEI_RERANK_SERVER_URL = "http://a.abc.com:11451" -MIXEDBREAD_API_KEY = "mk-aaaaaaaaaaaaaaaaaaaa" - [tool.poetry] name = "dify-api" package-mode = false ############################################################ -# Main dependencies +# [ Main ] Dependency group ############################################################ [tool.poetry.dependencies] anthropic = "~0.23.1" authlib = "1.3.1" +azure-ai-inference = "~1.0.0b3" +azure-ai-ml = "~1.20.0" azure-identity = "1.16.1" -azure-storage-blob = "12.13.0" beautifulsoup4 = "4.12.2" boto3 = "1.35.17" -sagemaker = "2.231.0" bs4 = "~0.0.1" cachetools = "~5.3.0" celery = "~5.3.6" chardet = "~5.1.0" cohere = "~5.2.4" -cos-python-sdk-v5 = "1.9.30" -esdk-obs-python = "3.24.6.1" dashscope = { version = "~1.17.0", extras = ["tokenizer"] } flask = "~3.0.1" flask-compress = "~1.14" @@ -154,7 +126,7 @@ flask-cors = "~4.0.0" flask-login = "~0.6.3" flask-migrate = "~4.0.5" flask-restful = "~0.3.10" -Flask-SQLAlchemy = "~3.1.1" +flask-sqlalchemy = "~3.1.1" gevent = "~23.9.1" gmpy2 = "~2.2.1" google-ai-generativelanguage = "0.6.9" @@ -163,22 +135,22 @@ google-api-python-client = "2.90.0" google-auth = "2.29.0" google-auth-httplib2 = "0.2.0" google-cloud-aiplatform = "1.49.0" -google-cloud-storage = "2.16.0" google-generativeai = "0.8.1" googleapis-common-protos = "1.63.0" gunicorn = "~22.0.0" httpx = { version = "~0.27.0", extras = ["socks"] } huggingface-hub = "~0.16.4" jieba = "0.42.1" -langfuse = "^2.48.0" -langsmith = "^0.1.77" +langfuse = "~2.51.3" +langsmith 
= "~0.1.77" mailchimp-transactional = "~1.0.50" markdown = "~3.5.1" -novita-client = "^0.5.7" +nomic = "~3.1.2" +novita-client = "~0.5.7" numpy = "~1.26.4" +oci = "~2.135.1" openai = "~1.29.0" openpyxl = "~3.1.5" -oss2 = "2.18.5" pandas = { version = "~2.2.2", extras = ["performance", "excel"] } psycopg2-binary = "~2.9.6" pycryptodome = "3.19.1" @@ -195,7 +167,8 @@ readabilipy = "0.2.0" redis = { version = "~5.0.3", extras = ["hiredis"] } replicate = "~0.22.0" resend = "~0.7.0" -scikit-learn = "^1.5.1" +sagemaker = "2.231.0" +scikit-learn = "~1.5.1" sentry-sdk = { version = "~1.44.1", extras = ["flask"] } sqlalchemy = "~2.0.29" tencentcloud-sdk-python-hunyuan = "~3.0.1158" @@ -203,6 +176,8 @@ tiktoken = "~0.7.0" tokenizers = "~0.15.0" transformers = "~4.35.0" unstructured = { version = "~0.10.27", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } +validators = "0.21.0" +volcengine-python-sdk = {extras = ["ark"], version = "~1.0.98"} websocket-client = "~1.7.0" werkzeug = "~3.0.1" xinference-client = "0.15.2" @@ -211,65 +186,75 @@ zhipuai = "1.0.7" # Before adding new dependency, consider place it in alphabet order (a-z) and suitable group. ############################################################ +# [ Indirect ] dependency group # Related transparent dependencies with pinned version # required by main implementations ############################################################ -azure-ai-ml = "^1.19.0" -azure-ai-inference = "^1.0.0b3" -volcengine-python-sdk = {extras = ["ark"], version = "^1.0.98"} -oci = "^2.133.0" -tos = "^2.7.1" -nomic = "^3.1.2" -[tool.poetry.group.indriect.dependencies] +[tool.poetry.group.indirect.dependencies] kaleido = "0.2.1" rank-bm25 = "~0.2.2" safetensors = "~0.4.3" ############################################################ -# Tool dependencies required by tool implementations +# [ Tools ] dependency group ############################################################ - -[tool.poetry.group.tool.dependencies] +[tool.poetry.group.tools.dependencies] arxiv = "2.1.0" cloudscraper = "1.2.71" +duckduckgo-search = "~6.3.0" +jsonpath-ng = "1.6.1" matplotlib = "~3.8.2" newspaper3k = "0.2.8" -duckduckgo-search = "^6.2.6" -jsonpath-ng = "1.6.1" +nltk = "3.8.1" numexpr = "~2.9.0" -opensearch-py = "2.4.0" qrcode = "~7.4.2" twilio = "~9.0.4" -vanna = { version = "0.5.5", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } +vanna = { version = "0.7.3", extras = ["postgres", "mysql", "clickhouse", "duckdb"] } wikipedia = "1.4.0" yfinance = "~0.2.40" -nltk = "3.8.1" -############################################################ -# VDB dependencies required by vector store clients -############################################################ +############################################################ +# [ Storage ] dependency group +# Required for storage clients +############################################################ +[tool.poetry.group.storage.dependencies] +azure-storage-blob = "12.13.0" +bce-python-sdk = "~0.9.23" +cos-python-sdk-v5 = "1.9.30" +esdk-obs-python = "3.24.6.1" +google-cloud-storage = "2.16.0" +oss2 = "2.18.5" +supabase = "~2.8.1" +tos = "~2.7.1" + +############################################################ +# [ VDB ] dependency group +# Required by vector store clients +############################################################ [tool.poetry.group.vdb.dependencies] alibabacloud_gpdb20160503 = "~3.8.0" alibabacloud_tea_openapi = "~0.3.9" chromadb = "0.5.1" clickhouse-connect = "~0.7.16" -elasticsearch = "~8.15.1" +elasticsearch = "8.14.0" 
+opensearch-py = "2.4.0" oracledb = "~2.2.1" pgvecto-rs = { version = "~0.2.1", extras = ['sqlalchemy'] } pgvector = "0.2.5" pymilvus = "~2.4.4" +pymochow = "1.3.1" +qdrant-client = "1.7.3" tcvectordb = "1.3.2" tidb-vector = "0.0.9" -qdrant-client = "1.7.3" +volcengine-compat = "~1.0.156" weaviate-client = "~3.21.0" ############################################################ -# Dev dependencies for running tests +# [ Dev ] dependency group +# Required for development and running tests ############################################################ - [tool.poetry.group.dev] optional = true - [tool.poetry.group.dev.dependencies] coverage = "~7.2.4" pytest = "~8.3.2" @@ -278,12 +263,11 @@ pytest-env = "~1.1.3" pytest-mock = "~3.14.0" ############################################################ -# Lint dependencies for code style linting +# [ Lint ] dependency group +# Required for code style linting ############################################################ - [tool.poetry.group.lint] optional = true - [tool.poetry.group.lint.dependencies] dotenv-linter = "~0.5.0" -ruff = "~0.6.5" +ruff = "~0.6.9" diff --git a/api/pytest.ini b/api/pytest.ini new file mode 100644 index 0000000000..dcca08e2e5 --- /dev/null +++ b/api/pytest.ini @@ -0,0 +1,29 @@ +[pytest] +env = + ANTHROPIC_API_KEY = sk-ant-api11-IamNotARealKeyJustForMockTestKawaiiiiiiiiii-NotBaka-ASkksz + AZURE_OPENAI_API_BASE = https://difyai-openai.openai.azure.com + AZURE_OPENAI_API_KEY = xxxxb1707exxxxxxxxxxaaxxxxxf94 + CHATGLM_API_BASE = http://a.abc.com:11451 + CODE_EXECUTION_API_KEY = dify-sandbox + CODE_EXECUTION_ENDPOINT = http://127.0.0.1:8194 + CODE_MAX_STRING_LENGTH = 80000 + FIRECRAWL_API_KEY = fc- + FIREWORKS_API_KEY = fw_aaaaaaaaaaaaaaaaaaaa + GOOGLE_API_KEY = abcdefghijklmnopqrstuvwxyz + HUGGINGFACE_API_KEY = hf-awuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwuwu + HUGGINGFACE_EMBEDDINGS_ENDPOINT_URL = c + HUGGINGFACE_TEXT2TEXT_GEN_ENDPOINT_URL = b + HUGGINGFACE_TEXT_GEN_ENDPOINT_URL = a + MIXEDBREAD_API_KEY = mk-aaaaaaaaaaaaaaaaaaaa + MOCK_SWITCH = true + NOMIC_API_KEY = nk-aaaaaaaaaaaaaaaaaaaa + OPENAI_API_KEY = sk-IamNotARealKeyJustForMockTestKawaiiiiiiiiii + TEI_EMBEDDING_SERVER_URL = http://a.abc.com:11451 + TEI_RERANK_SERVER_URL = http://a.abc.com:11451 + UPSTAGE_API_KEY = up-aaaaaaaaaaaaaaaaaaaa + VOYAGE_API_KEY = va-aaaaaaaaaaaaaaaaaaaa + XINFERENCE_CHAT_MODEL_UID = chat + XINFERENCE_EMBEDDINGS_MODEL_UID = embedding + XINFERENCE_GENERATION_MODEL_UID = generate + XINFERENCE_RERANK_MODEL_UID = rerank + XINFERENCE_SERVER_URL = http://a.abc.com:11451 diff --git a/api/schedule/clean_unused_messages_task.py b/api/schedule/clean_unused_messages_task.py new file mode 100644 index 0000000000..85e6a58a0e --- /dev/null +++ b/api/schedule/clean_unused_messages_task.py @@ -0,0 +1,92 @@ +import datetime +import time + +import click +from sqlalchemy import func +from werkzeug.exceptions import NotFound + +import app +from configs import dify_config +from core.rag.index_processor.index_processor_factory import IndexProcessorFactory +from extensions.ext_database import db +from models.dataset import Dataset, DatasetQuery, Document + + +@app.celery.task(queue="dataset") +def clean_unused_message_task(): + click.echo(click.style("Start clean unused messages .", fg="green")) + clean_days = int(dify_config.CLEAN_DAY_SETTING) + start_at = time.perf_counter() + thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days) + page = 1 + while True: + try: + # Subquery for counting new documents + document_subquery_new = ( + 
                db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
+                .filter(
+                    Document.indexing_status == "completed",
+                    Document.enabled == True,
+                    Document.archived == False,
+                    Document.updated_at > thirty_days_ago,
+                )
+                .group_by(Document.dataset_id)
+                .subquery()
+            )
+
+            # Subquery for counting old documents
+            document_subquery_old = (
+                db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
+                .filter(
+                    Document.indexing_status == "completed",
+                    Document.enabled == True,
+                    Document.archived == False,
+                    Document.updated_at < thirty_days_ago,
+                )
+                .group_by(Document.dataset_id)
+                .subquery()
+            )
+
+            # Main query with join and filter
+            datasets = (
+                db.session.query(Dataset)
+                .outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
+                .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
+                .filter(
+                    Dataset.created_at < thirty_days_ago,
+                    func.coalesce(document_subquery_new.c.document_count, 0) == 0,
+                    func.coalesce(document_subquery_old.c.document_count, 0) > 0,
+                )
+                .order_by(Dataset.created_at.desc())
+                .paginate(page=page, per_page=50)
+            )
+
+        except NotFound:
+            break
+        if datasets.items is None or len(datasets.items) == 0:
+            break
+        page += 1
+        for dataset in datasets:
+            dataset_query = (
+                db.session.query(DatasetQuery)
+                .filter(DatasetQuery.created_at > thirty_days_ago, DatasetQuery.dataset_id == dataset.id)
+                .all()
+            )
+            if not dataset_query or len(dataset_query) == 0:
+                try:
+                    # remove index
+                    index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
+                    index_processor.clean(dataset, None)
+
+                    # update document
+                    update_params = {Document.enabled: False}
+
+                    Document.query.filter_by(dataset_id=dataset.id).update(update_params)
+                    db.session.commit()
+                    click.echo(click.style("Cleaned unused dataset {} from db successfully!".format(dataset.id), fg="green"))
+                except Exception as e:
+                    click.echo(
+                        click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
+                    )
+    end_at = time.perf_counter()
+    click.echo(click.style("Cleaned unused datasets from db successfully. Latency: {}".format(end_at - start_at), fg="green"))
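The task above is registered on the "dataset" Celery queue, but this hunk does not show how it is scheduled. As a hedged sketch of how such a job is typically wired into Celery beat — assuming Celery's default <module>.<function> task name and a daily off-peak cadence, neither of which is part of this change:

    # Hypothetical beat entry; `app.celery` is the Celery app used by the decorator above.
    from celery.schedules import crontab

    app.celery.conf.beat_schedule = {
        "clean-unused-messages": {
            # Celery's default task name is the module path plus the function name.
            "task": "schedule.clean_unused_messages_task.clean_unused_message_task",
            "schedule": crontab(hour=3, minute=0),  # assumed: run once a day, off-peak
        },
    }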
diff --git a/api/services/account_service.py b/api/services/account_service.py
index 66ff5d2b7c..eda6011aef 100644
--- a/api/services/account_service.py
+++ b/api/services/account_service.py
@@ -1,4 +1,5 @@
 import base64
+import json
 import logging
 import secrets
 import uuid
@@ -6,18 +7,29 @@ from datetime import datetime, timedelta, timezone
 from hashlib import sha256
 from typing import Any, Optional
 
+from pydantic import BaseModel
 from sqlalchemy import func
 from werkzeug.exceptions import Unauthorized
 
 from configs import dify_config
 from constants.languages import language_timezone_mapping, languages
 from events.tenant_event import tenant_was_created
+from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from libs.helper import RateLimiter, TokenManager
 from libs.passport import PassportService
 from libs.password import compare_password, hash_password, valid_password
 from libs.rsa import generate_key_pair
-from models.account import *
+from models.account import (
+    Account,
+    AccountIntegrate,
+    AccountStatus,
+    Tenant,
+    TenantAccountJoin,
+    TenantAccountJoinRole,
+    TenantAccountRole,
+    TenantStatus,
+)
 from models.model import DifySetup
 from services.errors.account import (
     AccountAlreadyInTenantError,
@@ -38,9 +50,39 @@ from tasks.mail_invite_member_task import send_invite_member_mail_task
 from tasks.mail_reset_password_task import send_reset_password_mail_task
 
 
+class TokenPair(BaseModel):
+    access_token: str
+    refresh_token: str
+
+
+REFRESH_TOKEN_PREFIX = "refresh_token:"
+ACCOUNT_REFRESH_TOKEN_PREFIX = "account_refresh_token:"
+REFRESH_TOKEN_EXPIRY = timedelta(days=30)
+
+
 class AccountService:
     reset_password_rate_limiter = RateLimiter(prefix="reset_password_rate_limit", max_attempts=5, time_window=60 * 60)
 
+    @staticmethod
+    def _get_refresh_token_key(refresh_token: str) -> str:
+        return f"{REFRESH_TOKEN_PREFIX}{refresh_token}"
+
+    @staticmethod
+    def _get_account_refresh_token_key(account_id: str) -> str:
+        return f"{ACCOUNT_REFRESH_TOKEN_PREFIX}{account_id}"
+
+    @staticmethod
+    def _store_refresh_token(refresh_token: str, account_id: str) -> None:
+        redis_client.setex(AccountService._get_refresh_token_key(refresh_token), REFRESH_TOKEN_EXPIRY, account_id)
+        redis_client.setex(
+            AccountService._get_account_refresh_token_key(account_id), REFRESH_TOKEN_EXPIRY, refresh_token
+        )
+
+    @staticmethod
+    def _delete_refresh_token(refresh_token: str, account_id: str) -> None:
+        redis_client.delete(AccountService._get_refresh_token_key(refresh_token))
+        redis_client.delete(AccountService._get_account_refresh_token_key(account_id))
+
     @staticmethod
     def load_user(user_id: str) -> None | Account:
         account = Account.query.filter_by(id=user_id).first()
@@ -50,9 +92,7 @@
         if account.status in {AccountStatus.BANNED.value, AccountStatus.CLOSED.value}:
             raise Unauthorized("Account is banned or closed.")
 
-        current_tenant: TenantAccountJoin = TenantAccountJoin.query.filter_by(
-            account_id=account.id, current=True
-        ).first()
+        current_tenant = TenantAccountJoin.query.filter_by(account_id=account.id, current=True).first()
         if current_tenant:
             account.current_tenant_id = current_tenant.tenant_id
         else:
@@ -73,10 +113,12 @@
         return account
 
     @staticmethod
-    def get_account_jwt_token(account, *, exp: timedelta = timedelta(days=30)):
+    def get_account_jwt_token(account: Account) -> str:
+        exp_dt = datetime.now(timezone.utc) + timedelta(minutes=dify_config.ACCESS_TOKEN_EXPIRE_MINUTES)
+        exp = int(exp_dt.timestamp())
         payload = {
             "user_id": account.id,
-            "exp": datetime.now(timezone.utc).replace(tzinfo=None) + exp,
+            "exp": exp,
             "iss": dify_config.EDITION,
             "sub": "Console API Passport",
         }
@@ -202,7 +244,7 @@
         return account
 
     @staticmethod
-    def update_last_login(account: Account, *, ip_address: str) -> None:
+    def update_login_info(account: Account, *, ip_address: str) -> None:
        """Update last login time and ip"""
         account.last_login_at = datetime.now(timezone.utc).replace(tzinfo=None)
         account.last_login_ip = ip_address
@@ -210,22 +252,45 @@
         db.session.commit()
 
     @staticmethod
-    def login(account: Account, *, ip_address: Optional[str] = None):
+    def login(account: Account, *, ip_address: Optional[str] = None) -> TokenPair:
         if ip_address:
-            AccountService.update_last_login(account, ip_address=ip_address)
-        exp = timedelta(days=30)
-        token = AccountService.get_account_jwt_token(account, exp=exp)
-        redis_client.set(_get_login_cache_key(account_id=account.id, token=token), "1", ex=int(exp.total_seconds()))
-        return token
+            AccountService.update_login_info(account=account, ip_address=ip_address)
+
+        access_token = AccountService.get_account_jwt_token(account=account)
+        refresh_token = _generate_refresh_token()
+
+        AccountService._store_refresh_token(refresh_token, account.id)
+
+        return TokenPair(access_token=access_token, refresh_token=refresh_token)
 
     @staticmethod
-    def logout(*, account: Account, token: str):
-        redis_client.delete(_get_login_cache_key(account_id=account.id, token=token))
+    def logout(*, account: Account) -> None:
+        refresh_token = redis_client.get(AccountService._get_account_refresh_token_key(account.id))
+        if refresh_token:
+            AccountService._delete_refresh_token(refresh_token.decode("utf-8"), account.id)
 
     @staticmethod
-    def load_logged_in_account(*, account_id: str, token: str):
-        if not redis_client.get(_get_login_cache_key(account_id=account_id, token=token)):
-            return None
+    def refresh_token(refresh_token: str) -> TokenPair:
+        # Verify the refresh token
+        account_id = redis_client.get(AccountService._get_refresh_token_key(refresh_token))
+        if not account_id:
+            raise ValueError("Invalid refresh token")
+
+        account = AccountService.load_user(account_id.decode("utf-8"))
+        if not account:
+            raise ValueError("Invalid account")
+
+        # Generate new access token and refresh token
+        new_access_token = AccountService.get_account_jwt_token(account)
+        new_refresh_token = _generate_refresh_token()
+
+        AccountService._delete_refresh_token(refresh_token, account.id)
+        AccountService._store_refresh_token(new_refresh_token, account.id)
+
+        return TokenPair(access_token=new_access_token, refresh_token=new_refresh_token)
+
+    @staticmethod
+    def load_logged_in_account(*, account_id: str):
         return AccountService.load_user(account_id)
 
     @classmethod
@@ -247,10 +312,6 @@
         return TokenManager.get_token_data(token, "reset_password")
 
 
-def _get_login_cache_key(*, account_id: str, token: str):
-    return f"account_login:{account_id}:{token}"
-
-
 class TenantService:
     @staticmethod
     def create_tenant(name: str) -> Tenant:
@@ -321,7 +382,7 @@
         return tenant
 
     @staticmethod
-    def switch_tenant(account: Account, tenant_id: int = None) -> None:
+    def switch_tenant(account: Account, tenant_id: Optional[int] = None) -> None:
        """Switch the current workspace for the account"""
 
         # Ensure tenant_id is provided
@@ -687,3 +748,8 @@
         invitation = json.loads(data)
 
         return invitation
+
+
+def _generate_refresh_token(length: int = 64):
+    token = secrets.token_hex(length)
+    return token
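Taken together, these hunks replace the 30-day JWT plus Redis login-cache with short-lived access tokens and rotating refresh tokens. A minimal sketch of the resulting lifecycle, assuming `account` is an already-authenticated Account and the IP address is a placeholder:

    # Login now returns a TokenPair instead of a bare JWT string.
    token_pair = AccountService.login(account, ip_address="203.0.113.7")

    # The access token expires after ACCESS_TOKEN_EXPIRE_MINUTES; when it does,
    # exchange the refresh token for a new pair. The old refresh token is
    # deleted from Redis first, so it cannot be replayed.
    new_pair = AccountService.refresh_token(token_pair.refresh_token)

    # Logout looks up and revokes whatever refresh token is stored for the account.
    AccountService.logout(account=account)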
diff --git a/api/services/auth/api_key_auth_factory.py b/api/services/auth/api_key_auth_factory.py
index ae5b953b47..36387e9c2e 100644
--- a/api/services/auth/api_key_auth_factory.py
+++ b/api/services/auth/api_key_auth_factory.py
@@ -1,10 +1,13 @@
 from services.auth.firecrawl import FirecrawlAuth
+from services.auth.jina import JinaAuth
 
 
 class ApiKeyAuthFactory:
     def __init__(self, provider: str, credentials: dict):
         if provider == "firecrawl":
             self.auth = FirecrawlAuth(credentials)
+        elif provider == "jinareader":
+            self.auth = JinaAuth(credentials)
         else:
             raise ValueError("Invalid provider")
diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py
new file mode 100644
index 0000000000..de898a1f94
--- /dev/null
+++ b/api/services/auth/jina.py
@@ -0,0 +1,44 @@
+import json
+
+import requests
+
+from services.auth.api_key_auth_base import ApiKeyAuthBase
+
+
+class JinaAuth(ApiKeyAuthBase):
+    def __init__(self, credentials: dict):
+        super().__init__(credentials)
+        auth_type = credentials.get("auth_type")
+        if auth_type != "bearer":
+            raise ValueError("Invalid auth type; Jina Reader auth type must be Bearer")
+        self.api_key = credentials.get("config", {}).get("api_key", None)
+
+        if not self.api_key:
+            raise ValueError("No API key provided")
+
+    def validate_credentials(self):
+        headers = self._prepare_headers()
+        options = {
+            "url": "https://example.com",
+        }
+        response = self._post_request("https://r.jina.ai", options, headers)
+        if response.status_code == 200:
+            return True
+        else:
+            self._handle_error(response)
+
+    def _prepare_headers(self):
+        return {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"}
+
+    def _post_request(self, url, data, headers):
+        return requests.post(url, headers=headers, json=data)
+
+    def _handle_error(self, response):
+        if response.status_code in {402, 409, 500}:
+            error_message = response.json().get("error", "Unknown error occurred")
+            raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}")
+        else:
+            if response.text:
+                error_message = json.loads(response.text).get("error", "Unknown error occurred")
+                raise Exception(f"Failed to authorize. Status code: {response.status_code}. Error: {error_message}")
+            raise Exception(f"Unexpected error occurred while trying to authorize. Status code: {response.status_code}")
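For context, a hedged usage sketch of the new provider wiring (the credentials dict shape follows JinaAuth above; the key value is a placeholder):

    # Hypothetical credential validation for the new "jinareader" provider.
    credentials = {
        "auth_type": "bearer",
        "config": {"api_key": "jina_xxxxxxxx"},  # placeholder API key
    }
    auth = ApiKeyAuthFactory("jinareader", credentials).auth
    auth.validate_credentials()  # POSTs a test read of https://example.com to r.jina.ai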
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index e96f06ed40..ede8764086 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -8,6 +8,7 @@ from typing import Optional
 
 from flask_login import current_user
 from sqlalchemy import func
+from werkzeug.exceptions import NotFound
 
 from configs import dify_config
 from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@@ -32,6 +33,7 @@ from models.dataset import (
     DatasetQuery,
     Document,
     DocumentSegment,
+    ExternalKnowledgeBindings,
 )
 from models.model import UploadFile
 from models.source import DataSourceOauthBinding
@@ -39,6 +41,7 @@ from services.errors.account import NoPermissionError
 from services.errors.dataset import DatasetNameDuplicateError
 from services.errors.document import DocumentIndexingError
 from services.errors.file import FileNotExistsError
+from services.external_knowledge_service import ExternalDatasetService
 from services.feature_service import FeatureModel, FeatureService
 from services.tag_service import TagService
 from services.vector_service import VectorService
@@ -56,10 +59,8 @@ from tasks.sync_website_document_indexing_task import sync_website_document_inde
 
 class DatasetService:
     @staticmethod
-    def get_datasets(page, per_page, provider="vendor", tenant_id=None, user=None, search=None, tag_ids=None):
-        query = Dataset.query.filter(Dataset.provider == provider, Dataset.tenant_id == tenant_id).order_by(
-            Dataset.created_at.desc()
-        )
+    def get_datasets(page, per_page, tenant_id=None, user=None, search=None, tag_ids=None):
+        query = Dataset.query.filter(Dataset.tenant_id == tenant_id).order_by(Dataset.created_at.desc())
 
         if user:
             # get permitted dataset ids
@@ -137,7 +138,14 @@
 
     @staticmethod
     def create_empty_dataset(
-        tenant_id: str, name: str, indexing_technique: Optional[str], account: Account, permission: Optional[str] = None
+        tenant_id: str,
+        name: str,
+        indexing_technique: Optional[str],
+        account: Account,
+        permission: Optional[str] = None,
+        provider: str = "vendor",
+        external_knowledge_api_id: Optional[str] = None,
+        external_knowledge_id: Optional[str] = None,
     ):
         # check if dataset name already exists
         if Dataset.query.filter_by(name=name, tenant_id=tenant_id).first():
@@ -156,12 +164,28 @@
         dataset.embedding_model_provider = embedding_model.provider if embedding_model else None
         dataset.embedding_model = embedding_model.model if embedding_model else None
         dataset.permission = permission or DatasetPermissionEnum.ONLY_ME
+        dataset.provider = provider
         db.session.add(dataset)
+        db.session.flush()
+
+        if provider == "external" and external_knowledge_api_id:
+            external_knowledge_api = ExternalDatasetService.get_external_knowledge_api(external_knowledge_api_id)
+            if not external_knowledge_api:
+                raise ValueError("External API template not found.")
+            external_knowledge_binding = ExternalKnowledgeBindings(
+                tenant_id=tenant_id,
+                dataset_id=dataset.id,
+                external_knowledge_api_id=external_knowledge_api_id,
+                external_knowledge_id=external_knowledge_id,
+                created_by=account.id,
+            )
+            db.session.add(external_knowledge_binding)
         db.session.commit()
         return dataset
 
     @staticmethod
-    def get_dataset(dataset_id):
+    def get_dataset(dataset_id) -> Dataset:
         return Dataset.query.filter_by(id=dataset_id).first()
 
     @staticmethod
@@ -202,81 +226,107 @@
 
     @staticmethod
     def update_dataset(dataset_id, data, user):
-        data.pop("partial_member_list", None)
-        filtered_data = {k: v for k, v in data.items() if v is not None or k == "description"}
         dataset = DatasetService.get_dataset(dataset_id)
+
         DatasetService.check_dataset_permission(dataset, user)
-        action = None
-        if dataset.indexing_technique != data["indexing_technique"]:
-            # if update indexing_technique
-            if data["indexing_technique"] == "economy":
-                action = "remove"
-                filtered_data["embedding_model"] = None
-                filtered_data["embedding_model_provider"] = None
-                filtered_data["collection_binding_id"] = None
-            elif data["indexing_technique"] == "high_quality":
-                action = "add"
-                # get embedding model setting
-                try:
-                    model_manager = ModelManager()
-                    embedding_model = model_manager.get_model_instance(
-                        tenant_id=current_user.current_tenant_id,
-                        provider=data["embedding_model_provider"],
-                        model_type=ModelType.TEXT_EMBEDDING,
-                        model=data["embedding_model"],
-                    )
-                    filtered_data["embedding_model"] = embedding_model.model
-                    filtered_data["embedding_model_provider"] = embedding_model.provider
-                    dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding(
-                        embedding_model.provider, embedding_model.model
-                    )
-                    filtered_data["collection_binding_id"] = dataset_collection_binding.id
-                except LLMBadRequestError:
-                    raise ValueError(
-                        "No Embedding Model available. Please configure a valid provider "
-                        "in the Settings -> Model Provider."
- ) - except ProviderTokenNotInitError as ex: - raise ValueError(ex.description) - else: + if dataset.provider == "external": + dataset.retrieval_model = data.get("external_retrieval_model", None) + dataset.name = data.get("name", dataset.name) + dataset.description = data.get("description", "") + external_knowledge_id = data.get("external_knowledge_id", None) + dataset.permission = data.get("permission") + db.session.add(dataset) + if not external_knowledge_id: + raise ValueError("External knowledge id is required.") + external_knowledge_api_id = data.get("external_knowledge_api_id", None) + if not external_knowledge_api_id: + raise ValueError("External knowledge api id is required.") + external_knowledge_binding = ExternalKnowledgeBindings.query.filter_by(dataset_id=dataset_id).first() if ( - data["embedding_model_provider"] != dataset.embedding_model_provider - or data["embedding_model"] != dataset.embedding_model + external_knowledge_binding.external_knowledge_id != external_knowledge_id + or external_knowledge_binding.external_knowledge_api_id != external_knowledge_api_id ): - action = "update" - try: - model_manager = ModelManager() - embedding_model = model_manager.get_model_instance( - tenant_id=current_user.current_tenant_id, - provider=data["embedding_model_provider"], - model_type=ModelType.TEXT_EMBEDDING, - model=data["embedding_model"], - ) - filtered_data["embedding_model"] = embedding_model.model - filtered_data["embedding_model_provider"] = embedding_model.provider - dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( - embedding_model.provider, embedding_model.model - ) - filtered_data["collection_binding_id"] = dataset_collection_binding.id - except LLMBadRequestError: - raise ValueError( - "No Embedding Model available. Please configure a valid provider " - "in the Settings -> Model Provider." 
- ) - except ProviderTokenNotInitError as ex: - raise ValueError(ex.description) + external_knowledge_binding.external_knowledge_id = external_knowledge_id + external_knowledge_binding.external_knowledge_api_id = external_knowledge_api_id + db.session.add(external_knowledge_binding) + db.session.commit() + else: + data.pop("partial_member_list", None) + data.pop("external_knowledge_api_id", None) + data.pop("external_knowledge_id", None) + data.pop("external_retrieval_model", None) + filtered_data = {k: v for k, v in data.items() if v is not None or k == "description"} + action = None + if dataset.indexing_technique != data["indexing_technique"]: + # if update indexing_technique + if data["indexing_technique"] == "economy": + action = "remove" + filtered_data["embedding_model"] = None + filtered_data["embedding_model_provider"] = None + filtered_data["collection_binding_id"] = None + elif data["indexing_technique"] == "high_quality": + action = "add" + # get embedding model setting + try: + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=current_user.current_tenant_id, + provider=data["embedding_model_provider"], + model_type=ModelType.TEXT_EMBEDDING, + model=data["embedding_model"], + ) + filtered_data["embedding_model"] = embedding_model.model + filtered_data["embedding_model_provider"] = embedding_model.provider + dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( + embedding_model.provider, embedding_model.model + ) + filtered_data["collection_binding_id"] = dataset_collection_binding.id + except LLMBadRequestError: + raise ValueError( + "No Embedding Model available. Please configure a valid provider " + "in the Settings -> Model Provider." + ) + except ProviderTokenNotInitError as ex: + raise ValueError(ex.description) + else: + if ( + data["embedding_model_provider"] != dataset.embedding_model_provider + or data["embedding_model"] != dataset.embedding_model + ): + action = "update" + try: + model_manager = ModelManager() + embedding_model = model_manager.get_model_instance( + tenant_id=current_user.current_tenant_id, + provider=data["embedding_model_provider"], + model_type=ModelType.TEXT_EMBEDDING, + model=data["embedding_model"], + ) + filtered_data["embedding_model"] = embedding_model.model + filtered_data["embedding_model_provider"] = embedding_model.provider + dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding( + embedding_model.provider, embedding_model.model + ) + filtered_data["collection_binding_id"] = dataset_collection_binding.id + except LLMBadRequestError: + raise ValueError( + "No Embedding Model available. Please configure a valid provider " + "in the Settings -> Model Provider." 
+ ) + except ProviderTokenNotInitError as ex: + raise ValueError(ex.description) - filtered_data["updated_by"] = user.id - filtered_data["updated_at"] = datetime.datetime.now() + filtered_data["updated_by"] = user.id + filtered_data["updated_at"] = datetime.datetime.now() - # update Retrieval model - filtered_data["retrieval_model"] = data["retrieval_model"] + # update Retrieval model + filtered_data["retrieval_model"] = data["retrieval_model"] - dataset.query.filter_by(id=dataset_id).update(filtered_data) + dataset.query.filter_by(id=dataset_id).update(filtered_data) - db.session.commit() - if action: - deal_dataset_vector_index_task.delay(dataset_id, action) + db.session.commit() + if action: + deal_dataset_vector_index_task.delay(dataset_id, action) return dataset @staticmethod @@ -927,6 +977,8 @@ class DocumentService: ): DatasetService.check_dataset_model_setting(dataset) document = DocumentService.get_document(dataset.id, document_data["original_document_id"]) + if document is None: + raise NotFound("Document not found") if document.display_status != "available": raise ValueError("Document is not available") # update document name diff --git a/api/services/enterprise/base.py b/api/services/enterprise/base.py index ddee52164b..7d4fdfd2d0 100644 --- a/api/services/enterprise/base.py +++ b/api/services/enterprise/base.py @@ -7,11 +7,16 @@ class EnterpriseRequest: base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL") secret_key = os.environ.get("ENTERPRISE_API_SECRET_KEY", "ENTERPRISE_API_SECRET_KEY") + proxies = { + "http": None, + "https": None, + } + @classmethod def send_request(cls, method, endpoint, json=None, params=None): headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key} url = f"{cls.base_url}{endpoint}" - response = requests.request(method, url, json=json, params=params, headers=headers) + response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies) return response.json() diff --git a/api/services/entities/external_knowledge_entities/external_knowledge_entities.py b/api/services/entities/external_knowledge_entities/external_knowledge_entities.py new file mode 100644 index 0000000000..4545f385eb --- /dev/null +++ b/api/services/entities/external_knowledge_entities/external_knowledge_entities.py @@ -0,0 +1,26 @@ +from typing import Literal, Optional, Union + +from pydantic import BaseModel + + +class AuthorizationConfig(BaseModel): + type: Literal[None, "basic", "bearer", "custom"] + api_key: Union[None, str] = None + header: Union[None, str] = None + + +class Authorization(BaseModel): + type: Literal["no-auth", "api-key"] + config: Optional[AuthorizationConfig] = None + + +class ProcessStatusSetting(BaseModel): + request_method: str + url: str + + +class ExternalKnowledgeApiSetting(BaseModel): + url: str + request_method: str + headers: Optional[dict] = None + params: Optional[dict] = None diff --git a/api/services/errors/base.py b/api/services/errors/base.py index 1fed71cf9e..4d39f956b8 100644 --- a/api/services/errors/base.py +++ b/api/services/errors/base.py @@ -1,3 +1,6 @@ +from typing import Optional + + class BaseServiceError(Exception): - def __init__(self, description: str = None): + def __init__(self, description: Optional[str] = None): self.description = description diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py new file mode 100644 index 0000000000..4efdf8d7db --- /dev/null +++ 
b/api/services/external_knowledge_service.py
@@ -0,0 +1,274 @@
+import json
+from copy import deepcopy
+from datetime import datetime, timezone
+from typing import Any, Optional, Union
+
+import httpx
+import validators
+
+# from tasks.external_document_indexing_task import external_document_indexing_task
+from core.helper import ssrf_proxy
+from extensions.ext_database import db
+from models.dataset import (
+    Dataset,
+    ExternalKnowledgeApis,
+    ExternalKnowledgeBindings,
+)
+from services.entities.external_knowledge_entities.external_knowledge_entities import (
+    Authorization,
+    ExternalKnowledgeApiSetting,
+)
+from services.errors.dataset import DatasetNameDuplicateError
+
+
+class ExternalDatasetService:
+    @staticmethod
+    def get_external_knowledge_apis(page, per_page, tenant_id, search=None) -> tuple[list[ExternalKnowledgeApis], int]:
+        query = ExternalKnowledgeApis.query.filter(ExternalKnowledgeApis.tenant_id == tenant_id).order_by(
+            ExternalKnowledgeApis.created_at.desc()
+        )
+        if search:
+            query = query.filter(ExternalKnowledgeApis.name.ilike(f"%{search}%"))
+
+        external_knowledge_apis = query.paginate(page=page, per_page=per_page, max_per_page=100, error_out=False)
+
+        return external_knowledge_apis.items, external_knowledge_apis.total
+
+    @classmethod
+    def validate_api_list(cls, api_settings: dict):
+        if not api_settings:
+            raise ValueError("api list is empty")
+        if "endpoint" not in api_settings or not api_settings["endpoint"]:
+            raise ValueError("endpoint is required")
+        if "api_key" not in api_settings or not api_settings["api_key"]:
+            raise ValueError("api_key is required")
+
+    @staticmethod
+    def create_external_knowledge_api(tenant_id: str, user_id: str, args: dict) -> ExternalKnowledgeApis:
+        ExternalDatasetService.check_endpoint_and_api_key(args.get("settings"))
+        external_knowledge_api = ExternalKnowledgeApis(
+            tenant_id=tenant_id,
+            created_by=user_id,
+            updated_by=user_id,
+            name=args.get("name"),
+            description=args.get("description", ""),
+            settings=json.dumps(args.get("settings"), ensure_ascii=False),
+        )
+
+        db.session.add(external_knowledge_api)
+        db.session.commit()
+        return external_knowledge_api
+
+    @staticmethod
+    def check_endpoint_and_api_key(settings: dict):
+        if "endpoint" not in settings or not settings["endpoint"]:
+            raise ValueError("endpoint is required")
+        if "api_key" not in settings or not settings["api_key"]:
+            raise ValueError("api_key is required")
+
+        endpoint = f"{settings['endpoint']}/retrieval"
+        api_key = settings["api_key"]
+        if not validators.url(endpoint):
+            raise ValueError(f"invalid endpoint: {endpoint}")
+        try:
+            response = httpx.post(endpoint, headers={"Authorization": f"Bearer {api_key}"})
+        except Exception as e:
+            raise ValueError(f"failed to connect to the endpoint: {endpoint}") from e
+        if response.status_code == 502:
+            raise ValueError(f"Bad Gateway: failed to connect to the endpoint: {endpoint}")
+        if response.status_code == 404:
+            raise ValueError(f"Not Found: failed to connect to the endpoint: {endpoint}")
+        if response.status_code == 403:
+            raise ValueError(f"Forbidden: Authorization failed with api_key: {api_key}")
+
+    @staticmethod
+    def get_external_knowledge_api(external_knowledge_api_id: str) -> ExternalKnowledgeApis:
+        return ExternalKnowledgeApis.query.filter_by(id=external_knowledge_api_id).first()
+
+    @staticmethod
+    def update_external_knowledge_api(tenant_id, user_id, external_knowledge_api_id, args) -> ExternalKnowledgeApis:
+        external_knowledge_api = ExternalKnowledgeApis.query.filter_by(
id=external_knowledge_api_id, tenant_id=tenant_id + ).first() + if external_knowledge_api is None: + raise ValueError("api template not found") + + external_knowledge_api.name = args.get("name") + external_knowledge_api.description = args.get("description", "") + external_knowledge_api.settings = json.dumps(args.get("settings"), ensure_ascii=False) + external_knowledge_api.updated_by = user_id + external_knowledge_api.updated_at = datetime.now(timezone.utc).replace(tzinfo=None) + db.session.commit() + + return external_knowledge_api + + @staticmethod + def delete_external_knowledge_api(tenant_id: str, external_knowledge_api_id: str): + external_knowledge_api = ExternalKnowledgeApis.query.filter_by( + id=external_knowledge_api_id, tenant_id=tenant_id + ).first() + if external_knowledge_api is None: + raise ValueError("api template not found") + + db.session.delete(external_knowledge_api) + db.session.commit() + + @staticmethod + def external_knowledge_api_use_check(external_knowledge_api_id: str) -> tuple[bool, int]: + count = ExternalKnowledgeBindings.query.filter_by(external_knowledge_api_id=external_knowledge_api_id).count() + if count > 0: + return True, count + return False, 0 + + @staticmethod + def get_external_knowledge_binding_with_dataset_id(tenant_id: str, dataset_id: str) -> ExternalKnowledgeBindings: + external_knowledge_binding = ExternalKnowledgeBindings.query.filter_by( + dataset_id=dataset_id, tenant_id=tenant_id + ).first() + if not external_knowledge_binding: + raise ValueError("external knowledge binding not found") + return external_knowledge_binding + + @staticmethod + def document_create_args_validate(tenant_id: str, external_knowledge_api_id: str, process_parameter: dict): + external_knowledge_api = ExternalKnowledgeApis.query.filter_by( + id=external_knowledge_api_id, tenant_id=tenant_id + ).first() + if external_knowledge_api is None: + raise ValueError("api template not found") + settings = json.loads(external_knowledge_api.settings) + for setting in settings: + custom_parameters = setting.get("document_process_setting") + if custom_parameters: + for parameter in custom_parameters: + if parameter.get("required", False) and not process_parameter.get(parameter.get("name")): + raise ValueError(f'{parameter.get("name")} is required') + + @staticmethod + def process_external_api( + settings: ExternalKnowledgeApiSetting, files: Union[None, dict[str, Any]] + ) -> httpx.Response: + """ + do http request depending on api bundle + """ + + kwargs = { + "url": settings.url, + "headers": settings.headers, + "follow_redirects": True, + } + + response = getattr(ssrf_proxy, settings.request_method)(data=json.dumps(settings.params), files=files, **kwargs) + + return response + + @staticmethod + def assembling_headers(authorization: Authorization, headers: Optional[dict] = None) -> dict[str, Any]: + authorization = deepcopy(authorization) + if headers: + headers = deepcopy(headers) + else: + headers = {} + if authorization.type == "api-key": + if authorization.config is None: + raise ValueError("authorization config is required") + + if authorization.config.api_key is None: + raise ValueError("api_key is required") + + if not authorization.config.header: + authorization.config.header = "Authorization" + + if authorization.config.type == "bearer": + headers[authorization.config.header] = f"Bearer {authorization.config.api_key}" + elif authorization.config.type == "basic": + headers[authorization.config.header] = f"Basic {authorization.config.api_key}" + elif 
authorization.config.type == "custom": + headers[authorization.config.header] = authorization.config.api_key + + return headers + + @staticmethod + def get_external_knowledge_api_settings(settings: dict) -> ExternalKnowledgeApiSetting: + return ExternalKnowledgeApiSetting.parse_obj(settings) + + @staticmethod + def create_external_dataset(tenant_id: str, user_id: str, args: dict) -> Dataset: + # check if dataset name already exists + if Dataset.query.filter_by(name=args.get("name"), tenant_id=tenant_id).first(): + raise DatasetNameDuplicateError(f"Dataset with name {args.get('name')} already exists.") + external_knowledge_api = ExternalKnowledgeApis.query.filter_by( + id=args.get("external_knowledge_api_id"), tenant_id=tenant_id + ).first() + + if external_knowledge_api is None: + raise ValueError("api template not found") + + dataset = Dataset( + tenant_id=tenant_id, + name=args.get("name"), + description=args.get("description", ""), + provider="external", + retrieval_model=args.get("external_retrieval_model"), + created_by=user_id, + ) + + db.session.add(dataset) + db.session.flush() + + external_knowledge_binding = ExternalKnowledgeBindings( + tenant_id=tenant_id, + dataset_id=dataset.id, + external_knowledge_api_id=args.get("external_knowledge_api_id"), + external_knowledge_id=args.get("external_knowledge_id"), + created_by=user_id, + ) + db.session.add(external_knowledge_binding) + + db.session.commit() + + return dataset + + @staticmethod + def fetch_external_knowledge_retrieval( + tenant_id: str, dataset_id: str, query: str, external_retrieval_parameters: dict + ) -> list: + external_knowledge_binding = ExternalKnowledgeBindings.query.filter_by( + dataset_id=dataset_id, tenant_id=tenant_id + ).first() + if not external_knowledge_binding: + raise ValueError("external knowledge binding not found") + + external_knowledge_api = ExternalKnowledgeApis.query.filter_by( + id=external_knowledge_binding.external_knowledge_api_id + ).first() + if not external_knowledge_api: + raise ValueError("external api template not found") + + settings = json.loads(external_knowledge_api.settings) + headers = {"Content-Type": "application/json"} + if settings.get("api_key"): + headers["Authorization"] = f"Bearer {settings.get('api_key')}" + score_threshold_enabled = external_retrieval_parameters.get("score_threshold_enabled") or False + score_threshold = external_retrieval_parameters.get("score_threshold", 0.0) if score_threshold_enabled else 0.0 + request_params = { + "retrieval_setting": { + "top_k": external_retrieval_parameters.get("top_k"), + "score_threshold": score_threshold, + }, + "query": query, + "knowledge_id": external_knowledge_binding.external_knowledge_id, + } + + external_knowledge_api_setting = { + "url": f"{settings.get('endpoint')}/retrieval", + "request_method": "post", + "headers": headers, + "params": request_params, + } + response = ExternalDatasetService.process_external_api( + ExternalKnowledgeApiSetting(**external_knowledge_api_setting), None + ) + if response.status_code == 200: + return response.json().get("records", []) + return [] diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index 3dafafd5b4..7957b4dc82 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -19,7 +19,15 @@ default_retrieval_model = { class HitTestingService: @classmethod - def retrieve(cls, dataset: Dataset, query: str, account: Account, retrieval_model: dict, limit: int = 10) -> dict: + def retrieve( + cls, + dataset: Dataset, + 
query: str, + account: Account, + retrieval_model: dict, + external_retrieval_model: dict, + limit: int = 10, + ) -> dict: if dataset.available_document_count == 0 or dataset.available_segment_count == 0: return { "query": { @@ -62,10 +70,44 @@ class HitTestingService: return cls.compact_retrieve_response(dataset, query, all_documents) + @classmethod + def external_retrieve( + cls, + dataset: Dataset, + query: str, + account: Account, + external_retrieval_model: dict, + ) -> dict: + if dataset.provider != "external": + return { + "query": {"content": query}, + "records": [], + } + + start = time.perf_counter() + + all_documents = RetrievalService.external_retrieve( + dataset_id=dataset.id, + query=cls.escape_query_for_search(query), + external_retrieval_model=external_retrieval_model, + ) + + end = time.perf_counter() + logging.debug(f"External knowledge hit testing retrieve in {end - start:0.4f} seconds") + + dataset_query = DatasetQuery( + dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id + ) + + db.session.add(dataset_query) + db.session.commit() + + return cls.compact_external_retrieve_response(dataset, query, all_documents) + @classmethod def compact_retrieve_response(cls, dataset: Dataset, query: str, documents: list[Document]): - i = 0 records = [] + for document in documents: index_node_id = document.metadata["doc_id"] @@ -81,7 +123,6 @@ class HitTestingService: ) if not segment: - i += 1 continue record = { @@ -91,8 +132,6 @@ class HitTestingService: records.append(record) - i += 1 - return { "query": { "content": query, @@ -100,6 +139,25 @@ class HitTestingService: "records": records, } + @classmethod + def compact_external_retrieve_response(cls, dataset: Dataset, query: str, documents: list): + records = [] + if dataset.provider == "external": + for document in documents: + record = { + "content": document.get("content", None), + "title": document.get("title", None), + "score": document.get("score", None), + "metadata": document.get("metadata", None), + } + records.append(record) + return { + "query": { + "content": query, + }, + "records": records, + } + @classmethod def hit_testing_args_check(cls, args): query = args["query"] diff --git a/api/services/knowledge_service.py b/api/services/knowledge_service.py new file mode 100644 index 0000000000..02fe1d19bc --- /dev/null +++ b/api/services/knowledge_service.py @@ -0,0 +1,45 @@ +import boto3 + +from configs import dify_config + + +class ExternalDatasetTestService: + # this service is only for internal testing + @staticmethod + def knowledge_retrieval(retrieval_setting: dict, query: str, knowledge_id: str): + # get bedrock client + client = boto3.client( + "bedrock-agent-runtime", + aws_secret_access_key=dify_config.AWS_SECRET_ACCESS_KEY, + aws_access_key_id=dify_config.AWS_ACCESS_KEY_ID, + # example: us-east-1 + region_name="us-east-1", + ) + # fetch external knowledge retrieval + response = client.retrieve( + knowledgeBaseId=knowledge_id, + retrievalConfiguration={ + "vectorSearchConfiguration": { + "numberOfResults": retrieval_setting.get("top_k"), + "overrideSearchType": "HYBRID", + } + }, + retrievalQuery={"text": query}, + ) + # parse response + results = [] + if response.get("ResponseMetadata") and response.get("ResponseMetadata").get("HTTPStatusCode") == 200: + if response.get("retrievalResults"): + retrieval_results = response.get("retrievalResults") + for retrieval_result in retrieval_results: + # filter out results with score less than threshold + if 
retrieval_result.get("score") < retrieval_setting.get("score_threshold", 0.0): + continue + result = { + "metadata": retrieval_result.get("metadata"), + "score": retrieval_result.get("score"), + "title": retrieval_result.get("metadata").get("x-amz-bedrock-kb-source-uri"), + "content": retrieval_result.get("content").get("text"), + } + results.append(result) + return {"records": results} diff --git a/api/services/tag_service.py b/api/services/tag_service.py index 5e2851cd8f..a374bdcf00 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -1,4 +1,5 @@ import uuid +from typing import Optional from flask_login import current_user from sqlalchemy import func @@ -11,7 +12,7 @@ from models.model import App, Tag, TagBinding class TagService: @staticmethod - def get_tags(tag_type: str, current_tenant_id: str, keyword: str = None) -> list: + def get_tags(tag_type: str, current_tenant_id: str, keyword: Optional[str] = None) -> list: query = ( db.session.query(Tag.id, Tag.type, Tag.name, func.count(TagBinding.id).label("binding_count")) .outerjoin(TagBinding, Tag.id == TagBinding.tag_id) diff --git a/api/services/tools/api_tools_manage_service.py b/api/services/tools/api_tools_manage_service.py index 6f6074f596..257c6cf52b 100644 --- a/api/services/tools/api_tools_manage_service.py +++ b/api/services/tools/api_tools_manage_service.py @@ -1,5 +1,6 @@ import json import logging +from typing import Optional from httpx import get @@ -79,7 +80,9 @@ class ApiToolManageService: raise ValueError(f"invalid schema: {str(e)}") @staticmethod - def convert_schema_to_tool_bundles(schema: str, extra_info: dict = None) -> list[ApiToolBundle]: + def convert_schema_to_tool_bundles( + schema: str, extra_info: Optional[dict] = None + ) -> tuple[list[ApiToolBundle], str]: """ convert schema to tool bundles diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py index 2bc48c4185..4af73d5063 100644 --- a/api/services/tools/tools_transform_service.py +++ b/api/services/tools/tools_transform_service.py @@ -144,7 +144,7 @@ class ToolTransformService: @staticmethod def workflow_provider_to_user_provider( - provider_controller: WorkflowToolProviderController, labels: list[str] = None + provider_controller: WorkflowToolProviderController, labels: Optional[list[str]] = None ): """ convert provider controller to user provider @@ -174,7 +174,7 @@ class ToolTransformService: provider_controller: ApiToolProviderController, db_provider: ApiToolProvider, decrypt_credentials: bool = True, - labels: list[str] = None, + labels: Optional[list[str]] = None, ) -> UserToolProvider: """ convert provider controller to user provider @@ -223,9 +223,9 @@ class ToolTransformService: @staticmethod def tool_to_user_tool( tool: Union[ApiToolBundle, WorkflowTool, Tool], - credentials: dict = None, - tenant_id: str = None, - labels: list[str] = None, + credentials: Optional[dict] = None, + tenant_id: Optional[str] = None, + labels: Optional[list[str]] = None, ) -> UserTool: """ convert tool to user tool diff --git a/api/services/tools/workflow_tools_manage_service.py b/api/services/tools/workflow_tools_manage_service.py index 3830e75339..5868ef3755 100644 --- a/api/services/tools/workflow_tools_manage_service.py +++ b/api/services/tools/workflow_tools_manage_service.py @@ -1,5 +1,6 @@ import json from datetime import datetime +from typing import Optional from sqlalchemy import or_ @@ -32,7 +33,7 @@ class WorkflowToolManageService: description: str, parameters: list[dict], 
privacy_policy: str = "", - labels: list[str] = None, + labels: Optional[list[str]] = None, ) -> dict: """ Create a workflow tool. @@ -106,7 +107,7 @@ class WorkflowToolManageService: description: str, parameters: list[dict], privacy_policy: str = "", - labels: list[str] = None, + labels: Optional[list[str]] = None, ) -> dict: """ Update a workflow tool. diff --git a/api/services/website_service.py b/api/services/website_service.py index fea605cf30..13cc9c679a 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -1,6 +1,7 @@ import datetime import json +import requests from flask_login import current_user from core.helper import encrypter @@ -65,6 +66,35 @@ class WebsiteService: time = str(datetime.datetime.now().timestamp()) redis_client.setex(website_crawl_time_cache_key, 3600, time) return {"status": "active", "job_id": job_id} + elif provider == "jinareader": + api_key = encrypter.decrypt_token( + tenant_id=current_user.current_tenant_id, token=credentials.get("config").get("api_key") + ) + crawl_sub_pages = options.get("crawl_sub_pages", False) + if not crawl_sub_pages: + response = requests.get( + f"https://r.jina.ai/{url}", + headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"}, + ) + if response.json().get("code") != 200: + raise ValueError("Failed to crawl") + return {"status": "active", "data": response.json().get("data")} + else: + response = requests.post( + "https://adaptivecrawl-kir3wx7b3a-uc.a.run.app", + json={ + "url": url, + "maxPages": options.get("limit", 1), + "useSitemap": options.get("use_sitemap", True), + }, + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {api_key}", + }, + ) + if response.json().get("code") != 200: + raise ValueError("Failed to crawl") + return {"status": "active", "job_id": response.json().get("data", {}).get("taskId")} else: raise ValueError("Invalid provider") @@ -93,6 +123,42 @@ class WebsiteService: time_consuming = abs(end_time - float(start_time)) crawl_status_data["time_consuming"] = f"{time_consuming:.2f}" redis_client.delete(website_crawl_time_cache_key) + elif provider == "jinareader": + api_key = encrypter.decrypt_token( + tenant_id=current_user.current_tenant_id, token=credentials.get("config").get("api_key") + ) + response = requests.post( + "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", + headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, + json={"taskId": job_id}, + ) + data = response.json().get("data", {}) + crawl_status_data = { + "status": data.get("status", "active"), + "job_id": job_id, + "total": len(data.get("urls", [])), + "current": len(data.get("processed", [])) + len(data.get("failed", [])), + "data": [], + "time_consuming": data.get("duration", 0) / 1000, + } + + if crawl_status_data["status"] == "completed": + response = requests.post( + "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", + headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, + json={"taskId": job_id, "urls": list(data.get("processed", {}).keys())}, + ) + data = response.json().get("data", {}) + formatted_data = [ + { + "title": item.get("data", {}).get("title"), + "source_url": item.get("data", {}).get("url"), + "description": item.get("data", {}).get("description"), + "markdown": item.get("data", {}).get("content"), + } + for item in data.get("processed", {}).values() + ] + crawl_status_data["data"] = formatted_data else: raise ValueError("Invalid provider") return crawl_status_data @@ 
-100,6 +166,8 @@ class WebsiteService: @classmethod def get_crawl_url_data(cls, job_id: str, provider: str, url: str, tenant_id: str) -> dict | None: credentials = ApiKeyAuthService.get_auth_credentials(tenant_id, "website", provider) + # decrypt api_key + api_key = encrypter.decrypt_token(tenant_id=tenant_id, token=credentials.get("config").get("api_key")) if provider == "firecrawl": file_key = "website_files/" + job_id + ".txt" if storage.exists(file_key): @@ -107,8 +175,6 @@ class WebsiteService: if data: data = json.loads(data.decode("utf-8")) else: - # decrypt api_key - api_key = encrypter.decrypt_token(tenant_id=tenant_id, token=credentials.get("config").get("api_key")) firecrawl_app = FirecrawlApp(api_key=api_key, base_url=credentials.get("config").get("base_url", None)) result = firecrawl_app.check_crawl_status(job_id) if result.get("status") != "completed": @@ -119,6 +185,40 @@ class WebsiteService: if item.get("source_url") == url: return item return None + elif provider == "jinareader": + file_key = "website_files/" + job_id + ".txt" + if storage.exists(file_key): + data = storage.load_once(file_key) + if data: + data = json.loads(data.decode("utf-8")) + elif not job_id: + response = requests.get( + f"https://r.jina.ai/{url}", + headers={"Accept": "application/json", "Authorization": f"Bearer {api_key}"}, + ) + if response.json().get("code") != 200: + raise ValueError("Failed to crawl") + return response.json().get("data") + else: + api_key = encrypter.decrypt_token(tenant_id=tenant_id, token=credentials.get("config").get("api_key")) + response = requests.post( + "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", + headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, + json={"taskId": job_id}, + ) + data = response.json().get("data", {}) + if data.get("status") != "completed": + raise ValueError("Crawl job is not completed") + + response = requests.post( + "https://adaptivecrawlstatus-kir3wx7b3a-uc.a.run.app", + headers={"Content-Type": "application/json", "Authorization": f"Bearer {api_key}"}, + json={"taskId": job_id, "urls": list(data.get("processed", {}).keys())}, + ) + data = response.json().get("data", {}) + for item in data.get("processed", {}).values(): + if item.get("data", {}).get("url") == url: + return item.get("data", {}) else: raise ValueError("Invalid provider") diff --git a/api/tasks/external_document_indexing_task.py b/api/tasks/external_document_indexing_task.py new file mode 100644 index 0000000000..6fc719ae8d --- /dev/null +++ b/api/tasks/external_document_indexing_task.py @@ -0,0 +1,93 @@ +import json +import logging +import time + +import click +from celery import shared_task + +from core.indexing_runner import DocumentIsPausedException +from extensions.ext_database import db +from extensions.ext_storage import storage +from models.dataset import Dataset, ExternalKnowledgeApis +from models.model import UploadFile +from services.external_knowledge_service import ExternalDatasetService + + +@shared_task(queue="dataset") +def external_document_indexing_task( + dataset_id: str, external_knowledge_api_id: str, data_source: dict, process_parameter: dict +): + """ + Async process document + :param dataset_id: + :param external_knowledge_api_id: + :param data_source: + :param process_parameter: + Usage: external_document_indexing_task.delay(dataset_id, document_id) + """ + start_at = time.perf_counter() + + dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() + if not dataset: + logging.info( + 
click.style("Processed external dataset: {} failed, dataset not exit.".format(dataset_id), fg="red") + ) + return + + # get external api template + external_knowledge_api = ( + db.session.query(ExternalKnowledgeApis) + .filter( + ExternalKnowledgeApis.id == external_knowledge_api_id, ExternalKnowledgeApis.tenant_id == dataset.tenant_id + ) + .first() + ) + + if not external_knowledge_api: + logging.info( + click.style( + "Processed external dataset: {} failed, api template: {} not exit.".format( + dataset_id, external_knowledge_api_id + ), + fg="red", + ) + ) + return + files = {} + if data_source["type"] == "upload_file": + upload_file_list = data_source["info_list"]["file_info_list"]["file_ids"] + for file_id in upload_file_list: + file = ( + db.session.query(UploadFile) + .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id) + .first() + ) + if file: + files[file.id] = (file.name, storage.load_once(file.key), file.mime_type) + try: + settings = ExternalDatasetService.get_external_knowledge_api_settings( + json.loads(external_knowledge_api.settings) + ) + # assemble headers + headers = ExternalDatasetService.assembling_headers(settings.authorization, settings.headers) + + # do http request + response = ExternalDatasetService.process_external_api(settings, headers, process_parameter, files) + job_id = response.json().get("job_id") + if job_id: + # save job_id to dataset + dataset.job_id = job_id + db.session.commit() + + end_at = time.perf_counter() + logging.info( + click.style( + "Processed external dataset: {} successful, latency: {}".format(dataset.id, end_at - start_at), + fg="green", + ) + ) + except DocumentIsPausedException as ex: + logging.info(click.style(str(ex), fg="yellow")) + + except Exception: + pass diff --git a/api/tests/artifact_tests/dependencies/__init__.py b/api/tests/artifact_tests/dependencies/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py new file mode 100644 index 0000000000..64f2884c4b --- /dev/null +++ b/api/tests/artifact_tests/dependencies/test_dependencies_sorted.py @@ -0,0 +1,49 @@ +from typing import Any + +import toml + + +def load_api_poetry_configs() -> dict[str, Any]: + pyproject_toml = toml.load("api/pyproject.toml") + return pyproject_toml["tool"]["poetry"] + + +def load_all_dependency_groups() -> dict[str, dict[str, dict[str, Any]]]: + configs = load_api_poetry_configs() + configs_by_group = {"main": configs} + for group_name in configs["group"]: + configs_by_group[group_name] = configs["group"][group_name] + dependencies_by_group = {group_name: base["dependencies"] for group_name, base in configs_by_group.items()} + return dependencies_by_group + + +def test_group_dependencies_sorted(): + for group_name, dependencies in load_all_dependency_groups().items(): + dependency_names = list(dependencies.keys()) + expected_dependency_names = sorted(set(dependency_names)) + section = f"tool.poetry.group.{group_name}.dependencies" if group_name else "tool.poetry.dependencies" + assert expected_dependency_names == dependency_names, ( + f"Dependencies in group {group_name} are not sorted. 
" + f"Check and fix [{section}] section in pyproject.toml file" + ) + + +def test_group_dependencies_version_operator(): + for group_name, dependencies in load_all_dependency_groups().items(): + for dependency_name, specification in dependencies.items(): + version_spec = specification if isinstance(specification, str) else specification["version"] + assert not version_spec.startswith("^"), ( + f"Please replace '{dependency_name} = {version_spec}' with '{dependency_name} = ~{version_spec[1:]}' " + f"'^' operator is too wide and not allowed in the version specification." + ) + + +def test_duplicated_dependency_crossing_groups(): + all_dependency_names: list[str] = [] + for dependencies in load_all_dependency_groups().values(): + dependency_names = list(dependencies.keys()) + all_dependency_names.extend(dependency_names) + expected_all_dependency_names = set(all_dependency_names) + assert sorted(expected_all_dependency_names) == sorted( + all_dependency_names + ), "Duplicated dependencies crossing groups are found" diff --git a/api/tests/integration_tests/model_runtime/__mock/anthropic.py b/api/tests/integration_tests/model_runtime/__mock/anthropic.py index 79a3dc0394..5092af4f13 100644 --- a/api/tests/integration_tests/model_runtime/__mock/anthropic.py +++ b/api/tests/integration_tests/model_runtime/__mock/anthropic.py @@ -5,7 +5,7 @@ from typing import Any, Literal, Union import anthropic import pytest from _pytest.monkeypatch import MonkeyPatch -from anthropic import Anthropic, Stream +from anthropic import Stream from anthropic.resources import Messages from anthropic.types import ( ContentBlock, diff --git a/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py b/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py index 281e866e45..6a25398cbf 100644 --- a/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py +++ b/api/tests/integration_tests/model_runtime/__mock/nomic_embeddings.py @@ -1,6 +1,6 @@ import os from collections.abc import Callable -from typing import Any, Literal, Union +from typing import Any, Literal import pytest diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_chat.py b/api/tests/integration_tests/model_runtime/__mock/openai_chat.py index 439f7d56e9..1dc5df7667 100644 --- a/api/tests/integration_tests/model_runtime/__mock/openai_chat.py +++ b/api/tests/integration_tests/model_runtime/__mock/openai_chat.py @@ -1,6 +1,6 @@ import re from collections.abc import Generator -from json import dumps, loads +from json import dumps from time import time # import monkeypatch @@ -11,11 +11,9 @@ from openai._types import NOT_GIVEN, NotGiven from openai.resources.chat.completions import Completions from openai.types import Completion as CompletionMessage from openai.types.chat import ( - ChatCompletion, ChatCompletionChunk, ChatCompletionMessageParam, ChatCompletionMessageToolCall, - ChatCompletionToolChoiceOptionParam, ChatCompletionToolParam, completion_create_params, ) diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py b/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py index e27b9891f5..3cc1fa9ff1 100644 --- a/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py +++ b/api/tests/integration_tests/model_runtime/__mock/openai_embeddings.py @@ -1,7 +1,6 @@ import re from typing import Any, Literal, Union -from openai import OpenAI from openai._types import NOT_GIVEN, NotGiven from openai.resources.embeddings import Embeddings from 
openai.types.create_embedding_response import CreateEmbeddingResponse, Usage diff --git a/api/tests/integration_tests/model_runtime/__mock/openai_remote.py b/api/tests/integration_tests/model_runtime/__mock/openai_remote.py index cb8f249543..704dbad5d2 100644 --- a/api/tests/integration_tests/model_runtime/__mock/openai_remote.py +++ b/api/tests/integration_tests/model_runtime/__mock/openai_remote.py @@ -1,6 +1,5 @@ from time import time -from openai.resources.models import Models from openai.types.model import Model diff --git a/api/tests/integration_tests/model_runtime/__mock/xinference.py b/api/tests/integration_tests/model_runtime/__mock/xinference.py index 8deb50635f..5f7dad50c1 100644 --- a/api/tests/integration_tests/model_runtime/__mock/xinference.py +++ b/api/tests/integration_tests/model_runtime/__mock/xinference.py @@ -5,7 +5,6 @@ from typing import Union import pytest from _pytest.monkeypatch import MonkeyPatch from requests import Response -from requests.exceptions import ConnectionError from requests.sessions import Session from xinference_client.client.restful.restful_client import ( Client, diff --git a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py b/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py index 8655b43d8f..85a4f7734d 100644 --- a/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py +++ b/api/tests/integration_tests/model_runtime/azure_ai_studio/test_llm.py @@ -6,10 +6,7 @@ import pytest from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, - ImagePromptMessageContent, - PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) from core.model_runtime.errors.validate import CredentialsValidateFailedError diff --git a/api/tests/integration_tests/model_runtime/chatglm/test_llm.py b/api/tests/integration_tests/model_runtime/chatglm/test_llm.py index 418e88874d..a7c5229e05 100644 --- a/api/tests/integration_tests/model_runtime/chatglm/test_llm.py +++ b/api/tests/integration_tests/model_runtime/chatglm/test_llm.py @@ -8,7 +8,6 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) from core.model_runtime.entities.model_entities import AIModelEntity diff --git a/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py b/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py index 45370d9fba..cd1c20dd02 100644 --- a/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py +++ b/api/tests/integration_tests/model_runtime/huggingface_tei/test_rerank.py @@ -2,8 +2,7 @@ import os import pytest -from core.model_runtime.entities.rerank_entities import RerankDocument, RerankResult -from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult +from core.model_runtime.entities.rerank_entities import RerankResult from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.huggingface_tei.rerank.rerank import ( HuggingfaceTeiRerankModel, diff --git a/api/tests/integration_tests/model_runtime/localai/test_llm.py b/api/tests/integration_tests/model_runtime/localai/test_llm.py index aa5436c34f..51e899fd51 100644 --- a/api/tests/integration_tests/model_runtime/localai/test_llm.py +++ 
b/api/tests/integration_tests/model_runtime/localai/test_llm.py @@ -8,10 +8,8 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) -from core.model_runtime.entities.model_entities import ParameterRule from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.localai.llm.llm import LocalAILanguageModel diff --git a/api/tests/integration_tests/model_runtime/nomic/test_provider.py b/api/tests/integration_tests/model_runtime/nomic/test_provider.py index 6cad400c06..ece4bb9200 100644 --- a/api/tests/integration_tests/model_runtime/nomic/test_provider.py +++ b/api/tests/integration_tests/model_runtime/nomic/test_provider.py @@ -4,7 +4,6 @@ import pytest from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.nomic.nomic import NomicAtlasProvider -from core.model_runtime.model_providers.nomic.text_embedding.text_embedding import NomicTextEmbeddingModel from tests.integration_tests.model_runtime.__mock.nomic_embeddings import setup_nomic_mock diff --git a/api/tests/integration_tests/model_runtime/novita/test_llm.py b/api/tests/integration_tests/model_runtime/novita/test_llm.py index 35fa0dc190..9f92679cd5 100644 --- a/api/tests/integration_tests/model_runtime/novita/test_llm.py +++ b/api/tests/integration_tests/model_runtime/novita/test_llm.py @@ -6,7 +6,6 @@ import pytest from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, - PromptMessageTool, SystemPromptMessage, UserPromptMessage, ) diff --git a/api/tests/integration_tests/model_runtime/oci/test_llm.py b/api/tests/integration_tests/model_runtime/oci/test_llm.py index 531f26a32e..bd5d27eb0f 100644 --- a/api/tests/integration_tests/model_runtime/oci/test_llm.py +++ b/api/tests/integration_tests/model_runtime/oci/test_llm.py @@ -8,7 +8,6 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) from core.model_runtime.errors.validate import CredentialsValidateFailedError diff --git a/api/tests/integration_tests/model_runtime/openai/test_llm.py b/api/tests/integration_tests/model_runtime/openai/test_llm.py index 3b3ea9ec80..41c99f6875 100644 --- a/api/tests/integration_tests/model_runtime/openai/test_llm.py +++ b/api/tests/integration_tests/model_runtime/openai/test_llm.py @@ -14,7 +14,6 @@ from core.model_runtime.entities.message_entities import ( ) from core.model_runtime.entities.model_entities import AIModelEntity, ModelType from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.model_providers.openai.llm.llm import OpenAILargeLanguageModel """FOR MOCK FIXTURES, DO NOT REMOVE""" diff --git a/api/tests/integration_tests/model_runtime/openrouter/test_llm.py b/api/tests/integration_tests/model_runtime/openrouter/test_llm.py index ce4876a73a..1b0cc6bf4b 100644 --- a/api/tests/integration_tests/model_runtime/openrouter/test_llm.py +++ b/api/tests/integration_tests/model_runtime/openrouter/test_llm.py @@ -6,7 +6,6 @@ import pytest from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta 
from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, - PromptMessageTool, SystemPromptMessage, UserPromptMessage, ) diff --git a/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py b/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py index 9f0b439d6c..41de2a17fd 100644 --- a/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py +++ b/api/tests/integration_tests/model_runtime/sagemaker/test_provider.py @@ -1,5 +1,3 @@ -import os - import pytest from core.model_runtime.errors.validate import CredentialsValidateFailedError diff --git a/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py b/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py index e4e404c7a8..f77601eea2 100644 --- a/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py +++ b/api/tests/integration_tests/model_runtime/sagemaker/test_text_embedding.py @@ -1,5 +1,3 @@ -import os - import pytest from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult diff --git a/api/tests/integration_tests/model_runtime/stepfun/test_llm.py b/api/tests/integration_tests/model_runtime/stepfun/test_llm.py index c03b1bae1f..f9afca6f59 100644 --- a/api/tests/integration_tests/model_runtime/stepfun/test_llm.py +++ b/api/tests/integration_tests/model_runtime/stepfun/test_llm.py @@ -6,13 +6,11 @@ import pytest from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, - ImagePromptMessageContent, PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType +from core.model_runtime.entities.model_entities import AIModelEntity from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.stepfun.llm.llm import StepfunLargeLanguageModel diff --git a/api/tests/integration_tests/model_runtime/togetherai/test_llm.py b/api/tests/integration_tests/model_runtime/togetherai/test_llm.py index 06ebc2a82d..5787e1bf6a 100644 --- a/api/tests/integration_tests/model_runtime/togetherai/test_llm.py +++ b/api/tests/integration_tests/model_runtime/togetherai/test_llm.py @@ -6,7 +6,6 @@ import pytest from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, - PromptMessageTool, SystemPromptMessage, UserPromptMessage, ) diff --git a/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py b/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py new file mode 100644 index 0000000000..2dcfb92c63 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/tongyi/test_rerank.py @@ -0,0 +1,40 @@ +import os + +import dashscope +import pytest + +from core.model_runtime.entities.rerank_entities import RerankResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.tongyi.rerank.rerank import GTERerankModel + + +def test_validate_credentials(): + model = GTERerankModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials(model="get-rank", credentials={"dashscope_api_key": "invalid_key"}) + + model.validate_credentials( + model="get-rank", credentials={"dashscope_api_key": 
os.environ.get("TONGYI_DASHSCOPE_API_KEY")} + ) + + +def test_invoke_model(): + model = GTERerankModel() + + result = model.invoke( + model=dashscope.TextReRank.Models.gte_rerank, + credentials={"dashscope_api_key": os.environ.get("TONGYI_DASHSCOPE_API_KEY")}, + query="什么是文本排序模型", + docs=[ + "文本排序模型广泛用于搜索引擎和推荐系统中,它们根据文本相关性对候选文本进行排序", + "量子计算是计算科学的一个前沿领域", + "预训练语言模型的发展给文本排序模型带来了新的进展", + ], + score_threshold=0.7, + ) + + assert isinstance(result, RerankResult) + assert len(result.docs) == 1 + assert result.docs[0].index == 0 + assert result.docs[0].score >= 0.7 diff --git a/api/tests/integration_tests/model_runtime/upstage/test_llm.py b/api/tests/integration_tests/model_runtime/upstage/test_llm.py index bc7517acbe..0f39e902f3 100644 --- a/api/tests/integration_tests/model_runtime/upstage/test_llm.py +++ b/api/tests/integration_tests/model_runtime/upstage/test_llm.py @@ -10,9 +10,8 @@ from core.model_runtime.entities.message_entities import ( SystemPromptMessage, UserPromptMessage, ) -from core.model_runtime.entities.model_entities import AIModelEntity, ModelType +from core.model_runtime.entities.model_entities import AIModelEntity from core.model_runtime.errors.validate import CredentialsValidateFailedError -from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.model_providers.upstage.llm.llm import UpstageLargeLanguageModel """FOR MOCK FIXTURES, DO NOT REMOVE""" diff --git a/api/tests/integration_tests/model_runtime/voyage/__init__.py b/api/tests/integration_tests/model_runtime/voyage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/model_runtime/voyage/test_provider.py b/api/tests/integration_tests/model_runtime/voyage/test_provider.py new file mode 100644 index 0000000000..08978c88a9 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/voyage/test_provider.py @@ -0,0 +1,25 @@ +import os +from unittest.mock import Mock, patch + +import pytest + +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.voyage.voyage import VoyageProvider + + +def test_validate_provider_credentials(): + provider = VoyageProvider() + + with pytest.raises(CredentialsValidateFailedError): + provider.validate_provider_credentials(credentials={"api_key": "hahahaha"}) + with patch("requests.post") as mock_post: + mock_response = Mock() + mock_response.json.return_value = { + "object": "list", + "data": [{"object": "embedding", "embedding": [0.23333 for _ in range(1024)], "index": 0}], + "model": "voyage-3", + "usage": {"total_tokens": 1}, + } + mock_response.status_code = 200 + mock_post.return_value = mock_response + provider.validate_provider_credentials(credentials={"api_key": os.environ.get("VOYAGE_API_KEY")}) diff --git a/api/tests/integration_tests/model_runtime/voyage/test_rerank.py b/api/tests/integration_tests/model_runtime/voyage/test_rerank.py new file mode 100644 index 0000000000..e97a9e4c81 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/voyage/test_rerank.py @@ -0,0 +1,92 @@ +import os +from unittest.mock import Mock, patch + +import pytest + +from core.model_runtime.entities.rerank_entities import RerankResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.voyage.rerank.rerank import VoyageRerankModel + + +def test_validate_credentials(): + model = VoyageRerankModel() + + with pytest.raises(CredentialsValidateFailedError): + 
model.validate_credentials( + model="rerank-lite-1", + credentials={"api_key": "invalid_key"}, + ) + with patch("httpx.post") as mock_post: + mock_response = Mock() + mock_response.json.return_value = { + "object": "list", + "data": [ + { + "relevance_score": 0.546875, + "index": 0, + "document": "Carson City is the capital city of the American state of Nevada. At the 2010 United " + "States Census, Carson City had a population of 55,274.", + }, + { + "relevance_score": 0.4765625, + "index": 1, + "document": "The Commonwealth of the Northern Mariana Islands is a group of islands in the " + "Pacific Ocean that are a political division controlled by the United States. Its " + "capital is Saipan.", + }, + ], + "model": "rerank-lite-1", + "usage": {"total_tokens": 96}, + } + mock_response.status_code = 200 + mock_post.return_value = mock_response + model.validate_credentials( + model="rerank-lite-1", + credentials={ + "api_key": os.environ.get("VOYAGE_API_KEY"), + }, + ) + + +def test_invoke_model(): + model = VoyageRerankModel() + with patch("httpx.post") as mock_post: + mock_response = Mock() + mock_response.json.return_value = { + "object": "list", + "data": [ + { + "relevance_score": 0.84375, + "index": 0, + "document": "Kasumi is a girl name of Japanese origin meaning mist.", + }, + { + "relevance_score": 0.4765625, + "index": 1, + "document": "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music and she " + "leads a team named PopiParty.", + }, + ], + "model": "rerank-lite-1", + "usage": {"total_tokens": 59}, + } + mock_response.status_code = 200 + mock_post.return_value = mock_response + result = model.invoke( + model="rerank-lite-1", + credentials={ + "api_key": os.environ.get("VOYAGE_API_KEY"), + }, + query="Who is Kasumi?", + docs=[ + "Kasumi is a girl name of Japanese origin meaning mist.", + "Her music is a kawaii bass, a mix of future bass, pop, and kawaii music and she leads a team named " + "PopiParty.", + ], + score_threshold=0.5, + ) + + assert isinstance(result, RerankResult) + assert len(result.docs) == 1 + assert result.docs[0].index == 0 + assert result.docs[0].score >= 0.5 diff --git a/api/tests/integration_tests/model_runtime/voyage/test_text_embedding.py b/api/tests/integration_tests/model_runtime/voyage/test_text_embedding.py new file mode 100644 index 0000000000..75719672a9 --- /dev/null +++ b/api/tests/integration_tests/model_runtime/voyage/test_text_embedding.py @@ -0,0 +1,70 @@ +import os +from unittest.mock import Mock, patch + +import pytest + +from core.model_runtime.entities.text_embedding_entities import TextEmbeddingResult +from core.model_runtime.errors.validate import CredentialsValidateFailedError +from core.model_runtime.model_providers.voyage.text_embedding.text_embedding import VoyageTextEmbeddingModel + + +def test_validate_credentials(): + model = VoyageTextEmbeddingModel() + + with pytest.raises(CredentialsValidateFailedError): + model.validate_credentials(model="voyage-3", credentials={"api_key": "invalid_key"}) + with patch("requests.post") as mock_post: + mock_response = Mock() + mock_response.json.return_value = { + "object": "list", + "data": [{"object": "embedding", "embedding": [0.23333 for _ in range(1024)], "index": 0}], + "model": "voyage-3", + "usage": {"total_tokens": 1}, + } + mock_response.status_code = 200 + mock_post.return_value = mock_response + model.validate_credentials(model="voyage-3", credentials={"api_key": os.environ.get("VOYAGE_API_KEY")}) + + +def test_invoke_model(): + model = VoyageTextEmbeddingModel() 
+ + with patch("requests.post") as mock_post: + mock_response = Mock() + mock_response.json.return_value = { + "object": "list", + "data": [ + {"object": "embedding", "embedding": [0.23333 for _ in range(1024)], "index": 0}, + {"object": "embedding", "embedding": [0.23333 for _ in range(1024)], "index": 1}, + ], + "model": "voyage-3", + "usage": {"total_tokens": 2}, + } + mock_response.status_code = 200 + mock_post.return_value = mock_response + result = model.invoke( + model="voyage-3", + credentials={ + "api_key": os.environ.get("VOYAGE_API_KEY"), + }, + texts=["hello", "world"], + user="abc-123", + ) + + assert isinstance(result, TextEmbeddingResult) + assert len(result.embeddings) == 2 + assert result.usage.total_tokens == 2 + + +def test_get_num_tokens(): + model = VoyageTextEmbeddingModel() + + num_tokens = model.get_num_tokens( + model="voyage-3", + credentials={ + "api_key": os.environ.get("VOYAGE_API_KEY"), + }, + texts=["ping"], + ) + + assert num_tokens == 1 diff --git a/api/tests/integration_tests/model_runtime/xinference/test_llm.py b/api/tests/integration_tests/model_runtime/xinference/test_llm.py index fb5e03855d..5e4cde3638 100644 --- a/api/tests/integration_tests/model_runtime/xinference/test_llm.py +++ b/api/tests/integration_tests/model_runtime/xinference/test_llm.py @@ -8,10 +8,8 @@ from core.model_runtime.entities.message_entities import ( AssistantPromptMessage, PromptMessageTool, SystemPromptMessage, - TextPromptMessageContent, UserPromptMessage, ) -from core.model_runtime.entities.model_entities import AIModelEntity from core.model_runtime.errors.validate import CredentialsValidateFailedError from core.model_runtime.model_providers.xinference.llm.llm import XinferenceAILargeLanguageModel diff --git a/api/tests/integration_tests/tools/__mock/http.py b/api/tests/integration_tests/tools/__mock/http.py index d3c1f3101c..42cf87e317 100644 --- a/api/tests/integration_tests/tools/__mock/http.py +++ b/api/tests/integration_tests/tools/__mock/http.py @@ -17,7 +17,7 @@ class MockedHttp: request = httpx.Request( method, url, params=kwargs.get("params"), headers=kwargs.get("headers"), cookies=kwargs.get("cookies") ) - data = kwargs.get("data", None) + data = kwargs.get("data") resp = json.dumps(data).encode("utf-8") if data else b"OK" response = httpx.Response( status_code=200, diff --git a/api/tests/integration_tests/vdb/__mock/baiduvectordb.py b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py new file mode 100644 index 0000000000..a8eaf42b7d --- /dev/null +++ b/api/tests/integration_tests/vdb/__mock/baiduvectordb.py @@ -0,0 +1,154 @@ +import os + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from pymochow import MochowClient +from pymochow.model.database import Database +from pymochow.model.enum import IndexState, IndexType, MetricType, ReadConsistency, TableState +from pymochow.model.schema import HNSWParams, VectorIndex +from pymochow.model.table import Table +from requests.adapters import HTTPAdapter + + +class MockBaiduVectorDBClass: + def mock_vector_db_client( + self, + config=None, + adapter: HTTPAdapter = None, + ): + self._conn = None + self._config = None + + def list_databases(self, config=None) -> list[Database]: + return [ + Database( + conn=self._conn, + database_name="dify", + config=self._config, + ) + ] + + def create_database(self, database_name: str, config=None) -> Database: + return Database(conn=self._conn, database_name=database_name, config=config) + + def list_table(self, config=None) -> list[Table]: + return [] + + def 
drop_table(self, table_name: str, config=None): + return {"code": 0, "msg": "Success"} + + def create_table( + self, + table_name: str, + replication: int, + partition: int, + schema, + enable_dynamic_field=False, + description: str = "", + config=None, + ) -> Table: + return Table(self, table_name, replication, partition, schema, enable_dynamic_field, description, config) + + def describe_table(self, table_name: str, config=None) -> Table: + return Table( + self, + table_name, + 3, + 1, + None, + enable_dynamic_field=False, + description="table for dify", + config=config, + state=TableState.NORMAL, + ) + + def upsert(self, rows, config=None): + return {"code": 0, "msg": "operation success", "affectedCount": 1} + + def rebuild_index(self, index_name: str, config=None): + return {"code": 0, "msg": "Success"} + + def describe_index(self, index_name: str, config=None): + return VectorIndex( + index_name=index_name, + index_type=IndexType.HNSW, + field="vector", + metric_type=MetricType.L2, + params=HNSWParams(m=16, efconstruction=200), + auto_build=False, + state=IndexState.NORMAL, + ) + + def query( + self, + primary_key, + partition_key=None, + projections=None, + retrieve_vector=False, + read_consistency=ReadConsistency.EVENTUAL, + config=None, + ): + return { + "row": { + "id": "doc_id_001", + "vector": [0.23432432, 0.8923744, 0.89238432], + "text": "text", + "metadata": {"doc_id": "doc_id_001"}, + }, + "code": 0, + "msg": "Success", + } + + def delete(self, primary_key=None, partition_key=None, filter=None, config=None): + return {"code": 0, "msg": "Success"} + + def search( + self, + anns, + partition_key=None, + projections=None, + retrieve_vector=False, + read_consistency=ReadConsistency.EVENTUAL, + config=None, + ): + return { + "rows": [ + { + "row": { + "id": "doc_id_001", + "vector": [0.23432432, 0.8923744, 0.89238432], + "text": "text", + "metadata": {"doc_id": "doc_id_001"}, + }, + "distance": 0.1, + "score": 0.5, + } + ], + "code": 0, + "msg": "Success", + } + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_baiduvectordb_mock(request, monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(MochowClient, "__init__", MockBaiduVectorDBClass.mock_vector_db_client) + monkeypatch.setattr(MochowClient, "list_databases", MockBaiduVectorDBClass.list_databases) + monkeypatch.setattr(MochowClient, "create_database", MockBaiduVectorDBClass.create_database) + monkeypatch.setattr(Database, "table", MockBaiduVectorDBClass.describe_table) + monkeypatch.setattr(Database, "list_table", MockBaiduVectorDBClass.list_table) + monkeypatch.setattr(Database, "create_table", MockBaiduVectorDBClass.create_table) + monkeypatch.setattr(Database, "drop_table", MockBaiduVectorDBClass.drop_table) + monkeypatch.setattr(Database, "describe_table", MockBaiduVectorDBClass.describe_table) + monkeypatch.setattr(Table, "rebuild_index", MockBaiduVectorDBClass.rebuild_index) + monkeypatch.setattr(Table, "describe_index", MockBaiduVectorDBClass.describe_index) + monkeypatch.setattr(Table, "delete", MockBaiduVectorDBClass.delete) + monkeypatch.setattr(Table, "search", MockBaiduVectorDBClass.search) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/integration_tests/vdb/__mock/tcvectordb.py b/api/tests/integration_tests/vdb/__mock/tcvectordb.py index 53c9b3cae3..61d6ed1656 100644 --- a/api/tests/integration_tests/vdb/__mock/tcvectordb.py +++ b/api/tests/integration_tests/vdb/__mock/tcvectordb.py @@ -48,7 +48,7 @@ class MockTcvectordbClass: description: 
str, index: Index, embedding: Embedding = None, - timeout: float = None, + timeout: Optional[float] = None, ) -> Collection: return Collection( self, @@ -97,9 +97,9 @@ class MockTcvectordbClass: def collection_delete( self, - document_ids: list[str] = None, + document_ids: Optional[list[str]] = None, filter: Filter = None, - timeout: float = None, + timeout: Optional[float] = None, ): return {"code": 0, "msg": "operation success"} diff --git a/api/tests/integration_tests/vdb/__mock/vikingdb.py b/api/tests/integration_tests/vdb/__mock/vikingdb.py new file mode 100644 index 0000000000..0f40337feb --- /dev/null +++ b/api/tests/integration_tests/vdb/__mock/vikingdb.py @@ -0,0 +1,215 @@ +import os +from typing import Union +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from volcengine.viking_db import ( + Collection, + Data, + DistanceType, + Field, + FieldType, + Index, + IndexType, + QuantType, + VectorIndexParams, + VikingDBService, +) + +from core.rag.datasource.vdb.field import Field as vdb_Field + + +class MockVikingDBClass: + def __init__( + self, + host="api-vikingdb.volces.com", + region="cn-north-1", + ak="", + sk="", + scheme="http", + connection_timeout=30, + socket_timeout=30, + proxy=None, + ): + self._viking_db_service = MagicMock() + self._viking_db_service.get_exception = MagicMock(return_value='{"data": {"primary_key": "test_id"}}') + + def get_collection(self, collection_name) -> Collection: + return Collection( + collection_name=collection_name, + description="Collection For Dify", + viking_db_service=self._viking_db_service, + primary_key=vdb_Field.PRIMARY_KEY.value, + fields=[ + Field(field_name=vdb_Field.PRIMARY_KEY.value, field_type=FieldType.String, is_primary_key=True), + Field(field_name=vdb_Field.METADATA_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.GROUP_KEY.value, field_type=FieldType.String), + Field(field_name=vdb_Field.CONTENT_KEY.value, field_type=FieldType.Text), + Field(field_name=vdb_Field.VECTOR.value, field_type=FieldType.Vector, dim=768), + ], + indexes=[ + Index( + collection_name=collection_name, + index_name=f"{collection_name}_idx", + vector_index=VectorIndexParams( + distance=DistanceType.L2, + index_type=IndexType.HNSW, + quant=QuantType.Float, + ), + scalar_index=None, + stat=None, + viking_db_service=self._viking_db_service, + ) + ], + ) + + def drop_collection(self, collection_name): + assert collection_name != "" + + def create_collection(self, collection_name, fields, description="") -> Collection: + return Collection( + collection_name=collection_name, + description=description, + primary_key=vdb_Field.PRIMARY_KEY.value, + viking_db_service=self._viking_db_service, + fields=fields, + ) + + def get_index(self, collection_name, index_name) -> Index: + return Index( + collection_name=collection_name, + index_name=index_name, + viking_db_service=self._viking_db_service, + stat=None, + scalar_index=None, + vector_index=VectorIndexParams( + distance=DistanceType.L2, + index_type=IndexType.HNSW, + quant=QuantType.Float, + ), + ) + + def create_index( + self, + collection_name, + index_name, + vector_index=None, + cpu_quota=2, + description="", + partition_by="", + scalar_index=None, + shard_count=None, + shard_policy=None, + ): + return Index( + collection_name=collection_name, + index_name=index_name, + vector_index=vector_index, + cpu_quota=cpu_quota, + description=description, + partition_by=partition_by, + scalar_index=scalar_index, + shard_count=shard_count, + 
shard_policy=shard_policy, + viking_db_service=self._viking_db_service, + stat=None, + ) + + def drop_index(self, collection_name, index_name): + assert collection_name != "" + assert index_name != "" + + def upsert_data(self, data: Union[Data, list[Data]]): + assert data is not None + + def fetch_data(self, id: Union[str, list[str], int, list[int]]): + return Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: "{}", + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: id, + vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + }, + id=id, + ) + + def delete_data(self, id: Union[str, list[str], int, list[int]]): + assert id is not None + + def search_by_vector( + self, + vector, + sparse_vectors=None, + filter=None, + limit=10, + output_fields=None, + partition="default", + dense_weight=None, + ) -> list[Data]: + return [ + Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: '\ + {"source": "/var/folders/ml/xxx/xxx.txt", \ + "document_id": "test_document_id", \ + "dataset_id": "test_dataset_id", \ + "doc_id": "test_id", \ + "doc_hash": "test_hash"}', + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: "test_id", + vdb_Field.VECTOR.value: vector, + }, + id="test_id", + score=0.10, + ) + ] + + def search( + self, order=None, filter=None, limit=10, output_fields=None, partition="default", dense_weight=None + ) -> list[Data]: + return [ + Data( + fields={ + vdb_Field.GROUP_KEY.value: "test_group", + vdb_Field.METADATA_KEY.value: '\ + {"source": "/var/folders/ml/xxx/xxx.txt", \ + "document_id": "test_document_id", \ + "dataset_id": "test_dataset_id", \ + "doc_id": "test_id", \ + "doc_hash": "test_hash"}', + vdb_Field.CONTENT_KEY.value: "content", + vdb_Field.PRIMARY_KEY.value: "test_id", + vdb_Field.VECTOR.value: [-0.00762577411336441, -0.01949881482151406, 0.008832383941428398], + }, + id="test_id", + score=0.10, + ) + ] + + +MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true" + + +@pytest.fixture +def setup_vikingdb_mock(monkeypatch: MonkeyPatch): + if MOCK: + monkeypatch.setattr(VikingDBService, "__init__", MockVikingDBClass.__init__) + monkeypatch.setattr(VikingDBService, "get_collection", MockVikingDBClass.get_collection) + monkeypatch.setattr(VikingDBService, "create_collection", MockVikingDBClass.create_collection) + monkeypatch.setattr(VikingDBService, "drop_collection", MockVikingDBClass.drop_collection) + monkeypatch.setattr(VikingDBService, "get_index", MockVikingDBClass.get_index) + monkeypatch.setattr(VikingDBService, "create_index", MockVikingDBClass.create_index) + monkeypatch.setattr(VikingDBService, "drop_index", MockVikingDBClass.drop_index) + monkeypatch.setattr(Collection, "upsert_data", MockVikingDBClass.upsert_data) + monkeypatch.setattr(Collection, "fetch_data", MockVikingDBClass.fetch_data) + monkeypatch.setattr(Collection, "delete_data", MockVikingDBClass.delete_data) + monkeypatch.setattr(Index, "search_by_vector", MockVikingDBClass.search_by_vector) + monkeypatch.setattr(Index, "search", MockVikingDBClass.search) + + yield + + if MOCK: + monkeypatch.undo() diff --git a/api/tests/integration_tests/vdb/baidu/__init__.py b/api/tests/integration_tests/vdb/baidu/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/baidu/test_baidu.py b/api/tests/integration_tests/vdb/baidu/test_baidu.py new file mode 100644 index 0000000000..01a7f8853a --- /dev/null +++ 
b/api/tests/integration_tests/vdb/baidu/test_baidu.py @@ -0,0 +1,36 @@ +from unittest.mock import MagicMock + +from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector +from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock +from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis + +mock_client = MagicMock() +mock_client.list_databases.return_value = [{"name": "test"}] + + +class BaiduVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = BaiduVector( + "dify", + BaiduConfig( + endpoint="http://127.0.0.1:5287", + account="root", + api_key="dify", + database="dify", + shard=1, + replicas=3, + ), + ) + + def search_by_vector(self): + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 1 + + def search_by_full_text(self): + hits_by_full_text = self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + +def test_baidu_vector(setup_mock_redis, setup_baiduvectordb_mock): + BaiduVectorTest().run_all_tests() diff --git a/api/tests/integration_tests/vdb/pgvector/test_pgvector.py b/api/tests/integration_tests/vdb/pgvector/test_pgvector.py index c5a986b747..3d2cfde5d1 100644 --- a/api/tests/integration_tests/vdb/pgvector/test_pgvector.py +++ b/api/tests/integration_tests/vdb/pgvector/test_pgvector.py @@ -1,5 +1,4 @@ from core.rag.datasource.vdb.pgvector.pgvector import PGVector, PGVectorConfig -from core.rag.models.document import Document from tests.integration_tests.vdb.test_vector_store import ( AbstractVectorTest, get_example_text, @@ -18,6 +17,8 @@ class PGVectorTest(AbstractVectorTest): user="postgres", password="difyai123456", database="dify", + min_connection=1, + max_connection=5, ), ) diff --git a/api/tests/integration_tests/vdb/test_vector_store.py b/api/tests/integration_tests/vdb/test_vector_store.py index a11cd225b3..50519e2052 100644 --- a/api/tests/integration_tests/vdb/test_vector_store.py +++ b/api/tests/integration_tests/vdb/test_vector_store.py @@ -1,4 +1,3 @@ -import random import uuid from unittest.mock import MagicMock diff --git a/api/tests/integration_tests/vdb/vikingdb/__init__.py b/api/tests/integration_tests/vdb/vikingdb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py new file mode 100644 index 0000000000..2572012ea0 --- /dev/null +++ b/api/tests/integration_tests/vdb/vikingdb/test_vikingdb.py @@ -0,0 +1,37 @@ +from core.rag.datasource.vdb.vikingdb.vikingdb_vector import VikingDBConfig, VikingDBVector +from tests.integration_tests.vdb.__mock.vikingdb import setup_vikingdb_mock +from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis + + +class VikingDBVectorTest(AbstractVectorTest): + def __init__(self): + super().__init__() + self.vector = VikingDBVector( + "test_collection", + "test_group", + config=VikingDBConfig( + access_key="test_access_key", + host="test_host", + region="test_region", + scheme="test_scheme", + secret_key="test_secret_key", + connection_timeout=30, + socket_timeout=30, + ), + ) + + def search_by_vector(self): + hits_by_vector = self.vector.search_by_vector(query_vector=self.example_embedding) + assert len(hits_by_vector) == 1 + + def search_by_full_text(self): + hits_by_full_text = 
self.vector.search_by_full_text(query=get_example_text()) + assert len(hits_by_full_text) == 0 + + def get_ids_by_metadata_field(self): + ids = self.vector.get_ids_by_metadata_field(key="document_id", value="test_document_id") + assert len(ids) > 0 + + +def test_vikingdb_vector(setup_mock_redis, setup_vikingdb_mock): + VikingDBVectorTest().run_all_tests() diff --git a/api/tests/integration_tests/workflow/nodes/__mock/code_executor.py b/api/tests/integration_tests/workflow/nodes/__mock/code_executor.py index 6fb8c86b82..30414811ea 100644 --- a/api/tests/integration_tests/workflow/nodes/__mock/code_executor.py +++ b/api/tests/integration_tests/workflow/nodes/__mock/code_executor.py @@ -1,5 +1,5 @@ import os -from typing import Literal, Optional +from typing import Literal import pytest from _pytest.monkeypatch import MonkeyPatch diff --git a/api/tests/integration_tests/workflow/nodes/__mock/http.py b/api/tests/integration_tests/workflow/nodes/__mock/http.py index f1ab23b002..ec013183b7 100644 --- a/api/tests/integration_tests/workflow/nodes/__mock/http.py +++ b/api/tests/integration_tests/workflow/nodes/__mock/http.py @@ -22,8 +22,8 @@ class MockedHttp: return response # get data, files - data = kwargs.get("data", None) - files = kwargs.get("files", None) + data = kwargs.get("data") + files = kwargs.get("files") if data is not None: resp = dumps(data).encode("utf-8") elif files is not None: diff --git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index 88435c4022..4c695f7443 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -1,4 +1,3 @@ -import json import os import time import uuid diff --git a/api/tests/unit_tests/controllers/test_compare_versions.py b/api/tests/unit_tests/controllers/test_compare_versions.py new file mode 100644 index 0000000000..87902b6d44 --- /dev/null +++ b/api/tests/unit_tests/controllers/test_compare_versions.py @@ -0,0 +1,38 @@ +import pytest + +from controllers.console.version import _has_new_version + + +@pytest.mark.parametrize( + ("latest_version", "current_version", "expected"), + [ + ("1.0.1", "1.0.0", True), + ("1.1.0", "1.0.0", True), + ("2.0.0", "1.9.9", True), + ("1.0.0", "1.0.0", False), + ("1.0.0", "1.0.1", False), + ("1.0.0", "2.0.0", False), + ("1.0.1", "1.0.0-beta", True), + ("1.0.0", "1.0.0-alpha", True), + ("1.0.0-beta", "1.0.0-alpha", True), + ("1.0.0", "1.0.0-rc1", True), + ("1.0.0", "0.9.9", True), + ("1.0.0", "1.0.0-dev", True), + ], +) +def test_has_new_version(latest_version, current_version, expected): + assert _has_new_version(latest_version=latest_version, current_version=current_version) == expected + + +def test_has_new_version_invalid_input(): + with pytest.raises(ValueError): + _has_new_version(latest_version="1.0", current_version="1.0.0") + + with pytest.raises(ValueError): + _has_new_version(latest_version="1.0.0", current_version="1.0") + + with pytest.raises(ValueError): + _has_new_version(latest_version="invalid", current_version="1.0.0") + + with pytest.raises(ValueError): + _has_new_version(latest_version="1.0.0", current_version="invalid") diff --git a/api/tests/unit_tests/core/app/segments/test_variables.py b/api/tests/unit_tests/core/app/segments/test_variables.py index b3f0ae626c..6179675cde 100644 --- a/api/tests/unit_tests/core/app/segments/test_variables.py +++ 
b/api/tests/unit_tests/core/app/segments/test_variables.py @@ -2,7 +2,6 @@ import pytest from pydantic import ValidationError from core.app.segments import ( - ArrayAnyVariable, FloatVariable, IntegerVariable, ObjectVariable, diff --git a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py index d5a1d8f436..8fcdf2e8e5 100644 --- a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py +++ b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py @@ -1,9 +1,6 @@ import os -from unittest import mock from core.rag.extractor.firecrawl.firecrawl_app import FirecrawlApp -from core.rag.extractor.firecrawl.firecrawl_web_extractor import FirecrawlWebExtractor -from core.rag.models.document import Document from tests.unit_tests.core.rag.extractor.test_notion_extractor import _mock_response diff --git a/dev/pytest/pytest_artifacts.sh b/dev/pytest/pytest_artifacts.sh new file mode 100755 index 0000000000..d52acb2273 --- /dev/null +++ b/dev/pytest/pytest_artifacts.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -x + +pytest api/tests/artifact_tests/ diff --git a/dev/pytest/pytest_model_runtime.sh b/dev/pytest/pytest_model_runtime.sh index b60ff64fdc..63891eb9f8 100755 --- a/dev/pytest/pytest_model_runtime.sh +++ b/dev/pytest/pytest_model_runtime.sh @@ -9,4 +9,5 @@ pytest api/tests/integration_tests/model_runtime/anthropic \ api/tests/integration_tests/model_runtime/upstage \ api/tests/integration_tests/model_runtime/fireworks \ api/tests/integration_tests/model_runtime/nomic \ - api/tests/integration_tests/model_runtime/mixedbread + api/tests/integration_tests/model_runtime/mixedbread \ + api/tests/integration_tests/model_runtime/voyage \ No newline at end of file diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh index 0b23200dc3..6809ef7c6f 100755 --- a/dev/pytest/pytest_vdb.sh +++ b/dev/pytest/pytest_vdb.sh @@ -8,4 +8,4 @@ pytest api/tests/integration_tests/vdb/chroma \ api/tests/integration_tests/vdb/qdrant \ api/tests/integration_tests/vdb/weaviate \ api/tests/integration_tests/vdb/elasticsearch \ - api/tests/integration_tests/vdb/test_vector_store.py \ No newline at end of file + api/tests/integration_tests/vdb/vikingdb diff --git a/dev/sync-poetry b/dev/sync-poetry index 2dd4dd4fc3..23d5d79e90 100755 --- a/dev/sync-poetry +++ b/dev/sync-poetry @@ -11,5 +11,8 @@ poetry check -C api --lock if [ $? -ne 0 ]; then # update poetry.lock # refreshing lockfile only without updating locked versions + echo "poetry.lock is outdated, refreshing without updating locked versions ..." poetry lock -C api --no-update +else + echo "poetry.lock is ready." fi diff --git a/docker-legacy/docker-compose.yaml b/docker-legacy/docker-compose.yaml index 1636bb6a21..3f230b47ab 100644 --- a/docker-legacy/docker-compose.yaml +++ b/docker-legacy/docker-compose.yaml @@ -2,7 +2,7 @@ version: '3' services: # API service api: - image: langgenius/dify-api:0.8.3 + image: langgenius/dify-api:0.9.1 restart: always environment: # Startup mode, 'api' starts the API server. @@ -227,7 +227,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.8.3 + image: langgenius/dify-api:0.9.1 restart: always environment: CONSOLE_WEB_URL: '' @@ -396,7 +396,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:0.8.3 + image: langgenius/dify-web:0.9.1 restart: always environment: # The base URL of console application api server, refers to the Console base URL of WEB service if console domain is diff --git a/docker/.env.example b/docker/.env.example index d43c3edc7e..969deadf67 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -91,6 +91,9 @@ MIGRATION_ENABLED=true # The default value is 300 seconds. FILES_ACCESS_TIMEOUT=300 +# Access token expiration time in minutes +ACCESS_TOKEN_EXPIRE_MINUTES=60 + # The maximum number of active requests for the application, where 0 means unlimited, should be a non-negative integer. APP_MAX_ACTIVE_REQUESTS=0 @@ -261,7 +264,7 @@ CONSOLE_CORS_ALLOW_ORIGINS=* # ------------------------------ # The type of storage to use for storing user files. -# Supported values are `local` and `s3` and `azure-blob` and `google-storage` and `tencent-cos` and `huawei-obs` +# Supported values are `local` , `s3` , `azure-blob` , `google-storage`, `tencent-cos`, `huawei-obs`, `volcengine-tos`, `baidu-obs`, `supabase` # Default: `local` STORAGE_TYPE=local @@ -341,6 +344,24 @@ VOLCENGINE_TOS_ENDPOINT=your-server-url # The region of the Volcengine TOS service. VOLCENGINE_TOS_REGION=your-region +# Baidu OBS Storage Configuration +# The name of the Baidu OBS bucket to use for storing files. +BAIDU_OBS_BUCKET_NAME=your-bucket-name +# The secret key to use for authenticating with the Baidu OBS service. +BAIDU_OBS_SECRET_KEY=your-secret-key +# The access key to use for authenticating with the Baidu OBS service. +BAIDU_OBS_ACCESS_KEY=your-access-key +# The endpoint of the Baidu OBS service. +BAIDU_OBS_ENDPOINT=your-server-url + +# Supabase Storage Configuration +# The name of the Supabase bucket to use for storing files. +SUPABASE_BUCKET_NAME=your-bucket-name +# The api key to use for authenticating with the Supabase service. +SUPABASE_API_KEY=your-access-key +# The project endpoint url of the Supabase service. 
+SUPABASE_URL=your-server-url + # ------------------------------ # Vector Database Configuration # ------------------------------ @@ -391,6 +412,8 @@ PGVECTOR_PORT=5432 PGVECTOR_USER=postgres PGVECTOR_PASSWORD=difyai123456 PGVECTOR_DATABASE=dify +PGVECTOR_MIN_CONNECTION=1 +PGVECTOR_MAX_CONNECTION=5 # pgvecto-rs configurations, only available when VECTOR_STORE is `pgvecto-rs` PGVECTO_RS_HOST=pgvecto-rs @@ -460,6 +483,15 @@ ELASTICSEARCH_PORT=9200 ELASTICSEARCH_USERNAME=elastic ELASTICSEARCH_PASSWORD=elastic +# baidu vector configurations, only available when VECTOR_STORE is `baidu` +BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287 +BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000 +BAIDU_VECTOR_DB_ACCOUNT=root +BAIDU_VECTOR_DB_API_KEY=dify +BAIDU_VECTOR_DB_DATABASE=dify +BAIDU_VECTOR_DB_SHARD=1 +BAIDU_VECTOR_DB_REPLICAS=3 + # ------------------------------ # Knowledge Configuration # ------------------------------ @@ -795,4 +827,6 @@ POSITION_TOOL_EXCLUDES= # Example: POSITION_PROVIDER_PINS=openai,openllm POSITION_PROVIDER_PINS= POSITION_PROVIDER_INCLUDES= -POSITION_PROVIDER_EXCLUDES= \ No newline at end of file +POSITION_PROVIDER_EXCLUDES= +# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP +CSP_WHITELIST= \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 95e271a0e9..5db11d1961 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -47,6 +47,7 @@ x-shared-env: &shared-api-worker-env REDIS_SENTINEL_SERVICE_NAME: ${REDIS_SENTINEL_SERVICE_NAME:-} REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-} REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-} + ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1} CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1} BROKER_USE_SSL: ${BROKER_USE_SSL:-false} @@ -165,6 +166,18 @@ x-shared-env: &shared-api-worker-env TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE:-dify} TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD:-1} TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS:-2} + BAIDU_VECTOR_DB_ENDPOINT: ${BAIDU_VECTOR_DB_ENDPOINT:-http://127.0.0.1:5287} + BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: ${BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS:-30000} + BAIDU_VECTOR_DB_ACCOUNT: ${BAIDU_VECTOR_DB_ACCOUNT:-root} + BAIDU_VECTOR_DB_API_KEY: ${BAIDU_VECTOR_DB_API_KEY:-dify} + BAIDU_VECTOR_DB_DATABASE: ${BAIDU_VECTOR_DB_DATABASE:-dify} + BAIDU_VECTOR_DB_SHARD: ${BAIDU_VECTOR_DB_SHARD:-1} + BAIDU_VECTOR_DB_REPLICAS: ${BAIDU_VECTOR_DB_REPLICAS:-3} + VIKINGDB_ACCESS_KEY: ${VIKINGDB_ACCESS_KEY:-dify} + VIKINGDB_SECRET_KEY: ${VIKINGDB_SECRET_KEY:-dify} + VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai} + VIKINGDB_HOST: ${VIKINGDB_HOST:-api-vikingdb.xxx.volces.com} + VIKINGDB_SCHEMA: ${VIKINGDB_SCHEMA:-http} UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15} UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5} ETL_TYPE: ${ETL_TYPE:-dify} @@ -204,16 +217,17 @@ x-shared-env: &shared-api-worker-env CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000} WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500} WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200} - WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_MAX_EXECUTION_TIME:-5} + WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5} SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128} SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128} HTTP_REQUEST_NODE_MAX_BINARY_SIZE: 
${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760} HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576} + APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-12000} services: # API service api: - image: langgenius/dify-api:0.8.3 + image: langgenius/dify-api:0.9.1 restart: always environment: # Use the shared environment variables. @@ -233,7 +247,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:0.8.3 + image: langgenius/dify-api:0.9.1 restart: always environment: # Use the shared environment variables. @@ -252,7 +266,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:0.8.3 + image: langgenius/dify-web:0.9.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -260,6 +274,7 @@ services: SENTRY_DSN: ${WEB_SENTRY_DSN:-} NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0} TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000} + CSP_WHITELIST: ${CSP_WHITELIST:-} # The postgres database. db: @@ -279,7 +294,7 @@ services: volumes: - ./volumes/db/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -294,7 +309,7 @@ services: # Set the redis password when startup redis server. command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456} healthcheck: - test: [ "CMD", "redis-cli", "ping" ] + test: ['CMD', 'redis-cli', 'ping'] # The DifySandbox sandbox: @@ -314,7 +329,7 @@ services: volumes: - ./volumes/sandbox/dependencies:/dependencies healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:8194/health" ] + test: ['CMD', 'curl', '-f', 'http://localhost:8194/health'] networks: - ssrf_proxy_network @@ -327,7 +342,12 @@ services: volumes: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh - entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + 'sh', + '-c', + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: # pls clearly modify the squid env vars to fit your network environment. HTTP_PORT: ${SSRF_HTTP_PORT:-3128} @@ -356,8 +376,8 @@ services: - CERTBOT_EMAIL=${CERTBOT_EMAIL} - CERTBOT_DOMAIN=${CERTBOT_DOMAIN} - CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-} - entrypoint: [ "/docker-entrypoint.sh" ] - command: [ "tail", "-f", "/dev/null" ] + entrypoint: ['/docker-entrypoint.sh'] + command: ['tail', '-f', '/dev/null'] # The nginx reverse proxy. # used for reverse proxying the API service and Web service. 
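The compose hunks above depend on Docker Compose's ${VAR:-default} interpolation, which falls back to the default whenever the variable is unset or empty; that is also what makes the WORKFLOW_CALL_MAX_DEPTH correction earlier in this file meaningful, since the call depth previously read ${WORKFLOW_MAX_EXECUTION_TIME:-5}. A minimal sketch of the same fallback rule in TypeScript, assuming Node's process.env (illustration only, not part of the patch):

// Mirror of Compose's ${NAME:-fallback}: use the fallback when the
// variable is unset *or* empty (":-" rather than "-").
const envOr = (name: string, fallback: string): string => {
  const value = process.env[name]
  return value === undefined || value === '' ? fallback : value
}

// Before the fix, the call depth silently tracked the execution-time variable.
console.log(envOr('WORKFLOW_CALL_MAX_DEPTH', '5')) // '5' unless explicitly overridden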
@@ -374,7 +394,12 @@ services: - ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container) - ./volumes/certbot/conf:/etc/letsencrypt - ./volumes/certbot/www:/var/www/html - entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + 'sh', + '-c', + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_} NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false} @@ -396,14 +421,14 @@ services: - api - web ports: - - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}" - - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}" + - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}' + - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}' # The Weaviate vector store. weaviate: image: semitechnologies/weaviate:1.19.0 profiles: - - "" + - '' - weaviate restart: always volumes: @@ -452,7 +477,7 @@ services: volumes: - ./volumes/pgvector/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -474,7 +499,7 @@ services: volumes: - ./volumes/pgvecto_rs/data:/var/lib/postgresql/data healthcheck: - test: [ "CMD", "pg_isready" ] + test: ['CMD', 'pg_isready'] interval: 1s timeout: 3s retries: 30 @@ -522,7 +547,7 @@ services: - ./volumes/milvus/etcd:/etcd command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd healthcheck: - test: [ "CMD", "etcdctl", "endpoint", "health" ] + test: ['CMD', 'etcdctl', 'endpoint', 'health'] interval: 30s timeout: 20s retries: 3 @@ -541,7 +566,7 @@ services: - ./volumes/milvus/minio:/minio_data command: minio server /minio_data --console-address ":9001" healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:9000/minio/health/live" ] + test: ['CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live'] interval: 30s timeout: 20s retries: 3 @@ -553,7 +578,7 @@ services: image: milvusdb/milvus:v2.3.1 profiles: - milvus - command: [ "milvus", "run", "standalone" ] + command: ['milvus', 'run', 'standalone'] environment: ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379} MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000} @@ -561,7 +586,7 @@ services: volumes: - ./volumes/milvus/milvus:/var/lib/milvus healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:9091/healthz" ] + test: ['CMD', 'curl', '-f', 'http://localhost:9091/healthz'] interval: 30s start_period: 90s timeout: 20s @@ -630,7 +655,7 @@ services: # https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html # https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html#docker-prod-prerequisites elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.15.1 + image: docker.elastic.co/elasticsearch/elasticsearch:8.14.3 container_name: elasticsearch profiles: - elasticsearch @@ -643,13 +668,13 @@ services: node.name: dify-es0 discovery.type: single-node xpack.license.self_generated.type: trial - xpack.security.enabled: "true" - xpack.security.enrollment.enabled: "false" - xpack.security.http.ssl.enabled: "false" + xpack.security.enabled: 'true' + xpack.security.enrollment.enabled: 'false' + xpack.security.http.ssl.enabled: 'false' ports: - ${ELASTICSEARCH_PORT:-9200}:9200 healthcheck: - test: [ "CMD", "curl", "-s", 
"http://localhost:9200/_cluster/health?pretty" ] + test: ['CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty'] interval: 30s timeout: 10s retries: 50 @@ -657,7 +682,7 @@ services: # https://www.elastic.co/guide/en/kibana/current/docker.html # https://www.elastic.co/guide/en/kibana/current/settings.html kibana: - image: docker.elastic.co/kibana/kibana:8.15.1 + image: docker.elastic.co/kibana/kibana:8.14.3 container_name: kibana profiles: - elasticsearch @@ -667,17 +692,17 @@ services: environment: XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana - XPACK_SECURITY_ENABLED: "true" - XPACK_SECURITY_ENROLLMENT_ENABLED: "false" - XPACK_SECURITY_HTTP_SSL_ENABLED: "false" - XPACK_FLEET_ISAIRGAPPED: "true" + XPACK_SECURITY_ENABLED: 'true' + XPACK_SECURITY_ENROLLMENT_ENABLED: 'false' + XPACK_SECURITY_HTTP_SSL_ENABLED: 'false' + XPACK_FLEET_ISAIRGAPPED: 'true' I18N_LOCALE: zh-CN - SERVER_PORT: "5601" + SERVER_PORT: '5601' ELASTICSEARCH_HOSTS: http://elasticsearch:9200 ports: - ${KIBANA_PORT:-5601}:5601 healthcheck: - test: [ "CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1" ] + test: ['CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1'] interval: 30s timeout: 10s retries: 3 diff --git a/web/.env.example b/web/.env.example index 8e254082b3..13ea01a2c7 100644 --- a/web/.env.example +++ b/web/.env.example @@ -22,3 +22,6 @@ NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON=false # The timeout for the text generation in millisecond NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS=60000 + +# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP +NEXT_PUBLIC_CSP_WHITELIST= diff --git a/web/Dockerfile b/web/Dockerfile index 48bdb2301a..29f7675f4a 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -46,21 +46,27 @@ ENV TZ=UTC RUN ln -s /usr/share/zoneinfo/${TZ} /etc/localtime \ && echo ${TZ} > /etc/timezone -# global runtime packages -RUN yarn global add pm2 \ - && yarn cache clean WORKDIR /app/web COPY --from=builder /app/web/public ./public COPY --from=builder /app/web/.next/standalone ./ COPY --from=builder /app/web/.next/static ./.next/static - COPY docker/pm2.json ./pm2.json COPY docker/entrypoint.sh ./entrypoint.sh + +# global runtime packages +RUN yarn global add pm2 \ + && yarn cache clean \ + && mkdir /.pm2 \ + && chown -R 1001:0 /.pm2 /app/web \ + && chmod -R g=u /.pm2 /app/web + + ARG COMMIT_SHA ENV COMMIT_SHA=${COMMIT_SHA} +USER 1001 EXPOSE 3000 ENTRYPOINT ["/bin/sh", "./entrypoint.sh"] diff --git a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout.tsx b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout.tsx index e691cc05f6..a58027bcd1 100644 --- a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout.tsx +++ b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout.tsx @@ -1,6 +1,6 @@ 'use client' import type { FC, SVGProps } from 'react' -import React, { useEffect } from 'react' +import React, { useEffect, useMemo } from 'react' import { usePathname } from 'next/navigation' import useSWR from 'swr' import { useTranslation } from 'react-i18next' @@ -203,12 +203,23 @@ const DatasetDetailLayout: FC = (props) => { datasetId, }, apiParams => fetchDatasetRelatedApps(apiParams.datasetId)) - const navigation = [ - { name: t('common.datasetMenus.documents'), href: `/datasets/${datasetId}/documents`, icon: DocumentTextIcon, selectedIcon: DocumentTextSolidIcon }, - { name: t('common.datasetMenus.hitTesting'), href: 
`/datasets/${datasetId}/hitTesting`, icon: TargetIcon, selectedIcon: TargetSolidIcon }, - // { name: 'api & webhook', href: `/datasets/${datasetId}/api`, icon: CommandLineIcon, selectedIcon: CommandLineSolidIcon }, - { name: t('common.datasetMenus.settings'), href: `/datasets/${datasetId}/settings`, icon: Cog8ToothIcon, selectedIcon: Cog8ToothSolidIcon }, - ] + const navigation = useMemo(() => { + const baseNavigation = [ + { name: t('common.datasetMenus.hitTesting'), href: `/datasets/${datasetId}/hitTesting`, icon: TargetIcon, selectedIcon: TargetSolidIcon }, + // { name: 'api & webhook', href: `/datasets/${datasetId}/api`, icon: CommandLineIcon, selectedIcon: CommandLineSolidIcon }, + { name: t('common.datasetMenus.settings'), href: `/datasets/${datasetId}/settings`, icon: Cog8ToothIcon, selectedIcon: Cog8ToothSolidIcon }, + ] + + if (datasetRes?.provider !== 'external') { + baseNavigation.unshift({ + name: t('common.datasetMenus.documents'), + href: `/datasets/${datasetId}/documents`, + icon: DocumentTextIcon, + selectedIcon: DocumentTextSolidIcon, + }) + } + return baseNavigation + }, [datasetRes?.provider, datasetId, t]) useEffect(() => { if (datasetRes) @@ -233,6 +244,7 @@ const DatasetDetailLayout: FC = (props) => { icon={datasetRes?.icon || 'https://static.dify.ai/images/dataset-default-icon.png'} icon_background={datasetRes?.icon_background || '#F5F5F5'} desc={datasetRes?.description || '--'} + isExternal={datasetRes?.provider === 'external'} navigation={navigation} extraInfo={!isCurrentWorkspaceDatasetOperator ? mode => : undefined} iconType={datasetRes?.data_source_type === DataSourceType.NOTION ? 'notion' : 'dataset'} diff --git a/web/app/(commonLayout)/datasets/Container.tsx b/web/app/(commonLayout)/datasets/Container.tsx index f532ca416f..e350a85354 100644 --- a/web/app/(commonLayout)/datasets/Container.tsx +++ b/web/app/(commonLayout)/datasets/Container.tsx @@ -8,6 +8,7 @@ import { useDebounceFn } from 'ahooks' import useSWR from 'swr' // Components +import ExternalAPIPanel from '../../components/datasets/external-api/external-api-panel' import Datasets from './Datasets' import DatasetFooter from './DatasetFooter' import ApiServer from './ApiServer' @@ -16,6 +17,8 @@ import TabSliderNew from '@/app/components/base/tab-slider-new' import SearchInput from '@/app/components/base/search-input' import TagManagementModal from '@/app/components/base/tag-management' import TagFilter from '@/app/components/base/tag-management/filter' +import Button from '@/app/components/base/button' +import { ApiConnectionMod } from '@/app/components/base/icons/src/vender/solid/development' // Services import { fetchDatasetApiBaseUrl } from '@/service/datasets' @@ -24,12 +27,14 @@ import { fetchDatasetApiBaseUrl } from '@/service/datasets' import { useTabSearchParams } from '@/hooks/use-tab-searchparams' import { useStore as useTagStore } from '@/app/components/base/tag-management/store' import { useAppContext } from '@/context/app-context' +import { useExternalApiPanel } from '@/context/external-api-panel-context' const Container = () => { const { t } = useTranslation() const router = useRouter() const { currentWorkspace } = useAppContext() const showTagManagementModal = useTagStore(s => s.showTagManagementModal) + const { showExternalApiPanel, setShowExternalApiPanel } = useExternalApiPanel() const options = useMemo(() => { return [ @@ -66,7 +71,7 @@ const Container = () => { useEffect(() => { if (currentWorkspace.role === 'normal') return router.replace('/apps') - }, [currentWorkspace]) + }, 
[currentWorkspace, router]) return (
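Container.tsx consumes useExternalApiPanel from '@/context/external-api-panel-context', a module this diff references but does not show. A plausible minimal shape for that provider, assuming a plain React context (the repo may implement it differently; note the zustand store added later in this diff at web/app/(commonLayout)/datasets/store.ts carries the same two fields):

import React, { createContext, useContext, useState } from 'react'

type ExternalApiPanelContextValue = {
  showExternalApiPanel: boolean
  setShowExternalApiPanel: (show: boolean) => void
}

const ExternalApiPanelContext = createContext<ExternalApiPanelContextValue | null>(null)

// Wraps the datasets routes (see the new datasets/layout.tsx below).
export const ExternalApiPanelProvider = ({ children }: { children: React.ReactNode }) => {
  const [showExternalApiPanel, setShowExternalApiPanel] = useState(false)
  return (
    <ExternalApiPanelContext.Provider value={{ showExternalApiPanel, setShowExternalApiPanel }}>
      {children}
    </ExternalApiPanelContext.Provider>
  )
}

export const useExternalApiPanel = () => {
  const ctx = useContext(ExternalApiPanelContext)
  if (!ctx)
    throw new Error('useExternalApiPanel must be used inside ExternalApiPanelProvider')
  return ctx
}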
@@ -80,11 +85,18 @@ const Container = () => {
+
+
)} {activeTab === 'api' && data && <ApiServer apiBaseUrl={data.api_base_url} />}
- {activeTab === 'dataset' && ( <> @@ -94,10 +106,10 @@ const Container = () => { )} )} - {activeTab === 'api' && data && } -
+ {showExternalApiPanel && <ExternalAPIPanel onClose={() => setShowExternalApiPanel(false)} />} + ) }
{ e.preventDefault() - push(`/datasets/${dataset.id}/documents`) + isExternalProvider(dataset.provider) + ? push(`/datasets/${dataset.id}/hitTesting`) + : push(`/datasets/${dataset.id}/documents`) }} > + {isExternalProvider(dataset.provider) && }
- {dataset.document_count}{t('dataset.documentCount')} - · - {Math.round(dataset.word_count / 1000)}{t('dataset.wordCount')} - · - {dataset.app_count}{t('dataset.appCount')} + {dataset.provider === 'external' + ? <> + {dataset.app_count}{t('dataset.appCount')} + + : <> + {dataset.document_count}{t('dataset.documentCount')} + · + {Math.round(dataset.word_count / 1000)}{t('dataset.wordCount')} + · + {dataset.app_count}{t('dataset.appCount')} + + }
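The DatasetCard hunk above branches both the card's click target and its footer stats on dataset.provider === 'external': external knowledge bases have no local documents, so the card routes straight to hit testing and shows only the linked-app count. The routing rule, condensed (helper names match the hunk; datasetDetailPath is an illustrative name):

const EXTERNAL_PROVIDER = 'external' as const
const isExternalProvider = (provider: string): boolean => provider === EXTERNAL_PROVIDER

// External datasets skip the documents page entirely.
const datasetDetailPath = (datasetId: string, provider: string): string =>
  isExternalProvider(provider)
    ? `/datasets/${datasetId}/hitTesting`
    : `/datasets/${datasetId}/documents`

console.log(datasetDetailPath('abc123', 'external')) // /datasets/abc123/hitTesting
console.log(datasetDetailPath('abc123', 'vendor'))   // /datasets/abc123/documents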
diff --git a/web/app/(commonLayout)/datasets/NewDatasetCard.tsx b/web/app/(commonLayout)/datasets/NewDatasetCard.tsx index f76efa5769..5dd244ad41 100644 --- a/web/app/(commonLayout)/datasets/NewDatasetCard.tsx +++ b/web/app/(commonLayout)/datasets/NewDatasetCard.tsx @@ -4,21 +4,32 @@ import { forwardRef } from 'react' import { useTranslation } from 'react-i18next' import { RiAddLine, + RiArrowRightLine, } from '@remixicon/react' const CreateAppCard = forwardRef((_, ref) => { const { t } = useTranslation() return ( -
-
-
- +
+ +
+
+ +
+
{t('dataset.createDataset')}
-
{t('dataset.createDataset')}
-
-
{t('dataset.createDatasetIntro')}
-
+ +
{t('dataset.createDatasetIntro')}
+ +
{t('dataset.connectDataset')}
+ +
+
) }) diff --git a/web/app/(commonLayout)/datasets/connect/page.tsx b/web/app/(commonLayout)/datasets/connect/page.tsx new file mode 100644 index 0000000000..724c506a7f --- /dev/null +++ b/web/app/(commonLayout)/datasets/connect/page.tsx @@ -0,0 +1,8 @@ +import React from 'react' +import ExternalKnowledgeBaseConnector from '@/app/components/datasets/external-knowledge-base/connector' + +const ExternalKnowledgeBaseCreation = () => { + return +} + +export default ExternalKnowledgeBaseCreation diff --git a/web/app/(commonLayout)/datasets/layout.tsx b/web/app/(commonLayout)/datasets/layout.tsx new file mode 100644 index 0000000000..aecb537aa6 --- /dev/null +++ b/web/app/(commonLayout)/datasets/layout.tsx @@ -0,0 +1,14 @@ +'use client' + +import { ExternalApiPanelProvider } from '@/context/external-api-panel-context' +import { ExternalKnowledgeApiProvider } from '@/context/external-knowledge-api-context' + +export default function DatasetsLayout({ children }: { children: React.ReactNode }) { + return ( + + + {children} + + + ) +} diff --git a/web/app/(commonLayout)/datasets/page.tsx b/web/app/(commonLayout)/datasets/page.tsx index 5aa11aa275..096a1b8979 100644 --- a/web/app/(commonLayout)/datasets/page.tsx +++ b/web/app/(commonLayout)/datasets/page.tsx @@ -1,9 +1,7 @@ import Container from './Container' const AppList = async () => { - return ( - - ) + return } export const metadata = { diff --git a/web/app/(commonLayout)/datasets/store.ts b/web/app/(commonLayout)/datasets/store.ts new file mode 100644 index 0000000000..40b7b15594 --- /dev/null +++ b/web/app/(commonLayout)/datasets/store.ts @@ -0,0 +1,11 @@ +import { create } from 'zustand' + +type DatasetStore = { + showExternalApiPanel: boolean + setShowExternalApiPanel: (show: boolean) => void +} + +export const useDatasetStore = create(set => ({ + showExternalApiPanel: false, + setShowExternalApiPanel: show => set({ showExternalApiPanel: show }), +})) diff --git a/web/app/(shareLayout)/layout.tsx b/web/app/(shareLayout)/layout.tsx index 9c4632cd45..259af2bc2d 100644 --- a/web/app/(shareLayout)/layout.tsx +++ b/web/app/(shareLayout)/layout.tsx @@ -1,7 +1,12 @@ import React from 'react' import type { FC } from 'react' +import type { Metadata } from 'next' import GA, { GaType } from '@/app/components/base/ga' +export const metadata: Metadata = { + icons: 'data:,', // prevent browser from using default favicon +} + const Layout: FC<{ children: React.ReactNode }> = ({ children }) => { diff --git a/web/app/account/account-page/index.module.css b/web/app/account/account-page/index.module.css new file mode 100644 index 0000000000..949d1257e9 --- /dev/null +++ b/web/app/account/account-page/index.module.css @@ -0,0 +1,9 @@ +.modal { + padding: 24px 32px !important; + width: 400px !important; +} + +.bg { + background: linear-gradient(180deg, rgba(217, 45, 32, 0.05) 0%, rgba(217, 45, 32, 0.00) 24.02%), #F9FAFB; +} + diff --git a/web/app/account/account-page/index.tsx b/web/app/account/account-page/index.tsx new file mode 100644 index 0000000000..53f7692e6c --- /dev/null +++ b/web/app/account/account-page/index.tsx @@ -0,0 +1,304 @@ +'use client' +import { useState } from 'react' +import { useTranslation } from 'react-i18next' + +import { useContext } from 'use-context-selector' +import s from './index.module.css' +import Collapse from '@/app/components/header/account-setting/collapse' +import type { IItem } from '@/app/components/header/account-setting/collapse' +import Modal from '@/app/components/base/modal' +import Confirm from 
'@/app/components/base/confirm' +import Button from '@/app/components/base/button' +import { updateUserProfile } from '@/service/common' +import { useAppContext } from '@/context/app-context' +import { ToastContext } from '@/app/components/base/toast' +import AppIcon from '@/app/components/base/app-icon' +import Avatar from '@/app/components/base/avatar' +import { IS_CE_EDITION } from '@/config' + +const titleClassName = ` + text-sm font-medium text-gray-900 +` +const descriptionClassName = ` + mt-1 text-xs font-normal text-gray-500 +` +const inputClassName = ` + mt-2 w-full px-3 py-2 bg-gray-100 rounded + text-sm font-normal text-gray-800 +` + +const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ + +export default function AccountPage() { + const { t } = useTranslation() + const { mutateUserProfile, userProfile, apps } = useAppContext() + const { notify } = useContext(ToastContext) + const [editNameModalVisible, setEditNameModalVisible] = useState(false) + const [editName, setEditName] = useState('') + const [editing, setEditing] = useState(false) + const [editPasswordModalVisible, setEditPasswordModalVisible] = useState(false) + const [currentPassword, setCurrentPassword] = useState('') + const [password, setPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [showDeleteAccountModal, setShowDeleteAccountModal] = useState(false) + + const handleEditName = () => { + setEditNameModalVisible(true) + setEditName(userProfile.name) + } + const handleSaveName = async () => { + try { + setEditing(true) + await updateUserProfile({ url: 'account/name', body: { name: editName } }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + mutateUserProfile() + setEditNameModalVisible(false) + setEditing(false) + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + setEditNameModalVisible(false) + setEditing(false) + } + } + + const showErrorMessage = (message: string) => { + notify({ + type: 'error', + message, + }) + } + const valid = () => { + if (!password.trim()) { + showErrorMessage(t('login.error.passwordEmpty')) + return false + } + if (!validPassword.test(password)) { + showErrorMessage(t('login.error.passwordInvalid')) + return false + } + if (password !== confirmPassword) { + showErrorMessage(t('common.account.notEqual')) + return false + } + + return true + } + const resetPasswordForm = () => { + setCurrentPassword('') + setPassword('') + setConfirmPassword('') + } + const handleSavePassword = async () => { + if (!valid()) + return + try { + setEditing(true) + await updateUserProfile({ + url: 'account/password', + body: { + password: currentPassword, + new_password: password, + repeat_new_password: confirmPassword, + }, + }) + notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') }) + mutateUserProfile() + setEditPasswordModalVisible(false) + resetPasswordForm() + setEditing(false) + } + catch (e) { + notify({ type: 'error', message: (e as Error).message }) + setEditPasswordModalVisible(false) + setEditing(false) + } + } + + const renderAppItem = (item: IItem) => { + return ( +
+
+ +
+
{item.name}
+
+ ) + } + + return ( + <> +
+

{t('common.account.myAccount')}

+
+
+ +
+

{userProfile.name}

+

{userProfile.email}

+
+
+
+
{t('common.account.name')}
+
+
+ {userProfile.name} +
+
+ {t('common.operation.edit')} +
+
+
+
+
{t('common.account.email')}
+
+
+ {userProfile.email} +
+
+
+ { + IS_CE_EDITION && ( +
+
+
{t('common.account.password')}
+
{t('common.account.passwordTip')}
+
+ +
+ ) + } +
+
+
{t('common.account.langGeniusAccount')}
+
{t('common.account.langGeniusAccountTip')}
+ {!!apps.length && ( + ({ key: app.id, name: app.name }))} + renderItem={renderAppItem} + wrapperClassName='mt-2' + /> + )} + {!IS_CE_EDITION && } +
+ { + editNameModalVisible && ( + setEditNameModalVisible(false)} + className={s.modal} + > +
{t('common.account.editName')}
+
{t('common.account.name')}
+ setEditName(e.target.value)} + /> +
+ + +
+
+ ) + } + { + editPasswordModalVisible && ( + { + setEditPasswordModalVisible(false) + resetPasswordForm() + }} + className={s.modal} + > +
{userProfile.is_password_set ? t('common.account.resetPassword') : t('common.account.setPassword')}
+ {userProfile.is_password_set && ( + <> +
{t('common.account.currentPassword')}
+ setCurrentPassword(e.target.value)} + /> + + )} +
+ {userProfile.is_password_set ? t('common.account.newPassword') : t('common.account.password')} +
+ setPassword(e.target.value)} + /> +
{t('common.account.confirmPassword')}
+ setConfirmPassword(e.target.value)} + /> +
+ + +
+
+ ) + } + { + showDeleteAccountModal && ( + setShowDeleteAccountModal(false)} + onConfirm={() => setShowDeleteAccountModal(false)} + showCancel={false} + type='warning' + title={t('common.account.delete')} + content={ + <> +
+ {t('common.account.deleteTip')} +
+ +
{`${t('common.account.delete')}: ${userProfile.email}`}
+ + } + confirmText={t('common.operation.ok') as string} + /> + ) + } + + ) +} diff --git a/web/app/account/avatar.tsx b/web/app/account/avatar.tsx new file mode 100644 index 0000000000..29bd0cb5a5 --- /dev/null +++ b/web/app/account/avatar.tsx @@ -0,0 +1,94 @@ +'use client' +import { useTranslation } from 'react-i18next' +import { Fragment } from 'react' +import { useRouter } from 'next/navigation' +import { Menu, Transition } from '@headlessui/react' +import Avatar from '@/app/components/base/avatar' +import { logout } from '@/service/common' +import { useAppContext } from '@/context/app-context' +import { LogOut01 } from '@/app/components/base/icons/src/vender/line/general' + +export type IAppSelector = { + isMobile: boolean +} + +export default function AppSelector() { + const router = useRouter() + const { t } = useTranslation() + const { userProfile } = useAppContext() + + const handleLogout = async () => { + await logout({ + url: '/logout', + params: {}, + }) + + if (localStorage?.getItem('console_token')) + localStorage.removeItem('console_token') + + router.push('/signin') + } + + return ( + + { + ({ open }) => ( + <> +
+ + + +
+ + + +
+
+
+
{userProfile.name}
+
{userProfile.email}
+
+ +
+
+
+ +
handleLogout()}> +
+ +
{t('common.userProfile.logout')}
+
+
+
+
+
+ + ) + } +
+ ) +} diff --git a/web/app/account/header.tsx b/web/app/account/header.tsx new file mode 100644 index 0000000000..694533e5ab --- /dev/null +++ b/web/app/account/header.tsx @@ -0,0 +1,37 @@ +'use client' +import { useTranslation } from 'react-i18next' +import { RiArrowRightUpLine, RiRobot2Line } from '@remixicon/react' +import { useRouter } from 'next/navigation' +import Button from '../components/base/button' +import Avatar from './avatar' +import LogoSite from '@/app/components/base/logo/logo-site' + +const Header = () => { + const { t } = useTranslation() + const router = useRouter() + + const back = () => { + router.back() + } + return ( +
+
+
+ +
+
+

{t('common.account.account')}

+
+
+ +
+ +
+
+ ) +} +export default Header diff --git a/web/app/account/layout.tsx b/web/app/account/layout.tsx new file mode 100644 index 0000000000..5aa8b05cbf --- /dev/null +++ b/web/app/account/layout.tsx @@ -0,0 +1,40 @@ +import React from 'react' +import type { ReactNode } from 'react' +import Header from './header' +import SwrInitor from '@/app/components/swr-initor' +import { AppContextProvider } from '@/context/app-context' +import GA, { GaType } from '@/app/components/base/ga' +import HeaderWrapper from '@/app/components/header/header-wrapper' +import { EventEmitterContextProvider } from '@/context/event-emitter' +import { ProviderContextProvider } from '@/context/provider-context' +import { ModalContextProvider } from '@/context/modal-context' + +const Layout = ({ children }: { children: ReactNode }) => { + return ( + <> + + + + + + + +
+ +
+ {children} +
+ + + + + + + ) +} + +export const metadata = { + title: 'Dify', +} + +export default Layout diff --git a/web/app/account/page.tsx b/web/app/account/page.tsx new file mode 100644 index 0000000000..bb7e7f7feb --- /dev/null +++ b/web/app/account/page.tsx @@ -0,0 +1,7 @@ +import AccountPage from './account-page' + +export default function Account() { + return
+ +
+} diff --git a/web/app/components/app-sidebar/basic.tsx b/web/app/components/app-sidebar/basic.tsx index c939cb7bb3..51fc10721e 100644 --- a/web/app/components/app-sidebar/basic.tsx +++ b/web/app/components/app-sidebar/basic.tsx @@ -1,4 +1,5 @@ import React from 'react' +import { useTranslation } from 'react-i18next' import AppIcon from '../base/app-icon' import Tooltip from '@/app/components/base/tooltip' @@ -6,6 +7,7 @@ export type IAppBasicProps = { iconType?: 'app' | 'api' | 'dataset' | 'webapp' | 'notion' icon?: string icon_background?: string | null + isExternal?: boolean name: string type: string | React.ReactNode hoverTip?: string @@ -52,7 +54,9 @@ const ICON_MAP = { notion: , } -export default function AppBasic({ icon, icon_background, name, type, hoverTip, textStyle, mode = 'expand', iconType = 'app' }: IAppBasicProps) { +export default function AppBasic({ icon, icon_background, name, isExternal, type, hoverTip, textStyle, mode = 'expand', iconType = 'app' }: IAppBasicProps) { + const { t } = useTranslation() + return (
{icon && icon_background && iconType === 'app' && ( @@ -83,6 +87,7 @@ export default function AppBasic({ icon, icon_background, name, type, hoverTip, }
{type}
+
{isExternal ? t('dataset.externalTag') : ''}
}
) diff --git a/web/app/components/app-sidebar/index.tsx b/web/app/components/app-sidebar/index.tsx index 5d5d407dc0..5ee063ad64 100644 --- a/web/app/components/app-sidebar/index.tsx +++ b/web/app/components/app-sidebar/index.tsx @@ -15,6 +15,7 @@ export type IAppDetailNavProps = { iconType?: 'app' | 'dataset' | 'notion' title: string desc: string + isExternal?: boolean icon: string icon_background: string navigation: Array<{ @@ -26,7 +27,7 @@ export type IAppDetailNavProps = { extraInfo?: (modeState: string) => React.ReactNode } -const AppDetailNav = ({ title, desc, icon, icon_background, navigation, extraInfo, iconType = 'app' }: IAppDetailNavProps) => { +const AppDetailNav = ({ title, desc, isExternal, icon, icon_background, navigation, extraInfo, iconType = 'app' }: IAppDetailNavProps) => { const { appSidebarExpand, setAppSiderbarExpand } = useAppStore(useShallow(state => ({ appSidebarExpand: state.appSidebarExpand, setAppSiderbarExpand: state.setAppSiderbarExpand, @@ -70,6 +71,7 @@ const AppDetailNav = ({ title, desc, icon, icon_background, navigation, extraInf icon_background={icon_background} name={title} type={desc} + isExternal={isExternal} /> )}
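The two sidebar hunks above thread a single isExternal flag from the dataset detail layout (computed as datasetRes?.provider === 'external') through AppDetailNav into AppBasic, which renders the translated dataset.externalTag label. Stripped to its skeleton (a sketch; the real components carry many more props and styled containers):

type AppBasicProps = { name: string; type: string; isExternal?: boolean }

// AppBasic decides the presentation; callers only pass the flag.
const AppBasic = ({ name, type, isExternal }: AppBasicProps) => (
  <div>
    <div>{name}</div>
    <div>{type}</div>
    {isExternal && <span>External</span>}
  </div>
)

const AppDetailNav = ({ title, desc, isExternal }: { title: string; desc: string; isExternal?: boolean }) => (
  <AppBasic name={title} type={desc} isExternal={isExternal} />
)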
diff --git a/web/app/components/app/annotation/index.tsx b/web/app/components/app/annotation/index.tsx index c66aaef6ce..0bdd12c550 100644 --- a/web/app/components/app/annotation/index.tsx +++ b/web/app/components/app/annotation/index.tsx @@ -3,6 +3,7 @@ import type { FC } from 'react' import React, { useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import { Pagination } from 'react-headless-pagination' +import { useDebounce } from 'ahooks' import { ArrowLeftIcon, ArrowRightIcon } from '@heroicons/react/24/outline' import Toast from '../../base/toast' import Filter from './filter' @@ -67,10 +68,11 @@ const Annotation: FC = ({ const [queryParams, setQueryParams] = useState({}) const [currPage, setCurrPage] = React.useState(0) + const debouncedQueryParams = useDebounce(queryParams, { wait: 500 }) const query = { page: currPage + 1, limit: APP_PAGE_LIMIT, - keyword: queryParams.keyword || '', + keyword: debouncedQueryParams.keyword || '', } const [controlUpdateList, setControlUpdateList] = useState(Date.now()) @@ -232,8 +234,8 @@ const Annotation: FC = ({ middlePagesSiblingCount={1} setCurrentPage={setCurrPage} totalPages={Math.ceil(total / APP_PAGE_LIMIT)} - truncatableClassName="w-8 px-0.5 text-center" - truncatableText="..." + truncableClassName="w-8 px-0.5 text-center" + truncableText="..." > = ({ const isMobile = media === MediaType.mobile const [showSettingsModal, setShowSettingsModal] = useState(false) const { formatIndexingTechniqueAndMethod } = useKnowledge() + const { t } = useTranslation() const handleSave = (newDataset: DataSet) => { onSave(newDataset) @@ -65,9 +67,11 @@ const Item: FC = ({
{config.name}
- + {config.provider === 'external' + ? + : }
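The annotation list hunk above now debounces its keyword filter: the query object tracks queryParams only after typing pauses for 500 ms, via ahooks' useDebounce, so each keystroke no longer fires a request. (The same hunk also renames the pagination props to truncableClassName/truncableText, matching the prop names react-headless-pagination actually exposes.) The debounce pattern in isolation, as a sketch; the real component also manages paging and list refresh:

import { useState } from 'react'
import { useDebounce } from 'ahooks'

type QueryParams = { keyword?: string }

const useAnnotationQuery = (pageSize: number) => {
  const [queryParams, setQueryParams] = useState<QueryParams>({})
  // Re-renders with the debounced value 500 ms after the last change.
  const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })
  const query = { page: 1, limit: pageSize, keyword: debouncedQueryParams.keyword || '' }
  return { query, setQueryParams }
}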
diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index 91cae54bb8..f556121518 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -1,6 +1,6 @@ 'use client' -import { memo, useEffect, useMemo } from 'react' +import { memo, useCallback, useEffect, useMemo } from 'react' import type { FC } from 'react' import { useTranslation } from 'react-i18next' import WeightedScore from './weighted-score' @@ -11,7 +11,7 @@ import type { DatasetConfigs, } from '@/models/debug' import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector' -import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' +import { useCurrentProviderAndModel, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks' import type { ModelConfig } from '@/app/components/workflow/types' import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal' import Tooltip from '@/app/components/base/tooltip' @@ -23,6 +23,7 @@ import { RerankingModeEnum } from '@/models/datasets' import cn from '@/utils/classnames' import { useSelectedDatasetsMode } from '@/app/components/workflow/nodes/knowledge-retrieval/hooks' import Switch from '@/app/components/base/switch' +import Toast from '@/app/components/base/toast' type Props = { datasetConfigs: DatasetConfigs @@ -60,6 +61,24 @@ const ConfigContent: FC = ({ modelList: rerankModelList, defaultModel: rerankDefaultModel, } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank) + + const { + currentModel, + } = useCurrentProviderAndModel( + rerankModelList, + rerankDefaultModel + ? { + ...rerankDefaultModel, + provider: rerankDefaultModel.provider.provider, + } + : undefined, + ) + + const handleDisabledSwitchClick = useCallback(() => { + if (!currentModel) + Toast.notify({ type: 'error', message: t('workflow.errorMsg.rerankModelRequired') }) + }, [currentModel, rerankDefaultModel, t]) + const rerankModel = (() => { if (datasetConfigs.reranking_model?.reranking_provider_name) { return { @@ -174,6 +193,20 @@ const ConfigContent: FC = ({
) } + { + selectedDatasetsMode.mixtureInternalAndExternal && ( +
+ {t('dataset.mixtureInternalAndExternalTip')} +
+ ) + } + { + selectedDatasetsMode.allExternal && ( +
+ {t('dataset.allExternalTip')} +
+ ) + } { selectedDatasetsMode.mixtureHighQualityAndEconomic && ( @@ -217,27 +250,33 @@ const ConfigContent: FC = ({
{ selectedDatasetsMode.allEconomic && ( - { - onChange({ - ...datasetConfigs, - reranking_enable: v, - }) - }} - /> +
+ { + onChange({ + ...datasetConfigs, + reranking_enable: v, + }) + }} + /> +
) } -
{t('common.modelProvider.rerankModel.key')}
+
{t('common.modelProvider.rerankModel.key')}
{t('common.modelProvider.rerankModel.tip')}
} - popupClassName='ml-0.5' - triggerClassName='ml-0.5 w-3.5 h-3.5' + popupClassName='ml-1' + triggerClassName='ml-1 w-4 h-4' />
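In the config-content hunk above, the rerank toggle is disabled when no rerank model is configured, and because a disabled control swallows clicks, a wrapper div catches the click and raises a toast (workflow.errorMsg.rerankModelRequired) explaining why. The shape of that pattern, simplified (a sketch, not the component itself; the real code uses the repo's Switch and Toast):

import { useCallback } from 'react'

type RerankToggleProps = {
  hasRerankModel: boolean
  notifyMissingModel: () => void // e.g. Toast.notify({ type: 'error', ... })
  enabled: boolean
  onToggle: (value: boolean) => void
}

const RerankToggle = ({ hasRerankModel, notifyMissingModel, enabled, onToggle }: RerankToggleProps) => {
  // The wrapper, not the disabled input, receives the click.
  const handleWrapperClick = useCallback(() => {
    if (!hasRerankModel)
      notifyMissingModel()
  }, [hasRerankModel, notifyMissingModel])

  return (
    <div onClick={handleWrapperClick}>
      <input
        type='checkbox'
        disabled={!hasRerankModel}
        checked={enabled}
        onChange={e => onToggle(e.target.checked)}
      />
    </div>
  )
}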
diff --git a/web/app/components/app/configuration/dataset-config/params-config/index.tsx b/web/app/components/app/configuration/dataset-config/params-config/index.tsx
index 656cbfea65..2d3df0b039 100644
--- a/web/app/components/app/configuration/dataset-config/params-config/index.tsx
+++ b/web/app/components/app/configuration/dataset-config/params-config/index.tsx
@@ -39,13 +39,26 @@ const ParamsConfig = ({
   useEffect(() => {
     const {
       allEconomic,
+      allHighQuality,
+      allHighQualityFullTextSearch,
+      allHighQualityVectorSearch,
+      allExternal,
+      mixtureHighQualityAndEconomic,
+      inconsistentEmbeddingModel,
+      mixtureInternalAndExternal,
     } = getSelectedDatasetsMode(selectedDatasets)
     const { datasets, retrieval_model, score_threshold_enabled, ...restConfigs } = datasetConfigs
     let rerankEnable = restConfigs.reranking_enable
-    if (allEconomic && !restConfigs.reranking_model?.reranking_provider_name && rerankEnable === undefined)
+    if ((allEconomic && !restConfigs.reranking_model?.reranking_provider_name && rerankEnable === undefined) || allExternal)
       rerankEnable = false
+    if (allEconomic || allHighQuality || allHighQualityFullTextSearch || allHighQualityVectorSearch || (allExternal && selectedDatasets.length === 1))
+      setRerankSettingModalOpen(false)
+
+    if (mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || mixtureInternalAndExternal || (allExternal && selectedDatasets.length > 1))
+      setRerankSettingModalOpen(true)
+
     setTempDataSetConfigs({
       ...getMultipleRetrievalConfig({
         top_k: restConfigs.top_k,
diff --git a/web/app/components/app/configuration/dataset-config/select-dataset/index.tsx b/web/app/components/app/configuration/dataset-config/select-dataset/index.tsx
index 4493755ba0..0d94e599b4 100644
--- a/web/app/components/app/configuration/dataset-config/select-dataset/index.tsx
+++ b/web/app/components/app/configuration/dataset-config/select-dataset/index.tsx
@@ -47,7 +47,7 @@ const SelectDataSet: FC<…> = ({
     const { data, has_more } = await fetchDatasets({ url: '/datasets', params: { page } })
     setPage(getPage() + 1)
     setIsNoMore(!has_more)
-    const newList = [...(datasets || []), ...data.filter(item => item.indexing_technique)]
+    const newList = [...(datasets || []), ...data.filter(item => item.indexing_technique || item.provider === 'external')]
     setDataSets(newList)
     setLoaded(true)
     if (!selected.find(item => !item.name))
@@ -145,6 +145,11 @@
               />
             )
           }
+          {
+            item.provider === 'external' && (
+              <… />
+            )
+          }
         ))}
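For context on the gating in `params-config/index.tsx`: `getSelectedDatasetsMode` reduces the current dataset selection to a set of booleans, and the effect opens the rerank-settings modal only for selections that cannot share one retrieval setup. The helper's real implementation lives in the knowledge-retrieval hooks; the sketch below only borrows the flag names from the destructuring above, and the computation itself is an assumption:

```ts
// Assumed shape; only the flag names are taken from the diff.
type DatasetLike = {
  provider: string // 'external' marks an external knowledge base
  indexing_technique?: string // 'high_quality' | 'economy' for internal datasets
  embedding_model?: string
}

function getSelectedDatasetsModeSketch(datasets: DatasetLike[]) {
  const external = datasets.filter(d => d.provider === 'external')
  const internal = datasets.filter(d => d.provider !== 'external')
  const economic = internal.filter(d => d.indexing_technique === 'economy')
  const highQuality = internal.filter(d => d.indexing_technique === 'high_quality')
  const embeddingModels = new Set(highQuality.map(d => d.embedding_model))

  return {
    allExternal: datasets.length > 0 && external.length === datasets.length,
    allEconomic: datasets.length > 0 && economic.length === datasets.length,
    allHighQuality: datasets.length > 0 && highQuality.length === datasets.length,
    mixtureHighQualityAndEconomic: highQuality.length > 0 && economic.length > 0,
    mixtureInternalAndExternal: internal.length > 0 && external.length > 0,
    inconsistentEmbeddingModel: embeddingModels.size > 1,
  }
}
```

Read against the effect above: homogeneous selections (all economy, all high-quality, or a single external source) close the modal, while any mixture — or more than one external source — forces it open, since merging results from heterogeneous retrievers requires an explicit rerank step.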
diff --git a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx
index e538c347d9..ee211325cf 100644
--- a/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx
+++ b/web/app/components/app/configuration/dataset-config/settings-modal/index.tsx
@@ -5,8 +5,10 @@
 import { useTranslation } from 'react-i18next'
 import { isEqual } from 'lodash-es'
 import { RiCloseLine } from '@remixicon/react'
 import { BookOpenIcon } from '@heroicons/react/24/outline'
+import { ApiConnectionMod } from '@/app/components/base/icons/src/vender/solid/development'
 import cn from '@/utils/classnames'
 import IndexMethodRadio from '@/app/components/datasets/settings/index-method-radio'
+import Divider from '@/app/components/base/divider'
 import Button from '@/app/components/base/button'
 import type { DataSet } from '@/models/datasets'
 import { useToastContext } from '@/app/components/base/toast'
@@ -14,6 +16,7 @@ import { updateDatasetSetting } from '@/service/datasets'
 import { useAppContext } from '@/context/app-context'
 import { useModalContext } from '@/context/modal-context'
 import type { RetrievalConfig } from '@/types/app'
+import RetrievalSettings from '@/app/components/datasets/external-knowledge-base/create/RetrievalSettings'
 import RetrievalMethodConfig from '@/app/components/datasets/common/retrieval-method-config'
 import EconomicalRetrievalMethodConfig from '@/app/components/datasets/common/economical-retrieval-method-config'
 import { ensureRerankModelSelected, isReRankModelSelected } from '@/app/components/datasets/common/check-rerank-model'
@@ -56,7 +59,10 @@ const SettingsModal: FC<…> = ({
   const { t } = useTranslation()
   const { notify } = useToastContext()
   const ref = useRef(null)
-
+  const isExternal = currentDataset.provider === 'external'
+  const [topK, setTopK] = useState(currentDataset?.external_retrieval_model.top_k ?? 2)
+  const [scoreThreshold, setScoreThreshold] = useState(currentDataset?.external_retrieval_model.score_threshold ?? 0.5)
+  const [scoreThresholdEnabled, setScoreThresholdEnabled] = useState(currentDataset?.external_retrieval_model.score_threshold_enabled ?? false)
   const { setShowAccountSettingModal } = useModalContext()
   const [loading, setLoading] = useState(false)
   const { isCurrentWorkspaceDatasetOperator } = useAppContext()
@@ -73,6 +79,15 @@
   const [isHideChangedTip, setIsHideChangedTip] = useState(false)
   const isRetrievalChanged = !isEqual(retrievalConfig, localeCurrentDataset?.retrieval_model_dict) || indexMethod !== localeCurrentDataset?.indexing_technique
 
+  const handleSettingsChange = (data: { top_k?: number; score_threshold?: number; score_threshold_enabled?: boolean }) => {
+    if (data.top_k !== undefined)
+      setTopK(data.top_k)
+    if (data.score_threshold !== undefined)
+      setScoreThreshold(data.score_threshold)
+    if (data.score_threshold_enabled !== undefined)
+      setScoreThresholdEnabled(data.score_threshold_enabled)
+  }
+
   const handleSave = async () => {
     if (loading)
       return
@@ -113,6 +128,15 @@
       },
       embedding_model: localeCurrentDataset.embedding_model,
       embedding_model_provider: localeCurrentDataset.embedding_model_provider,
+      ...(isExternal && {
+        external_knowledge_id: currentDataset!.external_knowledge_info.external_knowledge_id,
+        external_knowledge_api_id: currentDataset!.external_knowledge_info.external_knowledge_api_id,
+        external_retrieval_model: {
+          top_k: topK,
+          score_threshold: scoreThreshold,
+          score_threshold_enabled: scoreThresholdEnabled,
+        },
+      }),
     },
   } as any
   if (permission === 'partial_members') {
@@ -178,7 +202,7 @@
         }}>
-          <div …>{t('datasetSettings.form.name')}</div>
+          <div …>{t('datasetSettings.form.name')}</div>
@@ … @@ const SettingsModal: FC<…> = ({
-          <div …>{t('datasetSettings.form.desc')}</div>
+          <div …>{t('datasetSettings.form.desc')}</div>
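Two small patterns in this settings-modal change are worth isolating: the partial-update handler that fans a `RetrievalSettings` change out to three `useState` setters, and the `...(cond && { … })` spread that appends external-knowledge fields to the save payload only for external datasets (spreading `false` is a no-op, so internal datasets keep their original payload shape). A self-contained sketch under those assumptions — the field names mirror the diff, but the hook itself is illustrative, not Dify's code:

```ts
import { useState } from 'react'

type ExternalRetrievalSettings = {
  top_k: number
  score_threshold: number
  score_threshold_enabled: boolean
}

export function useExternalRetrievalSettings(initial: ExternalRetrievalSettings) {
  const [topK, setTopK] = useState(initial.top_k)
  const [scoreThreshold, setScoreThreshold] = useState(initial.score_threshold)
  const [scoreThresholdEnabled, setScoreThresholdEnabled] = useState(initial.score_threshold_enabled)

  // One callback accepts any subset of fields and routes each to its setter,
  // so a child component can report single-field edits.
  const handleSettingsChange = (data: Partial<ExternalRetrievalSettings>) => {
    if (data.top_k !== undefined)
      setTopK(data.top_k)
    if (data.score_threshold !== undefined)
      setScoreThreshold(data.score_threshold)
    if (data.score_threshold_enabled !== undefined)
      setScoreThresholdEnabled(data.score_threshold_enabled)
  }

  // Conditional spread: when isExternal is false, `false` spreads to nothing,
  // so the extra keys never appear on the payload. IDs are hypothetical inputs.
  const buildExternalPayload = (isExternal: boolean, knowledgeId: string, apiId: string) => ({
    ...(isExternal && {
      external_knowledge_id: knowledgeId,
      external_knowledge_api_id: apiId,
      external_retrieval_model: {
        top_k: topK,
        score_threshold: scoreThreshold,
        score_threshold_enabled: scoreThresholdEnabled,
      },
    }),
  })

  return { handleSettingsChange, buildExternalPayload }
}
```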