diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index a283f8d5ca..54f3f42a25 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -49,8 +49,8 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' run: | uv run --directory api ruff --version - uv run --directory api ruff check --diff ./ - uv run --directory api ruff format --check --diff ./ + uv run --directory api ruff check ./ + uv run --directory api ruff format --check ./ - name: Dotenv check if: steps.changed-files.outputs.any_changed == 'true' diff --git a/README.md b/README.md index 2909e0e6cf..16a1268cb1 100644 --- a/README.md +++ b/README.md @@ -241,7 +241,7 @@ One-Click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](http For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). At the same time, please consider supporting Dify by sharing it on social media and at events and conferences. -> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c). +> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c). ## Community & contact diff --git a/README_AR.md b/README_AR.md index e959ca0f78..d2cb0098a3 100644 --- a/README_AR.md +++ b/README_AR.md @@ -223,7 +223,7 @@ docker compose up -d لأولئك الذين يرغبون في المساهمة، انظر إلى [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) لدينا. في الوقت نفسه، يرجى النظر في دعم Dify عن طريق مشاركته على وسائل التواصل الاجتماعي وفي الفعاليات والمؤتمرات. -> نحن نبحث عن مساهمين لمساعدة في ترجمة Dify إلى لغات أخرى غير اللغة الصينية المندرين أو الإنجليزية. إذا كنت مهتمًا بالمساعدة، يرجى الاطلاع على [README للترجمة](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) لمزيد من المعلومات، واترك لنا تعليقًا في قناة `global-users` على [خادم المجتمع على Discord](https://discord.gg/8Tpq4AcN9c). +> نحن نبحث عن مساهمين لمساعدة في ترجمة Dify إلى لغات أخرى غير اللغة الصينية المندرين أو الإنجليزية. إذا كنت مهتمًا بالمساعدة، يرجى الاطلاع على [README للترجمة](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) لمزيد من المعلومات، واترك لنا تعليقًا في قناة `global-users` على [خادم المجتمع على Discord](https://discord.gg/8Tpq4AcN9c). 
**المساهمون** diff --git a/README_BN.md b/README_BN.md index 29d7374ea5..f57413ec8b 100644 --- a/README_BN.md +++ b/README_BN.md @@ -241,7 +241,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা] দেখুন (https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)। একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন। -> আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন। +> আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন। ## কমিউনিটি এবং যোগাযোগ diff --git a/README_CN.md b/README_CN.md index 486a368c09..e9c73eb48b 100644 --- a/README_CN.md +++ b/README_CN.md @@ -244,7 +244,7 @@ docker compose up -d 对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。 -> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 +> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。 **Contributors** diff --git a/README_DE.md b/README_DE.md index fce52c34c2..d31a56542d 100644 --- a/README_DE.md +++ b/README_DE.md @@ -236,7 +236,7 @@ Ein-Klick-Bereitstellung von Dify in der Alibaba Cloud mit [Alibaba Cloud Data M Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren. -> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). +> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen – außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c). 
## Gemeinschaft & Kontakt diff --git a/README_ES.md b/README_ES.md index 6fd6dfcee8..918bfe2286 100644 --- a/README_ES.md +++ b/README_ES.md @@ -237,7 +237,7 @@ Para aquellos que deseen contribuir con código, consulten nuestra [Guía de con Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias. -> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). +> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c). **Contribuidores** diff --git a/README_FR.md b/README_FR.md index b2209fb495..56ca878aae 100644 --- a/README_FR.md +++ b/README_FR.md @@ -235,7 +235,7 @@ Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribut Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences. -> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). +> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c). **Contributeurs** diff --git a/README_JA.md b/README_JA.md index c658225f90..6d277a36ed 100644 --- a/README_JA.md +++ b/README_JA.md @@ -234,7 +234,7 @@ docker compose up -d 同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。 -> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 +> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。 **貢献者** diff --git a/README_KL.md b/README_KL.md index bfafcc7407..dac67eeb29 100644 --- a/README_KL.md +++ b/README_KL.md @@ -235,7 +235,7 @@ For those who'd like to contribute code, see our [Contribution Guide](https://gi At the same time, please consider supporting Dify by sharing it on social media and at events and conferences. -> We are looking for contributors to help with translating Dify to languages other than Mandarin or English. 
If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c). +> We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c). **Contributors** diff --git a/README_KR.md b/README_KR.md index 282117e776..072481da02 100644 --- a/README_KR.md +++ b/README_KR.md @@ -229,7 +229,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했 동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다. -> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. +> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요. **기여자** diff --git a/README_PT.md b/README_PT.md index 576f6b48f7..1260f8e6fd 100644 --- a/README_PT.md +++ b/README_PT.md @@ -233,7 +233,7 @@ Implante o Dify na Alibaba Cloud com um clique usando o [Alibaba Cloud Data Mana Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências. -> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). +> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c). **Contribuidores** diff --git a/README_TR.md b/README_TR.md index 6e94e54fa0..37953f0de1 100644 --- a/README_TR.md +++ b/README_TR.md @@ -227,7 +227,7 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz. Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün. -> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. 
+> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın. **Katkıda Bulunanlar** diff --git a/README_TW.md b/README_TW.md index 6e3e22b5c1..f70d6a25f6 100644 --- a/README_TW.md +++ b/README_TW.md @@ -239,7 +239,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify 對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。 -> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 +> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。 ## 社群與聯絡方式 diff --git a/README_VI.md b/README_VI.md index 51314e6de5..ddd9aa95f6 100644 --- a/README_VI.md +++ b/README_VI.md @@ -231,7 +231,7 @@ Triển khai Dify lên Alibaba Cloud chỉ với một cú nhấp chuột bằng Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị. -> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. +> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi. **Người đóng góp** diff --git a/api/.env.example b/api/.env.example index 80b1c12cd8..18f2dbf647 100644 --- a/api/.env.example +++ b/api/.env.example @@ -4,6 +4,11 @@ # Alternatively you can set it with `SECRET_KEY` environment variable. SECRET_KEY= +# Ensure UTF-8 encoding +LANG=en_US.UTF-8 +LC_ALL=en_US.UTF-8 +PYTHONIOENCODING=utf-8 + # Console API base URL CONSOLE_API_URL=http://localhost:5001 CONSOLE_WEB_URL=http://localhost:3000 diff --git a/api/.ruff.toml b/api/.ruff.toml index 0169613bf8..db6872b9c8 100644 --- a/api/.ruff.toml +++ b/api/.ruff.toml @@ -42,6 +42,8 @@ select = [ "S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers. 
"S302", # suspicious-marshal-usage, disallow use of `marshal` module "S311", # suspicious-non-cryptographic-random-usage + "G001", # don't use str format to logging messages + "G004", # don't use f-strings to format logging messages ] ignore = [ diff --git a/api/Dockerfile b/api/Dockerfile index 8c7a1717b9..e097b5811e 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -37,6 +37,11 @@ EXPOSE 5001 # set timezone ENV TZ=UTC +# Set UTF-8 locale +ENV LANG=en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 +ENV PYTHONIOENCODING=utf-8 + WORKDIR /app/api RUN \ diff --git a/api/app_factory.py b/api/app_factory.py index 3a258be28f..81155cbacd 100644 --- a/api/app_factory.py +++ b/api/app_factory.py @@ -32,7 +32,7 @@ def create_app() -> DifyApp: initialize_extensions(app) end_time = time.perf_counter() if dify_config.DEBUG: - logging.info(f"Finished create_app ({round((end_time - start_time) * 1000, 2)} ms)") + logging.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2)) return app @@ -91,14 +91,14 @@ def initialize_extensions(app: DifyApp): is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True if not is_enabled: if dify_config.DEBUG: - logging.info(f"Skipped {short_name}") + logging.info("Skipped %s", short_name) continue start_time = time.perf_counter() ext.init_app(app) end_time = time.perf_counter() if dify_config.DEBUG: - logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)") + logging.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2)) def create_migrations_app(): diff --git a/api/commands.py b/api/commands.py index c2e62ec261..79bb6713d0 100644 --- a/api/commands.py +++ b/api/commands.py @@ -53,13 +53,13 @@ def reset_password(email, new_password, password_confirm): account = db.session.query(Account).where(Account.email == email).one_or_none() if not account: - click.echo(click.style("Account not found for email: {}".format(email), fg="red")) + click.echo(click.style(f"Account not found for email: {email}", fg="red")) return try: valid_password(new_password) except: - click.echo(click.style("Invalid password. Must match {}".format(password_pattern), fg="red")) + click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red")) return # generate password salt @@ -92,13 +92,13 @@ def reset_email(email, new_email, email_confirm): account = db.session.query(Account).where(Account.email == email).one_or_none() if not account: - click.echo(click.style("Account not found for email: {}".format(email), fg="red")) + click.echo(click.style(f"Account not found for email: {email}", fg="red")) return try: email_validate(new_email) except: - click.echo(click.style("Invalid email: {}".format(new_email), fg="red")) + click.echo(click.style(f"Invalid email: {new_email}", fg="red")) return account.email = new_email @@ -142,7 +142,7 @@ def reset_encrypt_key_pair(): click.echo( click.style( - "Congratulations! The asymmetric key pair of workspace {} has been reset.".format(tenant.id), + f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.", fg="green", ) ) @@ -190,14 +190,14 @@ def migrate_annotation_vector_database(): f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped." 
) try: - click.echo("Creating app annotation index: {}".format(app.id)) + click.echo(f"Creating app annotation index: {app.id}") app_annotation_setting = ( db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first() ) if not app_annotation_setting: skipped_count = skipped_count + 1 - click.echo("App annotation setting disabled: {}".format(app.id)) + click.echo(f"App annotation setting disabled: {app.id}") continue # get dataset_collection_binding info dataset_collection_binding = ( @@ -206,7 +206,7 @@ def migrate_annotation_vector_database(): .first() ) if not dataset_collection_binding: - click.echo("App annotation collection binding not found: {}".format(app.id)) + click.echo(f"App annotation collection binding not found: {app.id}") continue annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all() dataset = Dataset( @@ -252,9 +252,7 @@ def migrate_annotation_vector_database(): create_count += 1 except Exception as e: click.echo( - click.style( - "Error creating app annotation index: {} {}".format(e.__class__.__name__, str(e)), fg="red" - ) + click.style(f"Error creating app annotation index: {e.__class__.__name__} {str(e)}", fg="red") ) continue @@ -319,7 +317,7 @@ def migrate_knowledge_vector_database(): f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped." ) try: - click.echo("Creating dataset vector database index: {}".format(dataset.id)) + click.echo(f"Creating dataset vector database index: {dataset.id}") if dataset.index_struct_dict: if dataset.index_struct_dict["type"] == vector_type: skipped_count = skipped_count + 1 @@ -423,9 +421,7 @@ def migrate_knowledge_vector_database(): create_count += 1 except Exception as e: db.session.rollback() - click.echo( - click.style("Error creating dataset index: {} {}".format(e.__class__.__name__, str(e)), fg="red") - ) + click.echo(click.style(f"Error creating dataset index: {e.__class__.__name__} {str(e)}", fg="red")) continue click.echo( @@ -476,7 +472,7 @@ def convert_to_agent_apps(): break for app in apps: - click.echo("Converting app: {}".format(app.id)) + click.echo(f"Converting app: {app.id}") try: app.mode = AppMode.AGENT_CHAT.value @@ -488,11 +484,11 @@ def convert_to_agent_apps(): ) db.session.commit() - click.echo(click.style("Converted app: {}".format(app.id), fg="green")) + click.echo(click.style(f"Converted app: {app.id}", fg="green")) except Exception as e: - click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red")) + click.echo(click.style(f"Convert app error: {e.__class__.__name__} {str(e)}", fg="red")) - click.echo(click.style("Conversion complete. Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green")) + click.echo(click.style(f"Conversion complete. 
Converted {len(proceeded_app_ids)} agent apps.", fg="green")) @click.command("add-qdrant-index", help="Add Qdrant index.") @@ -665,7 +661,7 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str click.echo( click.style( - "Account and tenant created.\nAccount: {}\nPassword: {}".format(email, new_password), + f"Account and tenant created.\nAccount: {email}\nPassword: {new_password}", fg="green", ) ) @@ -726,16 +722,16 @@ where sites.id is null limit 1000""" if tenant: accounts = tenant.get_accounts() if not accounts: - print("Fix failed for app {}".format(app.id)) + print(f"Fix failed for app {app.id}") continue account = accounts[0] - print("Fixing missing site for app {}".format(app.id)) + print(f"Fixing missing site for app {app.id}") app_was_created.send(app, account=account) except Exception: failed_app_ids.append(app_id) - click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red")) - logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}") + click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red")) + logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id) continue if not processed_count: diff --git a/api/configs/app_config.py b/api/configs/app_config.py index 20f8c40427..d3b1cf9d5b 100644 --- a/api/configs/app_config.py +++ b/api/configs/app_config.py @@ -41,7 +41,7 @@ class RemoteSettingsSourceFactory(PydanticBaseSettingsSource): case RemoteSettingsSourceName.NACOS: remote_source = NacosSettingsSource(current_state) case _: - logger.warning(f"Unsupported remote source: {remote_source_name}") + logger.warning("Unsupported remote source: %s", remote_source_name) return {} d: dict[str, Any] = {} diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py index 587ea55ca7..68b16e48db 100644 --- a/api/configs/middleware/__init__.py +++ b/api/configs/middleware/__init__.py @@ -245,11 +245,7 @@ class CeleryConfig(DatabaseConfig): @computed_field def CELERY_RESULT_BACKEND(self) -> str | None: - return ( - "db+{}".format(self.SQLALCHEMY_DATABASE_URI) - if self.CELERY_BACKEND == "database" - else self.CELERY_BROKER_URL - ) + return f"db+{self.SQLALCHEMY_DATABASE_URI}" if self.CELERY_BACKEND == "database" else self.CELERY_BROKER_URL @property def BROKER_USE_SSL(self) -> bool: diff --git a/api/configs/remote_settings_sources/apollo/client.py b/api/configs/remote_settings_sources/apollo/client.py index 88b30d3987..877ff8409f 100644 --- a/api/configs/remote_settings_sources/apollo/client.py +++ b/api/configs/remote_settings_sources/apollo/client.py @@ -76,7 +76,7 @@ class ApolloClient: code, body = http_request(url, timeout=3, headers=self._sign_headers(url)) if code == 200: if not body: - logger.error(f"get_json_from_net load configs failed, body is {body}") + logger.error("get_json_from_net load configs failed, body is %s", body) return None data = json.loads(body) data = data["configurations"] @@ -207,7 +207,7 @@ class ApolloClient: # if the length is 0 it is returned directly if len(notifications) == 0: return - url = "{}/notifications/v2".format(self.config_url) + url = f"{self.config_url}/notifications/v2" params = { "appId": self.app_id, "cluster": self.cluster, @@ -222,7 +222,7 @@ class ApolloClient: return if http_code == 200: if not body: - logger.error(f"_long_poll load configs failed,body is {body}") + logger.error("_long_poll load configs failed, body is %s", body) return data = json.loads(body) for entry in data: @@
-273,12 +273,12 @@ class ApolloClient: time.sleep(60 * 10) # 10 minutes def _do_heart_beat(self, namespace): - url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip) + url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}" try: code, body = http_request(url, timeout=3, headers=self._sign_headers(url)) if code == 200: if not body: - logger.error(f"_do_heart_beat load configs failed,body is {body}") + logger.error("_do_heart_beat load configs failed, body is %s", body) return None data = json.loads(body) if self.last_release_key == data["releaseKey"]: diff --git a/api/configs/remote_settings_sources/apollo/utils.py b/api/configs/remote_settings_sources/apollo/utils.py index 6136112e03..f5b82908ee 100644 --- a/api/configs/remote_settings_sources/apollo/utils.py +++ b/api/configs/remote_settings_sources/apollo/utils.py @@ -24,7 +24,7 @@ def url_encode_wrapper(params): def no_key_cache_key(namespace, key): - return "{}{}{}".format(namespace, len(namespace), key) + return f"{namespace}{len(namespace)}{key}" # Returns whether the obtained value is obtained, and None if it does not diff --git a/api/constants/languages.py b/api/constants/languages.py index 1157ec4307..ab19392c59 100644 --- a/api/constants/languages.py +++ b/api/constants/languages.py @@ -28,5 +28,5 @@ def supported_language(lang): if lang in languages: return lang - error = "{lang} is not a valid language.".format(lang=lang) + error = f"{lang} is not a valid language." raise ValueError(error) diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py index 2b48afd550..c2ba880405 100644 --- a/api/controllers/console/app/annotation.py +++ b/api/controllers/console/app/annotation.py @@ -86,7 +86,7 @@ class AnnotationReplyActionStatusApi(Resource): raise Forbidden() job_id = str(job_id) - app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id)) + app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) if cache_result is None: raise ValueError("The job does not exist.") @@ -94,7 +94,7 @@ job_status = cache_result.decode() error_msg = "" if job_status == "error": - app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id)) + app_annotation_error_key = f"{action}_app_annotation_error_{str(job_id)}" error_msg = redis_client.get(app_annotation_error_key).decode() return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200 @@ -123,6 +123,17 @@ class AnnotationListApi(Resource): } return response, 200 + @setup_required + @login_required + @account_initialization_required + def delete(self, app_id): + if not current_user.is_editor: + raise Forbidden() + + app_id = str(app_id) + AppAnnotationService.clear_all_annotations(app_id) + return {"result": "success"}, 204 + class AnnotationExportApi(Resource): @setup_required @@ -223,14 +234,14 @@ class AnnotationBatchImportStatusApi(Resource): raise Forbidden() job_id = str(job_id) - indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id)) + indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" cache_result = redis_client.get(indexing_cache_key) if cache_result is None: raise ValueError("The job does not exist.") job_status = cache_result.decode() error_msg = "" if job_status == "error": - indexing_error_msg_key =
"app_annotation_batch_import_error_msg_{}".format(str(job_id)) + indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}" error_msg = redis_client.get(indexing_error_msg_key).decode() return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200 diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index b5b6d1f75b..6ddae6fad5 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -51,8 +51,8 @@ class CompletionConversationApi(Resource): if args["keyword"]: query = query.join(Message, Message.conversation_id == Conversation.id).where( or_( - Message.query.ilike("%{}%".format(args["keyword"])), - Message.answer.ilike("%{}%".format(args["keyword"])), + Message.query.ilike(f"%{args['keyword']}%"), + Message.answer.ilike(f"%{args['keyword']}%"), ) ) @@ -174,7 +174,7 @@ class ChatConversationApi(Resource): query = db.select(Conversation).where(Conversation.app_id == app_model.id) if args["keyword"]: - keyword_filter = "%{}%".format(args["keyword"]) + keyword_filter = f"%{args['keyword']}%" query = ( query.join( Message, diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index 790369c052..4847a2cab8 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -1,5 +1,3 @@ -import os - from flask_login import current_user from flask_restful import Resource, reqparse @@ -29,15 +27,12 @@ class RuleGenerateApi(Resource): args = parser.parse_args() account = current_user - PROMPT_GENERATION_MAX_TOKENS = int(os.getenv("PROMPT_GENERATION_MAX_TOKENS", "512")) - try: rules = LLMGenerator.generate_rule_config( tenant_id=account.current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], no_variable=args["no_variable"], - rule_config_max_tokens=PROMPT_GENERATION_MAX_TOKENS, ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) @@ -64,14 +59,12 @@ class RuleCodeGenerateApi(Resource): args = parser.parse_args() account = current_user - CODE_GENERATION_MAX_TOKENS = int(os.getenv("CODE_GENERATION_MAX_TOKENS", "1024")) try: code_result = LLMGenerator.generate_code( tenant_id=account.current_tenant_id, instruction=args["instruction"], model_config=args["model_config"], code_language=args["code_language"], - max_tokens=CODE_GENERATION_MAX_TOKENS, ) except ProviderTokenNotInitError as ex: raise ProviderNotInitializeError(ex.description) diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 5e79e8dece..d4ce5921c2 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -5,7 +5,6 @@ from flask_restful import Resource, fields, marshal_with, reqparse from flask_restful.inputs import int_range from werkzeug.exceptions import Forbidden, InternalServerError, NotFound -import services from controllers.console import api from controllers.console.app.error import ( CompletionRequestError, @@ -133,7 +132,7 @@ class MessageFeedbackApi(Resource): rating=args.get("rating"), content=None, ) - except services.errors.message.MessageNotExistsError: + except MessageNotExistsError: raise NotFound("Message Not Exists.") return {"result": "success"} diff --git a/api/controllers/console/auth/data_source_oauth.py b/api/controllers/console/auth/data_source_oauth.py index 4c9697cc32..4940b48754 100644 --- a/api/controllers/console/auth/data_source_oauth.py +++ 
b/api/controllers/console/auth/data_source_oauth.py @@ -81,7 +81,7 @@ class OAuthDataSourceBinding(Resource): oauth_provider.get_access_token(code) except requests.exceptions.HTTPError as e: logging.exception( - f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}" + "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text ) return {"error": "OAuth data source process failed"}, 400 @@ -103,7 +103,9 @@ class OAuthDataSourceSync(Resource): try: oauth_provider.sync_data_source(binding_id) except requests.exceptions.HTTPError as e: - logging.exception(f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}") + logging.exception( + "An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text + ) return {"error": "OAuth data source process failed"}, 400 return {"result": "success"}, 200 diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index d0a4f3ff6d..4a6cb99390 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -80,7 +80,7 @@ class OAuthCallback(Resource): user_info = oauth_provider.get_user_info(token) except requests.exceptions.RequestException as e: error_text = e.response.text if e.response else str(e) - logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}") + logging.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) return {"error": "OAuth process failed"}, 400 if invite_token and RegisterService.is_valid_invite_token(invite_token): diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index d14b208a4b..b6e91dd98e 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -970,7 +970,7 @@ class DocumentRetryApi(DocumentResource): raise DocumentAlreadyFinishedError() retry_documents.append(document) except Exception: - logging.exception(f"Failed to retry document, document id: {document_id}") + logging.exception("Failed to retry document, document id: %s", document_id) continue # retry document DocumentService.retry_document(dataset_id, retry_documents) diff --git a/api/controllers/console/datasets/datasets_segments.py b/api/controllers/console/datasets/datasets_segments.py index b3704ce8b1..8c429044d7 100644 --- a/api/controllers/console/datasets/datasets_segments.py +++ b/api/controllers/console/datasets/datasets_segments.py @@ -1,6 +1,5 @@ import uuid -import pandas as pd from flask import request from flask_login import current_user from flask_restful import Resource, marshal, reqparse @@ -14,8 +13,6 @@ from controllers.console.datasets.error import ( ChildChunkDeleteIndexError, ChildChunkIndexingError, InvalidActionError, - NoFileUploadedError, - TooManyFilesError, ) from controllers.console.wraps import ( account_initialization_required, @@ -32,6 +29,7 @@ from extensions.ext_redis import redis_client from fields.segment_fields import child_chunk_fields, segment_fields from libs.login import login_required from models.dataset import ChildChunk, DocumentSegment +from models.model import UploadFile from services.dataset_service import DatasetService, DocumentService, SegmentService from services.entities.knowledge_entities.knowledge_entities import ChildChunkUpdateArgs, SegmentUpdateArgs from services.errors.chunk import ChildChunkDeleteIndexError as 
ChildChunkDeleteIndexServiceError @@ -184,7 +182,7 @@ class DatasetDocumentSegmentApi(Resource): raise ProviderNotInitializeError(ex.description) segment_ids = request.args.getlist("segment_id") - document_indexing_cache_key = "document_{}_indexing".format(document.id) + document_indexing_cache_key = f"document_{document.id}_indexing" cache_result = redis_client.get(document_indexing_cache_key) if cache_result is not None: raise InvalidActionError("Document is being indexed, please try again later") @@ -365,37 +363,28 @@ class DatasetDocumentSegmentBatchImportApi(Resource): document = DocumentService.get_document(dataset_id, document_id) if not document: raise NotFound("Document not found.") - # get file from request - file = request.files["file"] - # check file - if "file" not in request.files: - raise NoFileUploadedError() - if len(request.files) > 1: - raise TooManyFilesError() + parser = reqparse.RequestParser() + parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json") + args = parser.parse_args() + upload_file_id = args["upload_file_id"] + + upload_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first() + if not upload_file: + raise NotFound("UploadFile not found.") + # check file type - if not file.filename or not file.filename.lower().endswith(".csv"): + if not upload_file.name or not upload_file.name.lower().endswith(".csv"): raise ValueError("Invalid file type. Only CSV files are allowed") try: - # Skip the first row - df = pd.read_csv(file) - result = [] - for index, row in df.iterrows(): - if document.doc_form == "qa_model": - data = {"content": row.iloc[0], "answer": row.iloc[1]} - else: - data = {"content": row.iloc[0]} - result.append(data) - if len(result) == 0: - raise ValueError("The CSV file is empty.") # async job job_id = str(uuid.uuid4()) - indexing_cache_key = "segment_batch_import_{}".format(str(job_id)) + indexing_cache_key = f"segment_batch_import_{str(job_id)}" # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") batch_create_segment_to_index_task.delay( - str(job_id), result, dataset_id, document_id, current_user.current_tenant_id, current_user.id + str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id ) except Exception as e: return {"error": str(e)}, 500 @@ -406,7 +395,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource): @account_initialization_required def get(self, job_id): job_id = str(job_id) - indexing_cache_key = "segment_batch_import_{}".format(job_id) + indexing_cache_key = f"segment_batch_import_{job_id}" cache_result = redis_client.get(indexing_cache_key) if cache_result is None: raise ValueError("The job does not exist.") diff --git a/api/controllers/console/explore/installed_app.py b/api/controllers/console/explore/installed_app.py index ffdf73c368..6d9f794307 100644 --- a/api/controllers/console/explore/installed_app.py +++ b/api/controllers/console/explore/installed_app.py @@ -74,7 +74,7 @@ class InstalledAppsListApi(Resource): ): res.append(installed_app) installed_app_list = res - logger.debug(f"installed_app_list: {installed_app_list}, user_id: {user_id}") + logger.debug("installed_app_list: %s, user_id: %s", installed_app_list, user_id) installed_app_list.sort( key=lambda app: ( diff --git a/api/controllers/console/explore/message.py b/api/controllers/console/explore/message.py index 822777604a..de95a9e7b0 100644 --- a/api/controllers/console/explore/message.py +++ 
b/api/controllers/console/explore/message.py @@ -5,7 +5,6 @@ from flask_restful import marshal_with, reqparse from flask_restful.inputs import int_range from werkzeug.exceptions import InternalServerError, NotFound -import services from controllers.console.app.error import ( AppMoreLikeThisDisabledError, CompletionRequestError, @@ -29,7 +28,11 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.app import MoreLikeThisDisabledError from services.errors.conversation import ConversationNotExistsError -from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError +from services.errors.message import ( + FirstMessageNotExistsError, + MessageNotExistsError, + SuggestedQuestionsAfterAnswerDisabledError, +) from services.message_service import MessageService @@ -52,9 +55,9 @@ class MessageListApi(InstalledAppResource): return MessageService.pagination_by_first_id( app_model, current_user, args["conversation_id"], args["first_id"], args["limit"] ) - except services.errors.conversation.ConversationNotExistsError: + except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - except services.errors.message.FirstMessageNotExistsError: + except FirstMessageNotExistsError: raise NotFound("First Message Not Exists.") @@ -77,7 +80,7 @@ class MessageFeedbackApi(InstalledAppResource): rating=args.get("rating"), content=args.get("content"), ) - except services.errors.message.MessageNotExistsError: + except MessageNotExistsError: raise NotFound("Message Not Exists.") return {"result": "success"} diff --git a/api/controllers/console/version.py b/api/controllers/console/version.py index 447cc358f8..8237ea3cdc 100644 --- a/api/controllers/console/version.py +++ b/api/controllers/console/version.py @@ -34,7 +34,7 @@ class VersionApi(Resource): try: response = requests.get(check_update_url, {"current_version": args.get("current_version")}) except Exception as error: - logging.warning("Check update version error: {}.".format(str(error))) + logging.warning("Check update version error: %s.", str(error)) result["version"] = args.get("current_version") return result @@ -55,7 +55,7 @@ def _has_new_version(*, latest_version: str, current_version: str) -> bool: # Compare versions return latest > current except version.InvalidVersion: - logging.warning(f"Invalid version format: latest={latest_version}, current={current_version}") + logging.warning("Invalid version format: latest=%s, current=%s", latest_version, current_version) return False diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 5cd2e0cd2d..4d5357cd18 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -15,7 +15,7 @@ from controllers.console.auth.error import ( InvalidEmailError, InvalidTokenError, ) -from controllers.console.error import AccountNotFound, EmailSendIpLimitError +from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError from controllers.console.workspace.error import ( AccountAlreadyInitedError, CurrentPasswordIncorrectError, @@ -479,20 +479,27 @@ class ChangeEmailResetApi(Resource): parser.add_argument("token", type=str, required=True, nullable=False, location="json") args = parser.parse_args() + if AccountService.is_account_in_freeze(args["new_email"]): + raise AccountInFreezeError() + + if not AccountService.check_email_unique(args["new_email"]): + raise 
EmailAlreadyInUseError() + reset_data = AccountService.get_change_email_data(args["token"]) if not reset_data: raise InvalidTokenError() AccountService.revoke_change_email_token(args["token"]) - if not AccountService.check_email_unique(args["new_email"]): - raise EmailAlreadyInUseError() - old_email = reset_data.get("old_email", "") if current_user.email != old_email: raise AccountNotFound() - updated_account = AccountService.update_account(current_user, email=args["new_email"]) + updated_account = AccountService.update_account_email(current_user, email=args["new_email"]) + + AccountService.send_change_email_completed_notify_email( + email=args["new_email"], + ) return updated_account @@ -503,6 +510,8 @@ class CheckEmailUnique(Resource): parser = reqparse.RequestParser() parser.add_argument("email", type=email, required=True, location="json") args = parser.parse_args() + if AccountService.is_account_in_freeze(args["email"]): + raise AccountInFreezeError() if not AccountService.check_email_unique(args["email"]): raise EmailAlreadyInUseError() return {"result": "success"} diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index 37d0f6c764..514d1084c4 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -73,8 +73,9 @@ class DefaultModelApi(Resource): ) except Exception as ex: logging.exception( - f"Failed to update default model, model type: {model_setting['model_type']}," - f" model:{model_setting.get('model')}" + "Failed to update default model, model type: %s, model: %s", + model_setting["model_type"], + model_setting.get("model"), ) raise ex @@ -160,8 +161,10 @@ class ModelProviderModelApi(Resource): ) except CredentialsValidateFailedError as ex: logging.exception( - f"Failed to save model credentials, tenant_id: {tenant_id}," - f" model: {args.get('model')}, model_type: {args.get('model_type')}" + "Failed to save model credentials, tenant_id: %s, model: %s, model_type: %s", + tenant_id, + args.get("model"), + args.get("model_type"), ) raise ValueError(str(ex)) diff --git a/api/controllers/service_api/app/annotation.py b/api/controllers/service_api/app/annotation.py index 595ae118ef..9b22c535f4 100644 --- a/api/controllers/service_api/app/annotation.py +++ b/api/controllers/service_api/app/annotation.py @@ -34,7 +34,7 @@ class AnnotationReplyActionStatusApi(Resource): @validate_app_token def get(self, app_model: App, job_id, action): job_id = str(job_id) - app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id)) + app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}" cache_result = redis_client.get(app_annotation_job_key) if cache_result is None: raise ValueError("The job does not exist.") @@ -42,7 +42,7 @@ class AnnotationReplyActionStatusApi(Resource): job_status = cache_result.decode() error_msg = "" if job_status == "error": - app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id)) + app_annotation_error_key = f"{action}_app_annotation_error_{str(job_id)}" error_msg = redis_client.get(app_annotation_error_key).decode() return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200 diff --git a/api/controllers/service_api/app/completion.py b/api/controllers/service_api/app/completion.py index 7762672494..edc66cc5e9 100644 --- a/api/controllers/service_api/app/completion.py +++ b/api/controllers/service_api/app/completion.py @@ -47,6 +47,9 @@ class CompletionApi(Resource): 
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json") args = parser.parse_args() + external_trace_id = get_external_trace_id(request) + if external_trace_id: + args["external_trace_id"] = external_trace_id streaming = args["response_mode"] == "streaming" diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index d90fa2081f..a4f95cb1cb 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -15,7 +15,11 @@ from fields.message_fields import agent_thought_fields, feedback_fields from fields.raws import FilesContainedField from libs.helper import TimestampField, uuid_value from models.model import App, AppMode, EndUser -from services.errors.message import SuggestedQuestionsAfterAnswerDisabledError +from services.errors.message import ( + FirstMessageNotExistsError, + MessageNotExistsError, + SuggestedQuestionsAfterAnswerDisabledError, +) from services.message_service import MessageService @@ -65,7 +69,7 @@ class MessageListApi(Resource): ) except services.errors.conversation.ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - except services.errors.message.FirstMessageNotExistsError: + except FirstMessageNotExistsError: raise NotFound("First Message Not Exists.") @@ -87,7 +91,7 @@ class MessageFeedbackApi(Resource): rating=args.get("rating"), content=args.get("content"), ) - except services.errors.message.MessageNotExistsError: + except MessageNotExistsError: raise NotFound("Message Not Exists.") return {"result": "success"} @@ -117,7 +121,7 @@ class MessageSuggestedApi(Resource): questions = MessageService.get_suggested_questions_after_answer( app_model=app_model, user=end_user, message_id=message_id, invoke_from=InvokeFrom.SERVICE_API ) - except services.errors.message.MessageNotExistsError: + except MessageNotExistsError: raise NotFound("Message Not Exists.") except SuggestedQuestionsAfterAnswerDisabledError: raise BadRequest("Suggested Questions Is Disabled.") diff --git a/api/controllers/web/message.py b/api/controllers/web/message.py index f2e1873601..7bb81cd0d3 100644 --- a/api/controllers/web/message.py +++ b/api/controllers/web/message.py @@ -4,7 +4,6 @@ from flask_restful import fields, marshal_with, reqparse from flask_restful.inputs import int_range from werkzeug.exceptions import InternalServerError, NotFound -import services from controllers.web import api from controllers.web.error import ( AppMoreLikeThisDisabledError, @@ -29,7 +28,11 @@ from models.model import AppMode from services.app_generate_service import AppGenerateService from services.errors.app import MoreLikeThisDisabledError from services.errors.conversation import ConversationNotExistsError -from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError +from services.errors.message import ( + FirstMessageNotExistsError, + MessageNotExistsError, + SuggestedQuestionsAfterAnswerDisabledError, +) from services.message_service import MessageService @@ -73,9 +76,9 @@ class MessageListApi(WebApiResource): return MessageService.pagination_by_first_id( app_model, end_user, args["conversation_id"], args["first_id"], args["limit"] ) - except services.errors.conversation.ConversationNotExistsError: + except ConversationNotExistsError: raise NotFound("Conversation Not Exists.") - except services.errors.message.FirstMessageNotExistsError: + except FirstMessageNotExistsError: raise NotFound("First Message Not Exists.") @@ -96,7 
+99,7 @@ class MessageFeedbackApi(WebApiResource): rating=args.get("rating"), content=args.get("content"), ) - except services.errors.message.MessageNotExistsError: + except MessageNotExistsError: raise NotFound("Message Not Exists.") return {"result": "success"} diff --git a/api/core/agent/base_agent_runner.py b/api/core/agent/base_agent_runner.py index 1f3c218d59..ad9b625350 100644 --- a/api/core/agent/base_agent_runner.py +++ b/api/core/agent/base_agent_runner.py @@ -280,7 +280,7 @@ class BaseAgentRunner(AppRunner): def create_agent_thought( self, message_id: str, message: str, tool_name: str, tool_input: str, messages_ids: list[str] - ) -> MessageAgentThought: + ) -> str: """ Create agent thought """ @@ -313,16 +313,15 @@ class BaseAgentRunner(AppRunner): db.session.add(thought) db.session.commit() - db.session.refresh(thought) + agent_thought_id = str(thought.id) + self.agent_thought_count += 1 db.session.close() - self.agent_thought_count += 1 - - return thought + return agent_thought_id def save_agent_thought( self, - agent_thought: MessageAgentThought, + agent_thought_id: str, tool_name: str | None, tool_input: Union[str, dict, None], thought: str | None, @@ -335,12 +334,9 @@ class BaseAgentRunner(AppRunner): """ Save agent thought """ - updated_agent_thought = ( - db.session.query(MessageAgentThought).where(MessageAgentThought.id == agent_thought.id).first() - ) - if not updated_agent_thought: + agent_thought = db.session.query(MessageAgentThought).where(MessageAgentThought.id == agent_thought_id).first() + if not agent_thought: raise ValueError("agent thought not found") - agent_thought = updated_agent_thought if thought: agent_thought.thought += thought @@ -355,7 +351,7 @@ class BaseAgentRunner(AppRunner): except Exception: tool_input = json.dumps(tool_input) - updated_agent_thought.tool_input = tool_input + agent_thought.tool_input = tool_input if observation: if isinstance(observation, dict): @@ -364,27 +360,27 @@ class BaseAgentRunner(AppRunner): except Exception: observation = json.dumps(observation) - updated_agent_thought.observation = observation + agent_thought.observation = observation if answer: agent_thought.answer = answer if messages_ids is not None and len(messages_ids) > 0: - updated_agent_thought.message_files = json.dumps(messages_ids) + agent_thought.message_files = json.dumps(messages_ids) if llm_usage: - updated_agent_thought.message_token = llm_usage.prompt_tokens - updated_agent_thought.message_price_unit = llm_usage.prompt_price_unit - updated_agent_thought.message_unit_price = llm_usage.prompt_unit_price - updated_agent_thought.answer_token = llm_usage.completion_tokens - updated_agent_thought.answer_price_unit = llm_usage.completion_price_unit - updated_agent_thought.answer_unit_price = llm_usage.completion_unit_price - updated_agent_thought.tokens = llm_usage.total_tokens - updated_agent_thought.total_price = llm_usage.total_price + agent_thought.message_token = llm_usage.prompt_tokens + agent_thought.message_price_unit = llm_usage.prompt_price_unit + agent_thought.message_unit_price = llm_usage.prompt_unit_price + agent_thought.answer_token = llm_usage.completion_tokens + agent_thought.answer_price_unit = llm_usage.completion_price_unit + agent_thought.answer_unit_price = llm_usage.completion_unit_price + agent_thought.tokens = llm_usage.total_tokens + agent_thought.total_price = llm_usage.total_price # check if tool labels is not empty - labels = updated_agent_thought.tool_labels or {} - tools = updated_agent_thought.tool.split(";") if 
updated_agent_thought.tool else [] + labels = agent_thought.tool_labels or {} + tools = agent_thought.tool.split(";") if agent_thought.tool else [] for tool in tools: if not tool: continue @@ -395,7 +391,7 @@ class BaseAgentRunner(AppRunner): else: labels[tool] = {"en_US": tool, "zh_Hans": tool} - updated_agent_thought.tool_labels_str = json.dumps(labels) + agent_thought.tool_labels_str = json.dumps(labels) if tool_invoke_meta is not None: if isinstance(tool_invoke_meta, dict): @@ -404,7 +400,7 @@ class BaseAgentRunner(AppRunner): except Exception: tool_invoke_meta = json.dumps(tool_invoke_meta) - updated_agent_thought.tool_meta_str = tool_invoke_meta + agent_thought.tool_meta_str = tool_invoke_meta db.session.commit() db.session.close() diff --git a/api/core/agent/cot_agent_runner.py b/api/core/agent/cot_agent_runner.py index 4979f63432..565fb42478 100644 --- a/api/core/agent/cot_agent_runner.py +++ b/api/core/agent/cot_agent_runner.py @@ -97,13 +97,13 @@ class CotAgentRunner(BaseAgentRunner, ABC): message_file_ids: list[str] = [] - agent_thought = self.create_agent_thought( + agent_thought_id = self.create_agent_thought( message_id=message.id, message="", tool_name="", tool_input="", messages_ids=message_file_ids ) if iteration_step > 1: self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) # recalc llm max tokens @@ -133,7 +133,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): # publish agent thought if it's first iteration if iteration_step == 1: self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) for chunk in react_chunks: @@ -168,7 +168,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): usage_dict["usage"] = LLMUsage.empty_usage() self.save_agent_thought( - agent_thought=agent_thought, + agent_thought_id=agent_thought_id, tool_name=(scratchpad.action.action_name if scratchpad.action and not scratchpad.is_final() else ""), tool_input={scratchpad.action.action_name: scratchpad.action.action_input} if scratchpad.action else {}, tool_invoke_meta={}, @@ -181,7 +181,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): if not scratchpad.is_final(): self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) if not scratchpad.action: @@ -212,7 +212,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): scratchpad.agent_response = tool_invoke_response self.save_agent_thought( - agent_thought=agent_thought, + agent_thought_id=agent_thought_id, tool_name=scratchpad.action.action_name, tool_input={scratchpad.action.action_name: scratchpad.action.action_input}, thought=scratchpad.thought or "", @@ -224,7 +224,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): ) self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) # update prompt tool message @@ -244,7 +244,7 @@ class CotAgentRunner(BaseAgentRunner, ABC): # save agent thought self.save_agent_thought( - agent_thought=agent_thought, + agent_thought_id=agent_thought_id, tool_name="", tool_input={}, tool_invoke_meta={}, diff --git 
a/api/core/agent/fc_agent_runner.py b/api/core/agent/fc_agent_runner.py index 5491689ece..4df71ce9de 100644 --- a/api/core/agent/fc_agent_runner.py +++ b/api/core/agent/fc_agent_runner.py @@ -80,7 +80,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): prompt_messages_tools = [] message_file_ids: list[str] = [] - agent_thought = self.create_agent_thought( + agent_thought_id = self.create_agent_thought( message_id=message.id, message="", tool_name="", tool_input="", messages_ids=message_file_ids ) @@ -114,7 +114,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): for chunk in chunks: if is_first_chunk: self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) is_first_chunk = False # check if there is any tool call @@ -172,7 +172,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): result.message.content = "" self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) yield LLMResultChunk( @@ -205,7 +205,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): # save thought self.save_agent_thought( - agent_thought=agent_thought, + agent_thought_id=agent_thought_id, tool_name=tool_call_names, tool_input=tool_call_inputs, thought=response, @@ -216,7 +216,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): llm_usage=current_llm_usage, ) self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) final_answer += response + "\n" @@ -276,7 +276,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): if len(tool_responses) > 0: # save agent thought self.save_agent_thought( - agent_thought=agent_thought, + agent_thought_id=agent_thought_id, tool_name="", tool_input="", thought="", @@ -291,7 +291,7 @@ class FunctionCallAgentRunner(BaseAgentRunner): messages_ids=message_file_ids, ) self.queue_manager.publish( - QueueAgentThoughtEvent(agent_thought_id=agent_thought.id), PublishFrom.APPLICATION_MANAGER + QueueAgentThoughtEvent(agent_thought_id=agent_thought_id), PublishFrom.APPLICATION_MANAGER ) # update prompt tool diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 610a5bb278..52ae20ee16 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -600,5 +600,5 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): if len(e.args) > 0 and e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(f"Failed to process generate task pipeline, conversation_id: {conversation.id}") + logger.exception("Failed to process generate task pipeline, conversation_id: %s", conversation.id) raise e diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index dc27076a4d..abb8db34de 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -271,7 +271,7 @@ class AdvancedChatAppGenerateTaskPipeline: start_listener_time = time.time() yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except 
Exception: - logger.exception(f"Failed to listen audio message, task_id: {task_id}") + logger.exception("Failed to listen audio message, task_id: %s", task_id) break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/apps/message_based_app_generator.py b/api/core/app/apps/message_based_app_generator.py index 7dd9904eeb..11c979765b 100644 --- a/api/core/app/apps/message_based_app_generator.py +++ b/api/core/app/apps/message_based_app_generator.py @@ -78,7 +78,7 @@ class MessageBasedAppGenerator(BaseAppGenerator): if len(e.args) > 0 and e.args[0] == "I/O operation on closed file.": # ignore this error raise GenerateTaskStoppedError() else: - logger.exception(f"Failed to handle response, conversation_id: {conversation.id}") + logger.exception("Failed to handle response, conversation_id: %s", conversation.id) raise e def _get_app_model_config(self, app_model: App, conversation: Optional[Conversation] = None) -> AppModelConfig: diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 4c36f63c71..22b0234604 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -483,7 +483,7 @@ class WorkflowAppGenerator(BaseAppGenerator): try: runner.run() except GenerateTaskStoppedError as e: - logger.warning(f"Task stopped: {str(e)}") + logger.warning("Task stopped: %s", str(e)) pass except InvokeAuthorizationError: queue_manager.publish_error( @@ -540,6 +540,6 @@ class WorkflowAppGenerator(BaseAppGenerator): raise GenerateTaskStoppedError() else: logger.exception( - f"Fails to process generate task pipeline, task_id: {application_generate_entity.task_id}" + "Fails to process generate task pipeline, task_id: %s", application_generate_entity.task_id ) raise e diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index e31a316c56..b1e9a340bd 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -246,7 +246,7 @@ class WorkflowAppGenerateTaskPipeline: else: yield MessageAudioStreamResponse(audio=audio_trunk.audio, task_id=task_id) except Exception: - logger.exception(f"Fails to get audio trunk, task_id: {task_id}") + logger.exception("Fails to get audio trunk, task_id: %s", task_id) break if tts_publisher: yield MessageAudioEndStreamResponse(audio="", task_id=task_id) diff --git a/api/core/app/features/annotation_reply/annotation_reply.py b/api/core/app/features/annotation_reply/annotation_reply.py index 54dc69302a..b829340401 100644 --- a/api/core/app/features/annotation_reply/annotation_reply.py +++ b/api/core/app/features/annotation_reply/annotation_reply.py @@ -83,7 +83,7 @@ class AnnotationReplyFeature: return annotation except Exception as e: - logger.warning(f"Query annotation failed, exception: {str(e)}.") + logger.warning("Query annotation failed, exception: %s.", str(e)) return None return None diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 824da0b934..f0e9425e3f 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -97,7 +97,7 @@ class MessageCycleManager: conversation.name = name except Exception as e: if dify_config.DEBUG: - logging.exception(f"generate conversation name failed, conversation_id: {conversation_id}") + logging.exception("generate 
conversation name failed, conversation_id: %s", conversation_id) pass db.session.merge(conversation) diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py index af5c18e267..9aaa1f0b10 100644 --- a/api/core/entities/provider_configuration.py +++ b/api/core/entities/provider_configuration.py @@ -900,7 +900,7 @@ class ProviderConfiguration(BaseModel): credentials=copy_credentials, ) except Exception as ex: - logger.warning(f"get custom model schema failed, {ex}") + logger.warning("get custom model schema failed, %s", ex) continue if not custom_model_schema: @@ -1009,7 +1009,7 @@ class ProviderConfiguration(BaseModel): credentials=model_configuration.credentials, ) except Exception as ex: - logger.warning(f"get custom model schema failed, {ex}") + logger.warning("get custom model schema failed, %s", ex) continue if not custom_model_schema: diff --git a/api/core/extension/api_based_extension_requestor.py b/api/core/extension/api_based_extension_requestor.py index 3f4e20ec24..accccd8c40 100644 --- a/api/core/extension/api_based_extension_requestor.py +++ b/api/core/extension/api_based_extension_requestor.py @@ -22,7 +22,7 @@ class APIBasedExtensionRequestor: :param params: the request params :return: the response json """ - headers = {"Content-Type": "application/json", "Authorization": "Bearer {}".format(self.api_key)} + headers = {"Content-Type": "application/json", "Authorization": f"Bearer {self.api_key}"} url = self.api_endpoint @@ -49,8 +49,6 @@ class APIBasedExtensionRequestor: raise ValueError("request connection error") if response.status_code != 200: - raise ValueError( - "request error, status_code: {}, content: {}".format(response.status_code, response.text[:100]) - ) + raise ValueError(f"request error, status_code: {response.status_code}, content: {response.text[:100]}") return cast(dict, response.json()) diff --git a/api/core/extension/extensible.py b/api/core/extension/extensible.py index 06fdb089d4..557f7eb1ed 100644 --- a/api/core/extension/extensible.py +++ b/api/core/extension/extensible.py @@ -66,7 +66,7 @@ class Extensible: # Check for extension module file if (extension_name + ".py") not in file_names: - logging.warning(f"Missing {extension_name}.py file in {subdir_path}, Skip.") + logging.warning("Missing %s.py file in %s, Skip.", extension_name, subdir_path) continue # Check for builtin flag and position @@ -95,7 +95,7 @@ class Extensible: break if not extension_class: - logging.warning(f"Missing subclass of {cls.__name__} in {module_name}, Skip.") + logging.warning("Missing subclass of %s in %s, Skip.", cls.__name__, module_name) continue # Load schema if not builtin @@ -103,7 +103,7 @@ class Extensible: if not builtin: json_path = os.path.join(subdir_path, "schema.json") if not os.path.exists(json_path): - logging.warning(f"Missing schema.json file in {subdir_path}, Skip.") + logging.warning("Missing schema.json file in %s, Skip.", subdir_path) continue with open(json_path, encoding="utf-8") as f: diff --git a/api/core/external_data_tool/api/api.py b/api/core/external_data_tool/api/api.py index 2099a9e34c..d81f372d40 100644 --- a/api/core/external_data_tool/api/api.py +++ b/api/core/external_data_tool/api/api.py @@ -49,7 +49,7 @@ class ApiExternalDataTool(ExternalDataTool): """ # get params from config if not self.config: - raise ValueError("config is required, config: {}".format(self.config)) + raise ValueError(f"config is required, config: {self.config}") api_based_extension_id = 
self.config.get("api_based_extension_id") assert api_based_extension_id is not None, "api_based_extension_id is required" @@ -74,7 +74,7 @@ class ApiExternalDataTool(ExternalDataTool): # request api requestor = APIBasedExtensionRequestor(api_endpoint=api_based_extension.api_endpoint, api_key=api_key) except Exception as e: - raise ValueError("[External data tool] API query failed, variable: {}, error: {}".format(self.variable, e)) + raise ValueError(f"[External data tool] API query failed, variable: {self.variable}, error: {e}") response_json = requestor.request( point=APIBasedExtensionPoint.APP_EXTERNAL_DATA_TOOL_QUERY, @@ -90,7 +90,7 @@ class ApiExternalDataTool(ExternalDataTool): if not isinstance(response_json["result"], str): raise ValueError( - "[External data tool] API query failed, variable: {}, error: result is not string".format(self.variable) + f"[External data tool] API query failed, variable: {self.variable}, error: result is not string" ) return response_json["result"] diff --git a/api/core/helper/moderation.py b/api/core/helper/moderation.py index a324ac2767..86bac4119a 100644 --- a/api/core/helper/moderation.py +++ b/api/core/helper/moderation.py @@ -55,7 +55,7 @@ def check_moderation(tenant_id: str, model_config: ModelConfigWithCredentialsEnt if moderation_result is True: return True except Exception: - logger.exception(f"Fails to check moderation, provider_name: {provider_name}") + logger.exception("Fails to check moderation, provider_name: %s", provider_name) raise InvokeBadRequestError("Rate limit exceeded, please try again later.") return False diff --git a/api/core/helper/module_import_helper.py b/api/core/helper/module_import_helper.py index 9a041667e4..251309fa2c 100644 --- a/api/core/helper/module_import_helper.py +++ b/api/core/helper/module_import_helper.py @@ -30,7 +30,7 @@ def import_module_from_source(*, module_name: str, py_file_path: AnyStr, use_laz spec.loader.exec_module(module) return module except Exception as e: - logging.exception(f"Failed to load module {module_name} from script file '{py_file_path!r}'") + logging.exception("Failed to load module %s from script file '%s'", module_name, repr(py_file_path)) raise e diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 11f245812e..329527633c 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -73,10 +73,12 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): if response.status_code not in STATUS_FORCELIST: return response else: - logging.warning(f"Received status code {response.status_code} for URL {url} which is in the force list") + logging.warning( + "Received status code %s for URL %s which is in the force list", response.status_code, url + ) except httpx.RequestError as e: - logging.warning(f"Request to URL {url} failed on attempt {retries + 1}: {e}") + logging.warning("Request to URL %s failed on attempt %s: %s", url, retries + 1, e) if max_retries == 0: raise diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py index fc5d0547fc..2387658bb6 100644 --- a/api/core/indexing_runner.py +++ b/api/core/indexing_runner.py @@ -84,14 +84,14 @@ class IndexingRunner: documents=documents, ) except DocumentIsPausedError: - raise DocumentIsPausedError("Document paused, document id: {}".format(dataset_document.id)) + raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = 
str(e.description) dataset_document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) db.session.commit() except ObjectDeletedError: - logging.warning("Document deleted, document id: {}".format(dataset_document.id)) + logging.warning("Document deleted, document id: %s", dataset_document.id) except Exception as e: logging.exception("consume document failed") dataset_document.indexing_status = "error" @@ -147,7 +147,7 @@ class IndexingRunner: index_processor=index_processor, dataset=dataset, dataset_document=dataset_document, documents=documents ) except DocumentIsPausedError: - raise DocumentIsPausedError("Document paused, document id: {}".format(dataset_document.id)) + raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = str(e.description) @@ -222,7 +222,7 @@ class IndexingRunner: index_processor=index_processor, dataset=dataset, dataset_document=dataset_document, documents=documents ) except DocumentIsPausedError: - raise DocumentIsPausedError("Document paused, document id: {}".format(dataset_document.id)) + raise DocumentIsPausedError(f"Document paused, document id: {dataset_document.id}") except ProviderTokenNotInitError as e: dataset_document.indexing_status = "error" dataset_document.error = str(e.description) @@ -324,7 +324,8 @@ class IndexingRunner: except Exception: logging.exception( "Delete image_files failed while indexing_estimate, \ - image_upload_file_is: {}".format(upload_file_id) + image_upload_file_is: %s", + upload_file_id, ) db.session.delete(image_file) @@ -649,7 +650,7 @@ class IndexingRunner: @staticmethod def _check_document_paused_status(document_id: str): - indexing_cache_key = "document_{}_is_paused".format(document_id) + indexing_cache_key = f"document_{document_id}_is_paused" result = redis_client.get(indexing_cache_key) if result: raise DocumentIsPausedError() diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py index 331ac933c8..47e5a79160 100644 --- a/api/core/llm_generator/llm_generator.py +++ b/api/core/llm_generator/llm_generator.py @@ -125,16 +125,13 @@ class LLMGenerator: return questions @classmethod - def generate_rule_config( - cls, tenant_id: str, instruction: str, model_config: dict, no_variable: bool, rule_config_max_tokens: int = 512 - ) -> dict: + def generate_rule_config(cls, tenant_id: str, instruction: str, model_config: dict, no_variable: bool) -> dict: output_parser = RuleConfigGeneratorOutputParser() error = "" error_step = "" rule_config = {"prompt": "", "variables": [], "opening_statement": "", "error": ""} - model_parameters = {"max_tokens": rule_config_max_tokens, "temperature": 0.01} - + model_parameters = model_config.get("completion_params", {}) if no_variable: prompt_template = PromptTemplateParser(WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE) @@ -170,7 +167,7 @@ class LLMGenerator: error = str(e) error_step = "generate rule config" except Exception as e: - logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}") + logging.exception("Failed to generate rule config, model: %s", model_config.get("name")) rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. 
Error: {error}" if error else "" @@ -267,7 +264,7 @@ class LLMGenerator: error_step = "generate conversation opener" except Exception as e: - logging.exception(f"Failed to generate rule config, model: {model_config.get('name')}") + logging.exception("Failed to generate rule config, model: %s", model_config.get("name")) rule_config["error"] = str(e) rule_config["error"] = f"Failed to {error_step}. Error: {error}" if error else "" @@ -276,12 +273,7 @@ class LLMGenerator: @classmethod def generate_code( - cls, - tenant_id: str, - instruction: str, - model_config: dict, - code_language: str = "javascript", - max_tokens: int = 1000, + cls, tenant_id: str, instruction: str, model_config: dict, code_language: str = "javascript" ) -> dict: if code_language == "python": prompt_template = PromptTemplateParser(PYTHON_CODE_GENERATOR_PROMPT_TEMPLATE) @@ -305,8 +297,7 @@ class LLMGenerator: ) prompt_messages = [UserPromptMessage(content=prompt)] - model_parameters = {"max_tokens": max_tokens, "temperature": 0.01} - + model_parameters = model_config.get("completion_params", {}) try: response = cast( LLMResult, @@ -323,7 +314,7 @@ class LLMGenerator: return {"code": "", "language": code_language, "error": f"Failed to generate code. Error: {error}"} except Exception as e: logging.exception( - f"Failed to invoke LLM model, model: {model_config.get('name')}, language: {code_language}" + "Failed to invoke LLM model, model: %s, language: %s", model_config.get("name"), code_language ) return {"code": "", "language": code_language, "error": f"An unexpected error occurred: {str(e)}"} @@ -395,5 +386,5 @@ class LLMGenerator: error = str(e) return {"output": "", "error": f"Failed to generate JSON Schema. Error: {error}"} except Exception as e: - logging.exception(f"Failed to invoke LLM model, model: {model_config.get('name')}") + logging.exception("Failed to invoke LLM model, model: %s", model_config.get("name")) return {"output": "", "error": f"An unexpected error occurred: {str(e)}"} diff --git a/api/core/mcp/client/sse_client.py b/api/core/mcp/client/sse_client.py index 91debcc8f9..4226e77f7e 100644 --- a/api/core/mcp/client/sse_client.py +++ b/api/core/mcp/client/sse_client.py @@ -88,7 +88,7 @@ class SSETransport: status_queue: Queue to put status updates. """ endpoint_url = urljoin(self.url, sse_data) - logger.info(f"Received endpoint URL: {endpoint_url}") + logger.info("Received endpoint URL: %s", endpoint_url) if not self._validate_endpoint_url(endpoint_url): error_msg = f"Endpoint origin does not match connection origin: {endpoint_url}" @@ -107,7 +107,7 @@ class SSETransport: """ try: message = types.JSONRPCMessage.model_validate_json(sse_data) - logger.debug(f"Received server message: {message}") + logger.debug("Received server message: %s", message) session_message = SessionMessage(message) read_queue.put(session_message) except Exception as exc: @@ -128,7 +128,7 @@ class SSETransport: case "message": self._handle_message_event(sse.data, read_queue) case _: - logger.warning(f"Unknown SSE event: {sse.event}") + logger.warning("Unknown SSE event: %s", sse.event) def sse_reader(self, event_source, read_queue: ReadQueue, status_queue: StatusQueue) -> None: """Read and process SSE events. 
@@ -142,7 +142,7 @@ class SSETransport: for sse in event_source.iter_sse(): self._handle_sse_event(sse, read_queue, status_queue) except httpx.ReadError as exc: - logger.debug(f"SSE reader shutting down normally: {exc}") + logger.debug("SSE reader shutting down normally: %s", exc) except Exception as exc: read_queue.put(exc) finally: @@ -165,7 +165,7 @@ class SSETransport: ), ) response.raise_for_status() - logger.debug(f"Client message sent successfully: {response.status_code}") + logger.debug("Client message sent successfully: %s", response.status_code) def post_writer(self, client: httpx.Client, endpoint_url: str, write_queue: WriteQueue) -> None: """Handle writing messages to the server. @@ -190,7 +190,7 @@ class SSETransport: except queue.Empty: continue except httpx.ReadError as exc: - logger.debug(f"Post writer shutting down normally: {exc}") + logger.debug("Post writer shutting down normally: %s", exc) except Exception as exc: logger.exception("Error writing messages") write_queue.put(exc) @@ -326,7 +326,7 @@ def send_message(http_client: httpx.Client, endpoint_url: str, session_message: ), ) response.raise_for_status() - logger.debug(f"Client message sent successfully: {response.status_code}") + logger.debug("Client message sent successfully: %s", response.status_code) except Exception as exc: logger.exception("Error sending message") raise @@ -349,13 +349,13 @@ def read_messages( if sse.event == "message": try: message = types.JSONRPCMessage.model_validate_json(sse.data) - logger.debug(f"Received server message: {message}") + logger.debug("Received server message: %s", message) yield SessionMessage(message) except Exception as exc: logger.exception("Error parsing server message") yield exc else: - logger.warning(f"Unknown SSE event: {sse.event}") + logger.warning("Unknown SSE event: %s", sse.event) except Exception as exc: logger.exception("Error reading SSE messages") yield exc diff --git a/api/core/mcp/client/streamable_client.py b/api/core/mcp/client/streamable_client.py index fbd8d05f9e..ca414ebb93 100644 --- a/api/core/mcp/client/streamable_client.py +++ b/api/core/mcp/client/streamable_client.py @@ -129,7 +129,7 @@ class StreamableHTTPTransport: new_session_id = response.headers.get(MCP_SESSION_ID) if new_session_id: self.session_id = new_session_id - logger.info(f"Received session ID: {self.session_id}") + logger.info("Received session ID: %s", self.session_id) def _handle_sse_event( self, @@ -142,7 +142,7 @@ class StreamableHTTPTransport: if sse.event == "message": try: message = JSONRPCMessage.model_validate_json(sse.data) - logger.debug(f"SSE message: {message}") + logger.debug("SSE message: %s", message) # If this is a response and we have original_request_id, replace it if original_request_id is not None and isinstance(message.root, JSONRPCResponse | JSONRPCError): @@ -168,7 +168,7 @@ class StreamableHTTPTransport: logger.debug("Received ping event") return False else: - logger.warning(f"Unknown SSE event: {sse.event}") + logger.warning("Unknown SSE event: %s", sse.event) return False def handle_get_stream( @@ -197,7 +197,7 @@ class StreamableHTTPTransport: self._handle_sse_event(sse, server_to_client_queue) except Exception as exc: - logger.debug(f"GET stream error (non-fatal): {exc}") + logger.debug("GET stream error (non-fatal): %s", exc) def _handle_resumption_request(self, ctx: RequestContext) -> None: """Handle a resumption request using GET with SSE.""" @@ -352,7 +352,7 @@ class StreamableHTTPTransport: # Check if this is a resumption request is_resumption = 
bool(metadata and metadata.resumption_token) - logger.debug(f"Sending client message: {message}") + logger.debug("Sending client message: %s", message) # Handle initialized notification if self._is_initialized_notification(message): @@ -389,9 +389,9 @@ class StreamableHTTPTransport: if response.status_code == 405: logger.debug("Server does not allow session termination") elif response.status_code != 200: - logger.warning(f"Session termination failed: {response.status_code}") + logger.warning("Session termination failed: %s", response.status_code) except Exception as exc: - logger.warning(f"Session termination failed: {exc}") + logger.warning("Session termination failed: %s", exc) def get_session_id(self) -> str | None: """Get the current session ID.""" diff --git a/api/core/mcp/mcp_client.py b/api/core/mcp/mcp_client.py index 5fe52c008a..875d13de05 100644 --- a/api/core/mcp/mcp_client.py +++ b/api/core/mcp/mcp_client.py @@ -75,7 +75,7 @@ class MCPClient: self.connect_server(client_factory, method_name) else: try: - logger.debug(f"Not supported method {method_name} found in URL path, trying default 'mcp' method.") + logger.debug("Not supported method %s found in URL path, trying default 'mcp' method.", method_name) self.connect_server(sse_client, "sse") except MCPConnectionError: logger.debug("MCP connection failed with 'sse', falling back to 'mcp' method.") diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py index 7734b8fdd9..3b6c9a7424 100644 --- a/api/core/mcp/session/base_session.py +++ b/api/core/mcp/session/base_session.py @@ -368,7 +368,7 @@ class BaseSession( self._handle_incoming(notification) except Exception as e: # For other validation errors, log and continue - logging.warning(f"Failed to validate notification: {e}. Message was: {message.message.root}") + logging.warning("Failed to validate notification: %s. 
Message was: %s", e, message.message.root) else: # Response or error response_queue = self._response_streams.get(message.message.root.id) if response_queue is not None: diff --git a/api/core/model_manager.py b/api/core/model_manager.py index 4886ffe244..51af3d1877 100644 --- a/api/core/model_manager.py +++ b/api/core/model_manager.py @@ -535,9 +535,19 @@ class LBModelManager: if dify_config.DEBUG: logger.info( - f"Model LB\nid: {config.id}\nname:{config.name}\n" - f"tenant_id: {self._tenant_id}\nprovider: {self._provider}\n" - f"model_type: {self._model_type.value}\nmodel: {self._model}" + """Model LB +id: %s +name:%s +tenant_id: %s +provider: %s +model_type: %s +model: %s""", + config.id, + config.name, + self._tenant_id, + self._provider, + self._model_type.value, + self._model, ) return config diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index e2cc576f83..ce378b443d 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -440,7 +440,9 @@ class LargeLanguageModel(AIModel): if callback.raise_error: raise e else: - logger.warning(f"Callback {callback.__class__.__name__} on_before_invoke failed with error {e}") + logger.warning( + "Callback %s on_before_invoke failed with error %s", callback.__class__.__name__, e + ) def _trigger_new_chunk_callbacks( self, @@ -487,7 +489,7 @@ class LargeLanguageModel(AIModel): if callback.raise_error: raise e else: - logger.warning(f"Callback {callback.__class__.__name__} on_new_chunk failed with error {e}") + logger.warning("Callback %s on_new_chunk failed with error %s", callback.__class__.__name__, e) def _trigger_after_invoke_callbacks( self, @@ -535,7 +537,9 @@ class LargeLanguageModel(AIModel): if callback.raise_error: raise e else: - logger.warning(f"Callback {callback.__class__.__name__} on_after_invoke failed with error {e}") + logger.warning( + "Callback %s on_after_invoke failed with error %s", callback.__class__.__name__, e + ) def _trigger_invoke_error_callbacks( self, @@ -583,4 +587,6 @@ class LargeLanguageModel(AIModel): if callback.raise_error: raise e else: - logger.warning(f"Callback {callback.__class__.__name__} on_invoke_error failed with error {e}") + logger.warning( + "Callback %s on_invoke_error failed with error %s", callback.__class__.__name__, e + ) diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py index 2ec315417f..b39db4b7ff 100644 --- a/api/core/moderation/output_moderation.py +++ b/api/core/moderation/output_moderation.py @@ -136,6 +136,6 @@ class OutputModeration(BaseModel): result: ModerationOutputsResult = moderation_factory.moderation_for_outputs(moderation_buffer) return result except Exception as e: - logger.exception(f"Moderation Output error, app_id: {app_id}") + logger.exception("Moderation Output error, app_id: %s", app_id) return None diff --git a/api/core/ops/aliyun_trace/aliyun_trace.py b/api/core/ops/aliyun_trace/aliyun_trace.py index cf367efdf0..06050619e9 100644 --- a/api/core/ops/aliyun_trace/aliyun_trace.py +++ b/api/core/ops/aliyun_trace/aliyun_trace.py @@ -10,6 +10,7 @@ from sqlalchemy.orm import Session, sessionmaker from core.ops.aliyun_trace.data_exporter.traceclient import ( TraceClient, convert_datetime_to_nanoseconds, + convert_string_to_id, convert_to_span_id, convert_to_trace_id, generate_span_id, @@ -97,12 +98,13 @@ class 
AliyunDataTrace(BaseTraceInstance): try: return self.trace_client.get_project_url() except Exception as e: - logger.info(f"Aliyun get run url failed: {str(e)}", exc_info=True) + logger.info("Aliyun get run url failed: %s", str(e), exc_info=True) raise ValueError(f"Aliyun get run url failed: {str(e)}") def workflow_trace(self, trace_info: WorkflowTraceInfo): - external_trace_id = trace_info.metadata.get("external_trace_id") - trace_id = external_trace_id or convert_to_trace_id(trace_info.workflow_run_id) + trace_id = convert_to_trace_id(trace_info.workflow_run_id) + if trace_info.trace_id: + trace_id = convert_string_to_id(trace_info.trace_id) workflow_span_id = convert_to_span_id(trace_info.workflow_run_id, "workflow") self.add_workflow_span(trace_id, workflow_span_id, trace_info) @@ -130,6 +132,9 @@ class AliyunDataTrace(BaseTraceInstance): status = Status(StatusCode.ERROR, trace_info.error) trace_id = convert_to_trace_id(message_id) + if trace_info.trace_id: + trace_id = convert_string_to_id(trace_info.trace_id) + message_span_id = convert_to_span_id(message_id, "message") message_span = SpanData( trace_id=trace_id, @@ -139,7 +144,7 @@ class AliyunDataTrace(BaseTraceInstance): start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", GEN_AI_USER_ID: str(user_id), GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value, GEN_AI_FRAMEWORK: "dify", @@ -161,12 +166,12 @@ class AliyunDataTrace(BaseTraceInstance): start_time=convert_datetime_to_nanoseconds(trace_info.start_time), end_time=convert_datetime_to_nanoseconds(trace_info.end_time), attributes={ - GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id", ""), + GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", GEN_AI_USER_ID: str(user_id), GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, GEN_AI_FRAMEWORK: "dify", - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""), - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""), + GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(trace_info.message_tokens), GEN_AI_USAGE_OUTPUT_TOKENS: str(trace_info.answer_tokens), GEN_AI_USAGE_TOTAL_TOKENS: str(trace_info.total_tokens), @@ -186,9 +191,13 @@ class AliyunDataTrace(BaseTraceInstance): return message_id = trace_info.message_id + trace_id = convert_to_trace_id(message_id) + if trace_info.trace_id: + trace_id = convert_string_to_id(trace_info.trace_id) + documents_data = extract_retrieval_documents(trace_info.documents) dataset_retrieval_span = SpanData( - trace_id=convert_to_trace_id(message_id), + trace_id=trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=generate_span_id(), name="dataset_retrieval", @@ -214,8 +223,12 @@ class AliyunDataTrace(BaseTraceInstance): if trace_info.error: status = Status(StatusCode.ERROR, trace_info.error) + trace_id = convert_to_trace_id(message_id) + if trace_info.trace_id: + trace_id = convert_string_to_id(trace_info.trace_id) + tool_span = SpanData( - trace_id=convert_to_trace_id(message_id), + trace_id=trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=generate_span_id(), name=trace_info.tool_name, @@ -286,7 +299,7 @@ class AliyunDataTrace(BaseTraceInstance): node_span = self.build_workflow_task_span(trace_id, 
workflow_span_id, trace_info, node_execution) return node_span except Exception as e: - logging.debug(f"Error occurred in build_workflow_node_span: {e}", exc_info=True) + logging.debug("Error occurred in build_workflow_node_span: %s", e, exc_info=True) return None def get_workflow_node_status(self, node_execution: WorkflowNodeExecution) -> Status: @@ -386,14 +399,14 @@ class AliyunDataTrace(BaseTraceInstance): GEN_AI_SESSION_ID: trace_info.metadata.get("conversation_id") or "", GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, GEN_AI_FRAMEWORK: "dify", - GEN_AI_MODEL_NAME: process_data.get("model_name", ""), - GEN_AI_SYSTEM: process_data.get("model_provider", ""), + GEN_AI_MODEL_NAME: process_data.get("model_name") or "", + GEN_AI_SYSTEM: process_data.get("model_provider") or "", GEN_AI_USAGE_INPUT_TOKENS: str(usage_data.get("prompt_tokens", 0)), GEN_AI_USAGE_OUTPUT_TOKENS: str(usage_data.get("completion_tokens", 0)), GEN_AI_USAGE_TOTAL_TOKENS: str(usage_data.get("total_tokens", 0)), GEN_AI_PROMPT: json.dumps(process_data.get("prompts", []), ensure_ascii=False), GEN_AI_COMPLETION: str(outputs.get("text", "")), - GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason", ""), + GEN_AI_RESPONSE_FINISH_REASON: outputs.get("finish_reason") or "", INPUT_VALUE: json.dumps(process_data.get("prompts", []), ensure_ascii=False), OUTPUT_VALUE: str(outputs.get("text", "")), }, @@ -421,7 +434,7 @@ class AliyunDataTrace(BaseTraceInstance): GEN_AI_USER_ID: str(user_id), GEN_AI_SPAN_KIND: GenAISpanKind.CHAIN.value, GEN_AI_FRAMEWORK: "dify", - INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query", ""), + INPUT_VALUE: trace_info.workflow_run_inputs.get("sys.query") or "", OUTPUT_VALUE: json.dumps(trace_info.workflow_run_outputs, ensure_ascii=False), }, status=status, @@ -451,8 +464,13 @@ class AliyunDataTrace(BaseTraceInstance): status: Status = Status(StatusCode.OK) if trace_info.error: status = Status(StatusCode.ERROR, trace_info.error) + + trace_id = convert_to_trace_id(message_id) + if trace_info.trace_id: + trace_id = convert_string_to_id(trace_info.trace_id) + suggested_question_span = SpanData( - trace_id=convert_to_trace_id(message_id), + trace_id=trace_id, parent_span_id=convert_to_span_id(message_id, "message"), span_id=convert_to_span_id(message_id, "suggested_question"), name="suggested_question", @@ -461,8 +479,8 @@ class AliyunDataTrace(BaseTraceInstance): attributes={ GEN_AI_SPAN_KIND: GenAISpanKind.LLM.value, GEN_AI_FRAMEWORK: "dify", - GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name", ""), - GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider", ""), + GEN_AI_MODEL_NAME: trace_info.metadata.get("ls_model_name") or "", + GEN_AI_SYSTEM: trace_info.metadata.get("ls_provider") or "", GEN_AI_PROMPT: json.dumps(trace_info.inputs, ensure_ascii=False), GEN_AI_COMPLETION: json.dumps(trace_info.suggested_question, ensure_ascii=False), INPUT_VALUE: json.dumps(trace_info.inputs, ensure_ascii=False), diff --git a/api/core/ops/aliyun_trace/data_exporter/traceclient.py b/api/core/ops/aliyun_trace/data_exporter/traceclient.py index ba5ac3f420..bd19c8a503 100644 --- a/api/core/ops/aliyun_trace/data_exporter/traceclient.py +++ b/api/core/ops/aliyun_trace/data_exporter/traceclient.py @@ -69,10 +69,10 @@ class TraceClient: if response.status_code == 405: return True else: - logger.debug(f"AliyunTrace API check failed: Unexpected status code: {response.status_code}") + logger.debug("AliyunTrace API check failed: Unexpected status code: %s", response.status_code) return False except 
requests.exceptions.RequestException as e: - logger.debug(f"AliyunTrace API check failed: {str(e)}") + logger.debug("AliyunTrace API check failed: %s", str(e)) raise ValueError(f"AliyunTrace API check failed: {str(e)}") def get_project_url(self): @@ -109,7 +109,7 @@ class TraceClient: try: self.exporter.export(spans_to_export) except Exception as e: - logger.debug(f"Error exporting spans: {e}") + logger.debug("Error exporting spans: %s", e) def shutdown(self): with self.condition: @@ -181,15 +181,21 @@ def convert_to_trace_id(uuid_v4: Optional[str]) -> int: raise ValueError(f"Invalid UUID input: {e}") +def convert_string_to_id(string: Optional[str]) -> int: + if not string: + return generate_span_id() + hash_bytes = hashlib.sha256(string.encode("utf-8")).digest() + id = int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) + return id + + def convert_to_span_id(uuid_v4: Optional[str], span_type: str) -> int: try: uuid_obj = uuid.UUID(uuid_v4) except Exception as e: raise ValueError(f"Invalid UUID input: {e}") combined_key = f"{uuid_obj.hex}-{span_type}" - hash_bytes = hashlib.sha256(combined_key.encode("utf-8")).digest() - span_id = int.from_bytes(hash_bytes[:8], byteorder="big", signed=False) - return span_id + return convert_string_to_id(combined_key) def convert_datetime_to_nanoseconds(start_time_a: Optional[datetime]) -> Optional[int]: diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py index 1b72a4775a..a20f2485c8 100644 --- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py +++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py @@ -77,10 +77,10 @@ def setup_tracer(arize_phoenix_config: ArizeConfig | PhoenixConfig) -> tuple[tra # Create a named tracer instead of setting the global provider tracer_name = f"arize_phoenix_tracer_{arize_phoenix_config.project}" - logger.info(f"[Arize/Phoenix] Created tracer with name: {tracer_name}") + logger.info("[Arize/Phoenix] Created tracer with name: %s", tracer_name) return cast(trace_sdk.Tracer, provider.get_tracer(tracer_name)), processor except Exception as e: - logger.error(f"[Arize/Phoenix] Failed to setup the tracer: {str(e)}", exc_info=True) + logger.error("[Arize/Phoenix] Failed to setup the tracer: %s", str(e), exc_info=True) raise @@ -91,16 +91,21 @@ def datetime_to_nanos(dt: Optional[datetime]) -> int: return int(dt.timestamp() * 1_000_000_000) -def uuid_to_trace_id(string: Optional[str]) -> int: - """Convert UUID string to a valid trace ID (16-byte integer).""" +def string_to_trace_id128(string: Optional[str]) -> int: + """ + Convert any input string into a stable 128-bit integer trace ID. + + This uses SHA-256 hashing and takes the first 16 bytes (128 bits) of the digest. + It's suitable for generating consistent, unique identifiers from strings. 
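Both hashing helpers touched here (convert_string_to_id in traceclient.py above, string_to_trace_id128 in this docstring) share one construction: hash the input string with SHA-256 and keep a fixed-width big-endian prefix of the digest, yielding a deterministic unsigned integer of the required width, 8 bytes for span IDs and 16 bytes for OpenTelemetry trace IDs. A condensed sketch of the idea (stable_id is a hypothetical name, not a function in this codebase):

import hashlib

def stable_id(value: str, n_bytes: int) -> int:
    # The same input always maps to the same unsigned integer
    # of n_bytes * 8 bits, so retries produce identical IDs.
    digest = hashlib.sha256(value.encode("utf-8")).digest()
    return int.from_bytes(digest[:n_bytes], byteorder="big", signed=False)

span_id = stable_id("workflow-run-uuid-workflow", 8)   # 64-bit span ID
trace_id = stable_id("external-trace-123", 16)         # 128-bit trace ID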
+ """ if string is None: string = "" hash_object = hashlib.sha256(string.encode()) - # Take the first 16 bytes (128 bits) of the hash + # Take the first 16 bytes (128 bits) of the hash digest digest = hash_object.digest()[:16] - # Convert to integer (128 bits) + # Convert to a 128-bit integer return int.from_bytes(digest, byteorder="big") @@ -120,7 +125,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): self.file_base_url = os.getenv("FILES_URL", "http://127.0.0.1:5001") def trace(self, trace_info: BaseTraceInfo): - logger.info(f"[Arize/Phoenix] Trace: {trace_info}") + logger.info("[Arize/Phoenix] Trace: %s", trace_info) try: if isinstance(trace_info, WorkflowTraceInfo): self.workflow_trace(trace_info) @@ -138,7 +143,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) except Exception as e: - logger.error(f"[Arize/Phoenix] Error in the trace: {str(e)}", exc_info=True) + logger.error("[Arize/Phoenix] Error in the trace: %s", str(e), exc_info=True) raise def workflow_trace(self, trace_info: WorkflowTraceInfo): @@ -153,8 +158,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } workflow_metadata.update(trace_info.metadata) - external_trace_id = trace_info.metadata.get("external_trace_id") - trace_id = external_trace_id or uuid_to_trace_id(trace_info.workflow_run_id) + trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.workflow_run_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -310,7 +314,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): SpanAttributes.SESSION_ID: trace_info.message_data.conversation_id, } - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.trace_id or trace_info.message_id) message_span_id = RandomIdGenerator().generate_span_id() span_context = SpanContext( trace_id=trace_id, @@ -406,7 +410,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } metadata.update(trace_info.metadata) - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.message_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -468,7 +472,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } metadata.update(trace_info.metadata) - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.message_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -521,7 +525,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } metadata.update(trace_info.metadata) - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.message_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -568,9 +572,9 @@ class ArizePhoenixDataTrace(BaseTraceInstance): "tool_config": json.dumps(trace_info.tool_config, ensure_ascii=False), } - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.message_id) tool_span_id = RandomIdGenerator().generate_span_id() - logger.info(f"[Arize/Phoenix] Creating tool trace with trace_id: {trace_id}, span_id: {tool_span_id}") + logger.info("[Arize/Phoenix] Creating tool trace with trace_id: %s, span_id: %s", trace_id, tool_span_id) # Create span context with the same trace_id as the parent # todo: Create with the appropriate parent span context, so that the tool span is @@ -629,7 +633,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): } 
metadata.update(trace_info.metadata) - trace_id = uuid_to_trace_id(trace_info.message_id) + trace_id = string_to_trace_id128(trace_info.message_id) span_id = RandomIdGenerator().generate_span_id() context = SpanContext( trace_id=trace_id, @@ -673,7 +677,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): span.set_attribute("test", "true") return True except Exception as e: - logger.info(f"[Arize/Phoenix] API check failed: {str(e)}", exc_info=True) + logger.info("[Arize/Phoenix] API check failed: %s", str(e), exc_info=True) raise ValueError(f"[Arize/Phoenix] API check failed: {str(e)}") def get_project_url(self): @@ -683,7 +687,7 @@ class ArizePhoenixDataTrace(BaseTraceInstance): else: return f"{self.arize_phoenix_config.endpoint}/projects/" except Exception as e: - logger.info(f"[Arize/Phoenix] Get run url failed: {str(e)}", exc_info=True) + logger.info("[Arize/Phoenix] Get run url failed: %s", str(e), exc_info=True) raise ValueError(f"[Arize/Phoenix] Get run url failed: {str(e)}") def _get_workflow_nodes(self, workflow_run_id: str): diff --git a/api/core/ops/entities/config_entity.py b/api/core/ops/entities/config_entity.py index 89ff0cfded..626782cee5 100644 --- a/api/core/ops/entities/config_entity.py +++ b/api/core/ops/entities/config_entity.py @@ -102,7 +102,7 @@ class LangfuseConfig(BaseTracingConfig): @field_validator("host") @classmethod def host_validator(cls, v, info: ValidationInfo): - return cls.validate_endpoint_url(v, "https://api.langfuse.com") + return validate_url_with_path(v, "https://api.langfuse.com") class LangSmithConfig(BaseTracingConfig): diff --git a/api/core/ops/entities/trace_entity.py b/api/core/ops/entities/trace_entity.py index 151fa2aaf4..3bad5c92fb 100644 --- a/api/core/ops/entities/trace_entity.py +++ b/api/core/ops/entities/trace_entity.py @@ -14,6 +14,7 @@ class BaseTraceInfo(BaseModel): start_time: Optional[datetime] = None end_time: Optional[datetime] = None metadata: dict[str, Any] + trace_id: Optional[str] = None @field_validator("inputs", "outputs") @classmethod diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py index f4a59ef3a7..3a03d9f4fe 100644 --- a/api/core/ops/langfuse_trace/langfuse_trace.py +++ b/api/core/ops/langfuse_trace/langfuse_trace.py @@ -67,14 +67,13 @@ class LangFuseDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - external_trace_id = trace_info.metadata.get("external_trace_id") - trace_id = external_trace_id or trace_info.workflow_run_id + trace_id = trace_info.trace_id or trace_info.workflow_run_id user_id = trace_info.metadata.get("user_id") metadata = trace_info.metadata metadata["workflow_app_log_id"] = trace_info.workflow_app_log_id if trace_info.message_id: - trace_id = external_trace_id or trace_info.message_id + trace_id = trace_info.trace_id or trace_info.message_id name = TraceTaskName.MESSAGE_TRACE.value trace_data = LangfuseTrace( id=trace_id, @@ -250,8 +249,10 @@ class LangFuseDataTrace(BaseTraceInstance): user_id = end_user_data.session_id metadata["user_id"] = user_id + trace_id = trace_info.trace_id or message_id + trace_data = LangfuseTrace( - id=message_id, + id=trace_id, user_id=user_id, name=TraceTaskName.MESSAGE_TRACE.value, input={ @@ -285,7 +286,7 @@ class LangFuseDataTrace(BaseTraceInstance): langfuse_generation_data = LangfuseGeneration( name="llm", - trace_id=message_id, + trace_id=trace_id, start_time=trace_info.start_time, end_time=trace_info.end_time, 
model=message_data.model_id, @@ -311,7 +312,7 @@ class LangFuseDataTrace(BaseTraceInstance): "preset_response": trace_info.preset_response, "inputs": trace_info.inputs, }, - trace_id=trace_info.message_id, + trace_id=trace_info.trace_id or trace_info.message_id, start_time=trace_info.start_time or trace_info.message_data.created_at, end_time=trace_info.end_time or trace_info.message_data.created_at, metadata=trace_info.metadata, @@ -334,7 +335,7 @@ class LangFuseDataTrace(BaseTraceInstance): name=TraceTaskName.SUGGESTED_QUESTION_TRACE.value, input=trace_info.inputs, output=str(trace_info.suggested_question), - trace_id=trace_info.message_id, + trace_id=trace_info.trace_id or trace_info.message_id, start_time=trace_info.start_time, end_time=trace_info.end_time, metadata=trace_info.metadata, @@ -352,7 +353,7 @@ class LangFuseDataTrace(BaseTraceInstance): name=TraceTaskName.DATASET_RETRIEVAL_TRACE.value, input=trace_info.inputs, output={"documents": trace_info.documents}, - trace_id=trace_info.message_id, + trace_id=trace_info.trace_id or trace_info.message_id, start_time=trace_info.start_time or trace_info.message_data.created_at, end_time=trace_info.end_time or trace_info.message_data.updated_at, metadata=trace_info.metadata, @@ -365,7 +366,7 @@ class LangFuseDataTrace(BaseTraceInstance): name=trace_info.tool_name, input=trace_info.tool_inputs, output=trace_info.tool_outputs, - trace_id=trace_info.message_id, + trace_id=trace_info.trace_id or trace_info.message_id, start_time=trace_info.start_time, end_time=trace_info.end_time, metadata=trace_info.metadata, @@ -440,7 +441,7 @@ class LangFuseDataTrace(BaseTraceInstance): try: return self.langfuse_client.auth_check() except Exception as e: - logger.debug(f"LangFuse API check failed: {str(e)}") + logger.debug("LangFuse API check failed: %s", str(e)) raise ValueError(f"LangFuse API check failed: {str(e)}") def get_project_key(self): @@ -448,5 +449,5 @@ class LangFuseDataTrace(BaseTraceInstance): projects = self.langfuse_client.client.projects.get() return projects.data[0].id except Exception as e: - logger.debug(f"LangFuse get project key failed: {str(e)}") + logger.debug("LangFuse get project key failed: %s", str(e)) raise ValueError(f"LangFuse get project key failed: {str(e)}") diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py index c97846dc9b..f9e5128e89 100644 --- a/api/core/ops/langsmith_trace/langsmith_trace.py +++ b/api/core/ops/langsmith_trace/langsmith_trace.py @@ -65,8 +65,7 @@ class LangSmithDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - external_trace_id = trace_info.metadata.get("external_trace_id") - trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id + trace_id = trace_info.trace_id or trace_info.message_id or trace_info.workflow_run_id if trace_info.start_time is None: trace_info.start_time = datetime.now() message_dotted_order = ( @@ -290,7 +289,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, parent_run_id=None, ) @@ -319,7 +318,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, id=str(uuid.uuid4()), ) @@ -351,7 +350,7 @@ class LangSmithDataTrace(BaseTraceInstance): 
reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, error="", file_list=[], @@ -381,7 +380,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, error="", file_list=[], @@ -410,7 +409,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, error="", file_list=[], @@ -440,7 +439,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, error=trace_info.error or "", ) @@ -465,7 +464,7 @@ class LangSmithDataTrace(BaseTraceInstance): reference_example_id=None, input_attachments={}, output_attachments={}, - trace_id=None, + trace_id=trace_info.trace_id, dotted_order=None, error="", file_list=[], @@ -504,7 +503,7 @@ class LangSmithDataTrace(BaseTraceInstance): self.langsmith_client.delete_project(project_name=random_project_name) return True except Exception as e: - logger.debug(f"LangSmith API check failed: {str(e)}") + logger.debug("LangSmith API check failed: %s", str(e)) raise ValueError(f"LangSmith API check failed: {str(e)}") def get_project_url(self): @@ -523,5 +522,5 @@ class LangSmithDataTrace(BaseTraceInstance): ) return project_url.split("/r/")[0] except Exception as e: - logger.debug(f"LangSmith get run url failed: {str(e)}") + logger.debug("LangSmith get run url failed: %s", str(e)) raise ValueError(f"LangSmith get run url failed: {str(e)}") diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py index 6079b2faef..dd6a424ddb 100644 --- a/api/core/ops/opik_trace/opik_trace.py +++ b/api/core/ops/opik_trace/opik_trace.py @@ -96,8 +96,7 @@ class OpikDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - external_trace_id = trace_info.metadata.get("external_trace_id") - dify_trace_id = external_trace_id or trace_info.workflow_run_id + dify_trace_id = trace_info.trace_id or trace_info.workflow_run_id opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) workflow_metadata = wrap_metadata( trace_info.metadata, message_id=trace_info.message_id, workflow_app_log_id=trace_info.workflow_app_log_id @@ -105,7 +104,7 @@ class OpikDataTrace(BaseTraceInstance): root_span_id = None if trace_info.message_id: - dify_trace_id = external_trace_id or trace_info.message_id + dify_trace_id = trace_info.trace_id or trace_info.message_id opik_trace_id = prepare_opik_uuid(trace_info.start_time, dify_trace_id) trace_data = { @@ -276,7 +275,7 @@ class OpikDataTrace(BaseTraceInstance): return metadata = trace_info.metadata - message_id = trace_info.message_id + dify_trace_id = trace_info.trace_id or trace_info.message_id user_id = message_data.from_account_id metadata["user_id"] = user_id @@ -291,7 +290,7 @@ class OpikDataTrace(BaseTraceInstance): metadata["end_user_id"] = end_user_id trace_data = { - "id": prepare_opik_uuid(trace_info.start_time, message_id), + "id": prepare_opik_uuid(trace_info.start_time, dify_trace_id), "name": TraceTaskName.MESSAGE_TRACE.value, "start_time": trace_info.start_time, "end_time": trace_info.end_time, @@ -330,7 +329,7 @@ class OpikDataTrace(BaseTraceInstance): start_time = 
trace_info.start_time or trace_info.message_data.created_at span_data = { - "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), "name": TraceTaskName.MODERATION_TRACE.value, "type": "tool", "start_time": start_time, @@ -356,7 +355,7 @@ class OpikDataTrace(BaseTraceInstance): start_time = trace_info.start_time or message_data.created_at span_data = { - "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), "name": TraceTaskName.SUGGESTED_QUESTION_TRACE.value, "type": "tool", "start_time": start_time, @@ -376,7 +375,7 @@ class OpikDataTrace(BaseTraceInstance): start_time = trace_info.start_time or trace_info.message_data.created_at span_data = { - "trace_id": prepare_opik_uuid(start_time, trace_info.message_id), + "trace_id": prepare_opik_uuid(start_time, trace_info.trace_id or trace_info.message_id), "name": TraceTaskName.DATASET_RETRIEVAL_TRACE.value, "type": "tool", "start_time": start_time, @@ -391,7 +390,7 @@ class OpikDataTrace(BaseTraceInstance): def tool_trace(self, trace_info: ToolTraceInfo): span_data = { - "trace_id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id), + "trace_id": prepare_opik_uuid(trace_info.start_time, trace_info.trace_id or trace_info.message_id), "name": trace_info.tool_name, "type": "tool", "start_time": trace_info.start_time, @@ -406,7 +405,7 @@ class OpikDataTrace(BaseTraceInstance): def generate_name_trace(self, trace_info: GenerateNameTraceInfo): trace_data = { - "id": prepare_opik_uuid(trace_info.start_time, trace_info.message_id), + "id": prepare_opik_uuid(trace_info.start_time, trace_info.trace_id or trace_info.message_id), "name": TraceTaskName.GENERATE_NAME_TRACE.value, "start_time": trace_info.start_time, "end_time": trace_info.end_time, @@ -453,12 +452,12 @@ class OpikDataTrace(BaseTraceInstance): self.opik_client.auth_check() return True except Exception as e: - logger.info(f"Opik API check failed: {str(e)}", exc_info=True) + logger.info("Opik API check failed: %s", str(e), exc_info=True) raise ValueError(f"Opik API check failed: {str(e)}") def get_project_url(self): try: return self.opik_client.get_project_url(project_name=self.project) except Exception as e: - logger.info(f"Opik get run url failed: {str(e)}", exc_info=True) + logger.info("Opik get run url failed: %s", str(e), exc_info=True) raise ValueError(f"Opik get run url failed: {str(e)}") diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 2b546b47cc..a607c76beb 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -287,7 +287,7 @@ class OpsTraceManager: # create new tracing_instance and update the cache if it absent tracing_instance = trace_instance(config_class(**decrypt_trace_config)) cls.ops_trace_instances_cache[decrypt_trace_config_key] = tracing_instance - logging.info(f"new tracing_instance for app_id: {app_id}") + logging.info("new tracing_instance for app_id: %s", app_id) return tracing_instance @classmethod @@ -407,6 +407,7 @@ class TraceTask: def __init__( self, trace_type: Any, + trace_id: Optional[str] = None, message_id: Optional[str] = None, workflow_execution: Optional[WorkflowExecution] = None, conversation_id: Optional[str] = None, @@ -424,6 +425,9 @@ class TraceTask: self.app_id = None self.kwargs = kwargs + external_trace_id = kwargs.get("external_trace_id") + if 
external_trace_id: + self.trace_id = external_trace_id + else: + self.trace_id = trace_id def execute(self): return self.preprocess() @@ -520,11 +524,8 @@ class TraceTask: "app_id": workflow_run.app_id, } - external_trace_id = self.kwargs.get("external_trace_id") - if external_trace_id: - metadata["external_trace_id"] = external_trace_id - workflow_trace_info = WorkflowTraceInfo( + trace_id=self.trace_id, workflow_data=workflow_run.to_dict(), conversation_id=conversation_id, workflow_id=workflow_id, @@ -584,6 +585,7 @@ class TraceTask: message_tokens = message_data.message_tokens message_trace_info = MessageTraceInfo( + trace_id=self.trace_id, message_id=message_id, message_data=message_data.to_dict(), conversation_model=conversation_mode, @@ -627,6 +629,7 @@ class TraceTask: workflow_app_log_id = str(workflow_app_log_data.id) if workflow_app_log_data else None moderation_trace_info = ModerationTraceInfo( + trace_id=self.trace_id, message_id=workflow_app_log_id or message_id, inputs=inputs, message_data=message_data.to_dict(), @@ -667,6 +670,7 @@ class TraceTask: workflow_app_log_id = str(workflow_app_log_data.id) if workflow_app_log_data else None suggested_question_trace_info = SuggestedQuestionTraceInfo( + trace_id=self.trace_id, message_id=workflow_app_log_id or message_id, message_data=message_data.to_dict(), inputs=message_data.message, @@ -708,6 +712,7 @@ class TraceTask: } dataset_retrieval_trace_info = DatasetRetrievalTraceInfo( + trace_id=self.trace_id, message_id=message_id, inputs=message_data.query or message_data.inputs, documents=[doc.model_dump() for doc in documents] if documents else [], @@ -772,6 +777,7 @@ class TraceTask: ) tool_trace_info = ToolTraceInfo( + trace_id=self.trace_id, message_id=message_id, message_data=message_data.to_dict(), tool_name=tool_name, @@ -807,6 +813,7 @@ class TraceTask: } generate_name_trace_info = GenerateNameTraceInfo( + trace_id=self.trace_id, conversation_id=conversation_id, inputs=inputs, outputs=generate_conversation_name, @@ -843,7 +850,7 @@ class TraceQueueManager: trace_task.app_id = self.app_id trace_manager_queue.put(trace_task) except Exception as e: - logging.exception(f"Error adding trace task, trace_type {trace_task.trace_type}") + logging.exception("Error adding trace task, trace_type %s", trace_task.trace_type) finally: self.start_timer() diff --git a/api/core/ops/utils.py b/api/core/ops/utils.py index 573e8cac88..2c0afb1600 100644 --- a/api/core/ops/utils.py +++ b/api/core/ops/utils.py @@ -67,7 +67,13 @@ def generate_dotted_order( def validate_url(url: str, default_url: str, allowed_schemes: tuple = ("https", "http")) -> str: """ - Validate and normalize URL with proper error handling + Validate and normalize URL with proper error handling. + + NOTE: This function does not retain the `path` component of the provided URL. + In most cases, it is recommended to use `validate_url_with_path` instead. + + This function is deprecated and retained only for compatibility purposes. + New implementations should use `validate_url_with_path`.
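Across the LangSmith, Opik, and Weave hunks above, the same precedence keeps repeating: an `external_trace_id` supplied via kwargs wins, then the explicit `trace_id` carried on the trace info, then each trace type falls back to its own identifier (message id, workflow run id). The helper below is a minimal sketch of that rule for illustration only; its name and signature are not part of the PR.

```python
from typing import Optional


def resolve_trace_id(
    trace_id: Optional[str],
    external_trace_id: Optional[str],
    message_id: Optional[str] = None,
    workflow_run_id: Optional[str] = None,
) -> Optional[str]:
    # kwargs-supplied external id overrides everything, as in TraceTask.__init__
    if external_trace_id:
        return external_trace_id
    # then the explicit trace_id propagated onto each *TraceInfo
    if trace_id:
        return trace_id
    # finally each trace type's own fallback, e.g. message id or workflow run id
    return message_id or workflow_run_id


# e.g. the Opik workflow_trace fallback: trace_info.trace_id or workflow_run_id
assert resolve_trace_id(None, None, None, "wf-123") == "wf-123"
assert resolve_trace_id("t-1", "ext-9") == "ext-9"
```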
Args: url: The URL to validate diff --git a/api/core/ops/weave_trace/weave_trace.py b/api/core/ops/weave_trace/weave_trace.py index a34b3b780c..8089860481 100644 --- a/api/core/ops/weave_trace/weave_trace.py +++ b/api/core/ops/weave_trace/weave_trace.py @@ -66,11 +66,11 @@ class WeaveDataTrace(BaseTraceInstance): project_url = f"https://wandb.ai/{self.weave_client._project_id()}" return project_url except Exception as e: - logger.debug(f"Weave get run url failed: {str(e)}") + logger.debug("Weave get run url failed: %s", str(e)) raise ValueError(f"Weave get run url failed: {str(e)}") def trace(self, trace_info: BaseTraceInfo): - logger.debug(f"Trace info: {trace_info}") + logger.debug("Trace info: %s", trace_info) if isinstance(trace_info, WorkflowTraceInfo): self.workflow_trace(trace_info) if isinstance(trace_info, MessageTraceInfo): @@ -87,8 +87,7 @@ class WeaveDataTrace(BaseTraceInstance): self.generate_name_trace(trace_info) def workflow_trace(self, trace_info: WorkflowTraceInfo): - external_trace_id = trace_info.metadata.get("external_trace_id") - trace_id = external_trace_id or trace_info.message_id or trace_info.workflow_run_id + trace_id = trace_info.trace_id or trace_info.message_id or trace_info.workflow_run_id if trace_info.start_time is None: trace_info.start_time = datetime.now() @@ -245,8 +244,12 @@ class WeaveDataTrace(BaseTraceInstance): attributes["start_time"] = trace_info.start_time attributes["end_time"] = trace_info.end_time attributes["tags"] = ["message", str(trace_info.conversation_mode)] + + trace_id = trace_info.trace_id or message_id + attributes["trace_id"] = trace_id + message_run = WeaveTraceModel( - id=message_id, + id=trace_id, op=str(TraceTaskName.MESSAGE_TRACE.value), input_tokens=trace_info.message_tokens, output_tokens=trace_info.answer_tokens, @@ -274,7 +277,7 @@ class WeaveDataTrace(BaseTraceInstance): ) self.start_call( llm_run, - parent_run_id=message_id, + parent_run_id=trace_id, ) self.finish_call(llm_run) self.finish_call(message_run) @@ -289,6 +292,9 @@ class WeaveDataTrace(BaseTraceInstance): attributes["start_time"] = trace_info.start_time or trace_info.message_data.created_at attributes["end_time"] = trace_info.end_time or trace_info.message_data.updated_at + trace_id = trace_info.trace_id or trace_info.message_id + attributes["trace_id"] = trace_id + moderation_run = WeaveTraceModel( id=str(uuid.uuid4()), op=str(TraceTaskName.MODERATION_TRACE.value), @@ -303,7 +309,7 @@ class WeaveDataTrace(BaseTraceInstance): exception=getattr(trace_info, "error", None), file_list=[], ) - self.start_call(moderation_run, parent_run_id=trace_info.message_id) + self.start_call(moderation_run, parent_run_id=trace_id) self.finish_call(moderation_run) def suggested_question_trace(self, trace_info: SuggestedQuestionTraceInfo): @@ -316,6 +322,9 @@ class WeaveDataTrace(BaseTraceInstance): attributes["start_time"] = (trace_info.start_time or message_data.created_at,) attributes["end_time"] = (trace_info.end_time or message_data.updated_at,) + trace_id = trace_info.trace_id or trace_info.message_id + attributes["trace_id"] = trace_id + suggested_question_run = WeaveTraceModel( id=str(uuid.uuid4()), op=str(TraceTaskName.SUGGESTED_QUESTION_TRACE.value), @@ -326,7 +335,7 @@ class WeaveDataTrace(BaseTraceInstance): file_list=[], ) - self.start_call(suggested_question_run, parent_run_id=trace_info.message_id) + self.start_call(suggested_question_run, parent_run_id=trace_id) self.finish_call(suggested_question_run) def dataset_retrieval_trace(self, trace_info: 
DatasetRetrievalTraceInfo): @@ -338,6 +347,9 @@ class WeaveDataTrace(BaseTraceInstance): attributes["start_time"] = (trace_info.start_time or trace_info.message_data.created_at,) attributes["end_time"] = (trace_info.end_time or trace_info.message_data.updated_at,) + trace_id = trace_info.trace_id or trace_info.message_id + attributes["trace_id"] = trace_id + dataset_retrieval_run = WeaveTraceModel( id=str(uuid.uuid4()), op=str(TraceTaskName.DATASET_RETRIEVAL_TRACE.value), @@ -348,7 +360,7 @@ class WeaveDataTrace(BaseTraceInstance): file_list=[], ) - self.start_call(dataset_retrieval_run, parent_run_id=trace_info.message_id) + self.start_call(dataset_retrieval_run, parent_run_id=trace_id) self.finish_call(dataset_retrieval_run) def tool_trace(self, trace_info: ToolTraceInfo): @@ -357,6 +369,11 @@ class WeaveDataTrace(BaseTraceInstance): attributes["start_time"] = trace_info.start_time attributes["end_time"] = trace_info.end_time + message_id = trace_info.message_id or getattr(trace_info, "conversation_id", None) + message_id = message_id or None + trace_id = trace_info.trace_id or message_id + attributes["trace_id"] = trace_id + tool_run = WeaveTraceModel( id=str(uuid.uuid4()), op=trace_info.tool_name, @@ -366,9 +383,7 @@ class WeaveDataTrace(BaseTraceInstance): attributes=attributes, exception=trace_info.error, ) - message_id = trace_info.message_id or getattr(trace_info, "conversation_id", None) - message_id = message_id or None - self.start_call(tool_run, parent_run_id=message_id) + self.start_call(tool_run, parent_run_id=trace_id) self.finish_call(tool_run) def generate_name_trace(self, trace_info: GenerateNameTraceInfo): @@ -403,7 +418,7 @@ class WeaveDataTrace(BaseTraceInstance): print("Weave login successful") return True except Exception as e: - logger.debug(f"Weave API check failed: {str(e)}") + logger.debug("Weave API check failed: %s", str(e)) raise ValueError(f"Weave API check failed: {str(e)}") def start_call(self, run_data: WeaveTraceModel, parent_run_id: Optional[str] = None): diff --git a/api/core/plugin/impl/exc.py b/api/core/plugin/impl/exc.py index 54a0b90a8d..8b660c807d 100644 --- a/api/core/plugin/impl/exc.py +++ b/api/core/plugin/impl/exc.py @@ -1,3 +1,8 @@ +from collections.abc import Mapping + +from pydantic import TypeAdapter + + class PluginDaemonError(Exception): """Base class for all plugin daemon errors.""" @@ -36,6 +41,21 @@ class PluginDaemonBadRequestError(PluginDaemonClientSideError): class PluginInvokeError(PluginDaemonClientSideError): description: str = "Invoke Error" + def _get_error_object(self) -> Mapping: + try: + return TypeAdapter(Mapping).validate_json(self.description) + except Exception: + return {} + + def get_error_type(self) -> str: + return self._get_error_object().get("error_type", "unknown") + + def get_error_message(self) -> str: + try: + return self._get_error_object().get("message", "unknown") + except Exception: + return self.description + class PluginUniqueIdentifierError(PluginDaemonClientSideError): description: str = "Unique Identifier Error" diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py index ec3a23bd96..7c5f47006f 100644 --- a/api/core/rag/datasource/keyword/jieba/jieba.py +++ b/api/core/rag/datasource/keyword/jieba/jieba.py @@ -24,7 +24,7 @@ class Jieba(BaseKeyword): self._config = KeywordTableConfig() def create(self, texts: list[Document], **kwargs) -> BaseKeyword: - lock_name = "keyword_indexing_lock_{}".format(self.dataset.id) + lock_name = 
f"keyword_indexing_lock_{self.dataset.id}" with redis_client.lock(lock_name, timeout=600): keyword_table_handler = JiebaKeywordTableHandler() keyword_table = self._get_dataset_keyword_table() @@ -43,7 +43,7 @@ class Jieba(BaseKeyword): return self def add_texts(self, texts: list[Document], **kwargs): - lock_name = "keyword_indexing_lock_{}".format(self.dataset.id) + lock_name = f"keyword_indexing_lock_{self.dataset.id}" with redis_client.lock(lock_name, timeout=600): keyword_table_handler = JiebaKeywordTableHandler() @@ -76,7 +76,7 @@ class Jieba(BaseKeyword): return id in set.union(*keyword_table.values()) def delete_by_ids(self, ids: list[str]) -> None: - lock_name = "keyword_indexing_lock_{}".format(self.dataset.id) + lock_name = f"keyword_indexing_lock_{self.dataset.id}" with redis_client.lock(lock_name, timeout=600): keyword_table = self._get_dataset_keyword_table() if keyword_table is not None: @@ -116,7 +116,7 @@ class Jieba(BaseKeyword): return documents def delete(self) -> None: - lock_name = "keyword_indexing_lock_{}".format(self.dataset.id) + lock_name = f"keyword_indexing_lock_{self.dataset.id}" with redis_client.lock(lock_name, timeout=600): dataset_keyword_table = self.dataset.dataset_keyword_table if dataset_keyword_table: diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py index db7ffc9c4f..d63ca9f695 100644 --- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py +++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py @@ -203,9 +203,9 @@ class BaiduVector(BaseVector): def _create_table(self, dimension: int) -> None: # Try to grab distributed lock and create table - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=60): - table_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + table_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(table_exist_cache_key): return diff --git a/api/core/rag/datasource/vdb/chroma/chroma_vector.py b/api/core/rag/datasource/vdb/chroma/chroma_vector.py index b8b265d5e6..699a602365 100644 --- a/api/core/rag/datasource/vdb/chroma/chroma_vector.py +++ b/api/core/rag/datasource/vdb/chroma/chroma_vector.py @@ -57,9 +57,9 @@ class ChromaVector(BaseVector): self.add_texts(texts, embeddings, **kwargs) def create_collection(self, collection_name: str): - lock_name = "vector_indexing_lock_{}".format(collection_name) + lock_name = f"vector_indexing_lock_{collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return self._client.get_or_create_collection(collection_name) diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py index 68a9952789..bd986393d1 100644 --- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py +++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py @@ -74,9 +74,9 @@ class CouchbaseVector(BaseVector): self.add_texts(texts, embeddings) def _create_collection(self, vector_length: int, uuid: str): - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = 
"vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return if self._collection_exists(self._collection_name): @@ -242,7 +242,7 @@ class CouchbaseVector(BaseVector): try: self._cluster.query(query, named_parameters={"doc_ids": ids}).execute() except Exception as e: - logger.exception(f"Failed to delete documents, ids: {ids}") + logger.exception("Failed to delete documents, ids: %s", ids) def delete_by_document_id(self, document_id: str): query = f""" diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py index 27575197fa..7118029d40 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_ja_vector.py @@ -29,7 +29,7 @@ class ElasticSearchJaVector(ElasticSearchVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logger.info(f"Collection {self._collection_name} already exists.") + logger.info("Collection %s already exists.", self._collection_name) return if not self._client.indices.exists(index=self._collection_name): diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index ad39717183..832485b236 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -186,7 +186,7 @@ class ElasticSearchVector(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logger.info(f"Collection {self._collection_name} already exists.") + logger.info("Collection %s already exists.", self._collection_name) return if not self._client.indices.exists(index=self._collection_name): diff --git a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py index 89423eb160..0a4067e39c 100644 --- a/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py +++ b/api/core/rag/datasource/vdb/huawei/huawei_cloud_vector.py @@ -164,7 +164,7 @@ class HuaweiCloudVector(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logger.info(f"Collection {self._collection_name} already exists.") + logger.info("Collection %s already exists.", self._collection_name) return if not self._client.indices.exists(index=self._collection_name): diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index e9ff1ce43d..3c65a41f08 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -89,7 +89,7 @@ class LindormVectorStore(BaseVector): timeout: int = 60, **kwargs, ): - logger.info(f"Total documents to add: {len(documents)}") + logger.info("Total documents to add: %s", len(documents)) uuids = self._get_uuids(documents) total_docs = len(documents) @@ -147,7 +147,7 @@ class LindormVectorStore(BaseVector): time.sleep(0.5) except Exception: - logger.exception(f"Failed to process batch {batch_num 
+ 1}") + logger.exception("Failed to process batch %s", batch_num + 1) raise def get_ids_by_metadata_field(self, key: str, value: str): @@ -180,7 +180,7 @@ class LindormVectorStore(BaseVector): # 1. First check if collection exists if not self._client.indices.exists(index=self._collection_name): - logger.warning(f"Collection {self._collection_name} does not exist") + logger.warning("Collection %s does not exist", self._collection_name) return # 2. Batch process deletions @@ -196,7 +196,7 @@ class LindormVectorStore(BaseVector): } ) else: - logger.warning(f"DELETE BY ID: ID {id} does not exist in the index.") + logger.warning("DELETE BY ID: ID %s does not exist in the index.", id) # 3. Perform bulk deletion if there are valid documents to delete if actions: @@ -209,9 +209,9 @@ class LindormVectorStore(BaseVector): doc_id = delete_error.get("_id") if status == 404: - logger.warning(f"Document not found for deletion: {doc_id}") + logger.warning("Document not found for deletion: %s", doc_id) else: - logger.exception(f"Error deleting document: {error}") + logger.exception("Error deleting document: %s", error) def delete(self) -> None: if self._using_ugc: @@ -225,7 +225,7 @@ class LindormVectorStore(BaseVector): self._client.indices.delete(index=self._collection_name, params={"timeout": 60}) logger.info("Delete index success") else: - logger.warning(f"Index '{self._collection_name}' does not exist. No deletion performed.") + logger.warning("Index '%s' does not exist. No deletion performed.", self._collection_name) def text_exists(self, id: str) -> bool: try: @@ -257,7 +257,7 @@ class LindormVectorStore(BaseVector): params["routing"] = self._routing # type: ignore response = self._client.search(index=self._collection_name, body=query, params=params) except Exception: - logger.exception(f"Error executing vector search, query: {query}") + logger.exception("Error executing vector search, query: %s", query) raise docs_and_scores = [] @@ -324,10 +324,10 @@ class LindormVectorStore(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logger.info(f"Collection {self._collection_name} already exists.") + logger.info("Collection %s already exists.", self._collection_name) return if self._client.indices.exists(index=self._collection_name): - logger.info(f"{self._collection_name.lower()} already exists.") + logger.info("%s already exists.", self._collection_name.lower()) redis_client.set(collection_exist_cache_key, 1, ex=3600) return if len(self.kwargs) == 0 and len(kwargs) != 0: diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index 63de6a0603..d64f366e0e 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -103,7 +103,7 @@ class MilvusVector(BaseVector): # For standard Milvus installations, check version number return version.parse(milvus_version).base_version >= version.parse("2.5.0").base_version except Exception as e: - logger.warning(f"Failed to check Milvus version: {str(e)}. Disabling hybrid search.") + logger.warning("Failed to check Milvus version: %s. Disabling hybrid search.", str(e)) return False def get_type(self) -> str: @@ -289,9 +289,9 @@ class MilvusVector(BaseVector): """ Create a new collection in Milvus with the specified schema and index parameters. 
""" - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return # Grab the existing collection if it exists diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py index dbb1a7fe19..d5ec4b4436 100644 --- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py +++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py @@ -53,7 +53,7 @@ class MyScaleVector(BaseVector): return self.add_texts(documents=texts, embeddings=embeddings, **kwargs) def _create_collection(self, dimension: int): - logging.info(f"create MyScale collection {self._collection_name} with dimension {dimension}") + logging.info("create MyScale collection %s with dimension %s", self._collection_name, dimension) self._client.command(f"CREATE DATABASE IF NOT EXISTS {self._config.database}") fts_params = f"('{self._config.fts_params}')" if self._config.fts_params else "" sql = f""" @@ -151,7 +151,7 @@ class MyScaleVector(BaseVector): for r in self._client.query(sql).named_results() ] except Exception as e: - logging.exception(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") # noqa:TRY401 + logging.exception("\033[91m\033[1m%s\033[0m \033[95m%s\033[0m", type(e), str(e)) # noqa:TRY401 return [] def delete(self) -> None: diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py index dd196e1f09..d6dfe967d7 100644 --- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -147,7 +147,7 @@ class OceanBaseVector(BaseVector): logger.debug("Current OceanBase version is %s", ob_version) return version.parse(ob_version).base_version >= version.parse("4.3.5.1").base_version except Exception as e: - logger.warning(f"Failed to check OceanBase version: {str(e)}. Disabling hybrid search.") + logger.warning("Failed to check OceanBase version: %s. 
Disabling hybrid search.", str(e)) return False def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): @@ -229,7 +229,7 @@ class OceanBaseVector(BaseVector): return docs except Exception as e: - logger.warning(f"Failed to fulltext search: {str(e)}.") + logger.warning("Failed to fulltext search: %s.", str(e)) return [] def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index 0abb3c0077..ed2dcb40ad 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -131,7 +131,7 @@ class OpenSearchVector(BaseVector): def delete_by_ids(self, ids: list[str]) -> None: index_name = self._collection_name.lower() if not self._client.indices.exists(index=index_name): - logger.warning(f"Index {index_name} does not exist") + logger.warning("Index %s does not exist", index_name) return # Obtaining All Actual Documents_ID @@ -142,7 +142,7 @@ class OpenSearchVector(BaseVector): if es_ids: actual_ids.extend(es_ids) else: - logger.warning(f"Document with metadata doc_id {doc_id} not found for deletion") + logger.warning("Document with metadata doc_id %s not found for deletion", doc_id) if actual_ids: actions = [{"_op_type": "delete", "_index": index_name, "_id": es_id} for es_id in actual_ids] @@ -155,9 +155,9 @@ class OpenSearchVector(BaseVector): doc_id = delete_error.get("_id") if status == 404: - logger.warning(f"Document not found for deletion: {doc_id}") + logger.warning("Document not found for deletion: %s", doc_id) else: - logger.exception(f"Error deleting document: {error}") + logger.exception("Error deleting document: %s", error) def delete(self) -> None: self._client.indices.delete(index=self._collection_name.lower()) @@ -198,7 +198,7 @@ class OpenSearchVector(BaseVector): try: response = self._client.search(index=self._collection_name.lower(), body=query) except Exception as e: - logger.exception(f"Error executing vector search, query: {query}") + logger.exception("Error executing vector search, query: %s", query) raise docs = [] @@ -242,7 +242,7 @@ class OpenSearchVector(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name.lower()}" if redis_client.get(collection_exist_cache_key): - logger.info(f"Collection {self._collection_name.lower()} already exists.") + logger.info("Collection %s already exists.", self._collection_name.lower()) return if not self._client.indices.exists(index=self._collection_name.lower()): @@ -272,7 +272,7 @@ class OpenSearchVector(BaseVector): }, } - logger.info(f"Creating OpenSearch index {self._collection_name.lower()}") + logger.info("Creating OpenSearch index %s", self._collection_name.lower()) self._client.indices.create(index=self._collection_name.lower(), body=index_body) redis_client.set(collection_exist_cache_key, 1, ex=3600) diff --git a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py index b0f0eeca38..e77befcdae 100644 --- a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py +++ b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py @@ -82,9 +82,9 @@ class PGVectoRS(BaseVector): self.add_texts(texts, embeddings) def create_collection(self, dimension: int): - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = 
f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return index_name = f"{self._collection_name}_embedding_index" diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py index 04e9cf801e..746773da63 100644 --- a/api/core/rag/datasource/vdb/pgvector/pgvector.py +++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py @@ -155,7 +155,7 @@ class PGVector(BaseVector): cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s", (tuple(ids),)) except psycopg2.errors.UndefinedTable: # table not exists - logging.warning(f"Table {self.table_name} not found, skipping delete operation.") + logging.warning("Table %s not found, skipping delete operation.", self.table_name) return except Exception as e: raise e diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index dfb95a1839..9741dd8b1d 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -95,9 +95,9 @@ class QdrantVector(BaseVector): self.add_texts(texts, embeddings, **kwargs) def create_collection(self, collection_name: str, vector_size: int): - lock_name = "vector_indexing_lock_{}".format(collection_name) + lock_name = f"vector_indexing_lock_{collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return collection_name = collection_name or uuid.uuid4().hex diff --git a/api/core/rag/datasource/vdb/relyt/relyt_vector.py b/api/core/rag/datasource/vdb/relyt/relyt_vector.py index 0c0d6a463d..7a42dd1a89 100644 --- a/api/core/rag/datasource/vdb/relyt/relyt_vector.py +++ b/api/core/rag/datasource/vdb/relyt/relyt_vector.py @@ -70,9 +70,9 @@ class RelytVector(BaseVector): self.add_texts(texts, embeddings) def create_collection(self, dimension: int): - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return index_name = f"{self._collection_name}_embedding_index" diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index 9ed6e7369b..784e27fc7f 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -142,7 +142,7 @@ class TableStoreVector(BaseVector): with redis_client.lock(lock_name, timeout=20): collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): - logging.info(f"Collection {self._collection_name} already exists.") + logging.info("Collection %s already exists.", self._collection_name) return self._create_table_if_not_exist() diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 
23ed8a3344..3aa4b67a78 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -92,9 +92,9 @@ class TencentVector(BaseVector): def _create_collection(self, dimension: int) -> None: self._dimension = dimension - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index ba6a9654f0..e848b39c4d 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -104,9 +104,9 @@ class TidbOnQdrantVector(BaseVector): self.add_texts(texts, embeddings, **kwargs) def create_collection(self, collection_name: str, vector_size: int): - lock_name = "vector_indexing_lock_{}".format(collection_name) + lock_name = f"vector_indexing_lock_{collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return collection_name = collection_name or uuid.uuid4().hex diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index 61c68b939e..f8a851a246 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -91,9 +91,9 @@ class TiDBVector(BaseVector): def _create_collection(self, dimension: int): logger.info("_create_collection, collection_name " + self._collection_name) - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return tidb_dist_func = self._get_distance_func() @@ -192,7 +192,7 @@ class TiDBVector(BaseVector): query_vector_str = ", ".join(format(x) for x in query_vector) query_vector_str = "[" + query_vector_str + "]" logger.debug( - f"_collection_name: {self._collection_name}, score_threshold: {score_threshold}, distance: {distance}" + "_collection_name: %s, score_threshold: %s, distance: %s", self._collection_name, score_threshold, distance ) docs = [] diff --git a/api/core/rag/datasource/vdb/vector_factory.py b/api/core/rag/datasource/vdb/vector_factory.py index e018f7d3d4..43c49ed4b3 100644 --- a/api/core/rag/datasource/vdb/vector_factory.py +++ b/api/core/rag/datasource/vdb/vector_factory.py @@ -178,19 +178,19 @@ class Vector: def create(self, texts: Optional[list] = None, **kwargs): if texts: start = time.time() - logger.info(f"start embedding {len(texts)} texts {start}") + logger.info("start embedding %s texts %s", len(texts), start) batch_size = 1000 total_batches = len(texts) + batch_size - 1 for i in range(0, len(texts), batch_size): batch = texts[i : i + batch_size] batch_start = 
time.time() - logger.info(f"Processing batch {i // batch_size + 1}/{total_batches} ({len(batch)} texts)") + logger.info("Processing batch %s/%s (%s texts)", i // batch_size + 1, total_batches, len(batch)) batch_embeddings = self._embeddings.embed_documents([document.page_content for document in batch]) logger.info( - f"Embedding batch {i // batch_size + 1}/{total_batches} took {time.time() - batch_start:.3f}s" + "Embedding batch %s/%s took %s s", i // batch_size + 1, total_batches, time.time() - batch_start ) self._vector_processor.create(texts=batch, embeddings=batch_embeddings, **kwargs) - logger.info(f"Embedding {len(texts)} texts took {time.time() - start:.3f}s") + logger.info("Embedding %s texts took %s s", len(texts), time.time() - start) def add_texts(self, documents: list[Document], **kwargs): if kwargs.get("duplicate_check", False): @@ -219,7 +219,7 @@ class Vector: self._vector_processor.delete() # delete collection redis cache if self._vector_processor.collection_name: - collection_exist_cache_key = "vector_indexing_{}".format(self._vector_processor.collection_name) + collection_exist_cache_key = f"vector_indexing_{self._vector_processor.collection_name}" redis_client.delete(collection_exist_cache_key) def _get_embeddings(self) -> Embeddings: diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 7a8efb4068..5525ef1685 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -92,9 +92,9 @@ class WeaviateVector(BaseVector): self.add_texts(texts, embeddings) def _create_collection(self): - lock_name = "vector_indexing_lock_{}".format(self._collection_name) + lock_name = f"vector_indexing_lock_{self._collection_name}" with redis_client.lock(lock_name, timeout=20): - collection_exist_cache_key = "vector_indexing_{}".format(self._collection_name) + collection_exist_cache_key = f"vector_indexing_{self._collection_name}" if redis_client.get(collection_exist_cache_key): return schema = self._default_schema(self._collection_name) diff --git a/api/core/rag/docstore/dataset_docstore.py b/api/core/rag/docstore/dataset_docstore.py index f844770a20..f8da3657fc 100644 --- a/api/core/rag/docstore/dataset_docstore.py +++ b/api/core/rag/docstore/dataset_docstore.py @@ -32,7 +32,7 @@ class DatasetDocumentStore: } @property - def dateset_id(self) -> Any: + def dataset_id(self) -> Any: return self._dataset.id @property @@ -123,13 +123,13 @@ class DatasetDocumentStore: db.session.flush() if save_child: if doc.children: - for postion, child in enumerate(doc.children, start=1): + for position, child in enumerate(doc.children, start=1): child_segment = ChildChunk( tenant_id=self._dataset.tenant_id, dataset_id=self._dataset.id, document_id=self._document_id, segment_id=segment_document.id, - position=postion, + position=position, index_node_id=child.metadata.get("doc_id"), index_node_hash=child.metadata.get("doc_hash"), content=child.page_content, diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index f50f9f6b60..9848a28384 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -69,7 +69,7 @@ class CacheEmbedding(Embeddings): # stackoverflow best way: https://stackoverflow.com/questions/20319813/how-to-check-list-containing-nan if np.isnan(normalized_embedding).any(): # for issue #11827 float values are not json compliant - 
logger.warning(f"Normalized embedding is nan: {normalized_embedding}") + logger.warning("Normalized embedding is nan: %s", normalized_embedding) continue embedding_queue_embeddings.append(normalized_embedding) except IntegrityError: @@ -122,7 +122,7 @@ class CacheEmbedding(Embeddings): raise ValueError("Normalized embedding is nan please try again") except Exception as ex: if dify_config.DEBUG: - logging.exception(f"Failed to embed query text '{text[:10]}...({len(text)} chars)'") + logging.exception("Failed to embed query text '%s...(%s chars)'", text[:10], len(text)) raise ex try: @@ -136,7 +136,9 @@ class CacheEmbedding(Embeddings): redis_client.setex(embedding_cache_key, 600, encoded_str) except Exception as ex: if dify_config.DEBUG: - logging.exception(f"Failed to add embedding to redis for the text '{text[:10]}...({len(text)} chars)'") + logging.exception( + "Failed to add embedding to redis for the text '%s...(%s chars)'", text[:10], len(text) + ) raise ex return embedding_results # type: ignore diff --git a/api/core/rag/splitter/text_splitter.py b/api/core/rag/splitter/text_splitter.py index 529d8ccd27..489aa05430 100644 --- a/api/core/rag/splitter/text_splitter.py +++ b/api/core/rag/splitter/text_splitter.py @@ -116,7 +116,7 @@ class TextSplitter(BaseDocumentTransformer, ABC): if total + _len + (separator_len if len(current_doc) > 0 else 0) > self._chunk_size: if total > self._chunk_size: logger.warning( - f"Created a chunk of size {total}, which is longer than the specified {self._chunk_size}" + "Created a chunk of size %s, which is longer than the specified %s", total, self._chunk_size ) if len(current_doc) > 0: doc = self._join_docs(current_doc, separator) diff --git a/api/core/repositories/factory.py b/api/core/repositories/factory.py index 4118aa61c7..6e636883ae 100644 --- a/api/core/repositories/factory.py +++ b/api/core/repositories/factory.py @@ -153,7 +153,7 @@ class DifyCoreRepositoryFactory: RepositoryImportError: If the configured repository cannot be created """ class_path = dify_config.CORE_WORKFLOW_EXECUTION_REPOSITORY - logger.debug(f"Creating WorkflowExecutionRepository from: {class_path}") + logger.debug("Creating WorkflowExecutionRepository from: %s", class_path) try: repository_class = cls._import_class(class_path) @@ -199,7 +199,7 @@ class DifyCoreRepositoryFactory: RepositoryImportError: If the configured repository cannot be created """ class_path = dify_config.CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY - logger.debug(f"Creating WorkflowNodeExecutionRepository from: {class_path}") + logger.debug("Creating WorkflowNodeExecutionRepository from: %s", class_path) try: repository_class = cls._import_class(class_path) diff --git a/api/core/repositories/sqlalchemy_workflow_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_execution_repository.py index c579ff4028..74a49842f3 100644 --- a/api/core/repositories/sqlalchemy_workflow_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_execution_repository.py @@ -203,5 +203,5 @@ class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository): session.commit() # Update the in-memory cache for faster subsequent lookups - logger.debug(f"Updating cache for execution_id: {db_model.id}") + logger.debug("Updating cache for execution_id: %s", db_model.id) self._execution_cache[db_model.id] = db_model diff --git a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index d4a31390f8..f4532d7f29 
100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -215,7 +215,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) # Update the in-memory cache for faster subsequent lookups # Only cache if we have a node_execution_id to use as the cache key if db_model.node_execution_id: - logger.debug(f"Updating cache for node_execution_id: {db_model.node_execution_id}") + logger.debug("Updating cache for node_execution_id: %s", db_model.node_execution_id) self._node_execution_cache[db_model.node_execution_id] = db_model def get_db_models_by_workflow_run( diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index f286466de0..1bb4cfa4cd 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -206,7 +206,7 @@ class ToolManager: ) except Exception as e: builtin_provider = None - logger.info(f"Error getting builtin provider {credential_id}:{e}", exc_info=True) + logger.info("Error getting builtin provider %s:%s", credential_id, e, exc_info=True) # if the provider has been deleted, raise an error if builtin_provider is None: raise ToolProviderNotFoundError(f"provider has been deleted: {credential_id}") @@ -237,7 +237,7 @@ class ToolManager: if builtin_provider is None: raise ToolProviderNotFoundError(f"builtin provider {provider_id} not found") - encrypter, _ = create_provider_encrypter( + encrypter, cache = create_provider_encrypter( tenant_id=tenant_id, config=[ x.to_basic_provider_config() @@ -281,6 +281,7 @@ class ToolManager: builtin_provider.expires_at = refreshed_credentials.expires_at db.session.commit() decrypted_credentials = refreshed_credentials.credentials + cache.delete() return cast( BuiltinTool, @@ -569,7 +570,7 @@ class ToolManager: yield provider except Exception: - logger.exception(f"load builtin provider {provider_path}") + logger.exception("load builtin provider %s", provider_path) continue # set builtin providers loaded cls._builtin_providers_loaded = True @@ -1011,7 +1012,9 @@ class ToolManager: if variable is None: raise ToolParameterError(f"Variable {tool_input.value} does not exist") parameter_value = variable.value - elif tool_input.type in {"mixed", "constant"}: + elif tool_input.type == "constant": + parameter_value = tool_input.value + elif tool_input.type == "mixed": segment_group = variable_pool.convert_template(str(tool_input.value)) parameter_value = segment_group.text else: diff --git a/api/core/tools/utils/parser.py b/api/core/tools/utils/parser.py index a3c84615ca..3857a2a16b 100644 --- a/api/core/tools/utils/parser.py +++ b/api/core/tools/utils/parser.py @@ -105,6 +105,29 @@ class ApiBasedToolSchemaParser: # overwrite the content interface["operation"]["requestBody"]["content"][content_type]["schema"] = root + # handle allOf reference in schema properties + for prop_dict in root.get("properties", {}).values(): + for item in prop_dict.get("allOf", []): + if "$ref" in item: + ref_schema = openapi + reference = item["$ref"].split("/")[1:] + for ref in reference: + ref_schema = ref_schema[ref] + else: + ref_schema = item + for key, value in ref_schema.items(): + if isinstance(value, list): + if key not in prop_dict: + prop_dict[key] = [] + # extends list field + if isinstance(prop_dict[key], list): + prop_dict[key].extend(value) + elif key not in prop_dict: + # add new field + prop_dict[key] = value + if "allOf" in prop_dict: + del prop_dict["allOf"] + # parse body parameters if 
"schema" in interface["operation"]["requestBody"]["content"][content_type]: body_schema = interface["operation"]["requestBody"]["content"][content_type]["schema"] diff --git a/api/core/tools/utils/web_reader_tool.py b/api/core/tools/utils/web_reader_tool.py index cbd06fc186..df052c16db 100644 --- a/api/core/tools/utils/web_reader_tool.py +++ b/api/core/tools/utils/web_reader_tool.py @@ -55,7 +55,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str: main_content_type = mimetypes.guess_type(filename)[0] if main_content_type not in supported_content_types: - return "Unsupported content-type [{}] of URL.".format(main_content_type) + return f"Unsupported content-type [{main_content_type}] of URL." if main_content_type in extract_processor.SUPPORT_URL_CONTENT_TYPES: return cast(str, ExtractProcessor.load_from_url(url, return_text=True)) @@ -67,7 +67,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str: response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) # type: ignore if response.status_code != 200: - return "URL returned status code {}.".format(response.status_code) + return f"URL returned status code {response.status_code}." # Detect encoding using chardet detected_encoding = chardet.detect(response.content) diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 8b89c2a7a9..962b9f7a81 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -194,7 +194,7 @@ class WorkflowTool(Tool): files.append(file_dict) except Exception: - logger.exception(f"Failed to transform file {file}") + logger.exception("Failed to transform file %s", file) else: parameters_result[parameter.name] = tool_parameters.get(parameter.name) diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index b315129763..ef13277e0c 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -238,13 +238,13 @@ class GraphEngine: while True: # max steps reached if self.graph_runtime_state.node_run_steps > self.max_execution_steps: - raise GraphRunFailedError("Max steps {} reached.".format(self.max_execution_steps)) + raise GraphRunFailedError(f"Max steps {self.max_execution_steps} reached.") # or max execution time reached if self._is_timed_out( start_at=self.graph_runtime_state.start_at, max_execution_time=self.max_execution_time ): - raise GraphRunFailedError("Max execution time {}s reached.".format(self.max_execution_time)) + raise GraphRunFailedError(f"Max execution time {self.max_execution_time}s reached.") # init route node state route_node_state = self.graph_runtime_state.node_run_state.create_node_state(node_id=next_node_id) @@ -377,7 +377,7 @@ class GraphEngine: edge = cast(GraphEdge, sub_edge_mappings[0]) if edge.run_condition is None: - logger.warning(f"Edge {edge.target_node_id} run condition is None") + logger.warning("Edge %s run condition is None", edge.target_node_id) continue result = ConditionManager.get_condition_handler( @@ -848,7 +848,7 @@ class GraphEngine: ) return except Exception as e: - logger.exception(f"Node {node.title} run failed") + logger.exception("Node %s run failed", node.title) raise e def _append_variables_recursively(self, node_id: str, variable_key_list: list[str], variable_value: VariableValue): diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index c83303034e..2b6382a8a6 100644 
--- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -50,6 +50,7 @@ from .exc import ( AgentInputTypeError, AgentInvocationError, AgentMessageTransformError, + AgentNodeError, AgentVariableNotFoundError, AgentVariableTypeError, ToolFileNotFoundError, @@ -593,7 +594,14 @@ class AgentNode(BaseNode): variables[variable_name] = variable_value elif message.type == ToolInvokeMessage.MessageType.FILE: assert message.meta is not None - assert isinstance(message.meta, File) + assert isinstance(message.meta, dict) + # Validate that meta contains a 'file' key + if "file" not in message.meta: + raise AgentNodeError("File message is missing 'file' key in meta") + + # Validate that the file is an instance of File + if not isinstance(message.meta["file"], File): + raise AgentNodeError(f"Expected File object but got {type(message.meta['file']).__name__}") files.append(message.meta["file"]) elif message.type == ToolInvokeMessage.MessageType.LOG: assert isinstance(message.message, ToolInvokeMessage.LogMessage) diff --git a/api/core/workflow/nodes/answer/base_stream_processor.py b/api/core/workflow/nodes/answer/base_stream_processor.py index 09d5464d7a..7e84557a2d 100644 --- a/api/core/workflow/nodes/answer/base_stream_processor.py +++ b/api/core/workflow/nodes/answer/base_stream_processor.py @@ -36,7 +36,7 @@ class StreamProcessor(ABC): reachable_node_ids: list[str] = [] unreachable_first_node_ids: list[str] = [] if finished_node_id not in self.graph.edge_mapping: - logger.warning(f"node {finished_node_id} has no edge mapping") + logger.warning("node %s has no edge mapping", finished_node_id) return for edge in self.graph.edge_mapping[finished_node_id]: if ( diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index fb5ec55453..be4f79af19 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -65,7 +65,7 @@ class BaseNode: try: result = self._run() except Exception as e: - logger.exception(f"Node {self.node_id} failed to run") + logger.exception("Node %s failed to run", self.node_id) result = NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=str(e), diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index ab5964ebd4..f3061f7d96 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -305,7 +305,7 @@ def _extract_text_from_doc(file_content: bytes) -> str: raise TextExtractionError(f"Failed to extract text from DOC: {str(e)}") from e -def paser_docx_part(block, doc: Document, content_items, i): +def parser_docx_part(block, doc: Document, content_items, i): if isinstance(block, CT_P): content_items.append((i, "paragraph", Paragraph(block, doc))) elif isinstance(block, CT_Tbl): @@ -329,7 +329,7 @@ def _extract_text_from_docx(file_content: bytes) -> str: part = next(it, None) i = 0 while part is not None: - paser_docx_part(part, doc, content_items, i) + parser_docx_part(part, doc, content_items, i) i = i + 1 part = next(it, None) @@ -363,7 +363,7 @@ def _extract_text_from_docx(file_content: bytes) -> str: text.append(markdown_table) except Exception as e: - logger.warning(f"Failed to extract table from DOC: {e}") + logger.warning("Failed to extract table from DOC: %s", e) continue return "\n".join(text) diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 6799d5c63c..bc1d5c9b87 100644 
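The `AgentNode` hunk above replaces a bare `assert isinstance(message.meta, File)` with explicit checks that `meta` is a dict carrying a `File` under the `'file'` key, raising `AgentNodeError` rather than an `AssertionError` that `python -O` would strip. A standalone sketch of that validation, with placeholder classes standing in for Dify's own:

```python
class AgentNodeError(Exception):
    """Placeholder for the exception imported in the hunk above."""


class File:
    """Placeholder for core.file.File."""


def extract_file_from_meta(meta: object) -> File:
    # mirror the checks added to AgentNode's FILE message handling
    if not isinstance(meta, dict):
        raise AgentNodeError("File message meta must be a dict")
    if "file" not in meta:
        raise AgentNodeError("File message is missing 'file' key in meta")
    if not isinstance(meta["file"], File):
        raise AgentNodeError(f"Expected File object but got {type(meta['file']).__name__}")
    return meta["file"]


# A dict wrapping a File passes; anything else raises a typed error
assert isinstance(extract_file_from_meta({"file": File()}), File)
```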
--- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -129,7 +129,7 @@ class HttpRequestNode(BaseNode): }, ) except HttpRequestNodeError as e: - logger.warning(f"http request node {self.node_id} failed to run: {e}") + logger.warning("http request node %s failed to run: %s", self.node_id, e) return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, error=str(e), diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index 86e703dc68..2c83ea3d4f 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -129,7 +129,7 @@ class IfElseNode(BaseNode): var_mapping: dict[str, list[str]] = {} for case in typed_node_data.cases or []: for condition in case.conditions: - key = "{}.#{}#".format(node_id, ".".join(condition.variable_selector)) + key = f"{node_id}.#{'.'.join(condition.variable_selector)}#" var_mapping[key] = condition.variable_selector return var_mapping diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index 5842c8d64b..def1e1cfa3 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -616,7 +616,7 @@ class IterationNode(BaseNode): ) except IterationNodeError as e: - logger.warning(f"Iteration run failed:{str(e)}") + logger.warning("Iteration run failed:%s", str(e)) yield IterationRunFailedEvent( iteration_id=self.id, iteration_node_id=self.node_id, diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 34b0afc75d..e041e217ca 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -453,35 +453,34 @@ class KnowledgeRetrievalNode(BaseNode): elif node_data.metadata_filtering_mode == "manual": if node_data.metadata_filtering_conditions: conditions = [] - if node_data.metadata_filtering_conditions: - for sequence, condition in enumerate(node_data.metadata_filtering_conditions.conditions): # type: ignore - metadata_name = condition.name - expected_value = condition.value - if expected_value is not None and condition.comparison_operator not in ("empty", "not empty"): - if isinstance(expected_value, str): - expected_value = self.graph_runtime_state.variable_pool.convert_template( - expected_value - ).value[0] - if expected_value.value_type in {"number", "integer", "float"}: # type: ignore - expected_value = expected_value.value # type: ignore - elif expected_value.value_type == "string": # type: ignore - expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip() # type: ignore - else: - raise ValueError("Invalid expected metadata value type") - conditions.append( - Condition( - name=metadata_name, - comparison_operator=condition.comparison_operator, - value=expected_value, - ) - ) - filters = self._process_metadata_filter_func( - sequence, - condition.comparison_operator, - metadata_name, - expected_value, - filters, + for sequence, condition in enumerate(node_data.metadata_filtering_conditions.conditions): # type: ignore + metadata_name = condition.name + expected_value = condition.value + if expected_value is not None and condition.comparison_operator not in ("empty", "not empty"): + if isinstance(expected_value, str): + expected_value = 
self.graph_runtime_state.variable_pool.convert_template( + expected_value + ).value[0] + if expected_value.value_type in {"number", "integer", "float"}: # type: ignore + expected_value = expected_value.value # type: ignore + elif expected_value.value_type == "string": # type: ignore + expected_value = re.sub(r"[\r\n\t]+", " ", expected_value.text).strip() # type: ignore + else: + raise ValueError("Invalid expected metadata value type") + conditions.append( + Condition( + name=metadata_name, + comparison_operator=condition.comparison_operator, + value=expected_value, ) + ) + filters = self._process_metadata_filter_func( + sequence, + condition.comparison_operator, + metadata_name, + expected_value, + filters, + ) metadata_condition = MetadataCondition( logical_operator=node_data.metadata_filtering_conditions.logical_operator, conditions=conditions, diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index b91fc622f6..d2e022dc9d 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -299,7 +299,7 @@ def _endswith(value: str) -> Callable[[str], bool]: def _is(value: str) -> Callable[[str], bool]: - return lambda x: x is value + return lambda x: x == value def _in(value: str | Sequence[str]) -> Callable[[str], bool]: diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index a23d284626..45c5e0a62c 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -670,7 +670,7 @@ class ParameterExtractorNode(BaseNode): return cast(dict, json.loads(json_str)) except Exception: pass - logger.info(f"extra error: {result}") + logger.info("extra error: %s", result) return None def _extract_json_from_tool_call(self, tool_call: AssistantPromptMessage.ToolCall) -> Optional[dict]: @@ -690,7 +690,7 @@ class ParameterExtractorNode(BaseNode): return cast(dict, json.loads(json_str)) except Exception: pass - logger.info(f"extra error: {result}") + logger.info("extra error: %s", result) return None def _generate_default_result(self, data: ParameterExtractorNodeData) -> dict: diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py index 15012fa48d..3e4984ecd5 100644 --- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py +++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py @@ -385,9 +385,8 @@ class QuestionClassifierNode(BaseNode): text=QUESTION_CLASSIFIER_COMPLETION_PROMPT.format( histories=memory_str, input_text=input_text, - categories=json.dumps(categories), + categories=json.dumps(categories, ensure_ascii=False), classification_instructions=instruction, - ensure_ascii=False, ) ) diff --git a/api/core/workflow/nodes/tool/entities.py b/api/core/workflow/nodes/tool/entities.py index f0a44d919b..c1cfbb1edc 100644 --- a/api/core/workflow/nodes/tool/entities.py +++ b/api/core/workflow/nodes/tool/entities.py @@ -54,8 +54,8 @@ class ToolNodeData(BaseNodeData, ToolEntity): for val in value: if not isinstance(val, str): raise ValueError("value must be a list of strings") - elif typ == "constant" and not isinstance(value, str | int | float | bool): - raise ValueError("value must be a string, int, float, or bool") + elif typ == "constant" and not 
isinstance(value, str | int | float | bool | dict): + raise ValueError("value must be a string, int, float, bool, or dict") return typ tool_parameters: dict[str, ToolInput] diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index f437ac841d..4c8e13de70 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.callback_handler.workflow_tool_callback_handler import DifyWorkflowCallbackHandler from core.file import File, FileTransferMethod -from core.plugin.impl.exc import PluginDaemonClientSideError +from core.plugin.impl.exc import PluginDaemonClientSideError, PluginInvokeError from core.plugin.impl.plugin import PluginInstaller from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter from core.tools.errors import ToolInvokeError @@ -141,13 +141,36 @@ class ToolNode(BaseNode): tenant_id=self.tenant_id, node_id=self.node_id, ) - except (PluginDaemonClientSideError, ToolInvokeError) as e: + except ToolInvokeError as e: yield RunCompletedEvent( run_result=NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, inputs=parameters_for_log, metadata={WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info}, - error=f"Failed to transform tool message: {str(e)}", + error=f"Failed to invoke tool {node_data.provider_name}: {str(e)}", + error_type=type(e).__name__, + ) + ) + except PluginInvokeError as e: + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=parameters_for_log, + metadata={WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info}, + error="An error occurred in the plugin, " + f"please contact the author of {node_data.provider_name} for help, " + f"error type: {e.get_error_type()}, " + f"error details: {e.get_error_message()}", + error_type=type(e).__name__, + ) + ) + except PluginDaemonClientSideError as e: + yield RunCompletedEvent( + run_result=NodeRunResult( + status=WorkflowNodeExecutionStatus.FAILED, + inputs=parameters_for_log, + metadata={WorkflowNodeExecutionMetadataKey.TOOL_INFO: tool_info}, + error=f"Failed to invoke tool, error: {e.description}", error_type=type(e).__name__, ) ) diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index c8082ebf50..801e36e272 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -67,7 +67,7 @@ class WorkflowEntry: # check call depth workflow_call_max_depth = dify_config.WORKFLOW_CALL_MAX_DEPTH if call_depth > workflow_call_max_depth: - raise ValueError("Max workflow call depth {} reached.".format(workflow_call_max_depth)) + raise ValueError(f"Max workflow call depth {workflow_call_max_depth} reached.") # init workflow run state graph_runtime_state = GraphRuntimeState(variable_pool=variable_pool, start_at=time.perf_counter()) @@ -193,7 +193,13 @@ class WorkflowEntry: # run node generator = node.run() except Exception as e: - logger.exception(f"error while running node, {workflow.id=}, {node.id=}, {node.type_=}, {node.version()=}") + logger.exception( "error while running node, workflow_id=%s, node_id=%s, node_type=%s, node_version=%s", workflow.id, node.id, node.type_, node.version(), ) raise WorkflowNodeRunFailedError(node=node, err_msg=str(e)) return node, generator @@ -297,7 +303,12 @@ class WorkflowEntry: return node, generator except Exception as e: - logger.exception(f"error while running node, {node.id=}, {node.type_=},
{node.version()=}") + logger.exception( + "error while running node, node_id=%s, node_type=%s, node_version=%s", + node.id, + node.type_, + node.version(), + ) raise WorkflowNodeRunFailedError(node=node, err_msg=str(e)) @staticmethod diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index 4de9a25c2f..a850ea9a50 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -2,6 +2,11 @@ set -e +# Set UTF-8 encoding to address potential encoding issues in containerized environments +export LANG=${LANG:-en_US.UTF-8} +export LC_ALL=${LC_ALL:-en_US.UTF-8} +export PYTHONIOENCODING=${PYTHONIOENCODING:-utf-8} + if [[ "${MIGRATION_ENABLED}" == "true" ]]; then echo "Running migrations" flask upgrade-db diff --git a/api/events/event_handlers/create_document_index.py b/api/events/event_handlers/create_document_index.py index dc50ca8d96..bdb69945f0 100644 --- a/api/events/event_handlers/create_document_index.py +++ b/api/events/event_handlers/create_document_index.py @@ -18,7 +18,7 @@ def handle(sender, **kwargs): documents = [] start_at = time.perf_counter() for document_id in document_ids: - logging.info(click.style("Start process document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document) @@ -42,7 +42,7 @@ def handle(sender, **kwargs): indexing_runner = IndexingRunner() indexing_runner.run(documents) end_at = time.perf_counter() - logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green")) + logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: logging.info(click.style(str(ex), fg="yellow")) except Exception: diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index d3943f2eda..2ed42c71ea 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -131,9 +131,11 @@ def handle(sender: Message, **kwargs): duration = time_module.perf_counter() - start_time logger.info( - f"Provider updates completed successfully. " - f"Updates: {len(updates_to_perform)}, Duration: {duration:.3f}s, " - f"Tenant: {tenant_id}, Provider: {provider_name}" + "Provider updates completed successfully. Updates: %s, Duration: %s s, Tenant: %s, Provider: %s", + len(updates_to_perform), + duration, + tenant_id, + provider_name, ) except Exception as e: @@ -141,9 +143,11 @@ def handle(sender: Message, **kwargs): duration = time_module.perf_counter() - start_time logger.exception( - f"Provider updates failed after {duration:.3f}s. " - f"Updates: {len(updates_to_perform)}, Tenant: {tenant_id}, " - f"Provider: {provider_name}" + "Provider updates failed after %s s. Updates: %s, Tenant: %s, Provider: %s", + duration, + len(updates_to_perform), + tenant_id, + provider_name, ) raise @@ -219,16 +223,20 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] rows_affected = result.rowcount logger.debug( - f"Provider update ({description}): {rows_affected} rows affected. " - f"Filters: {filters.model_dump()}, Values: {update_values}" + "Provider update (%s): %s rows affected. 
Filters: %s, Values: %s", + description, + rows_affected, + filters.model_dump(), + update_values, ) # If no rows were affected for quota updates, log a warning if rows_affected == 0 and description == "quota_deduction_update": logger.warning( - f"No Provider rows updated for quota deduction. " - f"This may indicate quota limit exceeded or provider not found. " - f"Filters: {filters.model_dump()}" + "No Provider rows updated for quota deduction. " + "This may indicate quota limit exceeded or provider not found. " + "Filters: %s", + filters.model_dump(), ) - logger.debug(f"Successfully processed {len(updates_to_perform)} Provider updates") + logger.debug("Successfully processed %s Provider updates", len(updates_to_perform)) diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 2c2846ba26..bd72c93404 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -73,13 +73,13 @@ def init_app(app: DifyApp) -> Celery: imports.append("schedule.clean_embedding_cache_task") beat_schedule["clean_embedding_cache_task"] = { "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task", - "schedule": timedelta(days=day), + "schedule": crontab(minute="0", hour="2", day_of_month=f"*/{day}"), } if dify_config.ENABLE_CLEAN_UNUSED_DATASETS_TASK: imports.append("schedule.clean_unused_datasets_task") beat_schedule["clean_unused_datasets_task"] = { "task": "schedule.clean_unused_datasets_task.clean_unused_datasets_task", - "schedule": timedelta(days=day), + "schedule": crontab(minute="0", hour="3", day_of_month=f"*/{day}"), } if dify_config.ENABLE_CREATE_TIDB_SERVERLESS_TASK: imports.append("schedule.create_tidb_serverless_task") @@ -97,7 +97,7 @@ def init_app(app: DifyApp) -> Celery: imports.append("schedule.clean_messages") beat_schedule["clean_messages"] = { "task": "schedule.clean_messages.clean_messages", - "schedule": timedelta(days=day), + "schedule": crontab(minute="0", hour="4", day_of_month=f"*/{day}"), } if dify_config.ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: imports.append("schedule.mail_clean_document_notify_task") diff --git a/api/extensions/ext_mail.py b/api/extensions/ext_mail.py index df5d8a9c11..fe05138196 100644 --- a/api/extensions/ext_mail.py +++ b/api/extensions/ext_mail.py @@ -64,7 +64,7 @@ class Mail: sendgrid_api_key=dify_config.SENDGRID_API_KEY, _from=dify_config.MAIL_DEFAULT_SEND_FROM or "" ) case _: - raise ValueError("Unsupported mail type {}".format(mail_type)) + raise ValueError(f"Unsupported mail type {mail_type}") def send(self, to: str, subject: str, html: str, from_: Optional[str] = None): if not self._client: diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index be2f6115f7..14b9273e9d 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -137,7 +137,7 @@ def redis_fallback(default_return: Any = None): try: return func(*args, **kwargs) except RedisError as e: - logger.warning(f"Redis operation failed in {func.__name__}: {str(e)}", exc_info=True) + logger.warning("Redis operation failed in %s: %s", func.__name__, str(e), exc_info=True) return default_return return wrapper diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index 81eec94da4..7ec0889776 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -69,7 +69,7 @@ class AzureBlobStorage(BaseStorage): if self.account_key == "managedidentity": return BlobServiceClient(account_url=self.account_url, 
credential=self.credential) # type: ignore - cache_key = "azure_blob_sas_token_{}_{}".format(self.account_name, self.account_key) + cache_key = f"azure_blob_sas_token_{self.account_name}_{self.account_key}" cache_result = redis_client.get(cache_key) if cache_result is not None: sas_token = cache_result.decode("utf-8") diff --git a/api/extensions/storage/opendal_storage.py b/api/extensions/storage/opendal_storage.py index 12e2738e9d..0ba35506d3 100644 --- a/api/extensions/storage/opendal_storage.py +++ b/api/extensions/storage/opendal_storage.py @@ -35,21 +35,21 @@ class OpenDALStorage(BaseStorage): Path(root).mkdir(parents=True, exist_ok=True) self.op = opendal.Operator(scheme=scheme, **kwargs) # type: ignore - logger.debug(f"opendal operator created with scheme {scheme}") + logger.debug("opendal operator created with scheme %s", scheme) retry_layer = opendal.layers.RetryLayer(max_times=3, factor=2.0, jitter=True) self.op = self.op.layer(retry_layer) logger.debug("added retry layer to opendal operator") def save(self, filename: str, data: bytes) -> None: self.op.write(path=filename, bs=data) - logger.debug(f"file {filename} saved") + logger.debug("file %s saved", filename) def load_once(self, filename: str) -> bytes: if not self.exists(filename): raise FileNotFoundError("File not found") content: bytes = self.op.read(path=filename) - logger.debug(f"file {filename} loaded") + logger.debug("file %s loaded", filename) return content def load_stream(self, filename: str) -> Generator: @@ -60,7 +60,7 @@ class OpenDALStorage(BaseStorage): file = self.op.open(path=filename, mode="rb") while chunk := file.read(batch_size): yield chunk - logger.debug(f"file {filename} loaded as stream") + logger.debug("file %s loaded as stream", filename) def download(self, filename: str, target_filepath: str): if not self.exists(filename): @@ -68,7 +68,7 @@ class OpenDALStorage(BaseStorage): with Path(target_filepath).open("wb") as f: f.write(self.op.read(path=filename)) - logger.debug(f"file {filename} downloaded to {target_filepath}") + logger.debug("file %s downloaded to %s", filename, target_filepath) def exists(self, filename: str) -> bool: res: bool = self.op.exists(path=filename) @@ -77,9 +77,9 @@ class OpenDALStorage(BaseStorage): def delete(self, filename: str): if self.exists(filename): self.op.delete(path=filename) - logger.debug(f"file {filename} deleted") + logger.debug("file %s deleted", filename) return - logger.debug(f"file {filename} not found, skip delete") + logger.debug("file %s not found, skip delete", filename) def scan(self, path: str, files: bool = True, directories: bool = False) -> list[str]: if not self.exists(path): @@ -87,13 +87,13 @@ class OpenDALStorage(BaseStorage): all_files = self.op.scan(path=path) if files and directories: - logger.debug(f"files and directories on {path} scanned") + logger.debug("files and directories on %s scanned", path) return [f.path for f in all_files] if files: - logger.debug(f"files on {path} scanned") + logger.debug("files on %s scanned", path) return [f.path for f in all_files if not f.path.endswith("/")] elif directories: - logger.debug(f"directories on {path} scanned") + logger.debug("directories on %s scanned", path) return [f.path for f in all_files if f.path.endswith("/")] else: raise ValueError("At least one of files or directories must be True") diff --git a/api/extensions/storage/volcengine_tos_storage.py b/api/extensions/storage/volcengine_tos_storage.py index 55fe6545ec..32839d3497 100644 --- a/api/extensions/storage/volcengine_tos_storage.py 
+++ b/api/extensions/storage/volcengine_tos_storage.py @@ -25,7 +25,7 @@ class VolcengineTosStorage(BaseStorage): def load_once(self, filename: str) -> bytes: data = self.client.get_object(bucket=self.bucket_name, key=filename).read() if not isinstance(data, bytes): - raise TypeError("Expected bytes, got {}".format(type(data).__name__)) + raise TypeError(f"Expected bytes, got {type(data).__name__}") return data def load_stream(self, filename: str) -> Generator: diff --git a/api/libs/email_i18n.py b/api/libs/email_i18n.py index bfbf41a073..b7c9f3ec6c 100644 --- a/api/libs/email_i18n.py +++ b/api/libs/email_i18n.py @@ -25,6 +25,7 @@ class EmailType(Enum): EMAIL_CODE_LOGIN = "email_code_login" CHANGE_EMAIL_OLD = "change_email_old" CHANGE_EMAIL_NEW = "change_email_new" + CHANGE_EMAIL_COMPLETED = "change_email_completed" OWNER_TRANSFER_CONFIRM = "owner_transfer_confirm" OWNER_TRANSFER_OLD_NOTIFY = "owner_transfer_old_notify" OWNER_TRANSFER_NEW_NOTIFY = "owner_transfer_new_notify" @@ -344,6 +345,18 @@ def create_default_email_config() -> EmailI18nConfig: branded_template_path="without-brand/change_mail_confirm_new_template_zh-CN.html", ), }, + EmailType.CHANGE_EMAIL_COMPLETED: { + EmailLanguage.EN_US: EmailTemplate( + subject="Your login email has been changed", + template_path="change_mail_completed_template_en-US.html", + branded_template_path="without-brand/change_mail_completed_template_en-US.html", + ), + EmailLanguage.ZH_HANS: EmailTemplate( + subject="您的登录邮箱已更改", + template_path="change_mail_completed_template_zh-CN.html", + branded_template_path="without-brand/change_mail_completed_template_zh-CN.html", + ), + }, EmailType.OWNER_TRANSFER_CONFIRM: { EmailLanguage.EN_US: EmailTemplate( subject="Verify Your Request to Transfer Workspace Ownership", diff --git a/api/libs/helper.py b/api/libs/helper.py index 00772d530a..b36f972e19 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -95,7 +95,7 @@ def email(email): if re.match(pattern, email) is not None: return email - error = "{email} is not a valid email.".format(email=email) + error = f"{email} is not a valid email." raise ValueError(error) @@ -107,7 +107,7 @@ def uuid_value(value): uuid_obj = uuid.UUID(value) return str(uuid_obj) except ValueError: - error = "{value} is not a valid uuid.".format(value=value) + error = f"{value} is not a valid uuid." raise ValueError(error) @@ -126,7 +126,7 @@ def timestamp_value(timestamp): raise ValueError return int_timestamp except ValueError: - error = "{timestamp} is not a valid timestamp.".format(timestamp=timestamp) + error = f"{timestamp} is not a valid timestamp." raise ValueError(error) @@ -169,14 +169,14 @@ def _get_float(value): try: return float(value) except (TypeError, ValueError): - raise ValueError("{} is not a valid float".format(value)) + raise ValueError(f"{value} is not a valid float") def timezone(timezone_string): if timezone_string and timezone_string in available_timezones(): return timezone_string - error = "{timezone_string} is not a valid timezone.".format(timezone_string=timezone_string) + error = f"{timezone_string} is not a valid timezone." 
raise ValueError(error) @@ -321,7 +321,7 @@ class TokenManager: key = cls._get_token_key(token, token_type) token_data_json = redis_client.get(key) if token_data_json is None: - logging.warning(f"{token_type} token {token} not found with key {key}") + logging.warning("%s token %s not found with key %s", token_type, token, key) return None token_data: Optional[dict[str, Any]] = json.loads(token_data_json) return token_data diff --git a/api/libs/rsa.py b/api/libs/rsa.py index ed7a0eb116..598e5bc9e3 100644 --- a/api/libs/rsa.py +++ b/api/libs/rsa.py @@ -50,13 +50,13 @@ def encrypt(text: str, public_key: Union[str, bytes]) -> bytes: def get_decrypt_decoding(tenant_id: str) -> tuple[RSA.RsaKey, object]: filepath = os.path.join("privkeys", tenant_id, "private.pem") - cache_key = "tenant_privkey:{hash}".format(hash=hashlib.sha3_256(filepath.encode()).hexdigest()) + cache_key = f"tenant_privkey:{hashlib.sha3_256(filepath.encode()).hexdigest()}" private_key = redis_client.get(cache_key) if not private_key: try: private_key = storage.load(filepath) except FileNotFoundError: - raise PrivkeyNotFoundError("Private key not found, tenant_id: {tenant_id}".format(tenant_id=tenant_id)) + raise PrivkeyNotFoundError(f"Private key not found, tenant_id: {tenant_id}") redis_client.setex(cache_key, 120, private_key) diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index 5409e3eeeb..cfc6c7d794 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -41,5 +41,5 @@ class SendGridClient: ) raise except Exception as e: - logging.exception(f"SendGridClient Unexpected error occurred while sending email to {_to}") + logging.exception("SendGridClient Unexpected error occurred while sending email to %s", _to) raise diff --git a/api/libs/smtp.py b/api/libs/smtp.py index b94386660e..a01ad6fab8 100644 --- a/api/libs/smtp.py +++ b/api/libs/smtp.py @@ -50,7 +50,7 @@ class SMTPClient: logging.exception("Timeout occurred while sending email") raise except Exception as e: - logging.exception(f"Unexpected error occurred while sending email to {mail['to']}") + logging.exception("Unexpected error occurred while sending email to %s", mail["to"]) raise finally: if smtp: diff --git a/api/models/dataset.py b/api/models/dataset.py index d877540213..01372f8bf6 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -911,7 +911,7 @@ class DatasetKeywordTable(Base): return json.loads(keyword_table_text.decode("utf-8"), cls=SetDecoder) return None except Exception as e: - logging.exception(f"Failed to load keyword table from file: {file_key}") + logging.exception("Failed to load keyword table from file: %s", file_key) return None diff --git a/api/models/model.py b/api/models/model.py index a78a91ebd5..9f6d51b315 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -32,9 +32,6 @@ from .engine import db from .enums import CreatorUserRole from .types import StringUUID -if TYPE_CHECKING: - from .workflow import Workflow - class DifySetup(Base): __tablename__ = "dify_setups" diff --git a/api/models/workflow.py b/api/models/workflow.py index 79d96e42dd..d89db6c7da 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -42,9 +42,6 @@ from .types import EnumText, StringUUID _logger = logging.getLogger(__name__) -if TYPE_CHECKING: - from models.model import AppMode - class WorkflowType(Enum): """ diff --git a/api/pyproject.toml b/api/pyproject.toml index 7ec8a91198..be42b509ed 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-api" -version = "1.7.0" +version 
= "1.7.1" requires-python = ">=3.11,<3.13" dependencies = [ diff --git a/api/repositories/factory.py b/api/repositories/factory.py index 0a0adbf2c2..070cdd46dd 100644 --- a/api/repositories/factory.py +++ b/api/repositories/factory.py @@ -48,7 +48,7 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): RepositoryImportError: If the configured repository cannot be imported or instantiated """ class_path = dify_config.API_WORKFLOW_NODE_EXECUTION_REPOSITORY - logger.debug(f"Creating DifyAPIWorkflowNodeExecutionRepository from: {class_path}") + logger.debug("Creating DifyAPIWorkflowNodeExecutionRepository from: %s", class_path) try: repository_class = cls._import_class(class_path) @@ -86,7 +86,7 @@ class DifyAPIRepositoryFactory(DifyCoreRepositoryFactory): RepositoryImportError: If the configured repository cannot be imported or instantiated """ class_path = dify_config.API_WORKFLOW_RUN_REPOSITORY - logger.debug(f"Creating APIWorkflowRunRepository from: {class_path}") + logger.debug("Creating APIWorkflowRunRepository from: %s", class_path) try: repository_class = cls._import_class(class_path) diff --git a/api/repositories/sqlalchemy_api_workflow_run_repository.py b/api/repositories/sqlalchemy_api_workflow_run_repository.py index ebd1d74b20..7c3b1f4ce0 100644 --- a/api/repositories/sqlalchemy_api_workflow_run_repository.py +++ b/api/repositories/sqlalchemy_api_workflow_run_repository.py @@ -155,7 +155,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): session.commit() deleted_count = cast(int, result.rowcount) - logger.info(f"Deleted {deleted_count} workflow runs by IDs") + logger.info("Deleted %s workflow runs by IDs", deleted_count) return deleted_count def delete_runs_by_app( @@ -193,11 +193,11 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository): batch_deleted = result.rowcount total_deleted += batch_deleted - logger.info(f"Deleted batch of {batch_deleted} workflow runs for app {app_id}") + logger.info("Deleted batch of %s workflow runs for app %s", batch_deleted, app_id) # If we deleted fewer records than the batch size, we're done if batch_deleted < batch_size: break - logger.info(f"Total deleted {total_deleted} workflow runs for app {app_id}") + logger.info("Total deleted %s workflow runs for app %s", total_deleted, app_id) return total_deleted diff --git a/api/schedule/check_upgradable_plugin_task.py b/api/schedule/check_upgradable_plugin_task.py index c1d6018827..e27391b558 100644 --- a/api/schedule/check_upgradable_plugin_task.py +++ b/api/schedule/check_upgradable_plugin_task.py @@ -16,7 +16,7 @@ def check_upgradable_plugin_task(): start_at = time.perf_counter() now_seconds_of_day = time.time() % 86400 - 30 # we assume the tz is UTC - click.echo(click.style("Now seconds of day: {}".format(now_seconds_of_day), fg="green")) + click.echo(click.style(f"Now seconds of day: {now_seconds_of_day}", fg="green")) strategies = ( db.session.query(TenantPluginAutoUpgradeStrategy) @@ -43,7 +43,7 @@ def check_upgradable_plugin_task(): end_at = time.perf_counter() click.echo( click.style( - "Checked upgradable plugin success latency: {}".format(end_at - start_at), + f"Checked upgradable plugin success latency: {end_at - start_at}", fg="green", ) ) diff --git a/api/schedule/clean_embedding_cache_task.py b/api/schedule/clean_embedding_cache_task.py index 024e3d6f50..2298acf6eb 100644 --- a/api/schedule/clean_embedding_cache_task.py +++ b/api/schedule/clean_embedding_cache_task.py @@ -39,4 +39,4 @@ def clean_embedding_cache_task(): else: break 
end_at = time.perf_counter() - click.echo(click.style("Cleaned embedding cache from db success latency: {}".format(end_at - start_at), fg="green")) + click.echo(click.style(f"Cleaned embedding cache from db success latency: {end_at - start_at}", fg="green")) diff --git a/api/schedule/clean_messages.py b/api/schedule/clean_messages.py index a6851e36e5..4c35745959 100644 --- a/api/schedule/clean_messages.py +++ b/api/schedule/clean_messages.py @@ -87,4 +87,4 @@ def clean_messages(): db.session.query(Message).where(Message.id == message.id).delete() db.session.commit() end_at = time.perf_counter() - click.echo(click.style("Cleaned messages from db success latency: {}".format(end_at - start_at), fg="green")) + click.echo(click.style(f"Cleaned messages from db success latency: {end_at - start_at}", fg="green")) diff --git a/api/schedule/clean_unused_datasets_task.py b/api/schedule/clean_unused_datasets_task.py index 72e2e73e65..7887835bc5 100644 --- a/api/schedule/clean_unused_datasets_task.py +++ b/api/schedule/clean_unused_datasets_task.py @@ -101,11 +101,9 @@ def clean_unused_datasets_task(): # update document db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False}) db.session.commit() - click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")) + click.echo(click.style(f"Cleaned unused dataset {dataset.id} from db success!", fg="green")) except Exception as e: - click.echo( - click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red") - ) + click.echo(click.style(f"clean dataset index error: {e.__class__.__name__} {str(e)}", fg="red")) while True: try: # Subquery for counting new documents @@ -176,12 +174,8 @@ def clean_unused_datasets_task(): # update document db.session.query(Document).filter_by(dataset_id=dataset.id).update({Document.enabled: False}) db.session.commit() - click.echo( - click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green") - ) + click.echo(click.style(f"Cleaned unused dataset {dataset.id} from db success!", fg="green")) except Exception as e: - click.echo( - click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red") - ) + click.echo(click.style(f"clean dataset index error: {e.__class__.__name__} {str(e)}", fg="red")) end_at = time.perf_counter() - click.echo(click.style("Cleaned unused dataset from db success latency: {}".format(end_at - start_at), fg="green")) + click.echo(click.style(f"Cleaned unused dataset from db success latency: {end_at - start_at}", fg="green")) diff --git a/api/schedule/create_tidb_serverless_task.py b/api/schedule/create_tidb_serverless_task.py index 91953354e6..c343063fae 100644 --- a/api/schedule/create_tidb_serverless_task.py +++ b/api/schedule/create_tidb_serverless_task.py @@ -33,7 +33,7 @@ def create_tidb_serverless_task(): break end_at = time.perf_counter() - click.echo(click.style("Create tidb serverless task success latency: {}".format(end_at - start_at), fg="green")) + click.echo(click.style(f"Create tidb serverless task success latency: {end_at - start_at}", fg="green")) def create_clusters(batch_size): diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index 5911c98b0a..03ef9062bd 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -90,7 +90,7 @@ def mail_clean_document_notify_task(): db.session.commit() end_at = 
time.perf_counter() logging.info( - click.style("Send document clean notify mail succeeded: latency: {}".format(end_at - start_at), fg="green") + click.style(f"Send document clean notify mail succeeded: latency: {end_at - start_at}", fg="green") ) except Exception: logging.exception("Send document clean notify mail failed") diff --git a/api/schedule/update_tidb_serverless_status_task.py b/api/schedule/update_tidb_serverless_status_task.py index 4d6c1f1877..1bfeb869e2 100644 --- a/api/schedule/update_tidb_serverless_status_task.py +++ b/api/schedule/update_tidb_serverless_status_task.py @@ -29,9 +29,7 @@ def update_tidb_serverless_status_task(): click.echo(click.style(f"Error: {e}", fg="red")) end_at = time.perf_counter() - click.echo( - click.style("Update tidb serverless status task success latency: {}".format(end_at - start_at), fg="green") - ) + click.echo(click.style(f"Update tidb serverless status task success latency: {end_at - start_at}", fg="green")) def update_clusters(tidb_serverless_list: list[TidbAuthBinding]): diff --git a/api/services/account_service.py b/api/services/account_service.py index 59bffa873c..1cce8e67a4 100644 --- a/api/services/account_service.py +++ b/api/services/account_service.py @@ -54,7 +54,10 @@ from services.errors.workspace import WorkSpaceNotAllowedCreateError, Workspaces from services.feature_service import FeatureService from tasks.delete_account_task import delete_account_task from tasks.mail_account_deletion_task import send_account_deletion_verification_code -from tasks.mail_change_mail_task import send_change_mail_task +from tasks.mail_change_mail_task import ( + send_change_mail_completed_notification_task, + send_change_mail_task, +) from tasks.mail_email_code_login import send_email_code_login_mail_task from tasks.mail_invite_member_task import send_invite_member_mail_task from tasks.mail_owner_transfer_task import ( @@ -329,9 +332,9 @@ class AccountService: db.session.add(account_integrate) db.session.commit() - logging.info(f"Account {account.id} linked {provider} account {open_id}.") + logging.info("Account %s linked %s account %s.", account.id, provider, open_id) except Exception as e: - logging.exception(f"Failed to link {provider} account {open_id} to Account {account.id}") + logging.exception("Failed to link %s account %s to Account %s", provider, open_id, account.id) raise LinkAccountIntegrateError("Failed to link account.") from e @staticmethod @@ -352,6 +355,17 @@ class AccountService: db.session.commit() return account + @staticmethod + def update_account_email(account: Account, email: str) -> Account: + """Update account email""" + account.email = email + account_integrate = db.session.query(AccountIntegrate).filter_by(account_id=account.id).first() + if account_integrate: + db.session.delete(account_integrate) + db.session.add(account) + db.session.commit() + return account + @staticmethod def update_login_info(account: Account, *, ip_address: str) -> None: """Update last login time and ip""" @@ -461,6 +475,22 @@ class AccountService: cls.change_email_rate_limiter.increment_rate_limit(account_email) return token + @classmethod + def send_change_email_completed_notify_email( + cls, + account: Optional[Account] = None, + email: Optional[str] = None, + language: Optional[str] = "en-US", + ): + account_email = account.email if account else email + if account_email is None: + raise ValueError("Email must be provided.") + + send_change_mail_completed_notification_task.delay( + language=language, + to=account_email, + ) + @classmethod def 
send_owner_transfer_email( cls, @@ -652,6 +682,12 @@ class AccountService: return account + @classmethod + def is_account_in_freeze(cls, email: str) -> bool: + if dify_config.BILLING_ENABLED and BillingService.is_email_in_freeze(email): + return True + return False + @staticmethod @redis_fallback(default_return=None) def add_login_error_rate_limit(email: str) -> None: @@ -881,7 +917,7 @@ class TenantService: """Create tenant member""" if role == TenantAccountRole.OWNER.value: if TenantService.has_roles(tenant, [TenantAccountRole.OWNER]): - logging.error(f"Tenant {tenant.id} has already an owner.") + logging.error("Tenant %s has already an owner.", tenant.id) raise Exception("Tenant already has an owner.") ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=account.id).first() @@ -1133,7 +1169,7 @@ class RegisterService: db.session.query(Tenant).delete() db.session.commit() - logging.exception(f"Setup account failed, email: {email}, name: {name}") + logging.exception("Setup account failed, email: %s, name: %s", email, name) raise ValueError(f"Setup failed: {e}") @classmethod @@ -1257,7 +1293,7 @@ class RegisterService: def revoke_token(cls, workspace_id: str, email: str, token: str): if workspace_id and email: email_hash = sha256(email.encode()).hexdigest() - cache_key = "member_invite_token:{}, {}:{}".format(workspace_id, email_hash, token) + cache_key = f"member_invite_token:{workspace_id}, {email_hash}:{token}" redis_client.delete(cache_key) else: redis_client.delete(cls._get_invitation_token_key(token)) diff --git a/api/services/annotation_service.py b/api/services/annotation_service.py index 7cb0b46517..cfa917daf6 100644 --- a/api/services/annotation_service.py +++ b/api/services/annotation_service.py @@ -74,14 +74,14 @@ class AppAnnotationService: @classmethod def enable_app_annotation(cls, args: dict, app_id: str) -> dict: - enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id)) + enable_app_annotation_key = f"enable_app_annotation_{str(app_id)}" cache_result = redis_client.get(enable_app_annotation_key) if cache_result is not None: return {"job_id": cache_result, "job_status": "processing"} # async job job_id = str(uuid.uuid4()) - enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id)) + enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(enable_app_annotation_job_key, "waiting") enable_annotation_reply_task.delay( @@ -97,14 +97,14 @@ class AppAnnotationService: @classmethod def disable_app_annotation(cls, app_id: str) -> dict: - disable_app_annotation_key = "disable_app_annotation_{}".format(str(app_id)) + disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}" cache_result = redis_client.get(disable_app_annotation_key) if cache_result is not None: return {"job_id": cache_result, "job_status": "processing"} # async job job_id = str(uuid.uuid4()) - disable_app_annotation_job_key = "disable_app_annotation_job_{}".format(str(job_id)) + disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}" # send batch add segments task redis_client.setnx(disable_app_annotation_job_key, "waiting") disable_annotation_reply_task.delay(str(job_id), app_id, current_user.current_tenant_id) @@ -127,8 +127,8 @@ class AppAnnotationService: .where(MessageAnnotation.app_id == app_id) .where( or_( - MessageAnnotation.question.ilike("%{}%".format(keyword)), - MessageAnnotation.content.ilike("%{}%".format(keyword)), + 
MessageAnnotation.question.ilike(f"%{keyword}%"), + MessageAnnotation.content.ilike(f"%{keyword}%"), ) ) .order_by(MessageAnnotation.created_at.desc(), MessageAnnotation.id.desc()) @@ -280,7 +280,7 @@ class AppAnnotationService: try: # Skip the first row - df = pd.read_csv(file) + df = pd.read_csv(file, dtype=str) result = [] for index, row in df.iterrows(): content = {"question": row.iloc[0], "answer": row.iloc[1]} @@ -295,7 +295,7 @@ class AppAnnotationService: raise ValueError("The number of annotations exceeds the limit of your subscription.") # async job job_id = str(uuid.uuid4()) - indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id)) + indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}" # send batch add segments task redis_client.setnx(indexing_cache_key, "waiting") batch_import_annotations_task.delay( @@ -440,3 +440,38 @@ class AppAnnotationService: "embedding_model_name": collection_binding_detail.model_name, }, } + + @classmethod + def clear_all_annotations(cls, app_id: str) -> dict: + app = ( + db.session.query(App) + .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .first() + ) + + if not app: + raise NotFound("App not found") + + # if annotation reply is enabled, delete annotation index + app_annotation_setting = ( + db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first() + ) + + annotations_query = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app_id) + for annotation in annotations_query.yield_per(100): + annotation_hit_histories_query = db.session.query(AppAnnotationHitHistory).filter( + AppAnnotationHitHistory.annotation_id == annotation.id + ) + for annotation_hit_history in annotation_hit_histories_query.yield_per(100): + db.session.delete(annotation_hit_history) + + # if annotation reply is enabled, delete annotation index + if app_annotation_setting: + delete_annotation_index_task.delay( + annotation.id, app_id, current_user.current_tenant_id, app_annotation_setting.collection_binding_id + ) + + db.session.delete(annotation) + + db.session.commit() + return {"result": "success"} diff --git a/api/services/api_based_extension_service.py b/api/services/api_based_extension_service.py index 457c91e5c0..2f28eff165 100644 --- a/api/services/api_based_extension_service.py +++ b/api/services/api_based_extension_service.py @@ -102,4 +102,4 @@ class APIBasedExtensionService: if resp.get("result") != "pong": raise ValueError(resp) except Exception as e: - raise ValueError("connection error: {}".format(e)) + raise ValueError(f"connection error: {e}") diff --git a/api/services/app_service.py b/api/services/app_service.py index 0b6b85bcb2..0f22666d5a 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -53,9 +53,10 @@ class AppService: if args.get("name"): name = args["name"][:30] filters.append(App.name.ilike(f"%{name}%")) - if args.get("tag_ids"): + # Check if tag_ids is not empty to avoid WHERE false condition + if args.get("tag_ids") and len(args["tag_ids"]) > 0: target_ids = TagService.get_target_ids_by_tag_ids("app", tenant_id, args["tag_ids"]) - if target_ids: + if target_ids and len(target_ids) > 0: filters.append(App.id.in_(target_ids)) else: return None @@ -94,7 +95,7 @@ class AppService: except (ProviderTokenNotInitError, LLMBadRequestError): model_instance = None except Exception as e: - logging.exception(f"Get default model instance failed, tenant_id: {tenant_id}") + logging.exception("Get default 
model instance failed, tenant_id: %s", tenant_id) model_instance = None if model_instance: diff --git a/api/services/clear_free_plan_tenant_expired_logs.py b/api/services/clear_free_plan_tenant_expired_logs.py index ad9b750d40..d057a14afb 100644 --- a/api/services/clear_free_plan_tenant_expired_logs.py +++ b/api/services/clear_free_plan_tenant_expired_logs.py @@ -228,7 +228,7 @@ class ClearFreePlanTenantExpiredLogs: # only process sandbox tenant cls.process_tenant(flask_app, tenant_id, days, batch) except Exception: - logger.exception(f"Failed to process tenant {tenant_id}") + logger.exception("Failed to process tenant %s", tenant_id) finally: nonlocal handled_tenant_count handled_tenant_count += 1 @@ -311,7 +311,7 @@ class ClearFreePlanTenantExpiredLogs: try: tenants.append(tenant_id) except Exception: - logger.exception(f"Failed to process tenant {tenant_id}") + logger.exception("Failed to process tenant %s", tenant_id) continue futures.append( diff --git a/api/services/conversation_service.py b/api/services/conversation_service.py index 525c87fe4a..206c832a20 100644 --- a/api/services/conversation_service.py +++ b/api/services/conversation_service.py @@ -46,9 +46,11 @@ class ConversationService: Conversation.from_account_id == (user.id if isinstance(user, Account) else None), or_(Conversation.invoke_from.is_(None), Conversation.invoke_from == invoke_from.value), ) - if include_ids is not None: + # Check if include_ids is not None and not empty to avoid WHERE false condition + if include_ids is not None and len(include_ids) > 0: stmt = stmt.where(Conversation.id.in_(include_ids)) - if exclude_ids is not None: + # Check if exclude_ids is not None and not empty to avoid WHERE false condition + if exclude_ids is not None and len(exclude_ids) > 0: stmt = stmt.where(~Conversation.id.in_(exclude_ids)) # define sort fields and directions diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 4872702a76..1280399990 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -91,14 +91,16 @@ class DatasetService: if user.current_role == TenantAccountRole.DATASET_OPERATOR: # only show datasets that the user has permission to access - if permitted_dataset_ids: + # Check if permitted_dataset_ids is not empty to avoid WHERE false condition + if permitted_dataset_ids and len(permitted_dataset_ids) > 0: query = query.where(Dataset.id.in_(permitted_dataset_ids)) else: return [], 0 else: if user.current_role != TenantAccountRole.OWNER or not include_all: # show all datasets that the user has permission to access - if permitted_dataset_ids: + # Check if permitted_dataset_ids is not empty to avoid WHERE false condition + if permitted_dataset_ids and len(permitted_dataset_ids) > 0: query = query.where( db.or_( Dataset.permission == DatasetPermissionEnum.ALL_TEAM, @@ -127,9 +129,10 @@ class DatasetService: if search: query = query.where(Dataset.name.ilike(f"%{search}%")) - if tag_ids: + # Check if tag_ids is not empty to avoid WHERE false condition + if tag_ids and len(tag_ids) > 0: target_ids = TagService.get_target_ids_by_tag_ids("knowledge", tenant_id, tag_ids) - if target_ids: + if target_ids and len(target_ids) > 0: query = query.where(Dataset.id.in_(target_ids)) else: return [], 0 @@ -158,6 +161,9 @@ class DatasetService: @staticmethod def get_datasets_by_ids(ids, tenant_id): + # Check if ids is not empty to avoid WHERE false condition + if not ids or len(ids) == 0: + return [], 0 stmt = select(Dataset).where(Dataset.id.in_(ids), Dataset.tenant_id 
== tenant_id) datasets = db.paginate(select=stmt, page=1, per_page=len(ids), max_per_page=len(ids), error_out=False) @@ -605,8 +611,9 @@ class DatasetService: except ProviderTokenNotInitError: # If we can't get the embedding model, preserve existing settings logging.warning( - f"Failed to initialize embedding model {data['embedding_model_provider']}/{data['embedding_model']}, " - f"preserving existing settings" + "Failed to initialize embedding model %s/%s, preserving existing settings", + data["embedding_model_provider"], + data["embedding_model"], ) if dataset.embedding_model_provider and dataset.embedding_model: filtered_data["embedding_model_provider"] = dataset.embedding_model_provider @@ -649,11 +656,11 @@ class DatasetService: @staticmethod def check_dataset_permission(dataset, user): if dataset.tenant_id != user.current_tenant_id: - logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") + logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") if user.current_role != TenantAccountRole.OWNER: if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id: - logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") + logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM: # For partial team permission, user needs explicit permission or be the creator @@ -662,7 +669,7 @@ class DatasetService: db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first() ) if not user_permission: - logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") + logging.debug("User %s does not have permission to access dataset %s", user.id, dataset.id) raise NoPermissionError("You do not have permission to access this dataset.") @staticmethod @@ -950,6 +957,9 @@ class DocumentService: @staticmethod def delete_documents(dataset: Dataset, document_ids: list[str]): + # Check if document_ids is not empty to avoid WHERE false condition + if not document_ids or len(document_ids) == 0: + return documents = db.session.query(Document).where(Document.id.in_(document_ids)).all() file_ids = [ document.data_source_info_dict["upload_file_id"] @@ -1000,7 +1010,7 @@ class DocumentService: db.session.add(document) db.session.commit() # set document paused flag - indexing_cache_key = "document_{}_is_paused".format(document.id) + indexing_cache_key = f"document_{document.id}_is_paused" redis_client.setnx(indexing_cache_key, "True") @staticmethod @@ -1015,7 +1025,7 @@ class DocumentService: db.session.add(document) db.session.commit() # delete paused flag - indexing_cache_key = "document_{}_is_paused".format(document.id) + indexing_cache_key = f"document_{document.id}_is_paused" redis_client.delete(indexing_cache_key) # trigger async task recover_document_indexing_task.delay(document.dataset_id, document.id) @@ -1024,7 +1034,7 @@ class DocumentService: def retry_document(dataset_id: str, documents: list[Document]): for document in documents: # add retry flag - retry_indexing_cache_key = "document_{}_is_retried".format(document.id) + retry_indexing_cache_key = f"document_{document.id}_is_retried" cache_result = redis_client.get(retry_indexing_cache_key) if cache_result is not None: raise 
ValueError("Document is being retried, please try again later") @@ -1041,7 +1051,7 @@ class DocumentService: @staticmethod def sync_website_document(dataset_id: str, document: Document): # add sync flag - sync_indexing_cache_key = "document_{}_is_sync".format(document.id) + sync_indexing_cache_key = f"document_{document.id}_is_sync" cache_result = redis_client.get(sync_indexing_cache_key) if cache_result is not None: raise ValueError("Document is being synced, please try again later") @@ -1174,12 +1184,13 @@ class DocumentService: ) else: logging.warning( - f"Invalid process rule mode: {process_rule.mode}, can not find dataset process rule" + "Invalid process rule mode: %s, can not find dataset process rule", + process_rule.mode, ) return db.session.add(dataset_process_rule) db.session.commit() - lock_name = "add_document_lock_dataset_id_{}".format(dataset.id) + lock_name = f"add_document_lock_dataset_id_{dataset.id}" with redis_client.lock(lock_name, timeout=600): position = DocumentService.get_documents_position(dataset.id) document_ids = [] @@ -1862,7 +1873,7 @@ class DocumentService: task_func.delay(*task_args) except Exception as e: # Log the error but do not rollback the transaction - logging.exception(f"Error executing async task for document {update_info['document'].id}") + logging.exception("Error executing async task for document %s", update_info["document"].id) # don't raise the error immediately, but capture it for later propagation_error = e try: @@ -1873,7 +1884,7 @@ class DocumentService: redis_client.setex(indexing_cache_key, 600, 1) except Exception as e: # Log the error but do not rollback the transaction - logging.exception(f"Error setting cache for document {update_info['document'].id}") + logging.exception("Error setting cache for document %s", update_info["document"].id) # Raise any propagation error after all updates if propagation_error: raise propagation_error @@ -2001,7 +2012,7 @@ class SegmentService: ) # calc embedding use tokens tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0] - lock_name = "add_segment_lock_document_id_{}".format(document.id) + lock_name = f"add_segment_lock_document_id_{document.id}" with redis_client.lock(lock_name, timeout=600): max_position = ( db.session.query(func.max(DocumentSegment.position)) @@ -2048,7 +2059,7 @@ class SegmentService: @classmethod def multi_create_segment(cls, segments: list, document: Document, dataset: Dataset): - lock_name = "multi_add_segment_lock_document_id_{}".format(document.id) + lock_name = f"multi_add_segment_lock_document_id_{document.id}" increment_word_count = 0 with redis_client.lock(lock_name, timeout=600): embedding_model = None @@ -2130,7 +2141,7 @@ class SegmentService: @classmethod def update_segment(cls, args: SegmentUpdateArgs, segment: DocumentSegment, document: Document, dataset: Dataset): - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" cache_result = redis_client.get(indexing_cache_key) if cache_result is not None: raise ValueError("Segment is indexing, please try again later") @@ -2300,7 +2311,7 @@ class SegmentService: @classmethod def delete_segment(cls, segment: DocumentSegment, document: Document, dataset: Dataset): - indexing_cache_key = "segment_{}_delete_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_delete_indexing" cache_result = redis_client.get(indexing_cache_key) if cache_result is not None: raise ValueError("Segment is deleting.") @@ -2318,6 +2329,9 @@ 
class SegmentService: @classmethod def delete_segments(cls, segment_ids: list, document: Document, dataset: Dataset): + # Check if segment_ids is not empty to avoid WHERE false condition + if not segment_ids or len(segment_ids) == 0: + return index_node_ids = ( db.session.query(DocumentSegment) .with_entities(DocumentSegment.index_node_id) @@ -2337,6 +2351,9 @@ class SegmentService: @classmethod def update_segments_status(cls, segment_ids: list, action: str, dataset: Dataset, document: Document): + # Check if segment_ids is not empty to avoid WHERE false condition + if not segment_ids or len(segment_ids) == 0: + return if action == "enable": segments = ( db.session.query(DocumentSegment) @@ -2352,7 +2369,7 @@ class SegmentService: return real_deal_segmment_ids = [] for segment in segments: - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" cache_result = redis_client.get(indexing_cache_key) if cache_result is not None: continue @@ -2379,7 +2396,7 @@ class SegmentService: return real_deal_segmment_ids = [] for segment in segments: - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" cache_result = redis_client.get(indexing_cache_key) if cache_result is not None: continue @@ -2398,7 +2415,7 @@ class SegmentService: def create_child_chunk( cls, content: str, segment: DocumentSegment, document: Document, dataset: Dataset ) -> ChildChunk: - lock_name = "add_child_lock_{}".format(segment.id) + lock_name = f"add_child_lock_{segment.id}" with redis_client.lock(lock_name, timeout=20): index_node_id = str(uuid.uuid4()) index_node_hash = helper.generate_text_hash(content) @@ -2598,7 +2615,8 @@ class SegmentService: DocumentSegment.document_id == document_id, DocumentSegment.tenant_id == tenant_id ) - if status_list: + # Check if status_list is not empty to avoid WHERE false condition + if status_list and len(status_list) > 0: query = query.where(DocumentSegment.status.in_(status_list)) if keyword: diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index 519d5abca5..5a3f504035 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -77,7 +77,7 @@ class HitTestingService: ) end = time.perf_counter() - logging.debug(f"Hit testing retrieve in {end - start:0.4f} seconds") + logging.debug("Hit testing retrieve in %.4f seconds", end - start) dataset_query = DatasetQuery( dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id ) @@ -113,7 +113,7 @@ class HitTestingService: ) end = time.perf_counter() - logging.debug(f"External knowledge hit testing retrieve in {end - start:0.4f} seconds") + logging.debug("External knowledge hit testing retrieve in %.4f seconds", end - start) dataset_query = DatasetQuery( dataset_id=dataset.id, content=query, source="hit_testing", created_by_role="account", created_by=account.id diff --git a/api/services/message_service.py b/api/services/message_service.py index 283b7b9b4b..a19d6ee157 100644 --- a/api/services/message_service.py +++ b/api/services/message_service.py @@ -111,7 +111,8 @@ class MessageService: base_query = base_query.where(Message.conversation_id == conversation.id) - if include_ids is not None: + # Check if include_ids is not None and not empty to avoid WHERE false condition + if include_ids is not None and len(include_ids) > 0: base_query = base_query.where(Message.id.in_(include_ids)) if last_id: diff --git
a/api/services/model_load_balancing_service.py b/api/services/model_load_balancing_service.py
index a200cfa146..fe28aa006e 100644
--- a/api/services/model_load_balancing_service.py
+++ b/api/services/model_load_balancing_service.py
@@ -340,7 +340,7 @@ class ModelLoadBalancingService:
                 config_id = str(config_id)

                 if config_id not in current_load_balancing_configs_dict:
-                    raise ValueError("Invalid load balancing config id: {}".format(config_id))
+                    raise ValueError(f"Invalid load balancing config id: {config_id}")

                 updated_config_ids.add(config_id)

@@ -349,7 +349,7 @@ class ModelLoadBalancingService:
                 # check duplicate name
                 for current_load_balancing_config in current_load_balancing_configs:
                     if current_load_balancing_config.id != config_id and current_load_balancing_config.name == name:
-                        raise ValueError("Load balancing config name {} already exists".format(name))
+                        raise ValueError(f"Load balancing config name {name} already exists")

                 if credentials:
                     if not isinstance(credentials, dict):
@@ -383,7 +383,7 @@ class ModelLoadBalancingService:
                 # check duplicate name
                 for current_load_balancing_config in current_load_balancing_configs:
                     if current_load_balancing_config.name == name:
-                        raise ValueError("Load balancing config name {} already exists".format(name))
+                        raise ValueError(f"Load balancing config name {name} already exists")

                 if not credentials:
                     raise ValueError("Invalid load balancing config credentials")
diff --git a/api/services/model_provider_service.py b/api/services/model_provider_service.py
index 0a0a5619e1..54197bf949 100644
--- a/api/services/model_provider_service.py
+++ b/api/services/model_provider_service.py
@@ -380,7 +380,7 @@ class ModelProviderService:
                 else None
             )
         except Exception as e:
-            logger.debug(f"get_default_model_of_model_type error: {e}")
+            logger.debug("get_default_model_of_model_type error: %s", e)
             return None

     def update_default_model_of_model_type(self, tenant_id: str, model_type: str, provider: str, model: str) -> None:
diff --git a/api/services/ops_service.py b/api/services/ops_service.py
index 62f37c1588..7a9db7273e 100644
--- a/api/services/ops_service.py
+++ b/api/services/ops_service.py
@@ -65,9 +65,7 @@ class OpsService:
                     }
                 )
             except Exception:
-                new_decrypt_tracing_config.update(
-                    {"project_url": "{host}/".format(host=decrypt_tracing_config.get("host"))}
-                )
+                new_decrypt_tracing_config.update({"project_url": f"{decrypt_tracing_config.get('host')}/"})

         if tracing_provider == "langsmith" and (
             "project_url" not in decrypt_tracing_config or not decrypt_tracing_config.get("project_url")
@@ -139,7 +137,7 @@ class OpsService:
             project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
         elif tracing_provider == "langfuse":
             project_key = OpsTraceManager.get_trace_config_project_key(tracing_config, tracing_provider)
-            project_url = "{host}/project/{key}".format(host=tracing_config.get("host"), key=project_key)
+            project_url = f"{tracing_config.get('host')}/project/{project_key}"
         elif tracing_provider in ("langsmith", "opik"):
             project_url = OpsTraceManager.get_trace_config_project_url(tracing_config, tracing_provider)
         else:
diff --git a/api/services/plugin/data_migration.py b/api/services/plugin/data_migration.py
index 5324036414..7a4f886bf5 100644
--- a/api/services/plugin/data_migration.py
+++ b/api/services/plugin/data_migration.py
@@ -110,7 +110,7 @@ limit 1000"""
                     )
                 )
                 logger.exception(
-                    f"[{processed_count}] Failed to migrate [{table_name}] {record_id} ({provider_name})"
+                    "[%s] Failed to migrate [%s] %s (%s)", processed_count, table_name, record_id, provider_name
                 )
                 continue

@@ -183,7 +183,7 @@ limit 1000"""
                     )
                 )
                 logger.exception(
-                    f"[{processed_count}] Failed to migrate [{table_name}] {record_id} ({provider_name})"
+                    "[%s] Failed to migrate [%s] %s (%s)", processed_count, table_name, record_id, provider_name
                 )
                 continue

diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py
index 1806fbcfd6..222d70a317 100644
--- a/api/services/plugin/plugin_migration.py
+++ b/api/services/plugin/plugin_migration.py
@@ -78,7 +78,7 @@ class PluginMigration:
                             )
                         )
                     except Exception:
-                        logger.exception(f"Failed to process tenant {tenant_id}")
+                        logger.exception("Failed to process tenant %s", tenant_id)

             futures = []

@@ -136,7 +136,7 @@ class PluginMigration:
                     try:
                         tenants.append(tenant_id)
                     except Exception:
-                        logger.exception(f"Failed to process tenant {tenant_id}")
+                        logger.exception("Failed to process tenant %s", tenant_id)
                         continue

                 futures.append(
@@ -273,7 +273,7 @@ class PluginMigration:
                            result.append(ToolProviderID(tool_entity.provider_id).plugin_id)

                except Exception:
-                    logger.exception(f"Failed to process tool {tool}")
+                    logger.exception("Failed to process tool %s", tool)
                    continue

        return result
@@ -301,7 +301,7 @@ class PluginMigration:
        plugins: dict[str, str] = {}
        plugin_ids = []
        plugin_not_exist = []
-        logger.info(f"Extracting unique plugins from {extracted_plugins}")
+        logger.info("Extracting unique plugins from %s", extracted_plugins)
        with open(extracted_plugins) as f:
            for line in f:
                data = json.loads(line)
@@ -318,7 +318,7 @@ class PluginMigration:
                else:
                    plugin_not_exist.append(plugin_id)
            except Exception:
-                logger.exception(f"Failed to fetch plugin unique identifier for {plugin_id}")
+                logger.exception("Failed to fetch plugin unique identifier for %s", plugin_id)
                plugin_not_exist.append(plugin_id)

        with ThreadPoolExecutor(max_workers=10) as executor:
@@ -339,7 +339,7 @@ class PluginMigration:

        # use a fake tenant id to install all the plugins
        fake_tenant_id = uuid4().hex
-        logger.info(f"Installing {len(plugins['plugins'])} plugin instances for fake tenant {fake_tenant_id}")
+        logger.info("Installing %s plugin instances for fake tenant %s", len(plugins["plugins"]), fake_tenant_id)

        thread_pool = ThreadPoolExecutor(max_workers=workers)

@@ -348,7 +348,7 @@ class PluginMigration:
            plugin_install_failed.extend(response.get("failed", []))

        def install(tenant_id: str, plugin_ids: list[str]) -> None:
-            logger.info(f"Installing {len(plugin_ids)} plugins for tenant {tenant_id}")
+            logger.info("Installing %s plugins for tenant %s", len(plugin_ids), tenant_id)
            # fetch plugin already installed
            installed_plugins = manager.list_plugins(tenant_id)
            installed_plugins_ids = [plugin.plugin_id for plugin in installed_plugins]
@@ -408,7 +408,7 @@ class PluginMigration:

            installation = manager.list_plugins(fake_tenant_id)
        except Exception:
-            logger.exception(f"Failed to get installation for tenant {fake_tenant_id}")
+            logger.exception("Failed to get installation for tenant %s", fake_tenant_id)

        Path(output_file).write_text(
            json.dumps(
@@ -491,7 +491,9 @@ class PluginMigration:
                    else:
                        failed.append(reverse_map[plugin.plugin_unique_identifier])
                        logger.error(
-                            f"Failed to install plugin {plugin.plugin_unique_identifier}, error: {plugin.message}"
+                            "Failed to install plugin %s, error: %s",
+                            plugin.plugin_unique_identifier,
+                            plugin.message,
                        )

                done = True
diff --git a/api/services/recommend_app/remote/remote_retrieval.py b/api/services/recommend_app/remote/remote_retrieval.py
index 80e1aefc01..85f3a02825 100644
--- a/api/services/recommend_app/remote/remote_retrieval.py
+++ b/api/services/recommend_app/remote/remote_retrieval.py
@@ -20,7 +20,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase):
        try:
            result = self.fetch_recommended_app_detail_from_dify_official(app_id)
        except Exception as e:
-            logger.warning(f"fetch recommended app detail from dify official failed: {e}, switch to built-in.")
+            logger.warning("fetch recommended app detail from dify official failed: %s, switch to built-in.", e)
            result = BuildInRecommendAppRetrieval.fetch_recommended_app_detail_from_builtin(app_id)
        return result

@@ -28,7 +28,7 @@ class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase):
        try:
            result = self.fetch_recommended_apps_from_dify_official(language)
        except Exception as e:
-            logger.warning(f"fetch recommended apps from dify official failed: {e}, switch to built-in.")
+            logger.warning("fetch recommended apps from dify official failed: %s, switch to built-in.", e)
            result = BuildInRecommendAppRetrieval.fetch_recommended_apps_from_builtin(language)
        return result
diff --git a/api/services/tag_service.py b/api/services/tag_service.py
index 75fa52a75c..2e5e96214b 100644
--- a/api/services/tag_service.py
+++ b/api/services/tag_service.py
@@ -26,6 +26,9 @@ class TagService:

    @staticmethod
    def get_target_ids_by_tag_ids(tag_type: str, current_tenant_id: str, tag_ids: list) -> list:
+        # Check if tag_ids is not empty to avoid WHERE false condition
+        if not tag_ids or len(tag_ids) == 0:
+            return []
        tags = (
            db.session.query(Tag)
            .where(Tag.id.in_(tag_ids), Tag.tenant_id == current_tenant_id, Tag.type == tag_type)
@@ -34,6 +37,9 @@ class TagService:
        if not tags:
            return []
        tag_ids = [tag.id for tag in tags]
+        # Check if tag_ids is not empty to avoid WHERE false condition
+        if not tag_ids or len(tag_ids) == 0:
+            return []
        tag_bindings = (
            db.session.query(TagBinding.target_id)
            .where(TagBinding.tag_id.in_(tag_ids), TagBinding.tenant_id == current_tenant_id)
diff --git a/api/services/tools/builtin_tools_manage_service.py b/api/services/tools/builtin_tools_manage_service.py
index 65f05d2986..841eeb4333 100644
--- a/api/services/tools/builtin_tools_manage_service.py
+++ b/api/services/tools/builtin_tools_manage_service.py
@@ -337,7 +337,7 @@ class BuiltinToolManageService:
                max_number = max(numbers)
                return f"{default_pattern} {max_number + 1}"
        except Exception as e:
-            logger.warning(f"Error generating next provider name for {provider}: {str(e)}")
+            logger.warning("Error generating next provider name for %s: %s", provider, str(e))
            # fallback
            return f"{credential_type.get_name()} 1"

diff --git a/api/services/tools/tools_transform_service.py b/api/services/tools/tools_transform_service.py
index 2d192e6f7f..52fbc0979c 100644
--- a/api/services/tools/tools_transform_service.py
+++ b/api/services/tools/tools_transform_service.py
@@ -275,7 +275,7 @@ class ToolTransformService:
                    username = user.name

            except Exception:
-                logger.exception(f"failed to get user name for api provider {db_provider.id}")
+                logger.exception("failed to get user name for api provider %s", db_provider.id)
        # add provider into providers
        credentials = db_provider.credentials
        result = ToolProviderApiEntity(
diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py
index 204c1a4f5b..a2105f8a9d 100644
--- a/api/tasks/add_document_to_index_task.py
+++ b/api/tasks/add_document_to_index_task.py
@@ -22,19 +22,19 @@ def add_document_to_index_task(dataset_document_id: str):

    Usage: add_document_to_index_task.delay(dataset_document_id)
    """
-    logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
+    logging.info(click.style(f"Start add document to index: {dataset_document_id}", fg="green"))
    start_at = time.perf_counter()

    dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == dataset_document_id).first()
    if not dataset_document:
-        logging.info(click.style("Document not found: {}".format(dataset_document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {dataset_document_id}", fg="red"))
        db.session.close()
        return

    if dataset_document.indexing_status != "completed":
        return

-    indexing_cache_key = "document_{}_indexing".format(dataset_document.id)
+    indexing_cache_key = f"document_{dataset_document.id}_indexing"

    try:
        dataset = dataset_document.dataset
@@ -101,9 +101,7 @@ def add_document_to_index_task(dataset_document_id: str):

        end_at = time.perf_counter()
        logging.info(
-            click.style(
-                "Document added to index: {} latency: {}".format(dataset_document.id, end_at - start_at), fg="green"
-            )
+            click.style(f"Document added to index: {dataset_document.id} latency: {end_at - start_at}", fg="green")
        )
    except Exception as e:
        logging.exception("add document to index failed")
diff --git a/api/tasks/annotation/add_annotation_to_index_task.py b/api/tasks/annotation/add_annotation_to_index_task.py
index 2a93c21abd..e436f00133 100644
--- a/api/tasks/annotation/add_annotation_to_index_task.py
+++ b/api/tasks/annotation/add_annotation_to_index_task.py
@@ -25,7 +25,7 @@ def add_annotation_to_index_task(

    Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct)
    """
-    logging.info(click.style("Start build index for annotation: {}".format(annotation_id), fg="green"))
+    logging.info(click.style(f"Start build index for annotation: {annotation_id}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -50,7 +50,7 @@ def add_annotation_to_index_task(
        end_at = time.perf_counter()
        logging.info(
            click.style(
-                "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at),
+                f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}",
                fg="green",
            )
        )
diff --git a/api/tasks/annotation/batch_import_annotations_task.py b/api/tasks/annotation/batch_import_annotations_task.py
index 6d48f5df89..47dc3ee90e 100644
--- a/api/tasks/annotation/batch_import_annotations_task.py
+++ b/api/tasks/annotation/batch_import_annotations_task.py
@@ -25,9 +25,9 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
    :param user_id: user_id

    """
-    logging.info(click.style("Start batch import annotation: {}".format(job_id), fg="green"))
+    logging.info(click.style(f"Start batch import annotation: {job_id}", fg="green"))
    start_at = time.perf_counter()
-    indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
+    indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
    # get app info
    app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()

@@ -85,7 +85,7 @@ def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id:
    except Exception as e:
        db.session.rollback()
        redis_client.setex(indexing_cache_key, 600, "error")
-        indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
+        indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}"
        redis_client.setex(indexing_error_msg_key, 600, str(e))
        logging.exception("Build index for batch import annotations failed")
    finally:
diff --git a/api/tasks/annotation/delete_annotation_index_task.py b/api/tasks/annotation/delete_annotation_index_task.py
index a6657e813a..f016400e16 100644
--- a/api/tasks/annotation/delete_annotation_index_task.py
+++ b/api/tasks/annotation/delete_annotation_index_task.py
@@ -15,7 +15,7 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
    """
    Async delete annotation index task
    """
-    logging.info(click.style("Start delete app annotation index: {}".format(app_id), fg="green"))
+    logging.info(click.style(f"Start delete app annotation index: {app_id}", fg="green"))
    start_at = time.perf_counter()
    try:
        dataset_collection_binding = DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
@@ -35,9 +35,7 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
        except Exception:
            logging.exception("Delete annotation index failed when annotation deleted.")
        end_at = time.perf_counter()
-        logging.info(
-            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
    except Exception as e:
        logging.exception("Annotation deleted index failed")
    finally:
diff --git a/api/tasks/annotation/disable_annotation_reply_task.py b/api/tasks/annotation/disable_annotation_reply_task.py
index 5d5d1d3ad8..0076113ce8 100644
--- a/api/tasks/annotation/disable_annotation_reply_task.py
+++ b/api/tasks/annotation/disable_annotation_reply_task.py
@@ -16,25 +16,25 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):
    """
    Async enable annotation reply task
    """
-    logging.info(click.style("Start delete app annotations index: {}".format(app_id), fg="green"))
+    logging.info(click.style(f"Start delete app annotations index: {app_id}", fg="green"))
    start_at = time.perf_counter()
    # get app info
    app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()
    annotations_count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).count()
    if not app:
-        logging.info(click.style("App not found: {}".format(app_id), fg="red"))
+        logging.info(click.style(f"App not found: {app_id}", fg="red"))
        db.session.close()
        return

    app_annotation_setting = db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app_id).first()

    if not app_annotation_setting:
-        logging.info(click.style("App annotation setting not found: {}".format(app_id), fg="red"))
+        logging.info(click.style(f"App annotation setting not found: {app_id}", fg="red"))
        db.session.close()
        return

-    disable_app_annotation_key = "disable_app_annotation_{}".format(str(app_id))
-    disable_app_annotation_job_key = "disable_app_annotation_job_{}".format(str(job_id))
+    disable_app_annotation_key = f"disable_app_annotation_{str(app_id)}"
+    disable_app_annotation_job_key = f"disable_app_annotation_job_{str(job_id)}"

    try:
        dataset = Dataset(
@@ -57,13 +57,11 @@ def disable_annotation_reply_task(job_id: str, app_id: str, tenant_id: str):

        db.session.commit()
        end_at = time.perf_counter()
-        logging.info(
-            click.style("App annotations index deleted : {} latency: {}".format(app_id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
    except Exception as e:
logging.exception("Annotation batch deleted index failed") redis_client.setex(disable_app_annotation_job_key, 600, "error") - disable_app_annotation_error_key = "disable_app_annotation_error_{}".format(str(job_id)) + disable_app_annotation_error_key = f"disable_app_annotation_error_{str(job_id)}" redis_client.setex(disable_app_annotation_error_key, 600, str(e)) finally: redis_client.delete(disable_app_annotation_key) diff --git a/api/tasks/annotation/enable_annotation_reply_task.py b/api/tasks/annotation/enable_annotation_reply_task.py index 12d10df442..44c65c0783 100644 --- a/api/tasks/annotation/enable_annotation_reply_task.py +++ b/api/tasks/annotation/enable_annotation_reply_task.py @@ -27,19 +27,19 @@ def enable_annotation_reply_task( """ Async enable annotation reply task """ - logging.info(click.style("Start add app annotation to index: {}".format(app_id), fg="green")) + logging.info(click.style(f"Start add app annotation to index: {app_id}", fg="green")) start_at = time.perf_counter() # get app info app = db.session.query(App).where(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first() if not app: - logging.info(click.style("App not found: {}".format(app_id), fg="red")) + logging.info(click.style(f"App not found: {app_id}", fg="red")) db.session.close() return annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_id).all() - enable_app_annotation_key = "enable_app_annotation_{}".format(str(app_id)) - enable_app_annotation_job_key = "enable_app_annotation_job_{}".format(str(job_id)) + enable_app_annotation_key = f"enable_app_annotation_{str(app_id)}" + enable_app_annotation_job_key = f"enable_app_annotation_job_{str(job_id)}" try: documents = [] @@ -68,7 +68,7 @@ def enable_annotation_reply_task( try: old_vector.delete() except Exception as e: - logging.info(click.style("Delete annotation index error: {}".format(str(e)), fg="red")) + logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) annotation_setting.score_threshold = score_threshold annotation_setting.collection_binding_id = dataset_collection_binding.id annotation_setting.updated_user_id = user_id @@ -104,18 +104,16 @@ def enable_annotation_reply_task( try: vector.delete_by_metadata_field("app_id", app_id) except Exception as e: - logging.info(click.style("Delete annotation index error: {}".format(str(e)), fg="red")) + logging.info(click.style(f"Delete annotation index error: {str(e)}", fg="red")) vector.create(documents) db.session.commit() redis_client.setex(enable_app_annotation_job_key, 600, "completed") end_at = time.perf_counter() - logging.info( - click.style("App annotations added to index: {} latency: {}".format(app_id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"App annotations added to index: {app_id} latency: {end_at - start_at}", fg="green")) except Exception as e: logging.exception("Annotation batch created index failed") redis_client.setex(enable_app_annotation_job_key, 600, "error") - enable_app_annotation_error_key = "enable_app_annotation_error_{}".format(str(job_id)) + enable_app_annotation_error_key = f"enable_app_annotation_error_{str(job_id)}" redis_client.setex(enable_app_annotation_error_key, 600, str(e)) db.session.rollback() finally: diff --git a/api/tasks/annotation/update_annotation_to_index_task.py b/api/tasks/annotation/update_annotation_to_index_task.py index 596ba829ad..5f11d5aa00 100644 --- a/api/tasks/annotation/update_annotation_to_index_task.py +++ 
b/api/tasks/annotation/update_annotation_to_index_task.py @@ -25,7 +25,7 @@ def update_annotation_to_index_task( Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct) """ - logging.info(click.style("Start update index for annotation: {}".format(annotation_id), fg="green")) + logging.info(click.style(f"Start update index for annotation: {annotation_id}", fg="green")) start_at = time.perf_counter() try: @@ -51,7 +51,7 @@ def update_annotation_to_index_task( end_at = time.perf_counter() logging.info( click.style( - "Build index successful for annotation: {} latency: {}".format(annotation_id, end_at - start_at), + f"Build index successful for annotation: {annotation_id} latency: {end_at - start_at}", fg="green", ) ) diff --git a/api/tasks/batch_clean_document_task.py b/api/tasks/batch_clean_document_task.py index 49bff72a96..e64a799146 100644 --- a/api/tasks/batch_clean_document_task.py +++ b/api/tasks/batch_clean_document_task.py @@ -49,7 +49,8 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form except Exception: logging.exception( "Delete image_files failed when storage deleted, \ - image_upload_file_is: {}".format(upload_file_id) + image_upload_file_is: %s", + upload_file_id, ) db.session.delete(image_file) db.session.delete(segment) @@ -61,14 +62,14 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form try: storage.delete(file.key) except Exception: - logging.exception("Delete file failed when document deleted, file_id: {}".format(file.id)) + logging.exception("Delete file failed when document deleted, file_id: %s", file.id) db.session.delete(file) db.session.commit() end_at = time.perf_counter() logging.info( click.style( - "Cleaned documents when documents deleted latency: {}".format(end_at - start_at), + f"Cleaned documents when documents deleted latency: {end_at - start_at}", fg="green", ) ) diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 64df3175e1..714e30acc3 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -1,9 +1,12 @@ import datetime import logging +import tempfile import time import uuid +from pathlib import Path import click +import pandas as pd from celery import shared_task # type: ignore from sqlalchemy import func from sqlalchemy.orm import Session @@ -12,15 +15,17 @@ from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from extensions.ext_database import db from extensions.ext_redis import redis_client +from extensions.ext_storage import storage from libs import helper from models.dataset import Dataset, Document, DocumentSegment +from models.model import UploadFile from services.vector_service import VectorService @shared_task(queue="dataset") def batch_create_segment_to_index_task( job_id: str, - content: list, + upload_file_id: str, dataset_id: str, document_id: str, tenant_id: str, @@ -29,18 +34,18 @@ def batch_create_segment_to_index_task( """ Async batch create segment to index :param job_id: - :param content: + :param upload_file_id: :param dataset_id: :param document_id: :param tenant_id: :param user_id: - Usage: batch_create_segment_to_index_task.delay(job_id, content, dataset_id, document_id, tenant_id, user_id) + Usage: batch_create_segment_to_index_task.delay(job_id, upload_file_id, dataset_id, document_id, tenant_id, user_id) """ - logging.info(click.style("Start batch create 
segment jobId: {}".format(job_id), fg="green")) + logging.info(click.style(f"Start batch create segment jobId: {job_id}", fg="green")) start_at = time.perf_counter() - indexing_cache_key = "segment_batch_import_{}".format(job_id) + indexing_cache_key = f"segment_batch_import_{job_id}" try: with Session(db.engine) as session: @@ -58,6 +63,29 @@ def batch_create_segment_to_index_task( or dataset_document.indexing_status != "completed" ): raise ValueError("Document is not available.") + + upload_file = session.get(UploadFile, upload_file_id) + if not upload_file: + raise ValueError("UploadFile not found.") + + with tempfile.TemporaryDirectory() as temp_dir: + suffix = Path(upload_file.key).suffix + # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + storage.download(upload_file.key, file_path) + + # Skip the first row + df = pd.read_csv(file_path) + content = [] + for index, row in df.iterrows(): + if dataset_document.doc_form == "qa_model": + data = {"content": row.iloc[0], "answer": row.iloc[1]} + else: + data = {"content": row.iloc[0]} + content.append(data) + if len(content) == 0: + raise ValueError("The CSV file is empty.") + document_segments = [] embedding_model = None if dataset.indexing_technique == "high_quality": @@ -115,7 +143,7 @@ def batch_create_segment_to_index_task( end_at = time.perf_counter() logging.info( click.style( - "Segment batch created job: {} latency: {}".format(job_id, end_at - start_at), + f"Segment batch created job: {job_id} latency: {end_at - start_at}", fg="green", ) ) diff --git a/api/tasks/clean_dataset_task.py b/api/tasks/clean_dataset_task.py index fad090141a..fe6d613b1c 100644 --- a/api/tasks/clean_dataset_task.py +++ b/api/tasks/clean_dataset_task.py @@ -42,7 +42,7 @@ def clean_dataset_task( Usage: clean_dataset_task.delay(dataset_id, tenant_id, indexing_technique, index_struct) """ - logging.info(click.style("Start clean dataset when dataset deleted: {}".format(dataset_id), fg="green")) + logging.info(click.style(f"Start clean dataset when dataset deleted: {dataset_id}", fg="green")) start_at = time.perf_counter() try: @@ -57,9 +57,9 @@ def clean_dataset_task( segments = db.session.query(DocumentSegment).where(DocumentSegment.dataset_id == dataset_id).all() if documents is None or len(documents) == 0: - logging.info(click.style("No documents found for dataset: {}".format(dataset_id), fg="green")) + logging.info(click.style(f"No documents found for dataset: {dataset_id}", fg="green")) else: - logging.info(click.style("Cleaning documents for dataset: {}".format(dataset_id), fg="green")) + logging.info(click.style(f"Cleaning documents for dataset: {dataset_id}", fg="green")) # Specify the index type before initializing the index processor if doc_form is None: raise ValueError("Index type must be specified.") @@ -80,7 +80,8 @@ def clean_dataset_task( except Exception: logging.exception( "Delete image_files failed when storage deleted, \ - image_upload_file_is: {}".format(upload_file_id) + image_upload_file_is: %s", + upload_file_id, ) db.session.delete(image_file) db.session.delete(segment) @@ -115,9 +116,7 @@ def clean_dataset_task( db.session.commit() end_at = time.perf_counter() logging.info( - click.style( - "Cleaned dataset when dataset deleted: {} latency: {}".format(dataset_id, end_at - start_at), fg="green" - ) + click.style(f"Cleaned dataset when dataset deleted: {dataset_id} latency: {end_at - start_at}", 
fg="green") ) except Exception: logging.exception("Cleaned dataset when dataset deleted failed") diff --git a/api/tasks/clean_document_task.py b/api/tasks/clean_document_task.py index dd7a544ff5..d690106d17 100644 --- a/api/tasks/clean_document_task.py +++ b/api/tasks/clean_document_task.py @@ -24,7 +24,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i Usage: clean_document_task.delay(document_id, dataset_id) """ - logging.info(click.style("Start clean document when document deleted: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green")) start_at = time.perf_counter() try: @@ -51,7 +51,8 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i except Exception: logging.exception( "Delete image_files failed when storage deleted, \ - image_upload_file_is: {}".format(upload_file_id) + image_upload_file_is: %s", + upload_file_id, ) db.session.delete(image_file) db.session.delete(segment) @@ -63,7 +64,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i try: storage.delete(file.key) except Exception: - logging.exception("Delete file failed when document deleted, file_id: {}".format(file_id)) + logging.exception("Delete file failed when document deleted, file_id: %s", file_id) db.session.delete(file) db.session.commit() @@ -77,7 +78,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i end_at = time.perf_counter() logging.info( click.style( - "Cleaned document when document deleted: {} latency: {}".format(document_id, end_at - start_at), + f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}", fg="green", ) ) diff --git a/api/tasks/clean_notion_document_task.py b/api/tasks/clean_notion_document_task.py index 0f72f87f15..bf1a92f038 100644 --- a/api/tasks/clean_notion_document_task.py +++ b/api/tasks/clean_notion_document_task.py @@ -19,7 +19,7 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str): Usage: clean_notion_document_task.delay(document_ids, dataset_id) """ logging.info( - click.style("Start clean document when import form notion document deleted: {}".format(dataset_id), fg="green") + click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green") ) start_at = time.perf_counter() diff --git a/api/tasks/create_segment_to_index_task.py b/api/tasks/create_segment_to_index_task.py index 5eda24674a..a8839ffc17 100644 --- a/api/tasks/create_segment_to_index_task.py +++ b/api/tasks/create_segment_to_index_task.py @@ -21,19 +21,19 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] :param keywords: Usage: create_segment_to_index_task.delay(segment_id) """ - logging.info(click.style("Start create segment to index: {}".format(segment_id), fg="green")) + logging.info(click.style(f"Start create segment to index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) + logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return if segment.status != "waiting": return - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" try: # update segment status to indexing @@ -57,17 +57,17 
@@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] dataset = segment.dataset if not dataset: - logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_type = dataset.doc_form @@ -84,9 +84,7 @@ def create_segment_to_index_task(segment_id: str, keywords: Optional[list[str]] db.session.commit() end_at = time.perf_counter() - logging.info( - click.style("Segment created to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"Segment created to index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception as e: logging.exception("create segment to index failed") segment.enabled = False diff --git a/api/tasks/deal_dataset_vector_index_task.py b/api/tasks/deal_dataset_vector_index_task.py index 7478bf5a90..8c4c1876ad 100644 --- a/api/tasks/deal_dataset_vector_index_task.py +++ b/api/tasks/deal_dataset_vector_index_task.py @@ -20,7 +20,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): :param action: action Usage: deal_dataset_vector_index_task.delay(dataset_id, action) """ - logging.info(click.style("Start deal dataset vector index: {}".format(dataset_id), fg="green")) + logging.info(click.style(f"Start deal dataset vector index: {dataset_id}", fg="green")) start_at = time.perf_counter() try: @@ -162,9 +162,7 @@ def deal_dataset_vector_index_task(dataset_id: str, action: str): index_processor.clean(dataset, None, with_keywords=False, delete_child_chunks=False) end_at = time.perf_counter() - logging.info( - click.style("Deal dataset vector index: {} latency: {}".format(dataset_id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"Deal dataset vector index: {dataset_id} latency: {end_at - start_at}", fg="green")) except Exception: logging.exception("Deal dataset vector index failed") finally: diff --git a/api/tasks/delete_account_task.py b/api/tasks/delete_account_task.py index d3b33e3052..ef50adf8d5 100644 --- a/api/tasks/delete_account_task.py +++ b/api/tasks/delete_account_task.py @@ -16,11 +16,11 @@ def delete_account_task(account_id): try: BillingService.delete_account(account_id) except Exception as e: - logger.exception(f"Failed to delete account {account_id} from billing service.") + logger.exception("Failed to delete account %s from billing service.", account_id) raise if not account: - logger.error(f"Account {account_id} not found.") + logger.error("Account %s not found.", account_id) return # send success email send_deletion_success_task.delay(account.email) diff --git a/api/tasks/delete_segment_from_index_task.py b/api/tasks/delete_segment_from_index_task.py index 66ff0f9a0a..da12355d23 100644 --- a/api/tasks/delete_segment_from_index_task.py +++ b/api/tasks/delete_segment_from_index_task.py @@ -38,7 +38,7 @@ def 
delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True) end_at = time.perf_counter() - logging.info(click.style("Segment deleted from index latency: {}".format(end_at - start_at), fg="green")) + logging.info(click.style(f"Segment deleted from index latency: {end_at - start_at}", fg="green")) except Exception: logging.exception("delete segment from index failed") finally: diff --git a/api/tasks/disable_segment_from_index_task.py b/api/tasks/disable_segment_from_index_task.py index e67ba5c76e..fa4ec15f8a 100644 --- a/api/tasks/disable_segment_from_index_task.py +++ b/api/tasks/disable_segment_from_index_task.py @@ -18,37 +18,37 @@ def disable_segment_from_index_task(segment_id: str): Usage: disable_segment_from_index_task.delay(segment_id) """ - logging.info(click.style("Start disable segment from index: {}".format(segment_id), fg="green")) + logging.info(click.style(f"Start disable segment from index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) + logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return if segment.status != "completed": - logging.info(click.style("Segment is not completed, disable is not allowed: {}".format(segment_id), fg="red")) + logging.info(click.style(f"Segment is not completed, disable is not allowed: {segment_id}", fg="red")) db.session.close() return - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" try: dataset = segment.dataset if not dataset: - logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_type = dataset_document.doc_form @@ -56,9 +56,7 @@ def disable_segment_from_index_task(segment_id: str): index_processor.clean(dataset, [segment.index_node_id]) end_at = time.perf_counter() - logging.info( - click.style("Segment removed from index: {} latency: {}".format(segment.id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"Segment removed from index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception: logging.exception("remove segment from index failed") segment.enabled = True diff --git a/api/tasks/disable_segments_from_index_task.py b/api/tasks/disable_segments_from_index_task.py index 0c8b1aabc7..f033f05084 100644 --- a/api/tasks/disable_segments_from_index_task.py +++ b/api/tasks/disable_segments_from_index_task.py @@ -25,18 +25,18 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not 
dataset: - logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan")) + logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) db.session.close() return dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: - logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan")) + logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) db.session.close() return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan")) + logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) db.session.close() return # sync index processor @@ -61,7 +61,7 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False) end_at = time.perf_counter() - logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green")) + logging.info(click.style(f"Segments removed from index latency: {end_at - start_at}", fg="green")) except Exception: # update segment error msg db.session.query(DocumentSegment).where( @@ -78,6 +78,6 @@ def disable_segments_from_index_task(segment_ids: list, dataset_id: str, documen db.session.commit() finally: for segment in segments: - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" redis_client.delete(indexing_cache_key) db.session.close() diff --git a/api/tasks/document_indexing_sync_task.py b/api/tasks/document_indexing_sync_task.py index dcc748ef18..56f330b964 100644 --- a/api/tasks/document_indexing_sync_task.py +++ b/api/tasks/document_indexing_sync_task.py @@ -22,13 +22,13 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): Usage: document_indexing_sync_task.delay(dataset_id, document_id) """ - logging.info(click.style("Start sync document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start sync document: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style("Document not found: {}".format(document_id), fg="red")) + logging.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return @@ -108,10 +108,8 @@ def document_indexing_sync_task(dataset_id: str, document_id: str): indexing_runner = IndexingRunner() indexing_runner.run([document]) end_at = time.perf_counter() - logging.info( - click.style("update document: {} latency: {}".format(document.id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: logging.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("document_indexing_sync_task failed, document_id: {}".format(document_id)) + logging.exception("document_indexing_sync_task failed, document_id: %s", document_id) diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py index ec6d10d93b..728db2e2dc 100644 --- a/api/tasks/document_indexing_task.py +++ b/api/tasks/document_indexing_task.py @@ -26,7 +26,7 @@ 
def document_indexing_task(dataset_id: str, document_ids: list): dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style("Dataset is not found: {}".format(dataset_id), fg="yellow")) + logging.info(click.style(f"Dataset is not found: {dataset_id}", fg="yellow")) db.session.close() return # check document limit @@ -60,7 +60,7 @@ def document_indexing_task(dataset_id: str, document_ids: list): return for document_id in document_ids: - logging.info(click.style("Start process document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() @@ -77,10 +77,10 @@ def document_indexing_task(dataset_id: str, document_ids: list): indexing_runner = IndexingRunner() indexing_runner.run(documents) end_at = time.perf_counter() - logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green")) + logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: logging.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("Document indexing task failed, dataset_id: {}".format(dataset_id)) + logging.exception("Document indexing task failed, dataset_id: %s", dataset_id) finally: db.session.close() diff --git a/api/tasks/document_indexing_update_task.py b/api/tasks/document_indexing_update_task.py index e53c38ddc3..053c0c5f41 100644 --- a/api/tasks/document_indexing_update_task.py +++ b/api/tasks/document_indexing_update_task.py @@ -20,13 +20,13 @@ def document_indexing_update_task(dataset_id: str, document_id: str): Usage: document_indexing_update_task.delay(dataset_id, document_id) """ - logging.info(click.style("Start update document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start update document: {document_id}", fg="green")) start_at = time.perf_counter() document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style("Document not found: {}".format(document_id), fg="red")) + logging.info(click.style(f"Document not found: {document_id}", fg="red")) db.session.close() return @@ -69,10 +69,10 @@ def document_indexing_update_task(dataset_id: str, document_id: str): indexing_runner = IndexingRunner() indexing_runner.run([document]) end_at = time.perf_counter() - logging.info(click.style("update document: {} latency: {}".format(document.id, end_at - start_at), fg="green")) + logging.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: logging.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("document_indexing_update_task failed, document_id: {}".format(document_id)) + logging.exception("document_indexing_update_task failed, document_id: %s", document_id) finally: db.session.close() diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py index b3ddface59..faa7e2b8d0 100644 --- a/api/tasks/duplicate_document_indexing_task.py +++ b/api/tasks/duplicate_document_indexing_task.py @@ -27,7 +27,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if dataset is 
None: - logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red")) + logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red")) db.session.close() return @@ -63,7 +63,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): db.session.close() for document_id in document_ids: - logging.info(click.style("Start process document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start process document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() @@ -95,10 +95,10 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list): indexing_runner = IndexingRunner() indexing_runner.run(documents) end_at = time.perf_counter() - logging.info(click.style("Processed dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green")) + logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) except DocumentIsPausedError as ex: logging.info(click.style(str(ex), fg="yellow")) except Exception: - logging.exception("duplicate_document_indexing_task failed, dataset_id: {}".format(dataset_id)) + logging.exception("duplicate_document_indexing_task failed, dataset_id: %s", dataset_id) finally: db.session.close() diff --git a/api/tasks/enable_segment_to_index_task.py b/api/tasks/enable_segment_to_index_task.py index 13822f078e..f801c9d9ee 100644 --- a/api/tasks/enable_segment_to_index_task.py +++ b/api/tasks/enable_segment_to_index_task.py @@ -21,21 +21,21 @@ def enable_segment_to_index_task(segment_id: str): Usage: enable_segment_to_index_task.delay(segment_id) """ - logging.info(click.style("Start enable segment to index: {}".format(segment_id), fg="green")) + logging.info(click.style(f"Start enable segment to index: {segment_id}", fg="green")) start_at = time.perf_counter() segment = db.session.query(DocumentSegment).where(DocumentSegment.id == segment_id).first() if not segment: - logging.info(click.style("Segment not found: {}".format(segment_id), fg="red")) + logging.info(click.style(f"Segment not found: {segment_id}", fg="red")) db.session.close() return if segment.status != "completed": - logging.info(click.style("Segment is not completed, enable is not allowed: {}".format(segment_id), fg="red")) + logging.info(click.style(f"Segment is not completed, enable is not allowed: {segment_id}", fg="red")) db.session.close() return - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" try: document = Document( @@ -51,17 +51,17 @@ def enable_segment_to_index_task(segment_id: str): dataset = segment.dataset if not dataset: - logging.info(click.style("Segment {} has no dataset, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no dataset, pass.", fg="cyan")) return dataset_document = segment.document if not dataset_document: - logging.info(click.style("Segment {} has no document, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} has no document, pass.", fg="cyan")) return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style("Segment {} document status is invalid, pass.".format(segment.id), fg="cyan")) + logging.info(click.style(f"Segment {segment.id} document status is invalid, pass.", fg="cyan")) return index_processor = 
IndexProcessorFactory(dataset_document.doc_form).init_index_processor() @@ -85,9 +85,7 @@ def enable_segment_to_index_task(segment_id: str): index_processor.load(dataset, [document]) end_at = time.perf_counter() - logging.info( - click.style("Segment enabled to index: {} latency: {}".format(segment.id, end_at - start_at), fg="green") - ) + logging.info(click.style(f"Segment enabled to index: {segment.id} latency: {end_at - start_at}", fg="green")) except Exception as e: logging.exception("enable segment to index failed") segment.enabled = False diff --git a/api/tasks/enable_segments_to_index_task.py b/api/tasks/enable_segments_to_index_task.py index e3fdf04d8c..777380631f 100644 --- a/api/tasks/enable_segments_to_index_task.py +++ b/api/tasks/enable_segments_to_index_task.py @@ -27,17 +27,17 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i start_at = time.perf_counter() dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() if not dataset: - logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan")) + logging.info(click.style(f"Dataset {dataset_id} not found, pass.", fg="cyan")) return dataset_document = db.session.query(DatasetDocument).where(DatasetDocument.id == document_id).first() if not dataset_document: - logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan")) + logging.info(click.style(f"Document {document_id} not found, pass.", fg="cyan")) db.session.close() return if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed": - logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan")) + logging.info(click.style(f"Document {document_id} status is invalid, pass.", fg="cyan")) db.session.close() return # sync index processor @@ -53,7 +53,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i .all() ) if not segments: - logging.info(click.style("Segments not found: {}".format(segment_ids), fg="cyan")) + logging.info(click.style(f"Segments not found: {segment_ids}", fg="cyan")) db.session.close() return @@ -91,7 +91,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i index_processor.load(dataset, documents) end_at = time.perf_counter() - logging.info(click.style("Segments enabled to index latency: {}".format(end_at - start_at), fg="green")) + logging.info(click.style(f"Segments enabled to index latency: {end_at - start_at}", fg="green")) except Exception as e: logging.exception("enable segments to index failed") # update segment error msg @@ -110,6 +110,6 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i db.session.commit() finally: for segment in segments: - indexing_cache_key = "segment_{}_indexing".format(segment.id) + indexing_cache_key = f"segment_{segment.id}_indexing" redis_client.delete(indexing_cache_key) db.session.close() diff --git a/api/tasks/mail_account_deletion_task.py b/api/tasks/mail_account_deletion_task.py index a6f8ce2f0b..38b5ca1800 100644 --- a/api/tasks/mail_account_deletion_task.py +++ b/api/tasks/mail_account_deletion_task.py @@ -37,12 +37,10 @@ def send_deletion_success_task(to: str, language: str = "en-US") -> None: end_at = time.perf_counter() logging.info( - click.style( - "Send account deletion success email to {}: latency: {}".format(to, end_at - start_at), fg="green" - ) + click.style(f"Send account deletion success email to {to}: latency: {end_at - 
start_at}", fg="green") ) except Exception: - logging.exception("Send account deletion success email to {} failed".format(to)) + logging.exception("Send account deletion success email to %s failed", to) @shared_task(queue="mail") @@ -83,4 +81,4 @@ def send_account_deletion_verification_code(to: str, code: str, language: str = ) ) except Exception: - logging.exception("Send account deletion verification code email to {} failed".format(to)) + logging.exception("Send account deletion verification code email to %s failed", to) diff --git a/api/tasks/mail_change_mail_task.py b/api/tasks/mail_change_mail_task.py index ea1875901c..054053558d 100644 --- a/api/tasks/mail_change_mail_task.py +++ b/api/tasks/mail_change_mail_task.py @@ -5,7 +5,7 @@ import click from celery import shared_task # type: ignore from extensions.ext_mail import mail -from libs.email_i18n import get_email_i18n_service +from libs.email_i18n import EmailType, get_email_i18n_service @shared_task(queue="mail") @@ -22,7 +22,7 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None if not mail.is_inited(): return - logging.info(click.style("Start change email mail to {}".format(to), fg="green")) + logging.info(click.style(f"Start change email mail to {to}", fg="green")) start_at = time.perf_counter() try: @@ -34,9 +34,45 @@ def send_change_mail_task(language: str, to: str, code: str, phase: str) -> None phase=phase, ) + end_at = time.perf_counter() + logging.info(click.style(f"Send change email mail to {to} succeeded: latency: {end_at - start_at}", fg="green")) + except Exception: + logging.exception("Send change email mail to %s failed", to) + + +@shared_task(queue="mail") +def send_change_mail_completed_notification_task(language: str, to: str) -> None: + """ + Send change email completed notification with internationalization support. 
+
+    Args:
+        language: Language code for email localization
+        to: Recipient email address
+    """
+    if not mail.is_inited():
+        return
+
+    logging.info(click.style(f"Start change email completed notify mail to {to}", fg="green"))
+    start_at = time.perf_counter()
+
+    try:
+        email_service = get_email_i18n_service()
+        email_service.send_email(
+            email_type=EmailType.CHANGE_EMAIL_COMPLETED,
+            language_code=language,
+            to=to,
+            template_context={
+                "to": to,
+                "email": to,
+            },
+        )
+
        end_at = time.perf_counter()
        logging.info(
-            click.style("Send change email mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green")
+            click.style(
+                f"Send change email completed mail to {to} succeeded: latency: {end_at - start_at}",
+                fg="green",
+            )
        )
    except Exception:
-        logging.exception("Send change email mail to {} failed".format(to))
+        logging.exception("Send change email completed mail to %s failed", to)
diff --git a/api/tasks/mail_email_code_login.py b/api/tasks/mail_email_code_login.py
index 34220784e9..a82ab55384 100644
--- a/api/tasks/mail_email_code_login.py
+++ b/api/tasks/mail_email_code_login.py
@@ -21,7 +21,7 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None:
    if not mail.is_inited():
        return

-    logging.info(click.style("Start email code login mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start email code login mail to {to}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -38,9 +38,7 @@ def send_email_code_login_mail_task(language: str, to: str, code: str) -> None:

        end_at = time.perf_counter()
        logging.info(
-            click.style(
-                "Send email code login mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send email code login mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
        )
    except Exception:
-        logging.exception("Send email code login mail to {} failed".format(to))
+        logging.exception("Send email code login mail to %s failed", to)
diff --git a/api/tasks/mail_enterprise_task.py b/api/tasks/mail_enterprise_task.py
index a1c2908624..9c80da06e5 100644
--- a/api/tasks/mail_enterprise_task.py
+++ b/api/tasks/mail_enterprise_task.py
@@ -15,7 +15,7 @@ def send_enterprise_email_task(to: list[str], subject: str, body: str, substitut
    if not mail.is_inited():
        return

-    logging.info(click.style("Start enterprise mail to {} with subject {}".format(to, subject), fg="green"))
+    logging.info(click.style(f"Start enterprise mail to {to} with subject {subject}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -25,8 +25,6 @@ def send_enterprise_email_task(to: list[str], subject: str, body: str, substitut
        email_service.send_raw_email(to=to, subject=subject, html_content=html_content)

        end_at = time.perf_counter()
-        logging.info(
-            click.style("Send enterprise mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Send enterprise mail to {to} succeeded: latency: {end_at - start_at}", fg="green"))
    except Exception:
-        logging.exception("Send enterprise mail to {} failed".format(to))
+        logging.exception("Send enterprise mail to %s failed", to)
diff --git a/api/tasks/mail_invite_member_task.py b/api/tasks/mail_invite_member_task.py
index 8c73de0111..ff351f08af 100644
--- a/api/tasks/mail_invite_member_task.py
+++ b/api/tasks/mail_invite_member_task.py
@@ -24,9 +24,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam
    if not mail.is_inited():
        return

-    logging.info(
-        click.style("Start send invite member mail to {} in workspace {}".format(to, workspace_name), fg="green")
-    )
+    logging.info(click.style(f"Start send invite member mail to {to} in workspace {workspace_name}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -46,9 +44,7 @@ def send_invite_member_mail_task(language: str, to: str, token: str, inviter_nam

        end_at = time.perf_counter()
        logging.info(
-            click.style(
-                "Send invite member mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send invite member mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
        )
    except Exception:
-        logging.exception("Send invite member mail to {} failed".format(to))
+        logging.exception("Send invite member mail to %s failed", to)
diff --git a/api/tasks/mail_owner_transfer_task.py b/api/tasks/mail_owner_transfer_task.py
index e566a6bc56..3856bf294a 100644
--- a/api/tasks/mail_owner_transfer_task.py
+++ b/api/tasks/mail_owner_transfer_task.py
@@ -22,7 +22,7 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac
    if not mail.is_inited():
        return

-    logging.info(click.style("Start owner transfer confirm mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start owner transfer confirm mail to {to}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -41,12 +41,12 @@ def send_owner_transfer_confirm_task(language: str, to: str, code: str, workspac
        end_at = time.perf_counter()
        logging.info(
            click.style(
-                "Send owner transfer confirm mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send owner transfer confirm mail to {to} succeeded: latency: {end_at - start_at}",
                fg="green",
            )
        )
    except Exception:
-        logging.exception("owner transfer confirm email mail to {} failed".format(to))
+        logging.exception("owner transfer confirm email mail to %s failed", to)

@shared_task(queue="mail")
@@ -63,7 +63,7 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace:
    if not mail.is_inited():
        return

-    logging.info(click.style("Start old owner transfer notify mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start old owner transfer notify mail to {to}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -82,12 +82,12 @@ def send_old_owner_transfer_notify_email_task(language: str, to: str, workspace:
        end_at = time.perf_counter()
        logging.info(
            click.style(
-                "Send old owner transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send old owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}",
                fg="green",
            )
        )
    except Exception:
-        logging.exception("old owner transfer notify email mail to {} failed".format(to))
+        logging.exception("old owner transfer notify email mail to %s failed", to)

@shared_task(queue="mail")
@@ -103,7 +103,7 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace:
    if not mail.is_inited():
        return

-    logging.info(click.style("Start new owner transfer notify mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start new owner transfer notify mail to {to}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -121,9 +121,9 @@ def send_new_owner_transfer_notify_email_task(language: str, to: str, workspace:
        end_at = time.perf_counter()
        logging.info(
            click.style(
-                "Send new owner transfer notify mail to {} succeeded: latency: {}".format(to, end_at - start_at),
+                f"Send new owner transfer notify mail to {to} succeeded: latency: {end_at - start_at}",
                fg="green",
            )
        )
    except Exception:
-        logging.exception("new owner transfer notify email mail to {} failed".format(to))
+        logging.exception("new owner transfer notify email mail to %s failed", to)
diff --git a/api/tasks/mail_reset_password_task.py b/api/tasks/mail_reset_password_task.py
index e2482f2101..b01af7827b 100644
--- a/api/tasks/mail_reset_password_task.py
+++ b/api/tasks/mail_reset_password_task.py
@@ -21,7 +21,7 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None:
    if not mail.is_inited():
        return

-    logging.info(click.style("Start password reset mail to {}".format(to), fg="green"))
+    logging.info(click.style(f"Start password reset mail to {to}", fg="green"))
    start_at = time.perf_counter()

    try:
@@ -38,9 +38,7 @@ def send_reset_password_mail_task(language: str, to: str, code: str) -> None:

        end_at = time.perf_counter()
        logging.info(
-            click.style(
-                "Send password reset mail to {} succeeded: latency: {}".format(to, end_at - start_at), fg="green"
-            )
+            click.style(f"Send password reset mail to {to} succeeded: latency: {end_at - start_at}", fg="green")
        )
    except Exception:
-        logging.exception("Send password reset mail to {} failed".format(to))
+        logging.exception("Send password reset mail to %s failed", to)
diff --git a/api/tasks/ops_trace_task.py b/api/tasks/ops_trace_task.py
index 2e77332ffe..c7e0047664 100644
--- a/api/tasks/ops_trace_task.py
+++ b/api/tasks/ops_trace_task.py
@@ -43,13 +43,11 @@ def process_trace_tasks(file_info):
        if trace_type:
            trace_info = trace_type(**trace_info)
        trace_instance.trace(trace_info)
-        logging.info(f"Processing trace tasks success, app_id: {app_id}")
+        logging.info("Processing trace tasks success, app_id: %s", app_id)
    except Exception as e:
-        logging.info(
-            f"error:\n\n\n{e}\n\n\n\n",
-        )
+        logging.info("error:\n\n\n%s\n\n\n\n", e)
        failed_key = f"{OPS_TRACE_FAILED_KEY}_{app_id}"
        redis_client.incr(failed_key)
-        logging.info(f"Processing trace tasks failed, app_id: {app_id}")
+        logging.info("Processing trace tasks failed, app_id: %s", app_id)
    finally:
        storage.delete(file_path)
diff --git a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py
index 6fcdad0525..9ea6aa6214 100644
--- a/api/tasks/process_tenant_plugin_autoupgrade_check_task.py
+++ b/api/tasks/process_tenant_plugin_autoupgrade_check_task.py
@@ -58,7 +58,7 @@ def process_tenant_plugin_autoupgrade_check_task(

        click.echo(
            click.style(
-                "Checking upgradable plugin for tenant: {}".format(tenant_id),
+                f"Checking upgradable plugin for tenant: {tenant_id}",
                fg="green",
            )
        )
@@ -68,7 +68,7 @@ def process_tenant_plugin_autoupgrade_check_task(
        # get plugin_ids to check
        plugin_ids: list[tuple[str, str, str]] = []  # plugin_id, version, unique_identifier

-        click.echo(click.style("Upgrade mode: {}".format(upgrade_mode), fg="green"))
+        click.echo(click.style(f"Upgrade mode: {upgrade_mode}", fg="green"))

        if upgrade_mode == TenantPluginAutoUpgradeStrategy.UpgradeMode.PARTIAL and include_plugins:
            all_plugins = manager.list_plugins(tenant_id)
@@ -142,7 +142,7 @@ def process_tenant_plugin_autoupgrade_check_task(
                        marketplace.record_install_plugin_event(new_unique_identifier)
                    click.echo(
                        click.style(
-                            "Upgrade plugin: {} -> {}".format(original_unique_identifier, new_unique_identifier),
+                            f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",
                            fg="green",
                        )
                    )
@@ -156,11 +156,11 @@ def process_tenant_plugin_autoupgrade_check_task(
                        },
                    )
                except Exception as e:
-                    click.echo(click.style("Error when upgrading plugin: {}".format(e), fg="red"))
+                    click.echo(click.style(f"Error when upgrading plugin: {e}", fg="red"))
                    traceback.print_exc()
                    break

    except Exception as e:
-        click.echo(click.style("Error when checking upgradable plugin: {}".format(e), fg="red"))
+        click.echo(click.style(f"Error when checking upgradable plugin: {e}", fg="red"))
        traceback.print_exc()
        return
diff --git a/api/tasks/recover_document_indexing_task.py b/api/tasks/recover_document_indexing_task.py
index dfb2389579..ff489340cd 100644
--- a/api/tasks/recover_document_indexing_task.py
+++ b/api/tasks/recover_document_indexing_task.py
@@ -18,13 +18,13 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):

    Usage: recover_document_indexing_task.delay(dataset_id, document_id)
    """
-    logging.info(click.style("Recover document: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Recover document: {document_id}", fg="green"))
    start_at = time.perf_counter()

    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

    if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
        db.session.close()
        return

@@ -37,12 +37,10 @@ def recover_document_indexing_task(dataset_id: str, document_id: str):
        elif document.indexing_status == "indexing":
            indexing_runner.run_in_indexing_status(document)
        end_at = time.perf_counter()
-        logging.info(
-            click.style("Processed document: {} latency: {}".format(document.id, end_at - start_at), fg="green")
-        )
+        logging.info(click.style(f"Processed document: {document.id} latency: {end_at - start_at}", fg="green"))
    except DocumentIsPausedError as ex:
        logging.info(click.style(str(ex), fg="yellow"))
    except Exception:
-        logging.exception("recover_document_indexing_task failed, document_id: {}".format(document_id))
+        logging.exception("recover_document_indexing_task failed, document_id: %s", document_id)
    finally:
        db.session.close()
diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py
index 1619f8c546..b6f772dd60 100644
--- a/api/tasks/remove_app_and_related_data_task.py
+++ b/api/tasks/remove_app_and_related_data_task.py
@@ -201,7 +201,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
        batch_size=1000,
    )

-    logging.info(f"Deleted {deleted_count} workflow runs for app {app_id}")
+    logging.info("Deleted %s workflow runs for app %s", deleted_count, app_id)

def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
@@ -215,7 +215,7 @@ def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
        batch_size=1000,
    )

-    logging.info(f"Deleted {deleted_count} workflow node executions for app {app_id}")
+    logging.info("Deleted %s workflow node executions for app %s", deleted_count, app_id)

def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
@@ -342,6 +342,6 @@ def _delete_records(query_sql: str, params: dict, delete_func: Callable, name: s
                db.session.commit()
                logging.info(click.style(f"Deleted {name} {record_id}", fg="green"))
            except Exception:
-                logging.exception(f"Error occurred while deleting {name} {record_id}")
+                logging.exception("Error occurred while deleting %s %s", name, record_id)
                continue
        rs.close()
diff --git a/api/tasks/remove_document_from_index_task.py b/api/tasks/remove_document_from_index_task.py
index 3f73cc7b40..524130a297 100644
--- a/api/tasks/remove_document_from_index_task.py
+++ b/api/tasks/remove_document_from_index_task.py
@@ -19,21 +19,21 @@ def remove_document_from_index_task(document_id: str):

    Usage: remove_document_from_index.delay(document_id)
    """
-    logging.info(click.style("Start remove document segments from index: {}".format(document_id), fg="green"))
+    logging.info(click.style(f"Start remove document segments from index: {document_id}", fg="green"))
    start_at = time.perf_counter()

    document = db.session.query(Document).where(Document.id == document_id).first()
    if not document:
-        logging.info(click.style("Document not found: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document not found: {document_id}", fg="red"))
        db.session.close()
        return

    if document.indexing_status != "completed":
-        logging.info(click.style("Document is not completed, remove is not allowed: {}".format(document_id), fg="red"))
+        logging.info(click.style(f"Document is not completed, remove is not allowed: {document_id}", fg="red"))
        db.session.close()
        return

-    indexing_cache_key = "document_{}_indexing".format(document.id)
+    indexing_cache_key = f"document_{document.id}_indexing"

    try:
        dataset = document.dataset
@@ -49,7 +49,7 @@ def remove_document_from_index_task(document_id: str):
            try:
                index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)
            except Exception:
-                logging.exception(f"clean dataset {dataset.id} from index failed")
+                logging.exception("clean dataset %s from index failed", dataset.id)
            # update segment to disable
            db.session.query(DocumentSegment).where(DocumentSegment.document_id == document.id).update(
                {
@@ -63,9 +63,7 @@ def remove_document_from_index_task(document_id: str):

        end_at = time.perf_counter()
        logging.info(
-            click.style(
-                "Document removed from index: {} latency: {}".format(document.id, end_at - start_at), fg="green"
-            )
+            click.style(f"Document removed from index: {document.id} latency: {end_at - start_at}", fg="green")
        )
    except Exception:
        logging.exception("remove document from index failed")
diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py
index 58f0156afb..26b41aff2e 100644
--- a/api/tasks/retry_document_indexing_task.py
+++ b/api/tasks/retry_document_indexing_task.py
@@ -24,79 +24,83 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
    """
    documents: list[Document] = []
    start_at = time.perf_counter()
+    try:
+        dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
+        if not dataset:
+            logging.info(click.style(f"Dataset not found: {dataset_id}", fg="red"))
+            return
+        tenant_id = dataset.tenant_id
+        for document_id in document_ids:
+            retry_indexing_cache_key = f"document_{document_id}_is_retried"
+            # check document limit
+            features = FeatureService.get_features(tenant_id)
+            try:
+                if features.billing.enabled:
+                    vector_space = features.vector_space
+                    if 0 < vector_space.limit <= vector_space.size:
+                        raise ValueError(
+                            "Your total number of documents plus the number of uploads have over the limit of "
+                            "your subscription."
+ ) + except Exception as e: + document = ( + db.session.query(Document) + .where(Document.id == document_id, Document.dataset_id == dataset_id) + .first() + ) + if document: + document.indexing_status = "error" + document.error = str(e) + document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + db.session.add(document) + db.session.commit() + redis_client.delete(retry_indexing_cache_key) + return - dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first() - if not dataset: - logging.info(click.style("Dataset not found: {}".format(dataset_id), fg="red")) - db.session.close() - return - tenant_id = dataset.tenant_id - for document_id in document_ids: - retry_indexing_cache_key = "document_{}_is_retried".format(document_id) - # check document limit - features = FeatureService.get_features(tenant_id) - try: - if features.billing.enabled: - vector_space = features.vector_space - if 0 < vector_space.limit <= vector_space.size: - raise ValueError( - "Your total number of documents plus the number of uploads have over the limit of " - "your subscription." - ) - except Exception as e: + logging.info(click.style(f"Start retry document: {document_id}", fg="green")) document = ( db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() ) - if document: + if not document: + logging.info(click.style(f"Document not found: {document_id}", fg="yellow")) + return + try: + # clean old data + index_processor = IndexProcessorFactory(document.doc_form).init_index_processor() + + segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() + if segments: + index_node_ids = [segment.index_node_id for segment in segments] + # delete from vector index + index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True) + + for segment in segments: + db.session.delete(segment) + db.session.commit() + + document.indexing_status = "parsing" + document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + db.session.add(document) + db.session.commit() + + indexing_runner = IndexingRunner() + indexing_runner.run([document]) + redis_client.delete(retry_indexing_cache_key) + except Exception as ex: document.indexing_status = "error" - document.error = str(e) + document.error = str(ex) document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) db.session.add(document) db.session.commit() - redis_client.delete(retry_indexing_cache_key) - db.session.close() - return - - logging.info(click.style("Start retry document: {}".format(document_id), fg="green")) - document = ( - db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() + logging.info(click.style(str(ex), fg="yellow")) + redis_client.delete(retry_indexing_cache_key) + logging.exception("retry_document_indexing_task failed, document_id: %s", document_id) + end_at = time.perf_counter() + logging.info(click.style(f"Retry dataset: {dataset_id} latency: {end_at - start_at}", fg="green")) + except Exception as e: + logging.exception( + "retry_document_indexing_task failed, dataset_id: %s, document_ids: %s", dataset_id, document_ids ) - if not document: - logging.info(click.style("Document not found: {}".format(document_id), fg="yellow")) - db.session.close() - return - try: - # clean old data - index_processor = IndexProcessorFactory(document.doc_form).init_index_processor() - - segments = 
db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all() - if segments: - index_node_ids = [segment.index_node_id for segment in segments] - # delete from vector index - index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True) - - for segment in segments: - db.session.delete(segment) - db.session.commit() - - document.indexing_status = "parsing" - document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.add(document) - db.session.commit() - - indexing_runner = IndexingRunner() - indexing_runner.run([document]) - redis_client.delete(retry_indexing_cache_key) - except Exception as ex: - document.indexing_status = "error" - document.error = str(ex) - document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - db.session.add(document) - db.session.commit() - logging.info(click.style(str(ex), fg="yellow")) - redis_client.delete(retry_indexing_cache_key) - logging.exception("retry_document_indexing_task failed, document_id: {}".format(document_id)) - finally: - db.session.close() - end_at = time.perf_counter() - logging.info(click.style("Retry dataset: {} latency: {}".format(dataset_id, end_at - start_at), fg="green")) + raise e + finally: + db.session.close() diff --git a/api/tasks/sync_website_document_indexing_task.py b/api/tasks/sync_website_document_indexing_task.py index 539c2db80f..f112a97d2f 100644 --- a/api/tasks/sync_website_document_indexing_task.py +++ b/api/tasks/sync_website_document_indexing_task.py @@ -28,7 +28,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): if dataset is None: raise ValueError("Dataset not found") - sync_indexing_cache_key = "document_{}_is_sync".format(document_id) + sync_indexing_cache_key = f"document_{document_id}_is_sync" # check document limit features = FeatureService.get_features(dataset.tenant_id) try: @@ -52,10 +52,10 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): redis_client.delete(sync_indexing_cache_key) return - logging.info(click.style("Start sync website document: {}".format(document_id), fg="green")) + logging.info(click.style(f"Start sync website document: {document_id}", fg="green")) document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first() if not document: - logging.info(click.style("Document not found: {}".format(document_id), fg="yellow")) + logging.info(click.style(f"Document not found: {document_id}", fg="yellow")) return try: # clean old data @@ -87,6 +87,6 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str): db.session.commit() logging.info(click.style(str(ex), fg="yellow")) redis_client.delete(sync_indexing_cache_key) - logging.exception("sync_website_document_indexing_task failed, document_id: {}".format(document_id)) + logging.exception("sync_website_document_indexing_task failed, document_id: %s", document_id) end_at = time.perf_counter() - logging.info(click.style("Sync document: {} latency: {}".format(document_id, end_at - start_at), fg="green")) + logging.info(click.style(f"Sync document: {document_id} latency: {end_at - start_at}", fg="green")) diff --git a/api/templates/change_mail_completed_template_en-US.html b/api/templates/change_mail_completed_template_en-US.html new file mode 100644 index 0000000000..ecaf35868d --- /dev/null +++ b/api/templates/change_mail_completed_template_en-US.html @@ -0,0 +1,135 @@ + + + + + + + + +
[markup omitted; template copy:]
+ Dify Logo
+ Your login email has been changed
+ You can now log into Dify with your new email address:
+ {{email}}
+ If you did not make this change, email support@dify.ai.
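Most of the api/tasks hunks above make one mechanical change: eager str.format/f-string interpolation inside logging calls becomes lazy %-style arguments. A minimal sketch of the difference (the recipient address is a made-up example, not from the diff):

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
to = "user@example.com"  # illustrative recipient only

try:
    raise RuntimeError("SMTP connection refused")  # stand-in failure
except Exception:
    # Eager: the f-string is built before the call, even when the record
    # would be filtered out, and every address yields a distinct message.
    logger.exception(f"Send password reset mail to {to} failed")
    # Lazy: arguments are interpolated only if the record is emitted, and
    # log aggregators can group on the constant template string.
    logger.exception("Send password reset mail to %s failed", to)
```

Note the `click.style(f"...")` calls keep their f-strings: there the colored string is the message itself, so only the template-plus-args form benefits from deferral.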
+ + + + diff --git a/api/templates/change_mail_completed_template_zh-CN.html b/api/templates/change_mail_completed_template_zh-CN.html new file mode 100644 index 0000000000..b4fdb4b9ab --- /dev/null +++ b/api/templates/change_mail_completed_template_zh-CN.html @@ -0,0 +1,135 @@ + + + + + + + + +
[markup omitted; template copy:]
+ Dify Logo
+ 您的登录邮箱已更改
+ 您现在可以使用新的电子邮件地址登录 Dify:
+ {{email}}
+ 如果您没有进行此更改,请发送电子邮件至 support@dify.ai
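The change-mail templates added here expose an {{email}} placeholder, and the without-brand variants below add {{application_title}}. A minimal rendering sketch, assuming Jinja-style substitution; the template source is a trimmed stand-in and the project's actual mail pipeline may differ:

```python
from jinja2 import Template  # assumes Jinja2 is available

# Hypothetical, trimmed stand-in for one of the HTML files above.
source = (
    "<p>Your login email has been changed</p>"
    "<p>You can now log into Dify with your new email address:</p>"
    "<p>{{email}}</p>"
)

html = Template(source).render(email="new-address@example.com")
print(html)  # the placeholder is replaced with the supplied value
```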
+ + + + diff --git a/api/templates/without-brand/change_mail_completed_template_en-US.html b/api/templates/without-brand/change_mail_completed_template_en-US.html new file mode 100644 index 0000000000..f211cc74d9 --- /dev/null +++ b/api/templates/without-brand/change_mail_completed_template_en-US.html @@ -0,0 +1,132 @@ + + + + + + + + +
[markup omitted; template copy:]
+ Your login email has been changed
+ You can now log into {{application_title}} with your new email address:
+ {{email}}
+ If you did not make this change, please ignore this email or contact support immediately.
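retry_document_indexing_task, reworked earlier in this diff, brackets each retry with a per-document Redis flag (document_<id>_is_retried) and deletes it on success and on failure so later retries are not blocked. A sketch of that guard pattern with redis-py; in the diff the flag is set by the caller, so the set-if-absent and expiry details below are illustrative assumptions:

```python
import redis

redis_client = redis.Redis()  # connection details are illustrative

def retry_document(document_id: str) -> None:
    cache_key = f"document_{document_id}_is_retried"
    # nx=True lets only the first caller claim the flag; ex= expires a
    # stale flag if a worker dies before reaching the finally block.
    if not redis_client.set(cache_key, 1, nx=True, ex=600):
        return  # a retry is already in flight for this document
    try:
        ...  # re-run indexing here
    finally:
        # Clear the flag on both success and failure, as the task does,
        # so the document can be retried again later.
        redis_client.delete(cache_key)
```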
+ + + + diff --git a/api/templates/without-brand/change_mail_completed_template_zh-CN.html b/api/templates/without-brand/change_mail_completed_template_zh-CN.html new file mode 100644 index 0000000000..c96604f0e5 --- /dev/null +++ b/api/templates/without-brand/change_mail_completed_template_zh-CN.html @@ -0,0 +1,132 @@ + + + + + + + + +
[markup omitted; template copy:]
+ 您的登录邮箱已更改
+ 您现在可以使用新的电子邮件地址登录 {{application_title}}:
+ {{email}}
+ 如果您没有进行此更改,请忽略此电子邮件或立即联系支持。
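The check-i18n test added further down collects dot-separated paths to leaf values only, skipping intermediate objects (and, per its edge-case test, contributing nothing for empty objects). The same traversal sketched in Python for brevity; the project's actual script is TypeScript:

```python
def flatten_keys(obj: dict, prefix: str = "") -> list[str]:
    """Collect dot-separated paths to leaf values (non-dict entries)."""
    keys: list[str] = []
    for key, value in obj.items():
        path = f"{prefix}.{key}" if prefix else key
        if isinstance(value, dict):
            # Recurse into objects; the node itself is not a key, and an
            # empty object contributes no keys at all.
            keys.extend(flatten_keys(value, path))
        else:
            keys.append(path)  # leaf: string, number, boolean, or list
    return keys

en = {"common": {"save": "Save", "cancel": "Cancel"}, "title": "App"}
zh = {"common": {"save": "保存"}, "title": "应用"}
missing = [k for k in flatten_keys(en) if k not in flatten_keys(zh)]
assert missing == ["common.cancel"]
```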
+ + + + diff --git a/api/tests/unit_tests/core/ops/test_config_entity.py b/api/tests/unit_tests/core/ops/test_config_entity.py index 81cb04548d..209f8b7c57 100644 --- a/api/tests/unit_tests/core/ops/test_config_entity.py +++ b/api/tests/unit_tests/core/ops/test_config_entity.py @@ -117,6 +117,13 @@ class TestLangfuseConfig: assert config.secret_key == "secret_key" assert config.host == "https://custom.langfuse.com" + def test_valid_config_with_path(self): + host = "https://custom.langfuse.com/api/v1" + config = LangfuseConfig(public_key="public_key", secret_key="secret_key", host=host) + assert config.public_key == "public_key" + assert config.secret_key == "secret_key" + assert config.host == host + def test_default_values(self): """Test default values are set correctly""" config = LangfuseConfig(public_key="public", secret_key="secret") diff --git a/api/tests/unit_tests/core/tools/utils/test_parser.py b/api/tests/unit_tests/core/tools/utils/test_parser.py index 8e07293ce0..e1eab21ca4 100644 --- a/api/tests/unit_tests/core/tools/utils/test_parser.py +++ b/api/tests/unit_tests/core/tools/utils/test_parser.py @@ -54,3 +54,58 @@ def test_parse_openapi_to_tool_bundle_operation_id(app): assert tool_bundles[0].operation_id == "_get" assert tool_bundles[1].operation_id == "apiresources_get" assert tool_bundles[2].operation_id == "createResource" + + +def test_parse_openapi_to_tool_bundle_properties_all_of(app): + openapi = { + "openapi": "3.0.0", + "info": {"title": "Simple API", "version": "1.0.0"}, + "servers": [{"url": "http://localhost:3000"}], + "paths": { + "/api/resource": { + "get": { + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Request", + }, + }, + }, + "required": True, + }, + }, + }, + }, + "components": { + "schemas": { + "Request": { + "type": "object", + "properties": { + "prop1": { + "enum": ["option1"], + "description": "desc prop1", + "allOf": [ + {"$ref": "#/components/schemas/AllOfItem"}, + { + "enum": ["option2"], + }, + ], + }, + }, + }, + "AllOfItem": { + "type": "string", + "enum": ["option3"], + "description": "desc allOf item", + }, + } + }, + } + with app.test_request_context(): + tool_bundles = ApiBasedToolSchemaParser.parse_openapi_to_tool_bundle(openapi) + + assert tool_bundles[0].parameters[0].type == "string" + assert tool_bundles[0].parameters[0].llm_description == "desc prop1" + # TODO: support enum in OpenAPI + # assert set(tool_bundles[0].parameters[0].options) == {"option1", "option2", "option3"} diff --git a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py index 0eaabd0c40..1d37b4803c 100644 --- a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py @@ -111,5 +111,5 @@ def test_tool_node_on_tool_invoke_error(monkeypatch: pytest.MonkeyPatch): assert isinstance(result, NodeRunResult) assert result.status == WorkflowNodeExecutionStatus.FAILED assert "oops" in result.error - assert "Failed to transform tool message:" in result.error + assert "Failed to invoke tool" in result.error assert result.error_type == "ToolInvokeError" diff --git a/api/tests/unit_tests/factories/test_build_from_mapping.py b/api/tests/unit_tests/factories/test_build_from_mapping.py index d42c4412f5..39280c9267 100644 --- a/api/tests/unit_tests/factories/test_build_from_mapping.py +++ b/api/tests/unit_tests/factories/test_build_from_mapping.py @@ -21,7 +21,7 @@ 
TEST_REMOTE_URL = "http://example.com/test.jpg" # Test Config TEST_CONFIG = FileUploadConfig( - allowed_file_types=["image", "document"], + allowed_file_types=[FileType.IMAGE, FileType.DOCUMENT], allowed_file_extensions=[".jpg", ".pdf"], allowed_file_upload_methods=[FileTransferMethod.LOCAL_FILE, FileTransferMethod.TOOL_FILE], number_limits=10, @@ -171,10 +171,10 @@ def test_build_without_type_specification(mock_upload_file): mapping = { "transfer_method": "local_file", "upload_file_id": TEST_UPLOAD_FILE_ID, - # leave out the type + # type field is intentionally omitted } file = build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID) - # It should automatically infer the type as "image" based on the file extension + # Should automatically infer the type as "image" based on the file extension assert file.type == FileType.IMAGE @@ -194,3 +194,81 @@ def test_file_validation_with_config(mock_upload_file, file_type, should_pass, e else: with pytest.raises(ValueError, match=expected_error): build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, config=TEST_CONFIG) + + +def test_invalid_transfer_method(): + """Test that invalid transfer method raises ValueError.""" + mapping = { + "transfer_method": "invalid_method", + "upload_file_id": TEST_UPLOAD_FILE_ID, + "type": "image", + } + with pytest.raises(ValueError, match="No matching enum found for value 'invalid_method'"): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID) + + +def test_invalid_uuid_format(): + """Test that invalid UUID format raises ValueError.""" + mapping = { + "transfer_method": "local_file", + "upload_file_id": "not-a-valid-uuid", + "type": "image", + } + with pytest.raises(ValueError, match="Invalid upload file id format"): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID) + + +def test_tenant_mismatch(): + """Test that tenant mismatch raises security error.""" + # Create a mock upload file with a different tenant_id + mock_file = MagicMock(spec=UploadFile) + mock_file.id = TEST_UPLOAD_FILE_ID + mock_file.tenant_id = "different_tenant_id" + mock_file.name = "test.jpg" + mock_file.extension = "jpg" + mock_file.mime_type = "image/jpeg" + mock_file.source_url = TEST_REMOTE_URL + mock_file.size = 1024 + mock_file.key = "test_key" + + # Mock the database query to return None (no file found for this tenant) + with patch("factories.file_factory.db.session.scalar", return_value=None): + mapping = local_file_mapping() + with pytest.raises(ValueError, match="Invalid upload file"): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID) + + +def test_disallowed_file_types(mock_upload_file): + """Test that disallowed file types are rejected.""" + # Config that only allows image and document types + restricted_config = FileUploadConfig( + allowed_file_types=[FileType.IMAGE, FileType.DOCUMENT], + ) + + # Try to upload a video file + mapping = local_file_mapping(file_type="video") + with pytest.raises(ValueError, match="File validation failed"): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, config=restricted_config) + + +def test_disallowed_extensions(mock_upload_file): + """Test that disallowed file extensions are rejected for custom type.""" + # Mock a file with .exe extension + mock_upload_file.return_value.extension = "exe" + mock_upload_file.return_value.name = "malicious.exe" + mock_upload_file.return_value.mime_type = "application/x-msdownload" + + # Config that only allows specific extensions for custom files + restricted_config = FileUploadConfig( + 
allowed_file_extensions=[".txt", ".csv", ".json"], + ) + + # Mapping without specifying type (will be detected as custom) + mapping = { + "transfer_method": "local_file", + "upload_file_id": TEST_UPLOAD_FILE_ID, + "type": "custom", + } + + with pytest.raises(ValueError, match="File validation failed"): + build_from_mapping(mapping=mapping, tenant_id=TEST_TENANT_ID, config=restricted_config) diff --git a/api/tests/unit_tests/services/test_dataset_permission.py b/api/tests/unit_tests/services/test_dataset_permission.py index a67252e856..c1e4981325 100644 --- a/api/tests/unit_tests/services/test_dataset_permission.py +++ b/api/tests/unit_tests/services/test_dataset_permission.py @@ -301,5 +301,5 @@ class TestDatasetPermissionService: # Verify debug message was logged with correct user and dataset information mock_logging_dependencies["logging"].debug.assert_called_with( - f"User {normal_user.id} does not have permission to access dataset {dataset.id}" + "User %s does not have permission to access dataset %s", normal_user.id, dataset.id ) diff --git a/api/uv.lock b/api/uv.lock index 623b125ab3..0bce38812e 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1217,7 +1217,7 @@ wheels = [ [[package]] name = "dify-api" -version = "1.7.0" +version = "1.7.1" source = { virtual = "." } dependencies = [ { name = "arize-phoenix-otel" }, diff --git a/docker/.env.example b/docker/.env.example index 88cc544730..9d15ba53d3 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -52,6 +52,11 @@ FILES_URL= # Example: INTERNAL_FILES_URL=http://api:5001 INTERNAL_FILES_URL= +# Ensure UTF-8 encoding +LANG=en_US.UTF-8 +LC_ALL=en_US.UTF-8 +PYTHONIOENCODING=utf-8 + # ------------------------------ # Server Configuration # ------------------------------ diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 394a068200..fe8e4602b7 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -31,7 +31,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -58,7 +58,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -76,7 +76,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:1.7.0 + image: langgenius/dify-web:1.7.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 3408fef0c2..9f7cc72586 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -20,7 +20,7 @@ services: ports: - "${EXPOSE_POSTGRES_PORT:-5432}:5432" healthcheck: - test: [ "CMD", "pg_isready" ] + test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ] interval: 1s timeout: 3s retries: 30 diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index c2ef2ff723..9e0f78eb07 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -12,6 +12,9 @@ x-shared-env: &shared-api-worker-env APP_WEB_URL: ${APP_WEB_URL:-} FILES_URL: ${FILES_URL:-} INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} + LANG: ${LANG:-en_US.UTF-8} + LC_ALL: ${LC_ALL:-en_US.UTF-8} + PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8} LOG_LEVEL: ${LOG_LEVEL:-INFO} LOG_FILE: ${LOG_FILE:-/app/logs/server.log} LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20} @@ -539,7 +542,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -568,7 +571,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -595,7 +598,7 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: - image: langgenius/dify-api:1.7.0 + image: langgenius/dify-api:1.7.1 restart: always environment: # Use the shared environment variables. @@ -613,7 +616,7 @@ services: # Frontend web application. 
web: - image: langgenius/dify-web:1.7.0 + image: langgenius/dify-web:1.7.1 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} diff --git a/images/GitHub_README_if.png b/images/GitHub_README_if.png index 10c9d87b08..281d95cf9c 100644 Binary files a/images/GitHub_README_if.png and b/images/GitHub_README_if.png differ diff --git a/web/__tests__/check-i18n.test.ts b/web/__tests__/check-i18n.test.ts new file mode 100644 index 0000000000..173aa96118 --- /dev/null +++ b/web/__tests__/check-i18n.test.ts @@ -0,0 +1,569 @@ +import fs from 'node:fs' +import path from 'node:path' + +// Mock functions to simulate the check-i18n functionality +const vm = require('node:vm') +const transpile = require('typescript').transpile + +describe('check-i18n script functionality', () => { + const testDir = path.join(__dirname, '../i18n-test') + const testEnDir = path.join(testDir, 'en-US') + const testZhDir = path.join(testDir, 'zh-Hans') + + // Helper function that replicates the getKeysFromLanguage logic + async function getKeysFromLanguage(language: string, testPath = testDir): Promise { + return new Promise((resolve, reject) => { + const folderPath = path.resolve(testPath, language) + const allKeys: string[] = [] + + if (!fs.existsSync(folderPath)) { + resolve([]) + return + } + + fs.readdir(folderPath, (err, files) => { + if (err) { + reject(err) + return + } + + const translationFiles = files.filter(file => /\.(ts|js)$/.test(file)) + + translationFiles.forEach((file) => { + const filePath = path.join(folderPath, file) + const fileName = file.replace(/\.[^/.]+$/, '') + const camelCaseFileName = fileName.replace(/[-_](.)/g, (_, c) => + c.toUpperCase(), + ) + + try { + const content = fs.readFileSync(filePath, 'utf8') + const moduleExports = {} + const context = { + exports: moduleExports, + module: { exports: moduleExports }, + require, + console, + __filename: filePath, + __dirname: folderPath, + } + + vm.runInNewContext(transpile(content), context) + const translationObj = moduleExports.default || moduleExports + + if(!translationObj || typeof translationObj !== 'object') + throw new Error(`Error parsing file: ${filePath}`) + + const nestedKeys: string[] = [] + const iterateKeys = (obj: any, prefix = '') => { + for (const key in obj) { + const nestedKey = prefix ? 
`${prefix}.${key}` : key + if (typeof obj[key] === 'object' && obj[key] !== null && !Array.isArray(obj[key])) { + // This is an object (but not array), recurse into it but don't add it as a key + iterateKeys(obj[key], nestedKey) + } + else { + // This is a leaf node (string, number, boolean, array, etc.), add it as a key + nestedKeys.push(nestedKey) + } + } + } + iterateKeys(translationObj) + + const fileKeys = nestedKeys.map(key => `${camelCaseFileName}.${key}`) + allKeys.push(...fileKeys) + } + catch (error) { + reject(error) + } + }) + resolve(allKeys) + }) + }) + } + + beforeEach(() => { + // Clean up and create test directories + if (fs.existsSync(testDir)) + fs.rmSync(testDir, { recursive: true }) + + fs.mkdirSync(testDir, { recursive: true }) + fs.mkdirSync(testEnDir, { recursive: true }) + fs.mkdirSync(testZhDir, { recursive: true }) + }) + + afterEach(() => { + // Clean up test files + if (fs.existsSync(testDir)) + fs.rmSync(testDir, { recursive: true }) + }) + + describe('Key extraction logic', () => { + it('should extract only leaf node keys, not intermediate objects', async () => { + const testContent = `const translation = { + simple: 'Simple Value', + nested: { + level1: 'Level 1 Value', + deep: { + level2: 'Level 2 Value' + } + }, + array: ['not extracted'], + number: 42, + boolean: true +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'test.ts'), testContent) + + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toEqual([ + 'test.simple', + 'test.nested.level1', + 'test.nested.deep.level2', + 'test.array', + 'test.number', + 'test.boolean', + ]) + + // Should not include intermediate object keys + expect(keys).not.toContain('test.nested') + expect(keys).not.toContain('test.nested.deep') + }) + + it('should handle camelCase file name conversion correctly', async () => { + const testContent = `const translation = { + key: 'value' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'app-debug.ts'), testContent) + fs.writeFileSync(path.join(testEnDir, 'user_profile.ts'), testContent) + + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toContain('appDebug.key') + expect(keys).toContain('userProfile.key') + }) + }) + + describe('Missing keys detection', () => { + it('should detect missing keys in target language', async () => { + const enContent = `const translation = { + common: { + save: 'Save', + cancel: 'Cancel', + delete: 'Delete' + }, + app: { + title: 'My App', + version: '1.0' + } +} + +export default translation +` + + const zhContent = `const translation = { + common: { + save: '保存', + cancel: '取消' + // missing 'delete' + }, + app: { + title: '我的应用' + // missing 'version' + } +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'test.ts'), enContent) + fs.writeFileSync(path.join(testZhDir, 'test.ts'), zhContent) + + const enKeys = await getKeysFromLanguage('en-US') + const zhKeys = await getKeysFromLanguage('zh-Hans') + + const missingKeys = enKeys.filter(key => !zhKeys.includes(key)) + + expect(missingKeys).toContain('test.common.delete') + expect(missingKeys).toContain('test.app.version') + expect(missingKeys).toHaveLength(2) + }) + }) + + describe('Extra keys detection', () => { + it('should detect extra keys in target language', async () => { + const enContent = `const translation = { + common: { + save: 'Save', + cancel: 'Cancel' + } +} + +export default translation +` + + const zhContent = `const translation = { + common: { + save: '保存', + cancel: '取消', 
+ delete: '删除', // extra key + extra: '额外的' // another extra key + }, + newSection: { + someKey: '某个值' // extra section + } +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'test.ts'), enContent) + fs.writeFileSync(path.join(testZhDir, 'test.ts'), zhContent) + + const enKeys = await getKeysFromLanguage('en-US') + const zhKeys = await getKeysFromLanguage('zh-Hans') + + const extraKeys = zhKeys.filter(key => !enKeys.includes(key)) + + expect(extraKeys).toContain('test.common.delete') + expect(extraKeys).toContain('test.common.extra') + expect(extraKeys).toContain('test.newSection.someKey') + expect(extraKeys).toHaveLength(3) + }) + }) + + describe('File filtering logic', () => { + it('should filter keys by specific file correctly', async () => { + // Create multiple files + const file1Content = `const translation = { + button: 'Button', + text: 'Text' +} + +export default translation +` + + const file2Content = `const translation = { + title: 'Title', + description: 'Description' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'components.ts'), file1Content) + fs.writeFileSync(path.join(testEnDir, 'pages.ts'), file2Content) + fs.writeFileSync(path.join(testZhDir, 'components.ts'), file1Content) + fs.writeFileSync(path.join(testZhDir, 'pages.ts'), file2Content) + + const allEnKeys = await getKeysFromLanguage('en-US') + const allZhKeys = await getKeysFromLanguage('zh-Hans') + + // Test file filtering logic + const targetFile = 'components' + const filteredEnKeys = allEnKeys.filter(key => + key.startsWith(targetFile.replace(/[-_](.)/g, (_, c) => c.toUpperCase())), + ) + const filteredZhKeys = allZhKeys.filter(key => + key.startsWith(targetFile.replace(/[-_](.)/g, (_, c) => c.toUpperCase())), + ) + + expect(allEnKeys).toHaveLength(4) // 2 keys from each file + expect(filteredEnKeys).toHaveLength(2) // only components keys + expect(filteredEnKeys).toContain('components.button') + expect(filteredEnKeys).toContain('components.text') + expect(filteredEnKeys).not.toContain('pages.title') + expect(filteredEnKeys).not.toContain('pages.description') + }) + }) + + describe('Complex nested structure handling', () => { + it('should handle deeply nested objects correctly', async () => { + const complexContent = `const translation = { + level1: { + level2: { + level3: { + level4: { + deepValue: 'Deep Value' + }, + anotherValue: 'Another Value' + }, + simpleValue: 'Simple Value' + }, + directValue: 'Direct Value' + }, + rootValue: 'Root Value' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'complex.ts'), complexContent) + + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toContain('complex.level1.level2.level3.level4.deepValue') + expect(keys).toContain('complex.level1.level2.level3.anotherValue') + expect(keys).toContain('complex.level1.level2.simpleValue') + expect(keys).toContain('complex.level1.directValue') + expect(keys).toContain('complex.rootValue') + + // Should not include intermediate objects + expect(keys).not.toContain('complex.level1') + expect(keys).not.toContain('complex.level1.level2') + expect(keys).not.toContain('complex.level1.level2.level3') + expect(keys).not.toContain('complex.level1.level2.level3.level4') + }) + }) + + describe('Edge cases', () => { + it('should handle empty objects', async () => { + const emptyContent = `const translation = { + empty: {}, + withValue: 'value' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'empty.ts'), emptyContent) 
+ + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toContain('empty.withValue') + expect(keys).not.toContain('empty.empty') + }) + + it('should handle special characters in keys', async () => { + const specialContent = `const translation = { + 'key-with-dash': 'value1', + 'key_with_underscore': 'value2', + 'key.with.dots': 'value3', + normalKey: 'value4' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'special.ts'), specialContent) + + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toContain('special.key-with-dash') + expect(keys).toContain('special.key_with_underscore') + expect(keys).toContain('special.key.with.dots') + expect(keys).toContain('special.normalKey') + }) + + it('should handle different value types', async () => { + const typesContent = `const translation = { + stringValue: 'string', + numberValue: 42, + booleanValue: true, + nullValue: null, + undefinedValue: undefined, + arrayValue: ['array', 'values'], + objectValue: { + nested: 'nested value' + } +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'types.ts'), typesContent) + + const keys = await getKeysFromLanguage('en-US') + + expect(keys).toContain('types.stringValue') + expect(keys).toContain('types.numberValue') + expect(keys).toContain('types.booleanValue') + expect(keys).toContain('types.nullValue') + expect(keys).toContain('types.undefinedValue') + expect(keys).toContain('types.arrayValue') + expect(keys).toContain('types.objectValue.nested') + expect(keys).not.toContain('types.objectValue') + }) + }) + + describe('Real-world scenario tests', () => { + it('should handle app-debug structure like real files', async () => { + const appDebugEn = `const translation = { + pageTitle: { + line1: 'Prompt', + line2: 'Engineering' + }, + operation: { + applyConfig: 'Publish', + resetConfig: 'Reset', + debugConfig: 'Debug' + }, + generate: { + instruction: 'Instructions', + generate: 'Generate', + resTitle: 'Generated Prompt', + noDataLine1: 'Describe your use case on the left,', + noDataLine2: 'the orchestration preview will show here.' + } +} + +export default translation +` + + const appDebugZh = `const translation = { + pageTitle: { + line1: '提示词', + line2: '编排' + }, + operation: { + applyConfig: '发布', + resetConfig: '重置', + debugConfig: '调试' + }, + generate: { + instruction: '指令', + generate: '生成', + resTitle: '生成的提示词', + noData: '在左侧描述您的用例,编排预览将在此处显示。' // This is extra + } +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'app-debug.ts'), appDebugEn) + fs.writeFileSync(path.join(testZhDir, 'app-debug.ts'), appDebugZh) + + const enKeys = await getKeysFromLanguage('en-US') + const zhKeys = await getKeysFromLanguage('zh-Hans') + + const missingKeys = enKeys.filter(key => !zhKeys.includes(key)) + const extraKeys = zhKeys.filter(key => !enKeys.includes(key)) + + expect(missingKeys).toContain('appDebug.generate.noDataLine1') + expect(missingKeys).toContain('appDebug.generate.noDataLine2') + expect(extraKeys).toContain('appDebug.generate.noData') + + expect(missingKeys).toHaveLength(2) + expect(extraKeys).toHaveLength(1) + }) + + it('should handle time structure with operation nested keys', async () => { + const timeEn = `const translation = { + months: { + January: 'January', + February: 'February' + }, + operation: { + now: 'Now', + ok: 'OK', + cancel: 'Cancel', + pickDate: 'Pick Date' + }, + title: { + pickTime: 'Pick Time' + }, + defaultPlaceholder: 'Pick a time...' 
+} + +export default translation +` + + const timeZh = `const translation = { + months: { + January: '一月', + February: '二月' + }, + operation: { + now: '此刻', + ok: '确定', + cancel: '取消', + pickDate: '选择日期' + }, + title: { + pickTime: '选择时间' + }, + pickDate: '选择日期', // This is extra - duplicates operation.pickDate + defaultPlaceholder: '请选择时间...' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'time.ts'), timeEn) + fs.writeFileSync(path.join(testZhDir, 'time.ts'), timeZh) + + const enKeys = await getKeysFromLanguage('en-US') + const zhKeys = await getKeysFromLanguage('zh-Hans') + + const missingKeys = enKeys.filter(key => !zhKeys.includes(key)) + const extraKeys = zhKeys.filter(key => !enKeys.includes(key)) + + expect(missingKeys).toHaveLength(0) // No missing keys + expect(extraKeys).toContain('time.pickDate') // Extra root-level pickDate + expect(extraKeys).toHaveLength(1) + + // Should have both keys available + expect(zhKeys).toContain('time.operation.pickDate') // Correct nested key + expect(zhKeys).toContain('time.pickDate') // Extra duplicate key + }) + }) + + describe('Statistics calculation', () => { + it('should calculate correct difference statistics', async () => { + const enContent = `const translation = { + key1: 'value1', + key2: 'value2', + key3: 'value3' +} + +export default translation +` + + const zhContentMissing = `const translation = { + key1: 'value1', + key2: 'value2' + // missing key3 +} + +export default translation +` + + const zhContentExtra = `const translation = { + key1: 'value1', + key2: 'value2', + key3: 'value3', + key4: 'extra', + key5: 'extra2' +} + +export default translation +` + + fs.writeFileSync(path.join(testEnDir, 'stats.ts'), enContent) + + // Test missing keys scenario + fs.writeFileSync(path.join(testZhDir, 'stats.ts'), zhContentMissing) + + const enKeys = await getKeysFromLanguage('en-US') + const zhKeysMissing = await getKeysFromLanguage('zh-Hans') + + expect(enKeys.length - zhKeysMissing.length).toBe(1) // +1 means 1 missing key + + // Test extra keys scenario + fs.writeFileSync(path.join(testZhDir, 'stats.ts'), zhContentExtra) + + const zhKeysExtra = await getKeysFromLanguage('zh-Hans') + + expect(enKeys.length - zhKeysExtra.length).toBe(-2) // -2 means 2 extra keys + }) + }) +}) diff --git a/web/__tests__/i18n-upload-features.test.ts b/web/__tests__/i18n-upload-features.test.ts new file mode 100644 index 0000000000..37aefcbef4 --- /dev/null +++ b/web/__tests__/i18n-upload-features.test.ts @@ -0,0 +1,119 @@ +/** + * Test suite for verifying upload feature translations across all locales + * Specifically tests for issue #23062: Missing Upload feature translations (esp. 
audioUpload) across most locales + */ + +import fs from 'node:fs' +import path from 'node:path' + +// Get all supported locales from the i18n directory +const I18N_DIR = path.join(__dirname, '../i18n') +const getSupportedLocales = (): string[] => { + return fs.readdirSync(I18N_DIR) + .filter(item => fs.statSync(path.join(I18N_DIR, item)).isDirectory()) + .sort() +} + +// Helper function to load translation file content +const loadTranslationContent = (locale: string): string => { + const filePath = path.join(I18N_DIR, locale, 'app-debug.ts') + + if (!fs.existsSync(filePath)) + throw new Error(`Translation file not found: ${filePath}`) + + return fs.readFileSync(filePath, 'utf-8') +} + +// Helper function to check if upload features exist +const hasUploadFeatures = (content: string): { [key: string]: boolean } => { + return { + fileUpload: /fileUpload\s*:\s*{/.test(content), + imageUpload: /imageUpload\s*:\s*{/.test(content), + documentUpload: /documentUpload\s*:\s*{/.test(content), + audioUpload: /audioUpload\s*:\s*{/.test(content), + featureBar: /bar\s*:\s*{/.test(content), + } +} + +describe('Upload Features i18n Translations - Issue #23062', () => { + let supportedLocales: string[] + + beforeAll(() => { + supportedLocales = getSupportedLocales() + console.log(`Testing ${supportedLocales.length} locales for upload features`) + }) + + test('all locales should have translation files', () => { + supportedLocales.forEach((locale) => { + const filePath = path.join(I18N_DIR, locale, 'app-debug.ts') + expect(fs.existsSync(filePath)).toBe(true) + }) + }) + + test('all locales should have required upload features', () => { + const results: { [locale: string]: { [feature: string]: boolean } } = {} + + supportedLocales.forEach((locale) => { + const content = loadTranslationContent(locale) + const features = hasUploadFeatures(content) + results[locale] = features + + // Check that all upload features exist + expect(features.fileUpload).toBe(true) + expect(features.imageUpload).toBe(true) + expect(features.documentUpload).toBe(true) + expect(features.audioUpload).toBe(true) + expect(features.featureBar).toBe(true) + }) + + console.log('✅ All locales have complete upload features') + }) + + test('previously missing locales should now have audioUpload - Issue #23062', () => { + // These locales were specifically missing audioUpload + const previouslyMissingLocales = ['fa-IR', 'hi-IN', 'ro-RO', 'sl-SI', 'th-TH', 'uk-UA', 'vi-VN'] + + previouslyMissingLocales.forEach((locale) => { + const content = loadTranslationContent(locale) + + // Verify audioUpload exists + expect(/audioUpload\s*:\s*{/.test(content)).toBe(true) + + // Verify it has title and description + expect(/audioUpload[^}]*title\s*:/.test(content)).toBe(true) + expect(/audioUpload[^}]*description\s*:/.test(content)).toBe(true) + + console.log(`✅ ${locale} - Issue #23062 resolved: audioUpload feature present`) + }) + }) + + test('upload features should have required properties', () => { + supportedLocales.forEach((locale) => { + const content = loadTranslationContent(locale) + + // Check fileUpload has required properties + if (/fileUpload\s*:\s*{/.test(content)) { + expect(/fileUpload[^}]*title\s*:/.test(content)).toBe(true) + expect(/fileUpload[^}]*description\s*:/.test(content)).toBe(true) + } + + // Check imageUpload has required properties + if (/imageUpload\s*:\s*{/.test(content)) { + expect(/imageUpload[^}]*title\s*:/.test(content)).toBe(true) + expect(/imageUpload[^}]*description\s*:/.test(content)).toBe(true) + } + + // Check 
documentUpload has required properties + if (/documentUpload\s*:\s*{/.test(content)) { + expect(/documentUpload[^}]*title\s*:/.test(content)).toBe(true) + expect(/documentUpload[^}]*description\s*:/.test(content)).toBe(true) + } + + // Check audioUpload has required properties + if (/audioUpload\s*:\s*{/.test(content)) { + expect(/audioUpload[^}]*title\s*:/.test(content)).toBe(true) + expect(/audioUpload[^}]*description\s*:/.test(content)).toBe(true) + } + }) + }) +}) diff --git a/web/__tests__/real-browser-flicker.test.tsx b/web/__tests__/real-browser-flicker.test.tsx new file mode 100644 index 0000000000..cf3abd5f80 --- /dev/null +++ b/web/__tests__/real-browser-flicker.test.tsx @@ -0,0 +1,445 @@ +/** + * Real Browser Environment Dark Mode Flicker Test + * + * This test attempts to simulate real browser refresh scenarios including: + * 1. SSR HTML generation phase + * 2. Client-side JavaScript loading + * 3. Theme system initialization + * 4. CSS styles application timing + */ + +import { render, screen, waitFor } from '@testing-library/react' +import { ThemeProvider } from 'next-themes' +import useTheme from '@/hooks/use-theme' +import { useEffect, useState } from 'react' + +// Setup browser environment for testing +const setupMockEnvironment = (storedTheme: string | null, systemPrefersDark = false) => { + // Mock localStorage + const mockStorage = { + getItem: jest.fn((key: string) => { + if (key === 'theme') return storedTheme + return null + }), + setItem: jest.fn(), + removeItem: jest.fn(), + } + + // Mock system theme preference + const mockMatchMedia = jest.fn((query: string) => ({ + matches: query.includes('dark') && systemPrefersDark, + media: query, + addListener: jest.fn(), + removeListener: jest.fn(), + })) + + if (typeof window !== 'undefined') { + Object.defineProperty(window, 'localStorage', { + value: mockStorage, + configurable: true, + }) + + Object.defineProperty(window, 'matchMedia', { + value: mockMatchMedia, + configurable: true, + }) + } + + return { mockStorage, mockMatchMedia } +} + +// Simulate real page component based on Dify's actual theme usage +const PageComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + + useEffect(() => { + setMounted(true) + }, []) + + // Simulate common theme usage pattern in Dify + const isDark = mounted ? theme === 'dark' : false + + return ( +
+       <div>
+         <h1>Dify Application</h1>
+         <div data-testid="theme-indicator">
+           Current Theme: {mounted ? theme : 'unknown'}
+         </div>
+         <div data-testid="visual-appearance">
+           Appearance: {isDark ? 'dark' : 'light'}
+         </div>
+       </div>
+ ) +} + +const TestThemeProvider = ({ children }: { children: React.ReactNode }) => ( + + {children} + +) + +describe('Real Browser Environment Dark Mode Flicker Test', () => { + beforeEach(() => { + jest.clearAllMocks() + }) + + describe('Page Refresh Scenario Simulation', () => { + test('simulates complete page loading process with dark theme', async () => { + // Setup: User previously selected dark mode + setupMockEnvironment('dark') + + render( + + + , + ) + + // Check initial client-side rendering state + const initialState = { + theme: screen.getByTestId('theme-indicator').textContent, + appearance: screen.getByTestId('visual-appearance').textContent, + } + console.log('Initial client state:', initialState) + + // Wait for theme system to fully initialize + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toHaveTextContent('Current Theme: dark') + }) + + const finalState = { + theme: screen.getByTestId('theme-indicator').textContent, + appearance: screen.getByTestId('visual-appearance').textContent, + } + console.log('Final state:', finalState) + + // Document the state change - this is the source of flicker + console.log('State change detection: Initial -> Final') + }) + + test('handles light theme correctly', async () => { + setupMockEnvironment('light') + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toHaveTextContent('Current Theme: light') + }) + + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + }) + + test('handles system theme with dark preference', async () => { + setupMockEnvironment('system', true) // system theme, dark preference + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toHaveTextContent('Current Theme: dark') + }) + + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: dark') + }) + + test('handles system theme with light preference', async () => { + setupMockEnvironment('system', false) // system theme, light preference + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toHaveTextContent('Current Theme: light') + }) + + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + }) + + test('handles no stored theme (defaults to system)', async () => { + setupMockEnvironment(null, false) // no stored theme, system prefers light + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toHaveTextContent('Current Theme: light') + }) + }) + + test('measures timing window of style changes', async () => { + setupMockEnvironment('dark') + + const timingData: Array<{ phase: string; timestamp: number; styles: any }> = [] + + const TimingPageComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + const isDark = mounted ? theme === 'dark' : false + + // Record timing and styles for each render phase + const currentStyles = { + backgroundColor: isDark ? '#1f2937' : '#ffffff', + color: isDark ? '#ffffff' : '#000000', + } + + timingData.push({ + phase: mounted ? 'CSR' : 'Initial', + timestamp: performance.now(), + styles: currentStyles, + }) + + useEffect(() => { + setMounted(true) + }, []) + + return ( +
+       <div data-testid="timing-status">
+         Phase: {mounted ? 'CSR' : 'Initial'} | Theme: {theme} | Visual: {isDark ? 'dark' : 'light'}
+       </div>
+ ) + } + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('timing-status')).toHaveTextContent('Phase: CSR') + }) + + // Analyze timing and style changes + console.log('\n=== Style Change Timeline ===') + timingData.forEach((data, index) => { + console.log(`${index + 1}. ${data.phase}: bg=${data.styles.backgroundColor}, color=${data.styles.color}`) + }) + + // Check if there are style changes (this is visible flicker) + const hasStyleChange = timingData.length > 1 + && timingData[0].styles.backgroundColor !== timingData[timingData.length - 1].styles.backgroundColor + + if (hasStyleChange) + console.log('⚠️ Style changes detected - this causes visible flicker') + else + console.log('✅ No style changes detected') + + expect(timingData.length).toBeGreaterThan(1) + }) + }) + + describe('CSS Application Timing Tests', () => { + test('checks CSS class changes causing flicker', async () => { + setupMockEnvironment('dark') + + const cssStates: Array<{ className: string; timestamp: number }> = [] + + const CSSTestComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + const isDark = mounted ? theme === 'dark' : false + + // Simulate Tailwind CSS class application + const className = `min-h-screen ${isDark ? 'bg-gray-900 text-white' : 'bg-white text-black'}` + + cssStates.push({ + className, + timestamp: performance.now(), + }) + + useEffect(() => { + setMounted(true) + }, []) + + return ( +
+       <div className={className}>
+         <div data-testid="css-classes">Classes: {className}</div>
+       </div>
+ ) + } + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('css-classes')).toHaveTextContent('bg-gray-900 text-white') + }) + + console.log('\n=== CSS Class Change Detection ===') + cssStates.forEach((state, index) => { + console.log(`${index + 1}. ${state.className}`) + }) + + // Check if CSS classes have changed + const hasCSSChange = cssStates.length > 1 + && cssStates[0].className !== cssStates[cssStates.length - 1].className + + if (hasCSSChange) { + console.log('⚠️ CSS class changes detected - may cause style flicker') + console.log(`From: "${cssStates[0].className}"`) + console.log(`To: "${cssStates[cssStates.length - 1].className}"`) + } + + expect(hasCSSChange).toBe(true) // We expect to see this change + }) + }) + + describe('Edge Cases and Error Handling', () => { + test('handles localStorage access errors gracefully', async () => { + // Mock localStorage to throw an error + const mockStorage = { + getItem: jest.fn(() => { + throw new Error('LocalStorage access denied') + }), + setItem: jest.fn(), + removeItem: jest.fn(), + } + + if (typeof window !== 'undefined') { + Object.defineProperty(window, 'localStorage', { + value: mockStorage, + configurable: true, + }) + } + + render( + + + , + ) + + // Should fallback gracefully without crashing + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + }) + + // Should default to light theme when localStorage fails + expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light') + }) + + test('handles invalid theme values in localStorage', async () => { + setupMockEnvironment('invalid-theme-value') + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('theme-indicator')).toBeInTheDocument() + }) + + // Should handle invalid values gracefully + const themeIndicator = screen.getByTestId('theme-indicator') + expect(themeIndicator).toBeInTheDocument() + }) + }) + + describe('Performance and Regression Tests', () => { + test('verifies ThemeProvider position fix reduces initialization delay', async () => { + const performanceMarks: Array<{ event: string; timestamp: number }> = [] + + const PerformanceTestComponent = () => { + const [mounted, setMounted] = useState(false) + const { theme } = useTheme() + + performanceMarks.push({ event: 'component-render', timestamp: performance.now() }) + + useEffect(() => { + performanceMarks.push({ event: 'mount-start', timestamp: performance.now() }) + setMounted(true) + performanceMarks.push({ event: 'mount-complete', timestamp: performance.now() }) + }, []) + + useEffect(() => { + if (theme) + performanceMarks.push({ event: 'theme-available', timestamp: performance.now() }) + }, [theme]) + + return ( +
+       <div data-testid="performance-test">
+         Mounted: {mounted.toString()} | Theme: {theme || 'loading'}
+       </div>
+ ) + } + + setupMockEnvironment('dark') + + render( + + + , + ) + + await waitFor(() => { + expect(screen.getByTestId('performance-test')).toHaveTextContent('Theme: dark') + }) + + // Analyze performance timeline + console.log('\n=== Performance Timeline ===') + performanceMarks.forEach((mark) => { + console.log(`${mark.event}: ${mark.timestamp.toFixed(2)}ms`) + }) + + expect(performanceMarks.length).toBeGreaterThan(3) + }) + }) + + describe('Solution Requirements Definition', () => { + test('defines technical requirements to eliminate flicker', () => { + const technicalRequirements = { + ssrConsistency: 'SSR and CSR must render identical initial styles', + synchronousDetection: 'Theme detection must complete synchronously before first render', + noStyleChanges: 'No visible style changes should occur after hydration', + performanceImpact: 'Solution should not significantly impact page load performance', + browserCompatibility: 'Must work consistently across all major browsers', + } + + console.log('\n=== Technical Requirements ===') + Object.entries(technicalRequirements).forEach(([key, requirement]) => { + console.log(`${key}: ${requirement}`) + expect(requirement).toBeDefined() + }) + + // A successful solution should pass all these requirements + }) + }) +}) diff --git a/web/__tests__/workflow-parallel-limit.test.tsx b/web/__tests__/workflow-parallel-limit.test.tsx new file mode 100644 index 0000000000..0843122ab4 --- /dev/null +++ b/web/__tests__/workflow-parallel-limit.test.tsx @@ -0,0 +1,301 @@ +/** + * MAX_PARALLEL_LIMIT Configuration Bug Test + * + * This test reproduces and verifies the fix for issue #23083: + * MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel + */ + +import { render, screen } from '@testing-library/react' +import React from 'react' + +// Mock environment variables before importing constants +const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT + +// Test with different environment values +function setupEnvironment(value?: string) { + if (value) + process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value + else + delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT + + // Clear module cache to force re-evaluation + jest.resetModules() +} + +function restoreEnvironment() { + if (originalEnv) + process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv + else + delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT + + jest.resetModules() +} + +// Mock i18next with proper implementation +jest.mock('react-i18next', () => ({ + useTranslation: () => ({ + t: (key: string) => { + if (key.includes('MaxParallelismTitle')) return 'Max Parallelism' + if (key.includes('MaxParallelismDesc')) return 'Maximum number of parallel executions' + if (key.includes('parallelMode')) return 'Parallel Mode' + if (key.includes('parallelPanelDesc')) return 'Enable parallel execution' + if (key.includes('errorResponseMethod')) return 'Error Response Method' + return key + }, + }), + initReactI18next: { + type: '3rdParty', + init: jest.fn(), + }, +})) + +// Mock i18next module completely to prevent initialization issues +jest.mock('i18next', () => ({ + use: jest.fn().mockReturnThis(), + init: jest.fn().mockReturnThis(), + t: jest.fn(key => key), + isInitialized: true, +})) + +// Mock the useConfig hook +jest.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({ + __esModule: true, + default: () => ({ + inputs: { + is_parallel: true, + parallel_nums: 5, + error_handle_mode: 'terminated', + }, + changeParallel: jest.fn(), + changeParallelNums: jest.fn(), 
+ changeErrorHandleMode: jest.fn(), + }), +})) + +// Mock other components +jest.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => { + return function MockVarReferencePicker() { + return
<div>VarReferencePicker</div>
+ } +}) + +jest.mock('@/app/components/workflow/nodes/_base/components/split', () => { + return function MockSplit() { + return
<div>Split</div>
+ } +}) + +jest.mock('@/app/components/workflow/nodes/_base/components/field', () => { + return function MockField({ title, children }: { title: string, children: React.ReactNode }) { + return ( +
+ <div>
+ {children}
+ </div>
+ ) + } +}) + +jest.mock('@/app/components/base/switch', () => { + return function MockSwitch({ defaultValue }: { defaultValue: boolean }) { + return + } +}) + +jest.mock('@/app/components/base/select', () => { + return function MockSelect() { + return + } +}) + +// Use defaultValue to avoid controlled input warnings +jest.mock('@/app/components/base/slider', () => { + return function MockSlider({ value, max, min }: { value: number, max: number, min: number }) { + return ( + + ) + } +}) + +// Use defaultValue to avoid controlled input warnings +jest.mock('@/app/components/base/input', () => { + return function MockInput({ type, max, min, value }: { type: string, max: number, min: number, value: number }) { + return ( + + ) + } +}) + +describe('MAX_PARALLEL_LIMIT Configuration Bug', () => { + const mockNodeData = { + id: 'test-iteration-node', + type: 'iteration' as const, + data: { + title: 'Test Iteration', + desc: 'Test iteration node', + iterator_selector: ['test'], + output_selector: ['output'], + is_parallel: true, + parallel_nums: 5, + error_handle_mode: 'terminated' as const, + }, + } + + beforeEach(() => { + jest.clearAllMocks() + }) + + afterEach(() => { + restoreEnvironment() + }) + + afterAll(() => { + restoreEnvironment() + }) + + describe('Environment Variable Parsing', () => { + it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', () => { + setupEnvironment('25') + const { MAX_PARALLEL_LIMIT } = require('@/config') + expect(MAX_PARALLEL_LIMIT).toBe(25) + }) + + it('should fallback to default when environment variable is not set', () => { + setupEnvironment() // No environment variable + const { MAX_PARALLEL_LIMIT } = require('@/config') + expect(MAX_PARALLEL_LIMIT).toBe(10) + }) + + it('should handle invalid environment variable values', () => { + setupEnvironment('invalid') + const { MAX_PARALLEL_LIMIT } = require('@/config') + + // Should fall back to default when parsing fails + expect(MAX_PARALLEL_LIMIT).toBe(10) + }) + + it('should handle empty environment variable', () => { + setupEnvironment('') + const { MAX_PARALLEL_LIMIT } = require('@/config') + + // Should fall back to default when empty + expect(MAX_PARALLEL_LIMIT).toBe(10) + }) + + // Edge cases for boundary values + it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', () => { + setupEnvironment('0') + let { MAX_PARALLEL_LIMIT } = require('@/config') + expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default + + setupEnvironment('-5') + ;({ MAX_PARALLEL_LIMIT } = require('@/config')) + expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default + }) + + it('should handle float numbers by parseInt behavior', () => { + setupEnvironment('12.7') + const { MAX_PARALLEL_LIMIT } = require('@/config') + // parseInt truncates to integer + expect(MAX_PARALLEL_LIMIT).toBe(12) + }) + }) + + describe('UI Component Integration (Main Fix Verification)', () => { + it('should render iteration panel with environment-configured max value', () => { + // Set environment variable to a different value + setupEnvironment('30') + + // Import Panel after setting environment + const Panel = require('@/app/components/workflow/nodes/iteration/panel').default + const { MAX_PARALLEL_LIMIT } = require('@/config') + + render( + , + ) + + // Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT + const numberInput = screen.getByTestId('number-input') + expect(numberInput).toHaveAttribute('data-max', String(MAX_PARALLEL_LIMIT)) + + const slider = 
screen.getByTestId('slider') + expect(slider).toHaveAttribute('data-max', String(MAX_PARALLEL_LIMIT)) + + // Verify the actual values + expect(MAX_PARALLEL_LIMIT).toBe(30) + expect(numberInput.getAttribute('data-max')).toBe('30') + expect(slider.getAttribute('data-max')).toBe('30') + }) + + it('should maintain UI consistency with different environment values', () => { + setupEnvironment('15') + const Panel = require('@/app/components/workflow/nodes/iteration/panel').default + const { MAX_PARALLEL_LIMIT } = require('@/config') + + render( + , + ) + + // Both input and slider should use the same max value from MAX_PARALLEL_LIMIT + const numberInput = screen.getByTestId('number-input') + const slider = screen.getByTestId('slider') + + expect(numberInput.getAttribute('data-max')).toBe(slider.getAttribute('data-max')) + expect(numberInput.getAttribute('data-max')).toBe(String(MAX_PARALLEL_LIMIT)) + }) + }) + + describe('Legacy Constant Verification (For Transition Period)', () => { + // Marked as transition/deprecation tests + it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', () => { + const { MAX_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants') + expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number') + expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value + }) + + it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', () => { + setupEnvironment('50') + const { MAX_PARALLEL_LIMIT } = require('@/config') + const { MAX_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants') + + // MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not + expect(MAX_PARALLEL_LIMIT).toBe(50) + expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) + expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM) + }) + }) + + describe('Constants Validation', () => { + it('should validate that required constants exist and have correct types', () => { + const { MAX_PARALLEL_LIMIT } = require('@/config') + const { MIN_ITERATION_PARALLEL_NUM } = require('@/app/components/workflow/constants') + expect(typeof MAX_PARALLEL_LIMIT).toBe('number') + expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number') + expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM) + }) + }) +}) diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/develop/page.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/develop/page.tsx index 415d82285c..11335b270c 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/develop/page.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/develop/page.tsx @@ -1,5 +1,5 @@ import React from 'react' -import type { Locale } from '@/i18n' +import type { Locale } from '@/i18n-config' import DevelopMain from '@/app/components/develop' export type IDevelopProps = { diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout-main.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout-main.tsx index 7d5d4cb52d..6b3807f1c6 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout-main.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/layout-main.tsx @@ -42,7 +42,7 @@ const AppDetailLayout: FC = (props) => { const pathname = usePathname() const media = useBreakpoints() const isMobile = media === MediaType.mobile - const { isCurrentWorkspaceEditor, isLoadingCurrentWorkspace } = useAppContext() + const { isCurrentWorkspaceEditor, isLoadingCurrentWorkspace, currentWorkspace } = useAppContext() const { 
appDetail, setAppDetail, setAppSiderbarExpand } = useStore(useShallow(state => ({ appDetail: state.appDetail, setAppDetail: state.setAppDetail, @@ -106,7 +106,6 @@ const AppDetailLayout: FC = (props) => { // if ((appDetail.mode === 'advanced-chat' || appDetail.mode === 'workflow') && (pathname).endsWith('workflow')) // setAppSiderbarExpand('collapse') } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [appDetail, isMobile]) useEffect(() => { @@ -120,11 +119,10 @@ const AppDetailLayout: FC = (props) => { }).finally(() => { setIsLoadingAppDetail(false) }) - // eslint-disable-next-line react-hooks/exhaustive-deps }, [appId, pathname]) useEffect(() => { - if (!appDetailRes || isLoadingCurrentWorkspace || isLoadingAppDetail) + if (!appDetailRes || !currentWorkspace.id || isLoadingCurrentWorkspace || isLoadingAppDetail) return const res = appDetailRes // redirection @@ -143,7 +141,6 @@ const AppDetailLayout: FC = (props) => { setAppDetail({ ...res, enable_sso: false }) setNavigation(getNavigations(appId, isCurrentWorkspaceEditor, res.mode)) } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [appDetailRes, isCurrentWorkspaceEditor, isLoadingAppDetail, isLoadingCurrentWorkspace]) useUnmount(() => { diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx index 2afe451fe1..907c270017 100644 --- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx +++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/config-popup.tsx @@ -1,5 +1,5 @@ 'use client' -import type { FC } from 'react' +import type { FC, JSX } from 'react' import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import { useBoolean } from 'ahooks' diff --git a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/settings/page.tsx b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/settings/page.tsx index d9a196d854..688f2c9fc2 100644 --- a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/settings/page.tsx +++ b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/settings/page.tsx @@ -1,5 +1,5 @@ import React from 'react' -import { getLocaleOnServer, useTranslation as translate } from '@/i18n/server' +import { getLocaleOnServer, useTranslation as translate } from '@/i18n-config/server' import Form from '@/app/components/datasets/settings/form' const Settings = async () => { diff --git a/web/app/(commonLayout)/datasets/Doc.tsx b/web/app/(commonLayout)/datasets/Doc.tsx index efdfe157f2..042a90f4af 100644 --- a/web/app/(commonLayout)/datasets/Doc.tsx +++ b/web/app/(commonLayout)/datasets/Doc.tsx @@ -8,7 +8,7 @@ import TemplateEn from './template/template.en.mdx' import TemplateZh from './template/template.zh.mdx' import TemplateJa from './template/template.ja.mdx' import I18n from '@/context/i18n' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' import useTheme from '@/hooks/use-theme' import { Theme } from '@/types/app' import cn from '@/utils/classnames' diff --git a/web/app/(commonLayout)/plugins/page.tsx b/web/app/(commonLayout)/plugins/page.tsx index 47f2791075..d07c4307ad 100644 --- a/web/app/(commonLayout)/plugins/page.tsx +++ b/web/app/(commonLayout)/plugins/page.tsx @@ -1,7 +1,7 @@ import PluginPage from '@/app/components/plugins/plugin-page' import 
PluginsPanel from '@/app/components/plugins/plugin-page/plugins-panel' import Marketplace from '@/app/components/plugins/marketplace' -import { getLocaleOnServer } from '@/i18n/server' +import { getLocaleOnServer } from '@/i18n-config/server' const PluginList = async () => { const locale = await getLocaleOnServer() diff --git a/web/app/account/account-page/email-change-modal.tsx b/web/app/account/account-page/email-change-modal.tsx index c3efad104a..bd00f27ac5 100644 --- a/web/app/account/account-page/email-change-modal.tsx +++ b/web/app/account/account-page/email-change-modal.tsx @@ -15,6 +15,8 @@ import { verifyEmail, } from '@/service/common' import { noop } from 'lodash-es' +import { asyncRunSafe } from '@/utils' +import type { ResponseError } from '@/service/fetch' type Props = { show: boolean @@ -39,6 +41,7 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => { const [time, setTime] = useState(0) const [stepToken, setStepToken] = useState('') const [newEmailExited, setNewEmailExited] = useState(false) + const [unAvailableEmail, setUnAvailableEmail] = useState(false) const [isCheckingEmail, setIsCheckingEmail] = useState(false) const startCount = () => { @@ -124,9 +127,17 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => { email, }) setNewEmailExited(false) + setUnAvailableEmail(false) } - catch { - setNewEmailExited(true) + catch (e: any) { + if (e.status === 400) { + const [, errRespData] = await asyncRunSafe(e.json()) + const { code } = errRespData || {} + if (code === 'email_already_in_use') + setNewEmailExited(true) + if (code === 'account_in_freeze') + setUnAvailableEmail(true) + } } finally { setIsCheckingEmail(false) @@ -291,15 +302,18 @@ const EmailChangeModal = ({ onClose, email, show }: Props) => { placeholder={t('common.account.changeEmail.emailPlaceholder')} value={mail} onChange={e => handleNewEmailValueChange(e.target.value)} - destructive={newEmailExited} + destructive={newEmailExited || unAvailableEmail} /> {newEmailExited && (
{t('common.account.changeEmail.existingEmail')}
)} + {unAvailableEmail && ( +
{t('common.account.changeEmail.unAvailableEmail')}
+ )}
) } @@ -142,11 +168,9 @@ const HeaderOptions: FC = ({ position="br" trigger="click" btnElement={ - + } - btnClassName='p-0 border-0' + btnClassName='btn btn-secondary btn-medium w-8 p-0' className={'!z-20 h-fit !w-[155px]'} popupClassName='!w-full !overflow-visible' manualClose @@ -169,6 +193,15 @@ const HeaderOptions: FC = ({ /> ) } + { + showClearConfirm && ( + setShowClearConfirm(false)} + onConfirm={handleConfirmed} + /> + ) + } ) } diff --git a/web/app/components/app/configuration/base/warning-mask/index.tsx b/web/app/components/app/configuration/base/warning-mask/index.tsx index fbe58bee11..8bd7ea12aa 100644 --- a/web/app/components/app/configuration/base/warning-mask/index.tsx +++ b/web/app/components/app/configuration/base/warning-mask/index.tsx @@ -22,14 +22,14 @@ const WarningMask: FC = ({ footer, }) => { return ( -
-
{warningIcon}
-
+
{warningIcon}
+
{title}
-
+
{description}
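The warning-mask change above strips the component's fixed light styling, and the style.module.css hunk just below removes the hard-coded rgba(239, 244, 255, 0.9) overlay so the mask can follow the active theme. A minimal sketch of the resulting pattern; the real class names were garbled out of this diff, so 'bg-background-overlay-backdrop' here is illustrative only:

import type { ReactNode } from 'react'
import cn from '@/utils/classnames'
import s from './style.module.css'

// s.mask now only contributes backdrop-filter: blur(2px); the background
// comes from a semantic token class, so dark mode gets a dark overlay.
const ThemedMask = ({ children }: { children: ReactNode }) => (
  <div className={cn(s.mask, 'bg-background-overlay-backdrop absolute inset-0 z-10')}>
    {children}
  </div>
)

export default ThemedMask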
diff --git a/web/app/components/app/configuration/base/warning-mask/style.module.css b/web/app/components/app/configuration/base/warning-mask/style.module.css index 87f226fd96..a2c394de2a 100644 --- a/web/app/components/app/configuration/base/warning-mask/style.module.css +++ b/web/app/components/app/configuration/base/warning-mask/style.module.css @@ -1,5 +1,4 @@ .mask { - background-color: rgba(239, 244, 255, 0.9); backdrop-filter: blur(2px); } diff --git a/web/app/components/app/configuration/config-var/config-modal/index.tsx b/web/app/components/app/configuration/config-var/config-modal/index.tsx index 00340a2eaf..28eaa828be 100644 --- a/web/app/components/app/configuration/config-var/config-modal/index.tsx +++ b/web/app/components/app/configuration/config-var/config-modal/index.tsx @@ -21,6 +21,7 @@ import { DEFAULT_FILE_UPLOAD_SETTING } from '@/app/components/workflow/constants import { DEFAULT_VALUE_MAX_LEN } from '@/config' import type { Item as SelectItem } from './type-select' import TypeSelector from './type-select' +import { SimpleSelect } from '@/app/components/base/select' const TEXT_MAX_LENGTH = 256 @@ -249,9 +250,31 @@ const ConfigModal: FC = ({ )} {type === InputVarType.select && ( - - - + <> + + + + {options && options.length > 0 && ( + + opt.trim() !== '').map(option => ({ + value: option, + name: option, + })), + ]} + defaultValue={tempPayload.default || ''} + onSelect={item => handlePayloadChange('default')(item.value === '' ? undefined : item.value)} + placeholder={t('appDebug.variableConfig.selectDefaultValue')} + allowSearch={false} + /> + + )} + )} {[InputVarType.singleFile, InputVarType.multiFiles].includes(type) && ( diff --git a/web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx b/web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx index 92f1525bd5..dad5441a54 100644 --- a/web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx +++ b/web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx @@ -21,7 +21,7 @@ import type { Collection, Tool } from '@/app/components/tools/types' import { CollectionType } from '@/app/components/tools/types' import { fetchBuiltInToolList, fetchCustomToolList, fetchModelToolList, fetchWorkflowToolList } from '@/service/tools' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import cn from '@/utils/classnames' import type { ToolWithProvider } from '@/app/components/workflow/types' import { diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index 5525a24a28..512f57bccf 100644 --- a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -82,6 +82,7 @@ import { MittProvider } from '@/context/mitt-context' import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params' import Toast from '@/app/components/base/toast' import { fetchCollectionList } from '@/service/tools' +import { useAppContext } from '@/context/app-context' type PublishConfig = { modelConfig: ModelConfig @@ -91,6 +92,8 @@ type PublishConfig = { const Configuration: FC = () => { const { t } = useTranslation() const { notify } = useContext(ToastContext) + const { isLoadingCurrentWorkspace, currentWorkspace } = useAppContext() + const { appDetail, showAppConfigureFeaturesModal, setAppSiderbarExpand, setShowAppConfigureFeaturesModal } = 
useAppStore(useShallow(state => ({ appDetail: state.appDetail, setAppSiderbarExpand: state.setAppSiderbarExpand, @@ -842,7 +845,7 @@ const Configuration: FC = () => { setAppSiderbarExpand('collapse') } - if (isLoading) { + if (isLoading || isLoadingCurrentWorkspace || !currentWorkspace.id) { return
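The configuration entry point above now waits for the workspace record in addition to its own loading flag, matching the layout-main change earlier in this patch where the redirect logic reads currentWorkspace.id. A condensed sketch of the gate, under the assumption that an empty currentWorkspace.id means the workspace fetch has not resolved; GatedView is illustrative, not a component in this PR:

import type { ReactNode } from 'react'
import Loading from '@/app/components/base/loading'
import { useAppContext } from '@/context/app-context'

const GatedView = ({ isLoading, children }: { isLoading: boolean, children: ReactNode }) => {
  const { isLoadingCurrentWorkspace, currentWorkspace } = useAppContext()
  // Render nothing role-dependent until both the page data and the
  // workspace are in; otherwise editor checks run against empty state.
  if (isLoading || isLoadingCurrentWorkspace || !currentWorkspace.id)
    return <Loading type='app' />
  return <>{children}</>
}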
diff --git a/web/app/components/app/configuration/tools/external-data-tool-modal.tsx b/web/app/components/app/configuration/tools/external-data-tool-modal.tsx index 3fd020f60f..ceb53ac0b8 100644 --- a/web/app/components/app/configuration/tools/external-data-tool-modal.tsx +++ b/web/app/components/app/configuration/tools/external-data-tool-modal.tsx @@ -12,7 +12,7 @@ import { BookOpen01 } from '@/app/components/base/icons/src/vender/line/educatio import { fetchCodeBasedExtensionList } from '@/service/common' import { SimpleSelect } from '@/app/components/base/select' import I18n from '@/context/i18n' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' import type { CodeBasedExtensionItem, ExternalDataTool, diff --git a/web/app/components/app/overview/customize/index.tsx b/web/app/components/app/overview/customize/index.tsx index 0fedd76f89..11d29bb0c8 100644 --- a/web/app/components/app/overview/customize/index.tsx +++ b/web/app/components/app/overview/customize/index.tsx @@ -68,7 +68,7 @@ const CustomizeModal: FC = ({
2
-
{t(`${prefixCustomize}.way1.step3`)}
+
{t(`${prefixCustomize}.way1.step2`)}
{t(`${prefixCustomize}.way1.step2Tip`)}
diff --git a/web/app/components/datasets/documents/detail/batch-modal/index.tsx b/web/app/components/datasets/documents/detail/batch-modal/index.tsx index 614471c565..0952a823b4 100644 --- a/web/app/components/datasets/documents/detail/batch-modal/index.tsx +++ b/web/app/components/datasets/documents/detail/batch-modal/index.tsx @@ -7,14 +7,14 @@ import CSVUploader from './csv-uploader' import CSVDownloader from './csv-downloader' import Button from '@/app/components/base/button' import Modal from '@/app/components/base/modal' -import type { ChunkingMode } from '@/models/datasets' +import type { ChunkingMode, FileItem } from '@/models/datasets' import { noop } from 'lodash-es' export type IBatchModalProps = { isShow: boolean docForm: ChunkingMode onCancel: () => void - onConfirm: (file: File) => void + onConfirm: (file: FileItem) => void } const BatchModal: FC = ({ @@ -24,8 +24,8 @@ const BatchModal: FC = ({ onConfirm, }) => { const { t } = useTranslation() - const [currentCSV, setCurrentCSV] = useState() - const handleFile = (file?: File) => setCurrentCSV(file) + const [currentCSV, setCurrentCSV] = useState() + const handleFile = (file?: FileItem) => setCurrentCSV(file) const handleSend = () => { if (!currentCSV) @@ -56,7 +56,7 @@ const BatchModal: FC = ({ -
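With onConfirm now taking a FileItem instead of a raw File, the modal hands over a file the uploader has already pushed to the server, and the detail/index.tsx hunk below sends only its upload id rather than rebuilding a multipart FormData payload. A sketch of that call, assuming csv.file.id is the server-side upload id (inferred from the hunk, with segmentBatchImport's signature simplified for illustration):

import type { FileItem } from '@/models/datasets'

type BatchImport = (args: { url: string, body: { upload_file_id: string } }) => Promise<unknown>

async function runBatchSketch(datasetId: string, documentId: string, csv: FileItem, segmentBatchImport: BatchImport) {
  // The CSV was uploaded when it was picked, so the request carries its id
  // instead of streaming the raw file a second time.
  await segmentBatchImport({
    url: `/datasets/${datasetId}/documents/${documentId}/segments/batch_import`,
    body: { upload_file_id: csv.file.id! },
  })
}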
diff --git a/web/app/components/datasets/documents/detail/index.tsx b/web/app/components/datasets/documents/detail/index.tsx index aff74038e3..79d12e47e3 100644 --- a/web/app/components/datasets/documents/detail/index.tsx +++ b/web/app/components/datasets/documents/detail/index.tsx @@ -17,7 +17,7 @@ import cn from '@/utils/classnames' import Divider from '@/app/components/base/divider' import Loading from '@/app/components/base/loading' import { ToastContext } from '@/app/components/base/toast' -import type { ChunkingMode, ParentMode, ProcessMode } from '@/models/datasets' +import type { ChunkingMode, FileItem, ParentMode, ProcessMode } from '@/models/datasets' import { useDatasetDetailContext } from '@/context/dataset-detail' import FloatRightContainer from '@/app/components/base/float-right-container' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' @@ -111,12 +111,10 @@ const DocumentDetail: FC = ({ datasetId, documentId }) => { } const { mutateAsync: segmentBatchImport } = useSegmentBatchImport() - const runBatch = async (csv: File) => { - const formData = new FormData() - formData.append('file', csv) + const runBatch = async (csv: FileItem) => { await segmentBatchImport({ url: `/datasets/${datasetId}/documents/${documentId}/segments/batch_import`, - body: formData, + body: { upload_file_id: csv.file.id! }, }, { onSuccess: (res) => { setImportStatus(res.job_status) diff --git a/web/app/components/datasets/documents/index.tsx b/web/app/components/datasets/documents/index.tsx index 676581a50f..1f9f36e7b1 100644 --- a/web/app/components/datasets/documents/index.tsx +++ b/web/app/components/datasets/documents/index.tsx @@ -164,7 +164,6 @@ const Documents: FC = ({ datasetId }) => { if (totalPages < currPage + 1) setCurrPage(totalPages === 0 ? 0 : totalPages - 1) } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [documentsRes]) const invalidDocumentDetail = useInvalidDocumentDetailKey() @@ -178,7 +177,6 @@ const Documents: FC = ({ datasetId }) => { invalidChunkList() invalidChildChunkList() }, 5000) - // eslint-disable-next-line react-hooks/exhaustive-deps }, []) const documentsWithProgress = useMemo(() => { @@ -273,6 +271,13 @@ const Documents: FC = ({ datasetId }) => { const documentsList = isDataSourceNotion ? 
documentsWithProgress?.data : documentsRes?.data const [selectedIds, setSelectedIds] = useState([]) + + // Clear selection when search changes to avoid confusion + useEffect(() => { + if (searchValue !== query.keyword) + setSelectedIds([]) + }, [searchValue, query.keyword]) + const { run: handleSearch } = useDebounceFn(() => { setSearchValue(inputValue) }, { wait: 500 }) diff --git a/web/app/components/datasets/documents/list.tsx b/web/app/components/datasets/documents/list.tsx index 2eb6a3ac1e..2697580f4e 100644 --- a/web/app/components/datasets/documents/list.tsx +++ b/web/app/components/datasets/documents/list.tsx @@ -458,7 +458,8 @@ const DocumentList: FC = ({ handleSave, } = useBatchEditDocumentMetadata({ datasetId, - docList: documents.filter(item => selectedIds.includes(item.id)), + docList: documents.filter(doc => selectedIds.includes(doc.id)), + selectedDocumentIds: selectedIds, // Pass all selected IDs separately onUpdate, }) diff --git a/web/app/components/datasets/hit-testing/index.tsx b/web/app/components/datasets/hit-testing/index.tsx index fef69a5e61..8da7b9e349 100644 --- a/web/app/components/datasets/hit-testing/index.tsx +++ b/web/app/components/datasets/hit-testing/index.tsx @@ -70,7 +70,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => { const [isShowModifyRetrievalModal, setIsShowModifyRetrievalModal] = useState(false) const [isShowRightPanel, { setTrue: showRightPanel, setFalse: hideRightPanel, set: setShowRightPanel }] = useBoolean(!isMobile) const renderHitResults = (results: HitTesting[] | ExternalKnowledgeBaseHitTesting[]) => ( -
+
{t('datasetHitTesting.hit.title', { num: results.length })}
@@ -93,7 +93,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => { ) const renderEmptyState = () => ( -
+
{t('datasetHitTesting.hit.emptyTip')} @@ -180,7 +180,7 @@ const HitTestingPage: FC = ({ datasetId }: Props) => {
{/* {renderHitResults(generalResultData)} */} {submitLoading - ?
+ ?
: ( diff --git a/web/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata.ts b/web/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata.ts index 3bb6e1d6ed..f350fd7b8b 100644 --- a/web/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata.ts +++ b/web/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata.ts @@ -9,12 +9,14 @@ import { t } from 'i18next' type Props = { datasetId: string docList: SimpleDocumentDetail[] + selectedDocumentIds?: string[] onUpdate: () => void } const useBatchEditDocumentMetadata = ({ datasetId, docList, + selectedDocumentIds, onUpdate, }: Props) => { const [isShowEditModal, { @@ -79,9 +81,12 @@ const useBatchEditDocumentMetadata = ({ return false }) - const res: MetadataBatchEditToServer = docList.map((item, i) => { - // the new metadata will override the old one - const oldMetadataList = metaDataList[i] + // Use selectedDocumentIds if available, otherwise fall back to docList + const documentIds = selectedDocumentIds || docList.map(doc => doc.id) + const res: MetadataBatchEditToServer = documentIds.map((documentId) => { + // Find the document in docList to get its metadata + const docIndex = docList.findIndex(doc => doc.id === documentId) + const oldMetadataList = docIndex >= 0 ? metaDataList[docIndex] : [] let newMetadataList: MetadataItemWithValue[] = [...oldMetadataList, ...addedList] .filter((item) => { return !removedList.find(removedItem => removedItem.id === item.id) @@ -108,7 +113,7 @@ const useBatchEditDocumentMetadata = ({ }) return { - document_id: item.id, + document_id: documentId, metadata_list: newMetadataList, } }) diff --git a/web/app/components/develop/doc.tsx b/web/app/components/develop/doc.tsx index c61cc09863..65e6d4aec0 100644 --- a/web/app/components/develop/doc.tsx +++ b/web/app/components/develop/doc.tsx @@ -16,7 +16,7 @@ import TemplateChatEn from './template/template_chat.en.mdx' import TemplateChatZh from './template/template_chat.zh.mdx' import TemplateChatJa from './template/template_chat.ja.mdx' import I18n from '@/context/i18n' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' import useTheme from '@/hooks/use-theme' import { Theme } from '@/types/app' import cn from '@/utils/classnames' diff --git a/web/app/components/explore/create-app-modal/index.tsx b/web/app/components/explore/create-app-modal/index.tsx index 7e1e59b51b..e94999db04 100644 --- a/web/app/components/explore/create-app-modal/index.tsx +++ b/web/app/components/explore/create-app-modal/index.tsx @@ -27,7 +27,7 @@ export type CreateAppModalProps = { appIconUrl?: string | null appMode?: string appUseIconAsAnswerIcon?: boolean - max_active_requests: number | null + max_active_requests?: number | null onConfirm: (info: { name: string icon_type: AppIconType diff --git a/web/app/components/header/account-setting/language-page/index.tsx b/web/app/components/header/account-setting/language-page/index.tsx index 7d3e09fc21..bf3537b5df 100644 --- a/web/app/components/header/account-setting/language-page/index.tsx +++ b/web/app/components/header/account-setting/language-page/index.tsx @@ -10,7 +10,7 @@ import { updateUserProfile } from '@/service/common' import { ToastContext } from '@/app/components/base/toast' import I18n from '@/context/i18n' import { timezones } from '@/utils/timezone' -import { languages } from '@/i18n/language' +import { languages } from '@/i18n-config/language' const titleClassName = ` mb-2 system-sm-semibold 
text-text-secondary diff --git a/web/app/components/header/account-setting/members-page/index.tsx b/web/app/components/header/account-setting/members-page/index.tsx index 8b536cbe43..6b4da22084 100644 --- a/web/app/components/header/account-setting/members-page/index.tsx +++ b/web/app/components/header/account-setting/members-page/index.tsx @@ -20,7 +20,7 @@ import { Plan } from '@/app/components/billing/type' import Button from '@/app/components/base/button' import UpgradeBtn from '@/app/components/billing/upgrade-btn' import { NUM_INFINITE } from '@/app/components/billing/config' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' import cn from '@/utils/classnames' import Tooltip from '@/app/components/base/tooltip' import { RiPencilLine } from '@remixicon/react' diff --git a/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx b/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx index c60a769e40..7e9cad23eb 100644 --- a/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx +++ b/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx @@ -18,7 +18,7 @@ import ProviderCard from '@/app/components/plugins/provider-card' import List from '@/app/components/plugins/marketplace/list' import type { Plugin } from '@/app/components/plugins/types' import cn from '@/utils/classnames' -import { getLocaleOnClient } from '@/i18n' +import { getLocaleOnClient } from '@/i18n-config' import { getMarketplaceUrl } from '@/utils/var' type InstallFromMarketplaceProps = { diff --git a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx index df9a9cbcf7..f6fb1dc6f6 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx @@ -7,7 +7,7 @@ import { useLanguage } from '../hooks' import { Group } from '@/app/components/base/icons/src/vender/other' import { OpenaiBlue, OpenaiTeal, OpenaiViolet, OpenaiYellow } from '@/app/components/base/icons/src/public/llm' import cn from '@/utils/classnames' -import { renderI18nObject } from '@/i18n' +import { renderI18nObject } from '@/i18n-config' type ModelIconProps = { provider?: Model | ModelProvider diff --git a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx index 253269d920..220c43c9da 100644 --- a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx @@ -3,7 +3,7 @@ import type { ModelProvider } from '../declarations' import { useLanguage } from '../hooks' import { Openai } from '@/app/components/base/icons/src/vender/other' import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm' -import { renderI18nObject } from '@/i18n' +import { renderI18nObject } from '@/i18n-config' import { Theme } from '@/types/app' import cn from '@/utils/classnames' import useTheme from '@/hooks/use-theme' diff --git a/web/app/components/header/maintenance-notice.tsx b/web/app/components/header/maintenance-notice.tsx index f9c00dd01e..4bb4ef7f7d 100644 --- 
a/web/app/components/header/maintenance-notice.tsx +++ b/web/app/components/header/maintenance-notice.tsx @@ -1,6 +1,6 @@ import { useState } from 'react' import { X } from '@/app/components/base/icons/src/vender/line/general' -import { NOTICE_I18N } from '@/i18n/language' +import { NOTICE_I18N } from '@/i18n-config/language' import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks' const MaintenanceNotice = () => { diff --git a/web/app/components/i18n-server.tsx b/web/app/components/i18n-server.tsx index 78d5ca4861..b3ec42dd81 100644 --- a/web/app/components/i18n-server.tsx +++ b/web/app/components/i18n-server.tsx @@ -1,7 +1,7 @@ import React from 'react' import I18N from './i18n' import { ToastProvider } from './base/toast' -import { getLocaleOnServer } from '@/i18n/server' +import { getLocaleOnServer } from '@/i18n-config/server' export type II18NServerProps = { children: React.ReactNode diff --git a/web/app/components/i18n.tsx b/web/app/components/i18n.tsx index 374b1f608f..10ae66da43 100644 --- a/web/app/components/i18n.tsx +++ b/web/app/components/i18n.tsx @@ -3,8 +3,8 @@ import type { FC } from 'react' import React, { useEffect, useState } from 'react' import I18NContext from '@/context/i18n' -import type { Locale } from '@/i18n' -import { setLocaleOnClient } from '@/i18n' +import type { Locale } from '@/i18n-config' +import { setLocaleOnClient } from '@/i18n-config' import Loading from './base/loading' import { usePrefetchQuery } from '@tanstack/react-query' import { getSystemFeatures } from '@/service/common' diff --git a/web/app/components/plugins/card/index.tsx b/web/app/components/plugins/card/index.tsx index bdb3705f6f..34951275ec 100644 --- a/web/app/components/plugins/card/index.tsx +++ b/web/app/components/plugins/card/index.tsx @@ -9,9 +9,9 @@ import Description from './base/description' import Placeholder from './base/placeholder' import cn from '@/utils/classnames' import { useGetLanguage } from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import { useSingleCategories } from '../hooks' -import { renderI18nObject } from '@/i18n' +import { renderI18nObject } from '@/i18n-config' import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import Partner from '../base/badges/partner' import Verified from '../base/badges/verified' diff --git a/web/app/components/plugins/marketplace/description/index.tsx b/web/app/components/plugins/marketplace/description/index.tsx index 68a465d29d..d4749cbbdd 100644 --- a/web/app/components/plugins/marketplace/description/index.tsx +++ b/web/app/components/plugins/marketplace/description/index.tsx @@ -1,7 +1,7 @@ import { getLocaleOnServer, useTranslation as translate, -} from '@/i18n/server' +} from '@/i18n-config/server' type DescriptionProps = { locale?: string diff --git a/web/app/components/plugins/marketplace/hooks.ts b/web/app/components/plugins/marketplace/hooks.ts index c581ee0d91..45a113030f 100644 --- a/web/app/components/plugins/marketplace/hooks.ts +++ b/web/app/components/plugins/marketplace/hooks.ts @@ -17,7 +17,7 @@ import { getFormattedPlugin, getMarketplaceCollectionsAndPlugins, } from './utils' -import i18n from '@/i18n/i18next-config' +import i18n from '@/i18n-config/i18next-config' import { useMutationPluginsFromMarketplace, } from '@/service/use-plugins' @@ -109,6 +109,7 @@ export const useMarketplacePlugins = () => { export const useMixedTranslation = (localeFromOuter?: string) => { let t 
= useTranslation().t + // !localeFromOuter only support zh-Hans and en-US for now if (localeFromOuter) t = i18n.getFixedT(localeFromOuter) @@ -121,8 +122,6 @@ export const useMarketplaceContainerScroll = ( callback: () => void, scrollContainerId = 'marketplace-container', ) => { - const container = document.getElementById(scrollContainerId) - const handleScroll = useCallback((e: Event) => { const target = e.target as HTMLDivElement const { @@ -135,6 +134,7 @@ export const useMarketplaceContainerScroll = ( }, [callback]) useEffect(() => { + const container = document.getElementById(scrollContainerId) if (container) container.addEventListener('scroll', handleScroll) @@ -142,7 +142,7 @@ export const useMarketplaceContainerScroll = ( if (container) container.removeEventListener('scroll', handleScroll) } - }, [container, handleScroll]) + }, [handleScroll]) } export const useSearchBoxAutoAnimate = (searchBoxAutoAnimate?: boolean) => { diff --git a/web/app/components/plugins/marketplace/intersection-line/hooks.ts b/web/app/components/plugins/marketplace/intersection-line/hooks.ts index fe30b707cb..0104cc6269 100644 --- a/web/app/components/plugins/marketplace/intersection-line/hooks.ts +++ b/web/app/components/plugins/marketplace/intersection-line/hooks.ts @@ -2,7 +2,7 @@ import { useEffect } from 'react' import { useMarketplaceContext } from '@/app/components/plugins/marketplace/context' export const useScrollIntersection = ( - anchorRef: React.RefObject, + anchorRef: React.RefObject, intersectionContainerId = 'marketplace-container', ) => { const intersected = useMarketplaceContext(v => v.intersected) diff --git a/web/app/components/plugins/marketplace/list/list-with-collection.tsx b/web/app/components/plugins/marketplace/list/list-with-collection.tsx index 4c396c565f..7c8a30f499 100644 --- a/web/app/components/plugins/marketplace/list/list-with-collection.tsx +++ b/web/app/components/plugins/marketplace/list/list-with-collection.tsx @@ -4,7 +4,7 @@ import { RiArrowRightSLine } from '@remixicon/react' import type { MarketplaceCollection } from '../types' import CardWrapper from './card-wrapper' import type { Plugin } from '@/app/components/plugins/types' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import cn from '@/utils/classnames' import type { SearchParamsFromCollection } from '@/app/components/plugins/marketplace/types' import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' diff --git a/web/app/components/plugins/marketplace/search-box/index.tsx b/web/app/components/plugins/marketplace/search-box/index.tsx index 5f19afbba6..5c6f04e3bd 100644 --- a/web/app/components/plugins/marketplace/search-box/index.tsx +++ b/web/app/components/plugins/marketplace/search-box/index.tsx @@ -8,6 +8,7 @@ import { RiAddLine } from '@remixicon/react' type SearchBoxProps = { search: string onSearchChange: (search: string) => void + wrapperClassName?: string inputClassName?: string tags: string[] onTagsChange: (tags: string[]) => void @@ -21,6 +22,7 @@ type SearchBoxProps = { const SearchBox = ({ search, onSearchChange, + wrapperClassName, inputClassName, tags, onTagsChange, @@ -32,7 +34,7 @@ const SearchBox = ({ }: SearchBoxProps) => { return (
= ({ useEffect(() => { const newInputs: Record = {} promptConfig.prompt_variables.forEach((item) => { - if (item.type === 'string' || item.type === 'paragraph') + if (item.type === 'select') + newInputs[item.key] = item.default + else if (item.type === 'string' || item.type === 'paragraph') newInputs[item.key] = '' else newInputs[item.key] = undefined diff --git a/web/app/components/tools/add-tool-modal/category.tsx b/web/app/components/tools/add-tool-modal/category.tsx index bfad270061..270b4fc2bf 100644 --- a/web/app/components/tools/add-tool-modal/category.tsx +++ b/web/app/components/tools/add-tool-modal/category.tsx @@ -6,7 +6,7 @@ import { useMount } from 'ahooks' import cn from '@/utils/classnames' import { Apps02 } from '@/app/components/base/icons/src/vender/line/others' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import { useStore as useLabelStore } from '@/app/components/tools/labels/store' import { fetchLabelList } from '@/service/tools' diff --git a/web/app/components/tools/edit-custom-collection-modal/test-api.tsx b/web/app/components/tools/edit-custom-collection-modal/test-api.tsx index a03364aa3d..80267a7fe6 100644 --- a/web/app/components/tools/edit-custom-collection-modal/test-api.tsx +++ b/web/app/components/tools/edit-custom-collection-modal/test-api.tsx @@ -11,7 +11,7 @@ import Input from '@/app/components/base/input' import Drawer from '@/app/components/base/drawer-plus' import I18n from '@/context/i18n' import { testAPIAvailable } from '@/service/tools' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' type Props = { positionCenter?: boolean diff --git a/web/app/components/tools/marketplace/index.tsx b/web/app/components/tools/marketplace/index.tsx index 3716f2177a..9341e542c9 100644 --- a/web/app/components/tools/marketplace/index.tsx +++ b/web/app/components/tools/marketplace/index.tsx @@ -7,7 +7,7 @@ import { useTranslation } from 'react-i18next' import type { useMarketplace } from './hooks' import List from '@/app/components/plugins/marketplace/list' import Loading from '@/app/components/base/loading' -import { getLocaleOnClient } from '@/i18n' +import { getLocaleOnClient } from '@/i18n-config' import { getMarketplaceUrl } from '@/utils/var' type MarketplaceProps = { diff --git a/web/app/components/tools/mcp/create-card.tsx b/web/app/components/tools/mcp/create-card.tsx index 7416f85a2f..5a2a64af72 100644 --- a/web/app/components/tools/mcp/create-card.tsx +++ b/web/app/components/tools/mcp/create-card.tsx @@ -9,7 +9,7 @@ import { } from '@remixicon/react' import MCPModal from './modal' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import { useAppContext } from '@/context/app-context' import { useCreateMCP } from '@/service/use-tools' import type { ToolWithProvider } from '@/app/components/workflow/types' diff --git a/web/app/components/tools/mcp/detail/tool-item.tsx b/web/app/components/tools/mcp/detail/tool-item.tsx index dec82edcca..7a5ea6143d 100644 --- a/web/app/components/tools/mcp/detail/tool-item.tsx +++ b/web/app/components/tools/mcp/detail/tool-item.tsx @@ -3,7 +3,7 @@ import React from 'react' import { useContext } from 'use-context-selector' import type { Tool } from '@/app/components/tools/types' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from 
'@/i18n-config/language' import Tooltip from '@/app/components/base/tooltip' import cn from '@/utils/classnames' diff --git a/web/app/components/tools/mcp/modal.tsx b/web/app/components/tools/mcp/modal.tsx index 88e831bc3a..b7202f5242 100644 --- a/web/app/components/tools/mcp/modal.tsx +++ b/web/app/components/tools/mcp/modal.tsx @@ -95,8 +95,12 @@ const MCPModal = ({ setAppIcon({ type: 'image', url: res.url, fileId: extractFileId(res.url) || '' }) } catch (e) { + let errorMessage = 'Failed to fetch remote icon' + const errorData = await (e as Response).json() + if (errorData?.code) + errorMessage = `Upload failed: ${errorData.code}` console.error('Failed to fetch remote icon:', e) - Toast.notify({ type: 'warning', message: 'Failed to fetch remote icon' }) + Toast.notify({ type: 'warning', message: errorMessage }) } finally { setIsFetchingIcon(false) diff --git a/web/app/components/tools/provider/custom-create-card.tsx b/web/app/components/tools/provider/custom-create-card.tsx index ab5b85e260..dbb7026aba 100644 --- a/web/app/components/tools/provider/custom-create-card.tsx +++ b/web/app/components/tools/provider/custom-create-card.tsx @@ -9,7 +9,7 @@ import { } from '@remixicon/react' import type { CustomCollectionBackend } from '../types' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import EditCustomToolModal from '@/app/components/tools/edit-custom-collection-modal' import { createCustomCollection } from '@/service/tools' import Toast from '@/app/components/base/toast' diff --git a/web/app/components/tools/provider/detail.tsx b/web/app/components/tools/provider/detail.tsx index 20c7017ded..87d09bd527 100644 --- a/web/app/components/tools/provider/detail.tsx +++ b/web/app/components/tools/provider/detail.tsx @@ -11,7 +11,7 @@ import type { Collection, CustomCollectionBackend, Tool, WorkflowToolProviderReq import ToolItem from './tool-item' import cn from '@/utils/classnames' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import Confirm from '@/app/components/base/confirm' import Button from '@/app/components/base/button' import Indicator from '@/app/components/header/indicator' diff --git a/web/app/components/tools/provider/tool-item.tsx b/web/app/components/tools/provider/tool-item.tsx index d79d20cb9c..7ad202fca5 100644 --- a/web/app/components/tools/provider/tool-item.tsx +++ b/web/app/components/tools/provider/tool-item.tsx @@ -4,7 +4,7 @@ import { useContext } from 'use-context-selector' import type { Collection, Tool } from '../types' import cn from '@/utils/classnames' import I18n from '@/context/i18n' -import { getLanguage } from '@/i18n/language' +import { getLanguage } from '@/i18n-config/language' import SettingBuiltInTool from '@/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool' type Props = { diff --git a/web/app/components/tools/workflow-tool/configure-button.tsx b/web/app/components/tools/workflow-tool/configure-button.tsx index 0c72f82f39..bf418750b1 100644 --- a/web/app/components/tools/workflow-tool/configure-button.tsx +++ b/web/app/components/tools/workflow-tool/configure-button.tsx @@ -179,8 +179,8 @@ const WorkflowToolConfigureButton = ({ {(!published || !isLoading) && (
{isCurrentWorkspaceManager ? ( diff --git a/web/app/components/workflow-app/index.tsx b/web/app/components/workflow-app/index.tsx index 471d4de0d8..8895253a9f 100644 --- a/web/app/components/workflow-app/index.tsx +++ b/web/app/components/workflow-app/index.tsx @@ -19,6 +19,7 @@ import { FeaturesProvider } from '@/app/components/base/features' import type { Features as FeaturesData } from '@/app/components/base/features/types' import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants' import { fetchFileUploadConfig } from '@/service/common' +import { useAppContext } from '@/context/app-context' import WorkflowWithDefaultContext from '@/app/components/workflow' import { WorkflowContextProvider, @@ -31,6 +32,7 @@ const WorkflowAppWithAdditionalContext = () => { data, isLoading, } = useWorkflowInit() + const { isLoadingCurrentWorkspace, currentWorkspace } = useAppContext() const { data: fileUploadConfigResponse } = useSWR({ url: '/files/upload' }, fetchFileUploadConfig) const nodesData = useMemo(() => { @@ -46,7 +48,7 @@ const WorkflowAppWithAdditionalContext = () => { return [] }, [data]) - if (!data || isLoading) { + if (!data || isLoading || isLoadingCurrentWorkspace || !currentWorkspace.id) { return (
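The workflow hooks and iteration panel hunks below replace the hardcoded PARALLEL_LIMIT with the env-driven MAX_PARALLEL_LIMIT from @/config, and the test file at the top of this patch pins down how that value must be parsed. The diff does not show the config implementation itself, so the following is only a sketch that satisfies those tests ('25' parses to 25; unset, empty, non-numeric, zero, and negative values fall back to 10; '12.7' truncates to 12):

const DEFAULT_MAX_PARALLEL_LIMIT = 10

function parseMaxParallelLimit(raw: string | undefined): number {
  if (!raw)
    return DEFAULT_MAX_PARALLEL_LIMIT
  const parsed = Number.parseInt(raw, 10)
  // NaN covers non-numeric input; non-positive values are rejected so the
  // iteration panel never advertises a zero or negative parallelism cap.
  return Number.isNaN(parsed) || parsed <= 0 ? DEFAULT_MAX_PARALLEL_LIMIT : parsed
}

export const MAX_PARALLEL_LIMIT = parseMaxParallelLimit(process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT)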
diff --git a/web/app/components/workflow/block-selector/blocks.tsx b/web/app/components/workflow/block-selector/blocks.tsx index 4182530a91..27f8847655 100644 --- a/web/app/components/workflow/block-selector/blocks.tsx +++ b/web/app/components/workflow/block-selector/blocks.tsx @@ -70,6 +70,7 @@ const Blocks = ({ key={block.type} position='right' popupClassName='w-[200px]' + needsDelay={false} popupContent={(
{ const appDetail = useAppStore(s => s.appDetail) @@ -270,8 +270,6 @@ export const useWorkflow = () => { }) setNodes(newNodes) } - - // eslint-disable-next-line react-hooks/exhaustive-deps }, [store]) const isVarUsedInNodes = useCallback((varSelector: ValueSelector) => { @@ -310,9 +308,9 @@ export const useWorkflow = () => { edges, } = store.getState() const connectedEdges = edges.filter(edge => edge.source === nodeId && edge.sourceHandle === nodeHandle) - if (connectedEdges.length > PARALLEL_LIMIT - 1) { + if (connectedEdges.length > MAX_PARALLEL_LIMIT - 1) { const { setShowTips } = workflowStore.getState() - setShowTips(t('workflow.common.parallelTip.limit', { num: PARALLEL_LIMIT })) + setShowTips(t('workflow.common.parallelTip.limit', { num: MAX_PARALLEL_LIMIT })) return false } diff --git a/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx b/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx index b0dde28988..b3308b4145 100644 --- a/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx +++ b/web/app/components/workflow/nodes/_base/components/before-run-form/form-item.tsx @@ -160,7 +160,7 @@ const FormItem: FC = ({ type === InputVarType.select && ( handleValueChange(e.target.value)} placeholder={placeholder?.[language] || placeholder?.en_US} /> @@ -242,7 +242,7 @@ const FormInputItem: FC = ({ )} @@ -251,7 +251,7 @@ const FormInputItem: FC = ({ popupClassName='!w-[387px]' isAdvancedMode isInWorkflow - value={varInput} + value={varInput?.value} setModel={handleAppOrModelSelect} readonly={readOnly} scope={scope} diff --git a/web/app/components/workflow/nodes/_base/components/variable/utils.ts b/web/app/components/workflow/nodes/_base/components/variable/utils.ts index 9b8edd3049..95e6a1eafa 100644 --- a/web/app/components/workflow/nodes/_base/components/variable/utils.ts +++ b/web/app/components/workflow/nodes/_base/components/variable/utils.ts @@ -49,6 +49,13 @@ export const isConversationVar = (valueSelector: ValueSelector) => { return valueSelector[0] === 'conversation' } +export const hasValidChildren = (children: any): boolean => { + return children && ( + (Array.isArray(children) && children.length > 0) + || (!Array.isArray(children) && Object.keys((children as StructuredOutput)?.schema?.properties || {}).length > 0) + ) +} + export const inputVarTypeToVarType = (type: InputVarType): VarType => { return ({ [InputVarType.number]: VarType.number, @@ -140,19 +147,57 @@ const findExceptVarInObject = (obj: any, filterVar: (payload: Var, selector: Val if (isStructuredOutput) { childrenResult = findExceptVarInStructuredOutput(children, filterVar) } - else if (Array.isArray(children)) { - childrenResult = children.filter((item: Var) => { - const { children: itemChildren } = item - const currSelector = [...value_selector, item.variable] + else if (Array.isArray(children)) { + childrenResult = children + .map((item: Var) => { + const { children: itemChildren } = item + const currSelector = [...value_selector, item.variable] - if (!itemChildren) - return filterVar(item, currSelector) + if (!itemChildren) { + return { + item, + filteredObj: null, + passesFilter: filterVar(item, currSelector), + } + } - const filteredObj = findExceptVarInObject(item, filterVar, currSelector, false) // File doesn't contain file children - return filteredObj.children && (filteredObj.children as Var[])?.length > 0 - }) + const filteredObj = findExceptVarInObject(item, filterVar, currSelector, false) + const itemHasValidChildren = 
hasValidChildren(filteredObj.children) + + let passesFilter + if ((item.type === VarType.object || item.type === VarType.file) && itemChildren) + passesFilter = itemHasValidChildren || filterVar(item, currSelector) + else + passesFilter = itemHasValidChildren + + return { + item, + filteredObj, + passesFilter, + } + }) + .filter(({ passesFilter }) => passesFilter) + .map(({ item, filteredObj }) => { + const { children: itemChildren } = item + if (!itemChildren || !filteredObj) + return item + + return { + ...item, + children: filteredObj.children, + } + }) + + if (isFile && Array.isArray(childrenResult)) { + if (childrenResult.length === 0) { + childrenResult = OUTPUT_FILE_SUB_VARIABLES.map(key => ({ + variable: key, + type: key === 'size' ? VarType.number : VarType.string, + })) + } + } } - else { + else { childrenResult = [] } diff --git a/web/app/components/workflow/nodes/agent/default.ts b/web/app/components/workflow/nodes/agent/default.ts index 51955dc6c2..d312d4d558 100644 --- a/web/app/components/workflow/nodes/agent/default.ts +++ b/web/app/components/workflow/nodes/agent/default.ts @@ -3,7 +3,7 @@ import { ALL_CHAT_AVAILABLE_BLOCKS, ALL_COMPLETION_AVAILABLE_BLOCKS } from '@/ap import type { NodeDefault } from '../../types' import type { AgentNodeType } from './types' import { FormTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations' -import { renderI18nObject } from '@/i18n' +import { renderI18nObject } from '@/i18n-config' const nodeDefault: NodeDefault = { defaultValue: { diff --git a/web/app/components/workflow/nodes/document-extractor/panel.tsx b/web/app/components/workflow/nodes/document-extractor/panel.tsx index a91608c717..572ca366ca 100644 --- a/web/app/components/workflow/nodes/document-extractor/panel.tsx +++ b/web/app/components/workflow/nodes/document-extractor/panel.tsx @@ -13,7 +13,7 @@ import { fetchSupportFileTypes } from '@/service/datasets' import Field from '@/app/components/workflow/nodes/_base/components/field' import { BlockEnum, type NodePanelProps } from '@/app/components/workflow/types' import I18n from '@/context/i18n' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' const i18nPrefix = 'workflow.nodes.docExtractor' diff --git a/web/app/components/workflow/nodes/http/components/timeout/index.tsx b/web/app/components/workflow/nodes/http/components/timeout/index.tsx index b0fd3b229e..40ebab0e2a 100644 --- a/web/app/components/workflow/nodes/http/components/timeout/index.tsx +++ b/web/app/components/workflow/nodes/http/components/timeout/index.tsx @@ -20,7 +20,7 @@ const InputField: FC<{ description: string placeholder: string value?: number - onChange: (value: number) => void + onChange: (value: number | undefined) => void readOnly?: boolean min: number max: number @@ -35,8 +35,18 @@ const InputField: FC<{ type='number' value={value} onChange={(e) => { - const value = Math.max(min, Math.min(max, Number.parseInt(e.target.value, 10))) - onChange(value) + const inputValue = e.target.value + if (inputValue === '') { + // When user clears the input, set to undefined to let backend use default values + onChange(undefined) + } + else { + const parsedValue = Number.parseInt(inputValue, 10) + if (!Number.isNaN(parsedValue)) { + const value = Math.max(min, Math.min(max, parsedValue)) + onChange(value) + } + } }} placeholder={placeholder} readOnly={readOnly} diff --git a/web/app/components/workflow/nodes/iteration/panel.tsx 
b/web/app/components/workflow/nodes/iteration/panel.tsx index 4b529f0785..23e93b0dd5 100644 --- a/web/app/components/workflow/nodes/iteration/panel.tsx +++ b/web/app/components/workflow/nodes/iteration/panel.tsx @@ -3,7 +3,7 @@ import React from 'react' import { useTranslation } from 'react-i18next' import VarReferencePicker from '../_base/components/variable/var-reference-picker' import Split from '../_base/components/split' -import { MAX_ITERATION_PARALLEL_NUM, MIN_ITERATION_PARALLEL_NUM } from '../../constants' +import { MIN_ITERATION_PARALLEL_NUM } from '../../constants' import type { IterationNodeType } from './types' import useConfig from './use-config' import { ErrorHandleMode, type NodePanelProps } from '@/app/components/workflow/types' @@ -12,6 +12,7 @@ import Switch from '@/app/components/base/switch' import Select from '@/app/components/base/select' import Slider from '@/app/components/base/slider' import Input from '@/app/components/base/input' +import { MAX_PARALLEL_LIMIT } from '@/config' const i18nPrefix = 'workflow.nodes.iteration' @@ -96,11 +97,11 @@ const Panel: FC> = ({ inputs.is_parallel && (
{t(`${i18nPrefix}.MaxParallelismDesc`)}
}>
- { changeParallelNums(Number(e.target.value)) }} /> + { changeParallelNums(Number(e.target.value)) }} /> diff --git a/web/app/components/workflow/nodes/iteration/use-interactions.ts b/web/app/components/workflow/nodes/iteration/use-interactions.ts index c294cfd6aa..e0c0b222aa 100644 --- a/web/app/components/workflow/nodes/iteration/use-interactions.ts +++ b/web/app/components/workflow/nodes/iteration/use-interactions.ts @@ -4,6 +4,7 @@ import { useTranslation } from 'react-i18next' import { useStoreApi } from 'reactflow' import type { BlockEnum, + ChildNodeTypeCount, Node, } from '../../types' import { @@ -113,10 +114,17 @@ export const useNodeIterationInteractions = () => { const nodes = getNodes() const childrenNodes = nodes.filter(n => n.parentId === nodeId && n.type !== CUSTOM_ITERATION_START_NODE) const newIdMapping = { ...idMapping } + const childNodeTypeCount: ChildNodeTypeCount = {} const copyChildren = childrenNodes.map((child, index) => { const childNodeType = child.data.type as BlockEnum const nodesWithSameType = nodes.filter(node => node.data.type === childNodeType) + + if(!childNodeTypeCount[childNodeType]) + childNodeTypeCount[childNodeType] = nodesWithSameType.length + 1 + else + childNodeTypeCount[childNodeType] = childNodeTypeCount[childNodeType] + 1 + const { newNode } = generateNewNode({ type: getNodeCustomTypeByNodeDataType(childNodeType), data: { @@ -126,7 +134,7 @@ export const useNodeIterationInteractions = () => { _isBundled: false, _connectedSourceHandleIds: [], _connectedTargetHandleIds: [], - title: nodesWithSameType.length > 0 ? `${t(`workflow.blocks.${childNodeType}`)} ${nodesWithSameType.length + 1}` : t(`workflow.blocks.${childNodeType}`), + title: nodesWithSameType.length > 0 ? `${t(`workflow.blocks.${childNodeType}`)} ${childNodeTypeCount[childNodeType]}` : t(`workflow.blocks.${childNodeType}`), iteration_id: newNodeId, }, position: child.position, diff --git a/web/app/components/workflow/nodes/list-operator/components/filter-condition.tsx b/web/app/components/workflow/nodes/list-operator/components/filter-condition.tsx index 01ce5e645d..cdbb5965f4 100644 --- a/web/app/components/workflow/nodes/list-operator/components/filter-condition.tsx +++ b/web/app/components/workflow/nodes/list-operator/components/filter-condition.tsx @@ -1,25 +1,37 @@ 'use client' import type { FC } from 'react' -import React, { useCallback, useMemo } from 'react' +import React, { useCallback, useMemo, useState } from 'react' import { useTranslation } from 'react-i18next' import ConditionOperator from '../../if-else/components/condition-list/condition-operator' -import { VarType } from '../../../types' import type { Condition } from '../types' import { ComparisonOperator } from '../../if-else/types' import { comparisonOperatorNotRequireValue, getOperators } from '../../if-else/utils' import SubVariablePicker from './sub-variable-picker' -import Input from '@/app/components/base/input' import { FILE_TYPE_OPTIONS, TRANSFER_METHOD } from '@/app/components/workflow/nodes/constants' import { SimpleSelect as Select } from '@/app/components/base/select' import BoolValue from '../../../panel/chat-variable-panel/components/bool-value' +import Input from '@/app/components/workflow/nodes/_base/components/input-support-select-var' +import useAvailableVarList from '@/app/components/workflow/nodes/_base/hooks/use-available-var-list' +import cn from '@/utils/classnames' +import { VarType } from '../../../types' const optionNameI18NPrefix = 'workflow.nodes.ifElse.optionName' + +const 
VAR_INPUT_SUPPORTED_KEYS: Record<string, VarType> = { + name: VarType.string, + url: VarType.string, + extension: VarType.string, + mime_type: VarType.string, + related_id: VarType.number, +} + type Props = { condition: Condition - onChange: (condition: Condition) => void varType: VarType + onChange: (condition: Condition) => void hasSubVariable: boolean readOnly: boolean + nodeId: string } const FilterCondition: FC<Props> = ({ @@ -28,10 +40,25 @@ const FilterCondition: FC<Props> = ({ onChange, hasSubVariable, readOnly, + nodeId, }) => { const { t } = useTranslation() + const [isFocus, setIsFocus] = useState(false) + + const expectedVarType = VAR_INPUT_SUPPORTED_KEYS[condition.key] + const supportVariableInput = !!expectedVarType + + const { availableVars, availableNodesWithParent } = useAvailableVarList(nodeId, { + onlyLeafNodeVar: false, + filterVar: (varPayload) => { + return expectedVarType ? varPayload.type === expectedVarType : true + }, + }) + const isSelect = [ComparisonOperator.in, ComparisonOperator.notIn, ComparisonOperator.allOf].includes(condition.comparison_operator) const isArrayValue = condition.key === 'transfer_method' || condition.key === 'type' + const isBoolean = varType === VarType.boolean + const selectOptions = useMemo(() => { if (isSelect) { if (condition.key === 'type' || condition.comparison_operator === ComparisonOperator.allOf) { @@ -50,6 +77,7 @@ const FilterCondition: FC<Props> = ({ } return [] }, [condition.comparison_operator, condition.key, isSelect, t]) + const handleChange = useCallback((key: string) => { return (value: any) => { onChange({ @@ -60,12 +88,14 @@ const FilterCondition: FC<Props> = ({ }, [condition, onChange, isArrayValue]) const handleSubVariableChange = useCallback((value: string) => { + const operators = getOperators(expectedVarType ?? VarType.string, { key: value }) + const newOperator = operators.length > 0 ? operators[0] : ComparisonOperator.equal onChange({ key: value, - comparison_operator: getOperators(varType, { key: value })[0], + comparison_operator: newOperator, value: '', }) - }, [onChange, varType]) + }, [onChange, expectedVarType]) return (
@@ -79,7 +109,7 @@ const FilterCondition: FC<Props> = ({
= ({ placeholder='Select value' /> )} - {!isSelect && varType !== VarType.boolean && ( + {!isSelect && !isBoolean && supportVariableInput && ( handleChange('value')(e.target.value)} + onChange={handleChange('value')} + readOnly={readOnly} + nodesOutputVars={availableVars} + availableNodes={availableNodesWithParent} + onFocusChange={setIsFocus} + placeholder={!readOnly ? t('workflow.nodes.http.extractListPlaceholder')! : ''} + placeholderClassName='!leading-[21px]' /> )} - {!isSelect && varType === VarType.boolean && ( + + {!isSelect && !isBoolean && !supportVariableInput && ( + handleChange('value')(e.target.value)} + readOnly={readOnly} + /> + )} + {!isSelect && isBoolean && ( = ({
) } + export default React.memo(FilterCondition) diff --git a/web/app/components/workflow/nodes/list-operator/panel.tsx b/web/app/components/workflow/nodes/list-operator/panel.tsx index d93a79397d..9a89629f09 100644 --- a/web/app/components/workflow/nodes/list-operator/panel.tsx +++ b/web/app/components/workflow/nodes/list-operator/panel.tsx @@ -78,6 +78,7 @@ const Panel: FC> = ({ varType={itemVarType} hasSubVariable={hasSubVariable} readOnly={readOnly} + nodeId={id} /> ) : null} diff --git a/web/app/components/workflow/nodes/question-classifier/components/class-list.tsx b/web/app/components/workflow/nodes/question-classifier/components/class-list.tsx index d0297cfd74..8758490bee 100644 --- a/web/app/components/workflow/nodes/question-classifier/components/class-list.tsx +++ b/web/app/components/workflow/nodes/question-classifier/components/class-list.tsx @@ -64,55 +64,56 @@ const ClassList: FC = ({ const handleSideWidth = 3 // Todo Remove; edit topic name return ( - ({ ...item }))} - setList={handleSortTopic} - handle='.handle' - ghostClass='bg-components-panel-bg' - animation={150} - disabled={readonly} - className='space-y-2' - > - { - list.map((item, index) => { - const canDrag = (() => { - if (readonly) - return false + <> + ({ ...item }))} + setList={handleSortTopic} + handle='.handle' + ghostClass='bg-components-panel-bg' + animation={150} + disabled={readonly} + className='space-y-2' + > + { + list.map((item, index) => { + const canDrag = (() => { + if (readonly) + return false - return topicCount >= 2 - })() - return ( -
- + return topicCount >= 2 + })() + return ( +
- ) - }) - } + ) + }) + } + {!readonly && ( )} - - + ) } export default React.memo(ClassList) diff --git a/web/app/components/workflow/nodes/tool/node.tsx b/web/app/components/workflow/nodes/tool/node.tsx index e15ddcaaaa..8cc3ec580d 100644 --- a/web/app/components/workflow/nodes/tool/node.tsx +++ b/web/app/components/workflow/nodes/tool/node.tsx @@ -22,13 +22,13 @@ const Node: FC> = ({ {key}
{typeof tool_configurations[key].value === 'string' && ( -
+
{paramSchemas?.find(i => i.name === key)?.type === FormTypeEnum.secretInput ? '********' : tool_configurations[key].value}
)} {typeof tool_configurations[key].value === 'number' && ( -
- {tool_configurations[key].value} +
+ {Number.isNaN(tool_configurations[key].value) ? '' : tool_configurations[key].value}
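The `Number.isNaN` guard in the `+` line above stops a malformed numeric tool setting from rendering the literal text "NaN" in the node summary. A standalone sketch of the same guard (the helper name is illustrative, not from this patch):

```ts
// Render a numeric tool-configuration value; NaN (e.g. from a failed
// Number(...) conversion upstream) becomes an empty string instead of "NaN".
const renderNumberValue = (value: number): string =>
  Number.isNaN(value) ? '' : String(value)

// renderNumberValue(3)   => '3'
// renderNumberValue(NaN) => ''
```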
)} {typeof tool_configurations[key] !== 'string' && tool_configurations[key]?.type === FormTypeEnum.modelSelector && ( diff --git a/web/app/components/workflow/operator/tip-popup.tsx b/web/app/components/workflow/operator/tip-popup.tsx index 7946e1ddaa..3721ed8118 100644 --- a/web/app/components/workflow/operator/tip-popup.tsx +++ b/web/app/components/workflow/operator/tip-popup.tsx @@ -14,6 +14,7 @@ const TipPopup = ({ }: TipPopupProps) => { return ( s.appDetail) const workflowStore = useWorkflowStore() - const inputs = useStore(s => s.inputs) + const { inputs, setInputs } = useStore(s => ({ + inputs: s.inputs, + setInputs: s.setInputs, + })) + + const initialInputs = useMemo(() => { + const initInputs: Record = {} + if (startVariables) { + startVariables.forEach((variable) => { + if (variable.default) + initInputs[variable.variable] = variable.default + }) + } + return initInputs + }, [startVariables]) + const features = useFeatures(s => s.features) const config = useMemo(() => { return { @@ -82,6 +97,11 @@ const ChatWrapper = ( taskId => stopChatMessageResponding(appDetail!.id, taskId), ) + const handleRestartChat = useCallback(() => { + handleRestart() + setInputs(initialInputs) + }, [handleRestart, setInputs, initialInputs]) + const doSend: OnSend = useCallback((message, files, isRegenerate = false, parentAnswer: ChatItem | null = null) => { handleSend( { @@ -115,9 +135,18 @@ const ChatWrapper = ( useImperativeHandle(ref, () => { return { - handleRestart, + handleRestart: handleRestartChat, } - }, [handleRestart]) + }, [handleRestartChat]) + + useEffect(() => { + if (Object.keys(initialInputs).length > 0) { + setInputs({ + ...initialInputs, + ...inputs, + }) + } + }, [initialInputs]) useEffect(() => { if (isResponding) diff --git a/web/app/components/workflow/panel/inputs-panel.tsx b/web/app/components/workflow/panel/inputs-panel.tsx index 8be8d810f0..64ac6d8686 100644 --- a/web/app/components/workflow/panel/inputs-panel.tsx +++ b/web/app/components/workflow/panel/inputs-panel.tsx @@ -1,6 +1,7 @@ import { memo, useCallback, + useEffect, useMemo, } from 'react' import { useTranslation } from 'react-i18next' @@ -32,9 +33,12 @@ type Props = { const InputsPanel = ({ onRun }: Props) => { const { t } = useTranslation() const workflowStore = useWorkflowStore() + const { inputs, setInputs } = useStore(s => ({ + inputs: s.inputs, + setInputs: s.setInputs, + })) const fileSettings = useFeatures(s => s.features.file) const nodes = useNodes() - const inputs = useStore(s => s.inputs) const files = useStore(s => s.files) const workflowRunningData = useStore(s => s.workflowRunningData) const { @@ -44,6 +48,24 @@ const InputsPanel = ({ onRun }: Props) => { const startVariables = startNode?.data.variables const { checkInputsForm } = useCheckInputsForms() + const initialInputs = useMemo(() => { + const initInputs: Record = {} + if (startVariables) { + startVariables.forEach((variable) => { + if (variable.default) + initInputs[variable.variable] = variable.default + }) + } + return initInputs + }, [startVariables]) + + useEffect(() => { + setInputs({ + ...initialInputs, + ...inputs, + }) + }, [initialInputs]) + const variables = useMemo(() => { const data = startVariables || [] if (fileSettings?.image?.enabled) { diff --git a/web/app/components/workflow/types.ts b/web/app/components/workflow/types.ts index 3a8cdf518a..ab6f3fd194 100644 --- a/web/app/components/workflow/types.ts +++ b/web/app/components/workflow/types.ts @@ -448,3 +448,7 @@ export enum VersionHistoryContextMenuOptions { edit = 'edit', 
delete = 'delete', } + +export interface ChildNodeTypeCount { + [key: string]: number; +} diff --git a/web/app/dev-only/i18n-checker/page.tsx b/web/app/dev-only/i18n-checker/page.tsx deleted file mode 100644 index d821979bb9..0000000000 --- a/web/app/dev-only/i18n-checker/page.tsx +++ /dev/null @@ -1,175 +0,0 @@ -'use client' -import { loadLangResources } from '@/i18n/i18next-config' -import { useCallback, useEffect, useState } from 'react' -import cn from '@/utils/classnames' -import { LanguagesSupported } from '@/i18n/language' - -export default function I18nTest() { - const [langs, setLangs] = useState([]) - - const getLangs = useCallback(async () => { - const langs = await genLangs() - setLangs(langs) - }, []) - - useEffect(() => { - getLangs() - }, []) - - return ( -
-        Summary
-        # | lang | count | missing | extra (one row per locale via langs.map, showing count, missing.length, extra.length)
-        Details
-        # | lang | missing | extra (per-locale bulleted lists of missing and extra keys via missing.map / extra.map)
-
- -
- ) -} - -async function genLangs() { - const langs_: Lang[] = [] - let en!: Lang - - const resources: Record = {} - // Initialize empty resource object - for (const lang of LanguagesSupported) - resources[lang] = await loadLangResources(lang) - - for (const [key, value] of Object.entries(resources)) { - const keys = getNestedKeys(value.translation) - const lang: Lang = { - locale: key, - keys: new Set(keys), - count: keys.length, - missing: [], - extra: [], - } - - langs_.push(lang) - if (key === 'en-US') en = lang - } - - for (const lang of langs_) { - const missing: string[] = [] - const extra: string[] = [] - - for (const key of lang.keys) - if (!en.keys.has(key)) extra.push(key) - - for (const key of en.keys) - if (!lang.keys.has(key)) missing.push(key) - - lang.missing = missing - lang.extra = extra - } - return langs_ -} - -function getNestedKeys(translation: Record): string[] { - const nestedKeys: string[] = [] - const iterateKeys = (obj: Record, prefix = '') => { - for (const key in obj) { - const nestedKey = prefix ? `${prefix}.${key}` : key - // nestedKeys.push(nestedKey); - if (typeof obj[key] === 'object') iterateKeys(obj[key], nestedKey) - else if (typeof obj[key] === 'string') nestedKeys.push(nestedKey) - } - } - iterateKeys(translation) - return nestedKeys -} - -type Lang = { - locale: string; - keys: Set; - count: number; - missing: string[]; - extra: string[]; -} diff --git a/web/app/dev-only/layout.tsx b/web/app/dev-only/layout.tsx deleted file mode 100644 index d8bcc5e679..0000000000 --- a/web/app/dev-only/layout.tsx +++ /dev/null @@ -1,9 +0,0 @@ -import type React from 'react' -import { notFound } from 'next/navigation' - -export default async function Layout({ children }: React.PropsWithChildren) { - if (process.env.NODE_ENV !== 'development') - notFound() - - return children -} diff --git a/web/app/dev-preview/page.tsx b/web/app/dev-preview/page.tsx deleted file mode 100644 index 69464d612a..0000000000 --- a/web/app/dev-preview/page.tsx +++ /dev/null @@ -1,11 +0,0 @@ -'use client' - -import DemoForm from '../components/base/form/form-scenarios/demo' - -export default function Page() { - return ( -
- -
- ) -} diff --git a/web/app/layout.tsx b/web/app/layout.tsx index f086499ca4..46afd95b97 100644 --- a/web/app/layout.tsx +++ b/web/app/layout.tsx @@ -3,7 +3,7 @@ import type { Viewport } from 'next' import I18nServer from './components/i18n-server' import BrowserInitializer from './components/browser-initializer' import SentryInitializer from './components/sentry-initializer' -import { getLocaleOnServer } from '@/i18n/server' +import { getLocaleOnServer } from '@/i18n-config/server' import { TanstackQueryInitializer } from '@/context/query-client' import { ThemeProvider } from 'next-themes' import './styles/globals.css' @@ -62,24 +62,25 @@ const LocaleLayout = async ({ className="color-scheme h-full select-auto" {...datasetMap} > - - - - + + + + {children} - - - - + + + + diff --git a/web/app/signin/_header.tsx b/web/app/signin/_header.tsx index 0efd30b6cc..03e05924b8 100644 --- a/web/app/signin/_header.tsx +++ b/web/app/signin/_header.tsx @@ -3,8 +3,8 @@ import React from 'react' import { useContext } from 'use-context-selector' import Select from '@/app/components/base/select/locale' import Divider from '@/app/components/base/divider' -import { languages } from '@/i18n/language' -import type { Locale } from '@/i18n' +import { languages } from '@/i18n-config/language' +import type { Locale } from '@/i18n-config' import I18n from '@/context/i18n' import dynamic from 'next/dynamic' import { useGlobalPublicStore } from '@/context/global-public-context' diff --git a/web/app/signin/invite-settings/page.tsx b/web/app/signin/invite-settings/page.tsx index ea35900968..2bc7eba7a4 100644 --- a/web/app/signin/invite-settings/page.tsx +++ b/web/app/signin/invite-settings/page.tsx @@ -11,7 +11,7 @@ import Input from '@/app/components/base/input' import { SimpleSelect } from '@/app/components/base/select' import Button from '@/app/components/base/button' import { timezones } from '@/utils/timezone' -import { LanguagesSupported, languages } from '@/i18n/language' +import { LanguagesSupported, languages } from '@/i18n-config/language' import I18n from '@/context/i18n' import { activateMember, invitationCheck } from '@/service/common' import Loading from '@/app/components/base/loading' diff --git a/web/app/signin/oneMoreStep.tsx b/web/app/signin/oneMoreStep.tsx index 657455b90d..3293caa8f5 100644 --- a/web/app/signin/oneMoreStep.tsx +++ b/web/app/signin/oneMoreStep.tsx @@ -9,7 +9,7 @@ import Button from '@/app/components/base/button' import Tooltip from '@/app/components/base/tooltip' import { SimpleSelect } from '@/app/components/base/select' import { timezones } from '@/utils/timezone' -import { LanguagesSupported, languages } from '@/i18n/language' +import { LanguagesSupported, languages } from '@/i18n-config/language' import { oneMoreStep } from '@/service/common' import Toast from '@/app/components/base/toast' import { useDocLink } from '@/context/i18n' diff --git a/web/config/index.ts b/web/config/index.ts index 667723aaaf..4a8b07d6e4 100644 --- a/web/config/index.ts +++ b/web/config/index.ts @@ -13,12 +13,18 @@ const getBooleanConfig = (envVar: string | undefined, dataAttrKey: DatasetAttr, } const getNumberConfig = (envVar: string | undefined, dataAttrKey: DatasetAttr, defaultValue: number) => { - if (envVar) - return Number.parseInt(envVar) + if (envVar) { + const parsed = Number.parseInt(envVar) + if (!Number.isNaN(parsed) && parsed > 0) + return parsed + } const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey) - if (attrValue) - return Number.parseInt(attrValue) + if (attrValue) 
{ + const parsed = Number.parseInt(attrValue) + if (!Number.isNaN(parsed) && parsed > 0) + return parsed + } return defaultValue } @@ -265,6 +271,7 @@ export const FULL_DOC_PREVIEW_LENGTH = 50 export const JSON_SCHEMA_MAX_DEPTH = 10 export const MAX_TOOLS_NUM = getNumberConfig(process.env.NEXT_PUBLIC_MAX_TOOLS_NUM, DatasetAttr.DATA_PUBLIC_MAX_TOOLS_NUM, 10) +export const MAX_PARALLEL_LIMIT = getNumberConfig(process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT, DatasetAttr.DATA_PUBLIC_MAX_PARALLEL_LIMIT, 10) export const TEXT_GENERATION_TIMEOUT_MS = getNumberConfig(process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS, DatasetAttr.DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS, 60000) export const LOOP_NODE_MAX_COUNT = getNumberConfig(process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT, DatasetAttr.DATA_PUBLIC_LOOP_NODE_MAX_COUNT, 100) export const MAX_ITERATIONS_NUM = getNumberConfig(process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM, DatasetAttr.DATA_PUBLIC_MAX_ITERATIONS_NUM, 99) diff --git a/web/context/i18n.ts b/web/context/i18n.ts index 932beb9936..c0031c5c7b 100644 --- a/web/context/i18n.ts +++ b/web/context/i18n.ts @@ -2,8 +2,8 @@ import { createContext, useContext, } from 'use-context-selector' -import type { Locale } from '@/i18n' -import { getDocLanguage, getLanguage, getPricingPageLanguage } from '@/i18n/language' +import type { Locale } from '@/i18n-config' +import { getDocLanguage, getLanguage, getPricingPageLanguage } from '@/i18n-config/language' import { noop } from 'lodash-es' type II18NContext = { diff --git a/web/context/web-app-context.tsx b/web/context/web-app-context.tsx index 55f95e4811..db1c5158dd 100644 --- a/web/context/web-app-context.tsx +++ b/web/context/web-app-context.tsx @@ -2,6 +2,7 @@ import type { ChatConfig } from '@/app/components/base/chat/types' import Loading from '@/app/components/base/loading' +import { checkOrSetAccessToken } from '@/app/components/share/utils' import { AccessMode } from '@/models/access-control' import type { AppData, AppMeta } from '@/models/share' import { useGetWebAppAccessModeByCode } from '@/service/use-share' @@ -60,6 +61,8 @@ const WebAppStoreProvider: FC = ({ children }) => { const pathname = usePathname() const searchParams = useSearchParams() const redirectUrlParam = searchParams.get('redirect_url') + const session = searchParams.get('session') + const sysUserId = searchParams.get('sys.user_id') const [shareCode, setShareCode] = useState(null) useEffect(() => { const shareCodeFromRedirect = getShareCodeFromRedirectUrl(redirectUrlParam) @@ -69,11 +72,22 @@ const WebAppStoreProvider: FC = ({ children }) => { updateShareCode(newShareCode) }, [pathname, redirectUrlParam, updateShareCode]) const { isFetching, data: accessModeResult } = useGetWebAppAccessModeByCode(shareCode) + const [isFetchingAccessToken, setIsFetchingAccessToken] = useState(true) useEffect(() => { - if (accessModeResult?.accessMode) + if (accessModeResult?.accessMode) { updateWebAppAccessMode(accessModeResult.accessMode) - }, [accessModeResult, updateWebAppAccessMode]) - if (isFetching) { + if (accessModeResult?.accessMode === AccessMode.PUBLIC && session && sysUserId) { + setIsFetchingAccessToken(true) + checkOrSetAccessToken(shareCode).finally(() => { + setIsFetchingAccessToken(false) + }) + } + else { + setIsFetchingAccessToken(false) + } + } + }, [accessModeResult, updateWebAppAccessMode, setIsFetchingAccessToken, shareCode, session, sysUserId]) + if (isFetching || isFetchingAccessToken) { return
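The hardened `getNumberConfig` in the `web/config/index.ts` hunk above now accepts a parsed value only when it is a positive number, falling through from the environment variable to the body `data-*` attribute and finally to the default. A simplified, self-contained sketch of that precedence chain (the real helper reads the attribute from `document.body` via a `DatasetAttr` key; here it is passed in directly):

```ts
// Sketch under stated assumptions: same precedence as the patched helper,
// with the data attribute supplied as a parameter instead of read from the DOM.
const parsePositiveInt = (raw: string | null | undefined): number | undefined => {
  if (!raw)
    return undefined
  const parsed = Number.parseInt(raw, 10)
  return !Number.isNaN(parsed) && parsed > 0 ? parsed : undefined
}

const getNumberConfigSketch = (
  envVar: string | undefined,
  attrValue: string | null,
  defaultValue: number,
): number =>
  parsePositiveInt(envVar) ?? parsePositiveInt(attrValue) ?? defaultValue

// getNumberConfigSketch('12', null, 10)      => 12
// getNumberConfigSketch('abc', '8', 10)      => 8   (invalid env value skipped)
// getNumberConfigSketch(undefined, '-1', 10) => 10  (non-positive attribute skipped)
```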
diff --git a/web/hooks/use-i18n.ts b/web/hooks/use-i18n.ts index c2356b12a8..606ea9da7f 100644 --- a/web/hooks/use-i18n.ts +++ b/web/hooks/use-i18n.ts @@ -1,5 +1,5 @@ import { useLanguage } from '@/app/components/header/account-setting/model-provider-page/hooks' -import { renderI18nObject } from '@/i18n' +import { renderI18nObject } from '@/i18n-config' export const useRenderI18nObject = () => { const language = useLanguage() diff --git a/web/i18n/DEV.md b/web/i18n-config/DEV.md similarity index 100% rename from web/i18n/DEV.md rename to web/i18n-config/DEV.md diff --git a/web/i18n/README.md b/web/i18n-config/README.md similarity index 99% rename from web/i18n/README.md rename to web/i18n-config/README.md index 5e7058d829..dacda966dd 100644 --- a/web/i18n/README.md +++ b/web/i18n-config/README.md @@ -8,7 +8,6 @@ This directory contains the internationalization (i18n) files for this project. ``` ├── [ 24] README.md -├── [ 0] README_CN.md ├── [ 704] en-US │   ├── [2.4K] app-annotation.ts │   ├── [5.2K] app-api.ts @@ -48,6 +47,7 @@ By default we will use `LanguagesSupported` to determine which languages are sup 1. Create a new folder for the new language. ``` +cd web/i18n cp -r en-US fr-FR ``` diff --git a/web/i18n-config/auto-gen-i18n.js b/web/i18n-config/auto-gen-i18n.js new file mode 100644 index 0000000000..45f5606393 --- /dev/null +++ b/web/i18n-config/auto-gen-i18n.js @@ -0,0 +1,238 @@ +const fs = require('node:fs') +const path = require('node:path') +const vm = require('node:vm') +const transpile = require('typescript').transpile +const magicast = require('magicast') +const { parseModule, generateCode, loadFile } = magicast +const bingTranslate = require('bing-translate-api') +const { translate } = bingTranslate +const data = require('./languages.json') + +const targetLanguage = 'en-US' +const i18nFolder = '../i18n' // Path to i18n folder relative to this script +// https://github.com/plainheart/bing-translate-api/blob/master/src/met/lang.json +const languageKeyMap = data.languages.reduce((map, language) => { + if (language.supported) { + if (language.value === 'zh-Hans' || language.value === 'zh-Hant') + map[language.value] = language.value + else + map[language.value] = language.value.split('-')[0] + } + + return map +}, {}) + +async function translateMissingKeyDeeply(sourceObj, targetObject, toLanguage) { + const skippedKeys = [] + const translatedKeys = [] + + await Promise.all(Object.keys(sourceObj).map(async (key) => { + if (targetObject[key] === undefined) { + if (typeof sourceObj[key] === 'object') { + targetObject[key] = {} + const result = await translateMissingKeyDeeply(sourceObj[key], targetObject[key], toLanguage) + skippedKeys.push(...result.skipped) + translatedKeys.push(...result.translated) + } + else { + try { + const source = sourceObj[key] + if (!source) { + targetObject[key] = '' + return + } + + // Only skip obvious code patterns, not normal text with parentheses + const codePatterns = [ + /\{\{.*\}\}/, // Template variables like {{key}} + /\$\{.*\}/, // Template literals ${...} + /<[^>]+>/, // HTML/XML tags + /function\s*\(/, // Function definitions + /=\s*\(/, // Assignment with function calls + ] + + const isCodeLike = codePatterns.some(pattern => pattern.test(source)) + if (isCodeLike) { + console.log(`⏭️ Skipping code-like content: "${source.substring(0, 50)}..."`) + skippedKeys.push(`${key}: ${source}`) + return + } + + console.log(`🔄 Translating: "${source}" to ${toLanguage}`) + const { translation } = await translate(sourceObj[key], null, 
languageKeyMap[toLanguage]) + targetObject[key] = translation + translatedKeys.push(`${key}: ${translation}`) + console.log(`✅ Translated: "${translation}"`) + } + catch (error) { + console.error(`❌ Error translating "${sourceObj[key]}" to ${toLanguage}. Key: ${key}`, error.message) + skippedKeys.push(`${key}: ${sourceObj[key]} (Error: ${error.message})`) + + // Add retry mechanism for network errors + if (error.message.includes('network') || error.message.includes('timeout')) { + console.log(`🔄 Retrying translation for key: ${key}`) + try { + await new Promise(resolve => setTimeout(resolve, 1000)) // Wait 1 second + const { translation } = await translate(sourceObj[key], null, languageKeyMap[toLanguage]) + targetObject[key] = translation + translatedKeys.push(`${key}: ${translation}`) + console.log(`✅ Retry successful: "${translation}"`) + } + catch (retryError) { + console.error(`❌ Retry failed for key ${key}:`, retryError.message) + } + } + } + } + } + else if (typeof sourceObj[key] === 'object') { + targetObject[key] = targetObject[key] || {} + const result = await translateMissingKeyDeeply(sourceObj[key], targetObject[key], toLanguage) + skippedKeys.push(...result.skipped) + translatedKeys.push(...result.translated) + } + })) + + return { skipped: skippedKeys, translated: translatedKeys } +} +async function autoGenTrans(fileName, toGenLanguage, isDryRun = false) { + const fullKeyFilePath = path.resolve(__dirname, i18nFolder, targetLanguage, `${fileName}.ts`) + const toGenLanguageFilePath = path.resolve(__dirname, i18nFolder, toGenLanguage, `${fileName}.ts`) + + try { + const content = fs.readFileSync(fullKeyFilePath, 'utf8') + + // Create a safer module environment for vm + const moduleExports = {} + const context = { + exports: moduleExports, + module: { exports: moduleExports }, + require, + console, + __filename: fullKeyFilePath, + __dirname: path.dirname(fullKeyFilePath), + } + + // Use vm.runInNewContext instead of eval for better security + vm.runInNewContext(transpile(content), context) + + const fullKeyContent = moduleExports.default || moduleExports + + if (!fullKeyContent || typeof fullKeyContent !== 'object') + throw new Error(`Failed to extract translation object from ${fullKeyFilePath}`) + + // if toGenLanguageFilePath is not exist, create it + if (!fs.existsSync(toGenLanguageFilePath)) { + fs.writeFileSync(toGenLanguageFilePath, `const translation = { +} + +export default translation +`) + } + // To keep object format and format it for magicast to work: const translation = { ... } => export default {...} + const readContent = await loadFile(toGenLanguageFilePath) + const { code: toGenContent } = generateCode(readContent) + const mod = await parseModule(`export default ${toGenContent.replace('export default translation', '').replace('const translation = ', '')}`) + const toGenOutPut = mod.exports.default + + console.log(`\n🌍 Processing ${fileName} for ${toGenLanguage}...`) + const result = await translateMissingKeyDeeply(fullKeyContent, toGenOutPut, toGenLanguage) + + // Generate summary report + console.log(`\n📊 Translation Summary for ${fileName} -> ${toGenLanguage}:`) + console.log(` ✅ Translated: ${result.translated.length} keys`) + console.log(` ⏭️ Skipped: ${result.skipped.length} keys`) + + if (result.skipped.length > 0) { + console.log(`\n⚠️ Skipped keys in ${fileName} (${toGenLanguage}):`) + result.skipped.slice(0, 5).forEach(item => console.log(` - ${item}`)) + if (result.skipped.length > 5) + console.log(` ... 
and ${result.skipped.length - 5} more`) + } + + const { code } = generateCode(mod) + const res = `const translation =${code.replace('export default', '')} + +export default translation +`.replace(/,\n\n/g, ',\n').replace('};', '}') + + if (!isDryRun) { + fs.writeFileSync(toGenLanguageFilePath, res) + console.log(`💾 Saved translations to ${toGenLanguageFilePath}`) + } + else { + console.log(`🔍 [DRY RUN] Would save translations to ${toGenLanguageFilePath}`) + } + + return result + } + catch (error) { + console.error(`Error processing file ${fullKeyFilePath}:`, error.message) + throw error + } +} + +// Add command line argument support +const isDryRun = process.argv.includes('--dry-run') +const targetFile = process.argv.find(arg => arg.startsWith('--file='))?.split('=')[1] +const targetLang = process.argv.find(arg => arg.startsWith('--lang='))?.split('=')[1] + +// Rate limiting helper +function delay(ms) { + return new Promise(resolve => setTimeout(resolve, ms)) +} + +async function main() { + console.log('🚀 Starting auto-gen-i18n script...') + console.log(`📋 Mode: ${isDryRun ? 'DRY RUN (no files will be modified)' : 'LIVE MODE'}`) + + const files = fs + .readdirSync(path.resolve(__dirname, i18nFolder, targetLanguage)) + .filter(file => /\.ts$/.test(file)) // Only process .ts files + .map(file => file.replace(/\.ts$/, '')) + .filter(f => f !== 'app-debug') // ast parse error in app-debug + + // Filter by target file if specified + const filesToProcess = targetFile ? files.filter(f => f === targetFile) : files + const languagesToProcess = targetLang ? [targetLang] : Object.keys(languageKeyMap) + + console.log(`📁 Files to process: ${filesToProcess.join(', ')}`) + console.log(`🌍 Languages to process: ${languagesToProcess.join(', ')}`) + + let totalTranslated = 0 + let totalSkipped = 0 + let totalErrors = 0 + + // Process files sequentially to avoid API rate limits + for (const file of filesToProcess) { + console.log(`\n📄 Processing file: ${file}`) + + // Process languages with rate limiting + for (const language of languagesToProcess) { + try { + const result = await autoGenTrans(file, language, isDryRun) + totalTranslated += result.translated.length + totalSkipped += result.skipped.length + + // Rate limiting: wait 500ms between language processing + await delay(500) + } + catch (e) { + console.error(`❌ Error translating ${file} to ${language}:`, e.message) + totalErrors++ + } + } + } + + // Final summary + console.log('\n🎉 Auto-translation completed!') + console.log('📊 Final Summary:') + console.log(` ✅ Total keys translated: ${totalTranslated}`) + console.log(` ⏭️ Total keys skipped: ${totalSkipped}`) + console.log(` ❌ Total errors: ${totalErrors}`) + + if (isDryRun) + console.log('\n💡 This was a dry run. 
To actually translate, run without --dry-run flag.') +} + +main() diff --git a/web/i18n-config/check-i18n.js b/web/i18n-config/check-i18n.js new file mode 100644 index 0000000000..edc2566a3c --- /dev/null +++ b/web/i18n-config/check-i18n.js @@ -0,0 +1,314 @@ +const fs = require('node:fs') +const path = require('node:path') +const vm = require('node:vm') +const transpile = require('typescript').transpile + +const targetLanguage = 'en-US' +const data = require('./languages.json') +const languages = data.languages.filter(language => language.supported).map(language => language.value) + +async function getKeysFromLanguage(language) { + return new Promise((resolve, reject) => { + const folderPath = path.resolve(__dirname, '../i18n', language) + const allKeys = [] + fs.readdir(folderPath, (err, files) => { + if (err) { + console.error('Error reading folder:', err) + reject(err) + return + } + + // Filter only .ts and .js files + const translationFiles = files.filter(file => /\.(ts|js)$/.test(file)) + + translationFiles.forEach((file) => { + const filePath = path.join(folderPath, file) + const fileName = file.replace(/\.[^/.]+$/, '') // Remove file extension + const camelCaseFileName = fileName.replace(/[-_](.)/g, (_, c) => + c.toUpperCase(), + ) // Convert to camel case + + try { + const content = fs.readFileSync(filePath, 'utf8') + + // Create a safer module environment for vm + const moduleExports = {} + const context = { + exports: moduleExports, + module: { exports: moduleExports }, + require, + console, + __filename: filePath, + __dirname: folderPath, + } + + // Use vm.runInNewContext instead of eval for better security + vm.runInNewContext(transpile(content), context) + + // Extract the translation object + const translationObj = moduleExports.default || moduleExports + + if(!translationObj || typeof translationObj !== 'object') { + console.error(`Error parsing file: ${filePath}`) + reject(new Error(`Error parsing file: ${filePath}`)) + return + } + + const nestedKeys = [] + const iterateKeys = (obj, prefix = '') => { + for (const key in obj) { + const nestedKey = prefix ? `${prefix}.${key}` : key + if (typeof obj[key] === 'object' && obj[key] !== null && !Array.isArray(obj[key])) { + // This is an object (but not array), recurse into it but don't add it as a key + iterateKeys(obj[key], nestedKey) + } + else { + // This is a leaf node (string, number, boolean, array, etc.), add it as a key + nestedKeys.push(nestedKey) + } + } + } + iterateKeys(translationObj) + + // Fixed: accumulate keys instead of overwriting + const fileKeys = nestedKeys.map(key => `${camelCaseFileName}.${key}`) + allKeys.push(...fileKeys) + } + catch (error) { + console.error(`Error processing file ${filePath}:`, error.message) + reject(error) + } + }) + resolve(allKeys) + }) + }) +} + +function removeKeysFromObject(obj, keysToRemove, prefix = '') { + let modified = false + for (const key in obj) { + const fullKey = prefix ? 
`${prefix}.${key}` : key + + if (keysToRemove.includes(fullKey)) { + delete obj[key] + modified = true + console.log(`🗑️ Removed key: ${fullKey}`) + } + else if (typeof obj[key] === 'object' && obj[key] !== null) { + const subModified = removeKeysFromObject(obj[key], keysToRemove, fullKey) + modified = modified || subModified + } + } + return modified +} + +async function removeExtraKeysFromFile(language, fileName, extraKeys) { + const filePath = path.resolve(__dirname, '../i18n', language, `${fileName}.ts`) + + if (!fs.existsSync(filePath)) { + console.log(`⚠️ File not found: ${filePath}`) + return false + } + + try { + // Filter keys that belong to this file + const camelCaseFileName = fileName.replace(/[-_](.)/g, (_, c) => c.toUpperCase()) + const fileSpecificKeys = extraKeys + .filter(key => key.startsWith(`${camelCaseFileName}.`)) + .map(key => key.substring(camelCaseFileName.length + 1)) // Remove file prefix + + if (fileSpecificKeys.length === 0) + return false + + console.log(`🔄 Processing file: ${filePath}`) + + // Read the original file content + const content = fs.readFileSync(filePath, 'utf8') + const lines = content.split('\n') + + let modified = false + const linesToRemove = [] + + // Find lines to remove for each key + for (const keyToRemove of fileSpecificKeys) { + const keyParts = keyToRemove.split('.') + let targetLineIndex = -1 + + // Build regex pattern for the exact key path + if (keyParts.length === 1) { + // Simple key at root level like "pickDate: 'value'" + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + const simpleKeyPattern = new RegExp(`^\\s*${keyParts[0]}\\s*:`) + if (simpleKeyPattern.test(line)) { + targetLineIndex = i + break + } + } + } + else { + // Nested key - need to find the exact path + const currentPath = [] + let braceDepth = 0 + + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + const trimmedLine = line.trim() + + // Track current object path + const keyMatch = trimmedLine.match(/^(\w+)\s*:\s*{/) + if (keyMatch) { + currentPath.push(keyMatch[1]) + braceDepth++ + } + else if (trimmedLine === '},' || trimmedLine === '}') { + if (braceDepth > 0) { + braceDepth-- + currentPath.pop() + } + } + + // Check if this line matches our target key + const leafKeyMatch = trimmedLine.match(/^(\w+)\s*:/) + if (leafKeyMatch) { + const fullPath = [...currentPath, leafKeyMatch[1]] + const fullPathString = fullPath.join('.') + + if (fullPathString === keyToRemove) { + targetLineIndex = i + break + } + } + } + } + + if (targetLineIndex !== -1) { + linesToRemove.push(targetLineIndex) + console.log(`🗑️ Found key to remove: ${keyToRemove} at line ${targetLineIndex + 1}`) + modified = true + } + else { + console.log(`⚠️ Could not find key: ${keyToRemove}`) + } + } + + if (modified) { + // Remove lines in reverse order to maintain correct indices + linesToRemove.sort((a, b) => b - a) + + for (const lineIndex of linesToRemove) { + const line = lines[lineIndex] + console.log(`🗑️ Removing line ${lineIndex + 1}: ${line.trim()}`) + lines.splice(lineIndex, 1) + + // Also remove trailing comma from previous line if it exists and the next line is a closing brace + if (lineIndex > 0 && lineIndex < lines.length) { + const prevLine = lines[lineIndex - 1] + const nextLine = lines[lineIndex] ? 
lines[lineIndex].trim() : '' + + if (prevLine.trim().endsWith(',') && (nextLine.startsWith('}') || nextLine === '')) + lines[lineIndex - 1] = prevLine.replace(/,\s*$/, '') + } + } + + // Write back to file + const newContent = lines.join('\n') + fs.writeFileSync(filePath, newContent) + console.log(`💾 Updated file: ${filePath}`) + return true + } + + return false + } + catch (error) { + console.error(`Error processing file ${filePath}:`, error.message) + return false + } +} + +// Add command line argument support +const targetFile = process.argv.find(arg => arg.startsWith('--file='))?.split('=')[1] +const targetLang = process.argv.find(arg => arg.startsWith('--lang='))?.split('=')[1] +const autoRemove = process.argv.includes('--auto-remove') + +async function main() { + const compareKeysCount = async () => { + const allTargetKeys = await getKeysFromLanguage(targetLanguage) + + // Filter target keys by file if specified + const targetKeys = targetFile + ? allTargetKeys.filter(key => key.startsWith(targetFile.replace(/[-_](.)/g, (_, c) => c.toUpperCase()))) + : allTargetKeys + + // Filter languages by target language if specified + const languagesToProcess = targetLang ? [targetLang] : languages + + const allLanguagesKeys = await Promise.all(languagesToProcess.map(language => getKeysFromLanguage(language))) + + // Filter language keys by file if specified + const languagesKeys = targetFile + ? allLanguagesKeys.map(keys => keys.filter(key => key.startsWith(targetFile.replace(/[-_](.)/g, (_, c) => c.toUpperCase())))) + : allLanguagesKeys + + const keysCount = languagesKeys.map(keys => keys.length) + const targetKeysCount = targetKeys.length + + const comparison = languagesToProcess.reduce((result, language, index) => { + const languageKeysCount = keysCount[index] + const difference = targetKeysCount - languageKeysCount + result[language] = difference + return result + }, {}) + + console.log(comparison) + + // Print missing keys and extra keys + for (let index = 0; index < languagesToProcess.length; index++) { + const language = languagesToProcess[index] + const languageKeys = languagesKeys[index] + const missingKeys = targetKeys.filter(key => !languageKeys.includes(key)) + const extraKeys = languageKeys.filter(key => !targetKeys.includes(key)) + + console.log(`Missing keys in ${language}:`, missingKeys) + + // Show extra keys only when there are extra keys (negative difference) + if (extraKeys.length > 0) { + console.log(`Extra keys in ${language} (not in ${targetLanguage}):`, extraKeys) + + // Auto-remove extra keys if flag is set + if (autoRemove) { + console.log(`\n🤖 Auto-removing extra keys from ${language}...`) + + // Get all translation files + const i18nFolder = path.resolve(__dirname, '../i18n', language) + const files = fs.readdirSync(i18nFolder) + .filter(file => /\.ts$/.test(file)) + .map(file => file.replace(/\.ts$/, '')) + .filter(f => !targetFile || f === targetFile) // Filter by target file if specified + + let totalRemoved = 0 + for (const fileName of files) { + const removed = await removeExtraKeysFromFile(language, fileName, extraKeys) + if (removed) totalRemoved++ + } + + console.log(`✅ Auto-removal completed for ${language}. 
Modified ${totalRemoved} files.`) + } + } + } + } + + console.log('🚀 Starting check-i18n script...') + if (targetFile) + console.log(`📁 Checking file: ${targetFile}`) + + if (targetLang) + console.log(`🌍 Checking language: ${targetLang}`) + + if (autoRemove) + console.log('🤖 Auto-remove mode: ENABLED') + + compareKeysCount() +} + +main() diff --git a/web/i18n-config/i18next-config.ts b/web/i18n-config/i18next-config.ts new file mode 100644 index 0000000000..b26d0afdbe --- /dev/null +++ b/web/i18n-config/i18next-config.ts @@ -0,0 +1,97 @@ +'use client' +import i18n from 'i18next' +import { camelCase } from 'lodash-es' +import { initReactI18next } from 'react-i18next' + +const requireSilent = async (lang: string, namespace: string) => { + let res + try { + res = (await import(`../i18n/${lang}/${namespace}`)).default + } + catch { + res = (await import(`../i18n/en-US/${namespace}`)).default + } + + return res +} + +const NAMESPACES = [ + 'app-annotation', + 'app-api', + 'app-debug', + 'app-log', + 'app-overview', + 'app', + 'billing', + 'common', + 'custom', + 'dataset-creation', + 'dataset-documents', + 'dataset-hit-testing', + 'dataset-settings', + 'dataset', + 'education', + 'explore', + 'layout', + 'login', + 'plugin-tags', + 'plugin', + 'register', + 'run-log', + 'share', + 'time', + 'tools', + 'workflow', +] + +export const loadLangResources = async (lang: string) => { + const modules = await Promise.all(NAMESPACES.map(ns => requireSilent(lang, ns))) + const resources = modules.reduce((acc, mod, index) => { + acc[camelCase(NAMESPACES[index])] = mod + return acc + }, {} as Record) + return resources +} + +/** + * !Need to load en-US and zh-Hans resources for initial rendering, which are used in both marketplace and dify + * !Other languages will be loaded on demand + * !This is to avoid loading all languages at once which can be slow + */ +const getInitialTranslations = () => { + const en_USResources = NAMESPACES.reduce((acc, ns, index) => { + acc[camelCase(NAMESPACES[index])] = require(`../i18n/en-US/${ns}`).default + return acc + }, {} as Record) + const zh_HansResources = NAMESPACES.reduce((acc, ns, index) => { + acc[camelCase(NAMESPACES[index])] = require(`../i18n/zh-Hans/${ns}`).default + return acc + }, {} as Record) + return { + 'en-US': { + translation: en_USResources, + }, + 'zh-Hans': { + translation: zh_HansResources, + }, + } +} + +if (!i18n.isInitialized) { + i18n.use(initReactI18next) + .init({ + lng: undefined, + fallbackLng: 'en-US', + resources: getInitialTranslations(), + }) +} + +export const changeLanguage = async (lng?: string) => { + const resolvedLng = lng ?? 
'en-US' + const resource = await loadLangResources(resolvedLng) + if (!i18n.hasResourceBundle(resolvedLng, 'translation')) + i18n.addResourceBundle(resolvedLng, 'translation', resource, true, true) + await i18n.changeLanguage(resolvedLng) +} + +export default i18n diff --git a/web/i18n/index.ts b/web/i18n-config/index.ts similarity index 86% rename from web/i18n/index.ts rename to web/i18n-config/index.ts index 27ed3022ad..fdb31c49b4 100644 --- a/web/i18n/index.ts +++ b/web/i18n-config/index.ts @@ -1,8 +1,8 @@ import Cookies from 'js-cookie' -import { changeLanguage } from '@/i18n/i18next-config' +import { changeLanguage } from '@/i18n-config/i18next-config' import { LOCALE_COOKIE_NAME } from '@/config' -import { LanguagesSupported } from '@/i18n/language' +import { LanguagesSupported } from '@/i18n-config/language' export const i18n = { defaultLocale: 'en-US', diff --git a/web/i18n/language.ts b/web/i18n-config/language.ts similarity index 100% rename from web/i18n/language.ts rename to web/i18n-config/language.ts diff --git a/web/i18n/languages.json b/web/i18n-config/languages.json similarity index 100% rename from web/i18n/languages.json rename to web/i18n-config/languages.json diff --git a/web/i18n/server.ts b/web/i18n-config/server.ts similarity index 97% rename from web/i18n/server.ts rename to web/i18n-config/server.ts index 9aeac291ea..404a71cfaf 100644 --- a/web/i18n/server.ts +++ b/web/i18n-config/server.ts @@ -13,7 +13,7 @@ const initI18next = async (lng: Locale, ns: string) => { const i18nInstance = createInstance() await i18nInstance .use(initReactI18next) - .use(resourcesToBackend((language: string, namespace: string) => import(`./${language}/${namespace}.ts`))) + .use(resourcesToBackend((language: string, namespace: string) => import(`../i18n/${language}/${namespace}.ts`))) .init({ lng: lng === 'zh-Hans' ? 
'zh-Hans' : lng, ns, diff --git a/web/i18n/auto-gen-i18n.js b/web/i18n/auto-gen-i18n.js deleted file mode 100644 index a03b3aac24..0000000000 --- a/web/i18n/auto-gen-i18n.js +++ /dev/null @@ -1,109 +0,0 @@ -/* eslint-disable no-eval */ -const fs = require('node:fs') -const path = require('node:path') -const transpile = require('typescript').transpile -const magicast = require('magicast') -const { parseModule, generateCode, loadFile } = magicast -const bingTranslate = require('bing-translate-api') -const { translate } = bingTranslate -const data = require('./languages.json') - -const targetLanguage = 'en-US' -// https://github.com/plainheart/bing-translate-api/blob/master/src/met/lang.json -const languageKeyMap = data.languages.reduce((map, language) => { - if (language.supported) { - if (language.value === 'zh-Hans' || language.value === 'zh-Hant') - map[language.value] = language.value - else - map[language.value] = language.value.split('-')[0] - } - - return map -}, {}) - -async function translateMissingKeyDeeply(sourceObj, targetObject, toLanguage) { - await Promise.all(Object.keys(sourceObj).map(async (key) => { - if (targetObject[key] === undefined) { - if (typeof sourceObj[key] === 'object') { - targetObject[key] = {} - await translateMissingKeyDeeply(sourceObj[key], targetObject[key], toLanguage) - } - else { - try { - const source = sourceObj[key] - if (!source) { - targetObject[key] = '' - return - } - // not support translate with '(' or ')' - if (source.includes('(') || source.includes(')')) - return - - const { translation } = await translate(sourceObj[key], null, languageKeyMap[toLanguage]) - targetObject[key] = translation - } - catch { - console.error(`Error translating "${sourceObj[key]}" to ${toLanguage}. Key: ${key}`) - } - } - } - else if (typeof sourceObj[key] === 'object') { - targetObject[key] = targetObject[key] || {} - await translateMissingKeyDeeply(sourceObj[key], targetObject[key], toLanguage) - } - })) -} - -async function autoGenTrans(fileName, toGenLanguage) { - const fullKeyFilePath = path.join(__dirname, targetLanguage, `${fileName}.ts`) - const toGenLanguageFilePath = path.join(__dirname, toGenLanguage, `${fileName}.ts`) - // eslint-disable-next-line sonarjs/code-eval - const fullKeyContent = eval(transpile(fs.readFileSync(fullKeyFilePath, 'utf8'))) - // if toGenLanguageFilePath is not exist, create it - if (!fs.existsSync(toGenLanguageFilePath)) { - fs.writeFileSync(toGenLanguageFilePath, `const translation = { -} - -export default translation -`) - } - // To keep object format and format it for magicast to work: const translation = { ... 
} => export default {...} - const readContent = await loadFile(toGenLanguageFilePath) - const { code: toGenContent } = generateCode(readContent) - const mod = await parseModule(`export default ${toGenContent.replace('export default translation', '').replace('const translation = ', '')}`) - const toGenOutPut = mod.exports.default - - await translateMissingKeyDeeply(fullKeyContent, toGenOutPut, toGenLanguage) - const { code } = generateCode(mod) - const res = `const translation =${code.replace('export default', '')} - -export default translation -`.replace(/,\n\n/g, ',\n').replace('};', '}') - - fs.writeFileSync(toGenLanguageFilePath, res) -} - -async function main() { - // const fileName = 'workflow' - // Promise.all(Object.keys(languageKeyMap).map(async (toLanguage) => { - // await autoGenTrans(fileName, toLanguage) - // })) - - const files = fs - .readdirSync(path.join(__dirname, targetLanguage)) - .map(file => file.replace(/\.ts/, '')) - .filter(f => f !== 'app-debug') // ast parse error in app-debug - - await Promise.all(files.map(async (file) => { - await Promise.all(Object.keys(languageKeyMap).map(async (language) => { - try { - await autoGenTrans(file, language) - } - catch (e) { - console.error(`Error translating ${file} to ${language}`, e) - } - })) - })) -} - -main() diff --git a/web/i18n/check-i18n.js b/web/i18n/check-i18n.js deleted file mode 100644 index 55a2301ed8..0000000000 --- a/web/i18n/check-i18n.js +++ /dev/null @@ -1,85 +0,0 @@ -/* eslint-disable no-eval */ -const fs = require('node:fs') -const path = require('node:path') -const transpile = require('typescript').transpile - -const targetLanguage = 'en-US' -const data = require('./languages.json') -const languages = data.languages.filter(language => language.supported).map(language => language.value) - -async function getKeysFromLanuage(language) { - return new Promise((resolve, reject) => { - const folderPath = path.join(__dirname, language) - let allKeys = [] - fs.readdir(folderPath, (err, files) => { - if (err) { - console.error('Error reading folder:', err) - reject(err) - return - } - - files.forEach((file) => { - const filePath = path.join(folderPath, file) - const fileName = file.replace(/\.[^/.]+$/, '') // Remove file extension - const camelCaseFileName = fileName.replace(/[-_](.)/g, (_, c) => - c.toUpperCase(), - ) // Convert to camel case - // console.log(camelCaseFileName) - const content = fs.readFileSync(filePath, 'utf8') - // eslint-disable-next-line sonarjs/code-eval - const translationObj = eval(transpile(content)) - // console.log(translation) - if(!translationObj || typeof translationObj !== 'object') { - console.error(`Error parsing file: ${filePath}`) - reject(new Error(`Error parsing file: ${filePath}`)) - return - } - const keys = Object.keys(translationObj) - const nestedKeys = [] - const iterateKeys = (obj, prefix = '') => { - for (const key in obj) { - const nestedKey = prefix ? 
`${prefix}.${key}` : key - nestedKeys.push(nestedKey) - if (typeof obj[key] === 'object') - iterateKeys(obj[key], nestedKey) - } - } - iterateKeys(translationObj) - - allKeys = [...keys, ...nestedKeys].map( - key => `${camelCaseFileName}.${key}`, - ) - }) - resolve(allKeys) - }) - }) -} - -async function main() { - const compareKeysCount = async () => { - const targetKeys = await getKeysFromLanuage(targetLanguage) - const languagesKeys = await Promise.all(languages.map(language => getKeysFromLanuage(language))) - - const keysCount = languagesKeys.map(keys => keys.length) - const targetKeysCount = targetKeys.length - - const comparison = languages.reduce((result, language, index) => { - const languageKeysCount = keysCount[index] - const difference = targetKeysCount - languageKeysCount - result[language] = difference - return result - }, {}) - - console.log(comparison) - - // Print missing keys - languages.forEach((language, index) => { - const missingKeys = targetKeys.filter(key => !languagesKeys[index].includes(key)) - console.log(`Missing keys in ${language}:`, missingKeys) - }) - } - - compareKeysCount() -} - -main() diff --git a/web/i18n/de-DE/app-annotation.ts b/web/i18n/de-DE/app-annotation.ts index ef2fa1f236..2e141ed380 100644 --- a/web/i18n/de-DE/app-annotation.ts +++ b/web/i18n/de-DE/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Massenimport', bulkExport: 'Massenexport', clearAll: 'Alle Anmerkungen löschen', + clearAllConfirm: 'Alle Anmerkungen löschen?', }, }, editModal: { diff --git a/web/i18n/de-DE/app-debug.ts b/web/i18n/de-DE/app-debug.ts index 1adaf6f05d..1d7ebc3854 100644 --- a/web/i18n/de-DE/app-debug.ts +++ b/web/i18n/de-DE/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Datei-Upload', + description: 'Das Chat-Eingabefeld unterstützt das Hochladen von Bildern, Dokumenten und anderen Dateien.', + supportedTypes: 'Unterstützte Dateitypen', + numberLimit: 'Max. Uploads', + modalTitle: 'Datei-Upload-Einstellung', + }, + imageUpload: { + title: 'Bild-Upload', + description: 'Ermöglicht das Hochladen von Bildern.', + supportedTypes: 'Unterstützte Dateitypen', + numberLimit: 'Max. 
Uploads', + modalTitle: 'Bild-Upload-Einstellung', + }, + bar: { + empty: 'Funktionen aktivieren, um die Web-App-Benutzererfahrung zu verbessern', + enableText: 'Funktionen aktiviert', + manage: 'Verwalten', + }, + documentUpload: { + title: 'Dokument', + description: 'Das Aktivieren von Dokumenten ermöglicht es dem Modell, Dokumente aufzunehmen und Fragen zu ihnen zu beantworten.', + }, + audioUpload: { + title: 'Audio', + description: 'Das Aktivieren von Audio ermöglicht es dem Modell, Audiodateien für Transkription und Analyse zu verarbeiten.', + }, }, resetConfig: { title: 'Zurücksetzen bestätigen?', @@ -261,6 +288,9 @@ const translation = { options: 'Optionen', addOption: 'Option hinzufügen', apiBasedVar: 'API-basierte Variable', + defaultValue: 'Standardwert', + noDefaultValue: 'Kein Standardwert', + selectDefaultValue: 'Standardwert auswählen', }, vision: { name: 'Vision', diff --git a/web/i18n/de-DE/app.ts b/web/i18n/de-DE/app.ts index c28fcb2be5..31221e8f0b 100644 --- a/web/i18n/de-DE/app.ts +++ b/web/i18n/de-DE/app.ts @@ -268,6 +268,7 @@ const translation = { noAccessPermission: 'Keine Berechtigung zum Zugriff auf die Webanwendung', maxActiveRequests: 'Maximale gleichzeitige Anfragen', maxActiveRequestsPlaceholder: 'Geben Sie 0 für unbegrenzt ein', + maxActiveRequestsTip: 'Maximale Anzahl gleichzeitiger aktiver Anfragen pro App (0 für unbegrenzt)', } export default translation diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts index 92e4916755..b8efe31ebc 100644 --- a/web/i18n/de-DE/common.ts +++ b/web/i18n/de-DE/common.ts @@ -237,6 +237,7 @@ const translation = { existingEmail: 'Ein Benutzer mit dieser E-Mail-Adresse existiert bereits.', emailLabel: 'Neue E-Mail', authTip: 'Sobald Ihre E-Mail geändert wurde, können Google- oder GitHub-Konten, die mit Ihrer alten E-Mail verknüpft sind, nicht mehr auf dieses Konto zugreifen.', + unAvailableEmail: 'Diese E-Mail ist vorübergehend nicht verfügbar.', }, }, members: { diff --git a/web/i18n/de-DE/plugin.ts b/web/i18n/de-DE/plugin.ts index 6fa6999ae5..aa136528e2 100644 --- a/web/i18n/de-DE/plugin.ts +++ b/web/i18n/de-DE/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Nur fixieren', selectedDescription: 'Auto-Update nur für Patch-Versionen', + description: 'Automatische Aktualisierung nur für Patchversionen (z. B. 1.0.1 → 1.0.2). 
Kleinere Versionsänderungen lösen keine Aktualisierungen aus.', }, latest: { description: 'Immer auf die neueste Version aktualisieren', diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts index de2c3ce38d..121f5da1a2 100644 --- a/web/i18n/de-DE/workflow.ts +++ b/web/i18n/de-DE/workflow.ts @@ -497,6 +497,7 @@ const translation = { search: 'Suchmetadaten', }, title: 'Metadatenfilterung', + tip: 'Metadatenfilterung ist der Prozess, Metadatenattribute (wie Tags, Kategorien oder Zugriffsberechtigungen) zu verwenden, um die Abfrage und Kontrolle der relevanten Informationen innerhalb eines Systems zu verfeinern.', }, }, http: { diff --git a/web/i18n/en-US/app-annotation.ts b/web/i18n/en-US/app-annotation.ts index 43f24a7619..c0a8008d9a 100644 --- a/web/i18n/en-US/app-annotation.ts +++ b/web/i18n/en-US/app-annotation.ts @@ -16,7 +16,8 @@ const translation = { addAnnotation: 'Add Annotation', bulkImport: 'Bulk Import', bulkExport: 'Bulk Export', - clearAll: 'Clear All Annotation', + clearAll: 'Delete All', + clearAllConfirm: 'Delete all annotations?', }, }, editModal: { diff --git a/web/i18n/en-US/app-debug.ts b/web/i18n/en-US/app-debug.ts index 037270ac1b..4bf5653c55 100644 --- a/web/i18n/en-US/app-debug.ts +++ b/web/i18n/en-US/app-debug.ts @@ -405,6 +405,9 @@ const translation = { atLeastOneOption: 'At least one option is required', optionRepeat: 'Has repeat options', }, + 'defaultValue': 'Default value', + 'noDefaultValue': 'No default value', + 'selectDefaultValue': 'Select default value', }, vision: { name: 'Vision', diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts index 127c3841a1..d9b17dcbad 100644 --- a/web/i18n/en-US/common.ts +++ b/web/i18n/en-US/common.ts @@ -248,6 +248,7 @@ const translation = { emailLabel: 'New email', emailPlaceholder: 'Enter a new email', existingEmail: 'A user with this email already exists.', + unAvailableEmail: 'This email is temporarily unavailable.', sendVerifyCode: 'Send verification code', continue: 'Continue', changeTo: 'Change to {{email}}', diff --git a/web/i18n/es-ES/app-annotation.ts b/web/i18n/es-ES/app-annotation.ts index e090c46122..2a797edcc3 100644 --- a/web/i18n/es-ES/app-annotation.ts +++ b/web/i18n/es-ES/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Importar en Masa', bulkExport: 'Exportar en Masa', clearAll: 'Borrar Todas las Anotaciones', + clearAllConfirm: '¿Eliminar todas las anotaciones?', }, }, editModal: { diff --git a/web/i18n/es-ES/app-debug.ts b/web/i18n/es-ES/app-debug.ts index afdea66338..78b3329403 100644 --- a/web/i18n/es-ES/app-debug.ts +++ b/web/i18n/es-ES/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Subida de archivos', + description: 'La caja de entrada del chat permite subir imágenes, documentos y otros archivos.', + supportedTypes: 'Tipos de archivo soportados', + numberLimit: 'Máximo de subidas', + modalTitle: 'Configuración de subida de archivos', + }, + imageUpload: { + title: 'Subida de imágenes', + description: 'Permite subir imágenes.', + supportedTypes: 'Tipos de archivo soportados', + numberLimit: 'Máximo de subidas', + modalTitle: 'Configuración de subida de imágenes', + }, + bar: { + empty: 'Habilitar funciones para mejorar la experiencia del usuario de la aplicación web', + enableText: 'Funciones habilitadas', + manage: 'Gestionar', + }, + documentUpload: { + title: 'Documento', + description: 'Habilitar Documento permitirá al modelo aceptar documentos y responder preguntas sobre ellos.', + }, + audioUpload: { + 
title: 'Audio', + description: 'Habilitar Audio permitirá al modelo procesar archivos de audio para transcripción y análisis.', + }, }, automatic: { title: 'Orquestación automatizada de aplicaciones', @@ -282,12 +309,14 @@ const translation = { 'required': 'Requerido', 'hide': 'Ocultar', 'errorMsg': { - varNameRequired: 'Nombre de la variable es requerido', labelNameRequired: 'Nombre de la etiqueta es requerido', varNameCanBeRepeat: 'El nombre de la variable no puede repetirse', atLeastOneOption: 'Se requiere al menos una opción', optionRepeat: 'Hay opciones repetidas', }, + 'defaultValue': 'Valor predeterminado', + 'noDefaultValue': 'Sin valor predeterminado', + 'selectDefaultValue': 'Seleccionar valor predeterminado', }, vision: { name: 'Visión', diff --git a/web/i18n/es-ES/app.ts b/web/i18n/es-ES/app.ts index add9a4318d..55e14df838 100644 --- a/web/i18n/es-ES/app.ts +++ b/web/i18n/es-ES/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'No se permite el acceso a la aplicación web', maxActiveRequestsPlaceholder: 'Introduce 0 para ilimitado', maxActiveRequests: 'Máximas solicitudes concurrentes', + maxActiveRequestsTip: 'Número máximo de solicitudes activas concurrentes por aplicación (0 para ilimitado)', } export default translation diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts index eba82dd384..a904bd82b9 100644 --- a/web/i18n/es-ES/common.ts +++ b/web/i18n/es-ES/common.ts @@ -241,6 +241,7 @@ const translation = { verifyNew: 'Verifica tu nuevo correo electrónico', codeLabel: 'Código de verificación', authTip: 'Una vez que tu correo electrónico sea cambiado, las cuentas de Google o GitHub vinculadas a tu antiguo correo electrónico ya no podrán iniciar sesión en esta cuenta.', + unAvailableEmail: 'Este correo electrónico no está disponible temporalmente.', }, }, members: { diff --git a/web/i18n/es-ES/plugin.ts b/web/i18n/es-ES/plugin.ts index 6299684851..e937db7a02 100644 --- a/web/i18n/es-ES/plugin.ts +++ b/web/i18n/es-ES/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Arreglar Solo', selectedDescription: 'Actualización automática solo para versiones de parches', + description: 'Actualización automática solo para versiones de parche (por ejemplo, 1.0.1 → 1.0.2). 
Los cambios de versión menor no activarán actualizaciones.', }, latest: { selectedDescription: 'Siempre actualiza a la última versión', diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 3c509934df..d57a0a40f2 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -497,6 +497,7 @@ const translation = { search: 'Buscar metadatos', }, title: 'Filtrado de Metadatos', + tip: 'El filtrado de metadatos es el proceso de utilizar atributos de metadatos (como etiquetas, categorías o permisos de acceso) para refinar y controlar la recuperación de información relevante dentro de un sistema.', }, }, http: { diff --git a/web/i18n/fa-IR/app-annotation.ts b/web/i18n/fa-IR/app-annotation.ts index e78fc8cd7e..d66c2eb0e5 100644 --- a/web/i18n/fa-IR/app-annotation.ts +++ b/web/i18n/fa-IR/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'واردات انبوه', bulkExport: 'صادرات انبوه', clearAll: 'پاک کردن همه یادداشت‌ها', + clearAllConfirm: 'آیا همه حاشیه‌نویسی‌ها را حذف کنیم؟', }, }, editModal: { diff --git a/web/i18n/fa-IR/app-debug.ts b/web/i18n/fa-IR/app-debug.ts index 75085ef30e..5efbb9421b 100644 --- a/web/i18n/fa-IR/app-debug.ts +++ b/web/i18n/fa-IR/app-debug.ts @@ -317,7 +317,6 @@ const translation = { 'required': 'مورد نیاز', 'hide': 'مخفی کردن', 'errorMsg': { - varNameRequired: 'نام متغیر مورد نیاز است', labelNameRequired: 'نام برچسب مورد نیاز است', varNameCanBeRepeat: 'نام متغیر نمی‌تواند تکراری باشد', atLeastOneOption: 'حداقل یک گزینه مورد نیاز است', @@ -451,6 +450,33 @@ const translation = { enabled: 'فعال', }, }, + fileUpload: { + title: 'آپلود فایل', + description: 'جعبه ورودی چت امکان آپلود تصاویر، اسناد و سایر فایل‌ها را فراهم می‌کند.', + supportedTypes: 'انواع فایل‌های پشتیبانی شده', + numberLimit: 'حداکثر آپلود', + modalTitle: 'تنظیمات آپلود فایل', + }, + imageUpload: { + title: 'آپلود تصویر', + description: 'امکان آپلود تصاویر را فراهم می‌کند.', + supportedTypes: 'انواع فایل‌های پشتیبانی شده', + numberLimit: 'حداکثر آپلود', + modalTitle: 'تنظیمات آپلود تصویر', + }, + bar: { + empty: 'فعال‌سازی ویژگی برای بهبود تجربه کاربری اپلیکیشن وب', + enableText: 'ویژگی‌های فعال', + manage: 'مدیریت', + }, + documentUpload: { + title: 'سند', + description: 'فعال‌سازی سند به مدل اجازه می‌دهد اسناد را دریافت کرده و درباره آن‌ها پاسخ دهد.', + }, + audioUpload: { + title: 'صوتی', + description: 'فعال‌سازی صوت به مدل اجازه می‌دهد فایل‌های صوتی را برای رونویسی و تجزیه و تحلیل پردازش کند.', + }, }, } diff --git a/web/i18n/fa-IR/app.ts b/web/i18n/fa-IR/app.ts index d8dfba3d81..b2cde413d9 100644 --- a/web/i18n/fa-IR/app.ts +++ b/web/i18n/fa-IR/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'دسترسی به برنامه وب مجاز نیست', maxActiveRequests: 'بیشترین درخواست‌های همزمان', maxActiveRequestsPlaceholder: 'برای نامحدود، 0 را وارد کنید', + maxActiveRequestsTip: 'حداکثر تعداد درخواست‌های فعال همزمان در هر برنامه (0 برای نامحدود)', } export default translation diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts index c30319b0d2..018fbefa81 100644 --- a/web/i18n/fa-IR/common.ts +++ b/web/i18n/fa-IR/common.ts @@ -241,6 +241,7 @@ const translation = { content1: 'اگر ادامه دهید، ما یک کد تأیید به {{email}} برای بازگشایی مجدد ارسال خواهیم کرد.', content3: 'یک ایمیل جدید وارد کنید و ما یک کد تأیید برای شما ارسال خواهیم کرد.', authTip: 'زمانی که ایمیل شما تغییر کند، حساب‌های گوگل یا گیت‌هاب مرتبط با ایمیل قدیمی شما دیگر قادر به ورود به این حساب نخواهند بود.', + unAvailableEmail: 'این ایمیل به طور موقت در دسترس نیست.', }, }, members: { 
diff --git a/web/i18n/fa-IR/plugin.ts b/web/i18n/fa-IR/plugin.ts index 5e1cbe02bf..1ba3a714a3 100644 --- a/web/i18n/fa-IR/plugin.ts +++ b/web/i18n/fa-IR/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'فقط تعمیر کنید', selectedDescription: 'به‌روزرسانی خودکار تنها برای نسخه‌های وصله', + description: 'به‌روزرسانی خودکار فقط برای نسخه‌های پچ (مانند ۱.۰.۱ → ۱.۰.۲). تغییرات جزئی نسخه باعث راه‌اندازی به‌روزرسانی‌ها نمی‌شود.', }, latest: { name: 'جدیدترین', diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts index b1aa11d3bf..f95253e73d 100644 --- a/web/i18n/fa-IR/workflow.ts +++ b/web/i18n/fa-IR/workflow.ts @@ -497,6 +497,7 @@ const translation = { conditions: 'شرایط', }, title: 'فیلتر کردن فراداده', + tip: 'فیلتر کردن متاداده فرایند استفاده از ویژگی‌های متاداده (مانند برچسب‌ها، دسته‌ها یا مجوزهای دسترسی) برای تصفیه و کنترل بازیابی اطلاعات مرتبط در یک سیستم است.', }, }, http: { diff --git a/web/i18n/fr-FR/app-annotation.ts b/web/i18n/fr-FR/app-annotation.ts index 3926fe5e26..3a34e326f4 100644 --- a/web/i18n/fr-FR/app-annotation.ts +++ b/web/i18n/fr-FR/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Importation en Vrac', bulkExport: 'Exportation en Vrac', clearAll: 'Effacer toutes les annotations', + clearAllConfirm: 'Supprimer toutes les annotations ?', }, }, editModal: { diff --git a/web/i18n/fr-FR/app-debug.ts b/web/i18n/fr-FR/app-debug.ts index f3984c0435..78294fbd8b 100644 --- a/web/i18n/fr-FR/app-debug.ts +++ b/web/i18n/fr-FR/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Téléchargement de fichier', + description: 'La boîte de saisie de chat permet de télécharger des images, des documents et d\'autres fichiers.', + supportedTypes: 'Types de fichiers supportés', + numberLimit: 'Nombre max de téléchargements', + modalTitle: 'Paramètres de téléchargement de fichier', + }, + imageUpload: { + title: 'Téléchargement d\'image', + description: 'Permet de télécharger des images.', + supportedTypes: 'Types de fichiers supportés', + numberLimit: 'Nombre max de téléchargements', + modalTitle: 'Paramètres de téléchargement d\'image', + }, + bar: { + empty: 'Activer la fonctionnalité pour améliorer l\'expérience utilisateur de l\'application web', + enableText: 'Fonctionnalités activées', + manage: 'Gérer', + }, + documentUpload: { + title: 'Document', + description: 'Activer Document permettra au modèle de prendre des documents et de répondre aux questions à leur sujet.', + }, + audioUpload: { + title: 'Audio', + description: 'Activer Audio permettra au modèle de traiter les fichiers audio pour la transcription et l\'analyse.', + }, }, resetConfig: { title: 'Confirmer la réinitialisation ?', @@ -270,12 +297,14 @@ const translation = { 'required': 'Required', 'hide': 'Caché', 'errorMsg': { - varNameRequired: 'Variable name is required', labelNameRequired: 'Label name is required', varNameCanBeRepeat: 'Variable name can not be repeated', atLeastOneOption: 'At least one option is required', optionRepeat: 'Has repeat options', }, + 'defaultValue': 'Valeur par défaut', + 'noDefaultValue': 'Aucune valeur par défaut', + 'selectDefaultValue': 'Sélectionner la valeur par défaut', }, vision: { name: 'Vision', diff --git a/web/i18n/fr-FR/app.ts b/web/i18n/fr-FR/app.ts index 523934152f..f572658d12 100644 --- a/web/i18n/fr-FR/app.ts +++ b/web/i18n/fr-FR/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Pas de permission d\'accéder à l\'application web', maxActiveRequestsPlaceholder: 'Entrez 0 
pour illimité', maxActiveRequests: 'Nombre maximal de requêtes simultanées', + maxActiveRequestsTip: 'Nombre maximum de requêtes actives concurrentes par application (0 pour illimité)', } export default translation diff --git a/web/i18n/fr-FR/common.ts b/web/i18n/fr-FR/common.ts index 136e7de2ef..138c7662d6 100644 --- a/web/i18n/fr-FR/common.ts +++ b/web/i18n/fr-FR/common.ts @@ -237,6 +237,7 @@ const translation = { content2: 'Votre adresse e-mail actuelle est {{email}}. Un code de vérification a été envoyé à cette adresse e-mail.', codeLabel: 'Code de vérification', content1: 'Si vous continuez, nous enverrons un code de vérification à {{email}} pour une nouvelle authentification.', + unAvailableEmail: 'Cet e-mail est temporairement indisponible.', }, }, members: { diff --git a/web/i18n/fr-FR/plugin.ts b/web/i18n/fr-FR/plugin.ts index 255171058a..ae6e8c068b 100644 --- a/web/i18n/fr-FR/plugin.ts +++ b/web/i18n/fr-FR/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { selectedDescription: 'Mise à jour automatique uniquement pour les versions de correctif', name: 'Réparer seulement', + description: 'Mise à jour automatique uniquement pour les versions de correctif (par exemple, 1.0.1 → 1.0.2). Les changements de version mineure ne déclencheront pas de mises à jour.', }, latest: { name: 'Dernier', diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts index 96bead7ff2..884e3e9772 100644 --- a/web/i18n/fr-FR/workflow.ts +++ b/web/i18n/fr-FR/workflow.ts @@ -497,6 +497,7 @@ const translation = { title: 'Conditions de filtrage des métadonnées', }, title: 'Filtrage des métadonnées', + tip: 'Le filtrage des métadonnées est le processus d\'utilisation des attributs de métadonnées (tels que les étiquettes, les catégories ou les autorisations d\'accès) pour affiner et contrôler la récupération d\'informations pertinentes au sein d\'un système.', }, }, http: { diff --git a/web/i18n/hi-IN/app-annotation.ts b/web/i18n/hi-IN/app-annotation.ts index 0249ebf7d4..b89f33c438 100644 --- a/web/i18n/hi-IN/app-annotation.ts +++ b/web/i18n/hi-IN/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'बल्क आयात', bulkExport: 'बल्क निर्यात', clearAll: 'सभी एनोटेशन साफ करें', + clearAllConfirm: 'क्या सभी टिप्पणियाँ हटानी हैं?', }, }, editModal: { diff --git a/web/i18n/hi-IN/app-debug.ts b/web/i18n/hi-IN/app-debug.ts index ded2af4132..ea9b20c500 100644 --- a/web/i18n/hi-IN/app-debug.ts +++ b/web/i18n/hi-IN/app-debug.ts @@ -314,12 +314,14 @@ const translation = { 'required': 'आवश्यक', 'hide': 'छुपाएँ', 'errorMsg': { - varNameRequired: 'वेरिएबल नाम आवश्यक है', labelNameRequired: 'लेबल नाम आवश्यक है', varNameCanBeRepeat: 'वेरिएबल नाम दोहराया नहीं जा सकता', atLeastOneOption: 'कम से कम एक विकल्प आवश्यक है', optionRepeat: 'विकल्प दोहराए गए हैं', }, + 'defaultValue': 'डिफ़ॉल्ट मान', + 'noDefaultValue': 'कोई डिफ़ॉल्ट मान नहीं', + 'selectDefaultValue': 'डिफ़ॉल्ट मान चुनें', }, vision: { name: 'विजन', @@ -465,6 +467,33 @@ const translation = { 'उपकरणों का उपयोग करके एलएलएम की क्षमताओं का विस्तार किया जा सकता है, जैसे इंटरनेट पर खोज करना या वैज्ञानिक गणनाएँ करना', enabled: 'सक्षम', }, + fileUpload: { + title: 'फ़ाइल अपलोड', + description: 'चैट इनपुट बॉक्स छवियों, दस्तावेज़ों और अन्य फ़ाइलों को अपलोड करने की अनुमति देता है।', + supportedTypes: 'समर्थित फ़ाइल प्रकार', + numberLimit: 'अधिकतम अपलोड', + modalTitle: 'फ़ाइल अपलोड सेटिंग', + }, + imageUpload: { + title: 'छवि अपलोड', + description: 'छवियों को अपलोड करने की अनुमति दें।', + supportedTypes: 'समर्थित फ़ाइल प्रकार', + numberLimit: 
'अधिकतम अपलोड', + modalTitle: 'छवि अपलोड सेटिंग', + }, + bar: { + empty: 'वेब ऐप उपयोगकर्ता अनुभव को बेहतर बनाने के लिए फीचर सक्षम करें', + enableText: 'फीचर सक्षम', + manage: 'प्रबंधित करें', + }, + documentUpload: { + title: 'दस्तावेज़', + description: 'दस्तावेज़ सक्षम करने से मॉडल दस्तावेज़ों को स्वीकार कर सकेगा और उनके बारे में प्रश्नों का उत्तर दे सकेगा।', + }, + audioUpload: { + title: 'ऑडियो', + description: 'ऑडियो सक्षम करने से मॉडल ट्रांसक्रिप्शन और विश्लेषण के लिए ऑडियो फ़ाइलों को प्रोसेस कर सकेगा।', + }, }, } diff --git a/web/i18n/hi-IN/app.ts b/web/i18n/hi-IN/app.ts index dcd5e54bdc..9b13fdc392 100644 --- a/web/i18n/hi-IN/app.ts +++ b/web/i18n/hi-IN/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'वेब एप्लिकेशन तक पहुँचने की अनुमति नहीं है', maxActiveRequests: 'अधिकतम समवर्ती अनुरोध', maxActiveRequestsPlaceholder: 'असीमित के लिए 0 दर्ज करें', + maxActiveRequestsTip: 'प्रति ऐप सक्रिय अनुरोधों की अधिकतम संख्या (असीमित के लिए 0)', } export default translation diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts index 51e59449d5..6dfe10eef2 100644 --- a/web/i18n/hi-IN/common.ts +++ b/web/i18n/hi-IN/common.ts @@ -247,6 +247,7 @@ const translation = { content2: 'आपका वर्तमान ईमेल है {{email}}. सत्यापन कोड इस ईमेल पते पर भेजा गया है।', authTip: 'एक बार जब आपका ईमेल बदल दिया जाता है, तो आपके पुराने ईमेल से जुड़े Google या GitHub खाते इस खाते में लॉग इन नहीं कर सकेंगे।', content1: 'अगर आप जारी रखते हैं, तो हम सत्यापन के लिए {{email}} पर एक सत्यापन कोड भेजेंगे।', + unAvailableEmail: 'यह ईमेल अस्थायी रूप से अनुपलब्ध है।', }, }, members: { diff --git a/web/i18n/hi-IN/plugin.ts b/web/i18n/hi-IN/plugin.ts index ae4547421c..e15b6a85a7 100644 --- a/web/i18n/hi-IN/plugin.ts +++ b/web/i18n/hi-IN/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'केवल ठीक करें', selectedDescription: 'केवल पैच संस्करणों के लिए स्वचालित अपडेट', + description: 'केवल पैच संस्करणों के लिए स्वचालित अद्यतन (जैसे, 1.0.1 → 1.0.2)। छोटा संस्करण परिवर्तन अद्यतन को ट्रिगर नहीं करेगा।', }, latest: { name: 'नवीनतम', diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index 68937e5155..d613c87f6a 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -510,6 +510,7 @@ const translation = { search: 'खोज मेटाडेटा', }, title: 'मेटाडेटा फ़िल्टरिंग', + tip: 'मेटाडेटा फ़िल्टरिंग वह प्रक्रिया है जिसमें मेटाडेटा विशेषताओं (जैसे टैग, श्रेणियाँ, या पहुंच अनुमतियाँ) का उपयोग करके एक प्रणाली के भीतर प्रासंगिक जानकारी की पुनर्प्राप्ति को परिष्कृत और नियंत्रित किया जाता है।', }, }, http: { diff --git a/web/i18n/i18next-config.ts b/web/i18n/i18next-config.ts deleted file mode 100644 index 8c5bd375a7..0000000000 --- a/web/i18n/i18next-config.ts +++ /dev/null @@ -1,74 +0,0 @@ -'use client' -import i18n from 'i18next' -import { camelCase } from 'lodash-es' -import { initReactI18next } from 'react-i18next' - -const requireSilent = async (lang: string, namespace: string) => { - let res - try { - res = (await import(`./${lang}/${namespace}`)).default - } - catch { - res = (await import(`./en-US/${namespace}`)).default - } - - return res -} - -const NAMESPACES = [ - 'app-annotation', - 'app-api', - 'app-debug', - 'app-log', - 'app-overview', - 'app', - 'billing', - 'common', - 'custom', - 'dataset-creation', - 'dataset-documents', - 'dataset-hit-testing', - 'dataset-settings', - 'dataset', - 'education', - 'explore', - 'layout', - 'login', - 'plugin-tags', - 'plugin', - 'register', - 'run-log', - 'share', - 'time', - 'tools', - 'workflow', -] -
-export const loadLangResources = async (lang: string) => { - const modules = await Promise.all(NAMESPACES.map(ns => requireSilent(lang, ns))) - const resources = modules.reduce((acc, mod, index) => { - acc[camelCase(NAMESPACES[index])] = mod - return acc - }, {} as Record<string, any>) - return { - translation: resources, - } -} - -i18n.use(initReactI18next) - .init({ - lng: undefined, - fallbackLng: 'en-US', - }) - -export const changeLanguage = async (lng?: string) => { - const resolvedLng = lng ?? 'en-US' - const resources = { - [resolvedLng]: await loadLangResources(resolvedLng), - } - if (!i18n.hasResourceBundle(resolvedLng, 'translation')) - i18n.addResourceBundle(resolvedLng, 'translation', resources[resolvedLng].translation, true, true) - await i18n.changeLanguage(resolvedLng) -} - -export default i18n diff --git a/web/i18n/it-IT/app-annotation.ts b/web/i18n/it-IT/app-annotation.ts index a7f615860c..bba10ba84e 100644 --- a/web/i18n/it-IT/app-annotation.ts +++ b/web/i18n/it-IT/app-annotation.ts @@ -18,6 +18,7 @@ const translation = { bulkImport: 'Importazione Bulk', bulkExport: 'Esportazione Bulk', clearAll: 'Cancella Tutte le Annotazioni', + clearAllConfirm: 'Eliminare tutte le annotazioni?', }, }, editModal: { diff --git a/web/i18n/it-IT/app-debug.ts b/web/i18n/it-IT/app-debug.ts index bfa75b282b..f79cccf6e7 100644 --- a/web/i18n/it-IT/app-debug.ts +++ b/web/i18n/it-IT/app-debug.ts @@ -216,6 +216,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Caricamento File', + description: 'La casella di input della chat consente di caricare immagini, documenti e altri file.', + supportedTypes: 'Tipi di File Supportati', + numberLimit: 'Caricamenti massimi', + modalTitle: 'Impostazione Caricamento File', + }, + imageUpload: { + title: 'Caricamento Immagine', + description: 'Consente di caricare immagini.', + supportedTypes: 'Tipi di File Supportati', + numberLimit: 'Caricamenti massimi', + modalTitle: 'Impostazione Caricamento Immagine', + }, + bar: { + empty: 'Abilita funzionalità per migliorare l\'esperienza utente dell\'app web', + enableText: 'Funzionalità Abilitate', + manage: 'Gestisci', + }, + documentUpload: { + title: 'Documento', + description: 'Abilitare Documento consentirà al modello di accettare documenti e rispondere a domande su di essi.', + }, + audioUpload: { + title: 'Audio', + description: 'Abilitare Audio consentirà al modello di elaborare file audio per trascrizione e analisi.', + }, }, automatic: { title: 'Orchestrazione automatizzata delle applicazioni', @@ -316,12 +343,14 @@ const translation = { 'required': 'Richiesto', 'hide': 'Nascondi', 'errorMsg': { - varNameRequired: 'Il nome della variabile è richiesto', labelNameRequired: 'Il nome dell\'etichetta è richiesto', varNameCanBeRepeat: 'Il nome della variabile non può essere ripetuto', atLeastOneOption: 'È richiesta almeno un\'opzione', optionRepeat: 'Ci sono opzioni ripetute', }, + 'defaultValue': 'Valore predefinito', + 'noDefaultValue': 'Nessun valore predefinito', + 'selectDefaultValue': 'Seleziona valore predefinito', }, vision: { name: 'Visione', diff --git a/web/i18n/it-IT/app.ts b/web/i18n/it-IT/app.ts index 63a25dccc6..66cb50b2a0 100644 --- a/web/i18n/it-IT/app.ts +++ b/web/i18n/it-IT/app.ts @@ -273,6 +273,7 @@ const translation = { noAccessPermission: 'Nessun permesso per accedere all\'app web', maxActiveRequestsPlaceholder: 'Inserisci 0 per illimitato', maxActiveRequests: 'Massimo numero di richieste concorrenti', + maxActiveRequestsTip: 'Numero massimo di richieste attive concorrenti per app (0 per illimitato)',
} export default translation diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts index 16991a94d8..1e74b299ef 100644 --- a/web/i18n/it-IT/common.ts +++ b/web/i18n/it-IT/common.ts @@ -249,6 +249,7 @@ const translation = { content4: 'Ti abbiamo appena inviato un codice di verifica temporaneo a {{email}}.', content1: 'Se continui, invieremo un codice di verifica a {{email}} per la riautenticazione.', sendVerifyCode: 'Invia codice di verifica', + unAvailableEmail: 'Questa email è temporaneamente non disponibile.', }, }, members: { diff --git a/web/i18n/it-IT/plugin.ts b/web/i18n/it-IT/plugin.ts index e7b6b147fa..616e199906 100644 --- a/web/i18n/it-IT/plugin.ts +++ b/web/i18n/it-IT/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Ripara solo', selectedDescription: 'Aggiornamento automatico solo per versioni patch', + description: 'Aggiornamento automatico solo per le versioni patch (ad es., 1.0.1 → 1.0.2). Le modifiche delle versioni minori non attiveranno aggiornamenti.', }, latest: { selectedDescription: 'Aggiorna sempre all\'ultima versione', diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 97e4bc14f2..196e6f761a 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -514,6 +514,7 @@ const translation = { search: 'Cerca metadati', }, title: 'Filtraggio dei metadati', + tip: 'Il filtraggio dei metadati è il processo di utilizzo degli attributi dei metadati (come tag, categorie o permessi di accesso) per affinare e controllare il recupero di informazioni pertinenti all\'interno di un sistema.', }, }, http: { diff --git a/web/i18n/ja-JP/app-annotation.ts b/web/i18n/ja-JP/app-annotation.ts index 38b891d9d8..7dbdfe018f 100644 --- a/web/i18n/ja-JP/app-annotation.ts +++ b/web/i18n/ja-JP/app-annotation.ts @@ -18,7 +18,8 @@ const translation = { addAnnotation: '注釈を追加', bulkImport: '一括インポート', bulkExport: '一括エクスポート', - clearAll: 'すべての注釈をクリア', + clearAll: 'すべて削除', + clearAllConfirm: 'すべての注釈を削除?', }, }, editModal: { diff --git a/web/i18n/ja-JP/app-debug.ts b/web/i18n/ja-JP/app-debug.ts index decbe4863e..d13a64213a 100644 --- a/web/i18n/ja-JP/app-debug.ts +++ b/web/i18n/ja-JP/app-debug.ts @@ -222,6 +222,10 @@ const translation = { title: 'ドキュメント', description: 'ドキュメント機能を有効にすると、AI モデルがファイルを処理し、その内容に基づいて質問に回答できるようになります。', }, + audioUpload: { + title: '音声', + description: '音声機能を有効にすると、モデルが音声ファイルの転写と分析を処理できるようになります。', + }, }, codegen: { title: 'コードジェネレーター', @@ -307,6 +311,9 @@ const translation = { waitForImgUpload: '画像のアップロードが完了するまでお待ちください', waitForFileUpload: 'ファイルのアップロードが完了するまでお待ちください', }, + warningMessage: { + timeoutExceeded: 'タイムアウトのため結果が表示されません。完全な結果を取得するにはログを参照してください。', + }, chatSubTitle: 'プロンプト', completionSubTitle: '接頭辞プロンプト', promptTip: 'プロンプトは、AI の応答を指示と制約で誘導します。 {{input}} のような変数を挿入します。このプロンプトはユーザーには表示されません。', @@ -386,12 +393,14 @@ const translation = { 'maxNumberOfUploads': 'アップロードの最大数', 'maxNumberTip': 'ドキュメント < {{docLimit}}, 画像 < {{imgLimit}}, 音声 < {{audioLimit}}, 映像 < {{videoLimit}}', 'errorMsg': { - varNameRequired: '変数名は必須です', labelNameRequired: 'ラベル名は必須です', varNameCanBeRepeat: '変数名は繰り返すことができません', atLeastOneOption: '少なくとも 1 つのオプションが必要です', optionRepeat: '繰り返しオプションがあります', }, + 'defaultValue': 'デフォルト値', + 'noDefaultValue': 'デフォルト値なし', + 'selectDefaultValue': 'デフォルト値を選択', }, vision: { name: 'ビジョン', diff --git a/web/i18n/ja-JP/app.ts b/web/i18n/ja-JP/app.ts index e03e9e1177..f68835c7e7 100644 --- a/web/i18n/ja-JP/app.ts +++ b/web/i18n/ja-JP/app.ts @@ -260,6 +260,7 @@ const translation = {
noAccessPermission: 'Web アプリにアクセス権限がありません', maxActiveRequestsPlaceholder: '無制限のために0を入力してください', maxActiveRequests: '最大同時リクエスト数', + maxActiveRequestsTip: 'アプリごとの同時アクティブリクエストの最大数(無制限の場合は0)', } export default translation diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts index c346984932..bd2dd22cf0 100644 --- a/web/i18n/ja-JP/common.ts +++ b/web/i18n/ja-JP/common.ts @@ -249,6 +249,7 @@ const translation = { emailLabel: '新しいメール', emailPlaceholder: '新しいメールを入力してください', existingEmail: 'このメールアドレスのユーザーは既に存在します', + unAvailableEmail: 'このメールアドレスは現在使用できません。', sendVerifyCode: '確認コードを送信', continue: '続行', changeTo: '{{email}} に変更', diff --git a/web/i18n/ja-JP/plugin.ts b/web/i18n/ja-JP/plugin.ts index 38b73a847e..b202b404b3 100644 --- a/web/i18n/ja-JP/plugin.ts +++ b/web/i18n/ja-JP/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: '修正のみ', selectedDescription: 'パッチバージョンのみの自動更新', + description: 'パッチバージョンのみ自動更新 (例: 1.0.1 → 1.0.2)。マイナーバージョンの変更は更新をトリガーしません。', }, latest: { name: '最新', diff --git a/web/i18n/ko-KR/app-annotation.ts b/web/i18n/ko-KR/app-annotation.ts index 7a93d17821..662dc3f083 100644 --- a/web/i18n/ko-KR/app-annotation.ts +++ b/web/i18n/ko-KR/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: '일괄 가져오기', bulkExport: '일괄 내보내기', clearAll: '모든 어노테이션 지우기', + clearAllConfirm: '모든 주석을 삭제하시겠습니까?', }, }, editModal: { diff --git a/web/i18n/ko-KR/app-debug.ts b/web/i18n/ko-KR/app-debug.ts index b84946841f..f9bc9978d8 100644 --- a/web/i18n/ko-KR/app-debug.ts +++ b/web/i18n/ko-KR/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: '파일 업로드', + description: '채팅 입력 상자에서 이미지, 문서 및 기타 파일 업로드를 지원합니다.', + supportedTypes: '지원 파일 유형', + numberLimit: '최대 업로드 수', + modalTitle: '파일 업로드 설정', + }, + imageUpload: { + title: '이미지 업로드', + description: '이미지 업로드를 지원합니다.', + supportedTypes: '지원 파일 유형', + numberLimit: '최대 업로드 수', + modalTitle: '이미지 업로드 설정', + }, + bar: { + empty: '웹 앱 사용자 경험을 향상시키는 기능 활성화', + enableText: '기능 활성화됨', + manage: '관리', + }, + documentUpload: { + title: '문서', + description: '문서를 활성화하면 모델이 문서를 받아들이고 문서에 대한 질문에 답할 수 있습니다.', + }, + audioUpload: { + title: '오디오', + description: '오디오를 활성화하면 모델이 전사 및 분석을 위해 오디오 파일을 처리할 수 있습니다.', + }, }, automatic: { title: '자동 어플리케이션 오케스트레이션', @@ -281,12 +308,14 @@ const translation = { 'required': '필수', 'hide': '숨기기', 'errorMsg': { - varNameRequired: '변수명은 필수입니다', labelNameRequired: '레이블명은 필수입니다', varNameCanBeRepeat: '변수명은 중복될 수 없습니다', atLeastOneOption: '적어도 하나의 옵션이 필요합니다', optionRepeat: '옵션이 중복되어 있습니다', }, + 'defaultValue': '기본값', + 'noDefaultValue': '기본값 없음', + 'selectDefaultValue': '기본값 선택', }, vision: { name: '비전', diff --git a/web/i18n/ko-KR/app.ts b/web/i18n/ko-KR/app.ts index bcc18e70f0..c113947961 100644 --- a/web/i18n/ko-KR/app.ts +++ b/web/i18n/ko-KR/app.ts @@ -286,6 +286,7 @@ const translation = { noAccessPermission: '웹 앱에 대한 접근 권한이 없습니다.', maxActiveRequests: '동시 최대 요청 수', maxActiveRequestsPlaceholder: '무제한 사용을 원하시면 0을 입력하세요.', + maxActiveRequestsTip: '앱당 최대 동시 활성 요청 수(무제한은 0)', } export default translation diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts index a5ae3fd733..06f8f19ab3 100644 --- a/web/i18n/ko-KR/common.ts +++ b/web/i18n/ko-KR/common.ts @@ -233,6 +233,7 @@ const translation = { content3: '새로운 이메일을 입력하시면 인증 코드를 보내드립니다.', content1: '계속 진행하면, 재인증을 위해 {{email}}로 인증 코드를 전송하겠습니다.', authTip: '이메일이 변경되면, 이전 이메일에 연결된 Google 또는 GitHub 계정은 더 이상 이 계정에 로그인할 수 없습니다.', + unAvailableEmail: '이 이메일은 일시적으로 사용할 수 없습니다.', }, }, members: { diff --git 
a/web/i18n/ko-KR/plugin.ts b/web/i18n/ko-KR/plugin.ts index 1f60f1365b..815a30d3bb 100644 --- a/web/i18n/ko-KR/plugin.ts +++ b/web/i18n/ko-KR/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: '수정만 하기', selectedDescription: '패치 버전만 자동 업데이트', + description: '패치 버전만 자동 업데이트 (예: 1.0.1 → 1.0.2). 마이너 버전 변경은 업데이트를 유발하지 않습니다.', }, latest: { name: '최신', diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts index be6c78f3ef..a65925f254 100644 --- a/web/i18n/ko-KR/workflow.ts +++ b/web/i18n/ko-KR/workflow.ts @@ -525,6 +525,7 @@ const translation = { conditions: '조건', }, title: '메타데이터 필터링', + tip: '메타데이터 필터링은 시스템 내에서 관련 정보를 검색하는 과정을 정제하고 제어하기 위해 메타데이터 속성(예: 태그, 카테고리 또는 접근 권한)을 사용하는 과정입니다.', }, }, http: { diff --git a/web/i18n/pl-PL/app-annotation.ts b/web/i18n/pl-PL/app-annotation.ts index 81a525935e..32efc76e66 100644 --- a/web/i18n/pl-PL/app-annotation.ts +++ b/web/i18n/pl-PL/app-annotation.ts @@ -18,6 +18,7 @@ const translation = { bulkImport: 'Masowy import', bulkExport: 'Masowy eksport', clearAll: 'Wyczyść wszystkie adnotacje', + clearAllConfirm: 'Usunąć wszystkie adnotacje?', }, }, editModal: { diff --git a/web/i18n/pl-PL/app-debug.ts b/web/i18n/pl-PL/app-debug.ts index dcd286d351..06e271fbbb 100644 --- a/web/i18n/pl-PL/app-debug.ts +++ b/web/i18n/pl-PL/app-debug.ts @@ -214,6 +214,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Przesyłanie plików', + description: 'Pole wprowadzania czatu umożliwia przesyłanie obrazów, dokumentów i innych plików.', + supportedTypes: 'Obsługiwane typy plików', + numberLimit: 'Maksymalna liczba przesłanych plików', + modalTitle: 'Ustawienia przesyłania plików', + }, + imageUpload: { + title: 'Przesyłanie obrazów', + description: 'Umożliwia przesyłanie obrazów.', + supportedTypes: 'Obsługiwane typy plików', + numberLimit: 'Maksymalna liczba przesłanych plików', + modalTitle: 'Ustawienia przesyłania obrazów', + }, + bar: { + empty: 'Włącz funkcje, aby poprawić doświadczenie użytkownika aplikacji webowej', + enableText: 'Funkcje włączone', + manage: 'Zarządzaj', + }, + documentUpload: { + title: 'Dokument', + description: 'Włączenie Dokumentu pozwoli modelowi na przyjmowanie dokumentów i odpowiadanie na pytania na ich temat.', + }, + audioUpload: { + title: 'Dźwięk', + description: 'Włączenie Dźwięku pozwoli modelowi na przetwarzanie plików audio do transkrypcji i analizy.', + }, }, automatic: { title: 'Zautomatyzowana orkiestracja aplikacji', @@ -311,12 +338,14 @@ const translation = { 'required': 'Wymagane', 'hide': 'Ukryj', 'errorMsg': { - varNameRequired: 'Wymagana nazwa zmiennej', labelNameRequired: 'Wymagana nazwa etykiety', varNameCanBeRepeat: 'Nazwa zmiennej nie może się powtarzać', atLeastOneOption: 'Wymagana jest co najmniej jedna opcja', optionRepeat: 'Powtarzają się opcje', }, + 'defaultValue': 'Wartość domyślna', + 'noDefaultValue': 'Brak wartości domyślnej', + 'selectDefaultValue': 'Wybierz wartość domyślną', }, vision: { name: 'Wizja', diff --git a/web/i18n/pl-PL/app.ts b/web/i18n/pl-PL/app.ts index 040789424c..9a42b702e7 100644 --- a/web/i18n/pl-PL/app.ts +++ b/web/i18n/pl-PL/app.ts @@ -268,6 +268,7 @@ const translation = { noAccessPermission: 'Brak uprawnień do dostępu do aplikacji internetowej', maxActiveRequests: 'Maksymalne równoczesne żądania', maxActiveRequestsPlaceholder: 'Wprowadź 0, aby uzyskać nielimitowane', + maxActiveRequestsTip: 'Maksymalna liczba jednoczesnych aktywnych żądań na aplikację (0 dla nieograniczonej)', } export default translation diff --git a/web/i18n/pl-PL/common.ts
b/web/i18n/pl-PL/common.ts index 78c0f6e9fc..db9b8de950 100644 --- a/web/i18n/pl-PL/common.ts +++ b/web/i18n/pl-PL/common.ts @@ -243,6 +243,7 @@ const translation = { content2: 'Twój aktualny adres email to {{email}}. Kod weryfikacyjny został wysłany na ten adres email.', content4: 'Właśnie wysłaliśmy Ci tymczasowy kod weryfikacyjny na {{email}}.', authTip: 'Gdy twoje e-mail zostanie zmienione, konta Google lub GitHub powiązane z twoim starym e-mailem nie będą mogły już logować się do tego konta.', + unAvailableEmail: 'Ten email jest tymczasowo niedostępny.', }, }, members: { diff --git a/web/i18n/pl-PL/plugin.ts b/web/i18n/pl-PL/plugin.ts index 10944a339b..5badeafe27 100644 --- a/web/i18n/pl-PL/plugin.ts +++ b/web/i18n/pl-PL/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { selectedDescription: 'Automatyczna aktualizacja tylko dla wersji poprawek', name: 'Napraw tylko', + description: 'Automatyczna aktualizacja tylko dla wersji łatkowych (np. 1.0.1 → 1.0.2). Zmiany w wersjach mniejszych nie będą wywoływać aktualizacji.', }, latest: { name: 'Najświeższy', diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts index bd47328a65..a29ec9b8f2 100644 --- a/web/i18n/pl-PL/workflow.ts +++ b/web/i18n/pl-PL/workflow.ts @@ -497,6 +497,7 @@ const translation = { select: 'Wybierz zmienną...', }, title: 'Filtrowanie metadanych', + tip: 'Filtracja metadanych to proces wykorzystania atrybutów metadanych (takich jak tagi, kategorie lub uprawnienia dostępu) do precyzowania i kontrolowania pozyskiwania istotnych informacji w systemie.', }, }, http: { diff --git a/web/i18n/pt-BR/app-annotation.ts b/web/i18n/pt-BR/app-annotation.ts index 3ae53ca696..9e2760bf24 100644 --- a/web/i18n/pt-BR/app-annotation.ts +++ b/web/i18n/pt-BR/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Importação em Massa', bulkExport: 'Exportação em Massa', clearAll: 'Limpar Todas as Anotações', + clearAllConfirm: 'Excluir todas as anotações?', }, }, editModal: { diff --git a/web/i18n/pt-BR/app-debug.ts b/web/i18n/pt-BR/app-debug.ts index 96d78dc9a3..5f8aabec65 100644 --- a/web/i18n/pt-BR/app-debug.ts +++ b/web/i18n/pt-BR/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Upload de Arquivo', + description: 'A caixa de entrada do chat permite fazer upload de imagens, documentos e outros arquivos.', + supportedTypes: 'Tipos de Arquivo Suportados', + numberLimit: 'Máximo de uploads', + modalTitle: 'Configuração de Upload de Arquivo', + }, + imageUpload: { + title: 'Upload de Imagem', + description: 'Permite fazer upload de imagens.', + supportedTypes: 'Tipos de Arquivo Suportados', + numberLimit: 'Máximo de uploads', + modalTitle: 'Configuração de Upload de Imagem', + }, + bar: { + empty: 'Habilitar recursos para melhorar a experiência do usuário do aplicativo web', + enableText: 'Recursos Habilitados', + manage: 'Gerenciar', + }, + documentUpload: { + title: 'Documento', + description: 'Habilitar Documento permitirá que o modelo aceite documentos e responda perguntas sobre eles.', + }, + audioUpload: { + title: 'Áudio', + description: 'Habilitar Áudio permitirá que o modelo processe arquivos de áudio para transcrição e análise.', + }, }, automatic: { title: 'Orquestração Automatizada de Aplicativos', @@ -287,12 +314,14 @@ const translation = { 'required': 'Obrigatório', 'hide': 'Ocultar', 'errorMsg': { - varNameRequired: 'O nome da variável é obrigatório', labelNameRequired: 'O nome do rótulo é obrigatório', varNameCanBeRepeat: 'O nome da variável não 
pode ser repetido', atLeastOneOption: 'Pelo menos uma opção é obrigatória', optionRepeat: 'Tem opções repetidas', }, + 'defaultValue': 'Valor padrão', + 'noDefaultValue': 'Nenhum valor padrão', + 'selectDefaultValue': 'Selecionar valor padrão', }, vision: { name: 'Visão', diff --git a/web/i18n/pt-BR/app.ts b/web/i18n/pt-BR/app.ts index 980767316f..6122a75a97 100644 --- a/web/i18n/pt-BR/app.ts +++ b/web/i18n/pt-BR/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Sem permissão para acessar o aplicativo web', maxActiveRequestsPlaceholder: 'Digite 0 para ilimitado', maxActiveRequests: 'Máximo de solicitações simultâneas', + maxActiveRequestsTip: 'Número máximo de solicitações ativas simultâneas por aplicativo (0 para ilimitado)', } export default translation diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts index 8166f9d28c..8366894a3f 100644 --- a/web/i18n/pt-BR/common.ts +++ b/web/i18n/pt-BR/common.ts @@ -237,6 +237,7 @@ const translation = { newEmail: 'Crie um novo endereço de e-mail', content2: 'Seu email atual é {{email}}. O código de verificação foi enviado para este endereço de email.', content1: 'Se você continuar, enviaremos um código de verificação para {{email}} para reautenticação.', + unAvailableEmail: 'Este e-mail está temporariamente indisponível.', }, }, members: { diff --git a/web/i18n/pt-BR/plugin.ts b/web/i18n/pt-BR/plugin.ts index 47490d218c..9b31f5e190 100644 --- a/web/i18n/pt-BR/plugin.ts +++ b/web/i18n/pt-BR/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { selectedDescription: 'Atualização automática apenas para versões de patch', name: 'Reparar Apenas', + description: 'Atualização automática apenas para versões de patch (por exemplo, 1.0.1 → 1.0.2). Mudanças de versão menor não ativarão atualizações.', }, latest: { description: 'Sempre atualize para a versão mais recente', diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts index f36e3b8499..ec870d0e17 100644 --- a/web/i18n/pt-BR/workflow.ts +++ b/web/i18n/pt-BR/workflow.ts @@ -497,6 +497,7 @@ const translation = { placeholder: 'Insira o valor', }, title: 'Filtragem de Metadados', + tip: 'A filtragem de metadados é o processo de usar atributos de metadados (como etiquetas, categorias ou permissões de acesso) para refinar e controlar a recuperação de informações relevantes dentro de um sistema.', }, }, http: { diff --git a/web/i18n/ro-RO/app-annotation.ts b/web/i18n/ro-RO/app-annotation.ts index 42fd17c12b..67feb9db1f 100644 --- a/web/i18n/ro-RO/app-annotation.ts +++ b/web/i18n/ro-RO/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Import în Masă', bulkExport: 'Export în Masă', clearAll: 'Șterge Toate Anotațiile', + clearAllConfirm: 'Șterge toate anotațiile?', }, }, editModal: { diff --git a/web/i18n/ro-RO/app-debug.ts b/web/i18n/ro-RO/app-debug.ts index f7240055e3..f6a10df1d2 100644 --- a/web/i18n/ro-RO/app-debug.ts +++ b/web/i18n/ro-RO/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Încărcare fișier', + description: 'Caseta de intrare chat permite încărcarea de imagini, documente și alte fișiere.', + supportedTypes: 'Tipuri de fișiere suportate', + numberLimit: 'Numărul maxim de încărcări', + modalTitle: 'Setări încărcare fișier', + }, + imageUpload: { + title: 'Încărcare imagine', + description: 'Permite încărcarea imaginilor.', + supportedTypes: 'Tipuri de fișiere suportate', + numberLimit: 'Numărul maxim de încărcări', + modalTitle: 'Setări încărcare imagine', + }, + bar: { + empty:
'Activează funcții pentru a îmbunătăți experiența utilizatorilor aplicației web', + enableText: 'Funcții activate', + manage: 'Gestionează', + }, + documentUpload: { + title: 'Document', + description: 'Activarea Documentului va permite modelului să primească documente și să răspundă la întrebări despre ele.', + }, + audioUpload: { + title: 'Audio', + description: 'Activarea Audio va permite modelului să proceseze fișiere audio pentru transcriere și analiză.', + }, }, automatic: { title: 'Orchestrarea automată a aplicațiilor', @@ -287,12 +314,14 @@ const translation = { 'required': 'Obligatoriu', 'hide': 'Ascundeți', 'errorMsg': { - varNameRequired: 'Numele variabilei este obligatoriu', labelNameRequired: 'Numele etichetei este obligatoriu', varNameCanBeRepeat: 'Numele variabilei nu poate fi repetat', atLeastOneOption: 'Este necesară cel puțin o opțiune', optionRepeat: 'Există opțiuni repetate', }, + 'defaultValue': 'Valoare implicită', + 'noDefaultValue': 'Fără valoare implicită', + 'selectDefaultValue': 'Selectați valoarea implicită', }, vision: { name: 'Viziune', diff --git a/web/i18n/ro-RO/app.ts b/web/i18n/ro-RO/app.ts index 791bbcbc7e..d674b4ca82 100644 --- a/web/i18n/ro-RO/app.ts +++ b/web/i18n/ro-RO/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Nici o permisiune pentru a accesa aplicația web', maxActiveRequestsPlaceholder: 'Introduceți 0 pentru nelimitat', maxActiveRequests: 'Maxime cereri simultane', + maxActiveRequestsTip: 'Numărul maxim de cereri active concurente pe aplicație (0 pentru nelimitat)', } export default translation diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts index dbc00bb134..2e578768f6 100644 --- a/web/i18n/ro-RO/common.ts +++ b/web/i18n/ro-RO/common.ts @@ -237,6 +237,7 @@ const translation = { content4: 'Tocmai ți-am trimis un cod de verificare temporar la {{email}}.', content2: 'Adresa ta de email curentă este {{email}}. Codul de verificare a fost trimis la această adresă de email.', emailLabel: 'Email nou', + unAvailableEmail: 'Acest email este temporar indisponibil.', }, }, members: { diff --git a/web/i18n/ro-RO/plugin.ts b/web/i18n/ro-RO/plugin.ts index 8c3ba06bbc..d65dc829f8 100644 --- a/web/i18n/ro-RO/plugin.ts +++ b/web/i18n/ro-RO/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { selectedDescription: 'Actualizare automată doar pentru versiuni patch', name: 'Fix doar', + description: 'Actualizare automată doar pentru versiunile de patch (de exemplu, 1.0.1 → 1.0.2). 
Schimbările de versiune minore nu vor declanșa actualizări.', }, latest: { name: 'Ultimul', diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts index 2569d5339c..5612f5d1fc 100644 --- a/web/i18n/ro-RO/workflow.ts +++ b/web/i18n/ro-RO/workflow.ts @@ -497,6 +497,7 @@ const translation = { search: 'Căutare metadate', }, title: 'Filtrarea metadatelor', + tip: 'Filtrarea metadatelor este procesul de utilizare a atributelor metadatelor (cum ar fi etichetele, categoriile sau permisiunile de acces) pentru a rafina și controla recuperarea informațiilor relevante într-un sistem.', }, }, http: { diff --git a/web/i18n/ru-RU/app-annotation.ts b/web/i18n/ru-RU/app-annotation.ts index 18f2ae4a11..e189c9ca93 100644 --- a/web/i18n/ru-RU/app-annotation.ts +++ b/web/i18n/ru-RU/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Массовый импорт', bulkExport: 'Массовый экспорт', clearAll: 'Очистить все аннотации', + clearAllConfirm: 'Удалить все аннотации?', }, }, editModal: { diff --git a/web/i18n/ru-RU/app-debug.ts b/web/i18n/ru-RU/app-debug.ts index 5d4dbb53d3..1d45c90a43 100644 --- a/web/i18n/ru-RU/app-debug.ts +++ b/web/i18n/ru-RU/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Загрузка файлов', + description: 'Поле ввода чата позволяет загружать изображения, документы и другие файлы.', + supportedTypes: 'Поддерживаемые типы файлов', + numberLimit: 'Максимум загрузок', + modalTitle: 'Настройка загрузки файлов', + }, + imageUpload: { + title: 'Загрузка изображений', + description: 'Позволяет загружать изображения.', + supportedTypes: 'Поддерживаемые типы файлов', + numberLimit: 'Максимум загрузок', + modalTitle: 'Настройка загрузки изображений', + }, + bar: { + empty: 'Включить функции для улучшения пользовательского опыта веб-приложения', + enableText: 'Функции включены', + manage: 'Управлять', + }, + documentUpload: { + title: 'Документ', + description: 'Включение Документа позволит модели принимать документы и отвечать на вопросы о них.', + }, + audioUpload: { + title: 'Аудио', + description: 'Включение Аудио позволит модели обрабатывать аудиофайлы для транскрипции и анализа.', + }, }, generate: { title: 'Генератор промпта', @@ -329,6 +356,9 @@ const translation = { atLeastOneOption: 'Требуется хотя бы один вариант', optionRepeat: 'Есть повторяющиеся варианты', }, + 'defaultValue': 'Значение по умолчанию', + 'noDefaultValue': 'Без значения по умолчанию', + 'selectDefaultValue': 'Выберите значение по умолчанию', }, vision: { name: 'Зрение', diff --git a/web/i18n/ru-RU/app.ts b/web/i18n/ru-RU/app.ts index d12f25ed57..b02d01b263 100644 --- a/web/i18n/ru-RU/app.ts +++ b/web/i18n/ru-RU/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Нет разрешения на доступ к веб-приложению', maxActiveRequests: 'Максимальное количество параллельных запросов', maxActiveRequestsPlaceholder: 'Введите 0 для неограниченного количества', + maxActiveRequestsTip: 'Максимальное количество одновременно активных запросов на одно приложение (0 для неограниченного количества)', } export default translation diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts index 442efa3782..c761bd9c4c 100644 --- a/web/i18n/ru-RU/common.ts +++ b/web/i18n/ru-RU/common.ts @@ -241,6 +241,7 @@ const translation = { content3: 'Введите новый адрес электронной почты, и мы отправим вам код подтверждения.', content1: 'Если вы продолжите, мы отправим код подтверждения на {{email}} для повторной аутентификации.', authTip: 'После изменения вашего адреса 
электронной почты учетные записи Google или GitHub, связанные с вашим старым адресом, больше не смогут войти в эту учетную запись.', + unAvailableEmail: 'Этот email временно недоступен.', }, }, members: { diff --git a/web/i18n/ru-RU/plugin.ts b/web/i18n/ru-RU/plugin.ts index f39139aa05..9bbb3c4852 100644 --- a/web/i18n/ru-RU/plugin.ts +++ b/web/i18n/ru-RU/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Только исправить', selectedDescription: 'Автообновление только для версий патчей', + description: 'Автообновление только для патч-версий (например, 1.0.1 → 1.0.2). Изменения в минорных версиях не вызовут обновления.', }, latest: { name: 'Новости', diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts index d8452122ad..8ab0f04c8e 100644 --- a/web/i18n/ru-RU/workflow.ts +++ b/web/i18n/ru-RU/workflow.ts @@ -497,6 +497,7 @@ const translation = { search: 'Поиск метаданных', }, title: 'Фильтрация метаданных', + tip: 'Фильтрация метаданных — это процесс использования атрибутов метаданных (таких как теги, категории или права доступа) для уточнения и контроля извлечения соответствующей информации внутри системы.', }, }, http: { diff --git a/web/i18n/sl-SI/app-annotation.ts b/web/i18n/sl-SI/app-annotation.ts index 07b175a8e7..6cd88a47ee 100644 --- a/web/i18n/sl-SI/app-annotation.ts +++ b/web/i18n/sl-SI/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Množični uvoz', bulkExport: 'Množični izvoz', clearAll: 'Počisti vse opombe', + clearAllConfirm: 'Izbrišite vse opombe?', }, }, editModal: { diff --git a/web/i18n/sl-SI/app-debug.ts b/web/i18n/sl-SI/app-debug.ts index 7e5c7dd6b0..597a8afa06 100644 --- a/web/i18n/sl-SI/app-debug.ts +++ b/web/i18n/sl-SI/app-debug.ts @@ -236,6 +236,33 @@ const translation = { ok: 'V redu', }, }, + fileUpload: { + title: 'Nalaganje datoteke', + description: 'Pogovorno polje omogoča nalaganje slik, dokumentov in drugih datotek.', + supportedTypes: 'Podprte vrste datotek', + numberLimit: 'Največje število nalaganj', + modalTitle: 'Nastavitve nalaganja datoteke', + }, + imageUpload: { + title: 'Nalaganje slike', + description: 'Omogoči nalaganje slik.', + supportedTypes: 'Podprte vrste datotek', + numberLimit: 'Največje število nalaganj', + modalTitle: 'Nastavitve nalaganja slike', + }, + bar: { + empty: 'Omogoči funkcije za izboljšanje uporabniške izkušnje spletne aplikacije', + enableText: 'Funkcije omogočene', + manage: 'Upravljaj', + }, + documentUpload: { + title: 'Dokument', + description: 'Omogočitev dokumenta bo omogočila modelu, da sprejme dokumente in odgovori na vprašanja o njih.', + }, + audioUpload: { + title: 'Zvok', + description: 'Omogočitev zvoka bo omogočila modelu, da obdela zvočne datoteke za prepisovanje in analizo.', + }, }, } diff --git a/web/i18n/sl-SI/app.ts b/web/i18n/sl-SI/app.ts index cd6d1169a4..337bd10359 100644 --- a/web/i18n/sl-SI/app.ts +++ b/web/i18n/sl-SI/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Brez dovoljenja za dostop do spletne aplikacije', maxActiveRequestsPlaceholder: 'Vnesite 0 za neomejeno', maxActiveRequests: 'Maksimalno število hkratnih zahtevkov', + maxActiveRequestsTip: 'Največje število hkrati aktivnih zahtevkov na aplikacijo (0 za neomejeno)', } export default translation diff --git a/web/i18n/sl-SI/common.ts b/web/i18n/sl-SI/common.ts index 43fe94b4d0..c7fbf9d7f5 100644 --- a/web/i18n/sl-SI/common.ts +++ b/web/i18n/sl-SI/common.ts @@ -241,6 +241,7 @@ const translation = { newEmail: 'Ustvarite nov e-poštni naslov', content2: 'Vaš trenutni 
elektronski naslov je {{email}}. Koda za preverjanje je bila poslana na ta elektronski naslov.', authTip: 'Ko bo vaš e-poštni naslov spremenjen, se računi Google ali GitHub, povezani z vašim starim e-poštnim naslovom, ne bodo mogli več prijaviti v ta račun.', + unAvailableEmail: 'Ta e-pošta začasno ni na voljo.', }, }, members: { diff --git a/web/i18n/sl-SI/plugin.ts b/web/i18n/sl-SI/plugin.ts index 049a80f859..dc435f2302 100644 --- a/web/i18n/sl-SI/plugin.ts +++ b/web/i18n/sl-SI/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Popravi samo', selectedDescription: 'Samodejno posodabljanje samo za različice popravkov', + description: 'Samodejno posodabljanje samo za različice popravkov (npr. 1.0.1 → 1.0.2). Spremembe manjših različic ne bodo povzročile posodobitev.', }, latest: { selectedDescription: 'Vedno posodobite na najnovejšo različico', diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts index 125d82e78f..72150701de 100644 --- a/web/i18n/sl-SI/workflow.ts +++ b/web/i18n/sl-SI/workflow.ts @@ -279,6 +279,7 @@ const translation = { 'start': 'Določite začetne parametre za zagon delovnega toka', 'variable-assigner': 'Združite večpodružinske spremenljivke v eno samo spremenljivko za enotno konfiguracijo spodnjih vozlišč.', 'variable-aggregator': 'Združite večpodružnične spremenljivke v eno samo spremenljivko za enotno konfiguracijo spodnjih vozlišč.', + 'assigner': 'Vozlišče za dodelitev spremenljivk se uporablja za dodeljevanje vrednosti spremenljivkam, ki jih je mogoče zapisati (kot so spremenljivke za pogovor).', }, operator: { zoomOut: 'Zoomirati ven', @@ -312,6 +313,7 @@ const translation = { organizeBlocks: 'Organizirajte vozlišča', minimize: 'Izhod iz celotnega zaslona', maximize: 'Maksimiziraj platno', + optional: '(neobvezno)', }, nodes: { common: { @@ -497,6 +499,7 @@ const translation = { add: 'Dodaj pogoj', }, title: 'Filtriranje metapodatkov', + tip: 'Filtriranje metapodatkov je postopek uporabe metapodatkovnih atributov (kot so oznake, kategorije ali dovoljenja za dostop) za natančnejše določanje in nadzorovanje pridobivanja relevantnih informacij znotraj sistema.', }, queryVariable: 'Vprašanje spremenljivka', knowledge: 'Znanje', diff --git a/web/i18n/th-TH/app-annotation.ts b/web/i18n/th-TH/app-annotation.ts index 5fba357d04..f038f5ef8c 100644 --- a/web/i18n/th-TH/app-annotation.ts +++ b/web/i18n/th-TH/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'นําเข้าจํานวนมาก', bulkExport: 'ส่งออกจํานวนมาก', clearAll: 'ล้างคําอธิบายประกอบทั้งหมด', + clearAllConfirm: 'ลบคําอธิบายประกอบทั้งหมดหรือไม่?', }, }, editModal: { diff --git a/web/i18n/th-TH/app-debug.ts b/web/i18n/th-TH/app-debug.ts index 928649474b..a2e939b5fe 100644 --- a/web/i18n/th-TH/app-debug.ts +++ b/web/i18n/th-TH/app-debug.ts @@ -1,4 +1,33 @@ const translation = { + feature: { + fileUpload: { + title: 'การอัปโหลดไฟล์', + description: 'กล่องข้อความแชทช่วยให้สามารถอัปโหลดรูปภาพ เอกสาร และไฟล์อื่นๆ ได้', + supportedTypes: 'ประเภทไฟล์ที่รองรับ', + numberLimit: 'จำนวนสูงสุดที่อัปโหลดได้', + modalTitle: 'การตั้งค่าการอัปโหลดไฟล์', + }, + imageUpload: { + title: 'การอัปโหลดรูปภาพ', + description: 'อนุญาตให้อัปโหลดรูปภาพได้', + supportedTypes: 'ประเภทไฟล์ที่รองรับ', + numberLimit: 'จำนวนสูงสุดที่อัปโหลดได้', + modalTitle: 'การตั้งค่าการอัปโหลดรูปภาพ', + }, + bar: { + empty: 'เปิดใช้งานคุณสมบัติเพื่อเพิ่มประสบการณ์ผู้ใช้ของเว็บแอป', + enableText: 'เปิดใช้งานคุณสมบัติแล้ว', + manage: 'จัดการ', + }, + documentUpload: { + title: 'เอกสาร',
+ description: 'การเปิดใช้งานเอกสารจะทำให้โมเดลสามารถรับเอกสารและตอบคำถามเกี่ยวกับเอกสารเหล่านั้นได้', + }, + audioUpload: { + title: 'เสียง', + description: 'การเปิดใช้งานเสียงจะทำให้โมเดลสามารถประมวลผลไฟล์เสียงเพื่อการถอดข้อความและการวิเคราะห์ได้', + }, + }, } export default translation diff --git a/web/i18n/th-TH/app.ts b/web/i18n/th-TH/app.ts index af2f67bcc1..8c8c0e02a2 100644 --- a/web/i18n/th-TH/app.ts +++ b/web/i18n/th-TH/app.ts @@ -257,6 +257,7 @@ const translation = { noAccessPermission: 'ไม่มีสิทธิ์เข้าถึงเว็บแอป', maxActiveRequestsPlaceholder: 'ใส่ 0 สำหรับไม่จำกัด', maxActiveRequests: 'จำนวนคำขอพร้อมกันสูงสุด', + maxActiveRequestsTip: 'จำนวนการร้องขอที่ใช้งานพร้อมกันสูงสุดต่อแอป (0 หมายถึงไม่จำกัด)', } export default translation diff --git a/web/i18n/th-TH/common.ts b/web/i18n/th-TH/common.ts index d956c36716..dd7dd31cb1 100644 --- a/web/i18n/th-TH/common.ts +++ b/web/i18n/th-TH/common.ts @@ -236,6 +236,7 @@ const translation = { resendCount: 'ส่งอีกครั้งใน {{count}} วินาที', authTip: 'เมื่ออีเมลของคุณถูกเปลี่ยนแปลง บัญชี Google หรือบัญชี GitHub ที่เชื่อมโยงกับอีเมลเก่าของคุณจะไม่สามารถเข้าสู่ระบบบัญชีนี้ได้อีกต่อไป.', title: 'เปลี่ยนอีเมล', + unAvailableEmail: 'อีเมลนี้ไม่สามารถใช้งานได้ชั่วคราว.', }, }, members: { diff --git a/web/i18n/th-TH/plugin.ts b/web/i18n/th-TH/plugin.ts index 6a53350cad..a967280dbd 100644 --- a/web/i18n/th-TH/plugin.ts +++ b/web/i18n/th-TH/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'ซ่อมเฉพาะ', selectedDescription: 'อัปเดตอัตโนมัติเฉพาะเวอร์ชันแพตช์เท่านั้น', + description: 'การอัปเดตอัตโนมัติสำหรับเฉพาะเวอร์ชันแพทช์ (เช่น 1.0.1 → 1.0.2) การเปลี่ยนแปลงเวอร์ชันย่อยจะไม่ทำให้เกิดการอัปเดต', }, latest: { name: 'ล่าสุด', diff --git a/web/i18n/th-TH/workflow.ts b/web/i18n/th-TH/workflow.ts index f03b021fd1..875f347cbb 100644 --- a/web/i18n/th-TH/workflow.ts +++ b/web/i18n/th-TH/workflow.ts @@ -497,6 +497,7 @@ const translation = { placeholder: 'ใส่ค่า', }, title: 'การกรองข้อมูลเมตา', + tip: 'การกรองข้อมูลเมตาดาต้าเป็นกระบวนการที่ใช้คุณลักษณะของเมตาดาต้า (เช่น แท็ก หมวดหมู่ หรือสิทธิการเข้าถึง) เพื่อปรับแต่งและควบคุมการดึงข้อมูลที่เกี่ยวข้องภายในระบบ.', }, }, http: { diff --git a/web/i18n/tr-TR/app-annotation.ts b/web/i18n/tr-TR/app-annotation.ts index bcd6db1fb2..f9b29bb711 100644 --- a/web/i18n/tr-TR/app-annotation.ts +++ b/web/i18n/tr-TR/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Toplu İçe Aktarma', bulkExport: 'Toplu Dışa Aktarma', clearAll: 'Tüm Ek Açıklamaları Temizle', + clearAllConfirm: 'Tüm ek açıklamalar silinsin mi?', }, }, editModal: { diff --git a/web/i18n/tr-TR/app-debug.ts b/web/i18n/tr-TR/app-debug.ts index 6ed0e0a5eb..c9a5f7b585 100644 --- a/web/i18n/tr-TR/app-debug.ts +++ b/web/i18n/tr-TR/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Dosya Yükleme', + description: 'Sohbet giriş kutusu görüntü, belge ve diğer dosyaların yüklenmesine izin verir.', + supportedTypes: 'Desteklenen Dosya Türleri', + numberLimit: 'Maksimum yükleme sayısı', + modalTitle: 'Dosya Yükleme Ayarları', + }, + imageUpload: { + title: 'Görüntü Yükleme', + description: 'Görüntü yüklemeye izin verir.', + supportedTypes: 'Desteklenen Dosya Türleri', + numberLimit: 'Maksimum yükleme sayısı', + modalTitle: 'Görüntü Yükleme Ayarları', + }, + bar: { + empty: 'Web uygulaması kullanıcı deneyimini geliştirmek için özellikleri etkinleştirin', + enableText: 'Özellikler Etkinleştirildi', + manage: 'Yönet', + }, + documentUpload: { + title: 'Belge',
+ description: 'Belgeyi etkinleştirmek modelin belgeleri almasına ve bunlar hakkında sorulara cevap vermesine izin verir.', + }, + audioUpload: { + title: 'Ses', + description: 'Sesi etkinleştirmek modelin transkripsiyon ve analiz için ses dosyalarını işlemesine izin verir.', + }, }, generate: { title: 'Prompt Oluşturucu', @@ -329,6 +356,9 @@ const translation = { atLeastOneOption: 'En az bir seçenek gereklidir', optionRepeat: 'Yinelenen seçenekler var', }, + defaultValue: 'Varsayılan değer', + noDefaultValue: 'Varsayılan değer yok', + selectDefaultValue: 'Varsayılan değer seç', }, vision: { name: 'Görüş', diff --git a/web/i18n/tr-TR/app.ts b/web/i18n/tr-TR/app.ts index 1847af9cf4..05ad7c1378 100644 --- a/web/i18n/tr-TR/app.ts +++ b/web/i18n/tr-TR/app.ts @@ -257,6 +257,7 @@ const translation = { noAccessPermission: 'Web uygulamasına erişim izni yok', maxActiveRequestsPlaceholder: 'Sınırsız için 0 girin', maxActiveRequests: 'Maksimum eş zamanlı istekler', + maxActiveRequestsTip: 'Her uygulama için maksimum eşzamanlı aktif istek sayısı (sınırsız için 0)', } export default translation diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts index d6caeba290..d907291146 100644 --- a/web/i18n/tr-TR/common.ts +++ b/web/i18n/tr-TR/common.ts @@ -241,6 +241,7 @@ const translation = { codeLabel: 'Doğrulama kodu', content2: 'Sizin mevcut e-posta adresiniz {{email}}. Doğrulama kodu bu e-posta adresine gönderilmiştir.', authTip: 'E-posta adresiniz değiştiğinde, eski e-posta adresinize bağlı Google veya GitHub hesapları bu hesaba giriş yapamayacak.', + unAvailableEmail: 'Bu e-posta geçici olarak kullanılamıyor.', }, }, members: { diff --git a/web/i18n/tr-TR/plugin.ts b/web/i18n/tr-TR/plugin.ts index 4c2b5510d2..1856a34c7e 100644 --- a/web/i18n/tr-TR/plugin.ts +++ b/web/i18n/tr-TR/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { selectedDescription: 'Sadece yamanın versiyonları için otomatik güncelleme', name: 'Sadece Düzelt', + description: 'Yalnızca yama sürümleri için otomatik güncelleme (örneğin, 1.0.1 → 1.0.2). Küçük sürüm değişiklikleri güncellemeleri tetiklemez.',
}, latest: { name: 'Son', diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts index b30442023a..9572217062 100644 --- a/web/i18n/tr-TR/workflow.ts +++ b/web/i18n/tr-TR/workflow.ts @@ -497,6 +497,7 @@ const translation = { datePlaceholder: 'Bir zaman seçin...', }, title: 'Meta Verileri Filtreleme', + tip: 'Meta veri filtreleme, bir sistem içinde ilgili bilgilerin alınmasına ince ayar yapmak ve bunu kontrol etmek için meta veri özniteliklerini (etiketler, kategoriler veya erişim izinleri gibi) kullanma sürecidir.', }, }, http: { diff --git a/web/i18n/uk-UA/app-annotation.ts b/web/i18n/uk-UA/app-annotation.ts index d34be76d7f..918cea529a 100644 --- a/web/i18n/uk-UA/app-annotation.ts +++ b/web/i18n/uk-UA/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Масовий імпорт', bulkExport: 'Масовий експорт', clearAll: 'Очистити всі анотації', + clearAllConfirm: 'Видалити всі анотації?', }, }, editModal: { diff --git a/web/i18n/uk-UA/app-debug.ts b/web/i18n/uk-UA/app-debug.ts index 70bbebe37e..fe6fefa801 100644 --- a/web/i18n/uk-UA/app-debug.ts +++ b/web/i18n/uk-UA/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Завантаження файлу', + description: 'Поле вводу чату дозволяє завантажувати зображення, документи та інші файли.', + supportedTypes: 'Підтримувані типи файлів', + numberLimit: 'Максимальна кількість завантажень', + modalTitle: 'Налаштування завантаження файлів', + }, + imageUpload: { + title: 'Завантаження зображення', + description: 'Дозволити завантаження зображень.', + supportedTypes: 'Підтримувані типи файлів', + numberLimit: 'Максимальна кількість завантажень', + modalTitle: 'Налаштування завантаження зображень', + }, + bar: { + empty: 'Увімкніть функції для покращення користувацького досвіду веб-додатка', + enableText: 'Функції увімкнено', + manage: 'Керувати', + }, + documentUpload: { + title: 'Документ', + description: 'Увімкнення документа дозволить моделі приймати документи та відповідати на запитання про них.', + }, + audioUpload: { + title: 'Аудіо', + description: 'Увімкнення аудіо дозволить моделі обробляти аудіофайли для транскрипції та аналізу.', + }, }, automatic: { title: 'Автоматизована оркестрація застосунків', @@ -281,12 +308,14 @@ const translation = { 'required': 'Обов\'язково', 'hide': 'Приховати', 'errorMsg': { - varNameRequired: 'Потрібно вказати назву змінної', labelNameRequired: 'Потрібно вказати назву мітки', varNameCanBeRepeat: 'Назва змінної не може повторюватися', atLeastOneOption: 'Потрібно щонайменше одну опцію', optionRepeat: 'Є повторні опції', }, + 'defaultValue': 'Значення за замовчуванням', + 'noDefaultValue': 'Без значення за замовчуванням', + 'selectDefaultValue': 'Обрати значення за замовчуванням', }, vision: { name: 'Зображення', // Vision diff --git a/web/i18n/uk-UA/app.ts b/web/i18n/uk-UA/app.ts index 0d41e1e631..26c059f727 100644 --- a/web/i18n/uk-UA/app.ts +++ b/web/i18n/uk-UA/app.ts @@ -261,6 +261,7 @@ const translation = { noAccessPermission: 'Немає дозволу на доступ до веб-додатку', maxActiveRequestsPlaceholder: 'Введіть 0 для необмеженого', maxActiveRequests: 'Максимальна кількість одночасних запитів', + maxActiveRequestsTip: 'Максимальна кількість одночасних активних запитів на додаток (0 для необмеженої кількості)', } export default translation diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts index 1ec367d481..fca3674e12 100644 --- a/web/i18n/uk-UA/common.ts +++ b/web/i18n/uk-UA/common.ts
@@ const translation = { content3: 'Введіть нову електронну адресу, і ми надішлемо вам код підтвердження.', authTip: 'Коли ви зміните свою електронну адресу, облікові записи Google або GitHub, пов\'язані з вашою старою електронною адресою, більше не зможуть увійти в цей обліковий запис.', content1: 'Якщо ви продовжите, ми надішлемо код підтвердження на {{email}} для повторної аутентифікації.', + unAvailableEmail: 'Ця електронна адреса тимчасово недоступна.', }, }, members: { diff --git a/web/i18n/uk-UA/plugin.ts b/web/i18n/uk-UA/plugin.ts index 877d7843ff..22b98fbd41 100644 --- a/web/i18n/uk-UA/plugin.ts +++ b/web/i18n/uk-UA/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Виправити тільки', selectedDescription: 'Автоматичне оновлення лише для версій патчів', + description: 'Автооновлення лише для патч-версій (наприклад, 1.0.1 → 1.0.2). Зміни в малих версіях не активують оновлення.', }, latest: { name: 'Останні', diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index 5b62ef83ee..65dfab68ad 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -497,6 +497,7 @@ const translation = { add: 'Додати умову', }, title: 'Фільтрація метаданих', + tip: 'Фільтрація метаданих — це процес використання атрибутів метаданих (таких як теги, категорії або права доступу) для уточнення та контролю отримання відповідної інформації в системі.', }, }, http: { diff --git a/web/i18n/vi-VN/app-annotation.ts b/web/i18n/vi-VN/app-annotation.ts index 6a9457f3d7..5b9f3b35a5 100644 --- a/web/i18n/vi-VN/app-annotation.ts +++ b/web/i18n/vi-VN/app-annotation.ts @@ -17,6 +17,7 @@ const translation = { bulkImport: 'Nhập hàng loạt', bulkExport: 'Xuất hàng loạt', clearAll: 'Xóa tất cả chú thích', + clearAllConfirm: 'Xóa tất cả các chú thích?', }, }, editModal: { diff --git a/web/i18n/vi-VN/app-debug.ts b/web/i18n/vi-VN/app-debug.ts index cd57f78e79..381b766306 100644 --- a/web/i18n/vi-VN/app-debug.ts +++ b/web/i18n/vi-VN/app-debug.ts @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: 'Tải lên tệp', + description: 'Hộp nhập chat cho phép tải lên hình ảnh, tài liệu và các tệp khác.', + supportedTypes: 'Các loại tệp được hỗ trợ', + numberLimit: 'Số lượng tối đa có thể tải lên', + modalTitle: 'Cài đặt tải lên tệp', + }, + imageUpload: { + title: 'Tải lên hình ảnh', + description: 'Cho phép tải lên hình ảnh.', + supportedTypes: 'Các loại tệp được hỗ trợ', + numberLimit: 'Số lượng tối đa có thể tải lên', + modalTitle: 'Cài đặt tải lên hình ảnh', + }, + bar: { + empty: 'Bật tính năng để cải thiện trải nghiệm người dùng ứng dụng web', + enableText: 'Tính năng đã được bật', + manage: 'Quản lý', + }, + documentUpload: { + title: 'Tài liệu', + description: 'Bật tài liệu sẽ cho phép mô hình nhận tài liệu và trả lời các câu hỏi về chúng.', + }, + audioUpload: { + title: 'Âm thanh', + description: 'Bật âm thanh sẽ cho phép mô hình xử lý các tệp âm thanh để phiên âm và phân tích.', + }, }, automatic: { title: 'Tự động hóa triển khai ứng dụng', @@ -281,12 +308,14 @@ const translation = { 'required': 'Bắt buộc', 'hide': 'Ẩn', 'errorMsg': { - varNameRequired: 'Tên biến là bắt buộc', labelNameRequired: 'Tên nhãn là bắt buộc', varNameCanBeRepeat: 'Tên biến không được trùng lặp', atLeastOneOption: 'Cần ít nhất một tùy chọn', optionRepeat: 'Có các tùy chọn trùng lặp', }, + 'defaultValue': 'Giá trị mặc định', + 'noDefaultValue': 'Không có giá trị mặc định', + 'selectDefaultValue': 'Chọn giá trị mặc định', }, vision: { name: 'Thị giác', diff --git 
a/web/i18n/vi-VN/app.ts b/web/i18n/vi-VN/app.ts index 4100b52b36..9ad2058330 100644 --- a/web/i18n/vi-VN/app.ts +++ b/web/i18n/vi-VN/app.ts @@ -261,6 +261,7 @@ const translation = { accessControl: 'Kiểm soát truy cập ứng dụng web', maxActiveRequestsPlaceholder: 'Nhập 0 để không giới hạn', maxActiveRequests: 'Số yêu cầu đồng thời tối đa', + maxActiveRequestsTip: 'Số yêu cầu hoạt động đồng thời tối đa cho mỗi ứng dụng (0 để không giới hạn)', } export default translation diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts index 084c7bcb48..24058264d3 100644 --- a/web/i18n/vi-VN/common.ts +++ b/web/i18n/vi-VN/common.ts @@ -237,6 +237,7 @@ const translation = { verifyEmail: 'Xác minh email hiện tại của bạn', codePlaceholder: 'Dán mã 6 chữ số', authTip: 'Khi email của bạn được thay đổi, các tài khoản Google hoặc GitHub liên kết với email cũ của bạn sẽ không còn có thể đăng nhập vào tài khoản này.', + unAvailableEmail: 'Email này tạm thời không khả dụng.', }, }, members: { diff --git a/web/i18n/vi-VN/plugin.ts b/web/i18n/vi-VN/plugin.ts index 677d90e6a7..c0f3dfac5f 100644 --- a/web/i18n/vi-VN/plugin.ts +++ b/web/i18n/vi-VN/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: 'Chỉ sửa chữa', selectedDescription: 'Tự động cập nhật chỉ cho các phiên bản bản vá', + description: 'Tự động cập nhật chỉ cho các phiên bản vá (ví dụ: 1.0.1 → 1.0.2). Thay đổi phiên bản nhỏ sẽ không kích hoạt cập nhật.', }, latest: { name: 'Mới nhất', diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts index a4525a3ffa..ebe06807b1 100644 --- a/web/i18n/vi-VN/workflow.ts +++ b/web/i18n/vi-VN/workflow.ts @@ -497,6 +497,7 @@ const translation = { search: 'Tìm kiếm siêu dữ liệu', }, title: 'Lọc siêu dữ liệu', + tip: 'Lọc siêu dữ liệu là quá trình sử dụng các thuộc tính siêu dữ liệu (chẳng hạn như thẻ, danh mục hoặc quyền truy cập) để tinh chỉnh và kiểm soát việc truy xuất thông tin liên quan trong một hệ thống.', }, }, http: { diff --git a/web/i18n/zh-Hans/app-annotation.ts b/web/i18n/zh-Hans/app-annotation.ts index 3a6cacf5b5..cb2d3be0cd 100644 --- a/web/i18n/zh-Hans/app-annotation.ts +++ b/web/i18n/zh-Hans/app-annotation.ts @@ -9,8 +9,6 @@ const translation = { table: { header: { question: '提问', - match: '匹配', - response: '回复', answer: '答案', createdAt: '创建时间', hits: '命中次数', @@ -18,7 +16,8 @@ const translation = { addAnnotation: '添加标注', bulkImport: '批量导入', bulkExport: '批量导出', - clearAll: '删除所有标注', + clearAll: '删除所有', + clearAllConfirm: '删除所有标注?', }, }, editModal: { @@ -70,7 +69,6 @@ const translation = { noHitHistory: '没有命中历史', }, hitHistoryTable: { - question: '问题', query: '提问', match: '匹配', response: '回复', diff --git a/web/i18n/zh-Hans/app-debug.ts b/web/i18n/zh-Hans/app-debug.ts index 1fbb1cf67f..8f0053905b 100644 --- a/web/i18n/zh-Hans/app-debug.ts +++ b/web/i18n/zh-Hans/app-debug.ts @@ -222,6 +222,10 @@ const translation = { title: '文档', description: '启用文档后,模型可以接收文档并回答关于它们的问题。', }, + audioUpload: { + title: '音频', + description: '启用音频后,模型可以处理音频文件进行转录和分析。', + }, }, codegen: { title: '代码生成器', @@ -250,7 +254,6 @@ const translation = { noDataLine1: '在左侧描述您的用例,', noDataLine2: '编排预览将在此处显示。', apply: '应用', - noData: '在左侧描述您的用例,编排预览将在此处显示。', loading: '为您编排应用程序中…', overwriteTitle: '覆盖现有配置?', overwriteMessage: '应用此提示将覆盖现有配置。', @@ -307,6 +310,9 @@ const translation = { waitForImgUpload: '请等待图片上传完成', waitForFileUpload: '请等待文件上传完成', }, + warningMessage: { + timeoutExceeded: '由于超时,结果未显示。请参考日志获取完整结果。', + }, chatSubTitle: '提示词', completionSubTitle: '前缀提示词', promptTip: @@ -395,6 +401,9 @@ 
const translation = { atLeastOneOption: '至少需要一个选项', optionRepeat: '选项不能重复', }, + 'defaultValue': '默认值', + 'noDefaultValue': '无默认值', + 'selectDefaultValue': '选择默认值', }, vision: { name: '视觉', diff --git a/web/i18n/zh-Hans/app.ts b/web/i18n/zh-Hans/app.ts index 9e577a360e..7c8b292ce4 100644 --- a/web/i18n/zh-Hans/app.ts +++ b/web/i18n/zh-Hans/app.ts @@ -35,7 +35,6 @@ const translation = { learnMore: '了解更多', startFromBlank: '创建空白应用', startFromTemplate: '从应用模版创建', - captionAppType: '想要哪种应用类型?', foundResult: '{{count}} 个结果', foundResults: '{{count}} 个结果', noAppsFound: '未找到应用', @@ -45,7 +44,6 @@ const translation = { chatbotUserDescription: '通过简单的配置快速搭建一个基于 LLM 的对话机器人。支持切换为 Chatflow 编排。', completionShortDescription: '用于文本生成任务的 AI 助手', completionUserDescription: '通过简单的配置快速搭建一个面向文本生成类任务的 AI 助手。', - completionWarning: '该类型不久后将不再支持创建', agentShortDescription: '具备推理与自主工具调用的智能助手', agentUserDescription: '能够迭代式的规划推理、自主工具调用,直至完成任务目标的智能助手。', workflowShortDescription: '面向单轮自动化任务的编排工作流', diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts index 1b265494b0..e51b84c37e 100644 --- a/web/i18n/zh-Hans/common.ts +++ b/web/i18n/zh-Hans/common.ts @@ -248,6 +248,7 @@ const translation = { emailLabel: '新邮箱', emailPlaceholder: '输入新邮箱', existingEmail: '该邮箱已存在', + unAvailableEmail: '该邮箱暂时无法使用。', sendVerifyCode: '发送验证码', continue: '继续', changeTo: '更改为 {{email}}', diff --git a/web/i18n/zh-Hans/login.ts b/web/i18n/zh-Hans/login.ts index b63630e288..2276436d0e 100644 --- a/web/i18n/zh-Hans/login.ts +++ b/web/i18n/zh-Hans/login.ts @@ -77,7 +77,6 @@ const translation = { activated: '现在登录', adminInitPassword: '管理员初始化密码', validate: '验证', - sso: '使用 SSO 继续', checkCode: { checkYourEmail: '验证您的电子邮件', tips: '验证码已经发送到您的邮箱 {{email}}', diff --git a/web/i18n/zh-Hans/time.ts b/web/i18n/zh-Hans/time.ts index 5158a710b5..8a223d9dd1 100644 --- a/web/i18n/zh-Hans/time.ts +++ b/web/i18n/zh-Hans/time.ts @@ -26,7 +26,6 @@ const translation = { now: '此刻', ok: '确定', cancel: '取消', - pickDate: '选择日期', }, title: { pickTime: '选择时间', diff --git a/web/i18n/zh-Hans/workflow.ts b/web/i18n/zh-Hans/workflow.ts index 81e207f67e..1f0300ae2a 100644 --- a/web/i18n/zh-Hans/workflow.ts +++ b/web/i18n/zh-Hans/workflow.ts @@ -213,7 +213,6 @@ const translation = { startRun: '开始运行', running: '运行中', testRunIteration: '测试运行迭代', - testRunLoop: '测试运行循环', back: '返回', iteration: '迭代', loop: '循环', diff --git a/web/i18n/zh-Hant/app-annotation.ts b/web/i18n/zh-Hant/app-annotation.ts index e1fee4626d..538546928c 100644 --- a/web/i18n/zh-Hant/app-annotation.ts +++ b/web/i18n/zh-Hant/app-annotation.ts @@ -9,8 +9,6 @@ const translation = { table: { header: { question: '提問', - match: '匹配', - response: '回覆', answer: '答案', createdAt: '建立時間', hits: '命中次數', @@ -19,6 +17,7 @@ const translation = { bulkImport: '批次匯入', bulkExport: '批次匯出', clearAll: '刪除所有標註', + clearAllConfirm: '要刪除所有標註嗎?', }, }, editModal: { @@ -70,7 +69,6 @@ const translation = { noHitHistory: '沒有命中歷史', }, hitHistoryTable: { - question: '問題', query: '提問', match: '匹配', response: '回覆', diff --git a/web/i18n/zh-Hant/app-debug.ts b/web/i18n/zh-Hant/app-debug.ts index b31b9a9d66..434bc830a5 100644 --- a/web/i18n/zh-Hant/app-debug.ts +++ b/web/i18n/zh-Hant/app-debug.ts @@ -25,7 +25,7 @@ const translation = { debugConfig: '除錯', addFeature: '新增功能', automatic: '產生', - stopResponding: '停止響應', + stopResponding: '停止回應', agree: '贊同', disagree: '反對', cancelAgree: '取消贊同', @@ -198,6 +198,33 @@ const translation = { }, }, }, + fileUpload: { + title: '檔案上傳', + description: '聊天輸入框支援上傳檔案。類型包括圖片、文件以及其它類型', + supportedTypes: 
'支援的檔案類型', + numberLimit: '最大上傳數', + modalTitle: '檔案上傳設定', + }, + imageUpload: { + title: '圖片上傳', + description: '支援上傳圖片', + supportedTypes: '支援的檔案類型', + numberLimit: '最大上傳數', + modalTitle: '圖片上傳設定', + }, + bar: { + empty: '開啟功能增強 web app 使用者體驗', + enableText: '功能已開啟', + manage: '管理', + }, + documentUpload: { + title: '文件', + description: '啟用文件後,模型可以接收文件並回答關於它們的問題。', + }, + audioUpload: { + title: '音訊', + description: '啟用音訊後,模型可以處理音訊檔案進行轉錄和分析。', + }, }, resetConfig: { title: '確認重置?', @@ -272,6 +299,9 @@ const translation = { atLeastOneOption: '至少需要一個選項', optionRepeat: '選項不能重複', }, + 'defaultValue': '預設值', + 'noDefaultValue': '無預設值', + 'selectDefaultValue': '選擇預設值', }, vision: { name: '視覺', diff --git a/web/i18n/zh-Hant/app.ts b/web/i18n/zh-Hant/app.ts index 07b6c85453..0bf99d5067 100644 --- a/web/i18n/zh-Hant/app.ts +++ b/web/i18n/zh-Hant/app.ts @@ -26,21 +26,7 @@ const translation = { newApp: { startFromBlank: '建立空白應用', startFromTemplate: '從應用模版建立', - captionAppType: '想要哪種應用類型?', - chatbotDescription: '使用大型語言模型構建聊天助手', - completionDescription: '構建一個根據提示生成高品質文字的應用程式,例如生成文章、摘要、翻譯等。', - completionWarning: '該類型不久後將不再支援建立', - agentDescription: '構建一個智慧 Agent,可以自主選擇工具來完成任務', - workflowDescription: '以工作流的形式編排生成型應用,提供更多的自訂設定。它適合有經驗的使用者。', workflowWarning: '正在進行 Beta 測試', - chatbotType: '聊天助手編排方法', - basic: '基礎編排', - basicTip: '新手適用,可以切換成工作流編排', - basicFor: '新手適用', - basicDescription: '基本編排允許使用簡單的設定編排聊天機器人應用程式,而無需修改內建提示。它適合初學者。', - advanced: '工作流編排', - advancedFor: '進階使用者適用', - advancedDescription: '工作流編排以工作流的形式編排聊天機器人,提供自訂設定,包括編輯內建提示的能力。它適合有經驗的使用者。', captionName: '應用名稱 & 圖示', appNamePlaceholder: '給你的應用起個名字', captionDescription: '描述', @@ -260,6 +246,7 @@ const translation = { noAccessPermission: '沒有權限訪問網絡應用程式', maxActiveRequestsPlaceholder: '輸入 0 以表示無限', maxActiveRequests: '同時最大請求數', + maxActiveRequestsTip: '每個應用程式可同時活躍請求的最大數量(0為無限制)', } export default translation diff --git a/web/i18n/zh-Hant/billing.ts b/web/i18n/zh-Hant/billing.ts index 6ede2c6213..f957bc4eab 100644 --- a/web/i18n/zh-Hant/billing.ts +++ b/web/i18n/zh-Hant/billing.ts @@ -23,18 +23,13 @@ const translation = { contractOwner: '聯絡團隊管理員', free: '免費', startForFree: '免費開始', - getStartedWith: '開始使用', contactSales: '聯絡銷售', talkToSales: '聯絡銷售', modelProviders: '支援的模型提供商', - teamMembers: '團隊成員', buildApps: '構建應用程式數', vectorSpace: '向量空間', vectorSpaceTooltip: '向量空間是 LLMs 理解您的資料所需的長期記憶系統。', - vectorSpaceBillingTooltip: '向量儲存是將知識庫向量化處理後為讓 LLMs 理解資料而使用的長期記憶儲存,1MB 大約能滿足 1.2 million character 的向量化後資料儲存(以 OpenAI Embedding 模型估算,不同模型計算方式有差異)。在向量化過程中,實際的壓縮或尺寸減小取決於內容的複雜性和冗餘性。', - documentsUploadQuota: '文件上傳配額', documentProcessingPriority: '文件處理優先順序', - documentProcessingPriorityTip: '如需更高的文件處理優先順序,請升級您的套餐', documentProcessingPriorityUpgrade: '以更快的速度、更高的精度處理更多的資料。', priority: { 'standard': '標準', @@ -103,19 +98,16 @@ const translation = { sandbox: { name: 'Sandbox', description: '200 次 GPT 免費試用', - includesTitle: '包括:', for: '核心功能免費試用', }, professional: { name: 'Professional', description: '讓個人和小團隊能夠以經濟實惠的方式釋放更多能力。', - includesTitle: 'Sandbox 計劃中的一切,加上:', for: '適合獨立開發者/小型團隊', }, team: { name: 'Team', description: '協作無限制並享受頂級效能。', - includesTitle: 'Professional 計劃中的一切,加上:', for: '適用於中型團隊', }, enterprise: { @@ -123,15 +115,6 @@ const translation = { description: '獲得大規模關鍵任務系統的完整功能和支援。', includesTitle: 'Team 計劃中的一切,加上:', features: { - 1: '商業許可證授權', - 6: '先進安全與控制', - 3: '多個工作區及企業管理', - 2: '專屬企業功能', - 4: '單一登入', - 8: '專業技術支援', - 0: '企業級可擴展部署解決方案', - 7: 'Dify 官方的更新和維護', - 5: '由 Dify 合作夥伴協商的服務水平協議', }, price: '自訂', btnText: '聯繫銷售', @@ -140,9 +123,6 @@ const 
translation = { }, community: { features: { - 0: '所有核心功能均在公共存儲庫下釋出', - 2: '遵循 Dify 開源許可證', - 1: '單一工作區域', }, includesTitle: '免費功能:', btnText: '開始使用社區', @@ -153,10 +133,6 @@ const translation = { }, premium: { features: { - 2: '網頁應用程序標誌及品牌自定義', - 0: '各種雲端服務提供商的自我管理可靠性', - 1: '單一工作區域', - 3: '優先電子郵件及聊天支持', }, for: '適用於中型組織和團隊', comingSoon: '微軟 Azure 與 Google Cloud 支持即將推出', @@ -173,8 +149,6 @@ const translation = { fullSolution: '升級您的套餐以獲得更多空間。', }, apps: { - fullTipLine1: '升級您的套餐以', - fullTipLine2: '構建更多的程式。', fullTip1: '升級以創建更多應用程序', fullTip2des: '建議清除不活躍的應用程式以釋放使用空間,或聯繫我們。', contactUs: '聯繫我們', diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts index 9f71b13d61..ccfca85bfe 100644 --- a/web/i18n/zh-Hant/common.ts +++ b/web/i18n/zh-Hant/common.ts @@ -197,7 +197,6 @@ const translation = { showAppLength: '顯示 {{length}} 個應用', delete: '刪除帳戶', deleteTip: '刪除您的帳戶將永久刪除您的所有資料並且無法恢復。', - deleteConfirmTip: '請將以下內容從您的註冊電子郵件發送至 ', account: '帳戶', myAccount: '我的帳戶', studio: '工作室', @@ -237,6 +236,7 @@ const translation = { existingEmail: '此電子郵件的用戶已經存在。', authTip: '一旦您的電子郵件更改,與您的舊電子郵件相關聯的 Google 或 GitHub 帳戶將無法再登錄此帳戶。', resendTip: '沒有收到代碼嗎?', + unAvailableEmail: '此郵件暫時無法使用。', }, }, members: { diff --git a/web/i18n/zh-Hant/dataset-creation.ts b/web/i18n/zh-Hant/dataset-creation.ts index fca1ff651e..e99fb0c320 100644 --- a/web/i18n/zh-Hant/dataset-creation.ts +++ b/web/i18n/zh-Hant/dataset-creation.ts @@ -1,8 +1,6 @@ const translation = { steps: { header: { - creation: '建立知識庫', - update: '上傳檔案', fallbackRoute: '知識', }, one: '選擇資料來源', diff --git a/web/i18n/zh-Hant/dataset-documents.ts b/web/i18n/zh-Hant/dataset-documents.ts index b04a339070..1b482f181f 100644 --- a/web/i18n/zh-Hant/dataset-documents.ts +++ b/web/i18n/zh-Hant/dataset-documents.ts @@ -341,7 +341,6 @@ const translation = { keywords: '關鍵詞', addKeyWord: '新增關鍵詞', keywordError: '關鍵詞最大長度為 20', - characters: '字元', hitCount: '召回次數', vectorHash: '向量雜湊:', questionPlaceholder: '在這裡新增問題', diff --git a/web/i18n/zh-Hant/dataset-hit-testing.ts b/web/i18n/zh-Hant/dataset-hit-testing.ts index 0dbe149025..4b8cc5150a 100644 --- a/web/i18n/zh-Hant/dataset-hit-testing.ts +++ b/web/i18n/zh-Hant/dataset-hit-testing.ts @@ -2,7 +2,6 @@ const translation = { title: '召回測試', desc: '基於給定的查詢文字測試知識庫的召回效果。', dateTimeFormat: 'YYYY-MM-DD HH:mm', - recents: '最近查詢', table: { header: { source: '資料來源', diff --git a/web/i18n/zh-Hant/login.ts b/web/i18n/zh-Hant/login.ts index ae617cb5c0..8187323276 100644 --- a/web/i18n/zh-Hant/login.ts +++ b/web/i18n/zh-Hant/login.ts @@ -70,7 +70,6 @@ const translation = { activated: '現在登入', adminInitPassword: '管理員初始化密碼', validate: '驗證', - sso: '繼續使用 SSO', checkCode: { verify: '驗證', resend: '發送', diff --git a/web/i18n/zh-Hant/plugin.ts b/web/i18n/zh-Hant/plugin.ts index 0d0e1f8782..117491fe05 100644 --- a/web/i18n/zh-Hant/plugin.ts +++ b/web/i18n/zh-Hant/plugin.ts @@ -257,6 +257,7 @@ const translation = { fixOnly: { name: '僅修理', selectedDescription: '僅限於修補版本的自動更新', + description: '僅為補丁版本自動更新(例如:1.0.1 → 1.0.2)。次要版本變更不會觸發更新。', }, latest: { description: '始終更新至最新版本', diff --git a/web/i18n/zh-Hant/tools.ts b/web/i18n/zh-Hant/tools.ts index fbfb09e321..9dad3a74cf 100644 --- a/web/i18n/zh-Hant/tools.ts +++ b/web/i18n/zh-Hant/tools.ts @@ -54,7 +54,6 @@ const translation = { keyTooltip: 'HTTP 頭部名稱,如果你不知道是什麼,可以將其保留為 Authorization 或設定為自定義值', types: { none: '無', - api_key: 'API Key', apiKeyPlaceholder: 'HTTP 頭部名稱,用於傳遞 API Key', apiValuePlaceholder: '輸入 API Key', api_key_query: '查詢參數', diff --git a/web/i18n/zh-Hant/workflow.ts 
b/web/i18n/zh-Hant/workflow.ts index 0ffdde7713..bcdfbb81d3 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -107,10 +107,8 @@ const translation = { loadMore: '載入更多工作流', noHistory: '無歷史記錄', publishUpdate: '發布更新', - referenceVar: '參考變量', exportSVG: '匯出為 SVG', exportPNG: '匯出為 PNG', - noExist: '沒有這個變數', versionHistory: '版本歷史', exitVersions: '退出版本', exportImage: '匯出圖像', @@ -497,6 +495,7 @@ const translation = { placeholder: '輸入數值', }, title: '元數據過濾', + tip: '元數據過濾是使用元數據屬性(如標籤、類別或訪問權限)來精煉和控制在系統內檢索相關信息的過程。', }, }, http: { @@ -609,7 +608,6 @@ const translation = { }, select: '選擇', addSubVariable: '子變數', - condition: '條件', }, variableAssigner: { title: '變量賦值', diff --git a/web/models/common.ts b/web/models/common.ts index cb8fb7f2bf..867f4cf8fe 100644 --- a/web/models/common.ts +++ b/web/models/common.ts @@ -1,4 +1,4 @@ -import type { I18nText } from '@/i18n/language' +import type { I18nText } from '@/i18n-config/language' import type { Model } from '@/types/app' export type CommonResponse = { diff --git a/web/models/share.ts b/web/models/share.ts index 1e3b6d6bb7..64725ddf3a 100644 --- a/web/models/share.ts +++ b/web/models/share.ts @@ -1,4 +1,4 @@ -import type { Locale } from '@/i18n' +import type { Locale } from '@/i18n-config' import type { AppIconType } from '@/types/app' export type ResponseHolder = {} diff --git a/web/package.json b/web/package.json index 2470a70dec..d93788a368 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "dify-web", - "version": "1.7.0", + "version": "1.7.1", "private": true, "engines": { "node": ">=v22.11.0" @@ -30,8 +30,8 @@ "prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky", "gen-icons": "node ./app/components/base/icons/script.mjs", "uglify-embed": "node ./bin/uglify-embed", - "check-i18n": "node ./i18n/check-i18n.js", - "auto-gen-i18n": "node ./i18n/auto-gen-i18n.js", + "check-i18n": "node ./i18n-config/check-i18n.js", + "auto-gen-i18n": "node ./i18n-config/auto-gen-i18n.js", "test": "jest", "test:watch": "jest --watch", "storybook": "storybook dev -p 6006", @@ -152,7 +152,7 @@ "zustand": "^4.5.2" }, "devDependencies": { - "@antfu/eslint-config": "^4.1.1", + "@antfu/eslint-config": "^5.0.0", "@chromatic-com/storybook": "^3.1.0", "@eslint-react/eslint-plugin": "^1.15.0", "@eslint/eslintrc": "^3.1.0", @@ -160,7 +160,7 @@ "@faker-js/faker": "^9.0.3", "@happy-dom/jest-environment": "^17.4.4", "@next/bundle-analyzer": "^15.4.1", - "@next/eslint-plugin-next": "~15.3.5", + "@next/eslint-plugin-next": "~15.4.4", "@rgrove/parse-xml": "^4.1.0", "@storybook/addon-essentials": "8.5.0", "@storybook/addon-interactions": "8.5.0", @@ -197,7 +197,7 @@ "code-inspector-plugin": "^0.18.1", "cross-env": "^7.0.3", "eslint": "^9.20.1", - "eslint-config-next": "~15.3.5", + "eslint-config-next": "~15.4.4", "eslint-plugin-oxlint": "^1.6.0", "eslint-plugin-react-hooks": "^5.1.0", "eslint-plugin-react-refresh": "^0.4.19", @@ -216,7 +216,7 @@ "tailwindcss": "^3.4.14", "ts-node": "^10.9.2", "typescript": "^5.8.3", - "typescript-eslint": "^8.36.0", + "typescript-eslint": "^8.38.0", "uglify-js": "^3.19.3" }, "resolutions": { diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index eaff8c8504..58153b9fc1 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -379,8 +379,8 @@ importers: version: 4.5.7(@types/react@19.1.8)(immer@9.0.21)(react@19.1.0) devDependencies: '@antfu/eslint-config': - specifier: ^4.1.1 - version: 
4.17.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.31.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.31.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.31.0(jiti@1.21.7)))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + specifier: ^5.0.0 + version: 5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.31.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.4.4)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.31.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.31.0(jiti@1.21.7)))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) '@chromatic-com/storybook': specifier: ^3.1.0 version: 3.2.7(react@19.1.0)(storybook@8.5.0) @@ -403,8 +403,8 @@ importers: specifier: ^15.4.1 version: 15.4.1 '@next/eslint-plugin-next': - specifier: ~15.3.5 - version: 15.3.5 + specifier: ~15.4.4 + version: 15.4.4 '@rgrove/parse-xml': specifier: ^4.1.0 version: 4.2.0 @@ -514,8 +514,8 @@ importers: specifier: ^9.20.1 version: 9.31.0(jiti@1.21.7) eslint-config-next: - specifier: ~15.3.5 - version: 15.3.5(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + specifier: ~15.4.4 + version: 15.4.4(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint-plugin-oxlint: specifier: ^1.6.0 version: 1.6.0 @@ -571,8 +571,8 @@ importers: specifier: ^5.8.3 version: 5.8.3 typescript-eslint: - specifier: ^8.36.0 - version: 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + specifier: ^8.38.0 + version: 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) uglify-js: specifier: ^3.19.3 version: 3.19.3 @@ -590,11 +590,12 @@ packages: resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} engines: {node: '>=6.0.0'} - '@antfu/eslint-config@4.17.0': - resolution: {integrity: sha512-S1y0A1+0DcpV6GmjwB9gQCQc7ni9zlKa3MQRqRCEZ0E1WW+nRL1BUwnbk3DpMJAMsb3UIAt1lsAiIBnvIw2NDw==} + '@antfu/eslint-config@5.0.0': + resolution: {integrity: sha512-uAMv8PiW9BOAGmIyTDtWXGnNfv6PFV4DmpqmlUpST5k4bue38VRdIfnM4jvgPuny1xnjYX3flN3kB9++6LknMw==} hasBin: true peerDependencies: '@eslint-react/eslint-plugin': ^1.38.4 + '@next/eslint-plugin-next': ^15.4.0-canary.115 '@prettier/plugin-xml': ^3.4.1 '@unocss/eslint-plugin': '>=0.50.0' astro-eslint-parser: ^1.0.2 @@ -612,6 +613,8 @@ packages: peerDependenciesMeta: '@eslint-react/eslint-plugin': optional: true + '@next/eslint-plugin-next': + optional: true '@prettier/plugin-xml': optional: true '@unocss/eslint-plugin': @@ -1563,14 +1566,6 @@ packages: resolution: {integrity: sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.13.0': - resolution: {integrity: sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - - '@eslint/core@0.14.0': - resolution: {integrity: sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/core@0.15.1': resolution: {integrity: sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -1583,22 +1578,22 @@ packages: resolution: {integrity: sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==} engines: {node: ^18.18.0 || ^20.9.0 
|| >=21.1.0} - '@eslint/markdown@7.0.0': - resolution: {integrity: sha512-0WNH6pSFHNlWSlNaIFQP0sLHpMUJw1FaJtyqapvGqOt0ISRgTUkTLVT0hT/zekDA1QlP2TT8pwjPkqYTu2s8yg==} + '@eslint/markdown@7.1.0': + resolution: {integrity: sha512-Y+X1B1j+/zupKDVJfkKc8uYMjQkGzfnd8lt7vK3y8x9Br6H5dBuhAfFrQ6ff7HAMm/1BwgecyEiRFkYCWPRxmA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} '@eslint/object-schema@2.1.6': resolution: {integrity: sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.2.8': - resolution: {integrity: sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==} - engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} - '@eslint/plugin-kit@0.3.3': resolution: {integrity: sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@eslint/plugin-kit@0.3.4': + resolution: {integrity: sha512-Ul5l+lHEcw3L5+k8POx6r74mxEYKG5kOb6Xpy2gCRW6zweT6TEhAf8vhxGgjhqrd/VO/Dirhsb+1hNpD1ue9hw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@faker-js/faker@9.9.0': resolution: {integrity: sha512-OEl393iCOoo/z8bMezRlJu+GlRGlsKbUAN7jKB6LhnKoqKve5DXRpalbItIIcwnCjs1k/FOPjFzcA6Qn+H+YbA==} engines: {node: '>=18.0.0', npm: '>=9.0.0'} @@ -2115,8 +2110,8 @@ packages: '@next/env@15.3.5': resolution: {integrity: sha512-7g06v8BUVtN2njAX/r8gheoVffhiKFVt4nx74Tt6G4Hqw9HCLYQVx/GkH2qHvPtAHZaUNZ0VXAa0pQP6v1wk7g==} - '@next/eslint-plugin-next@15.3.5': - resolution: {integrity: sha512-BZwWPGfp9po/rAnJcwUBaM+yT/+yTWIkWdyDwc74G9jcfTrNrmsHe+hXHljV066YNdVs8cxROxX5IgMQGX190w==} + '@next/eslint-plugin-next@15.4.4': + resolution: {integrity: sha512-1FDsyN//ai3Jd97SEd7scw5h1yLdzDACGOPRofr2GD3sEFsBylEEoL0MHSerd4n2dq9Zm/mFMqi4+NRMOreOKA==} '@next/mdx@15.3.5': resolution: {integrity: sha512-/2rRCgPKNp2ttQscU13auI+cYYACdPa80Okgi/1+NNJJeWn9yVxwGnqZc3SX30T889bZbLqcY4oUjqYGAygL4g==} @@ -2825,8 +2820,8 @@ packages: peerDependencies: storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 - '@stylistic/eslint-plugin@5.2.0': - resolution: {integrity: sha512-RCEdbREv9EBiToUBQTlRhVYKG093I6ZnnQ990j08eJ6uRZh71DXkOnoxtTLfDQ6utVCVQzrhZFHZP0zfrfOIjA==} + '@stylistic/eslint-plugin@5.2.2': + resolution: {integrity: sha512-bE2DUjruqXlHYP3Q2Gpqiuj2bHq7/88FnuaS0FjeGGLCy+X6a07bGVuwtiOYnPSLHR6jmx5Bwdv+j7l8H+G97A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: '>=9.0.0' @@ -3219,16 +3214,16 @@ packages: '@types/yargs@17.0.33': resolution: {integrity: sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==} - '@typescript-eslint/eslint-plugin@8.37.0': - resolution: {integrity: sha512-jsuVWeIkb6ggzB+wPCsR4e6loj+rM72ohW6IBn2C+5NCvfUVY8s33iFPySSVXqtm5Hu29Ne/9bnA0JmyLmgenA==} + '@typescript-eslint/eslint-plugin@8.38.0': + resolution: {integrity: sha512-CPoznzpuAnIOl4nhj4tRr4gIPj5AfKgkiJmGQDaq+fQnRJTYlcBjbX3wbciGmpoPf8DREufuPRe1tNMZnGdanA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: - '@typescript-eslint/parser': ^8.37.0 + '@typescript-eslint/parser': ^8.38.0 eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' - '@typescript-eslint/parser@8.37.0': - resolution: {integrity: sha512-kVIaQE9vrN9RLCQMQ3iyRlVJpTiDUY6woHGb30JDkfJErqrQEmtdWH3gV0PBAfGZgQXoqzXOO0T3K6ioApbbAA==} + '@typescript-eslint/parser@8.38.0': + resolution: {integrity: sha512-Zhy8HCvBUEfBECzIl1PKqF4p11+d0aUJS1GeUiuqK9WmOug8YCmC4h4bjyBvMyAMI9sbRczmrYL5lKg/YMbrcQ==} 
engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -3240,16 +3235,32 @@ packages: peerDependencies: typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/project-service@8.38.0': + resolution: {integrity: sha512-dbK7Jvqcb8c9QfH01YB6pORpqX1mn5gDZc9n63Ak/+jD67oWXn3Gs0M6vddAN+eDXBCS5EmNWzbSxsn9SzFWWg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/scope-manager@8.37.0': resolution: {integrity: sha512-0vGq0yiU1gbjKob2q691ybTg9JX6ShiVXAAfm2jGf3q0hdP6/BruaFjL/ManAR/lj05AvYCH+5bbVo0VtzmjOA==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/scope-manager@8.38.0': + resolution: {integrity: sha512-WJw3AVlFFcdT9Ri1xs/lg8LwDqgekWXWhH3iAF+1ZM+QPd7oxQ6jvtW/JPwzAScxitILUIFs0/AnQ/UWHzbATQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/tsconfig-utils@8.37.0': resolution: {integrity: sha512-1/YHvAVTimMM9mmlPvTec9NP4bobA1RkDbMydxG8omqwJJLEW/Iy2C4adsAESIXU3WGLXFHSZUU+C9EoFWl4Zg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/tsconfig-utils@8.38.0': + resolution: {integrity: sha512-Lum9RtSE3EroKk/bYns+sPOodqb2Fv50XOl/gMviMKNvanETUuUcC9ObRbzrJ4VSd2JalPqgSAavwrPiPvnAiQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/type-utils@8.37.0': resolution: {integrity: sha512-SPkXWIkVZxhgwSwVq9rqj/4VFo7MnWwVaRNznfQDc/xPYHjXnPfLWn+4L6FF1cAz6e7dsqBeMawgl7QjUMj4Ow==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3257,16 +3268,33 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/type-utils@8.38.0': + resolution: {integrity: sha512-c7jAvGEZVf0ao2z+nnz8BUaHZD09Agbh+DY7qvBQqLiz8uJzRgVPj5YvOh8I8uEiH8oIUGIfHzMwUcGVco/SJg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/types@8.37.0': resolution: {integrity: sha512-ax0nv7PUF9NOVPs+lmQ7yIE7IQmAf8LGcXbMvHX5Gm+YJUYNAl340XkGnrimxZ0elXyoQJuN5sbg6C4evKA4SQ==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/types@8.38.0': + resolution: {integrity: sha512-wzkUfX3plUqij4YwWaJyqhiPE5UCRVlFpKn1oCRn2O1bJ592XxWJj8ROQ3JD5MYXLORW84063z3tZTb/cs4Tyw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/typescript-estree@8.37.0': resolution: {integrity: sha512-zuWDMDuzMRbQOM+bHyU4/slw27bAUEcKSKKs3hcv2aNnc/tvE/h7w60dwVw8vnal2Pub6RT1T7BI8tFZ1fE+yg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/typescript-estree@8.38.0': + resolution: {integrity: sha512-fooELKcAKzxux6fA6pxOflpNS0jc+nOQEEOipXFNjSlBS6fqrJOVY/whSn70SScHrcJ2LDsxWrneFoWYSVfqhQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/utils@8.37.0': resolution: {integrity: sha512-TSFvkIW6gGjN2p6zbXo20FzCABbyUAuq6tBvNRGsKdsSQ6a7rnV6ADfZ7f4iI3lIiXc4F4WWvtUfDw9CJ9pO5A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3274,10 +3302,21 @@ packages: eslint: ^8.57.0 || ^9.0.0 typescript: '>=4.8.4 <5.9.0' + '@typescript-eslint/utils@8.38.0': + resolution: {integrity: sha512-hHcMA86Hgt+ijJlrD8fX0j1j8w4C92zue/8LOPAFioIno+W0+L7KqE8QZKCcPGc/92Vs9x36w/4MPTJhqXdyvg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <5.9.0' 
+ '@typescript-eslint/visitor-keys@8.37.0': resolution: {integrity: sha512-YzfhzcTnZVPiLfP/oeKtDp2evwvHLMe0LOy7oe+hb9KKIumLNohYS9Hgp1ifwpu42YWxhZE8yieggz6JpqO/1w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@typescript-eslint/visitor-keys@8.38.0': + resolution: {integrity: sha512-pWrTcoFNWuwHlA9CvlfSsGWs14JxfN1TH25zM5L7o0pRLhsoZkDnTsXfQRJBEWJoV5DL0jf+Z+sxiud+K0mq1g==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} @@ -3876,6 +3915,9 @@ packages: resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + change-case@5.4.4: + resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==} + char-regex@1.0.2: resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} engines: {node: '>=10'} @@ -4649,8 +4691,8 @@ packages: peerDependencies: eslint: ^9.5.0 - eslint-config-next@15.3.5: - resolution: {integrity: sha512-oQdvnIgP68wh2RlR3MdQpvaJ94R6qEFl+lnu8ZKxPj5fsAHrSF/HlAOZcsimLw3DT6bnEQIUdbZC2Ab6sWyptg==} + eslint-config-next@15.4.4: + resolution: {integrity: sha512-sK/lWLUVF5om18O5w76Jt3F8uzu/LP5mVa6TprCMWkjWHUmByq80iHGHcdH7k1dLiJlj+DRIWf98d5piwRsSuA==} peerDependencies: eslint: ^7.23.0 || ^8.0.0 || ^9.0.0 typescript: '>=3.3.1' @@ -4787,8 +4829,8 @@ packages: peerDependencies: eslint: '>=8.45.0' - eslint-plugin-pnpm@1.0.0: - resolution: {integrity: sha512-tyEA10k7psB9HFCx8R4/bU4JS2tSKfXaCnrCcis+1R4FucfMIc6HgcFl4msZbwY2I0D9Vec3xAEkXV0aPechhQ==} + eslint-plugin-pnpm@1.1.0: + resolution: {integrity: sha512-sL93w0muBtjnogzk/loDsxzMbmXQOLP5Blw3swLDBXZgfb+qQI73bPcUbjVR+ZL+K62vGJdErV+43i3r5DsZPg==} peerDependencies: eslint: ^9.0.0 @@ -4901,11 +4943,11 @@ packages: peerDependencies: eslint: '>=6.0.0' - eslint-plugin-unicorn@59.0.1: - resolution: {integrity: sha512-EtNXYuWPUmkgSU2E7Ttn57LbRREQesIP1BiLn7OZLKodopKfDXfBUkC/0j6mpw2JExwf43Uf3qLSvrSvppgy8Q==} - engines: {node: ^18.20.0 || ^20.10.0 || >=21.0.0} + eslint-plugin-unicorn@60.0.0: + resolution: {integrity: sha512-QUzTefvP8stfSXsqKQ+vBQSEsXIlAiCduS/V1Em+FKgL9c21U/IIm20/e3MFy1jyCf14tHAhqC1sX8OTy6VUCg==} + engines: {node: ^20.10.0 || >=21.0.0} peerDependencies: - eslint: '>=9.22.0' + eslint: '>=9.29.0' eslint-plugin-unused-imports@4.1.4: resolution: {integrity: sha512-YptD6IzQjDardkl0POxnnRBhU1OEePMV0nd6siHaRBbd+lyh6NAhFEobiznKU7kTsSsDeSD62Pe7kAM1b7dAZQ==} @@ -6691,8 +6733,8 @@ packages: resolution: {integrity: sha512-2Rb3vm+EXble/sMXNSu6eoBx8e79gKqhNq9F5ZWW6ERNCTE/Q0wQNne5541tE5vKjfM8hpNCYL+LGc1YTfI0dg==} engines: {node: '>=6'} - pnpm-workspace-yaml@1.0.0: - resolution: {integrity: sha512-2RKg3khFgX/oeKIQnxxlj+OUoKbaZjBt7EsmQiLfl8AHZKMIpLmXLRPptZ5eq2Rlumh2gILs6OWNky5dzP+f8A==} + pnpm-workspace-yaml@1.1.0: + resolution: {integrity: sha512-OWUzBxtitpyUV0fBYYwLAfWxn3mSzVbVB7cwgNaHvTTU9P0V2QHjyaY5i7f1hEiT9VeKsNH1Skfhe2E3lx/zhA==} points-on-curve@0.2.0: resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} @@ -7829,8 +7871,8 @@ packages: resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} - typescript-eslint@8.37.0: - resolution: {integrity: 
sha512-TnbEjzkE9EmcO0Q2zM+GE8NQLItNAJpMmED1BdgoBMYNdqMhzlbqfdSwiRlAzEK2pA9UzVW0gzaaIzXWg2BjfA==} + typescript-eslint@8.38.0: + resolution: {integrity: sha512-FsZlrYK6bPDGoLeZRuvx2v6qrM03I0U0SnfCLPs/XCCPCFD80xU9Pg09H/K+XFa68uJuZo7l/Xhs+eDRg2l3hg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} peerDependencies: eslint: ^8.57.0 || ^9.0.0 @@ -8243,15 +8285,15 @@ snapshots: '@jridgewell/gen-mapping': 0.3.12 '@jridgewell/trace-mapping': 0.3.29 - '@antfu/eslint-config@4.17.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.31.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.31.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.31.0(jiti@1.21.7)))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': + '@antfu/eslint-config@5.0.0(@eslint-react/eslint-plugin@1.52.3(eslint@9.31.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3))(@next/eslint-plugin-next@15.4.4)(@vue/compiler-sfc@3.5.17)(eslint-plugin-react-hooks@5.2.0(eslint@9.31.0(jiti@1.21.7)))(eslint-plugin-react-refresh@0.4.20(eslint@9.31.0(jiti@1.21.7)))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 0.11.0 '@eslint-community/eslint-plugin-eslint-comments': 4.5.0(eslint@9.31.0(jiti@1.21.7)) - '@eslint/markdown': 7.0.0 - '@stylistic/eslint-plugin': 5.2.0(eslint@9.31.0(jiti@1.21.7)) - '@typescript-eslint/eslint-plugin': 8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@eslint/markdown': 7.1.0 + '@stylistic/eslint-plugin': 5.2.2(eslint@9.31.0(jiti@1.21.7)) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) '@vitest/eslint-plugin': 1.3.4(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) ansis: 4.1.0 cac: 6.7.14 @@ -8267,12 +8309,12 @@ snapshots: eslint-plugin-n: 17.21.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint-plugin-no-only-tests: 3.3.0 eslint-plugin-perfectionist: 4.15.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-pnpm: 1.0.0(eslint@9.31.0(jiti@1.21.7)) + eslint-plugin-pnpm: 1.1.0(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-regexp: 2.9.0(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-toml: 0.12.0(eslint@9.31.0(jiti@1.21.7)) - eslint-plugin-unicorn: 59.0.1(eslint@9.31.0(jiti@1.21.7)) - eslint-plugin-unused-imports: 4.1.4(@typescript-eslint/eslint-plugin@8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7)) - eslint-plugin-vue: 10.3.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.31.0(jiti@1.21.7))) + eslint-plugin-unicorn: 60.0.0(eslint@9.31.0(jiti@1.21.7)) + eslint-plugin-unused-imports: 4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7)) + eslint-plugin-vue: 10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.31.0(jiti@1.21.7))) 
eslint-plugin-yml: 1.18.0(eslint@9.31.0(jiti@1.21.7)) eslint-processor-vue-blocks: 2.0.0(@vue/compiler-sfc@3.5.17)(eslint@9.31.0(jiti@1.21.7)) globals: 16.3.0 @@ -8284,6 +8326,7 @@ snapshots: yaml-eslint-parser: 1.3.0 optionalDependencies: '@eslint-react/eslint-plugin': 1.52.3(eslint@9.31.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.8.3))(typescript@5.8.3) + '@next/eslint-plugin-next': 15.4.4 eslint-plugin-react-hooks: 5.2.0(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-react-refresh: 0.4.20(eslint@9.31.0(jiti@1.21.7)) transitivePeerDependencies: @@ -9192,7 +9235,7 @@ snapshots: '@es-joy/jsdoccomment@0.50.2': dependencies: '@types/estree': 1.0.8 - '@typescript-eslint/types': 8.37.0 + '@typescript-eslint/types': 8.38.0 comment-parser: 1.4.1 esquery: 1.6.0 jsdoc-type-pratt-parser: 4.1.0 @@ -9200,7 +9243,7 @@ snapshots: '@es-joy/jsdoccomment@0.52.0': dependencies: '@types/estree': 1.0.8 - '@typescript-eslint/types': 8.37.0 + '@typescript-eslint/types': 8.38.0 comment-parser: 1.4.1 esquery: 1.6.0 jsdoc-type-pratt-parser: 4.1.0 @@ -9399,14 +9442,6 @@ snapshots: '@eslint/config-helpers@0.3.0': {} - '@eslint/core@0.13.0': - dependencies: - '@types/json-schema': 7.0.15 - - '@eslint/core@0.14.0': - dependencies: - '@types/json-schema': 7.0.15 - '@eslint/core@0.15.1': dependencies: '@types/json-schema': 7.0.15 @@ -9427,10 +9462,10 @@ snapshots: '@eslint/js@9.31.0': {} - '@eslint/markdown@7.0.0': + '@eslint/markdown@7.1.0': dependencies: - '@eslint/core': 0.14.0 - '@eslint/plugin-kit': 0.3.3 + '@eslint/core': 0.15.1 + '@eslint/plugin-kit': 0.3.4 github-slugger: 2.0.0 mdast-util-from-markdown: 2.0.2 mdast-util-frontmatter: 2.0.1 @@ -9442,12 +9477,12 @@ snapshots: '@eslint/object-schema@2.1.6': {} - '@eslint/plugin-kit@0.2.8': + '@eslint/plugin-kit@0.3.3': dependencies: - '@eslint/core': 0.13.0 + '@eslint/core': 0.15.1 levn: 0.4.1 - '@eslint/plugin-kit@0.3.3': + '@eslint/plugin-kit@0.3.4': dependencies: '@eslint/core': 0.15.1 levn: 0.4.1 @@ -10151,7 +10186,7 @@ snapshots: '@next/env@15.3.5': {} - '@next/eslint-plugin-next@15.3.5': + '@next/eslint-plugin-next@15.4.4': dependencies: fast-glob: 3.3.1 @@ -11003,10 +11038,10 @@ snapshots: dependencies: storybook: 8.5.0 - '@stylistic/eslint-plugin@5.2.0(eslint@9.31.0(jiti@1.21.7))': + '@stylistic/eslint-plugin@5.2.2(eslint@9.31.0(jiti@1.21.7))': dependencies: '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) - '@typescript-eslint/types': 8.37.0 + '@typescript-eslint/types': 8.38.0 eslint: 9.31.0(jiti@1.21.7) eslint-visitor-keys: 4.2.1 espree: 10.4.0 @@ -11441,14 +11476,14 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/type-utils': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/utils': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/visitor-keys': 8.37.0 + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/scope-manager': 8.38.0 + '@typescript-eslint/type-utils': 
8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.38.0 eslint: 9.31.0(jiti@1.21.7) graphemer: 1.4.0 ignore: 7.0.5 @@ -11458,12 +11493,12 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': + '@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/scope-manager': 8.37.0 - '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/typescript-estree': 8.37.0(typescript@5.8.3) - '@typescript-eslint/visitor-keys': 8.37.0 + '@typescript-eslint/scope-manager': 8.38.0 + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/visitor-keys': 8.38.0 debug: 4.4.1 eslint: 9.31.0(jiti@1.21.7) typescript: 5.8.3 @@ -11479,15 +11514,33 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/project-service@8.38.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.38.0(typescript@5.8.3) + '@typescript-eslint/types': 8.38.0 + debug: 4.4.1 + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/scope-manager@8.37.0': dependencies: '@typescript-eslint/types': 8.37.0 '@typescript-eslint/visitor-keys': 8.37.0 + '@typescript-eslint/scope-manager@8.38.0': + dependencies: + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/visitor-keys': 8.38.0 + '@typescript-eslint/tsconfig-utils@8.37.0(typescript@5.8.3)': dependencies: typescript: 5.8.3 + '@typescript-eslint/tsconfig-utils@8.38.0(typescript@5.8.3)': + dependencies: + typescript: 5.8.3 + '@typescript-eslint/type-utils@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 8.37.0 @@ -11500,8 +11553,22 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/type-utils@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': + dependencies: + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + debug: 4.4.1 + eslint: 9.31.0(jiti@1.21.7) + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/types@8.37.0': {} + '@typescript-eslint/types@8.38.0': {} + '@typescript-eslint/typescript-estree@8.37.0(typescript@5.8.3)': dependencies: '@typescript-eslint/project-service': 8.37.0(typescript@5.8.3) @@ -11518,6 +11585,22 @@ snapshots: transitivePeerDependencies: - supports-color + '@typescript-eslint/typescript-estree@8.38.0(typescript@5.8.3)': + dependencies: + '@typescript-eslint/project-service': 8.38.0(typescript@5.8.3) + '@typescript-eslint/tsconfig-utils': 8.38.0(typescript@5.8.3) + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/visitor-keys': 8.38.0 + debug: 4.4.1 + fast-glob: 3.3.3 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.7.2 + ts-api-utils: 2.1.0(typescript@5.8.3) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/utils@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) @@ -11529,11 +11612,27 @@ snapshots: transitivePeerDependencies: - supports-color + 
'@typescript-eslint/utils@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': + dependencies: + '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.38.0 + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + eslint: 9.31.0(jiti@1.21.7) + typescript: 5.8.3 + transitivePeerDependencies: + - supports-color + '@typescript-eslint/visitor-keys@8.37.0': dependencies: '@typescript-eslint/types': 8.37.0 eslint-visitor-keys: 4.2.1 + '@typescript-eslint/visitor-keys@8.38.0': + dependencies: + '@typescript-eslint/types': 8.38.0 + eslint-visitor-keys: 4.2.1 + '@ungap/structured-clone@1.3.0': {} '@unrs/resolver-binding-android-arm-eabi@1.11.1': @@ -11597,7 +11696,7 @@ snapshots: '@vitest/eslint-plugin@1.3.4(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3)': dependencies: - '@typescript-eslint/utils': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint: 9.31.0(jiti@1.21.7) optionalDependencies: typescript: 5.8.3 @@ -12187,6 +12286,8 @@ snapshots: chalk@5.4.1: {} + change-case@5.4.4: {} + char-regex@1.0.2: {} character-entities-html4@2.1.0: {} @@ -12990,16 +13091,16 @@ snapshots: '@eslint/compat': 1.3.1(eslint@9.31.0(jiti@1.21.7)) eslint: 9.31.0(jiti@1.21.7) - eslint-config-next@15.3.5(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): + eslint-config-next@15.4.4(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@next/eslint-plugin-next': 15.3.5 + '@next/eslint-plugin-next': 15.4.4 '@rushstack/eslint-patch': 1.12.0 - '@typescript-eslint/eslint-plugin': 8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint: 9.31.0(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.31.0(jiti@1.21.7)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-react: 7.37.5(eslint@9.31.0(jiti@1.21.7)) eslint-plugin-react-hooks: 5.2.0(eslint@9.31.0(jiti@1.21.7)) @@ -13033,7 +13134,7 @@ snapshots: tinyglobby: 0.2.14 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) transitivePeerDependencies: - supports-color @@ -13047,11 +13148,11 @@ snapshots: dependencies: eslint: 9.31.0(jiti@1.21.7) - 
eslint-module-utils@2.12.1(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint: 9.31.0(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.31.0(jiti@1.21.7)) @@ -13077,12 +13178,12 @@ snapshots: eslint-plugin-import-lite@0.3.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): dependencies: '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) - '@typescript-eslint/types': 8.37.0 + '@typescript-eslint/types': 8.38.0 eslint: 9.31.0(jiti@1.21.7) optionalDependencies: typescript: 5.8.3 - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)): dependencies: '@rtsao/scc': 1.1.0 array-includes: '@nolyfill/array-includes@1.0.44' @@ -13093,7 +13194,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.31.0(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.31.0(jiti@1.21.7)) hasown: '@nolyfill/hasown@1.0.44' is-core-module: '@nolyfill/is-core-module@1.0.39' is-glob: 4.0.3 @@ -13105,7 +13206,7 @@ snapshots: string.prototype.trimend: '@nolyfill/string.prototype.trimend@1.0.44' tsconfig-paths: 3.15.0 optionalDependencies: - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack @@ -13183,21 +13284,21 @@ snapshots: eslint-plugin-perfectionist@4.15.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/types': 8.37.0 - '@typescript-eslint/utils': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/types': 8.38.0 + '@typescript-eslint/utils': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint: 9.31.0(jiti@1.21.7) natural-orderby: 5.0.0 transitivePeerDependencies: - supports-color - typescript - eslint-plugin-pnpm@1.0.0(eslint@9.31.0(jiti@1.21.7)): + eslint-plugin-pnpm@1.1.0(eslint@9.31.0(jiti@1.21.7)): dependencies: eslint: 9.31.0(jiti@1.21.7) find-up-simple: 1.0.1 jsonc-eslint-parser: 2.4.0 pathe: 2.0.3 - pnpm-workspace-yaml: 1.0.0 + pnpm-workspace-yaml: 1.1.0 tinyglobby: 0.2.14 yaml-eslint-parser: 1.3.0 @@ -13404,11 +13505,12 @@ snapshots: transitivePeerDependencies: - supports-color - 
eslint-plugin-unicorn@59.0.1(eslint@9.31.0(jiti@1.21.7)): + eslint-plugin-unicorn@60.0.0(eslint@9.31.0(jiti@1.21.7)): dependencies: '@babel/helper-validator-identifier': 7.27.1 '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) - '@eslint/plugin-kit': 0.2.8 + '@eslint/plugin-kit': 0.3.4 + change-case: 5.4.4 ci-info: 4.3.0 clean-regexp: 1.0.0 core-js-compat: 3.44.0 @@ -13425,13 +13527,13 @@ snapshots: semver: 7.7.2 strip-indent: 4.0.0 - eslint-plugin-unused-imports@4.1.4(@typescript-eslint/eslint-plugin@8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7)): + eslint-plugin-unused-imports@4.1.4(@typescript-eslint/eslint-plugin@8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7)): dependencies: eslint: 9.31.0(jiti@1.21.7) optionalDependencies: - '@typescript-eslint/eslint-plugin': 8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - eslint-plugin-vue@10.3.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.31.0(jiti@1.21.7))): + eslint-plugin-vue@10.3.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(vue-eslint-parser@10.2.0(eslint@9.31.0(jiti@1.21.7))): dependencies: '@eslint-community/eslint-utils': 4.7.0(eslint@9.31.0(jiti@1.21.7)) eslint: 9.31.0(jiti@1.21.7) @@ -13442,7 +13544,7 @@ snapshots: vue-eslint-parser: 10.2.0(eslint@9.31.0(jiti@1.21.7)) xml-name-validator: 4.0.0 optionalDependencies: - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint-plugin-yml@1.18.0(eslint@9.31.0(jiti@1.21.7)): dependencies: @@ -15832,7 +15934,7 @@ snapshots: transitivePeerDependencies: - typescript - pnpm-workspace-yaml@1.0.0: + pnpm-workspace-yaml@1.1.0: dependencies: yaml: 2.8.0 @@ -17106,12 +17208,12 @@ snapshots: type-fest@2.19.0: {} - typescript-eslint@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): + typescript-eslint@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3): dependencies: - '@typescript-eslint/eslint-plugin': 8.37.0(@typescript-eslint/parser@8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/parser': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) - '@typescript-eslint/typescript-estree': 8.37.0(typescript@5.8.3) - '@typescript-eslint/utils': 8.37.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/eslint-plugin': 8.38.0(@typescript-eslint/parser@8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3))(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/parser': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) + '@typescript-eslint/typescript-estree': 8.38.0(typescript@5.8.3) + '@typescript-eslint/utils': 8.38.0(eslint@9.31.0(jiti@1.21.7))(typescript@5.8.3) eslint: 9.31.0(jiti@1.21.7) typescript: 5.8.3 transitivePeerDependencies: diff --git a/web/service/annotation.ts b/web/service/annotation.ts index 5096a4f58a..9f025f8eb9 
100644 --- a/web/service/annotation.ts +++ b/web/service/annotation.ts @@ -63,3 +63,7 @@ export const delAnnotation = (appId: string, annotationId: string) => { export const fetchHitHistoryList = (appId: string, annotationId: string, params: Record<string, any>) => { return get(`apps/${appId}/annotations/${annotationId}/hit-histories`, { params }) } + +export const clearAllAnnotations = (appId: string): Promise<any> => { + return del(`apps/${appId}/annotations`) +} diff --git a/web/service/base.ts b/web/service/base.ts index 8ffacaa0f1..8081899837 100644 --- a/web/service/base.ts +++ b/web/service/base.ts @@ -115,7 +115,7 @@ function requiredWebSSOLogin(message?: string, code?: number) { params.append('message', message) if (code) params.append('code', String(code)) - globalThis.location.href = `/webapp-signin?${params.toString()}` + globalThis.location.href = `${globalThis.location.origin}${basePath}/webapp-signin?${params.toString()}` } export function format(text: string) { diff --git a/web/service/knowledge/use-segment.ts b/web/service/knowledge/use-segment.ts index ca1778fb94..8b3e939e73 100644 --- a/web/service/knowledge/use-segment.ts +++ b/web/service/knowledge/use-segment.ts @@ -154,9 +154,9 @@ export const useUpdateChildSegment = () => { export const useSegmentBatchImport = () => { return useMutation({ mutationKey: [NAME_SPACE, 'batchImport'], - mutationFn: (payload: { url: string; body: FormData }) => { + mutationFn: (payload: { url: string; body: { upload_file_id: string } }) => { const { url, body } = payload - return post(url, { body }, { bodyStringify: false, deleteContentType: true }) + return post(url, { body }) }, }) } diff --git a/web/types/app.ts b/web/types/app.ts index 634fce9845..64c806e1d8 100644 --- a/web/types/app.ts +++ b/web/types/app.ts @@ -1,6 +1,6 @@ import type { AnnotationReplyConfig, ChatPromptConfig, CompletionPromptConfig, DatasetConfigs, PromptMode } from '@/models/debug' import type { CollectionType } from '@/app/components/tools/types' -import type { LanguagesSupported } from '@/i18n/language' +import type { LanguagesSupported } from '@/i18n-config/language' import type { Tag } from '@/app/components/base/tag-management/constant' import type { RerankingModeEnum, diff --git a/web/utils/model-config.ts b/web/utils/model-config.ts index 873dc3ccb4..e317454db1 100644 --- a/web/utils/model-config.ts +++ b/web/utils/model-config.ts @@ -62,6 +62,7 @@ export const userInputsFormToPromptVariables = (useInputs: UserInputFormItem[] | options: content.options, is_context_var, hide: content.hide, + default: content.default, }) } else if (type === 'file') { @@ -148,7 +149,7 @@ export const promptVariablesToUserInputsForm = (promptVariables: PromptVariable[ variable: item.key, required: item.required !== false, // default true options: item.options, - default: '', + default: item.default ?? '', hide: item.hide, }, } as any)
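
For context on how the pieces above fit together: the per-locale `clearAllConfirm` strings and the new `clearAllAnnotations` helper in `web/service/annotation.ts` support a confirm-then-delete flow for the annotation list. Below is a minimal sketch of a caller, assuming a plain `t` lookup function, the key path `appAnnotation.table.clearAllConfirm`, and `window.confirm` in place of Dify's own modal component; all three are illustrative assumptions, not part of this diff.

```ts
import { clearAllAnnotations } from '@/service/annotation'

// Hypothetical handler: confirm with the localized prompt added in this diff,
// then call the new DELETE `apps/:appId/annotations` endpoint.
export async function onClearAllAnnotations(appId: string, t: (key: string) => string): Promise<void> {
  // `clearAllConfirm` resolves per locale, e.g. '删除所有标注?' (zh-Hans)
  // or 'Видалити всі анотації?' (uk-UA); the exact key path is assumed here.
  if (!window.confirm(t('appAnnotation.table.clearAllConfirm')))
    return
  await clearAllAnnotations(appId)
}
```

Keeping the confirmation string in the i18n bundles rather than hard-coding it is what lets every locale touched by this patch ship the prompt without further code changes.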