mirror of https://github.com/langgenius/dify.git

Merge branch 'main' into feat/r2

commit a025db137d
@@ -7,6 +7,7 @@ pipx install uv
 echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
 echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
 echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
+echo 'alias start-web-prod="cd /workspaces/dify/web && pnpm build && pnpm start"' >> ~/.bashrc
 echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
 echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
@@ -31,11 +31,19 @@ jobs:
            echo "FILES_CHANGED=false" >> $GITHUB_ENV
          fi

+      - name: Install pnpm
+        uses: pnpm/action-setup@v4
+        with:
+          version: 10
+          run_install: false
+
       - name: Set up Node.js
         if: env.FILES_CHANGED == 'true'
         uses: actions/setup-node@v4
         with:
           node-version: 'lts/*'
+          cache: pnpm
+          cache-dependency-path: ./web/package.json

       - name: Install dependencies
         if: env.FILES_CHANGED == 'true'
@@ -235,7 +235,7 @@ At the same time, please consider supporting Dify by sharing it on social media

 ## Community & contact

-- [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
+- [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
 - [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 - [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
 - [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
@@ -223,7 +223,7 @@ docker compose up -d
 </a>

 ## المجتمع والاتصال
-- [مناقشة Github](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة.
+- [مناقشة GitHub](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة.
 - [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 - [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
 - [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
@@ -234,7 +234,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন

 ## কমিউনিটি এবং যোগাযোগ

-- [Github Discussion](https://github.com/langgenius/dify/discussions) ফিডব্যাক এবং প্রতিক্রিয়া জানানোর মাধ্যম।
+- [GitHub Discussion](https://github.com/langgenius/dify/discussions) ফিডব্যাক এবং প্রতিক্রিয়া জানানোর মাধ্যম।
 - [GitHub Issues](https://github.com/langgenius/dify/issues). Dify.AI ব্যবহার করে আপনি যেসব বাগের সম্মুখীন হন এবং ফিচার প্রস্তাবনা। আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন।
 - [Discord](https://discord.gg/FngNHpbcY7) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম।
 - [X(Twitter)](https://twitter.com/dify_ai) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম।
@@ -243,7 +243,7 @@ docker compose up -d

 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括:提交代码、问题、新想法,或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。

-- [Github Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。
+- [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。
 - [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。
 - [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。
 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。
@@ -230,7 +230,7 @@ Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](

 ## Gemeinschaft & Kontakt

-* [Github Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen.
+* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen.
 * [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
 * [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
@@ -236,7 +236,7 @@ docker compose up -d

 ## コミュニティ & お問い合わせ

-* [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
+* [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
 * [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
 * [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
 * [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。
@@ -235,7 +235,7 @@ At the same time, please consider supporting Dify by sharing it on social media

 ## Community & Contact

-* [Github Discussion](https://github.com/langgenius/dify/discussions
+* [GitHub Discussion](https://github.com/langgenius/dify/discussions
 ). Best for: sharing feedback and asking questions.
 * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
@@ -229,7 +229,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했

 ## 커뮤니티 & 연락처

-* [Github 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
+* [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
 * [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
 * [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
 * [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
@@ -229,7 +229,7 @@ Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkra

 ## Skupnost in stik

-* [Github Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj.
+* [GitHub Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj.
 * [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 * [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
 * [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
@@ -227,7 +227,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p

 ## Topluluk & iletişim

-* [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
+* [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
 * [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
 * [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
 * [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
@@ -233,7 +233,7 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify

 ## 社群與聯絡方式

-- [Github Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。
+- [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。
 - [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
 - [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。
 - [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。
@@ -152,6 +152,7 @@ QDRANT_API_KEY=difyai123456
 QDRANT_CLIENT_TIMEOUT=20
 QDRANT_GRPC_ENABLED=false
 QDRANT_GRPC_PORT=6334
+QDRANT_REPLICATION_FACTOR=1

 #Couchbase configuration
 COUCHBASE_CONNECTION_STRING=127.0.0.1
@@ -33,3 +33,8 @@ class QdrantConfig(BaseSettings):
         description="Port number for gRPC connection to Qdrant server (default is 6334)",
         default=6334,
     )
+
+    QDRANT_REPLICATION_FACTOR: PositiveInt = Field(
+        description="Replication factor for Qdrant collections (default is 1)",
+        default=1,
+    )
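For readers unfamiliar with the configuration layer touched above: Dify's settings classes are pydantic-settings models, so the new field is populated from an environment variable of the same name. A minimal, self-contained sketch of that behavior (standard pydantic-settings semantics, not Dify-specific code):

```python
# Minimal sketch: a pydantic-settings field such as QDRANT_REPLICATION_FACTOR
# is read from the environment variable of the same name, falling back to the
# declared default when the variable is unset.
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class QdrantConfig(BaseSettings):
    QDRANT_REPLICATION_FACTOR: PositiveInt = Field(
        description="Replication factor for Qdrant collections (default is 1)",
        default=1,
    )


print(QdrantConfig().QDRANT_REPLICATION_FACTOR)  # 1 (the default)
os.environ["QDRANT_REPLICATION_FACTOR"] = "2"
print(QdrantConfig().QDRANT_REPLICATION_FACTOR)  # 2 (from the environment)
```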
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):

     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.4.0",
+        default="1.4.1",
     )

     COMMIT_SHA: str = Field(
@@ -12,10 +12,6 @@ if TYPE_CHECKING:
     from core.workflow.entities.variable_pool import VariablePool


-tenant_id: ContextVar[str] = ContextVar("tenant_id")
-
-workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool")
-
 """
 To avoid race-conditions caused by gunicorn thread recycling, using RecyclableContextVar to replace with
 """
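The docstring kept above refers to RecyclableContextVar, Dify's wrapper that guards against gunicorn recycling worker threads and thereby leaking a previous request's ContextVar values into a new one. The following is only a rough illustration of that idea under stated assumptions; it is not Dify's actual implementation:

```python
# Illustrative sketch only -- not Dify's RecyclableContextVar. Assumption:
# each incoming request bumps a per-thread "epoch"; a value written under an
# older epoch belongs to a recycled thread's previous request and is ignored.
from contextvars import ContextVar

_epoch: ContextVar[int] = ContextVar("epoch", default=0)


def on_new_request() -> None:
    # Called at the start of each request handled by this thread.
    _epoch.set(_epoch.get() + 1)


class RecyclableVar:
    def __init__(self, name: str):
        self._var: ContextVar[tuple[int, object]] = ContextVar(name)

    def set(self, value: object) -> None:
        self._var.set((_epoch.get(), value))

    def get(self, default: object = None) -> object:
        epoch, value = self._var.get((-1, default))
        # A stale epoch means the value leaked from an earlier request.
        return value if epoch == _epoch.get() else default
```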
@@ -41,12 +41,16 @@ class PluginListApi(Resource):
     @account_initialization_required
     def get(self):
         tenant_id = current_user.current_tenant_id
+        parser = reqparse.RequestParser()
+        parser.add_argument("page", type=int, required=False, location="args", default=1)
+        parser.add_argument("page_size", type=int, required=False, location="args", default=256)
+        args = parser.parse_args()
         try:
-            plugins = PluginService.list(tenant_id)
+            plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
         except PluginDaemonClientSideError as e:
             raise ValueError(e)

-        return jsonable_encoder({"plugins": plugins})
+        return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})


 class PluginListLatestVersionsApi(Resource):
@@ -2,12 +2,14 @@ from collections.abc import Callable
 from functools import wraps
 from typing import Optional

-from flask import request
+from flask import current_app, request
+from flask_login import user_logged_in
 from flask_restful import reqparse
 from pydantic import BaseModel
 from sqlalchemy.orm import Session

 from extensions.ext_database import db
+from libs.login import _get_user
 from models.account import Account, Tenant
 from models.model import EndUser
 from services.account_service import AccountService

@@ -80,7 +82,12 @@ def get_user_tenant(view: Optional[Callable] = None):
             raise ValueError("tenant not found")

         kwargs["tenant_model"] = tenant_model
-        kwargs["user_model"] = get_user(tenant_id, user_id)
+        user = get_user(tenant_id, user_id)
+        kwargs["user_model"] = user
+
+        current_app.login_manager._update_request_context_with_user(user)  # type: ignore
+        user_logged_in.send(current_app._get_current_object(), user=_get_user())  # type: ignore

         return view_func(*args, **kwargs)
@@ -3,7 +3,7 @@ from flask_restful import Resource, marshal, marshal_with, reqparse
 from werkzeug.exceptions import Forbidden

 from controllers.service_api import api
-from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token
+from controllers.service_api.wraps import validate_app_token
 from extensions.ext_redis import redis_client
 from fields.annotation_fields import (
     annotation_fields,

@@ -14,7 +14,7 @@ from services.annotation_service import AppAnnotationService


 class AnnotationReplyActionApi(Resource):
-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
+    @validate_app_token
     def post(self, app_model: App, end_user: EndUser, action):
         parser = reqparse.RequestParser()
         parser.add_argument("score_threshold", required=True, type=float, location="json")

@@ -31,7 +31,7 @@ class AnnotationReplyActionApi(Resource):


 class AnnotationReplyActionStatusApi(Resource):
-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
+    @validate_app_token
     def get(self, app_model: App, end_user: EndUser, job_id, action):
         job_id = str(job_id)
         app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))

@@ -49,7 +49,7 @@ class AnnotationReplyActionStatusApi(Resource):


 class AnnotationListApi(Resource):
-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
+    @validate_app_token
     def get(self, app_model: App, end_user: EndUser):
         page = request.args.get("page", default=1, type=int)
         limit = request.args.get("limit", default=20, type=int)

@@ -65,7 +65,7 @@ class AnnotationListApi(Resource):
         }
         return response, 200

-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
+    @validate_app_token
     @marshal_with(annotation_fields)
     def post(self, app_model: App, end_user: EndUser):
         parser = reqparse.RequestParser()

@@ -77,7 +77,7 @@ class AnnotationListApi(Resource):


 class AnnotationUpdateDeleteApi(Resource):
-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON))
+    @validate_app_token
     @marshal_with(annotation_fields)
     def put(self, app_model: App, end_user: EndUser, annotation_id):
         if not current_user.is_editor:

@@ -91,7 +91,7 @@ class AnnotationUpdateDeleteApi(Resource):
         annotation = AppAnnotationService.update_app_annotation_directly(args, app_model.id, annotation_id)
         return annotation

-    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
+    @validate_app_token
     def delete(self, app_model: App, end_user: EndUser, annotation_id):
         if not current_user.is_editor:
             raise Forbidden()
@@ -99,7 +99,12 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optional[FetchUserArg] = None):
             if user_id:
                 user_id = str(user_id)

-            kwargs["end_user"] = create_or_update_end_user_for_user_id(app_model, user_id)
+            end_user = create_or_update_end_user_for_user_id(app_model, user_id)
+            kwargs["end_user"] = end_user
+
+            # Set EndUser as current logged-in user for flask_login.current_user
+            current_app.login_manager._update_request_context_with_user(end_user)  # type: ignore
+            user_logged_in.send(current_app._get_current_object(), user=end_user)  # type: ignore

             return view_func(*args, **kwargs)
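Both wrappers above use the same trick to make a programmatically resolved user visible to `flask_login.current_user`. A hedged sketch of just that step; note that `_update_request_context_with_user` is a private Flask-Login method, so this mirrors what the diff relies on rather than a documented API:

```python
# Sketch: register `user` as the logged-in user for the current request so
# that flask_login.current_user resolves to it in downstream code.
from flask import current_app
from flask_login import user_logged_in


def login_for_request(user) -> None:
    # Private Flask-Login API, used here exactly as in the diff above.
    current_app.login_manager._update_request_context_with_user(user)  # type: ignore[attr-defined]
    user_logged_in.send(current_app._get_current_object(), user=user)  # type: ignore[attr-defined]
```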
@@ -63,7 +63,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
         self._instruction = self._fill_in_inputs_from_external_data_tools(instruction, inputs)

         iteration_step = 1
-        max_iteration_steps = min(app_config.agent.max_iteration if app_config.agent else 5, 5) + 1
+        max_iteration_steps = min(app_config.agent.max_iteration, 99) + 1

         # convert tools into ModelRuntime Tool format
         tool_instances, prompt_messages_tools = self._init_prompt_tools()
@@ -82,7 +82,7 @@ class AgentEntity(BaseModel):
     strategy: Strategy
     prompt: Optional[AgentPromptEntity] = None
     tools: Optional[list[AgentToolEntity]] = None
-    max_iteration: int = 5
+    max_iteration: int = 10


 class AgentInvokeMessage(ToolInvokeMessage):
@@ -48,7 +48,7 @@ class FunctionCallAgentRunner(BaseAgentRunner):
         assert app_config.agent

         iteration_step = 1
-        max_iteration_steps = min(app_config.agent.max_iteration, 5) + 1
+        max_iteration_steps = min(app_config.agent.max_iteration, 99) + 1

         # continue to run until there is not any tool call
         function_call_state = True
@@ -75,7 +75,7 @@ class AgentConfigManager:
                 strategy=strategy,
                 prompt=agent_prompt_entity,
                 tools=agent_tools,
-                max_iteration=agent_dict.get("max_iteration", 5),
+                max_iteration=agent_dict.get("max_iteration", 10),
             )

         return None
@@ -5,7 +5,7 @@ import uuid
 from collections.abc import Generator, Mapping
 from typing import Any, Literal, Optional, Union, overload

-from flask import Flask, current_app
+from flask import Flask, copy_current_request_context, current_app, has_request_context
 from pydantic import ValidationError
 from sqlalchemy.orm import sessionmaker
@@ -158,7 +158,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             trace_manager=trace_manager,
             workflow_run_id=workflow_run_id,
         )
-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -240,7 +239,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
                 node_id=node_id, inputs=args["inputs"]
             ),
         )
-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -316,7 +314,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             extras={"auto_generate_conversation_name": False},
             single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
         )
-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -399,18 +396,23 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             message_id=message.id,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "conversation_id": conversation.id,
-                "message_id": message.id,
-                "context": contextvars.copy_context(),
-            },
-        )
+        # new thread with request context and contextvars
+        context = contextvars.copy_context()
+
+        @copy_current_request_context
+        def worker_with_context():
+            # Run the worker within the copied context
+            return context.run(
+                self._generate_worker,
+                flask_app=current_app._get_current_object(),  # type: ignore
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                conversation_id=conversation.id,
+                message_id=message.id,
+                context=context,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
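The same rework is applied to the agent-chat, chat, completion, and workflow generators below. A self-contained sketch of the technique, with a toy Flask app standing in for Dify's: `copy_current_request_context` carries the HTTP request context into the thread, while `Context.run` restores the snapshot of contextvars taken on the request thread.

```python
# Toy example (not Dify code): spawn a worker thread that still sees both
# the Flask request context and the contextvars of the request thread.
import contextvars
import threading

from flask import Flask, copy_current_request_context, request

app = Flask(__name__)


def do_work() -> None:
    # Safe: the request context was copied into this thread.
    print("path seen from worker:", request.path)


@app.route("/run")
def run() -> str:
    context = contextvars.copy_context()

    @copy_current_request_context
    def worker_with_context():
        # Run the worker within the copied contextvars snapshot.
        return context.run(do_work)

    threading.Thread(target=worker_with_context).start()
    return "started"
```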
@@ -449,8 +451,22 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         """
         for var, val in context.items():
             var.set(val)
+
+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
             try:
+                # Restore user in new app context
+                if saved_user is not None:
+                    from flask import g
+
+                    g._login_user = saved_user
+
                 # get conversation and message
                 conversation = self._get_conversation(conversation_id)
                 message = self._get_message(message_id)
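The save/restore dance above exists because Flask-Login stashes the active user on `flask.g` (as the private attribute `_login_user`), and a freshly pushed `app_context()` starts with an empty `g`. A sketch of the pattern in isolation:

```python
# Sketch (relies on the private g._login_user attribute, as the diff does):
# capture the logged-in user before leaving the request context, then
# re-attach it inside the worker's new app context.
from flask import g, has_request_context


def run_with_user(flask_app, fn, *args, **kwargs):
    saved_user = None
    if has_request_context() and hasattr(g, "_login_user"):
        saved_user = g._login_user

    with flask_app.app_context():
        if saved_user is not None:
            g._login_user = saved_user  # restore for flask_login.current_user
        return fn(*args, **kwargs)
```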
@@ -315,6 +315,7 @@ class AdvancedChatAppGenerateTaskPipeline:
                     task_id=self._application_generate_entity.task_id,
                     workflow_execution=workflow_execution,
                 )
+                session.commit()

             yield workflow_start_resp
         elif isinstance(
@@ -5,7 +5,7 @@ import uuid
 from collections.abc import Generator, Mapping
 from typing import Any, Literal, Union, overload

-from flask import Flask, current_app
+from flask import Flask, copy_current_request_context, current_app, has_request_context
 from pydantic import ValidationError

 from configs import dify_config
@@ -179,18 +179,23 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
             message_id=message.id,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "context": contextvars.copy_context(),
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "conversation_id": conversation.id,
-                "message_id": message.id,
-            },
-        )
+        # new thread with request context and contextvars
+        context = contextvars.copy_context()
+
+        @copy_current_request_context
+        def worker_with_context():
+            # Run the worker within the copied context
+            return context.run(
+                self._generate_worker,
+                flask_app=current_app._get_current_object(),  # type: ignore
+                context=context,
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                conversation_id=conversation.id,
+                message_id=message.id,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
@@ -227,8 +232,21 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
         for var, val in context.items():
             var.set(val)

+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
             try:
+                # Restore user in new app context
+                if saved_user is not None:
+                    from flask import g
+
+                    g._login_user = saved_user
+
                 # get conversation and message
                 conversation = self._get_conversation(conversation_id)
                 message = self._get_message(message_id)
@@ -4,7 +4,7 @@ import uuid
 from collections.abc import Generator, Mapping
 from typing import Any, Literal, Union, overload

-from flask import Flask, current_app
+from flask import Flask, copy_current_request_context, current_app
 from pydantic import ValidationError

 from configs import dify_config
@@ -170,17 +170,18 @@ class ChatAppGenerator(MessageBasedAppGenerator):
             message_id=message.id,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "conversation_id": conversation.id,
-                "message_id": message.id,
-            },
-        )
+        # new thread with request context
+        @copy_current_request_context
+        def worker_with_context():
+            return self._generate_worker(
+                flask_app=current_app._get_current_object(),  # type: ignore
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                conversation_id=conversation.id,
+                message_id=message.id,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
@@ -4,7 +4,7 @@ import uuid
 from collections.abc import Generator, Mapping
 from typing import Any, Literal, Union, overload

-from flask import Flask, current_app
+from flask import Flask, copy_current_request_context, current_app
 from pydantic import ValidationError

 from configs import dify_config
@@ -151,16 +151,17 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
             message_id=message.id,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "message_id": message.id,
-            },
-        )
+        # new thread with request context
+        @copy_current_request_context
+        def worker_with_context():
+            return self._generate_worker(
+                flask_app=current_app._get_current_object(),  # type: ignore
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                message_id=message.id,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
@@ -313,16 +314,17 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
             message_id=message.id,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "message_id": message.id,
-            },
-        )
+        # new thread with request context
+        @copy_current_request_context
+        def worker_with_context():
+            return self._generate_worker(
+                flask_app=current_app._get_current_object(),  # type: ignore
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                message_id=message.id,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
@@ -5,7 +5,7 @@ import uuid
 from collections.abc import Generator, Mapping, Sequence
 from typing import Any, Literal, Optional, Union, overload

-from flask import Flask, current_app
+from flask import Flask, copy_current_request_context, current_app, has_request_context
 from pydantic import ValidationError
 from sqlalchemy.orm import sessionmaker
@@ -135,7 +135,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
             workflow_run_id=workflow_run_id,
         )

-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -207,17 +206,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
             app_mode=app_model.mode,
         )

-        # new thread
-        worker_thread = threading.Thread(
-            target=self._generate_worker,
-            kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
-                "application_generate_entity": application_generate_entity,
-                "queue_manager": queue_manager,
-                "context": contextvars.copy_context(),
-                "workflow_thread_pool_id": workflow_thread_pool_id,
-            },
-        )
+        # new thread with request context and contextvars
+        context = contextvars.copy_context()
+
+        @copy_current_request_context
+        def worker_with_context():
+            # Run the worker within the copied context
+            return context.run(
+                self._generate_worker,
+                flask_app=current_app._get_current_object(),  # type: ignore
+                application_generate_entity=application_generate_entity,
+                queue_manager=queue_manager,
+                context=context,
+                workflow_thread_pool_id=workflow_thread_pool_id,
+            )
+
+        worker_thread = threading.Thread(target=worker_with_context)

         worker_thread.start()
@@ -277,7 +281,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
             ),
             workflow_run_id=str(uuid.uuid4()),
         )
-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -354,7 +357,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
             single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
             workflow_run_id=str(uuid.uuid4()),
         )
-        contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())
@@ -408,8 +410,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
         """
         for var, val in context.items():
             var.set(val)
+
+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
             try:
+                # Restore user in new app context
+                if saved_user is not None:
+                    from flask import g
+
+                    g._login_user = saved_user
+
                 # workflow app
                 runner = WorkflowAppRunner(
                     application_generate_entity=application_generate_entity,
@@ -455,8 +455,6 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline, MessageCycleManage):
         agent_thought: Optional[MessageAgentThought] = (
             db.session.query(MessageAgentThought).filter(MessageAgentThought.id == event.agent_thought_id).first()
         )
-        db.session.refresh(agent_thought)
-        db.session.close()

         if agent_thought:
             return AgentThoughtStreamResponse(
@@ -51,15 +51,19 @@ class LLMGenerator:
             response = cast(
                 LLMResult,
                 model_instance.invoke_llm(
-                    prompt_messages=list(prompts), model_parameters={"max_tokens": 100, "temperature": 1}, stream=False
+                    prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False
                 ),
             )
             answer = cast(str, response.message.content)
             cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL)
             if cleaned_answer is None:
                 return ""
-            result_dict = json.loads(cleaned_answer)
-            answer = result_dict["Your Output"]
+            try:
+                result_dict = json.loads(cleaned_answer)
+                answer = result_dict["Your Output"]
+            except json.JSONDecodeError as e:
+                logging.exception("Failed to generate name after answer, use query instead")
+                answer = query
             name = answer.strip()

             if len(name) > 75:
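The hunk above stops a malformed model reply from raising out of conversation-name generation. A condensed sketch of the same fallback logic (the helper name is illustrative, not from the codebase, and it catches `KeyError` in addition to the diff's `JSONDecodeError`):

```python
# Sketch: extract the outermost {...} from a model reply, parse it, and fall
# back to the raw query when the reply is not usable JSON.
import json
import logging
import re


def parse_generated_name(answer: str, query: str) -> str:
    cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL)
    try:
        return str(json.loads(cleaned_answer)["Your Output"]).strip()
    except (json.JSONDecodeError, KeyError):
        logging.exception("Failed to generate name after answer, use query instead")
        return query.strip()


print(parse_generated_name('noise {"Your Output": "Trip planning"} noise', "q"))
print(parse_generated_name("not json at all", "fallback query"))
```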
@@ -58,6 +58,7 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
                         LLMNode.deduct_llm_quota(
                             tenant_id=tenant.id, model_instance=model_instance, usage=chunk.delta.usage
                         )
+                    chunk.prompt_messages = []
                     yield chunk

             return handle()

@@ -68,7 +69,7 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
             def handle_non_streaming(response: LLMResult) -> Generator[LLMResultChunk, None, None]:
                 yield LLMResultChunk(
                     model=response.model,
-                    prompt_messages=response.prompt_messages,
+                    prompt_messages=[],
                     system_fingerprint=response.system_fingerprint,
                     delta=LLMResultChunkDelta(
                         index=0,
@@ -10,7 +10,7 @@ from core.datasource.entities.datasource_entities import DatasourceProviderEntity
 from core.model_runtime.entities.model_entities import AIModelEntity
 from core.model_runtime.entities.provider_entities import ProviderEntity
 from core.plugin.entities.base import BasePluginEntity
-from core.plugin.entities.plugin import PluginDeclaration
+from core.plugin.entities.plugin import PluginDeclaration, PluginEntity
 from core.tools.entities.common_entities import I18nObject
 from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin

@@ -175,3 +175,8 @@ class PluginOAuthAuthorizationUrlResponse(BaseModel):

 class PluginOAuthCredentialsResponse(BaseModel):
     credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.")
+
+
+class PluginListResponse(BaseModel):
+    list: list[PluginEntity]
+    total: int
@@ -9,7 +9,12 @@ from core.plugin.entities.plugin import (
     PluginInstallation,
     PluginInstallationSource,
 )
-from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginInstallTaskStartResponse, PluginUploadResponse
+from core.plugin.entities.plugin_daemon import (
+    PluginInstallTask,
+    PluginInstallTaskStartResponse,
+    PluginListResponse,
+    PluginUploadResponse,
+)
 from core.plugin.impl.base import BasePluginClient

@@ -27,11 +32,20 @@ class PluginInstaller(BasePluginClient):
         )

     def list_plugins(self, tenant_id: str) -> list[PluginEntity]:
+        result = self._request_with_plugin_daemon_response(
+            "GET",
+            f"plugin/{tenant_id}/management/list",
+            PluginListResponse,
+            params={"page": 1, "page_size": 256},
+        )
+        return result.list
+
+    def list_plugins_with_total(self, tenant_id: str, page: int, page_size: int) -> PluginListResponse:
         return self._request_with_plugin_daemon_response(
             "GET",
             f"plugin/{tenant_id}/management/list",
-            list[PluginEntity],
-            params={"page": 1, "page_size": 256},
+            PluginListResponse,
+            params={"page": page, "page_size": page_size},
         )

     def upload_pkg(
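A hypothetical usage sketch of the new paginated wrapper (the iteration helper is invented for illustration; `list_plugins_with_total` and the `list`/`total` fields come from the diff above):

```python
# Hypothetical helper: drain every page from the paginated plugin listing.
from collections.abc import Iterator


def iter_all_plugins(installer, tenant_id: str, page_size: int = 256) -> Iterator:
    page = 1
    while True:
        resp = installer.list_plugins_with_total(tenant_id, page, page_size)
        yield from resp.list
        if page * page_size >= resp.total:
            break
        page += 1
```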
@@ -46,6 +46,7 @@ class QdrantConfig(BaseModel):
     root_path: Optional[str] = None
     grpc_port: int = 6334
     prefer_grpc: bool = False
+    replication_factor: int = 1

     def to_qdrant_params(self):
         if self.endpoint and self.endpoint.startswith("path:"):

@@ -119,11 +120,13 @@ class QdrantVector(BaseVector):
                 max_indexing_threads=0,
                 on_disk=False,
             )

             self._client.create_collection(
                 collection_name=collection_name,
                 vectors_config=vectors_config,
                 hnsw_config=hnsw_config,
+                timeout=int(self._client_config.timeout),
+                replication_factor=self._client_config.replication_factor,
             )

             # create group_id payload index

@@ -466,5 +469,6 @@ class QdrantVectorFactory(AbstractVectorFactory):
                 timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
                 grpc_port=dify_config.QDRANT_GRPC_PORT,
                 prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
+                replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
             ),
         )
@@ -49,6 +49,7 @@ class TidbOnQdrantConfig(BaseModel):
     root_path: Optional[str] = None
     grpc_port: int = 6334
     prefer_grpc: bool = False
+    replication_factor: int = 1

     def to_qdrant_params(self):
         if self.endpoint and self.endpoint.startswith("path:"):

@@ -134,6 +135,7 @@ class TidbOnQdrantVector(BaseVector):
                 vectors_config=vectors_config,
                 hnsw_config=hnsw_config,
                 timeout=int(self._client_config.timeout),
+                replication_factor=self._client_config.replication_factor,
             )

             # create group_id payload index

@@ -484,6 +486,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
                 timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT,
                 grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT,
                 prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED,
+                replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
             ),
         )
@@ -1,21 +1,13 @@
-import hashlib
-import json
 import mimetypes
-import os
 import re
-import site
-import subprocess
-import tempfile
-import unicodedata
-from contextlib import contextmanager
-from pathlib import Path
-from typing import Any, Literal, Optional, cast
+from collections.abc import Sequence
+from dataclasses import dataclass
+from typing import Any, Optional, cast
 from urllib.parse import unquote

 import chardet
 import cloudscraper  # type: ignore
-from bs4 import BeautifulSoup, CData, Comment, NavigableString  # type: ignore
-from regex import regex  # type: ignore
+from readabilipy import simple_json_from_html_string  # type: ignore

 from core.helper import ssrf_proxy
 from core.rag.extractor import extract_processor
@@ -23,9 +15,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor

 FULL_TEMPLATE = """
 TITLE: {title}
-AUTHORS: {authors}
-PUBLISH DATE: {publish_date}
-TOP_IMAGE_URL: {top_image}
+AUTHOR: {author}
 TEXT:

 {text}
@@ -73,8 +63,8 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
         response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
     elif response.status_code == 403:
         scraper = cloudscraper.create_scraper()
-        scraper.perform_request = ssrf_proxy.make_request
-        response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
+        scraper.perform_request = ssrf_proxy.make_request  # type: ignore
+        response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))  # type: ignore

     if response.status_code != 200:
         return "URL returned status code {}.".format(response.status_code)
@@ -90,273 +80,36 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
     else:
         content = response.text

-    a = extract_using_readabilipy(content)
+    article = extract_using_readabilipy(content)

-    if not a["plain_text"] or not a["plain_text"].strip():
+    if not article.text:
         return ""

     res = FULL_TEMPLATE.format(
-        title=a["title"],
-        authors=a["byline"],
-        publish_date=a["date"],
-        top_image="",
-        text=a["plain_text"] or "",
+        title=article.title,
+        author=article.auther,
+        text=article.text,
     )

     return res


-def extract_using_readabilipy(html):
-    with tempfile.NamedTemporaryFile(delete=False, mode="w+") as f_html:
-        f_html.write(html)
-        f_html.close()
-    html_path = f_html.name
-
-    # Call Mozilla's Readability.js Readability.parse() function via node, writing output to a temporary file
-    article_json_path = html_path + ".json"
-    jsdir = os.path.join(find_module_path("readabilipy"), "javascript")
-    with chdir(jsdir):
-        subprocess.check_call(["node", "ExtractArticle.js", "-i", html_path, "-o", article_json_path])
-
-    # Read output of call to Readability.parse() from JSON file and return as Python dictionary
-    input_json = json.loads(Path(article_json_path).read_text(encoding="utf-8"))
-
-    # Deleting files after processing
-    os.unlink(article_json_path)
-    os.unlink(html_path)
-
-    article_json: dict[str, Any] = {
-        "title": None,
-        "byline": None,
-        "date": None,
-        "content": None,
-        "plain_content": None,
-        "plain_text": None,
-    }
-    # Populate article fields from readability fields where present
-    if input_json:
-        if input_json.get("title"):
-            article_json["title"] = input_json["title"]
-        if input_json.get("byline"):
-            article_json["byline"] = input_json["byline"]
-        if input_json.get("date"):
-            article_json["date"] = input_json["date"]
-        if input_json.get("content"):
-            article_json["content"] = input_json["content"]
-            article_json["plain_content"] = plain_content(article_json["content"], False, False)
-            article_json["plain_text"] = extract_text_blocks_as_plain_text(article_json["plain_content"])
-        if input_json.get("textContent"):
-            article_json["plain_text"] = input_json["textContent"]
-            article_json["plain_text"] = re.sub(r"\n\s*\n", "\n", article_json["plain_text"])
-
-    return article_json
-
-
-def find_module_path(module_name):
-    for package_path in site.getsitepackages():
-        potential_path = os.path.join(package_path, module_name)
-        if os.path.exists(potential_path):
-            return potential_path
-
-    return None
-
-
-@contextmanager
-def chdir(path):
-    """Change directory in context and return to original on exit"""
-    # From https://stackoverflow.com/a/37996581, couldn't find a built-in
-    original_path = os.getcwd()
-    os.chdir(path)
-    try:
-        yield
-    finally:
-        os.chdir(original_path)
-
-
-def extract_text_blocks_as_plain_text(paragraph_html):
-    # Load article as DOM
-    soup = BeautifulSoup(paragraph_html, "html.parser")
-    # Select all lists
-    list_elements = soup.find_all(["ul", "ol"])
-    # Prefix text in all list items with "* " and make lists paragraphs
-    for list_element in list_elements:
-        plain_items = "".join(
-            list(filter(None, [plain_text_leaf_node(li)["text"] for li in list_element.find_all("li")]))
-        )
-        list_element.string = plain_items
-        list_element.name = "p"
-    # Select all text blocks
-    text_blocks = [s.parent for s in soup.find_all(string=True)]
-    text_blocks = [plain_text_leaf_node(block) for block in text_blocks]
-    # Drop empty paragraphs
-    text_blocks = list(filter(lambda p: p["text"] is not None, text_blocks))
-    return text_blocks
-
-
-def plain_text_leaf_node(element):
-    # Extract all text, stripped of any child HTML elements and normalize it
-    plain_text = normalize_text(element.get_text())
-    if plain_text != "" and element.name == "li":
-        plain_text = "* {}, ".format(plain_text)
-    if plain_text == "":
-        plain_text = None
-    if "data-node-index" in element.attrs:
-        plain = {"node_index": element["data-node-index"], "text": plain_text}
-    else:
-        plain = {"text": plain_text}
-    return plain
-
-
-def plain_content(readability_content, content_digests, node_indexes):
-    # Load article as DOM
-    soup = BeautifulSoup(readability_content, "html.parser")
-    # Make all elements plain
-    elements = plain_elements(soup.contents, content_digests, node_indexes)
-    if node_indexes:
-        # Add node index attributes to nodes
-        elements = [add_node_indexes(element) for element in elements]
-    # Replace article contents with plain elements
-    soup.contents = elements
-    return str(soup)
-
-
-def plain_elements(elements, content_digests, node_indexes):
-    # Get plain content versions of all elements
-    elements = [plain_element(element, content_digests, node_indexes) for element in elements]
-    if content_digests:
-        # Add content digest attribute to nodes
-        elements = [add_content_digest(element) for element in elements]
-    return elements
-
-
-def plain_element(element, content_digests, node_indexes):
-    # For lists, we make each item plain text
-    if is_leaf(element):
-        # For leaf node elements, extract the text content, discarding any HTML tags
-        # 1. Get element contents as text
-        plain_text = element.get_text()
-        # 2. Normalize the extracted text string to a canonical representation
-        plain_text = normalize_text(plain_text)
-        # 3. Update element content to be plain text
-        element.string = plain_text
-    elif is_text(element):
-        if is_non_printing(element):
-            # The simplified HTML may have come from Readability.js so might
-            # have non-printing text (e.g. Comment or CData). In this case, we
-            # keep the structure, but ensure that the string is empty.
-            element = type(element)("")
-        else:
-            plain_text = element.string
-            plain_text = normalize_text(plain_text)
-            element = type(element)(plain_text)
-    else:
-        # If not a leaf node or leaf type call recursively on child nodes, replacing
-        element.contents = plain_elements(element.contents, content_digests, node_indexes)
-    return element
-
-
-def add_node_indexes(element, node_index="0"):
-    # Can't add attributes to string types
-    if is_text(element):
-        return element
-    # Add index to current element
-    element["data-node-index"] = node_index
-    # Add index to child elements
-    for local_idx, child in enumerate([c for c in element.contents if not is_text(c)], start=1):
-        # Can't add attributes to leaf string types
-        child_index = "{stem}.{local}".format(stem=node_index, local=local_idx)
-        add_node_indexes(child, node_index=child_index)
-    return element
-
-
-def normalize_text(text):
-    """Normalize unicode and whitespace."""
-    # Normalize unicode first to try and standardize whitespace characters as much as possible before normalizing them
-    text = strip_control_characters(text)
-    text = normalize_unicode(text)
-    text = normalize_whitespace(text)
-    return text
-
-
-def strip_control_characters(text):
-    """Strip out unicode control characters which might break the parsing."""
-    # Unicode control characters
-    #   [Cc]: Other, Control [includes new lines]
-    #   [Cf]: Other, Format
-    #   [Cn]: Other, Not Assigned
-    #   [Co]: Other, Private Use
-    #   [Cs]: Other, Surrogate
-    control_chars = {"Cc", "Cf", "Cn", "Co", "Cs"}
-    retained_chars = ["\t", "\n", "\r", "\f"]
-
-    # Remove non-printing control characters
-    return "".join(
-        [
-            "" if (unicodedata.category(char) in control_chars) and (char not in retained_chars) else char
-            for char in text
-        ]
-    )
-
-
-def normalize_unicode(text):
-    """Normalize unicode such that things that are visually equivalent map to the same unicode string where possible."""
-    normal_form: Literal["NFC", "NFD", "NFKC", "NFKD"] = "NFKC"
-    text = unicodedata.normalize(normal_form, text)
-    return text
-
-
-def normalize_whitespace(text):
-    """Replace runs of whitespace characters with a single space as this is what happens when HTML text is displayed."""
-    text = regex.sub(r"\s+", " ", text)
-    # Remove leading and trailing whitespace
-    text = text.strip()
-    return text
-
-
-def is_leaf(element):
-    return element.name in {"p", "li"}
-
-
-def is_text(element):
-    return isinstance(element, NavigableString)
-
-
-def is_non_printing(element):
-    return any(isinstance(element, _e) for _e in [Comment, CData])
-
-
-def add_content_digest(element):
-    if not is_text(element):
-        element["data-content-digest"] = content_digest(element)
-    return element
-
-
-def content_digest(element):
-    digest: Any
-    if is_text(element):
-        # Hash
-        trimmed_string = element.string.strip()
-        if trimmed_string == "":
-            digest = ""
-        else:
-            digest = hashlib.sha256(trimmed_string.encode("utf-8")).hexdigest()
-    else:
-        contents = element.contents
-        num_contents = len(contents)
-        if num_contents == 0:
-            # No hash when no child elements exist
-            digest = ""
-        elif num_contents == 1:
-            # If single child, use digest of child
-            digest = content_digest(contents[0])
-        else:
-            # Build content digest from the "non-empty" digests of child nodes
-            digest = hashlib.sha256()
-            child_digests = list(filter(lambda x: x != "", [content_digest(content) for content in contents]))
-            for child in child_digests:
-                digest.update(child.encode("utf-8"))
-            digest = digest.hexdigest()
-    return digest
+@dataclass
+class Article:
+    title: str
+    auther: str
+    text: Sequence[dict]
+
+
+def extract_using_readabilipy(html: str):
+    json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True)
+    article = Article(
+        title=json_article.get("title") or "",
+        auther=json_article.get("byline") or "",
+        text=json_article.get("plain_text") or [],
+    )
+
+    return article


 def get_image_upload_file_ids(content):
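For context on the replacement extractor: `simple_json_from_html_string` is readabilipy's public entry point and returns a dict with `title`, `byline`, and `plain_text` keys, which the new `Article` dataclass repackages. A small usage sketch (note that `use_readability=True` shells out to Node.js/Readability.js, which must be installed):

```python
# Usage sketch of the readabilipy call adopted above.
from readabilipy import simple_json_from_html_string  # type: ignore

html = "<html><body><article><h1>Hello</h1><p>World.</p></article></body></html>"
article = simple_json_from_html_string(html, use_readability=True)

print(article.get("title"))       # e.g. "Hello"
print(article.get("byline"))      # author, when the page declares one
print(article.get("plain_text"))  # list of {"text": ...} blocks
```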
@@ -9,7 +9,7 @@ from copy import copy, deepcopy
 from datetime import UTC, datetime
 from typing import Any, Optional, cast

-from flask import Flask, current_app
+from flask import Flask, current_app, has_request_context

 from configs import dify_config
 from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError

@@ -540,8 +540,21 @@ class GraphEngine:
         for var, val in context.items():
             var.set(val)

+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
             try:
+                # Restore user in new app context
+                if saved_user is not None:
+                    from flask import g
+
+                    g._login_user = saved_user
+
                 q.put(
                     ParallelBranchRunStartedEvent(
                         parallel_id=parallel_id,
@@ -356,7 +356,9 @@ class AgentNode(ToolNode):

     def _remove_unsupported_model_features_for_old_version(self, model_schema: AIModelEntity) -> AIModelEntity:
         if model_schema.features:
-            for feature in model_schema.features:
-                if feature.value not in AgentOldVersionModelFeatures:
+            for feature in model_schema.features[:]:  # Create a copy to safely modify during iteration
+                try:
+                    AgentOldVersionModelFeatures(feature.value)  # Try to create enum member from value
+                except ValueError:
                     model_schema.features.remove(feature)
         return model_schema
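Two details in that fix are worth separating: iterating over a slice copy so `remove()` cannot skip elements, and testing enum membership by constructing the member, since `feature.value not in EnumCls` does not test values on a plain `Enum`. A standalone illustration:

```python
# Standalone illustration of the membership test and safe in-place removal.
from enum import Enum


class SupportedFeature(Enum):
    VISION = "vision"
    TOOL_CALL = "tool-call"


features = ["vision", "audio", "tool-call"]
for feature in features[:]:  # iterate over a copy so removal is safe
    try:
        SupportedFeature(feature)  # raises ValueError when unsupported
    except ValueError:
        features.remove(feature)

print(features)  # ['vision', 'tool-call']
```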
@@ -1,4 +1,4 @@
-from enum import Enum
+from enum import Enum, StrEnum
 from typing import Any, Literal, Union

 from pydantic import BaseModel

@@ -26,7 +26,7 @@ class ParamsAutoGenerated(Enum):
     OPEN = 1


-class AgentOldVersionModelFeatures(Enum):
+class AgentOldVersionModelFeatures(StrEnum):
     """
     Enum class for old SDK version llm feature.
     """
@@ -7,6 +7,7 @@ import tempfile
 from collections.abc import Mapping, Sequence
 from typing import Any, cast

+import chardet
 import docx
 import pandas as pd
 import pypandoc  # type: ignore
@@ -180,26 +181,64 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)

 def _extract_text_from_plain_text(file_content: bytes) -> str:
     try:
-        return file_content.decode("utf-8", "ignore")
-    except UnicodeDecodeError as e:
-        raise TextExtractionError("Failed to decode plain text file") from e
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        return file_content.decode(encoding, errors="ignore")
+    except (UnicodeDecodeError, LookupError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            return file_content.decode("utf-8", errors="ignore")
+        except UnicodeDecodeError:
+            raise TextExtractionError(f"Failed to decode plain text file: {e}") from e


 def _extract_text_from_json(file_content: bytes) -> str:
     try:
-        json_data = json.loads(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        json_data = json.loads(file_content.decode(encoding, errors="ignore"))
         return json.dumps(json_data, indent=2, ensure_ascii=False)
-    except (UnicodeDecodeError, json.JSONDecodeError) as e:
-        raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e
+    except (UnicodeDecodeError, LookupError, json.JSONDecodeError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            json_data = json.loads(file_content.decode("utf-8", errors="ignore"))
+            return json.dumps(json_data, indent=2, ensure_ascii=False)
+        except (UnicodeDecodeError, json.JSONDecodeError):
+            raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e


 def _extract_text_from_yaml(file_content: bytes) -> str:
     """Extract the content from yaml file"""
     try:
-        yaml_data = yaml.safe_load_all(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        yaml_data = yaml.safe_load_all(file_content.decode(encoding, errors="ignore"))
         return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
-    except (UnicodeDecodeError, yaml.YAMLError) as e:
-        raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e
+    except (UnicodeDecodeError, LookupError, yaml.YAMLError) as e:
+        # If decoding fails, try with utf-8 as last resort
+        try:
+            yaml_data = yaml.safe_load_all(file_content.decode("utf-8", errors="ignore"))
+            return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
+        except (UnicodeDecodeError, yaml.YAMLError):
+            raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e


 def _extract_text_from_pdf(file_content: bytes) -> str:
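All three extractors above now share one decoding strategy: ask chardet for an encoding, fall back to utf-8 when detection returns nothing, and retry with plain utf-8 if the detected codec turns out to be unusable. A condensed sketch of that strategy (the helper name `decode_with_detection` is illustrative, not part of the codebase):

```python
import chardet


def decode_with_detection(data: bytes) -> str:
    # chardet.detect returns e.g. {"encoding": "ISO-8859-1", "confidence": 0.73, ...};
    # "encoding" is None when detection fails, hence the utf-8 fallback.
    encoding = chardet.detect(data)["encoding"] or "utf-8"
    try:
        return data.decode(encoding, errors="ignore")
    except LookupError:
        # chardet may name a codec Python does not know; utf-8 is the last resort.
        return data.decode("utf-8", errors="ignore")


print(decode_with_detection("café ©".encode("latin-1")))  # typically "café ©"
```

Note that with `errors="ignore"` the decode itself cannot raise `UnicodeDecodeError`; the extractors still catch it defensively.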
@@ -338,7 +377,20 @@ def _extract_text_from_file(file: File):

 def _extract_text_from_csv(file_content: bytes) -> str:
     try:
-        csv_file = io.StringIO(file_content.decode("utf-8", "ignore"))
+        # Detect encoding using chardet
+        result = chardet.detect(file_content)
+        encoding = result["encoding"]
+
+        # Fallback to utf-8 if detection fails
+        if not encoding:
+            encoding = "utf-8"
+
+        try:
+            csv_file = io.StringIO(file_content.decode(encoding, errors="ignore"))
+        except (UnicodeDecodeError, LookupError):
+            # If decoding fails, try with utf-8 as last resort
+            csv_file = io.StringIO(file_content.decode("utf-8", errors="ignore"))
+
         csv_reader = csv.reader(csv_file)
         rows = list(csv_reader)
@@ -366,7 +418,7 @@ def _extract_text_from_excel(file_content: bytes) -> str:
             df = excel_file.parse(sheet_name=sheet_name)
             df.dropna(how="all", inplace=True)
             # Create Markdown table two times to separate tables with a newline
-            markdown_table += df.to_markdown(index=False) + "\n\n"
+            markdown_table += df.to_markdown(index=False, floatfmt="") + "\n\n"
         except Exception as e:
             continue
     return markdown_table
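Passing `floatfmt=""` matters because `DataFrame.to_markdown` delegates to tabulate, whose default float format `"g"` rounds to about six significant digits. A small illustration, assuming pandas and tabulate are installed:

```python
import pandas as pd

df = pd.DataFrame({"amount": [123456.78]})

# Default tabulate float format ("g") rounds: the cell renders as 123457.
print(df.to_markdown(index=False))
# An empty floatfmt formats each float with format(x, ""), keeping 123456.78.
print(df.to_markdown(index=False, floatfmt=""))
```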
@@ -235,6 +235,10 @@ class Executor:
                     files[key].append(file_tuple)

         # convert files to list for httpx request
+        # If there are no actual files, we still need to force httpx to use `multipart/form-data`.
+        # This is achieved by inserting a harmless placeholder file that will be ignored by the server.
+        if not files:
+            self.files = [("__multipart_placeholder__", ("", b"", "application/octet-stream"))]
         if files:
             self.files = []
             for key, file_tuples in files.items():
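The placeholder works because httpx chooses the request encoding from the `files` argument: only a non-empty `files` sequence yields a `multipart/form-data` body, while `data=` alone is sent as `application/x-www-form-urlencoded`. A minimal sketch (the URL is illustrative):

```python
import httpx

data = {"name": "dify"}
files: list = []  # no real file inputs this run

# Force multipart even without real files, mirroring the Executor change above.
if not files:
    files = [("__multipart_placeholder__", ("", b"", "application/octet-stream"))]

# The request goes out as multipart/form-data; a server that iterates named
# file fields simply skips the empty placeholder part.
resp = httpx.post("https://example.com/upload", data=data, files=files)
```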
@@ -373,7 +377,10 @@ class Executor:
                 raw += f"{k}: {v}\r\n"

             body_string = ""
-            if self.files:
+            # Only log actual files if present.
+            # '__multipart_placeholder__' is inserted to force multipart encoding but is not a real file.
+            # This prevents logging meaningless placeholder entries.
+            if self.files and not all(f[0] == "__multipart_placeholder__" for f in self.files):
                 for key, (filename, content, mime_type) in self.files:
                     body_string += f"--{boundary}\r\n"
                     body_string += f'Content-Disposition: form-data; name="{key}"\r\n\r\n'
@@ -7,7 +7,7 @@ from datetime import UTC, datetime
 from queue import Empty, Queue
 from typing import TYPE_CHECKING, Any, Optional, cast

-from flask import Flask, current_app
+from flask import Flask, current_app, has_request_context

 from configs import dify_config
 from core.variables import ArrayVariable, IntegerVariable, NoneVariable
@@ -586,7 +586,21 @@ class IterationNode(BaseNode[IterationNodeData]):
         """
         for var, val in context.items():
             var.set(val)
+
+        # FIXME(-LAN-): Save current user before entering new app context
+        from flask import g
+
+        saved_user = None
+        if has_request_context() and hasattr(g, "_login_user"):
+            saved_user = g._login_user
+
         with flask_app.app_context():
+            # Restore user in new app context
+            if saved_user is not None:
+                from flask import g
+
+                g._login_user = saved_user
+
             parallel_mode_run_id = uuid.uuid4().hex
             graph_engine_copy = graph_engine.create_copy()
             variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool
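The save/restore dance is needed because Flask's `g` is context-local: a freshly pushed `app_context()` starts with an empty `g`, dropping the `_login_user` that flask-login stashed on the previous context. A condensed sketch of the same pattern (the helper name is illustrative):

```python
from flask import Flask, g, has_request_context


def run_in_fresh_app_context(flask_app: Flask, fn):
    # Capture the logged-in user before leaving the current context...
    saved_user = g._login_user if has_request_context() and hasattr(g, "_login_user") else None

    with flask_app.app_context():
        # ...and re-attach it so current_user resolves inside the new context.
        if saved_user is not None:
            g._login_user = saved_user
        return fn()
```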
@@ -125,6 +125,7 @@ class WorkflowCycleManager:
             )
         )

+        self._workflow_execution_repository.save(workflow_execution)
         return workflow_execution

     def handle_workflow_run_partial_success(

@@ -158,6 +159,7 @@ class WorkflowCycleManager:
             )
         )

+        self._workflow_execution_repository.save(execution)
         return execution

     def handle_workflow_run_failed(
@@ -172,44 +174,45 @@ class WorkflowCycleManager:
         trace_manager: Optional[TraceQueueManager] = None,
         exceptions_count: int = 0,
     ) -> WorkflowExecution:
-        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
+        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)

-        execution.status = WorkflowExecutionStatus(status.value)
-        execution.error_message = error_message
-        execution.total_tokens = total_tokens
-        execution.total_steps = total_steps
-        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
-        execution.exceptions_count = exceptions_count
+        workflow_execution.status = WorkflowExecutionStatus(status.value)
+        workflow_execution.error_message = error_message
+        workflow_execution.total_tokens = total_tokens
+        workflow_execution.total_steps = total_steps
+        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_execution.exceptions_count = exceptions_count

         # Use the instance repository to find running executions for a workflow run
-        running_domain_executions = self._workflow_node_execution_repository.get_running_executions(
-            workflow_run_id=execution.id
+        running_node_executions = self._workflow_node_execution_repository.get_running_executions(
+            workflow_run_id=workflow_execution.id
         )

         # Update the domain models
         now = datetime.now(UTC).replace(tzinfo=None)
-        for domain_execution in running_domain_executions:
-            if domain_execution.node_execution_id:
+        for node_execution in running_node_executions:
+            if node_execution.node_execution_id:
                 # Update the domain model
-                domain_execution.status = NodeExecutionStatus.FAILED
-                domain_execution.error = error_message
-                domain_execution.finished_at = now
-                domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds()
+                node_execution.status = NodeExecutionStatus.FAILED
+                node_execution.error = error_message
+                node_execution.finished_at = now
+                node_execution.elapsed_time = (now - node_execution.created_at).total_seconds()

                 # Update the repository with the domain model
-                self._workflow_node_execution_repository.save(domain_execution)
+                self._workflow_node_execution_repository.save(node_execution)

         if trace_manager:
             trace_manager.add_trace_task(
                 TraceTask(
                     TraceTaskName.WORKFLOW_TRACE,
-                    workflow_execution=execution,
+                    workflow_execution=workflow_execution,
                     conversation_id=conversation_id,
                     user_id=trace_manager.user_id,
                 )
             )

-        return execution
+        self._workflow_execution_repository.save(workflow_execution)
+        return workflow_execution

     def handle_node_execution_start(
         self,
@@ -5,11 +5,11 @@ from flask import Response, request
 from flask_login import user_loaded_from_request, user_logged_in
 from werkzeug.exceptions import NotFound, Unauthorized

 import contexts
 from configs import dify_config
 from dify_app import DifyApp
 from extensions.ext_database import db
 from libs.passport import PassportService
-from models.account import Account
+from models.account import Account, Tenant, TenantAccountJoin
 from models.model import EndUser
 from services.account_service import AccountService
@@ -32,6 +32,26 @@ def load_user_from_request(request_from_flask_login):
         else:
             auth_token = request.args.get("_token")

+        # Check for admin API key authentication first
+        if dify_config.ADMIN_API_KEY_ENABLE and auth_header:
+            admin_api_key = dify_config.ADMIN_API_KEY
+            if admin_api_key and admin_api_key == auth_token:
+                workspace_id = request.headers.get("X-WORKSPACE-ID")
+                if workspace_id:
+                    tenant_account_join = (
+                        db.session.query(Tenant, TenantAccountJoin)
+                        .filter(Tenant.id == workspace_id)
+                        .filter(TenantAccountJoin.tenant_id == Tenant.id)
+                        .filter(TenantAccountJoin.role == "owner")
+                        .one_or_none()
+                    )
+                    if tenant_account_join:
+                        tenant, ta = tenant_account_join
+                        account = db.session.query(Account).filter_by(id=ta.account_id).first()
+                        if account:
+                            account.current_tenant = tenant
+                            return account
+
         if request.blueprint in {"console", "inner_api"}:
             if not auth_token:
                 raise Unauthorized("Invalid Authorization token.")
@@ -61,8 +81,8 @@ def on_user_logged_in(_sender, user):
     Note: AccountService.load_logged_in_account will populate user.current_tenant_id
     through the load_user method, which calls account.set_tenant_id().
     """
-    if user and isinstance(user, Account) and user.current_tenant_id:
-        contexts.tenant_id.set(user.current_tenant_id)
+    # tenant_id context variable removed - using current_user.current_tenant_id directly
+    pass


 @login_manager.unauthorized_handler
@@ -12,19 +12,30 @@ from flask_login import user_loaded_from_request, user_logged_in  # type: ignore

 from configs import dify_config
 from dify_app import DifyApp
+from models import Account, EndUser


 @user_logged_in.connect
 @user_loaded_from_request.connect
-def on_user_loaded(_sender, user):
+def on_user_loaded(_sender, user: Union["Account", "EndUser"]):
     if dify_config.ENABLE_OTEL:
         from opentelemetry.trace import get_current_span

         if user:
-            current_span = get_current_span()
-            if current_span:
-                current_span.set_attribute("service.tenant.id", user.current_tenant_id)
-                current_span.set_attribute("service.user.id", user.id)
+            try:
+                current_span = get_current_span()
+                if isinstance(user, Account) and user.current_tenant_id:
+                    tenant_id = user.current_tenant_id
+                elif isinstance(user, EndUser):
+                    tenant_id = user.tenant_id
+                else:
+                    return
+                if current_span:
+                    current_span.set_attribute("service.tenant.id", tenant_id)
+                    current_span.set_attribute("service.user.id", user.id)
+            except Exception:
+                logging.exception("Error setting tenant and user attributes")
+                pass


 def init_app(app: DifyApp):
@@ -47,21 +58,25 @@ def init_app(app: DifyApp):

     def response_hook(span: Span, status: str, response_headers: list):
         if span and span.is_recording():
-            if status.startswith("2"):
-                span.set_status(StatusCode.OK)
-            else:
-                span.set_status(StatusCode.ERROR, status)
+            try:
+                if status.startswith("2"):
+                    span.set_status(StatusCode.OK)
+                else:
+                    span.set_status(StatusCode.ERROR, status)

-            status = status.split(" ")[0]
-            status_code = int(status)
-            status_class = f"{status_code // 100}xx"
-            attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
-            request = flask.request
-            if request and request.url_rule:
-                attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
-            if request and request.method:
-                attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
-            _http_response_counter.add(1, attributes)
+                status = status.split(" ")[0]
+                status_code = int(status)
+                status_class = f"{status_code // 100}xx"
+                attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
+                request = flask.request
+                if request and request.url_rule:
+                    attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
+                if request and request.method:
+                    attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
+                _http_response_counter.add(1, attributes)
+            except Exception:
+                logging.exception("Error setting status and attributes")
+                pass

     instrumentor = FlaskInstrumentor()
     if dify_config.DEBUG:
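The metric attributes are derived from the WSGI status line, which arrives as a string such as "404 NOT FOUND"; the hook keeps the numeric prefix and buckets it into a low-cardinality class label. The parsing in isolation:

```python
def status_to_attributes(status: str) -> dict[str, str | int]:
    # "404 NOT FOUND" -> 404 -> "4xx"
    status_code = int(status.split(" ")[0])
    return {"status_code": status_code, "status_class": f"{status_code // 100}xx"}


assert status_to_attributes("200 OK") == {"status_code": 200, "status_class": "2xx"}
assert status_to_attributes("503 SERVICE UNAVAILABLE")["status_class"] == "5xx"
```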
@@ -92,7 +107,7 @@ def init_app(app: DifyApp):
     class ExceptionLoggingHandler(logging.Handler):
         """Custom logging handler that creates spans for logging.exception() calls"""

-        def emit(self, record):
+        def emit(self, record: logging.LogRecord):
             try:
                 if record.exc_info:
                     tracer = get_tracer_provider().get_tracer("dify.exception.logging")
@@ -107,9 +122,12 @@ def init_app(app: DifyApp):
                         },
                     ) as span:
                         span.set_status(StatusCode.ERROR)
-                        span.record_exception(record.exc_info[1])
-                        span.set_attribute("exception.type", record.exc_info[0].__name__)
-                        span.set_attribute("exception.message", str(record.exc_info[1]))
+                        if record.exc_info[1]:
+                            span.record_exception(record.exc_info[1])
+                            span.set_attribute("exception.message", str(record.exc_info[1]))
+                        if record.exc_info[0]:
+                            span.set_attribute("exception.type", record.exc_info[0].__name__)
             except Exception:
                 pass
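The added guards exist because `record.exc_info` is a `(type, value, traceback)` triple whose elements may all be `None`, for example when `logging.exception()` is called outside an active `except` block. The same shape is observable with `sys.exc_info()`:

```python
import sys

exc_type, exc_value, _tb = sys.exc_info()  # (None, None, None) outside a handler
if exc_value is not None:
    print("exception.message:", str(exc_value))
if exc_type is not None:
    print("exception.type:", exc_type.__name__)
```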
@@ -100,6 +100,8 @@ app_partial_fields = {
     "updated_at": TimestampField,
     "tags": fields.List(fields.Nested(tag_fields)),
     "access_mode": fields.String,
+    "create_user_name": fields.String,
+    "author_name": fields.String,
 }
@@ -2,14 +2,11 @@ from functools import wraps
 from typing import Any

 from flask import current_app, g, has_request_context, request
-from flask_login import user_logged_in  # type: ignore
 from flask_login.config import EXEMPT_METHODS  # type: ignore
-from werkzeug.exceptions import Unauthorized
 from werkzeug.local import LocalProxy

 from configs import dify_config
-from extensions.ext_database import db
-from models.account import Account, Tenant, TenantAccountJoin
+from models.account import Account
 from models.model import EndUser

 #: A proxy for the current user. If no user is logged in, this will be an

@@ -53,36 +50,6 @@ def login_required(func):

     @wraps(func)
     def decorated_view(*args, **kwargs):
-        auth_header = request.headers.get("Authorization")
-        if dify_config.ADMIN_API_KEY_ENABLE:
-            if auth_header:
-                if " " not in auth_header:
-                    raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
-                auth_scheme, auth_token = auth_header.split(None, 1)
-                auth_scheme = auth_scheme.lower()
-                if auth_scheme != "bearer":
-                    raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
-
-                admin_api_key = dify_config.ADMIN_API_KEY
-                if admin_api_key:
-                    if admin_api_key == auth_token:
-                        workspace_id = request.headers.get("X-WORKSPACE-ID")
-                        if workspace_id:
-                            tenant_account_join = (
-                                db.session.query(Tenant, TenantAccountJoin)
-                                .filter(Tenant.id == workspace_id)
-                                .filter(TenantAccountJoin.tenant_id == Tenant.id)
-                                .filter(TenantAccountJoin.role == "owner")
-                                .one_or_none()
-                            )
-                            if tenant_account_join:
-                                tenant, ta = tenant_account_join
-                                account = db.session.query(Account).filter_by(id=ta.account_id).first()
-                                # Login admin
-                                if account:
-                                    account.current_tenant = tenant
-                                    current_app.login_manager._update_request_context_with_user(account)  # type: ignore
-                                    user_logged_in.send(current_app._get_current_object(), user=_get_user())  # type: ignore
         if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED:
             pass
         elif not current_user.is_authenticated:
@@ -295,6 +295,15 @@ class App(Base):

         return tags or []

+    @property
+    def author_name(self):
+        if self.created_by:
+            account = db.session.query(Account).filter(Account.id == self.created_by).first()
+            if account:
+                return account.name
+
+        return None
+

 class AppModelConfig(Base):
     __tablename__ = "app_model_configs"
@@ -6,6 +6,8 @@ from enum import Enum, StrEnum
 from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import uuid4

+from flask_login import current_user
+
 from core.variables import utils as variable_utils
 from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
 from factories.variable_factory import build_segment

@@ -17,7 +19,6 @@ import sqlalchemy as sa
 from sqlalchemy import UniqueConstraint, func
 from sqlalchemy.orm import Mapped, mapped_column

-import contexts
 from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE
 from core.helper import encrypter
 from core.variables import SecretVariable, Segment, SegmentType, Variable
@@ -280,7 +281,16 @@ class Workflow(Base):
         if self._environment_variables is None:
             self._environment_variables = "{}"

-        tenant_id = contexts.tenant_id.get()
+        # Get tenant_id from current_user (Account or EndUser)
+        if isinstance(current_user, Account):
+            # Account user
+            tenant_id = current_user.current_tenant_id
+        else:
+            # EndUser
+            tenant_id = current_user.tenant_id
+
+        if not tenant_id:
+            return []

         environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables)
         results = [

@@ -303,7 +313,17 @@ class Workflow(Base):
             self._environment_variables = "{}"
             return

-        tenant_id = contexts.tenant_id.get()
+        # Get tenant_id from current_user (Account or EndUser)
+        if isinstance(current_user, Account):
+            # Account user
+            tenant_id = current_user.current_tenant_id
+        else:
+            # EndUser
+            tenant_id = current_user.tenant_id
+
+        if not tenant_id:
+            self._environment_variables = "{}"
+            return

         value = list(value)
         if any(var for var in value if not var.id):
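Both the getter and the setter now derive the tenant from `current_user` rather than a context variable, so the only branching is on the user type. A minimal sketch of the dispatch, with stand-in classes for the two model types:

```python
from typing import Optional, Union


class Account:  # stand-in for models.account.Account
    current_tenant_id: Optional[str] = None


class EndUser:  # stand-in for models.model.EndUser
    tenant_id: Optional[str] = None


def resolve_tenant_id(user: Union[Account, EndUser]) -> Optional[str]:
    # Accounts carry the tenant they are currently switched into;
    # end users are bound to a single tenant.
    if isinstance(user, Account):
        return user.current_tenant_id
    return user.tenant_id
```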
@@ -149,6 +149,7 @@ dev = [
     "types-tqdm~=4.67.0",
     "types-ujson~=5.10.0",
     "boto3-stubs>=1.38.20",
+    "types-jmespath>=1.0.2.20240106",
 ]

 ############################################################
@@ -190,7 +191,7 @@ vdb = [
     "pymilvus~=2.5.0",
     "pymochow==1.3.1",
     "pyobvector~=0.1.6",
-    "qdrant-client==1.7.3",
+    "qdrant-client==1.9.0",
     "tablestore==6.1.0",
     "tcvectordb~=1.6.4",
     "tidb-vector==0.0.9",
@@ -648,17 +648,15 @@ class DatasetService:
         if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
             logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
             raise NoPermissionError("You do not have permission to access this dataset.")
-        if dataset.permission == "partial_members":
-            user_permission = (
-                db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
-            )
-            if (
-                not user_permission
-                and dataset.tenant_id != user.current_tenant_id
-                and dataset.created_by != user.id
-            ):
-                logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
-                raise NoPermissionError("You do not have permission to access this dataset.")
+        if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
+            # For partial team permission, user needs explicit permission or be the creator
+            if dataset.created_by != user.id:
+                user_permission = (
+                    db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
+                )
+                if not user_permission:
+                    logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
+                    raise NoPermissionError("You do not have permission to access this dataset.")

     @staticmethod
     def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None):
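The rewritten check also drops the old tenant-mismatch escape hatch: under partial-team permission a non-creator now passes only with an explicit `DatasetPermission` row. A boolean sketch of the decision table (the permission strings are assumed to match the enum values, e.g. "partial_members" for `PARTIAL_TEAM`):

```python
def may_access(permission: str, user_id: str, creator_id: str, has_explicit_grant: bool) -> bool:
    # Owner-role bypass and tenant checks happen earlier in the service;
    # this covers only the per-dataset permission modes.
    if permission == "only_me":
        return user_id == creator_id
    if permission == "partial_members":
        return user_id == creator_id or has_explicit_grant
    return True  # all team members


assert may_access("partial_members", "u2", "u1", has_explicit_grant=False) is False
assert may_access("partial_members", "u1", "u1", has_explicit_grant=False) is True
```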
@@ -17,7 +17,7 @@ from core.plugin.entities.plugin import (
     PluginInstallation,
     PluginInstallationSource,
 )
-from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginUploadResponse
+from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse
 from core.plugin.impl.asset import PluginAssetManager
 from core.plugin.impl.debugging import PluginDebuggingClient
 from core.plugin.impl.plugin import PluginInstaller

@@ -110,6 +110,15 @@ class PluginService:
         plugins = manager.list_plugins(tenant_id)
         return plugins

+    @staticmethod
+    def list_with_total(tenant_id: str, page: int, page_size: int) -> PluginListResponse:
+        """
+        list all plugins of the tenant
+        """
+        manager = PluginInstaller()
+        plugins = manager.list_plugins_with_total(tenant_id, page, page_size)
+        return plugins
+
     @staticmethod
     def list_installations_from_ids(tenant_id: str, ids: Sequence[str]) -> Sequence[PluginInstallation]:
         """
@@ -4,16 +4,12 @@ from collections.abc import Callable

 import click
 from celery import shared_task  # type: ignore
-from sqlalchemy import delete, select
+from sqlalchemy import delete
 from sqlalchemy.exc import SQLAlchemyError
-from sqlalchemy.orm import Session

-from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models import (
     Account,
     ApiToken,
     App,
     AppAnnotationHitHistory,
     AppAnnotationSetting,
     AppDatasetJoin,

@@ -34,7 +30,7 @@ from models import (
 )
 from models.tools import WorkflowToolProvider
 from models.web import PinnedConversation, SavedMessage
-from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowRun
+from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowNodeExecution, WorkflowRun


 @shared_task(queue="app_deletion", bind=True, max_retries=3)

@@ -191,31 +187,18 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):


 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
-    # Get app's owner
-    with Session(db.engine, expire_on_commit=False) as session:
-        stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id)
-        user = session.scalar(stmt)
-
-    if user is None:
-        errmsg = (
-            f"Failed to delete workflow node executions for tenant {tenant_id} and app {app_id}, app's owner not found"
-        )
-        logging.error(errmsg)
-        raise ValueError(errmsg)
-
-    # Create a repository instance for WorkflowNodeExecution
-    repository = SQLAlchemyWorkflowNodeExecutionRepository(
-        session_factory=db.engine,
-        user=user,
-        app_id=app_id,
-        triggered_from=None,
+    def del_workflow_node_execution(workflow_node_execution_id: str):
+        db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution_id).delete(
+            synchronize_session=False
+        )
+
+    _delete_records(
+        """select id from workflow_node_executions where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
+        {"tenant_id": tenant_id, "app_id": app_id},
+        del_workflow_node_execution,
+        "workflow node execution",
     )

-    # Use the clear method to delete all records for this tenant_id and app_id
-    repository.clear()
-
-    logging.info(click.style(f"Deleted workflow node executions for tenant {tenant_id} and app {app_id}", fg="green"))
-

 def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
     def del_workflow_app_log(workflow_app_log_id: str):
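`_delete_records` is defined elsewhere in this module; its signature is inferred here from the call site. The pattern is a batched delete loop: select at most 1000 ids, hand each to the callback, commit, and repeat until the query comes back empty, which keeps transactions small for large apps. A sketch under those assumptions:

```python
from sqlalchemy import text


def _delete_records_sketch(session, query_sql: str, params: dict, delete_func, record_name: str) -> None:
    while True:
        rows = session.execute(text(query_sql), params).fetchall()
        if not rows:
            break  # nothing left to delete
        for (record_id,) in rows:
            delete_func(record_id)
        session.commit()  # commit per batch, bounding transaction size
```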
@@ -246,7 +246,9 @@ def test_executor_with_form_data():
     assert "multipart/form-data" in executor.headers["Content-Type"]
     assert executor.params == []
     assert executor.json is None
-    assert executor.files is None
+    # '__multipart_placeholder__' is expected when no file inputs exist,
+    # to ensure the request is treated as multipart/form-data by the backend.
+    assert executor.files == [("__multipart_placeholder__", ("", b"", "application/octet-stream"))]
     assert executor.content is None

     # Check that the form data is correctly loaded in executor.data
@@ -10,6 +10,7 @@ from core.workflow.entities.node_entities import NodeRunResult
 from core.workflow.nodes.document_extractor import DocumentExtractorNode, DocumentExtractorNodeData
 from core.workflow.nodes.document_extractor.node import (
     _extract_text_from_docx,
+    _extract_text_from_excel,
     _extract_text_from_pdf,
     _extract_text_from_plain_text,
 )

@@ -149,7 +150,7 @@ def test_extract_text_from_plain_text_non_utf8():
         temp_file.write(non_utf8_content)
         temp_file.seek(0)
         text = _extract_text_from_plain_text(temp_file.read())
-    assert text == "Hello, world."
+    assert text == "Hello, world©."


 @patch("pypdfium2.PdfDocument")
@@ -182,3 +183,181 @@ def test_extract_text_from_docx(mock_document):

 def test_node_type(document_extractor_node):
     assert document_extractor_node._node_type == NodeType.DOCUMENT_EXTRACTOR
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_single_sheet(mock_excel_file):
+    """Test extracting text from Excel file with single sheet."""
+    # Mock DataFrame
+    mock_df = Mock()
+    mock_df.dropna = Mock()
+    mock_df.to_markdown.return_value = "| Name | Age |\n|------|-----|\n| John | 25 |"
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["Sheet1"]
+    mock_excel_instance.parse.return_value = mock_df
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_content"
+    result = _extract_text_from_excel(file_content)
+
+    expected = "| Name | Age |\n|------|-----|\n| John | 25 |\n\n"
+    assert result == expected
+    mock_excel_file.assert_called_once()
+    mock_df.dropna.assert_called_once_with(how="all", inplace=True)
+    mock_df.to_markdown.assert_called_once_with(index=False, floatfmt="")
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_multiple_sheets(mock_excel_file):
+    """Test extracting text from Excel file with multiple sheets."""
+    # Mock DataFrames for different sheets
+    mock_df1 = Mock()
+    mock_df1.dropna = Mock()
+    mock_df1.to_markdown.return_value = "| Product | Price |\n|---------|-------|\n| Apple | 1.50 |"
+
+    mock_df2 = Mock()
+    mock_df2.dropna = Mock()
+    mock_df2.to_markdown.return_value = "| City | Population |\n|------|------------|\n| NYC | 8000000 |"
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["Products", "Cities"]
+    mock_excel_instance.parse.side_effect = [mock_df1, mock_df2]
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_content_multiple_sheets"
+    result = _extract_text_from_excel(file_content)
+
+    expected = (
+        "| Product | Price |\n|---------|-------|\n| Apple | 1.50 |\n\n"
+        "| City | Population |\n|------|------------|\n| NYC | 8000000 |\n\n"
+    )
+    assert result == expected
+    assert mock_excel_instance.parse.call_count == 2
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_empty_sheets(mock_excel_file):
+    """Test extracting text from Excel file with empty sheets."""
+    # Mock empty DataFrame
+    mock_df = Mock()
+    mock_df.dropna = Mock()
+    mock_df.to_markdown.return_value = ""
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["EmptySheet"]
+    mock_excel_instance.parse.return_value = mock_df
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_empty_content"
+    result = _extract_text_from_excel(file_content)
+
+    expected = "\n\n"
+    assert result == expected
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_sheet_parse_error(mock_excel_file):
+    """Test handling of sheet parsing errors - should continue with other sheets."""
+    # Mock DataFrames - one successful, one that raises exception
+    mock_df_success = Mock()
+    mock_df_success.dropna = Mock()
+    mock_df_success.to_markdown.return_value = "| Data | Value |\n|------|-------|\n| Test | 123 |"
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["GoodSheet", "BadSheet"]
+    mock_excel_instance.parse.side_effect = [mock_df_success, Exception("Parse error")]
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_mixed_content"
+    result = _extract_text_from_excel(file_content)
+
+    expected = "| Data | Value |\n|------|-------|\n| Test | 123 |\n\n"
+    assert result == expected
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_file_error(mock_excel_file):
+    """Test handling of Excel file reading errors."""
+    mock_excel_file.side_effect = Exception("Invalid Excel file")
+
+    file_content = b"invalid_excel_content"
+
+    with pytest.raises(Exception) as exc_info:
+        _extract_text_from_excel(file_content)
+
+    # Note: The function should raise TextExtractionError, but since it's not imported in the test,
+    # we check for the general Exception pattern
+    assert "Failed to extract text from Excel file" in str(exc_info.value)
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_io_bytesio_usage(mock_excel_file):
+    """Test that BytesIO is properly used with the file content."""
+    import io
+
+    # Mock DataFrame
+    mock_df = Mock()
+    mock_df.dropna = Mock()
+    mock_df.to_markdown.return_value = "| Test | Data |\n|------|------|\n| 1 | A |"
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["TestSheet"]
+    mock_excel_instance.parse.return_value = mock_df
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"test_excel_bytes"
+    result = _extract_text_from_excel(file_content)
+
+    # Verify that ExcelFile was called with a BytesIO object
+    mock_excel_file.assert_called_once()
+    call_args = mock_excel_file.call_args[0][0]
+    assert isinstance(call_args, io.BytesIO)
+
+    expected = "| Test | Data |\n|------|------|\n| 1 | A |\n\n"
+    assert result == expected
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_all_sheets_fail(mock_excel_file):
+    """Test when all sheets fail to parse - should return empty string."""
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["BadSheet1", "BadSheet2"]
+    mock_excel_instance.parse.side_effect = [Exception("Error 1"), Exception("Error 2")]
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_all_bad_sheets"
+    result = _extract_text_from_excel(file_content)
+
+    # Should return empty string when all sheets fail
+    assert result == ""
+
+
+@patch("pandas.ExcelFile")
+def test_extract_text_from_excel_markdown_formatting(mock_excel_file):
+    """Test that markdown formatting parameters are correctly applied."""
+    # Mock DataFrame
+    mock_df = Mock()
+    mock_df.dropna = Mock()
+    mock_df.to_markdown.return_value = "| Float | Int |\n|-------|-----|\n| 123456.78 | 42 |"
+
+    # Mock ExcelFile
+    mock_excel_instance = Mock()
+    mock_excel_instance.sheet_names = ["NumberSheet"]
+    mock_excel_instance.parse.return_value = mock_df
+    mock_excel_file.return_value = mock_excel_instance
+
+    file_content = b"fake_excel_numbers"
+    result = _extract_text_from_excel(file_content)
+
+    # Verify to_markdown was called with correct parameters
+    mock_df.to_markdown.assert_called_once_with(index=False, floatfmt="")
+
+    expected = "| Float | Int |\n|-------|-----|\n| 123456.78 | 42 |\n\n"
+    assert result == expected
@@ -2,14 +2,13 @@ import json
 from unittest import mock
 from uuid import uuid4

-import contexts
 from constants import HIDDEN_VALUE
 from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable
 from models.workflow import Workflow, WorkflowNodeExecution


 def test_environment_variables():
-    contexts.tenant_id.set("tenant_id")
+    # tenant_id context variable removed - using current_user.current_tenant_id directly

     # Create a Workflow instance
     workflow = Workflow(

@@ -38,9 +37,14 @@ def test_environment_variables():
         {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
     )

+    # Mock current_user as an EndUser
+    mock_user = mock.Mock()
+    mock_user.tenant_id = "tenant_id"
+
     with (
         mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
         mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
+        mock.patch("models.workflow.current_user", mock_user),
     ):
         # Set the environment_variables property of the Workflow instance
         variables = [variable1, variable2, variable3, variable4]

@@ -51,7 +55,7 @@ def test_environment_variables():


 def test_update_environment_variables():
-    contexts.tenant_id.set("tenant_id")
+    # tenant_id context variable removed - using current_user.current_tenant_id directly

     # Create a Workflow instance
     workflow = Workflow(

@@ -80,9 +84,14 @@ def test_update_environment_variables():
         {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
     )

+    # Mock current_user as an EndUser
+    mock_user = mock.Mock()
+    mock_user.tenant_id = "tenant_id"
+
     with (
         mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
         mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
+        mock.patch("models.workflow.current_user", mock_user),
     ):
         variables = [variable1, variable2, variable3, variable4]

@@ -104,7 +113,7 @@ def test_update_environment_variables():


 def test_to_dict():
-    contexts.tenant_id.set("tenant_id")
+    # tenant_id context variable removed - using current_user.current_tenant_id directly

     # Create a Workflow instance
     workflow = Workflow(

@@ -121,9 +130,14 @@ def test_to_dict():

     # Create some EnvironmentVariable instances

+    # Mock current_user as an EndUser
+    mock_user = mock.Mock()
+    mock_user.tenant_id = "tenant_id"
+
     with (
         mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
         mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
        mock.patch("models.workflow.current_user", mock_user),
     ):
         # Set the environment_variables property of the Workflow instance
         workflow.environment_variables = [
@@ -0,0 +1,158 @@
+from unittest.mock import Mock, patch
+
+import pytest
+
+from models.account import Account, TenantAccountRole
+from models.dataset import Dataset, DatasetPermission, DatasetPermissionEnum
+from services.dataset_service import DatasetService
+from services.errors.account import NoPermissionError
+
+
+class TestDatasetPermissionService:
+    """Test cases for dataset permission checking functionality"""
+
+    def setup_method(self):
+        """Set up test fixtures"""
+        # Mock tenant and user
+        self.tenant_id = "test-tenant-123"
+        self.creator_id = "creator-456"
+        self.normal_user_id = "normal-789"
+        self.owner_user_id = "owner-999"
+
+        # Mock dataset
+        self.dataset = Mock(spec=Dataset)
+        self.dataset.id = "dataset-123"
+        self.dataset.tenant_id = self.tenant_id
+        self.dataset.created_by = self.creator_id
+
+        # Mock users
+        self.creator_user = Mock(spec=Account)
+        self.creator_user.id = self.creator_id
+        self.creator_user.current_tenant_id = self.tenant_id
+        self.creator_user.current_role = TenantAccountRole.EDITOR
+
+        self.normal_user = Mock(spec=Account)
+        self.normal_user.id = self.normal_user_id
+        self.normal_user.current_tenant_id = self.tenant_id
+        self.normal_user.current_role = TenantAccountRole.NORMAL
+
+        self.owner_user = Mock(spec=Account)
+        self.owner_user.id = self.owner_user_id
+        self.owner_user.current_tenant_id = self.tenant_id
+        self.owner_user.current_role = TenantAccountRole.OWNER
+
+    def test_permission_check_different_tenant_should_fail(self):
+        """Test that users from different tenants cannot access dataset"""
+        self.normal_user.current_tenant_id = "different-tenant"
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+    def test_owner_can_access_any_dataset(self):
+        """Test that tenant owners can access any dataset regardless of permission"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        # Should not raise any exception
+        DatasetService.check_dataset_permission(self.dataset, self.owner_user)
+
+    def test_only_me_permission_creator_can_access(self):
+        """Test ONLY_ME permission allows only creator to access"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        # Creator should be able to access
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    def test_only_me_permission_others_cannot_access(self):
+        """Test ONLY_ME permission denies access to non-creators"""
+        self.dataset.permission = DatasetPermissionEnum.ONLY_ME
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+    def test_all_team_permission_allows_access(self):
+        """Test ALL_TEAM permission allows any team member to access"""
+        self.dataset.permission = DatasetPermissionEnum.ALL_TEAM
+
+        # Should not raise any exception for team members
+        DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_creator_can_access(self, mock_session):
+        """Test PARTIAL_TEAM permission allows creator to access"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Should not raise any exception for creator
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+        # Should not query database for creator
+        mock_session.query.assert_not_called()
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_with_explicit_permission(self, mock_session):
+        """Test PARTIAL_TEAM permission allows users with explicit permission"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Mock database query to return a permission record
+        mock_permission = Mock(spec=DatasetPermission)
+        mock_session.query().filter_by().first.return_value = mock_permission
+
+        # Should not raise any exception
+        DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify database was queried correctly
+        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_without_explicit_permission(self, mock_session):
+        """Test PARTIAL_TEAM permission denies users without explicit permission"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Mock database query to return None (no permission record)
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify database was queried correctly
+        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)
+
+    @patch("services.dataset_service.db.session")
+    def test_partial_team_permission_non_creator_without_permission_fails(self, mock_session):
+        """Test that non-creators without explicit permission are denied access"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Create a different user (not the creator)
+        other_user = Mock(spec=Account)
+        other_user.id = "other-user-123"
+        other_user.current_tenant_id = self.tenant_id
+        other_user.current_role = TenantAccountRole.NORMAL
+
+        # Mock database query to return None (no permission record)
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
+            DatasetService.check_dataset_permission(self.dataset, other_user)
+
+    def test_partial_team_permission_uses_correct_enum(self):
+        """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM"""
+        # This test ensures we're using the enum instead of string literals
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+
+        # Creator should always have access
+        DatasetService.check_dataset_permission(self.dataset, self.creator_user)
+
+    @patch("services.dataset_service.logging")
+    @patch("services.dataset_service.db.session")
+    def test_permission_denied_logs_debug_message(self, mock_session, mock_logging):
+        """Test that permission denied events are logged"""
+        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
+        mock_session.query().filter_by().first.return_value = None
+
+        with pytest.raises(NoPermissionError):
+            DatasetService.check_dataset_permission(self.dataset, self.normal_user)
+
+        # Verify debug message was logged
+        mock_logging.debug.assert_called_with(
+            f"User {self.normal_user.id} does not have permission to access dataset {self.dataset.id}"
+        )
api/uv.lock: 4339 lines changed (diff suppressed because it is too large)
@@ -412,6 +412,7 @@ QDRANT_API_KEY=difyai123456
QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334
+QDRANT_REPLICATION_FACTOR=1

# Milvus configuration. Only available when VECTOR_STORE is `milvus`.
# The milvus uri.

@@ -801,7 +802,7 @@ MAX_TOOLS_NUM=10
MAX_PARALLEL_LIMIT=10

# The maximum number of iterations for agent setting
-MAX_ITERATIONS_NUM=5
+MAX_ITERATIONS_NUM=99

# ------------------------------
# Environment Variables for web Service
@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.

@@ -31,7 +31,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.

@@ -57,7 +57,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.4.0
+    image: langgenius/dify-web:1.4.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -75,7 +75,7 @@ services:
       LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
      MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
       MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
-      MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-5}
+      MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
       ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
       ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
       ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}

@@ -142,7 +142,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     environment:
       # Use the shared environment variables.
@@ -71,7 +71,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     env_file:
       - ./middleware.env
@@ -138,6 +138,7 @@ x-shared-env: &shared-api-worker-env
  QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
  QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
  QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
+  QDRANT_REPLICATION_FACTOR: ${QDRANT_REPLICATION_FACTOR:-1}
  MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
  MILVUS_DATABASE: ${MILVUS_DATABASE:-}
  MILVUS_TOKEN: ${MILVUS_TOKEN:-}

@@ -353,7 +354,7 @@ x-shared-env: &shared-api-worker-env
  LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
  MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
  MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
-  MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-5}
+  MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
  TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
  PGUSER: ${PGUSER:-${DB_USERNAME}}
  POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-${DB_PASSWORD}}

@@ -500,7 +501,7 @@ x-shared-env: &shared-api-worker-env
 services:
   # API service
   api:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.

@@ -529,7 +530,7 @@ services:
   # worker service
   # The Celery worker for processing the queue.
   worker:
-    image: langgenius/dify-api:1.4.0
+    image: langgenius/dify-api:1.4.1
     restart: always
     environment:
       # Use the shared environment variables.

@@ -555,7 +556,7 @@ services:

   # Frontend web application.
   web:
-    image: langgenius/dify-web:1.4.0
+    image: langgenius/dify-web:1.4.1
     restart: always
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -573,7 +574,7 @@ services:
       LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
       MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
       MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
-      MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-5}
+      MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
       ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
       ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
       ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}

@@ -640,7 +641,7 @@ services:

   # plugin daemon
   plugin_daemon:
-    image: langgenius/dify-plugin-daemon:0.0.10-local
+    image: langgenius/dify-plugin-daemon:0.1.1-local
     restart: always
     environment:
       # Use the shared environment variables.
Binary image file changed, not shown (53 KiB → 60 KiB).
@@ -50,7 +50,7 @@ NEXT_PUBLIC_MAX_TOOLS_NUM=10
NEXT_PUBLIC_MAX_PARALLEL_LIMIT=10

# The maximum number of iterations for agent setting
-NEXT_PUBLIC_MAX_ITERATIONS_NUM=5
+NEXT_PUBLIC_MAX_ITERATIONS_NUM=99

NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER=true
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL=true
@@ -2,7 +2,7 @@

 import { useContext, useContextSelector } from 'use-context-selector'
 import { useRouter } from 'next/navigation'
-import { useCallback, useEffect, useState } from 'react'
+import { useCallback, useEffect, useMemo, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill } from '@remixicon/react'
 import cn from '@/utils/classnames'

@@ -35,6 +35,7 @@ import Tooltip from '@/app/components/base/tooltip'
 import AccessControl from '@/app/components/app/app-access-control'
 import { AccessMode } from '@/models/access-control'
 import { useGlobalPublicStore } from '@/context/global-public-context'
+import { formatTime } from '@/utils/time'

 export type AppCardProps = {
   app: App

@@ -296,6 +297,15 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
     setTags(app.tags)
   }, [app.tags])

+  const EditTimeText = useMemo(() => {
+    const timeText = formatTime({
+      date: (app.updated_at || app.created_at) * 1000,
+      dateFormat: 'MM/DD/YYYY h:mm',
+    })
+    return `${t('datasetDocuments.segment.editedAt')} ${timeText}`
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [app.updated_at, app.created_at])
+
   return (
     <>
       <div

@@ -320,12 +330,10 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
           <div className='flex items-center text-sm font-semibold leading-5 text-text-secondary'>
             <div className='truncate' title={app.name}>{app.name}</div>
           </div>
-          <div className='flex items-center text-[10px] font-medium leading-[18px] text-text-tertiary'>
-            {app.mode === 'advanced-chat' && <div className='truncate'>{t('app.types.advanced').toUpperCase()}</div>}
-            {app.mode === 'chat' && <div className='truncate'>{t('app.types.chatbot').toUpperCase()}</div>}
-            {app.mode === 'agent-chat' && <div className='truncate'>{t('app.types.agent').toUpperCase()}</div>}
-            {app.mode === 'workflow' && <div className='truncate'>{t('app.types.workflow').toUpperCase()}</div>}
-            {app.mode === 'completion' && <div className='truncate'>{t('app.types.completion').toUpperCase()}</div>}
+          <div className='flex items-center gap-1 text-[10px] font-medium leading-[18px] text-text-tertiary'>
+            <div className='truncate' title={app.author_name}>{app.author_name}</div>
+            <div>·</div>
+            <div className='truncate'>{EditTimeText}</div>
           </div>
         </div>
         <div className='flex h-5 w-5 shrink-0 items-center justify-center'>
@@ -111,7 +111,7 @@ const DatasetCard = ({
   return (
     <>
       <div
-        className='group relative col-span-1 flex min-h-[160px] cursor-pointer flex-col rounded-xl border-[0.5px] border-solid border-components-card-border bg-components-card-bg shadow-sm transition-all duration-200 ease-in-out hover:shadow-lg'
+        className='group relative col-span-1 flex min-h-[171px] cursor-pointer flex-col rounded-xl border-[0.5px] border-solid border-components-card-border bg-components-card-bg shadow-sm transition-all duration-200 ease-in-out hover:shadow-lg'
         data-disable-nprogress={true}
         onClick={(e) => {
           e.preventDefault()
@@ -6,10 +6,12 @@ import Button from '../components/base/button'
 import Avatar from './avatar'
 import DifyLogo from '@/app/components/base/logo/dify-logo'
 import { useCallback } from 'react'
+import { useGlobalPublicStore } from '@/context/global-public-context'

 const Header = () => {
   const { t } = useTranslation()
   const router = useRouter()
+  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)

   const back = useCallback(() => {
     router.back()

@@ -19,7 +21,13 @@ const Header = () => {
     <div className='flex flex-1 items-center justify-between px-4'>
       <div className='flex items-center gap-3'>
         <div className='flex cursor-pointer items-center' onClick={back}>
-          <DifyLogo />
+          {systemFeatures.branding.enabled && systemFeatures.branding.login_page_logo
+            ? <img
+              src={systemFeatures.branding.login_page_logo}
+              className='block h-[22px] w-auto object-contain'
+              alt='Dify logo'
+            />
+            : <DifyLogo />}
         </div>
         <div className='h-4 w-[1px] origin-center rotate-[11.31deg] bg-divider-regular' />
         <p className='title-3xl-semi-bold relative mt-[-2px] text-text-primary'>{t('common.account.account')}</p>
@@ -32,7 +32,6 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
 import TextGeneration from '@/app/components/app/text-generate/item'
 import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils'
 import MessageLogModal from '@/app/components/base/message-log-modal'
-import PromptLogModal from '@/app/components/base/prompt-log-modal'
 import { useStore as useAppStore } from '@/app/components/app/store'
 import { useAppContext } from '@/context/app-context'
 import useTimestamp from '@/hooks/use-timestamp'

@@ -191,13 +190,11 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
   const { userProfile: { timezone } } = useAppContext()
   const { formatTime } = useTimestamp()
   const { onClose, appDetail } = useContext(DrawerContext)
-  const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, showPromptLogModal, setShowPromptLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({
+  const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({
     currentLogItem: state.currentLogItem,
     setCurrentLogItem: state.setCurrentLogItem,
     showMessageLogModal: state.showMessageLogModal,
     setShowMessageLogModal: state.setShowMessageLogModal,
-    showPromptLogModal: state.showPromptLogModal,
-    setShowPromptLogModal: state.setShowPromptLogModal,
     currentLogModalActiveTab: state.currentLogModalActiveTab,
   })))
   const { t } = useTranslation()

@@ -518,16 +515,6 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
           defaultTab={currentLogModalActiveTab}
         />
       )}
-      {showPromptLogModal && (
-        <PromptLogModal
-          width={width}
-          currentLogItem={currentLogItem}
-          onCancel={() => {
-            setCurrentLogItem()
-            setShowPromptLogModal(false)
-          }}
-        />
-      )}
     </div>
   )
 }
@@ -148,10 +148,12 @@ const Sidebar = ({ isPanel }: Props) => {
  'flex shrink-0 items-center gap-1.5 px-1',
)}>
  <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('share.chat.poweredBy')}</div>
  {systemFeatures.branding.enabled ? (
    <img src={systemFeatures.branding.login_page_logo} alt='logo' className='block h-5 w-auto' />
  ) : (
    <DifyLogo size='small' />
  )}
  {
    systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
      ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
      : appData?.custom_config?.replace_webapp_logo
        ? <img src={`${appData?.custom_config?.replace_webapp_logo}`} alt='logo' className='block h-5 w-auto' />
        : <DifyLogo size='small' />
  }
</div>
)}
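The ternary chain introduced here recurs in several hunks below and encodes a three-level precedence. A minimal sketch of that precedence as a plain function, with assumed input names:

```ts
// Sketch of the logo precedence used across these hunks (input names are
// assumptions): an enabled workspace branding logo wins, then the app's
// custom replacement logo, and only then the built-in Dify logo.
type Branding = { enabled: boolean; workspace_logo?: string }

export function pickLogoSrc(branding: Branding, replaceWebappLogo?: string): string | undefined {
  if (branding.enabled && branding.workspace_logo)
    return branding.workspace_logo // self-hosted branding takes precedence
  if (replaceWebappLogo)
    return replaceWebappLogo // per-app custom logo
  return undefined // caller renders <DifyLogo /> instead
}
```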
@@ -234,6 +234,4 @@ const Answer: FC<AnswerProps> = ({
  )
}

export default memo(Answer, (prevProps, nextProps) =>
  prevProps.responding === false && nextProps.responding === false,
)
export default memo(Answer)
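The removed comparator deserves a second look: `memo`'s second argument returns true to skip a re-render, so the old version froze an answer once both the previous and next props reported `responding === false`, even if other props (such as the message content) changed. A sketch of the difference, with an assumed minimal prop set:

```tsx
import { memo } from 'react'

type AnswerProps = { responding: boolean; content: string }

const Answer = ({ responding, content }: AnswerProps) => (
  <div>{responding ? 'Generating…' : content}</div>
)

// Old behaviour (removed above): returning true means "props are equal, skip
// the re-render", so a finished answer never re-rendered again.
// export default memo(Answer, (prev, next) =>
//   prev.responding === false && next.responding === false)

// New behaviour: the default shallow comparison re-renders whenever any prop
// actually changes.
export default memo(Answer)
```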
@@ -13,6 +13,7 @@ import Divider from '@/app/components/base/divider'
import ViewFormDropdown from '@/app/components/base/chat/embedded-chatbot/inputs-form/view-form-dropdown'
import DifyLogo from '@/app/components/base/logo/dify-logo'
import cn from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'

export type IHeaderProps = {
  isMobile?: boolean
@@ -42,6 +43,7 @@ const Header: FC<IHeaderProps> = ({
  const [parentOrigin, setParentOrigin] = useState('')
  const [showToggleExpandButton, setShowToggleExpandButton] = useState(false)
  const [expanded, setExpanded] = useState(false)
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)

  const handleMessageReceived = useCallback((event: MessageEvent) => {
    let currentParentOrigin = parentOrigin
@@ -85,12 +87,13 @@ const Header: FC<IHeaderProps> = ({
      'flex shrink-0 items-center gap-1.5 px-2',
    )}>
      <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('share.chat.poweredBy')}</div>
      {appData?.custom_config?.replace_webapp_logo && (
        <img src={appData?.custom_config?.replace_webapp_logo} alt='logo' className='block h-5 w-auto' />
      )}
      {!appData?.custom_config?.replace_webapp_logo && (
        <DifyLogo size='small' />
      )}
      {
        systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
          : appData?.custom_config?.replace_webapp_logo
            ? <img src={`${appData?.custom_config?.replace_webapp_logo}`} alt='logo' className='block h-5 w-auto' />
            : <DifyLogo size='small' />
      }
    </div>
  )}
</div>
@@ -22,6 +22,7 @@ import ChatWrapper from '@/app/components/base/chat/embedded-chatbot/chat-wrappe
import DifyLogo from '@/app/components/base/logo/dify-logo'
import cn from '@/utils/classnames'
import useDocumentTitle from '@/hooks/use-document-title'
import { useGlobalPublicStore } from '@/context/global-public-context'

const Chatbot = () => {
  const {
@@ -37,6 +38,7 @@ const Chatbot = () => {
    themeBuilder,
  } = useEmbeddedChatbotContext()
  const { t } = useTranslation()
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)

  const customConfig = appData?.custom_config
  const site = appData?.site
@@ -115,12 +117,13 @@ const Chatbot = () => {
      'flex shrink-0 items-center gap-1.5 px-2',
    )}>
      <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('share.chat.poweredBy')}</div>
      {appData?.custom_config?.replace_webapp_logo && (
        <img src={appData?.custom_config?.replace_webapp_logo} alt='logo' className='block h-5 w-auto' />
      )}
      {!appData?.custom_config?.replace_webapp_logo && (
        <DifyLogo size='small' />
      )}
      {
        systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
          : appData?.custom_config?.replace_webapp_logo
            ? <img src={`${appData?.custom_config?.replace_webapp_logo}`} alt='logo' className='block h-5 w-auto' />
            : <DifyLogo size='small' />
      }
    </div>
  )}
</div>
@@ -3,7 +3,6 @@ import type { FC } from 'react'
import classNames from '@/utils/classnames'
import useTheme from '@/hooks/use-theme'
import { basePath } from '@/utils/var'
import { useGlobalPublicStore } from '@/context/global-public-context'
export type LogoStyle = 'default' | 'monochromeWhite'

export const logoPathMap: Record<LogoStyle, string> = {
@@ -32,18 +31,12 @@ const DifyLogo: FC<DifyLogoProps> = ({
}) => {
  const { theme } = useTheme()
  const themedStyle = (theme === 'dark' && style === 'default') ? 'monochromeWhite' : style
  const { systemFeatures } = useGlobalPublicStore()
  const hasBrandingLogo = Boolean(systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo)

  let src = `${basePath}${logoPathMap[themedStyle]}`
  if (hasBrandingLogo)
    src = systemFeatures.branding.workspace_logo

  return (
    <img
      src={src}
      className={classNames('block object-contain', logoSizeMap[size], hasBrandingLogo && 'w-auto', className)}
      alt={hasBrandingLogo ? 'Logo' : 'Dify logo'}
      src={`${basePath}${logoPathMap[themedStyle]}`}
      className={classNames('block object-contain', logoSizeMap[size], className)}
      alt='Dify logo'
    />
  )
}
@@ -91,6 +91,11 @@ const initMermaid = () => {
    numberSectionStyles: 4,
    axisFormat: '%Y-%m-%d',
  },
  mindmap: {
    useMaxWidth: true,
    padding: 10,
    diagramPadding: 20,
  },
  maxTextSize: 50000,
})
isMermaidInitialized = true
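For context, this hunk extends a one-time `mermaid.initialize` call. A trimmed sketch of that call with only the keys visible in the diff (`startOnLoad: false` is an assumption; the real file configures much more):

```ts
import mermaid from 'mermaid'

let isMermaidInitialized = false

// One-time global setup: the mindmap section added above applies to every
// mindmap rendered after this call.
const initMermaid = () => {
  if (isMermaidInitialized)
    return
  mermaid.initialize({
    startOnLoad: false,
    gantt: { numberSectionStyles: 4, axisFormat: '%Y-%m-%d' },
    mindmap: { useMaxWidth: true, padding: 10 },
    maxTextSize: 50000,
  })
  isMermaidInitialized = true
}
```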
@@ -289,11 +294,12 @@ const Flowchart = React.forwardRef((props: {
    try {
      let finalCode: string

      // Check if it's a gantt chart
      // Check if it's a gantt chart or mindmap
      const isGanttChart = primitiveCode.trim().startsWith('gantt')
      const isMindMap = primitiveCode.trim().startsWith('mindmap')

      if (isGanttChart) {
        // For gantt charts, ensure each task is on its own line
      if (isGanttChart || isMindMap) {
        // For gantt charts and mindmaps, ensure each task is on its own line
        // and preserve exact whitespace/format
        finalCode = primitiveCode.trim()
      }
@@ -352,6 +358,11 @@ const Flowchart = React.forwardRef((props: {
      numberSectionStyles: 4,
      axisFormat: '%Y-%m-%d',
    },
    mindmap: {
      useMaxWidth: true,
      padding: 10,
      diagramPadding: 20,
    },
  }

  if (look === 'classic') {
@@ -476,15 +487,15 @@ const Flowchart = React.forwardRef((props: {
      'bg-white': currentTheme === Theme.light,
      'bg-slate-900': currentTheme === Theme.dark,
    }),
    mermaidDiv: cn('mermaid relative h-auto w-full cursor-pointer', {
    mermaidDiv: cn('mermaid cursor-pointer h-auto w-full relative', {
      'bg-white': currentTheme === Theme.light,
      'bg-slate-900': currentTheme === Theme.dark,
    }),
    errorMessage: cn('px-[26px] py-4', {
    errorMessage: cn('py-4 px-[26px]', {
      'text-red-500': currentTheme === Theme.light,
      'text-red-400': currentTheme === Theme.dark,
    }),
    errorIcon: cn('h-6 w-6', {
    errorIcon: cn('w-6 h-6', {
      'text-red-500': currentTheme === Theme.light,
      'text-red-400': currentTheme === Theme.dark,
    }),
@@ -492,7 +503,7 @@ const Flowchart = React.forwardRef((props: {
      'text-gray-700': currentTheme === Theme.light,
      'text-gray-300': currentTheme === Theme.dark,
    }),
    themeToggle: cn('flex h-10 w-10 items-center justify-center rounded-full shadow-md backdrop-blur-sm transition-all duration-300', {
    themeToggle: cn('flex items-center justify-center w-10 h-10 rounded-full transition-all duration-300 shadow-md backdrop-blur-sm', {
      'bg-white/80 hover:bg-white hover:shadow-lg text-gray-700 border border-gray-200': currentTheme === Theme.light,
      'bg-slate-800/80 hover:bg-slate-700 hover:shadow-lg text-yellow-300 border border-slate-600': currentTheme === Theme.dark,
    }),
@@ -501,7 +512,7 @@ const Flowchart = React.forwardRef((props: {
  // Style classes for look options
  const getLookButtonClass = (lookType: 'classic' | 'handDrawn') => {
    return cn(
      'system-sm-medium mb-4 flex h-8 w-[calc((100%-8px)/2)] cursor-pointer items-center justify-center rounded-lg border border-components-option-card-option-border bg-components-option-card-option-bg text-text-secondary',
      'flex items-center justify-center mb-4 w-[calc((100%-8px)/2)] h-8 rounded-lg border border-components-option-card-option-border bg-components-option-card-option-bg cursor-pointer system-sm-medium text-text-secondary',
      look === lookType && 'border-[1.5px] border-components-option-card-option-selected-border bg-components-option-card-option-selected-bg text-text-primary',
      currentTheme === Theme.dark && 'border-slate-600 bg-slate-800 text-slate-300',
      look === lookType && currentTheme === Theme.dark && 'border-blue-500 bg-slate-700 text-white',
@@ -512,7 +523,7 @@ const Flowchart = React.forwardRef((props: {
    <div ref={ref as React.RefObject<HTMLDivElement>} className={themeClasses.container}>
      <div className={themeClasses.segmented}>
        <div className="msh-segmented-group">
          <label className="msh-segmented-item m-2 flex w-[200px] items-center space-x-1">
          <label className="msh-segmented-item flex items-center space-x-1 m-2 w-[200px]">
            <div
              key='classic'
              className={getLookButtonClass('classic')}
@@ -534,7 +545,7 @@ const Flowchart = React.forwardRef((props: {
      <div ref={containerRef} style={{ position: 'absolute', visibility: 'hidden', height: 0, overflow: 'hidden' }} />

      {isLoading && !svgCode && (
        <div className='px-[26px] py-4'>
        <div className='py-4 px-[26px]'>
          <LoadingAnim type='text'/>
          {!isCodeComplete && (
            <div className="mt-2 text-sm text-gray-500">
@@ -546,7 +557,7 @@ const Flowchart = React.forwardRef((props: {

      {svgCode && (
        <div className={themeClasses.mermaidDiv} style={{ objectFit: 'cover' }} onClick={() => setImagePreviewUrl(svgCode)}>
          <div className="absolute bottom-2 left-2 z-[100]">
          <div className="absolute left-2 bottom-2 z-[100]">
            <button
              onClick={(e) => {
                e.stopPropagation()
@@ -22,6 +22,10 @@ export function preprocessMermaidCode(code: string): string {
    .replace(/section\s+([^:]+):/g, (match, sectionName) => `section ${sectionName}:`)
    // Fix common syntax issues
    .replace(/fifopacket/g, 'rect')
    // Ensure graph has direction
    .replace(/^graph\s+((?:TB|BT|RL|LR)*)/, (match, direction) => {
      return direction ? match : 'graph TD'
    })
    // Clean up empty lines and extra spaces
    .trim()
}
@@ -32,9 +36,9 @@ export function preprocessMermaidCode(code: string): string {
export function prepareMermaidCode(code: string, style: 'classic' | 'handDrawn'): string {
  let finalCode = preprocessMermaidCode(code)

  // Special handling for gantt charts
  if (finalCode.trim().startsWith('gantt')) {
    // For gantt charts, preserve the structure exactly as is
  // Special handling for gantt charts and mindmaps
  if (finalCode.trim().startsWith('gantt') || finalCode.trim().startsWith('mindmap')) {
    // For gantt charts and mindmaps, preserve the structure exactly as is
    return finalCode
  }

@@ -173,8 +177,15 @@ export function isMermaidCodeComplete(code: string): boolean {
    return lines.length >= 3
  }

  // Special handling for mindmaps
  if (trimmedCode.startsWith('mindmap')) {
    // For mindmaps, check if it has at least a root node
    const lines = trimmedCode.split('\n').filter(line => line.trim().length > 0)
    return lines.length >= 2
  }

  // Check for basic syntax structure
  const hasValidStart = /^(graph|flowchart|sequenceDiagram|classDiagram|classDef|class|stateDiagram|gantt|pie|er|journey|requirementDiagram)/.test(trimmedCode)
  const hasValidStart = /^(graph|flowchart|sequenceDiagram|classDiagram|classDef|class|stateDiagram|gantt|pie|er|journey|requirementDiagram|mindmap)/.test(trimmedCode)

  // Check for balanced brackets and parentheses
  const isBalanced = (() => {
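Taken together, the three utils hunks teach the renderer to treat `mindmap` like `gantt`: pass the source through untouched and consider it renderable once a root node exists. A usage sketch (the import path is an assumption):

```ts
import { isMermaidCodeComplete, prepareMermaidCode } from './utils' // assumed path

const streaming = 'mindmap' // only the keyword has arrived so far
const complete = 'mindmap\n  root((Dify))\n    Workflows'

console.log(isMermaidCodeComplete(streaming)) // false: fewer than 2 non-empty lines
console.log(isMermaidCodeComplete(complete))  // true: keyword plus a root node

// Mindmaps skip the classic/hand-drawn rewriting and keep their indentation,
// since mermaid's mindmap grammar is whitespace-sensitive.
console.log(prepareMermaidCode(complete, 'handDrawn')) // preprocessed source, no style rewrite
```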
@@ -40,7 +40,7 @@ const TabSlider: FC<TabSliderProps> = ({
    const newIndex = options.findIndex(option => option.value === value)
    setActiveIndex(newIndex)
    updateSliderStyle(newIndex)
  }, [value, options, pluginList])
  }, [value, options, pluginList?.total])

  return (
    <div className={cn(className, 'relative inline-flex items-center justify-center rounded-[10px] bg-components-segmented-control-bg-normal p-0.5')}>
@@ -69,13 +69,13 @@ const TabSlider: FC<TabSliderProps> = ({
          {option.text}
          {/* if no plugin installed, the badge won't show */}
          {option.value === 'plugins'
            && (pluginList?.plugins.length ?? 0) > 0
            && (pluginList?.total ?? 0) > 0
            && <Badge
              size='s'
              uppercase={true}
              state={BadgeState.Default}
            >
              {pluginList?.plugins.length}
              {pluginList?.total}
            </Badge>
          }
        </div>
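Both TabSlider changes follow from the plugin list becoming paginated: `plugins.length` now counts only the loaded page, while the response's `total` counts every installed plugin. A sketch of the distinction, with hypothetical values:

```ts
// Hypothetical paginated response: one page of 50 loaded, 73 installed overall.
const pluginList = {
  plugins: new Array(50).fill({ name: 'demo-plugin' }), // page 1 only
  total: 73,                                            // across all pages
}

const badgeCount = pluginList.total           // 73: what the badge should show
const loadedCount = pluginList.plugins.length // 50: what it showed before
```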
@@ -24,6 +24,7 @@ import {
} from '@/service/common'
import { useAppContext } from '@/context/app-context'
import cn from '@/utils/classnames'
import { useGlobalPublicStore } from '@/context/global-public-context'

const ALLOW_FILE_EXTENSIONS = ['svg', 'png']

@@ -39,6 +40,7 @@ const CustomWebAppBrand = () => {
  const [fileId, setFileId] = useState('')
  const [imgKey, setImgKey] = useState(Date.now())
  const [uploadProgress, setUploadProgress] = useState(0)
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
  const isSandbox = enableBilling && plan.type === Plan.sandbox
  const uploading = uploadProgress > 0 && uploadProgress < 100
  const webappLogo = currentWorkspace.custom_config?.replace_webapp_logo || ''
@@ -244,9 +246,12 @@ const CustomWebAppBrand = () => {
  {!webappBrandRemoved && (
    <>
      <div className='system-2xs-medium-uppercase text-text-tertiary'>POWERED BY</div>
      {webappLogo
        ? <img src={`${webappLogo}?hash=${imgKey}`} alt='logo' className='block h-5 w-auto' />
        : <DifyLogo size='small' />
      }
      {
        systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
          : webappLogo
            ? <img src={`${webappLogo}?hash=${imgKey}`} alt='logo' className='block h-5 w-auto' />
            : <DifyLogo size='small' />
      }
    </>
  )}
@@ -303,9 +308,12 @@ const CustomWebAppBrand = () => {
  {!webappBrandRemoved && (
    <>
      <div className='system-2xs-medium-uppercase text-text-tertiary'>POWERED BY</div>
      {webappLogo
        ? <img src={`${webappLogo}?hash=${imgKey}`} alt='logo' className='block h-5 w-auto' />
        : <DifyLogo size='small' />
      }
      {
        systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
          : webappLogo
            ? <img src={`${webappLogo}?hash=${imgKey}`} alt='logo' className='block h-5 w-auto' />
            : <DifyLogo size='small' />
      }
    </>
  )}
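One detail worth noting in these two hunks is the `?hash=${imgKey}` suffix, a cache-busting query string: `imgKey` is reset to `Date.now()` after an upload, so the browser refetches the logo even though its path is unchanged. A sketch (names mirror the hunk; the URL is hypothetical):

```ts
let imgKey = Date.now()

const logoSrc = (webappLogo: string) => `${webappLogo}?hash=${imgKey}`

// After a successful logo upload, bump the key so the next render produces a
// new URL and bypasses the browser's image cache.
const onUploaded = () => {
  imgKey = Date.now()
}

onUploaded()
console.log(logoSrc('/files/webapp-logo.png')) // '/files/webapp-logo.png?hash=<timestamp>'
```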
@@ -91,6 +91,7 @@ const NewChildSegmentModal: FC<NewChildSegmentModalProps> = ({
    customComponent: isFullDocMode && CustomButton,
  })
  handleCancel('add')
  setContent('')
  if (isFullDocMode) {
    refreshTimer.current = setTimeout(() => {
      onSave()
@@ -118,6 +118,9 @@ const NewSegmentModal: FC<NewSegmentModalProps> = ({
    customComponent: CustomButton,
  })
  handleCancel('add')
  setQuestion('')
  setAnswer('')
  setKeywords([])
  refreshTimer.current = setTimeout(() => {
    onSave()
  }, 3000)
@@ -9,6 +9,7 @@ import type { LangGeniusVersionResponse } from '@/models/common'
import { IS_CE_EDITION } from '@/config'
import DifyLogo from '@/app/components/base/logo/dify-logo'
import { noop } from 'lodash-es'
import { useGlobalPublicStore } from '@/context/global-public-context'

type IAccountSettingProps = {
  langeniusVersionInfo: LangGeniusVersionResponse
@@ -21,6 +22,7 @@ export default function AccountAbout({
}: IAccountSettingProps) {
  const { t } = useTranslation()
  const isLatest = langeniusVersionInfo.current_version === langeniusVersionInfo.latest_version
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)

  return (
    <Modal
@@ -33,7 +35,14 @@ export default function AccountAbout({
        <RiCloseLine className='h-4 w-4 text-text-tertiary' />
      </div>
      <div className='flex flex-col items-center gap-4 py-8'>
        <DifyLogo size='large' className='mx-auto' />
        {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img
            src={systemFeatures.branding.workspace_logo}
            className='block h-7 w-auto object-contain'
            alt='logo'
          />
          : <DifyLogo size='large' className='mx-auto' />}

        <div className='text-center text-xs font-normal text-text-tertiary'>Version {langeniusVersionInfo?.current_version}</div>
        <div className='flex flex-col items-center gap-2 text-center text-xs font-normal text-text-secondary'>
          <div>© {dayjs().year()} LangGenius, Inc., Contributors.</div>
@@ -1,6 +1,5 @@
import type { FC } from 'react'
import type { ModelProvider } from '../declarations'
import { basePath } from '@/utils/var'
import { useLanguage } from '../hooks'
import { Openai } from '@/app/components/base/icons/src/vender/other'
import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm'
@@ -41,7 +40,7 @@ const ProviderIcon: FC<ProviderIconProps> = ({
  <div className={cn('inline-flex items-center gap-2', className)}>
    <img
      alt='provider-icon'
      src={basePath + renderI18nObject(provider.icon_small, language)}
      src={renderI18nObject(provider.icon_small, language)}
      className='h-6 w-6'
    />
    <div className='system-md-semibold text-text-primary'>
@@ -21,6 +21,7 @@ import { useModalContext } from '@/context/modal-context'
import PlanBadge from './plan-badge'
import LicenseNav from './license-env'
import { Plan } from '../billing/type'
import { useGlobalPublicStore } from '@/context/global-public-context'

const navClassName = `
  flex items-center relative mr-0 sm:mr-3 px-3 h-8 rounded-xl
@@ -36,6 +37,7 @@ const Header = () => {
  const [isShowNavMenu, { toggle, setFalse: hideNavMenu }] = useBoolean(false)
  const { enableBilling, plan } = useProviderContext()
  const { setShowPricingModal, setShowAccountSettingModal } = useModalContext()
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
  const isFreePlan = plan.type === Plan.sandbox
  const handlePlanClick = useCallback(() => {
    if (isFreePlan)
@@ -61,7 +63,13 @@ const Header = () => {
  !isMobile
  && <div className='flex shrink-0 items-center gap-1.5 self-stretch pl-3'>
    <Link href="/apps" className='flex h-8 shrink-0 items-center justify-center gap-2 px-0.5'>
      <DifyLogo />
      {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
        ? <img
          src={systemFeatures.branding.workspace_logo}
          className='block h-[22px] w-auto object-contain'
          alt='logo'
        />
        : <DifyLogo />}
    </Link>
    <div className='font-light text-divider-deep'>/</div>
    <div className='flex items-center gap-0.5'>
@@ -76,7 +84,13 @@ const Header = () => {
  {isMobile && (
    <div className='flex'>
      <Link href="/apps" className='mr-4 flex items-center'>
        <DifyLogo />
        {systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img
            src={systemFeatures.branding.workspace_logo}
            className='block h-[22px] w-auto object-contain'
            alt='logo'
          />
          : <DifyLogo />}
      </Link>
      <div className='font-light text-divider-deep'>/</div>
      {enableBilling ? <PlanBadge allowHover sandboxAsUpgrade plan={plan.type} onClick={handlePlanClick} /> : <LicenseNav />}
@@ -78,7 +78,7 @@ const ActionList = ({
  className='w-full'
  onClick={() => setShowSettingAuth(true)}
  disabled={!isCurrentWorkspaceManager}
>{t('tools.auth.unauthorized')}</Button>
>{t('workflow.nodes.tool.authorize')}</Button>
)}
</div>
<div className='flex flex-col gap-2'>
@@ -141,7 +141,7 @@ const MultipleToolSelector = ({
  }
  panelShowState={panelShowState}
  onPanelShowStateChange={setPanelShowState}

  isEdit={false}
/>
{value.length === 0 && (
  <div className='system-xs-regular flex justify-center rounded-[10px] bg-background-section p-3 text-text-tertiary'>{t('plugin.detailPanel.toolSelector.empty')}</div>
@@ -158,6 +158,7 @@ const MultipleToolSelector = ({
  onSelect={item => handleConfigure(item, index)}
  onDelete={() => handleDelete(index)}
  supportEnableSwitch
  isEdit
/>
</div>
))}
@@ -54,6 +54,7 @@ type Props = {
  scope?: string
  value?: ToolValue
  selectedTools?: ToolValue[]
  isEdit?: boolean
  onSelect: (tool: {
    provider_name: string
    tool_name: string
@@ -77,6 +78,7 @@ type Props = {
const ToolSelector: FC<Props> = ({
  value,
  selectedTools,
  isEdit,
  disabled,
  placement = 'left',
  offset = 4,
@@ -277,7 +279,7 @@ const ToolSelector: FC<Props> = ({
  <div className={cn('relative max-h-[642px] min-h-20 w-[361px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur pb-4 shadow-lg backdrop-blur-sm', !isShowSettingAuth && 'overflow-y-auto pb-2')}>
    {!isShowSettingAuth && (
      <>
        <div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t('plugin.detailPanel.toolSelector.title')}</div>
        <div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t(`plugin.detailPanel.toolSelector.${isEdit ? 'toolSetting' : 'title'}`)}</div>
        {/* base form */}
        <div className='flex flex-col gap-3 px-4 py-2'>
          <div className='flex flex-col gap-1'>
@@ -1,5 +1,6 @@
'use client'
import { useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import type { FilterState } from './filter-management'
import FilterManagement from './filter-management'
import List from './list'
@@ -7,14 +8,16 @@ import { useInstalledLatestVersion, useInstalledPluginList, useInvalidateInstall
import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel'
import { usePluginPageContext } from './context'
import { useDebounceFn } from 'ahooks'
import Button from '@/app/components/base/button'
import Empty from './empty'
import Loading from '../../base/loading'
import { PluginSource } from '../types'

const PluginsPanel = () => {
  const { t } = useTranslation()
  const filters = usePluginPageContext(v => v.filters) as FilterState
  const setFilters = usePluginPageContext(v => v.setFilters)
  const { data: pluginList, isLoading: isPluginListLoading } = useInstalledPluginList()
  const { data: pluginList, isLoading: isPluginListLoading, isFetching, isLastPage, loadNextPage } = useInstalledPluginList()
  const { data: installedLatestVersion } = useInstalledLatestVersion(
    pluginList?.plugins
      .filter(plugin => plugin.source === PluginSource.marketplace)
@@ -64,10 +67,16 @@ const PluginsPanel = () => {
    />
  </div>
  {isPluginListLoading ? <Loading type='app' /> : (filteredList?.length ?? 0) > 0 ? (
    <div className='flex grow flex-wrap content-start items-start gap-2 self-stretch px-12'>
    <div className='flex grow flex-wrap content-start items-start justify-center gap-2 self-stretch px-12'>
      <div className='w-full'>
        <List pluginList={filteredList || []} />
      </div>
      {!isLastPage && !isFetching && (
        <Button onClick={loadNextPage}>
          {t('workflow.common.loadMore')}
        </Button>
      )}
      {isFetching && <div className='system-md-semibold text-text-secondary'>{t('appLog.detail.loading')}</div>}
    </div>
  ) : (
    <Empty />
@@ -325,6 +325,11 @@ export type InstalledPluginListResponse = {
  plugins: PluginDetail[]
}

export type InstalledPluginListWithTotalResponse = {
  plugins: PluginDetail[]
  total: number
}

export type InstalledLatestVersionResponse = {
  versions: {
    [plugin_id: string]: {
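This new response type is what the load-more button and the badge's `total` above rely on. A sketch of how a paginated hook might accumulate pages into this shape (the merging logic is an illustrative assumption, not the real `useInstalledPluginList`):

```ts
type PluginDetail = { plugin_id: string; name: string }

type InstalledPluginListWithTotalResponse = {
  plugins: PluginDetail[]
  total: number
}

// Illustrative accumulator: each fetched page is appended, while `total`
// comes from the server so the UI can tell whether more pages remain.
function mergePage(
  acc: InstalledPluginListWithTotalResponse,
  page: InstalledPluginListWithTotalResponse,
): InstalledPluginListWithTotalResponse {
  return { plugins: [...acc.plugins, ...page.plugins], total: page.total }
}

const isLastPage = (acc: InstalledPluginListWithTotalResponse) =>
  acc.plugins.length >= acc.total
```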
@@ -641,11 +641,13 @@ const TextGeneration: FC<IMainProps> = ({
      !isPC && resultExisted && 'rounded-b-2xl border-b-[0.5px] border-divider-regular',
    )}>
      <div className='system-2xs-medium-uppercase text-text-tertiary'>{t('share.chat.poweredBy')}</div>
      {systemFeatures.branding.enabled ? (
        <img src={systemFeatures.branding.login_page_logo} alt='logo' className='block h-5 w-auto' />
      ) : (
        <DifyLogo size='small' />
      )}
      {
        systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo
          ? <img src={systemFeatures.branding.workspace_logo} alt='logo' className='block h-5 w-auto' />
          : customConfig?.replace_webapp_logo
            ? <img src={`${customConfig?.replace_webapp_logo}`} alt='logo' className='block h-5 w-auto' />
            : <DifyLogo size='small' />
      }
    </div>
  )}
</div>
@@ -126,7 +126,7 @@ const nodeDefault: NodeDefault<AgentNodeType> = {
      }
    }
    // common params
    if (param.required && !payload.agent_parameters?.[param.name]?.value) {
    if (param.required && !(payload.agent_parameters?.[param.name]?.value || param.default)) {
      return {
        isValid: false,
        errorMessage: t('workflow.errorMsg.fieldRequired', { field: renderI18nObject(param.label, language) }),
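The validation tweak above stops flagging a required agent parameter as missing when the schema supplies a default value. A sketch of the rule with simplified types (the real node type is richer):

```ts
type AgentParam = { name: string; required: boolean; default?: unknown }
type AgentParamValues = Record<string, { value?: unknown } | undefined>

// A required field now passes validation if either the user set a value or
// the parameter declares a default the backend can fall back to. Note the
// truthiness check mirrors the source: a default of 0 or '' still counts as
// missing, the same caveat the original condition has.
const isMissing = (param: AgentParam, values: AgentParamValues) =>
  param.required && !(values[param.name]?.value || param.default)
```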
@@ -42,6 +42,12 @@ const useConfig = (id: string, payload: HttpNodeType) => {
        data: transformToBodyPayload(bodyData, [BodyType.formData, BodyType.xWwwFormUrlencoded].includes(newInputs.body.type)),
      }
    }
    else if (!bodyData) {
      newInputs.body = {
        ...newInputs.body,
        data: [],
      }
    }

    setInputs(newInputs)
    setIsDataReady(true)
@@ -151,7 +157,7 @@ const useConfig = (id: string, payload: HttpNodeType) => {
    inputs.url,
    inputs.headers,
    inputs.params,
    typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data.map(item => item.value).join(''),
    typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data?.map(item => item.value).join(''),
    fileVarInputs,
  ])
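Both HTTP-node changes guard the same edge case: a node whose body has no `data` yet. The first normalizes missing data to an empty array when inputs load; the second makes the dependency computation tolerate it with optional chaining. A reduced sketch:

```ts
type BodyItem = { value: string }
type HttpBody = { type: string; data?: string | BodyItem[] }

// On load: older drafts may lack body.data entirely; default it to [] so the
// rest of the editor can assume an array shape.
const normalizeBody = (body: HttpBody): HttpBody =>
  body.data === undefined ? { ...body, data: [] } : body

// In dependency computation: tolerate both the string form and a still-missing
// array without throwing.
const bodyKey = (body: HttpBody) =>
  typeof body.data === 'string' ? body.data : body.data?.map(item => item.value).join('')
```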
@@ -79,7 +79,7 @@ const Panel: FC<NodePanelProps<ToolNodeType>> = ({
        className='w-full'
        onClick={showSetAuthModal}
      >
        {t(`${i18nPrefix}.toAuthorize`)}
        {t(`${i18nPrefix}.authorize`)}
      </Button>
    </div>
  </>
@@ -105,7 +105,7 @@ const ChangePasswordForm = () => {
  </div>

  <div className="mx-auto mt-6 w-full">
    <div className="bg-white">
    <div>
      {/* Password */}
      <div className='mb-5'>
        <label htmlFor="password" className="system-md-semibold my-2 text-text-secondary">
@@ -10,7 +10,7 @@ export default function RoutePrefixHandle() {
  const addPrefixToImg = (e: HTMLImageElement) => {
    const url = new URL(e.src)
    const prefix = url.pathname.substr(0, basePath.length)
    if (prefix !== basePath) {
    if (prefix !== basePath && !url.href.startsWith('blob:') && !url.href.startsWith('data:')) {
      url.pathname = basePath + url.pathname
      e.src = url.toString()
    }
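The RoutePrefixHandle fix excludes `blob:` and `data:` image sources from prefixing: both schemes carry self-contained or in-memory content, so grafting `basePath` onto them would corrupt the URL. A reduced sketch of the check (the prefix value is an assumption):

```ts
const basePath = '/dify' // assumed deployment prefix

function addPrefixToImg(img: HTMLImageElement) {
  const url = new URL(img.src)
  const prefix = url.pathname.substring(0, basePath.length)
  // blob: and data: URLs are self-contained; only rewrite real http(s) paths
  // that are missing the deployment prefix.
  if (prefix !== basePath && !url.href.startsWith('blob:') && !url.href.startsWith('data:')) {
    url.pathname = basePath + url.pathname
    img.src = url.toString()
  }
}
```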
@@ -7,6 +7,7 @@ import { languages } from '@/i18n/language'
import type { Locale } from '@/i18n'
import I18n from '@/context/i18n'
import dynamic from 'next/dynamic'
import { useGlobalPublicStore } from '@/context/global-public-context'

// Avoid rendering the logo and theme selector on the server
const DifyLogo = dynamic(() => import('@/app/components/base/logo/dify-logo'), {
@@ -20,10 +21,17 @@ const ThemeSelector = dynamic(() => import('@/app/components/base/theme-selector

const Header = () => {
  const { locale, setLocaleOnClient } = useContext(I18n)
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)

  return (
    <div className='flex w-full items-center justify-between p-6'>
      <DifyLogo size='large' />
      {systemFeatures.branding.enabled && systemFeatures.branding.login_page_logo
        ? <img
          src={systemFeatures.branding.login_page_logo}
          className='block h-7 w-auto object-contain'
          alt='logo'
        />
        : <DifyLogo size='large' />}
      <div className='flex items-center gap-1'>
        <Select
          value={locale}
@@ -173,7 +173,7 @@ export const MAX_TOOLS_NUM = maxToolsNum

export const DEFAULT_AGENT_SETTING = {
  enabled: false,
  max_iteration: 5,
  max_iteration: 10,
  strategy: AgentStrategy.functionCall,
  tools: [],
}
@@ -295,7 +295,7 @@ else if (globalThis.document?.body?.getAttribute('data-public-loop-node-max-coun

export const LOOP_NODE_MAX_COUNT = loopNodeMaxCount

let maxIterationsNum = 5
let maxIterationsNum = 99

if (process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM && process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM !== '')
  maxIterationsNum = Number.parseInt(process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM)
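Both constant bumps sit behind the same override pattern used throughout this config file: a hard-coded fallback that `NEXT_PUBLIC_*` env vars, inlined at build time by Next.js, can replace. Sketch (the export name is an assumption):

```ts
// Build-time override pattern: Next.js inlines NEXT_PUBLIC_* values, so the
// fallback (now 99) only applies when the variable is unset or empty.
let maxIterationsNum = 99

if (process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM && process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM !== '')
  maxIterationsNum = Number.parseInt(process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM)

export const MAX_ITERATIONS_NUM = maxIterationsNum // export name assumed
```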
@@ -14,7 +14,6 @@ const translation = {
  },
  author: 'Von',
  auth: {
    unauthorized: 'Zur Autorisierung',
    authorized: 'Autorisiert',
    setup: 'Autorisierung einrichten, um zu nutzen',
    setupModalTitle: 'Autorisierung einrichten',
@@ -648,7 +648,6 @@ const translation = {
    'assignedVarsDescription': 'Zugewiesene Variablen müssen beschreibbare Variablen sein, z. B. Konversationsvariablen.',
  },
  tool: {
    toAuthorize: 'Autorisieren',
    inputVars: 'Eingabevariablen',
    outputVars: {
      text: 'durch das Tool generierter Inhalt',
Some files were not shown because too many files have changed in this diff.