mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/rag-2
commit 8c44151e6f
@@ -26,6 +26,7 @@ jobs:
      - name: ast-grep
        run: |
          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all

      - name: mdformat
        run: |
          uvx mdformat .
      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
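Note: the ast-grep step above mechanically rewrites the legacy SQLAlchemy `db.session.query(...).filter(...)` spelling to the 2.0-style `.where(...)`. A minimal sketch of the equivalence, using a hypothetical `User` model (`Query.where()` has been an alias of `Query.filter()` since SQLAlchemy 1.4):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class User(Base):  # hypothetical model, for illustration only
    __tablename__ = "users"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    legacy = session.query(User).filter(User.name == "alice").all()    # matched by the pattern
    rewritten = session.query(User).where(User.name == "alice").all()  # what it becomes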
@@ -1,4 +1,4 @@
-from typing import Any
+from typing import Any, Optional

 import flask_restful
 from flask_login import current_user
@@ -49,7 +49,7 @@ class BaseApiKeyListResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]

     resource_type: str | None = None
-    resource_model: Any = None
+    resource_model: Optional[Any] = None
     resource_id_field: str | None = None
     token_prefix: str | None = None
     max_keys = 10
@@ -102,7 +102,7 @@ class BaseApiKeyResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]

     resource_type: str | None = None
-    resource_model: Any = None
+    resource_model: Optional[Any] = None
     resource_id_field: str | None = None

     def delete(self, resource_id, api_key_id):
@@ -1,3 +1,4 @@
+import contextlib
 import json
 import os
 import time
@@ -178,7 +179,7 @@ def cloud_edition_billing_rate_limit_check(resource: str):
 def cloud_utm_record(view):
     @wraps(view)
     def decorated(*args, **kwargs):
-        try:
+        with contextlib.suppress(Exception):
             features = FeatureService.get_features(current_user.current_tenant_id)

             if features.billing.enabled:
@@ -187,8 +188,7 @@ def cloud_utm_record(view):
                 if utm_info:
                     utm_info_dict: dict = json.loads(utm_info)
                     OperationService.record_utm(current_user.current_tenant_id, utm_info_dict)
-        except Exception as e:
-            pass

         return view(*args, **kwargs)

     return decorated
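Note: this and many later hunks swap `try/except Exception: pass` for `contextlib.suppress`, which states the ignore-errors intent in one statement. A standalone sketch of the equivalence:

import contextlib

def risky() -> None:
    raise ValueError("boom")

# Before: swallow the error with an empty except block.
try:
    risky()
except ValueError:
    pass

# After: identical behavior, one statement.
with contextlib.suppress(ValueError):
    risky()

As with a try body, everything indented under `with suppress(...)` stops executing at the first raised error.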
@@ -512,7 +512,6 @@ class BaseAgentRunner(AppRunner):
         if not file_objs:
             return UserPromptMessage(content=message.query)
         prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-        prompt_message_contents.append(TextPromptMessageContent(data=message.query))
         for file in file_objs:
             prompt_message_contents.append(
                 file_manager.to_prompt_message_content(
@@ -520,4 +519,6 @@ class BaseAgentRunner(AppRunner):
                     image_detail_config=image_detail_config,
                 )
             )
+        prompt_message_contents.append(TextPromptMessageContent(data=message.query))
+
         return UserPromptMessage(content=prompt_message_contents)
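Note: the agent-runner hunks reorder multimodal content so the file parts come before the query text. A sketch with stand-in dataclasses (the real classes are `TextPromptMessageContent` and the file content types returned by `file_manager.to_prompt_message_content`):

from dataclasses import dataclass

@dataclass
class TextContent:
    data: str

@dataclass
class FileContent:
    url: str

def build_user_content(query: str, file_urls: list[str]) -> list:
    # New ordering: files first, then the user's query text.
    contents: list = [FileContent(url=u) for u in file_urls]
    contents.append(TextContent(data=query))
    return contents

print(build_user_content("describe these", ["a.png", "b.png"]))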
@@ -39,9 +39,6 @@ class CotChatAgentRunner(CotAgentRunner):
         Organize user query
         """
         if self.files:
-            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-            prompt_message_contents.append(TextPromptMessageContent(data=query))
-
             # get image detail config
             image_detail_config = (
                 self.application_generate_entity.file_upload_config.image_config.detail
@@ -52,6 +49,8 @@ class CotChatAgentRunner(CotAgentRunner):
                 else None
             )
             image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW
+
+            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
             for file in self.files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(
@@ -59,6 +58,7 @@ class CotChatAgentRunner(CotAgentRunner):
                         image_detail_config=image_detail_config,
                     )
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=query))

             prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
@@ -395,9 +395,6 @@ class FunctionCallAgentRunner(BaseAgentRunner):
         Organize user query
         """
         if self.files:
-            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-            prompt_message_contents.append(TextPromptMessageContent(data=query))
-
             # get image detail config
             image_detail_config = (
                 self.application_generate_entity.file_upload_config.image_config.detail
@@ -408,6 +405,8 @@ class FunctionCallAgentRunner(BaseAgentRunner):
                 else None
             )
             image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW
+
+            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
             for file in self.files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(
@@ -415,6 +414,7 @@ class FunctionCallAgentRunner(BaseAgentRunner):
                         image_detail_config=image_detail_config,
                     )
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=query))

             prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
@@ -178,7 +178,7 @@ class ModelConfig(BaseModel):
     provider: str
     name: str
     mode: LLMMode
-    completion_params: dict[str, Any] = {}
+    completion_params: dict[str, Any] = Field(default_factory=dict)


 class Condition(BaseModel):
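Note: this merge converts many `dict = {}` field defaults to `Field(default_factory=dict)`. Pydantic already copies mutable defaults per instance, so the old spelling was not the classic shared-dict bug there; the factory form makes the intent explicit and mirrors what plain classes and dataclasses require. A runnable sketch (assuming pydantic v2 is installed):

from pydantic import BaseModel, Field

class PlainConfig:
    params: dict = {}  # class attribute: one dict shared by every instance

class ModelConfig(BaseModel):
    completion_params: dict = Field(default_factory=dict)  # fresh dict each time

a, b = PlainConfig(), PlainConfig()
a.params["temperature"] = 0.7
print(b.params)  # {'temperature': 0.7}: shared state in the plain class

m, n = ModelConfig(), ModelConfig()
m.completion_params["temperature"] = 0.7
print(n.completion_params)  # {}: independent per instance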
@@ -610,7 +610,7 @@ class QueueErrorEvent(AppQueueEvent):
     """

     event: QueueEvent = QueueEvent.ERROR
-    error: Any = None
+    error: Optional[Any] = None


 class QueuePingEvent(AppQueueEvent):
@@ -142,7 +142,7 @@ class MessageEndStreamResponse(StreamResponse):

     event: StreamEvent = StreamEvent.MESSAGE_END
     id: str
-    metadata: dict = {}
+    metadata: dict = Field(default_factory=dict)
     files: Optional[Sequence[Mapping[str, Any]]] = None


@@ -261,7 +261,7 @@ class NodeStartStreamResponse(StreamResponse):
     predecessor_node_id: Optional[str] = None
     inputs: Optional[Mapping[str, Any]] = None
     created_at: int
-    extras: dict = {}
+    extras: dict = Field(default_factory=dict)
     parallel_id: Optional[str] = None
     parallel_start_node_id: Optional[str] = None
     parent_parallel_id: Optional[str] = None
@@ -503,7 +503,7 @@ class IterationNodeStartStreamResponse(StreamResponse):
     node_type: str
     title: str
     created_at: int
-    extras: dict = {}
+    extras: dict = Field(default_factory=dict)
     metadata: Mapping = {}
     inputs: Mapping = {}
     parallel_id: Optional[str] = None
@@ -531,7 +531,7 @@ class IterationNodeNextStreamResponse(StreamResponse):
     index: int
     created_at: int
     pre_iteration_output: Optional[Any] = None
-    extras: dict = {}
+    extras: dict = Field(default_factory=dict)
     parallel_id: Optional[str] = None
     parallel_start_node_id: Optional[str] = None
     parallel_mode_run_id: Optional[str] = None
@@ -590,7 +590,7 @@ class LoopNodeStartStreamResponse(StreamResponse):
     node_type: str
     title: str
     created_at: int
-    extras: dict = {}
+    extras: dict = Field(default_factory=dict)
     metadata: Mapping = {}
     inputs: Mapping = {}
     parallel_id: Optional[str] = None
@@ -618,7 +618,7 @@ class LoopNodeNextStreamResponse(StreamResponse):
     index: int
     created_at: int
     pre_loop_output: Optional[Any] = None
-    extras: dict = {}
+    extras: dict = Field(default_factory=dict)
     parallel_id: Optional[str] = None
     parallel_start_node_id: Optional[str] = None
     parallel_mode_run_id: Optional[str] = None
@@ -764,7 +764,7 @@ class ChatbotAppBlockingResponse(AppBlockingResponse):
         conversation_id: str
         message_id: str
         answer: str
-        metadata: dict = {}
+        metadata: dict = Field(default_factory=dict)
         created_at: int

     data: Data
@@ -784,7 +784,7 @@ class CompletionAppBlockingResponse(AppBlockingResponse):
         mode: str
         message_id: str
         answer: str
-        metadata: dict = {}
+        metadata: dict = Field(default_factory=dict)
         created_at: int

     data: Data
@@ -52,7 +52,8 @@ class BasedGenerateTaskPipeline:
         elif isinstance(e, InvokeError | ValueError):
             err = e
         else:
-            err = Exception(e.description if getattr(e, "description", None) is not None else str(e))
+            description = getattr(e, "description", None)
+            err = Exception(description if description is not None else str(e))

         if not message_id or not session:
             return err
@@ -17,7 +17,7 @@ class ExtensionModule(enum.Enum):


 class ModuleExtension(BaseModel):
-    extension_class: Any = None
+    extension_class: Optional[Any] = None
     name: str
     label: Optional[dict] = None
     form_schema: Optional[list] = None
@@ -38,6 +38,7 @@ class Extension:

     def extension_class(self, module: ExtensionModule, extension_name: str) -> type:
         module_extension = self.module_extension(module, extension_name)
+        assert module_extension.extension_class is not None
         t: type = module_extension.extension_class
         return t
@@ -1,3 +1,4 @@
+import contextlib
 import re
 from collections.abc import Mapping
 from typing import Any, Optional
@@ -97,10 +98,8 @@ def parse_traceparent_header(traceparent: str) -> Optional[str]:
     Reference:
         W3C Trace Context Specification: https://www.w3.org/TR/trace-context/
     """
-    try:
+    with contextlib.suppress(Exception):
         parts = traceparent.split("-")
         if len(parts) == 4 and len(parts[1]) == 32:
            return parts[1]
-    except Exception:
-        pass
    return None
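For reference, a W3C traceparent header has four dash-separated fields (version-trace_id-parent_id-flags), and the helper above extracts the 32-hex-digit trace id. A standalone sketch:

def parse_traceparent_header(traceparent: str) -> str | None:
    # "version-trace_id-parent_id-flags", e.g. "00-<32 hex>-<16 hex>-01"
    parts = traceparent.split("-")
    if len(parts) == 4 and len(parts[1]) == 32:
        return parts[1]
    return None

print(parse_traceparent_header("00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"))
# 4bf92f3577b34da6a3ce929d0e0e4736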
@@ -9,7 +9,6 @@ import uuid
 from typing import Any, Optional, cast

-from flask import current_app
 from flask_login import current_user
 from sqlalchemy.orm.exc import ObjectDeletedError

 from configs import dify_config
@@ -295,7 +294,7 @@ class IndexingRunner:
                 text_docs,
                 embedding_model_instance=embedding_model_instance,
                 process_rule=processing_rule.to_dict(),
-                tenant_id=current_user.current_tenant_id,
+                tenant_id=tenant_id,
                 doc_language=doc_language,
                 preview=True,
             )
@@ -5,7 +5,7 @@ import os
 import secrets
 import urllib.parse
 from typing import Optional
-from urllib.parse import urljoin
+from urllib.parse import urljoin, urlparse

 import httpx
 from pydantic import BaseModel, ValidationError
@@ -99,9 +99,37 @@ def handle_callback(state_key: str, authorization_code: str) -> OAuthCallbackState:
     return full_state_data


+def check_support_resource_discovery(server_url: str) -> tuple[bool, str]:
+    """Check if the server supports OAuth 2.0 Resource Discovery."""
+    b_scheme, b_netloc, b_path, b_params, b_query, b_fragment = urlparse(server_url, "", True)
+    url_for_resource_discovery = f"{b_scheme}://{b_netloc}/.well-known/oauth-protected-resource{b_path}"
+    if b_query:
+        url_for_resource_discovery += f"?{b_query}"
+    if b_fragment:
+        url_for_resource_discovery += f"#{b_fragment}"
+    try:
+        headers = {"MCP-Protocol-Version": LATEST_PROTOCOL_VERSION, "User-Agent": "Dify"}
+        response = httpx.get(url_for_resource_discovery, headers=headers)
+        if 200 <= response.status_code < 300:
+            body = response.json()
+            if "authorization_server_url" in body:
+                return True, body["authorization_server_url"][0]
+            else:
+                return False, ""
+        return False, ""
+    except httpx.RequestError as e:
+        # Resource discovery not supported; fall back to well-known OAuth metadata
+        return False, ""
+
+
 def discover_oauth_metadata(server_url: str, protocol_version: Optional[str] = None) -> Optional[OAuthMetadata]:
     """Looks up RFC 8414 OAuth 2.0 Authorization Server Metadata."""
-    url = urljoin(server_url, "/.well-known/oauth-authorization-server")
+    # First check if the server supports OAuth 2.0 Resource Discovery
+    support_resource_discovery, oauth_discovery_url = check_support_resource_discovery(server_url)
+    if support_resource_discovery:
+        url = oauth_discovery_url
+    else:
+        url = urljoin(server_url, "/.well-known/oauth-authorization-server")

     try:
         headers = {"MCP-Protocol-Version": protocol_version or LATEST_PROTOCOL_VERSION}
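Note: the new helper builds the protected-resource metadata URL by inserting the well-known segment between the host and the original path, preserving query and fragment. A sketch of that construction (hypothetical host):

from urllib.parse import urlparse

def resource_discovery_url(server_url: str) -> str:
    # Insert the well-known segment between host and path,
    # keeping query and fragment, as the new helper does.
    p = urlparse(server_url, "", True)
    url = f"{p.scheme}://{p.netloc}/.well-known/oauth-protected-resource{p.path}"
    if p.query:
        url += f"?{p.query}"
    if p.fragment:
        url += f"#{p.fragment}"
    return url

print(resource_discovery_url("https://mcp.example.com/api/v1"))
# https://mcp.example.com/.well-known/oauth-protected-resource/api/v1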
@@ -4,7 +4,7 @@ from collections.abc import Callable
 from concurrent.futures import Future, ThreadPoolExecutor, TimeoutError
 from datetime import timedelta
 from types import TracebackType
-from typing import Any, Generic, Self, TypeVar
+from typing import Any, Generic, Optional, Self, TypeVar

 from httpx import HTTPStatusError
 from pydantic import BaseModel
@@ -209,7 +209,7 @@ class BaseSession(
         request: SendRequestT,
         result_type: type[ReceiveResultT],
         request_read_timeout_seconds: timedelta | None = None,
-        metadata: MessageMetadata = None,
+        metadata: Optional[MessageMetadata] = None,
     ) -> ReceiveResultT:
         """
         Sends a request and waits for a response. Raises an McpError if the
@@ -1173,7 +1173,7 @@ class SessionMessage:
     """A message with specific metadata for transport-specific features."""

     message: JSONRPCMessage
-    metadata: MessageMetadata = None
+    metadata: Optional[MessageMetadata] = None


 class OAuthClientMetadata(BaseModel):
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from collections.abc import Mapping, Sequence
 from decimal import Decimal
 from enum import StrEnum
@@ -54,7 +56,7 @@ class LLMUsage(ModelUsage):
         )

     @classmethod
-    def from_metadata(cls, metadata: dict) -> "LLMUsage":
+    def from_metadata(cls, metadata: dict) -> LLMUsage:
         """
         Create LLMUsage instance from metadata dictionary with default values.

@@ -84,7 +86,7 @@ class LLMUsage(ModelUsage):
             latency=metadata.get("latency", 0.0),
         )

-    def plus(self, other: "LLMUsage") -> "LLMUsage":
+    def plus(self, other: LLMUsage) -> LLMUsage:
         """
         Add two LLMUsage instances together.

@@ -109,7 +111,7 @@ class LLMUsage(ModelUsage):
             latency=self.latency + other.latency,
         )

-    def __add__(self, other: "LLMUsage") -> "LLMUsage":
+    def __add__(self, other: LLMUsage) -> LLMUsage:
         """
         Overload the + operator to add two LLMUsage instances.
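Note: with `from __future__ import annotations`, annotations are stored as strings and evaluated lazily (PEP 563), so a class can reference itself without quoting, which is why the quoted "LLMUsage" hints disappear. A minimal sketch:

from __future__ import annotations


class Usage:
    def __init__(self, tokens: int) -> None:
        self.tokens = tokens

    # No quotes needed around "Usage", even though the class
    # is not fully defined while its body is being executed.
    def plus(self, other: Usage) -> Usage:
        return Usage(self.tokens + other.tokens)


print(Usage(1).plus(Usage(2)).tokens)  # 3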
@@ -1,10 +1,10 @@
 import logging
 from threading import Lock
-from typing import Any
+from typing import Any, Optional

 logger = logging.getLogger(__name__)

-_tokenizer: Any = None
+_tokenizer: Optional[Any] = None
 _lock = Lock()
@@ -1,6 +1,6 @@
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, Field

 from core.extension.api_based_extension_requestor import APIBasedExtensionPoint, APIBasedExtensionRequestor
 from core.helper.encrypter import decrypt_token
@@ -11,7 +11,7 @@ from models.api_based_extension import APIBasedExtension

 class ModerationInputParams(BaseModel):
     app_id: str = ""
-    inputs: dict = {}
+    inputs: dict = Field(default_factory=dict)
     query: str = ""
@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
 from enum import Enum
 from typing import Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, Field

 from core.extension.extensible import Extensible, ExtensionModule

@@ -16,7 +16,7 @@ class ModerationInputsResult(BaseModel):
     flagged: bool = False
     action: ModerationAction
     preset_response: str = ""
-    inputs: dict = {}
+    inputs: dict = Field(default_factory=dict)
     query: str = ""
@@ -125,11 +125,11 @@ class AdvancedPromptTransform(PromptTransform):

         if files:
             prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-            prompt_message_contents.append(TextPromptMessageContent(data=prompt))
             for file in files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=prompt))

             prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
@@ -196,16 +196,17 @@ class AdvancedPromptTransform(PromptTransform):

         query = parser.format(prompt_inputs)

+        prompt_message_contents: list[PromptMessageContentUnionTypes] = []
         if memory and memory_config:
             prompt_messages = self._append_chat_histories(memory, memory_config, prompt_messages, model_config)

         if files and query is not None:
             prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-            prompt_message_contents.append(TextPromptMessageContent(data=query))
             for file in files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=query))

             prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
             prompt_messages.append(UserPromptMessage(content=query))
@@ -215,27 +216,27 @@ class AdvancedPromptTransform(PromptTransform):
             last_message = prompt_messages[-1] if prompt_messages else None
             if last_message and last_message.role == PromptMessageRole.USER:
                 # get last user message content and add files
-                prompt_message_contents = [TextPromptMessageContent(data=cast(str, last_message.content))]
                 for file in files:
                     prompt_message_contents.append(
                         file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                     )
+                prompt_message_contents.append(TextPromptMessageContent(data=cast(str, last_message.content)))

                 last_message.content = prompt_message_contents
             else:
-                prompt_message_contents = [TextPromptMessageContent(data="")]  # not for query
                 for file in files:
                     prompt_message_contents.append(
                         file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                     )
+                prompt_message_contents.append(TextPromptMessageContent(data=""))

                 prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         else:
-            prompt_message_contents = [TextPromptMessageContent(data=query)]
             for file in files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=query))

             prompt_messages.append(UserPromptMessage(content=prompt_message_contents))
         elif query:
@@ -265,11 +265,11 @@ class SimplePromptTransform(PromptTransform):
     ) -> UserPromptMessage:
         if files:
             prompt_message_contents: list[PromptMessageContentUnionTypes] = []
-            prompt_message_contents.append(TextPromptMessageContent(data=prompt))
             for file in files:
                 prompt_message_contents.append(
                     file_manager.to_prompt_message_content(file, image_detail_config=image_detail_config)
                 )
+            prompt_message_contents.append(TextPromptMessageContent(data=prompt))

             prompt_message = UserPromptMessage(content=prompt_message_contents)
         else:
@@ -1,3 +1,4 @@
+import contextlib
 import json
 from collections import defaultdict
 from json import JSONDecodeError
@@ -624,14 +625,12 @@ class ProviderManager:

         for variable in provider_credential_secret_variables:
             if variable in provider_credentials:
-                try:
+                with contextlib.suppress(ValueError):
                     provider_credentials[variable] = encrypter.decrypt_token_with_decoding(
                         provider_credentials.get(variable) or "",  # type: ignore
                         self.decoding_rsa_key,
                         self.decoding_cipher_rsa,
                     )
-                except ValueError:
-                    pass

         # cache provider credentials
         provider_credentials_cache.set(credentials=provider_credentials)
@@ -672,14 +671,12 @@ class ProviderManager:

         for variable in model_credential_secret_variables:
             if variable in provider_model_credentials:
-                try:
+                with contextlib.suppress(ValueError):
                     provider_model_credentials[variable] = encrypter.decrypt_token_with_decoding(
                         provider_model_credentials.get(variable),
                         self.decoding_rsa_key,
                         self.decoding_cipher_rsa,
                     )
-                except ValueError:
-                    pass

         # cache provider model credentials
         provider_model_credentials_cache.set(credentials=provider_model_credentials)
@@ -105,9 +105,11 @@ class AnalyticdbVectorBySql:
                 conn.close()
         self.pool = self._create_connection_pool()
         with self._get_cursor() as cur:
+            conn = cur.connection
             try:
                 cur.execute("CREATE EXTENSION IF NOT EXISTS zhparser;")
             except Exception as e:
+                conn.rollback()
                 raise RuntimeError(
                     "Failed to create zhparser extension. Please ensure it is available in your AnalyticDB."
                 ) from e
@@ -115,6 +117,7 @@ class AnalyticdbVectorBySql:
                 cur.execute("CREATE TEXT SEARCH CONFIGURATION zh_cn (PARSER = zhparser)")
                 cur.execute("ALTER TEXT SEARCH CONFIGURATION zh_cn ADD MAPPING FOR n,v,a,i,e,l,x WITH simple")
             except Exception as e:
+                conn.rollback()
                 if "already exists" not in str(e):
                     raise e
             cur.execute(
@@ -1,3 +1,4 @@
+import contextlib
 import json
 import logging
 import queue
@@ -214,10 +215,8 @@ class ClickzettaConnectionPool:
                     return connection
                 else:
                     # Connection expired or invalid, close it
-                    try:
+                    with contextlib.suppress(Exception):
                         connection.close()
-                    except Exception:
-                        pass

             # No valid connection found, create new one
             return self._create_connection(config)
@@ -228,10 +227,8 @@ class ClickzettaConnectionPool:

         if config_key not in self._pool_locks:
             # Pool was cleaned up, just close the connection
-            try:
+            with contextlib.suppress(Exception):
                 connection.close()
-            except Exception:
-                pass
             return

         with self._pool_locks[config_key]:
@@ -243,10 +240,8 @@ class ClickzettaConnectionPool:
                 logger.debug("Returned ClickZetta connection to pool")
             else:
                 # Pool full or connection invalid, close it
-                try:
+                with contextlib.suppress(Exception):
                     connection.close()
-                except Exception:
-                    pass

     def _cleanup_expired_connections(self) -> None:
         """Clean up expired connections from all pools."""
@@ -265,10 +260,8 @@ class ClickzettaConnectionPool:
                 if current_time - last_used < self._connection_timeout:
                     valid_connections.append((connection, last_used))
                 else:
-                    try:
+                    with contextlib.suppress(Exception):
                         connection.close()
-                    except Exception:
-                        pass

             self._pools[config_key] = valid_connections

@@ -299,10 +292,8 @@ class ClickzettaConnectionPool:
             with self._pool_locks[config_key]:
                 pool = self._pools[config_key]
                 for connection, _ in pool:
-                    try:
+                    with contextlib.suppress(Exception):
                         connection.close()
-                    except Exception:
-                        pass
                 pool.clear()
@@ -1,5 +1,6 @@
 """Abstract interface for document loader implementations."""

+import contextlib
 from collections.abc import Iterator
 from typing import Optional, cast

@@ -25,12 +26,10 @@ class PdfExtractor(BaseExtractor):
     def extract(self) -> list[Document]:
         plaintext_file_exists = False
         if self._file_cache_key:
-            try:
+            with contextlib.suppress(FileNotFoundError):
                 text = cast(bytes, storage.load(self._file_cache_key)).decode("utf-8")
                 plaintext_file_exists = True
                 return [Document(page_content=text)]
-            except FileNotFoundError:
-                pass
         documents = list(self.load())
         text_list = []
         for document in documents:
@@ -1,4 +1,5 @@
 import base64
+import contextlib
 import logging
 from typing import Optional

@@ -33,7 +34,7 @@ class UnstructuredEmailExtractor(BaseExtractor):
         elements = partition_email(filename=self._file_path)

         # noinspection PyBroadException
-        try:
+        with contextlib.suppress(Exception):
             for element in elements:
                 element_text = element.text.strip()

@@ -43,8 +44,6 @@ class UnstructuredEmailExtractor(BaseExtractor):
                     element_decode = base64.b64decode(element_text)
                     soup = BeautifulSoup(element_decode.decode("utf-8"), "html.parser")
                     element.text = soup.get_text()
-        except Exception:
-            pass

         from unstructured.chunking.title import chunk_by_title
@@ -1,6 +1,6 @@
 from collections.abc import Generator
 from datetime import datetime
-from typing import Any
+from typing import Any, Optional

 from core.rag.extractor.watercrawl.client import WaterCrawlAPIClient

@@ -9,7 +9,7 @@ class WaterCrawlProvider:
     def __init__(self, api_key, base_url: str | None = None):
         self.client = WaterCrawlAPIClient(api_key, base_url)

-    def crawl_url(self, url, options: dict | Any = None) -> dict:
+    def crawl_url(self, url, options: Optional[dict | Any] = None) -> dict:
         options = options or {}
         spider_options = {
             "max_depth": 1,
@@ -2,7 +2,7 @@ from abc import ABC, abstractmethod
 from collections.abc import Sequence
 from typing import Any, Optional

-from pydantic import BaseModel
+from pydantic import BaseModel, Field


 class ChildDocument(BaseModel):
@@ -15,7 +15,7 @@ class ChildDocument(BaseModel):
     """Arbitrary metadata about the page content (e.g., source, relationships to other
     documents, etc.).
     """
-    metadata: dict = {}
+    metadata: dict = Field(default_factory=dict)


 class Document(BaseModel):
@@ -28,7 +28,7 @@ class Document(BaseModel):
     """Arbitrary metadata about the page content (e.g., source, relationships to other
     documents, etc.).
     """
-    metadata: dict = {}
+    metadata: dict = Field(default_factory=dict)

     provider: Optional[str] = "dify"
@@ -1012,7 +1012,7 @@ class DatasetRetrieval:
     def _process_metadata_filter_func(
         self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
     ):
-        if value is None:
+        if value is None and condition not in ("empty", "not empty"):
             return

         key = f"{metadata_name}_{sequence}"
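Note: the guard change matters because "empty"/"not empty" conditions legitimately arrive with value=None, and the old guard silently dropped them. A simplified, hypothetical sketch of the behavior:

def add_filter(condition: str, metadata_name: str, value, filters: list) -> None:
    if value is None and condition not in ("empty", "not empty"):
        return  # a value-comparison without a value is meaningless
    if condition == "empty":
        filters.append(f"{metadata_name} IS NULL")
    elif condition == "not empty":
        filters.append(f"{metadata_name} IS NOT NULL")
    else:
        filters.append(f"{metadata_name} {condition} {value!r}")

filters: list = []
add_filter("empty", "author", None, filters)  # now reaches the filter list
add_filter("==", "author", None, filters)     # still skipped
print(filters)  # ['author IS NULL']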
@@ -1,4 +1,5 @@
 import base64
+import contextlib
 import enum
 from collections.abc import Mapping
 from enum import Enum
@@ -227,10 +228,8 @@ class ToolInvokeMessage(BaseModel):
     @classmethod
     def decode_blob_message(cls, v):
         if isinstance(v, dict) and "blob" in v:
-            try:
+            with contextlib.suppress(Exception):
                 v["blob"] = base64.b64decode(v["blob"])
-            except Exception:
-                pass
         return v

     @field_serializer("message")
@@ -1,3 +1,4 @@
+import contextlib
 import json
 from collections.abc import Generator, Iterable
 from copy import deepcopy
@@ -69,10 +70,8 @@ class ToolEngine:
             if parameters and len(parameters) == 1:
                 tool_parameters = {parameters[0].name: tool_parameters}
             else:
-                try:
+                with contextlib.suppress(Exception):
                     tool_parameters = json.loads(tool_parameters)
-                except Exception:
-                    pass
                 if not isinstance(tool_parameters, dict):
                     raise ValueError(f"tool_parameters should be a dict, but got a string: {tool_parameters}")

@@ -270,14 +269,12 @@ class ToolEngine:
             if response.meta.get("mime_type"):
                 mimetype = response.meta.get("mime_type")
             else:
-                try:
+                with contextlib.suppress(Exception):
                     url = URL(cast(ToolInvokeMessage.TextMessage, response.message).text)
                     extension = url.suffix
                     guess_type_result, _ = guess_type(f"a{extension}")
                     if guess_type_result:
                         mimetype = guess_type_result
-                except Exception:
-                    pass

                 if not mimetype:
                     mimetype = "image/jpeg"
@@ -1,3 +1,4 @@
+import contextlib
 from copy import deepcopy
 from typing import Any

@@ -137,11 +138,9 @@ class ToolParameterConfigurationManager:
                 and parameter.type == ToolParameter.ToolParameterType.SECRET_INPUT
             ):
                 if parameter.name in parameters:
-                    try:
-                        has_secret_input = True
+                    has_secret_input = True
+                    with contextlib.suppress(Exception):
                         parameters[parameter.name] = encrypter.decrypt_token(self.tenant_id, parameters[parameter.name])
-                    except Exception:
-                        pass

         if has_secret_input:
             cache.set(parameters)
@@ -1,3 +1,4 @@
+import contextlib
 from copy import deepcopy
 from typing import Any, Optional, Protocol

@@ -111,14 +112,12 @@ class ProviderConfigEncrypter:
         for field_name, field in fields.items():
             if field.type == BasicProviderConfig.Type.SECRET_INPUT:
                 if field_name in data:
-                    try:
+                    with contextlib.suppress(Exception):
                         # if the value is None or empty string, skip decrypt
                         if not data[field_name]:
                             continue

                         data[field_name] = encrypter.decrypt_token(self.tenant_id, data[field_name])
-                    except Exception:
-                        pass

         self.provider_config_cache.set(data)
         return data
@@ -80,7 +80,7 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
     else:
         content = response.text

-    article = extract_using_readability(content)
+    article = extract_using_readabilipy(content)

     if not article.text:
         return ""
@@ -101,7 +101,7 @@ class Article:
     text: Sequence[dict]


-def extract_using_readability(html: str):
+def extract_using_readabilipy(html: str):
     json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True)
     article = Article(
         title=json_article.get("title") or "",
@@ -126,7 +126,7 @@ class SegmentType(StrEnum):
         """
         if self.is_array_type():
             return self._validate_array(value, array_validation)
-        elif self == SegmentType.NUMBER:
+        elif self in [SegmentType.INTEGER, SegmentType.FLOAT, SegmentType.NUMBER]:
             return isinstance(value, (int, float))
         elif self == SegmentType.STRING:
             return isinstance(value, str)
@@ -166,7 +166,6 @@ _ARRAY_TYPES = frozenset(
     ]
 )

-
 _NUMERICAL_TYPES = frozenset(
     [
         SegmentType.NUMBER,
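Note: the widened check matters because INTEGER and FLOAT segments previously fell through to the later branches. A self-contained sketch of the membership test (simplified enum, hypothetical values):

from enum import StrEnum


class SegmentType(StrEnum):
    INTEGER = "integer"
    FLOAT = "float"
    NUMBER = "number"
    STRING = "string"


def validate_value(seg: SegmentType, value) -> bool:
    # Old code tested only `seg == SegmentType.NUMBER`, so INTEGER and
    # FLOAT never took the numeric branch.
    if seg in (SegmentType.INTEGER, SegmentType.FLOAT, SegmentType.NUMBER):
        return isinstance(value, (int, float))
    return isinstance(value, str)


print(validate_value(SegmentType.INTEGER, 3))   # True
print(validate_value(SegmentType.STRING, "x"))  # True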
@@ -22,7 +22,7 @@ class GraphRuntimeState(BaseModel):
     #
     # Note: Since the type of this field is `dict[str, Any]`, its values may not remain consistent
     # after a serialization and deserialization round trip.
-    outputs: dict[str, Any] = {}
+    outputs: dict[str, Any] = Field(default_factory=dict)

     node_run_steps: int = 0
     """node run steps"""
@@ -8,7 +8,7 @@ from typing import TYPE_CHECKING, Any, Optional, cast

 from sqlalchemy import Float, and_, func, or_, text
 from sqlalchemy import cast as sqlalchemy_cast
-from sqlalchemy.orm import Session
+from sqlalchemy.orm import sessionmaker

 from core.app.app_config.entities import DatasetRetrieveConfigEntity
 from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
@@ -175,7 +175,7 @@ class KnowledgeRetrievalNode(BaseNode):
             redis_client.zremrangebyscore(key, 0, current_time - 60000)
             request_count = redis_client.zcard(key)
             if request_count > knowledge_rate_limit.limit:
-                with Session(db.engine) as session:
+                with sessionmaker(db.engine).begin() as session:
                     # add ratelimit record
                     rate_limit_log = RateLimitLog(
                         tenant_id=self.tenant_id,
@@ -183,7 +183,6 @@ class KnowledgeRetrievalNode(BaseNode):
                         operation="knowledge",
                     )
                     session.add(rate_limit_log)
-                    session.commit()
                 return NodeRunResult(
                     status=WorkflowNodeExecutionStatus.FAILED,
                     inputs=variables,
@@ -389,6 +388,15 @@ class KnowledgeRetrievalNode(BaseNode):
                     "segment_id": segment.id,
                     "retriever_from": "workflow",
                     "score": record.score or 0.0,
+                    "child_chunks": [
+                        {
+                            "id": str(getattr(chunk, "id", "")),
+                            "content": str(getattr(chunk, "content", "")),
+                            "position": int(getattr(chunk, "position", 0)),
+                            "score": float(getattr(chunk, "score", 0.0)),
+                        }
+                        for chunk in (record.child_chunks or [])
+                    ],
                     "segment_hit_count": segment.hit_count,
                     "segment_word_count": segment.word_count,
                     "segment_position": segment.position,
@@ -572,7 +580,7 @@ class KnowledgeRetrievalNode(BaseNode):
     def _process_metadata_filter_func(
         self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
     ):
-        if value is None:
+        if value is None and condition not in ("empty", "not empty"):
             return

         key = f"{metadata_name}_{sequence}"
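Note: `sessionmaker(engine).begin()` opens a session plus a transaction that commits on success and rolls back on error, which is why the explicit `session.commit()` disappears above. A minimal sketch against an in-memory SQLite engine (hypothetical setup):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite:///:memory:")

# The context manager commits automatically when the block exits
# without an exception, and rolls back otherwise.
with sessionmaker(engine).begin() as session:
    pass  # session.add(obj) here would be committed on exit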
@@ -13,7 +13,7 @@ class ModelConfig(BaseModel):
     provider: str
     name: str
     mode: LLMMode
-    completion_params: dict[str, Any] = {}
+    completion_params: dict[str, Any] = Field(default_factory=dict)


 class ContextConfig(BaseModel):
@@ -313,30 +313,31 @@ class LoopNode(BaseNode):
                 and event.node_type == NodeType.LOOP_END
                 and not isinstance(event, NodeRunStreamChunkEvent)
             ):
                 check_break_result = True
                 # Check if variables in break conditions exist and process conditions
                 # Allow loop internal variables to be used in break conditions
                 available_conditions = []
                 for condition in break_conditions:
                     variable = self.graph_runtime_state.variable_pool.get(condition.variable_selector)
                     if variable:
                         available_conditions.append(condition)

                 # Process conditions if at least one variable is available
                 if available_conditions:
                     input_conditions, group_result, check_break_result = condition_processor.process_conditions(
                         variable_pool=self.graph_runtime_state.variable_pool,
                         conditions=available_conditions,
                         operator=logical_operator,
                     )
                     if check_break_result:
                         break
                 else:
                     check_break_result = True
                 yield self._handle_event_metadata(event=event, iter_run_index=current_index)
                 break

             if isinstance(event, NodeRunSucceededEvent):
                 yield self._handle_event_metadata(event=event, iter_run_index=current_index)

                 # Check if all variables in break conditions exist
                 exists_variable = False
                 for condition in break_conditions:
                     if not self.graph_runtime_state.variable_pool.get(condition.variable_selector):
                         exists_variable = False
                         break
                 else:
                     exists_variable = True
                 if exists_variable:
                     input_conditions, group_result, check_break_result = condition_processor.process_conditions(
                         variable_pool=self.graph_runtime_state.variable_pool,
                         conditions=break_conditions,
                         operator=logical_operator,
                     )
                     if check_break_result:
                         break

             elif isinstance(event, BaseGraphEvent):
                 if isinstance(event, GraphRunFailedEvent):
                     # Loop run failed
@@ -1,3 +1,4 @@
+import contextlib
 import json
 import logging
 import uuid
@@ -666,10 +667,8 @@ class ParameterExtractorNode(BaseNode):
             if result[idx] == "{" or result[idx] == "[":
                 json_str = extract_json(result[idx:])
                 if json_str:
-                    try:
+                    with contextlib.suppress(Exception):
                         return cast(dict, json.loads(json_str))
-                    except Exception:
-                        pass
         logger.info("extra error: %s", result)
         return None

@@ -686,10 +685,9 @@ class ParameterExtractorNode(BaseNode):
             if result[idx] == "{" or result[idx] == "[":
                 json_str = extract_json(result[idx:])
                 if json_str:
-                    try:
+                    with contextlib.suppress(Exception):
                         return cast(dict, json.loads(json_str))
-                    except Exception:
-                        pass

         logger.info("extra error: %s", result)
         return None
@@ -1,3 +1,4 @@
+import contextlib
 import logging
 import time

@@ -38,12 +39,11 @@ def handle(sender, **kwargs):
     db.session.add(document)
     db.session.commit()

-    try:
-        indexing_runner = IndexingRunner()
-        indexing_runner.run(documents)
-        end_at = time.perf_counter()
-        logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
-    except DocumentIsPausedError as ex:
-        logging.info(click.style(str(ex), fg="yellow"))
-    except Exception:
-        pass
+    with contextlib.suppress(Exception):
+        try:
+            indexing_runner = IndexingRunner()
+            indexing_runner.run(documents)
+            end_at = time.perf_counter()
+            logging.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
+        except DocumentIsPausedError as ex:
+            logging.info(click.style(str(ex), fg="yellow"))
@@ -1,4 +1,5 @@
 import atexit
+import contextlib
 import logging
 import os
 import platform
@@ -106,7 +107,7 @@ def init_app(app: DifyApp):
         """Custom logging handler that creates spans for logging.exception() calls"""

         def emit(self, record: logging.LogRecord):
-            try:
+            with contextlib.suppress(Exception):
                 if record.exc_info:
                     tracer = get_tracer_provider().get_tracer("dify.exception.logging")
                     with tracer.start_as_current_span(
@@ -126,9 +127,6 @@ def init_app(app: DifyApp):
                         if record.exc_info[0]:
                             span.set_attribute("exception.type", record.exc_info[0].__name__)

-            except Exception:
-                pass
-
     from opentelemetry import trace
     from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter as GRPCMetricExporter
     from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter as GRPCSpanExporter
@@ -3,7 +3,7 @@ import logging
 import ssl
 from collections.abc import Callable
 from datetime import timedelta
-from typing import TYPE_CHECKING, Any, Union
+from typing import TYPE_CHECKING, Any, Optional, Union

 import redis
 from redis import RedisError
@@ -246,7 +246,7 @@ def init_app(app: DifyApp):
     app.extensions["redis"] = redis_client


-def redis_fallback(default_return: Any = None):
+def redis_fallback(default_return: Optional[Any] = None):
     """
     decorator to handle Redis operation exceptions and return a default value when Redis is unavailable.
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import uuid4

 import sqlalchemy as sa
-from flask_login import current_user
 from sqlalchemy import DateTime, orm

 from core.file.constants import maybe_file_object
@@ -18,7 +17,6 @@ from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIAB
 from core.workflow.nodes.enums import NodeType
 from factories.variable_factory import TypeMismatchError, build_segment_with_type
 from libs.datetime_utils import naive_utc_now
-from libs.helper import extract_tenant_id

 from ._workflow_exc import NodeNotFoundError, WorkflowDataError

@@ -363,8 +361,8 @@ class Workflow(Base):
         if self._environment_variables is None:
             self._environment_variables = "{}"

-        # Get tenant_id from current_user (Account or EndUser)
-        tenant_id = extract_tenant_id(current_user)
+        # Use workflow.tenant_id to avoid relying on request user in background threads
+        tenant_id = self.tenant_id

         if not tenant_id:
             return []
@@ -394,8 +392,8 @@ class Workflow(Base):
             self._environment_variables = "{}"
             return

-        # Get tenant_id from current_user (Account or EndUser)
-        tenant_id = extract_tenant_id(current_user)
+        # Use workflow.tenant_id to avoid relying on request user in background threads
+        tenant_id = self.tenant_id

         if not tenant_id:
             self._environment_variables = "{}"
@@ -13,7 +13,7 @@ dependencies = [
     "cachetools~=5.3.0",
     "celery~=5.5.2",
     "chardet~=5.1.0",
-    "flask~=3.1.0",
+    "flask~=3.1.2",
     "flask-compress~=1.17",
     "flask-cors~=6.0.0",
     "flask-login~=0.6.3",
@@ -1,3 +1,4 @@
+import contextlib
 from collections.abc import Callable, Sequence
 from typing import Any, Optional, Union

@@ -142,13 +143,11 @@ class ConversationService:
             raise MessageNotExistsError()

         # generate conversation name
-        try:
+        with contextlib.suppress(Exception):
             name = LLMGenerator.generate_conversation_name(
                 app_model.tenant_id, message.query, conversation.id, app_model.id
             )
             conversation.name = name
-        except Exception:
-            pass

         db.session.commit()
@@ -1,4 +1,5 @@
 import logging
+from typing import Optional

 from core.tools.entities.api_entities import ToolProviderTypeApiLiteral
 from core.tools.tool_manager import ToolManager
@@ -9,7 +10,7 @@ logger = logging.getLogger(__name__)

 class ToolCommonService:
     @staticmethod
-    def list_tool_providers(user_id: str, tenant_id: str, typ: ToolProviderTypeApiLiteral = None):
+    def list_tool_providers(user_id: str, tenant_id: str, typ: Optional[ToolProviderTypeApiLiteral] = None):
         """
         list tool providers
@@ -402,7 +402,7 @@ class WorkflowConverter:
         )

         role_prefix = None
-        prompts: Any = None
+        prompts: Optional[Any] = None

         # Chat Model
         if model_config.mode == LLMMode.CHAT.value:
@@ -1,5 +1,6 @@
 import os
 from collections import UserDict
+from typing import Optional
 from unittest.mock import MagicMock

 import pytest
@@ -21,7 +22,7 @@ class MockBaiduVectorDBClass:
     def mock_vector_db_client(
         self,
         config=None,
-        adapter: HTTPAdapter = None,
+        adapter: Optional[HTTPAdapter] = None,
     ):
         self.conn = MagicMock()
         self._config = MagicMock()
@@ -23,7 +23,7 @@ class MockTcvectordbClass:
         key="",
         read_consistency: ReadConsistency = ReadConsistency.EVENTUAL_CONSISTENCY,
         timeout=10,
-        adapter: HTTPAdapter = None,
+        adapter: Optional[HTTPAdapter] = None,
         pool_size: int = 2,
         proxies: Optional[dict] = None,
         password: Optional[str] = None,
@@ -72,11 +72,11 @@ class MockTcvectordbClass:
         shard: int,
         replicas: int,
         description: Optional[str] = None,
-        index: Index = None,
-        embedding: Embedding = None,
+        index: Optional[Index] = None,
+        embedding: Optional[Embedding] = None,
         timeout: Optional[float] = None,
         ttl_config: Optional[dict] = None,
-        filter_index_config: FilterIndexConfig = None,
+        filter_index_config: Optional[FilterIndexConfig] = None,
         indexes: Optional[list[IndexField]] = None,
     ) -> RPCCollection:
         return RPCCollection(
@@ -113,7 +113,7 @@ class MockTcvectordbClass:
         database_name: str,
         collection_name: str,
         vectors: list[list[float]],
-        filter: Filter = None,
+        filter: Optional[Filter] = None,
         params=None,
         retrieve_vector: bool = False,
         limit: int = 10,
@@ -128,7 +128,7 @@ class MockTcvectordbClass:
         collection_name: str,
         ann: Optional[Union[list[AnnSearch], AnnSearch]] = None,
         match: Optional[Union[list[KeywordSearch], KeywordSearch]] = None,
-        filter: Union[Filter, str] = None,
+        filter: Optional[Union[Filter, str]] = None,
         rerank: Optional[Rerank] = None,
         retrieve_vector: Optional[bool] = None,
         output_fields: Optional[list[str]] = None,
@@ -158,7 +158,7 @@ class MockTcvectordbClass:
         database_name: str,
         collection_name: str,
         document_ids: Optional[list[str]] = None,
-        filter: Filter = None,
+        filter: Optional[Filter] = None,
         timeout: Optional[float] = None,
     ):
         return {"code": 0, "msg": "operation success"}
@@ -1,3 +1,4 @@
+import contextlib
 import os

 import pytest
@@ -44,10 +45,8 @@ class TestClickzettaVector(AbstractVectorTest):
         yield vector

         # Cleanup: delete the test collection
-        try:
+        with contextlib.suppress(Exception):
             vector.delete()
-        except Exception:
-            pass

     def test_clickzetta_vector_basic_operations(self, vector_store):
         """Test basic CRUD operations on Clickzetta vector store."""
(File diff suppressed because it is too large)
@@ -1,3 +1,4 @@
+import contextlib
 import json
 import queue
 import threading
@@ -124,13 +125,10 @@ def test_sse_client_connection_validation():
     mock_event_source.iter_sse.return_value = [endpoint_event]

     # Test connection
-    try:
+    with contextlib.suppress(Exception):
         with sse_client(test_url) as (read_queue, write_queue):
             assert read_queue is not None
             assert write_queue is not None
-    except Exception as e:
-        # Connection might fail due to mocking, but we're testing the validation logic
-        pass


 def test_sse_client_error_handling():
@@ -178,7 +176,7 @@ def test_sse_client_timeout_configuration():
     mock_event_source.iter_sse.return_value = []
     mock_sse_connect.return_value.__enter__.return_value = mock_event_source

-    try:
+    with contextlib.suppress(Exception):
         with sse_client(
             test_url, headers=custom_headers, timeout=custom_timeout, sse_read_timeout=custom_sse_timeout
         ) as (read_queue, write_queue):
@@ -190,9 +188,6 @@ def test_sse_client_timeout_configuration():
             assert call_args is not None
             timeout_arg = call_args[1]["timeout"]
             assert timeout_arg.read == custom_sse_timeout
-    except Exception:
-        # Connection might fail due to mocking, but we tested the configuration
-        pass


 def test_sse_transport_endpoint_validation():
@@ -251,12 +246,10 @@ def test_sse_client_queue_cleanup():
     # Mock connection that raises an exception
     mock_sse_connect.side_effect = Exception("Connection failed")

-    try:
+    with contextlib.suppress(Exception):
         with sse_client(test_url) as (rq, wq):
             read_queue = rq
             write_queue = wq
-    except Exception:
-        pass  # Expected to fail

     # Queues should be cleaned up even on exception
     # Note: In real implementation, cleanup should put None to signal shutdown
@@ -283,11 +276,9 @@ def test_sse_client_headers_propagation():
     mock_event_source.iter_sse.return_value = []
     mock_sse_connect.return_value.__enter__.return_value = mock_event_source

-    try:
+    with contextlib.suppress(Exception):
         with sse_client(test_url, headers=custom_headers):
             pass
-    except Exception:
-        pass  # Expected due to mocking

     # Verify headers were passed to client factory
     mock_client_factory.assert_called_with(headers=custom_headers)
@@ -164,7 +164,7 @@ def test__get_chat_model_prompt_messages_with_files_no_memory(get_chat_model_args):
     )
     assert isinstance(prompt_messages[3].content, list)
     assert len(prompt_messages[3].content) == 2
-    assert prompt_messages[3].content[1].data == files[0].remote_url
+    assert prompt_messages[3].content[0].data == files[0].remote_url


 @pytest.fixture
@ -0,0 +1,181 @@
|
|||
import copy
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from core.entities.provider_entities import BasicProviderConfig
|
||||
from core.tools.utils.encryption import ProviderConfigEncrypter
|
||||
|
||||
|
||||
# ---------------------------
|
||||
# A no-op cache
|
||||
# ---------------------------
|
||||
class NoopCache:
|
||||
"""Simple cache stub: always returns None, does nothing for set/delete."""
|
||||
|
||||
def get(self):
|
||||
return None
|
||||
|
||||
def set(self, config):
|
||||
pass
|
||||
|
||||
def delete(self):
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def secret_field() -> BasicProviderConfig:
|
||||
"""A SECRET_INPUT field named 'password'."""
|
||||
return BasicProviderConfig(
|
||||
name="password",
|
||||
type=BasicProviderConfig.Type.SECRET_INPUT,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def normal_field() -> BasicProviderConfig:
|
||||
"""A TEXT_INPUT field named 'username'."""
|
||||
return BasicProviderConfig(
|
||||
name="username",
|
||||
type=BasicProviderConfig.Type.TEXT_INPUT,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def encrypter_obj(secret_field, normal_field):
|
||||
"""
|
||||
Build ProviderConfigEncrypter with:
|
||||
- tenant_id = tenant123
|
||||
- one secret field (password) and one normal field (username)
|
||||
- NoopCache as cache
|
||||
"""
|
||||
return ProviderConfigEncrypter(
|
||||
tenant_id="tenant123",
|
||||
config=[secret_field, normal_field],
|
||||
provider_config_cache=NoopCache(),
|
||||
)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ProviderConfigEncrypter.encrypt()
|
||||
# ============================================================
|
||||
|
||||
|
||||
def test_encrypt_only_secret_is_encrypted_and_non_secret_unchanged(encrypter_obj):
|
||||
"""
|
||||
Secret field should be encrypted, non-secret field unchanged.
|
||||
Verify encrypt_token called only for secret field.
|
||||
Also check deep copy (input not modified).
|
||||
"""
|
||||
data_in = {"username": "alice", "password": "plain_pwd"}
|
||||
data_copy = copy.deepcopy(data_in)
|
||||
|
||||
with patch("core.tools.utils.encryption.encrypter.encrypt_token", return_value="CIPHERTEXT") as mock_encrypt:
|
||||
out = encrypter_obj.encrypt(data_in)
|
||||
|
||||
assert out["username"] == "alice"
|
||||
assert out["password"] == "CIPHERTEXT"
|
||||
mock_encrypt.assert_called_once_with("tenant123", "plain_pwd")
|
||||
assert data_in == data_copy # deep copy semantics
|
||||
|
||||
|
||||
def test_encrypt_missing_secret_key_is_ok(encrypter_obj):
|
||||
"""If secret field missing in input, no error and no encryption called."""
|
||||
with patch("core.tools.utils.encryption.encrypter.encrypt_token") as mock_encrypt:
|
||||
out = encrypter_obj.encrypt({"username": "alice"})
|
||||
assert out["username"] == "alice"
|
||||
mock_encrypt.assert_not_called()
|
||||
|
||||
|
||||
# ============================================================
|
||||
# ProviderConfigEncrypter.mask_tool_credentials()
|
||||
# ============================================================
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("raw", "prefix", "suffix"),
|
||||
[
|
||||
("longsecret", "lo", "et"),
|
||||
("abcdefg", "ab", "fg"),
|
||||
("1234567", "12", "67"),
|
||||
],
|
||||
)
|
||||
def test_mask_tool_credentials_long_secret(encrypter_obj, raw, prefix, suffix):
|
||||
"""
|
||||
For length > 6: keep first 2 and last 2, mask middle with '*'.
|
||||
"""
|
||||
data_in = {"username": "alice", "password": raw}
|
||||
data_copy = copy.deepcopy(data_in)
|
||||
|
||||
out = encrypter_obj.mask_tool_credentials(data_in)
|
||||
masked = out["password"]
|
||||
|
||||
assert masked.startswith(prefix)
|
||||
assert masked.endswith(suffix)
|
||||
assert "*" in masked
|
||||
assert len(masked) == len(raw)
|
||||
assert data_in == data_copy # deep copy semantics
|
||||
|
||||
|
||||
@pytest.mark.parametrize("raw", ["", "1", "12", "123", "123456"])
|
||||
def test_mask_tool_credentials_short_secret(encrypter_obj, raw):
|
||||
"""
|
||||
For length <= 6: fully mask with '*' of same length.
|
||||
"""
|
||||
out = encrypter_obj.mask_tool_credentials({"password": raw})
|
||||
assert out["password"] == ("*" * len(raw))
|
||||
|
||||
|
||||
def test_mask_tool_credentials_missing_key_noop(encrypter_obj):
    """If the secret key is missing, leave other fields unchanged."""
    data_in = {"username": "alice"}
    data_copy = copy.deepcopy(data_in)

    out = encrypter_obj.mask_tool_credentials(data_in)
    assert out["username"] == "alice"
    assert data_in == data_copy


# ============================================================
# ProviderConfigEncrypter.decrypt()
# ============================================================


def test_decrypt_normal_flow(encrypter_obj):
    """
    Normal decrypt flow:
    - decrypt_token called for the secret field
    - secret replaced with the decrypted value
    - non-secret unchanged
    """
    data_in = {"username": "alice", "password": "ENC"}
    data_copy = copy.deepcopy(data_in)

    with patch("core.tools.utils.encryption.encrypter.decrypt_token", return_value="PLAIN") as mock_decrypt:
        out = encrypter_obj.decrypt(data_in)

    assert out["username"] == "alice"
    assert out["password"] == "PLAIN"
    mock_decrypt.assert_called_once_with("tenant123", "ENC")
    assert data_in == data_copy  # deep copy semantics


@pytest.mark.parametrize("empty_val", ["", None])
def test_decrypt_skip_empty_values(encrypter_obj, empty_val):
    """Skip decrypt if the value is empty or None; keep the original."""
    with patch("core.tools.utils.encryption.encrypter.decrypt_token") as mock_decrypt:
        out = encrypter_obj.decrypt({"password": empty_val})

    mock_decrypt.assert_not_called()
    assert out["password"] == empty_val


def test_decrypt_swallow_exception_and_keep_original(encrypter_obj):
    """
    If decrypt_token raises, the exception should be swallowed
    and the original value preserved.
    """
    with patch("core.tools.utils.encryption.encrypter.decrypt_token", side_effect=Exception("boom")):
        out = encrypter_obj.decrypt({"password": "ENC_ERR"})

    assert out["password"] == "ENC_ERR"
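

# Reviewer note: taken together, the three decrypt tests above pin down this
# behaviour. A minimal sketch under those constraints -- the real method is
# ProviderConfigEncrypter.decrypt and decrypt_token lives in
# core.tools.utils.encryption.encrypter; passing the collaborators as
# parameters here is an illustrative choice to keep the sketch self-contained.
import contextlib
from collections.abc import Callable


def decrypt_sketch(
    data: dict,
    secret_fields: set[str],
    tenant_id: str,
    decrypt_token: Callable[[str, str], str],
) -> dict:
    out = copy.deepcopy(data)  # deep copy semantics: the input dict stays intact
    for name in secret_fields:
        value = out.get(name)
        if not value:  # skip "" and None without calling decrypt_token
            continue
        with contextlib.suppress(Exception):  # on failure, keep the ciphertext
            out[name] = decrypt_token(tenant_id, value)
    return out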
@@ -1,6 +1,293 @@
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
import pytest

from core.tools.utils.web_reader_tool import (
    extract_using_readabilipy,
    get_image_upload_file_ids,
    get_url,
    page_result,
)


class FakeResponse:
    """Minimal fake response object for ssrf_proxy / cloudscraper."""

    def __init__(self, *, status_code=200, headers=None, content=b"", text=""):
        self.status_code = status_code
        self.headers = headers or {}
        self.content = content
        self.text = text if text else content.decode("utf-8", errors="ignore")
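

# Reviewer note: one detail worth pinning down in the fake itself -- `text`
# falls back to decoding `content`, which several tests below rely on:
def test_fake_response_text_falls_back_to_content():
    assert FakeResponse(content=b"<html>hi</html>").text == "<html>hi</html>"
    assert FakeResponse(content=b"\xff\xfe").text == ""  # undecodable bytes are dropped
    assert FakeResponse(text="explicit", content=b"ignored").text == "explicit"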
# ---------------------------
# Tests: page_result
# ---------------------------
@pytest.mark.parametrize(
    ("text", "cursor", "maxlen", "expected"),
    [
        ("abcdef", 0, 3, "abc"),
        ("abcdef", 2, 10, "cdef"),  # maxlen beyond end
        ("abcdef", 6, 5, ""),  # cursor at end
        ("abcdef", 7, 5, ""),  # cursor beyond end
        ("", 0, 5, ""),  # empty text
    ],
)
def test_page_result(text, cursor, maxlen, expected):
    assert page_result(text, cursor, maxlen) == expected
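

# The parametrised cases above fully determine page_result's observable
# behaviour: it acts like a bounds-safe slice. An equivalent sketch (not
# necessarily the actual implementation) that satisfies every case:
def page_result_sketch(text: str, cursor: int, max_length: int) -> str:
    # Python slicing is already bounds-safe: a cursor at or beyond the end and
    # a max_length past the end both naturally yield "".
    return text[cursor : cursor + max_length]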
# ---------------------------
# Tests: get_url
# ---------------------------
@pytest.fixture
def stub_support_types(monkeypatch):
    """Stub the supported content types list."""
    import core.tools.utils.web_reader_tool as mod

    # e.g. binary types supported by ExtractProcessor
    monkeypatch.setattr(mod.extract_processor, "SUPPORT_URL_CONTENT_TYPES", ["application/pdf", "text/plain"])
    return mod
def test_get_url_unsupported_content_type(monkeypatch, stub_support_types):
    # HEAD 200 but content-type not supported and not text/html
    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(
            status_code=200,
            headers={"Content-Type": "image/png"},  # not supported
        )

    monkeypatch.setattr(stub_support_types.ssrf_proxy, "head", fake_head)

    result = get_url("https://x.test/file.png")
    assert result == "Unsupported content-type [image/png] of URL."


def test_get_url_supported_binary_type_uses_extract_processor(monkeypatch, stub_support_types):
    """
    When the content-type is in SUPPORT_URL_CONTENT_TYPES,
    should call ExtractProcessor.load_from_url and return its text.
    """
    calls = {"load": 0}

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(
            status_code=200,
            headers={"Content-Type": "application/pdf"},
        )

    def fake_load_from_url(url, return_text=False):
        calls["load"] += 1
        assert return_text is True
        return "PDF extracted text"

    monkeypatch.setattr(stub_support_types.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(stub_support_types.ExtractProcessor, "load_from_url", staticmethod(fake_load_from_url))

    result = get_url("https://x.test/doc.pdf")
    assert calls["load"] == 1
    assert result == "PDF extracted text"
def test_get_url_html_flow_with_chardet_and_readability(monkeypatch, stub_support_types):
    """200 + text/html → GET, chardet detects the encoding, readability returns an article, which is templated."""

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=200, headers={"Content-Type": "text/html"})

    def fake_get(url, headers=None, follow_redirects=True, timeout=None):
        html = b"<html><head><title>x</title></head><body>hello</body></html>"
        return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=html)

    # chardet.detect returns utf-8
    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get)
    monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"})

    # readability → a dict that maps to Article, then FULL_TEMPLATE
    def fake_simple_json_from_html_string(html, use_readability=True):
        return {
            "title": "My Title",
            "byline": "Bob",
            "plain_text": [{"type": "text", "text": "Hello world"}],
        }

    monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string)

    out = get_url("https://x.test/page")
    assert "TITLE: My Title" in out
    assert "AUTHOR: Bob" in out
    assert "Hello world" in out
def test_get_url_html_flow_empty_article_text_returns_empty(monkeypatch, stub_support_types):
    """If readability returns no text, should return an empty string."""

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=200, headers={"Content-Type": "text/html"})

    def fake_get(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=b"<html/>")

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get)
    monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"})
    # readability returns empty plain_text
    monkeypatch.setattr(mod, "simple_json_from_html_string", lambda html, use_readability=True: {"plain_text": []})

    out = get_url("https://x.test/empty")
    assert out == ""
def test_get_url_403_cloudscraper_fallback(monkeypatch, stub_support_types):
    """HEAD 403 → use cloudscraper.get via ssrf_proxy.make_request, then proceed."""

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=403, headers={})

    # cloudscraper.create_scraper() → object with .get()
    class FakeScraper:
        def get(self, url, headers=None, follow_redirects=True, timeout=None):
            # mimic html 200
            html = b"<html><body>hi</body></html>"
            return FakeResponse(status_code=200, headers={"Content-Type": "text/html"}, content=html)

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(mod.cloudscraper, "create_scraper", lambda: FakeScraper())
    monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"})
    monkeypatch.setattr(
        mod,
        "simple_json_from_html_string",
        lambda html, use_readability=True: {"title": "T", "byline": "A", "plain_text": [{"type": "text", "text": "X"}]},
    )

    out = get_url("https://x.test/403")
    assert "TITLE: T" in out
    assert "AUTHOR: A" in out
    assert "X" in out
def test_get_url_head_non_200_returns_status(monkeypatch, stub_support_types):
    """HEAD returns non-200 and non-403 → should directly return the status-code message."""

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=500)

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)

    out = get_url("https://x.test/fail")
    assert out == "URL returned status code 500."
def test_get_url_content_disposition_filename_detection(monkeypatch, stub_support_types):
    """
    If HEAD returns 200 with no Content-Type but the Content-Disposition filename suggests a supported type,
    it should route to ExtractProcessor.load_from_url.
    """
    calls = {"load": 0}

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=200, headers={"Content-Disposition": 'attachment; filename="doc.pdf"'})

    def fake_load_from_url(url, return_text=False):
        calls["load"] += 1
        return "From ExtractProcessor via filename"

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(mod.ExtractProcessor, "load_from_url", staticmethod(fake_load_from_url))

    out = get_url("https://x.test/fname")
    assert calls["load"] == 1
    assert out == "From ExtractProcessor via filename"
def test_get_url_html_encoding_fallback_when_decode_fails(monkeypatch, stub_support_types):
    """
    If chardet returns an encoding but content.decode raises, should fall back to response.text.
    """

    def fake_head(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(status_code=200, headers={"Content-Type": "text/html"})

    # Return bytes that will raise with the chosen encoding
    def fake_get(url, headers=None, follow_redirects=True, timeout=None):
        return FakeResponse(
            status_code=200,
            headers={"Content-Type": "text/html"},
            content=b"\xff\xfe\xfa",  # likely to fail under utf-8
            text="<html>fallback text</html>",
        )

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod.ssrf_proxy, "head", fake_head)
    monkeypatch.setattr(mod.ssrf_proxy, "get", fake_get)
    monkeypatch.setattr(mod.chardet, "detect", lambda b: {"encoding": "utf-8"})
    monkeypatch.setattr(
        mod,
        "simple_json_from_html_string",
        lambda html, use_readability=True: {"title": "", "byline": "", "plain_text": [{"type": "text", "text": "ok"}]},
    )

    out = get_url("https://x.test/enc-fallback")
    assert "ok" in out
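

# Reviewer note: the encoding fallback exercised by the last test is the usual
# chardet pattern. A minimal self-contained sketch of that pattern (assuming
# only that chardet is installed; this is not the literal get_url code):
def decode_with_fallback_sketch(content: bytes, fallback_text: str) -> str:
    import chardet

    detected = chardet.detect(content).get("encoding")
    if detected:
        try:
            return content.decode(detected)
        except Exception:
            return fallback_text  # detection can be wrong; use response.text
    return fallback_text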
# ---------------------------
# Tests: extract_using_readabilipy
# ---------------------------


def test_extract_using_readabilipy_field_mapping_and_defaults(monkeypatch):
    # stub readabilipy.simple_json_from_html_string
    def fake_simple_json_from_html_string(html, use_readability=True):
        return {
            "title": "Hello",
            "byline": "Alice",
            "plain_text": [{"type": "text", "text": "world"}],
        }

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string)

    article = extract_using_readabilipy("<html>...</html>")
    assert article.title == "Hello"
    assert article.author == "Alice"
    assert isinstance(article.text, list)
    assert article.text
    assert article.text[0]["text"] == "world"
def test_extract_using_readabilipy_defaults_when_missing(monkeypatch):
    def fake_simple_json_from_html_string(html, use_readability=True):
        return {}  # all missing

    import core.tools.utils.web_reader_tool as mod

    monkeypatch.setattr(mod, "simple_json_from_html_string", fake_simple_json_from_html_string)

    article = extract_using_readabilipy("<html>...</html>")
    assert article.title == ""
    assert article.author == ""
    assert article.text == []
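

# Reviewer note: the two tests above pin down the field mapping (title -> title,
# byline -> author, plain_text -> text) and the empty-dict defaults. A sketch
# under those constraints; the Article container here is illustrative, while
# simple_json_from_html_string is the readabilipy entry point the module wraps.
from dataclasses import dataclass, field

from readabilipy import simple_json_from_html_string


@dataclass
class ArticleSketch:
    title: str = ""
    author: str = ""
    text: list = field(default_factory=list)


def extract_using_readabilipy_sketch(html: str) -> ArticleSketch:
    result = simple_json_from_html_string(html, use_readability=True)
    return ArticleSketch(
        title=result.get("title") or "",
        author=result.get("byline") or "",
        text=result.get("plain_text") or [],
    )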
# ---------------------------
# Tests: get_image_upload_file_ids
# ---------------------------
def test_get_image_upload_file_ids():
    # should extract id from https + file-preview
    content = ""
@@ -9,7 +9,6 @@ from core.file.models import File
from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable
from core.variables.segments import IntegerSegment, Segment
from factories.variable_factory import build_segment
from models.model import EndUser
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable
@@ -43,14 +42,9 @@ def test_environment_variables():
        {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
    )

    # Mock current_user as an EndUser
    mock_user = mock.Mock(spec=EndUser)
    mock_user.tenant_id = "tenant_id"

    with (
        mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
        mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
        mock.patch("models.workflow.current_user", mock_user),
    ):
        # Set the environment_variables property of the Workflow instance
        variables = [variable1, variable2, variable3, variable4]
@@ -90,14 +84,9 @@ def test_update_environment_variables():
        {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
    )

    # Mock current_user as an EndUser
    mock_user = mock.Mock(spec=EndUser)
    mock_user.tenant_id = "tenant_id"

    with (
        mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
        mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
        mock.patch("models.workflow.current_user", mock_user),
    ):
        variables = [variable1, variable2, variable3, variable4]
@@ -136,14 +125,9 @@ def test_to_dict():

    # Create some EnvironmentVariable instances

    # Mock current_user as an EndUser
    mock_user = mock.Mock(spec=EndUser)
    mock_user.tenant_id = "tenant_id"

    with (
        mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
        mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
        mock.patch("models.workflow.current_user", mock_user),
    ):
        # Set the environment_variables property of the Workflow instance
        workflow.environment_variables = [
@@ -1436,7 +1436,7 @@ requires-dist = [
    { name = "cachetools", specifier = "~=5.3.0" },
    { name = "celery", specifier = "~=5.5.2" },
    { name = "chardet", specifier = "~=5.1.0" },
    { name = "flask", specifier = "~=3.1.0" },
    { name = "flask", specifier = "~=3.1.2" },
    { name = "flask-compress", specifier = "~=1.17" },
    { name = "flask-cors", specifier = "~=6.0.0" },
    { name = "flask-login", specifier = "~=0.6.3" },
@@ -1790,7 +1790,7 @@ wheels = [

[[package]]
name = "flask"
version = "3.1.1"
version = "3.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "blinker" },
@@ -1800,9 +1800,9 @@ dependencies = [
    { name = "markupsafe" },
    { name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/de/e47735752347f4128bcf354e0da07ef311a78244eba9e3dc1d4a5ab21a98/flask-3.1.1.tar.gz", hash = "sha256:284c7b8f2f58cb737f0cf1c30fd7eaf0ccfcde196099d24ecede3fc2005aa59e", size = 753440, upload-time = "2025-05-13T15:01:17.447Z" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/3d/68/9d4508e893976286d2ead7f8f571314af6c2037af34853a30fd769c02e9d/flask-3.1.1-py3-none-any.whl", hash = "sha256:07aae2bb5eaf77993ef57e357491839f5fd9f4dc281593a81a9e4d79a24f295c", size = 103305, upload-time = "2025-05-13T15:01:15.591Z" },
    { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" },
]

[[package]]
@@ -24,7 +24,7 @@ export const EditTitle: FC<{ className?: string; title: string }> = ({ className
      <RiEditFill className='mr-1 h-3.5 w-3.5' />
      <div>{title}</div>
      <div
        className='ml-2 h-[1px] grow'
        className='ml-2 h-px grow'
        style={{
          background: 'linear-gradient(90deg, rgba(0, 0, 0, 0.05) -1.65%, rgba(0, 0, 0, 0.00) 100%)',
        }}
@@ -12,7 +12,7 @@ const GroupName: FC<IGroupNameProps> = ({
  return (
    <div className='mb-1 flex items-center'>
      <div className='mr-3 text-xs font-semibold uppercase leading-[18px] text-text-tertiary'>{name}</div>
      <div className='h-[1px] grow'
      <div className='h-px grow'
        style={{
          background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)',
@@ -66,7 +66,7 @@ const SelectVarType: FC<Props> = ({
        <SelectItem type={InputVarType.select} value='select' text={t('appDebug.variableConfig.select')} onClick={handleChange}></SelectItem>
        <SelectItem type={InputVarType.number} value='number' text={t('appDebug.variableConfig.number')} onClick={handleChange}></SelectItem>
      </div>
      <div className='h-[1px] border-t border-components-panel-border'></div>
      <div className='h-px border-t border-components-panel-border'></div>
      <div className='p-1'>
        <SelectItem Icon={ApiConnection} value='api' text={t('appDebug.variableConfig.apiBasedVar')} onClick={handleChange}></SelectItem>
      </div>
@@ -81,7 +81,7 @@ const AssistantTypePicker: FC<Props> = ({

  const agentConfigUI = (
    <>
      <div className='my-4 h-[1px] bg-gray-100'></div>
      <div className='my-4 h-px bg-gray-100'></div>
      <div
        className={cn(isAgent ? 'group cursor-pointer hover:bg-primary-50' : 'opacity-30', 'rounded-xl bg-gray-50 p-3 pr-4 ')}
        onClick={() => {
@@ -678,7 +678,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
      ? <div className="px-6 py-4">
        <div className='flex h-[18px] items-center space-x-3'>
          <div className='system-xs-semibold-uppercase text-text-tertiary'>{t('appLog.table.header.output')}</div>
          <div className='h-[1px] grow' style={{
          <div className='h-px grow' style={{
            background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, rgb(243, 244, 246) 100%)',
          }}></div>
        </div>
@@ -112,7 +112,7 @@ const WorkflowAppLogList: FC<ILogs> = ({ logs, appDetail, onRefresh }) => {
        </div>
      )}
    </td>
    <td className='w-[160px] p-3 pr-2'>{formatTime(log.created_at, t('appLog.dateTimeFormat') as string)}</td>
    <td className='w-[180px] p-3 pr-2'>{formatTime(log.created_at, t('appLog.dateTimeFormat') as string)}</td>
    <td className='p-3 pr-2'>{statusTdRender(log.workflow_run.status)}</td>
    <td className='p-3 pr-2'>
      <div className={cn(
@@ -24,7 +24,7 @@ const Iteration: FC<Props> = ({ iterationInfo, isFinal, index }) => {
      {!isFinal && (
        <div className='mr-3 shrink-0 text-xs font-semibold leading-[18px] text-text-tertiary'>{`${t('appLog.agentLogDetail.iteration').toUpperCase()} ${index}`}</div>
      )}
      <Divider bgStyle='gradient' className='mx-0 h-[1px] grow'/>
      <Divider bgStyle='gradient' className='mx-0 h-px grow'/>
    </div>
    <ToolCall
      isLLM
@@ -79,7 +79,7 @@ const Citation: FC<CitationProps> = ({
    <div className='-mb-1 mt-3'>
      <div className='system-xs-medium mb-2 flex items-center text-text-tertiary'>
        {t('common.chat.citation.title')}
        <div className='ml-2 h-[1px] grow bg-divider-regular' />
        <div className='ml-2 h-px grow bg-divider-regular' />
      </div>
      <div className='relative flex flex-wrap'>
        {
@@ -114,7 +114,7 @@ const Popup: FC<PopupProps> = ({
        </div>
        {
          index !== data.sources.length - 1 && (
            <div className='my-1 h-[1px] bg-divider-regular' />
            <div className='my-1 h-px bg-divider-regular' />
          )
        }
      </Fragment>
@@ -90,7 +90,7 @@ const Dropdown: FC<DropdownProps> = ({
      }
      {
        (!!items.length && !!secondItems?.length) && (
          <div className='h-[1px] bg-divider-regular' />
          <div className='h-px bg-divider-regular' />
        )
      }
      {
@@ -19,7 +19,7 @@ const ScoreSlider: FC<Props> = ({

  return (
    <div className={className}>
      <div className='mt-[14px] h-[1px]'>
      <div className='mt-[14px] h-px'>
        <Slider
          max={100}
          min={80}
@@ -101,9 +101,9 @@ const FileFromLinkOrLocal = ({
      {
        showFromLink && showFromLocal && (
          <div className='system-2xs-medium-uppercase flex h-7 items-center p-2 text-text-quaternary'>
            <div className='mr-2 h-[1px] w-[93px] bg-gradient-to-l from-[rgba(16,24,40,0.08)]' />
            <div className='mr-2 h-px w-[93px] bg-gradient-to-l from-[rgba(16,24,40,0.08)]' />
            OR
            <div className='ml-2 h-[1px] w-[93px] bg-gradient-to-r from-[rgba(16,24,40,0.08)]' />
            <div className='ml-2 h-px w-[93px] bg-gradient-to-r from-[rgba(16,24,40,0.08)]' />
          </div>
        )
      }
@@ -93,9 +93,9 @@ const UploaderButton: FC<UploaderButtonProps> = ({
      {hasUploadFromLocal && (
        <>
          <div className="mt-2 flex items-center px-2 text-xs font-medium text-gray-400">
            <div className="mr-3 h-[1px] w-[93px] bg-gradient-to-l from-[#F3F4F6]" />
            <div className="mr-3 h-px w-[93px] bg-gradient-to-l from-[#F3F4F6]" />
            OR
            <div className="ml-3 h-[1px] w-[93px] bg-gradient-to-r from-[#F3F4F6]" />
            <div className="ml-3 h-px w-[93px] bg-gradient-to-r from-[#F3F4F6]" />
          </div>
          <Uploader
            onUpload={handleUpload}
@@ -0,0 +1,16 @@
class TooltipManager {
  private activeCloser: (() => void) | null = null

  register(closeFn: () => void) {
    if (this.activeCloser)
      this.activeCloser()
    this.activeCloser = closeFn
  }

  clear(closeFn: () => void) {
    if (this.activeCloser === closeFn)
      this.activeCloser = null
  }
}

export const tooltipManager = new TooltipManager()
@@ -6,6 +6,8 @@ import type { OffsetOptions, Placement } from '@floating-ui/react'
import { RiQuestionLine } from '@remixicon/react'
import cn from '@/utils/classnames'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
import { tooltipManager } from './TooltipManager'

export type TooltipProps = {
  position?: Placement
  triggerMethod?: 'hover' | 'click'
@@ -56,22 +58,26 @@ const Tooltip: FC<TooltipProps> = ({
    isHoverTriggerRef.current = isHoverTrigger
  }, [isHoverTrigger])

  const close = () => setOpen(false)

  const handleLeave = (isTrigger: boolean) => {
    if (isTrigger)
      setNotHoverTrigger()
    else
      setNotHoverPopup()

    // give time to move to the popup
    if (needsDelay) {
      setTimeout(() => {
        if (!isHoverPopupRef.current && !isHoverTriggerRef.current)
        if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
          setOpen(false)
          tooltipManager.clear(close)
        }
      }, 300)
    }
    else {
      setOpen(false)
      tooltipManager.clear(close)
    }
  }
@@ -87,6 +93,7 @@ const Tooltip: FC<TooltipProps> = ({
        onMouseEnter={() => {
          if (triggerMethod === 'hover') {
            setHoverTrigger()
            tooltipManager.register(close)
            setOpen(true)
          }
        }}
@@ -93,7 +93,7 @@ const ChildSegmentList: FC<IChildSegmentCardProps> = ({
      isParagraphMode ? 'pb-2 pt-1' : 'grow px-3',
      (isFullDocMode && isLoading) && 'overflow-y-hidden',
    )}>
      {isFullDocMode ? <Divider type='horizontal' className='my-1 h-[1px] bg-divider-subtle' /> : null}
      {isFullDocMode ? <Divider type='horizontal' className='my-1 h-px bg-divider-subtle' /> : null}
      <div className={cn('flex items-center justify-between', isFullDocMode ? 'sticky -top-2 left-0 bg-background-default pb-3 pt-2' : '')}>
        <div className={cn(
          'flex h-7 items-center rounded-lg pl-1 pr-3',
@@ -650,7 +650,7 @@ const Completed: FC<ICompletedProps> = ({
        />
      }
      {/* Pagination */}
      <Divider type='horizontal' className='mx-6 my-0 h-[1px] w-auto bg-divider-subtle' />
      <Divider type='horizontal' className='mx-6 my-0 h-px w-auto bg-divider-subtle' />
      <Pagination
        current={currentPage - 1}
        onChange={cur => setCurrentPage(cur + 1)}
@@ -105,7 +105,7 @@ const ApiBasedExtensionSelector: FC<ApiBasedExtensionSelectorProps> = ({
        }
      </div>
    </div>
    <div className='h-[1px] bg-divider-regular' />
    <div className='h-px bg-divider-regular' />
    <div className='p-1'>
      <div
        className='flex h-8 cursor-pointer items-center px-3 text-sm text-text-accent'
@@ -206,7 +206,7 @@ const ModelParameterModal: FC<ModelParameterModalProps> = ({
      </div>
      {
        !!parameterRules.length && (
          <div className='my-3 h-[1px] bg-divider-subtle' />
          <div className='my-3 h-px bg-divider-subtle' />
        )
      }
      {
@@ -14,7 +14,7 @@ const IntersectionLine = ({
  useScrollIntersection(ref, intersectionContainerId)

  return (
    <div ref={ref} className='mb-4 h-[1px] shrink-0 bg-transparent'></div>
    <div ref={ref} className='mb-4 h-px shrink-0 bg-transparent'></div>
  )
}
@@ -294,7 +294,7 @@ const Authorized = ({
        )
      }
    </div>
    <div className='h-[1px] bg-divider-subtle'></div>
    <div className='h-px bg-divider-subtle'></div>
    <div className='p-2'>
      <Authorize
        pluginPayload={pluginPayload}
@@ -248,7 +248,7 @@ const ModelParameterModal: FC<ModelParameterModalProps> = ({
        />
      </div>
      {(currentModel?.model_type === ModelTypeEnum.textGeneration || currentModel?.model_type === ModelTypeEnum.tts) && (
        <div className='my-3 h-[1px] bg-divider-subtle' />
        <div className='my-3 h-px bg-divider-subtle' />
      )}
      {currentModel?.model_type === ModelTypeEnum.textGeneration && (
        <LLMParamsPanel
@@ -73,7 +73,7 @@ const PluginsPanel = () => {
      {!isPluginListLoading && (
        <>
          {(filteredList?.length ?? 0) > 0 ? (
            <div className='flex grow flex-wrap content-start items-start justify-center gap-2 self-stretch px-12'>
            <div className='flex grow flex-wrap content-start items-start justify-center gap-2 self-stretch overflow-y-auto px-12'>
              <div className='w-full'>
                <List pluginList={filteredList || []} />
              </div>
@@ -175,7 +175,7 @@ const WorkflowToolConfigureButton = ({

  return (
    <>
      <Divider type='horizontal' className='h-[1px] bg-divider-subtle' />
      <Divider type='horizontal' className='h-px bg-divider-subtle' />
      {(!published || !isLoading) && (
        <div className={cn(
          'group rounded-lg bg-background-section-burn transition-colors',
@@ -15,7 +15,7 @@ const HelpLineHorizontal = memo(({

  return (
    <div
      className='absolute z-[9] h-[1px] bg-primary-300'
      className='absolute z-[9] h-px bg-primary-300'
      style={{
        top: top * zoom + y,
        left: left * zoom + x,
@@ -80,7 +80,7 @@ const PanelOperatorPopup = ({
        )
      }
    </div>
    <div className='h-[1px] bg-divider-regular'></div>
    <div className='h-px bg-divider-regular'></div>
  </>
)
}
@@ -109,7 +109,7 @@ const PanelOperatorPopup = ({
          <ShortcutsName keys={['ctrl', 'd']} />
        </div>
      </div>
      <div className='h-[1px] bg-divider-regular'></div>
      <div className='h-px bg-divider-regular'></div>
      {
        !nodeMetaData.isUndeletable && (
          <>
@@ -125,7 +125,7 @@ const PanelOperatorPopup = ({
              <ShortcutsName keys={['del']} />
            </div>
          </div>
          <div className='h-[1px] bg-divider-regular'></div>
          <div className='h-px bg-divider-regular'></div>
        </>
      )
    }
@@ -144,7 +144,7 @@ const PanelOperatorPopup = ({
        {t('workflow.panel.helpLink')}
      </a>
    </div>
    <div className='h-[1px] bg-divider-regular'></div>
    <div className='h-px bg-divider-regular'></div>
  </>
)
}
@@ -204,7 +204,7 @@ const ConditionWrap: FC<Props> = ({
        </div>
      </div>
      {!isSubVariable && (
        <div className='mx-3 my-2 h-[1px] bg-divider-subtle'></div>
        <div className='mx-3 my-2 h-px bg-divider-subtle'></div>
      )}
    </div>
  ))
@@ -73,7 +73,7 @@ const Panel: FC<NodePanelProps<IfElseNodeType>> = ({
        ELIF
      </Button>
    </div>
    <div className='mx-3 my-2 h-[1px] bg-divider-subtle'></div>
    <div className='mx-3 my-2 h-px bg-divider-subtle'></div>
    <Field
      title={t(`${i18nPrefix}.else`)}
      className='px-4 py-2'
@@ -71,7 +71,7 @@ const Operator = ({
        <ShortcutsName keys={['ctrl', 'd']} />
      </div>
    </div>
    <div className='h-[1px] bg-divider-subtle'></div>
    <div className='h-px bg-divider-subtle'></div>
    <div className='p-1'>
      <div
        className='flex h-8 cursor-pointer items-center justify-between rounded-md px-3 text-sm text-text-secondary hover:bg-state-base-hover'
@@ -85,7 +85,7 @@ const Operator = ({
        />
      </div>
    </div>
    <div className='h-[1px] bg-divider-subtle'></div>
    <div className='h-px bg-divider-subtle'></div>
    <div className='p-1'>
      <div
        className='flex h-8 cursor-pointer items-center justify-between rounded-md px-3 text-sm text-text-secondary hover:bg-state-destructive-hover hover:text-text-destructive'
@@ -107,7 +107,7 @@ const ConversationVariableModal = ({
    <div className='flex h-0 grow flex-col p-4 pt-2'>
      <div className='mb-2 flex shrink-0 items-center gap-2'>
        <div className='system-xs-medium-uppercase shrink-0 text-text-tertiary'>{t('workflow.chatVariable.storedContent').toLocaleUpperCase()}</div>
        <div className='h-[1px] grow' style={{
        <div className='h-px grow' style={{
          background: 'linear-gradient(to right, rgba(16, 24, 40, 0.08) 0%, rgba(255, 255, 255) 100%)',
        }}></div>
        {latestValueTimestampMap[currentVar.id] && (
@@ -64,7 +64,7 @@ const ContextMenu: FC<ContextMenuProps> = (props: ContextMenuProps) => {
      {
        isShowDelete && (
          <>
            <Divider type='horizontal' className='my-0 h-[1px] bg-divider-subtle' />
            <Divider type='horizontal' className='my-0 h-px bg-divider-subtle' />
            <div className='p-1'>
              <MenuItem
                item={deleteOperation}
@@ -70,7 +70,7 @@ const Filter: FC<FilterProps> = ({
        })
      }
      </div>
      <Divider type='horizontal' className='my-0 h-[1px] bg-divider-subtle' />
      <Divider type='horizontal' className='my-0 h-px bg-divider-subtle' />
      <FilterSwitch enabled={isOnlyShowNamedVersions} handleSwitch={handleSwitch} />
    </div>
  </PortalToFollowElemContent>
@@ -414,7 +414,7 @@ const SelectionContextmenu = () => {
        {t('workflow.operator.distributeVertical')}
      </div>
    </div>
    <div className='h-[1px] bg-divider-regular'></div>
    <div className='h-px bg-divider-regular'></div>
    <div className='p-1'>
      <div className='system-xs-medium px-2 py-2 text-text-tertiary'>
        {t('workflow.operator.horizontal')}
@@ -154,7 +154,7 @@ const EducationApplyAge = () => {
      >
        {t('education.submit')}
      </Button>
      <div className='mb-4 mt-5 h-[1px] bg-gradient-to-r from-[rgba(16,24,40,0.08)]'></div>
      <div className='mb-4 mt-5 h-px bg-gradient-to-r from-[rgba(16,24,40,0.08)]'></div>
      <a
        className='system-xs-regular flex items-center text-text-accent'
        href={docLink('/getting-started/dify-for-education')}
@@ -1,7 +1,7 @@
const translation = {
  title: 'Protokolle',
  description: 'Die Protokolle zeichnen den Betriebsstatus der Anwendung auf, einschließlich Benutzereingaben und KI-Antworten.',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  dateFormat: 'MM/DD/YYYY',
  table: {
    header: {

@@ -1,7 +1,7 @@
const translation = {
  title: 'Logs',
  description: 'The logs record the running status of the application, including user inputs and AI replies.',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  dateFormat: 'MM/DD/YYYY',
  table: {
    header: {

@@ -1,7 +1,7 @@
const translation = {
  title: 'Registros',
  description: 'Los registros registran el estado de ejecución de la aplicación, incluyendo las entradas de usuario y las respuestas de la IA.',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  table: {
    header: {
      updatedTime: 'Hora actualizada',

@@ -1,7 +1,7 @@
const translation = {
  title: 'لاگ‌ها',
  description: 'لاگ‌ها وضعیت اجرایی برنامه را ثبت می‌کنند، شامل ورودی‌های کاربر و پاسخ‌های هوش مصنوعی.',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  table: {
    header: {
      updatedTime: 'زمان به‌روزرسانی',

@@ -1,7 +1,7 @@
const translation = {
  title: 'Journaux',
  description: 'Les journaux enregistrent l\'état d\'exécution de l\'application, y compris les entrées utilisateur et les réponses de l\'IA.',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  table: {
    header: {
      updatedTime: 'Heure de mise à jour',

@@ -1,7 +1,7 @@
const translation = {
  title: 'लॉग्स',
  description: 'लॉग्स एप्लिकेशन के रनिंग स्टेटस को रिकॉर्ड करते हैं, जिसमें यूजर इनपुट और एआई रिप्लाईज़ शामिल हैं।',
  dateTimeFormat: 'MM/DD/YYYY hh:mm A',
  dateTimeFormat: 'MM/DD/YYYY hh:mm:ss A',
  table: {
    header: {
      updatedTime: 'अपडेट का समय',
Some files were not shown because too many files have changed in this diff.