mirror of https://github.com/langgenius/dify.git
Merge branch 'fix/install-preference-error' into deploy/enterprise
commit c0e73c1659
@@ -47,15 +47,17 @@ jobs:
      - name: Run Unit tests
        run: |
          uv run --project api bash dev/pytest/pytest_unit_tests.sh

      - name: Coverage Summary
        run: |
          set -x
          # Extract coverage percentage and create a summary
          TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')

          # Create a detailed coverage summary
          echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
          echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          uv run --project api coverage report >> $GITHUB_STEP_SUMMARY
          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
          uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY

      - name: Run dify config tests
        run: uv run --project api dev/pytest/pytest_config_tests.py
@@ -214,3 +214,4 @@ mise.toml

# AI Assistant
.roo/
api/.env.backup
@@ -1,8 +1,11 @@
import logging
from pathlib import Path
from typing import Any

from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource

from libs.file_utils import search_file_upwards

from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig
@@ -99,4 +102,12 @@ class DifyConfig(
            RemoteSettingsSourceFactory(settings_cls),
            dotenv_settings,
            file_secret_settings,
            TomlConfigSettingsSource(
                settings_cls=settings_cls,
                toml_file=search_file_upwards(
                    base_dir_path=Path(__file__).parent,
                    target_file_name="pyproject.toml",
                    max_search_parent_depth=2,
                ),
            ),
        )
@@ -1,15 +1,16 @@
from pydantic import Field
from pydantic_settings import BaseSettings

from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig


class PackagingInfo(BaseSettings):
class PackagingInfo(PyProjectTomlConfig):
    """
    Packaging build information
    """

    CURRENT_VERSION: str = Field(
        description="Dify version",
        default="1.5.0",
        default="1.5.1",
    )

    COMMIT_SHA: str = Field(
@@ -0,0 +1,17 @@
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings


class PyProjectConfig(BaseModel):
    version: str = Field(description="Dify version", default="")


class PyProjectTomlConfig(BaseSettings):
    """
    configs in api/pyproject.toml
    """

    project: PyProjectConfig = Field(
        description="configs in the project section of pyproject.toml",
        default=PyProjectConfig(),
    )
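Taken together, the two hunks above move the version number out of a hard-coded Field default and into pyproject.toml, loaded through pydantic-settings' TOML source. A minimal, self-contained sketch of that mechanism follows; the file path and class names here are illustrative, not Dify's exact wiring:

from pathlib import Path

from pydantic import BaseModel
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, TomlConfigSettingsSource


class Project(BaseModel):
    version: str = ""


class VersionSettings(BaseSettings):
    # maps to the [project] table of the TOML file
    project: Project = Project()

    @classmethod
    def settings_customise_sources(
        cls,
        settings_cls: type[BaseSettings],
        init_settings: PydanticBaseSettingsSource,
        env_settings: PydanticBaseSettingsSource,
        dotenv_settings: PydanticBaseSettingsSource,
        file_secret_settings: PydanticBaseSettingsSource,
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        # earlier sources win, so env vars still override the TOML file
        return (
            init_settings,
            env_settings,
            TomlConfigSettingsSource(settings_cls, toml_file=Path("pyproject.toml")),
        )


settings = VersionSettings()
print(settings.project.version)  # e.g. "1.5.1" when pyproject.toml pins it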
@@ -41,7 +41,7 @@ class OAuthDataSource(Resource):
            if not internal_secret:
                return ({"error": "Internal secret is not set"},)
            oauth_provider.save_internal_access_token(internal_secret)
            return {"data": ""}
            return {"data": "internal"}
        else:
            auth_url = oauth_provider.get_authorization_url()
            return {"data": auth_url}, 200
@@ -18,7 +18,7 @@ class VersionApi(Resource):
        check_update_url = dify_config.CHECK_UPDATE_URL

        result = {
            "version": dify_config.CURRENT_VERSION,
            "version": dify_config.project.version,
            "release_date": "",
            "release_notes": "",
            "can_auto_update": False,
@@ -9,7 +9,7 @@ class IndexApi(Resource):
        return {
            "welcome": "Dify OpenAPI",
            "api_version": "v1",
            "server_version": dify_config.CURRENT_VERSION,
            "server_version": dify_config.project.version,
        }
@@ -27,6 +27,9 @@ from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repositories.draft_variable_repository import (
    DraftVariableSaverFactory,
)
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader
@@ -36,7 +39,10 @@ from libs.flask_utils import preserve_flask_contexts
from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.enums import WorkflowRunTriggeredFrom
from services.conversation_service import ConversationService
from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService
from services.workflow_draft_variable_service import (
    DraftVarLoader,
    WorkflowDraftVariableService,
)

logger = logging.getLogger(__name__)
@@ -450,6 +456,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            stream=stream,
            draft_var_saver_factory=self._get_draft_var_saver_factory(invoke_from),
        )

        return AdvancedChatAppGenerateResponseConverter.convert(response=response, invoke_from=invoke_from)
@@ -521,6 +528,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        user: Union[Account, EndUser],
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        draft_var_saver_factory: DraftVariableSaverFactory,
        stream: bool = False,
    ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
        """
@@ -547,6 +555,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            stream=stream,
            draft_var_saver_factory=draft_var_saver_factory,
        )

        try:
@@ -64,6 +64,7 @@ from core.workflow.entities.workflow_execution import WorkflowExecutionStatus, W
from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes import NodeType
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
@@ -94,6 +95,7 @@ class AdvancedChatAppGenerateTaskPipeline:
        dialogue_count: int,
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        draft_var_saver_factory: DraftVariableSaverFactory,
    ) -> None:
        self._base_task_pipeline = BasedGenerateTaskPipeline(
            application_generate_entity=application_generate_entity,
@@ -153,6 +155,7 @@ class AdvancedChatAppGenerateTaskPipeline:
        self._conversation_name_generate_thread: Thread | None = None
        self._recorded_files: list[Mapping[str, Any]] = []
        self._workflow_run_id: str = ""
        self._draft_var_saver_factory = draft_var_saver_factory

    def process(self) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
        """
@@ -371,6 +374,7 @@ class AdvancedChatAppGenerateTaskPipeline:
                    workflow_node_execution=workflow_node_execution,
                )
                session.commit()
            self._save_output_for_event(event, workflow_node_execution.id)

            if node_finish_resp:
                yield node_finish_resp
@@ -390,6 +394,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                task_id=self._application_generate_entity.task_id,
                workflow_node_execution=workflow_node_execution,
            )
            if isinstance(event, QueueNodeExceptionEvent):
                self._save_output_for_event(event, workflow_node_execution.id)

            if node_finish_resp:
                yield node_finish_resp
@@ -759,3 +765,15 @@ class AdvancedChatAppGenerateTaskPipeline:
        if not message:
            raise ValueError(f"Message not found: {self._message_id}")
        return message

    def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
        with Session(db.engine) as session, session.begin():
            saver = self._draft_var_saver_factory(
                session=session,
                app_id=self._application_generate_entity.app_config.app_id,
                node_id=event.node_id,
                node_type=event.node_type,
                node_execution_id=node_execution_id,
                enclosing_node_id=event.in_loop_id or event.in_iteration_id,
            )
            saver.save(event.process_data, event.outputs)
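The `with Session(db.engine) as session, session.begin():` idiom above scopes both the session lifetime and a transaction: the block commits on success and rolls back if the saver raises. A minimal runnable sketch of the same pattern (the SQLite engine and table are illustrative):

from sqlalchemy import create_engine, text
from sqlalchemy.orm import Session

engine = create_engine("sqlite:///:memory:")

# Session(...) closes the session on exit; session.begin() opens a transaction
# that commits when the block succeeds and rolls back if it raises.
with Session(engine) as session, session.begin():
    session.execute(text("CREATE TABLE drafts (name TEXT, value TEXT)"))
    session.execute(text("INSERT INTO drafts VALUES ('answer', '42')"))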
@@ -1,10 +1,20 @@
import json
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, Union
from typing import TYPE_CHECKING, Any, Optional, Union, final

from sqlalchemy.orm import Session

from core.app.app_config.entities import VariableEntityType
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File, FileUploadConfig
from core.workflow.nodes.enums import NodeType
from core.workflow.repositories.draft_variable_repository import (
    DraftVariableSaver,
    DraftVariableSaverFactory,
    NoopDraftVariableSaver,
)
from factories import file_factory
from services.workflow_draft_variable_service import DraftVariableSaver as DraftVariableSaverImpl

if TYPE_CHECKING:
    from core.app.app_config.entities import VariableEntity
@@ -159,3 +169,38 @@ class BaseAppGenerator:
            yield f"event: {message}\n\n"

        return gen()

    @final
    @staticmethod
    def _get_draft_var_saver_factory(invoke_from: InvokeFrom) -> DraftVariableSaverFactory:
        if invoke_from == InvokeFrom.DEBUGGER:

            def draft_var_saver_factory(
                session: Session,
                app_id: str,
                node_id: str,
                node_type: NodeType,
                node_execution_id: str,
                enclosing_node_id: str | None = None,
            ) -> DraftVariableSaver:
                return DraftVariableSaverImpl(
                    session=session,
                    app_id=app_id,
                    node_id=node_id,
                    node_type=node_type,
                    node_execution_id=node_execution_id,
                    enclosing_node_id=enclosing_node_id,
                )
        else:

            def draft_var_saver_factory(
                session: Session,
                app_id: str,
                node_id: str,
                node_type: NodeType,
                node_execution_id: str,
                enclosing_node_id: str | None = None,
            ) -> DraftVariableSaver:
                return NoopDraftVariableSaver()

        return draft_var_saver_factory
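The method above picks one of two locally defined closures and returns it, so callers get a saver factory without ever branching on `invoke_from` themselves. The same dispatch-once, call-many pattern in a self-contained sketch (names illustrative):

from collections.abc import Callable


def make_logger(debug: bool) -> Callable[[str], None]:
    if debug:

        def log(msg: str) -> None:
            print(f"[debug] {msg}")
    else:

        def log(msg: str) -> None:
            pass  # no-op outside debug mode, like NoopDraftVariableSaver

    return log


log = make_logger(debug=True)
log("saving draft variables")  # prints: [debug] saving draft variables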
@@ -44,6 +44,7 @@ from core.app.entities.task_entities import (
)
from core.file import FILE_MODEL_IDENTITY, File
from core.tools.tool_manager import ToolManager
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.workflow.entities.workflow_execution import WorkflowExecution
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecution, WorkflowNodeExecutionStatus
from core.workflow.nodes import NodeType
@ -506,7 +507,8 @@ class WorkflowResponseConverter:
|
|||
# Convert to tuple to match Sequence type
|
||||
return tuple(flattened_files)
|
||||
|
||||
def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
|
||||
@classmethod
|
||||
def _fetch_files_from_variable_value(cls, value: Union[dict, list, Segment]) -> Sequence[Mapping[str, Any]]:
|
||||
"""
|
||||
Fetch files from variable value
|
||||
:param value: variable value
|
||||
|
|
@@ -515,20 +517,30 @@ class WorkflowResponseConverter:
        if not value:
            return []

        files = []
        if isinstance(value, list):
        files: list[Mapping[str, Any]] = []
        if isinstance(value, FileSegment):
            files.append(value.value.to_dict())
        elif isinstance(value, ArrayFileSegment):
            files.extend([i.to_dict() for i in value.value])
        elif isinstance(value, File):
            files.append(value.to_dict())
        elif isinstance(value, list):
            for item in value:
                file = self._get_file_var_from_value(item)
                file = cls._get_file_var_from_value(item)
                if file:
                    files.append(file)
        elif isinstance(value, dict):
            file = self._get_file_var_from_value(value)
        elif isinstance(
            value,
            dict,
        ):
            file = cls._get_file_var_from_value(value)
            if file:
                files.append(file)

        return files

    def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
    @classmethod
    def _get_file_var_from_value(cls, value: Union[dict, list]) -> Mapping[str, Any] | None:
        """
        Get file var from value
        :param value: variable value
@@ -25,6 +25,7 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.variable_loader import DUMMY_VARIABLE_LOADER, VariableLoader
@@ -219,6 +220,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
        # new thread with request context and contextvars
        context = contextvars.copy_context()

        # release database connection, because the following new thread operations may take a long time
        db.session.close()

        worker_thread = threading.Thread(
            target=self._generate_worker,
            kwargs={
@@ -233,6 +237,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

        worker_thread.start()

        draft_var_saver_factory = self._get_draft_var_saver_factory(
            invoke_from,
        )

        # return response or stream generator
        response = self._handle_response(
            application_generate_entity=application_generate_entity,
@@ -241,6 +249,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            user=user,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            draft_var_saver_factory=draft_var_saver_factory,
            stream=streaming,
        )
@@ -471,6 +480,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        user: Union[Account, EndUser],
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        draft_var_saver_factory: DraftVariableSaverFactory,
        stream: bool = False,
    ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
        """
@@ -491,6 +501,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            user=user,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            draft_var_saver_factory=draft_var_saver_factory,
            stream=stream,
        )
@@ -56,6 +56,7 @@ from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.entities.workflow_execution import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
from core.workflow.enums import SystemVariableKey
from core.workflow.repositories.draft_variable_repository import DraftVariableSaverFactory
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import CycleManagerWorkflowInfo, WorkflowCycleManager
@@ -87,6 +88,7 @@ class WorkflowAppGenerateTaskPipeline:
        stream: bool,
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        draft_var_saver_factory: DraftVariableSaverFactory,
    ) -> None:
        self._base_task_pipeline = BasedGenerateTaskPipeline(
            application_generate_entity=application_generate_entity,
@@ -131,6 +133,8 @@ class WorkflowAppGenerateTaskPipeline:
        self._application_generate_entity = application_generate_entity
        self._workflow_features_dict = workflow.features_dict
        self._workflow_run_id = ""
        self._invoke_from = queue_manager._invoke_from
        self._draft_var_saver_factory = draft_var_saver_factory

    def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
        """
@@ -322,6 +326,8 @@ class WorkflowAppGenerateTaskPipeline:
                workflow_node_execution=workflow_node_execution,
            )

            self._save_output_for_event(event, workflow_node_execution.id)

            if node_success_response:
                yield node_success_response
        elif isinstance(
@@ -339,6 +345,8 @@ class WorkflowAppGenerateTaskPipeline:
                task_id=self._application_generate_entity.task_id,
                workflow_node_execution=workflow_node_execution,
            )
            if isinstance(event, QueueNodeExceptionEvent):
                self._save_output_for_event(event, workflow_node_execution.id)

            if node_failed_response:
                yield node_failed_response
@@ -593,3 +601,15 @@ class WorkflowAppGenerateTaskPipeline:
        )

        return response

    def _save_output_for_event(self, event: QueueNodeSucceededEvent | QueueNodeExceptionEvent, node_execution_id: str):
        with Session(db.engine) as session, session.begin():
            saver = self._draft_var_saver_factory(
                session=session,
                app_id=self._application_generate_entity.app_config.app_id,
                node_id=event.node_id,
                node_type=event.node_type,
                node_execution_id=node_execution_id,
                enclosing_node_id=event.in_loop_id or event.in_iteration_id,
            )
            saver.save(event.process_data, event.outputs)
@@ -1,8 +1,6 @@
from collections.abc import Mapping
from typing import Any, Optional, cast

from sqlalchemy.orm import Session

from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.apps.base_app_runner import AppRunner
from core.app.entities.queue_entities import (
@@ -35,7 +33,6 @@ from core.workflow.entities.variable_pool import VariablePool
from core.workflow.entities.workflow_node_execution import WorkflowNodeExecutionMetadataKey
from core.workflow.graph_engine.entities.event import (
    AgentLogEvent,
    BaseNodeEvent,
    GraphEngineEvent,
    GraphRunFailedEvent,
    GraphRunPartialSucceededEvent,
@@ -70,9 +67,6 @@ from core.workflow.workflow_entry import WorkflowEntry
from extensions.ext_database import db
from models.model import App
from models.workflow import Workflow
from services.workflow_draft_variable_service import (
    DraftVariableSaver,
)


class WorkflowBasedAppRunner(AppRunner):
@@ -400,7 +394,6 @@ class WorkflowBasedAppRunner(AppRunner):
                    in_loop_id=event.in_loop_id,
                )
            )
            self._save_draft_var_for_event(event)

        elif isinstance(event, NodeRunFailedEvent):
            self._publish_event(
@@ -464,7 +457,6 @@ class WorkflowBasedAppRunner(AppRunner):
                    in_loop_id=event.in_loop_id,
                )
            )
            self._save_draft_var_for_event(event)

        elif isinstance(event, NodeInIterationFailedEvent):
            self._publish_event(
@@ -718,30 +710,3 @@ class WorkflowBasedAppRunner(AppRunner):

    def _publish_event(self, event: AppQueueEvent) -> None:
        self.queue_manager.publish(event, PublishFrom.APPLICATION_MANAGER)

    def _save_draft_var_for_event(self, event: BaseNodeEvent):
        run_result = event.route_node_state.node_run_result
        if run_result is None:
            return
        process_data = run_result.process_data
        outputs = run_result.outputs
        with Session(bind=db.engine) as session, session.begin():
            draft_var_saver = DraftVariableSaver(
                session=session,
                app_id=self._get_app_id(),
                node_id=event.node_id,
                node_type=event.node_type,
                # FIXME(QuantumGhost): relying on the private state of queue_manager is not ideal.
                invoke_from=self.queue_manager._invoke_from,
                node_execution_id=event.id,
                enclosing_node_id=event.in_loop_id or event.in_iteration_id or None,
            )
            draft_var_saver.save(process_data=process_data, outputs=outputs)


def _remove_first_element_from_variable_string(key: str) -> str:
    """
    Remove the first element from the prefix.
    """
    prefix, remaining = key.split(".", maxsplit=1)
    return remaining
@@ -395,6 +395,7 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
        message.provider_response_latency = time.perf_counter() - self._start_at
        message.total_price = usage.total_price
        message.currency = usage.currency
        self._task_state.llm_result.usage.latency = message.provider_response_latency
        message.message_metadata = self._task_state.metadata.model_dump_json()

        if trace_manager:
@@ -83,6 +83,7 @@ class LangFuseDataTrace(BaseTraceInstance):
            metadata=metadata,
            session_id=trace_info.conversation_id,
            tags=["message", "workflow"],
            version=trace_info.workflow_run_version,
        )
        self.add_trace(langfuse_trace_data=trace_data)
        workflow_span_data = LangfuseSpan(
@@ -108,6 +109,7 @@ class LangFuseDataTrace(BaseTraceInstance):
            metadata=metadata,
            session_id=trace_info.conversation_id,
            tags=["workflow"],
            version=trace_info.workflow_run_version,
        )
        self.add_trace(langfuse_trace_data=trace_data)
@@ -172,37 +174,7 @@ class LangFuseDataTrace(BaseTraceInstance):
            }
        )

        # add span
        if trace_info.message_id:
            span_data = LangfuseSpan(
                id=node_execution_id,
                name=node_type,
                input=inputs,
                output=outputs,
                trace_id=trace_id,
                start_time=created_at,
                end_time=finished_at,
                metadata=metadata,
                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
                status_message=trace_info.error or "",
                parent_observation_id=trace_info.workflow_run_id,
            )
        else:
            span_data = LangfuseSpan(
                id=node_execution_id,
                name=node_type,
                input=inputs,
                output=outputs,
                trace_id=trace_id,
                start_time=created_at,
                end_time=finished_at,
                metadata=metadata,
                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
                status_message=trace_info.error or "",
            )

        self.add_span(langfuse_span_data=span_data)

        # add generation span
        if process_data and process_data.get("model_mode") == "chat":
            total_token = metadata.get("total_tokens", 0)
            prompt_tokens = 0
@@ -226,10 +198,10 @@ class LangFuseDataTrace(BaseTraceInstance):
            )

            node_generation_data = LangfuseGeneration(
                name="llm",
                id=node_execution_id,
                name=node_name,
                trace_id=trace_id,
                model=process_data.get("model_name"),
                parent_observation_id=node_execution_id,
                start_time=created_at,
                end_time=finished_at,
                input=inputs,
@@ -237,11 +209,30 @@ class LangFuseDataTrace(BaseTraceInstance):
                metadata=metadata,
                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
                status_message=trace_info.error or "",
                parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
                usage=generation_usage,
            )

            self.add_generation(langfuse_generation_data=node_generation_data)

        # add normal span
        else:
            span_data = LangfuseSpan(
                id=node_execution_id,
                name=node_name,
                input=inputs,
                output=outputs,
                trace_id=trace_id,
                start_time=created_at,
                end_time=finished_at,
                metadata=metadata,
                level=(LevelEnum.DEFAULT if status == "succeeded" else LevelEnum.ERROR),
                status_message=trace_info.error or "",
                parent_observation_id=trace_info.workflow_run_id if trace_info.message_id else None,
            )

            self.add_span(langfuse_span_data=span_data)

    def message_trace(self, trace_info: MessageTraceInfo, **kwargs):
        # get message file data
        file_list = trace_info.file_list
@@ -284,7 +275,7 @@ class LangFuseDataTrace(BaseTraceInstance):
        )
        self.add_trace(langfuse_trace_data=trace_data)

        # start add span
        # add generation
        generation_usage = GenerationUsage(
            input=trace_info.message_tokens,
            output=trace_info.answer_tokens,
@@ -42,4 +42,4 @@ class DynamicSelectClient(BasePluginClient):
        for options in response:
            return options

        raise ValueError("Plugin service returned no options")
        raise ValueError(f"Plugin service returned no options for parameter '{parameter}' in provider '{provider}'")
@@ -1010,6 +1010,9 @@ class DatasetRetrieval:
    def _process_metadata_filter_func(
        self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
    ):
        if value is None:
            return

        key = f"{metadata_name}_{sequence}"
        key_value = f"{metadata_name}_{sequence}_value"
        match condition:
@@ -4,6 +4,7 @@ from typing import Any, Optional
from core.helper.code_executor.code_executor import CodeExecutor, CodeLanguage
from core.tools.builtin_tool.tool import BuiltinTool
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.errors import ToolInvokeError


class SimpleCode(BuiltinTool):
@@ -25,6 +26,8 @@ class SimpleCode(BuiltinTool):
        if language not in {CodeLanguage.PYTHON3, CodeLanguage.JAVASCRIPT}:
            raise ValueError(f"Only python3 and javascript are supported, not {language}")

        result = CodeExecutor.execute_code(language, "", code)

        yield self.create_text_message(result)
        try:
            result = CodeExecutor.execute_code(language, "", code)
            yield self.create_text_message(result)
        except Exception as e:
            raise ToolInvokeError(str(e))
@@ -66,11 +66,21 @@ class WorkflowNodeExecution(BaseModel):
    but they are not stored in the model.
    """

    # Core identification fields
    id: str  # Unique identifier for this execution record
    node_execution_id: Optional[str] = None  # Optional secondary ID for cross-referencing
    # --------- Core identification fields ---------

    # Unique identifier for this execution record, used when persisting to storage.
    # Value is a UUID string (e.g., '09b3e04c-f9ae-404c-ad82-290b8d7bd382').
    id: str

    # Optional secondary ID for cross-referencing purposes.
    #
    # NOTE: For referencing the persisted record, use `id` rather than `node_execution_id`.
    # While `node_execution_id` may sometimes be a UUID string, this is not guaranteed.
    # In most scenarios, `id` should be used as the primary identifier.
    node_execution_id: Optional[str] = None
    workflow_id: str  # ID of the workflow this node belongs to
    workflow_execution_id: Optional[str] = None  # ID of the specific workflow run (null for single-step debugging)
    # --------- Core identification fields ends ---------

    # Execution positioning and flow
    index: int  # Sequence number for ordering in trace visualization
@@ -2,7 +2,6 @@ import logging
from collections.abc import Generator
from typing import cast

from core.file import FILE_MODEL_IDENTITY, File
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine.entities.event import (
    GraphEngineEvent,
@@ -201,44 +200,3 @@ class AnswerStreamProcessor(StreamProcessor):
                stream_out_answer_node_ids.append(answer_node_id)

        return stream_out_answer_node_ids

    @classmethod
    def _fetch_files_from_variable_value(cls, value: dict | list) -> list[dict]:
        """
        Fetch files from variable value
        :param value: variable value
        :return:
        """
        if not value:
            return []

        files = []
        if isinstance(value, list):
            for item in value:
                file_var = cls._get_file_var_from_value(item)
                if file_var:
                    files.append(file_var)
        elif isinstance(value, dict):
            file_var = cls._get_file_var_from_value(value)
            if file_var:
                files.append(file_var)

        return files

    @classmethod
    def _get_file_var_from_value(cls, value: dict | list):
        """
        Get file var from value
        :param value: variable value
        :return:
        """
        if not value:
            return None

        if isinstance(value, dict):
            if "dify_model_identity" in value and value["dify_model_identity"] == FILE_MODEL_IDENTITY:
                return value
        elif isinstance(value, File):
            return value.to_dict()

        return None
@@ -333,7 +333,7 @@ class Executor:
        try:
            response = getattr(ssrf_proxy, self.method.lower())(**request_args)
        except (ssrf_proxy.MaxRetriesExceededError, httpx.RequestError) as e:
            raise HttpRequestNodeError(str(e))
            raise HttpRequestNodeError(str(e)) from e
        # FIXME: fix type ignore, this may be an httpx type issue
        return response  # type: ignore
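The only change here is adding `from e`, which records the original exception as `__cause__`, so the traceback presents the SSRF-proxy failure as the direct cause of the node error rather than as an error raised "during handling" of another. A small runnable illustration:

class HttpRequestNodeError(Exception):
    pass


def fetch() -> None:
    raise TimeoutError("connect timed out")


try:
    try:
        fetch()
    except TimeoutError as e:
        # `from e` chains the low-level error onto the domain error
        raise HttpRequestNodeError(str(e)) from e
except HttpRequestNodeError as wrapped:
    assert isinstance(wrapped.__cause__, TimeoutError)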
@@ -490,6 +490,9 @@ class KnowledgeRetrievalNode(LLMNode):
    def _process_metadata_filter_func(
        self, sequence: int, condition: str, metadata_name: str, value: Optional[Any], filters: list
    ):
        if value is None:
            return

        key = f"{metadata_name}_{sequence}"
        key_value = f"{metadata_name}_{sequence}_value"
        match condition:
@@ -167,7 +167,9 @@ class ToolNode(BaseNode[ToolNodeData]):
            if tool_input.type == "variable":
                variable = variable_pool.get(tool_input.value)
                if variable is None:
                    raise ToolParameterError(f"Variable {tool_input.value} does not exist")
                    if parameter.required:
                        raise ToolParameterError(f"Variable {tool_input.value} does not exist")
                    continue
                parameter_value = variable.value
            elif tool_input.type in {"mixed", "constant"}:
                segment_group = variable_pool.convert_template(str(tool_input.value))
@@ -0,0 +1,32 @@
import abc
from collections.abc import Mapping
from typing import Any, Protocol

from sqlalchemy.orm import Session

from core.workflow.nodes.enums import NodeType


class DraftVariableSaver(Protocol):
    @abc.abstractmethod
    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
        pass


class DraftVariableSaverFactory(Protocol):
    @abc.abstractmethod
    def __call__(
        self,
        session: Session,
        app_id: str,
        node_id: str,
        node_type: NodeType,
        node_execution_id: str,
        enclosing_node_id: str | None = None,
    ) -> "DraftVariableSaver":
        pass


class NoopDraftVariableSaver(DraftVariableSaver):
    def save(self, process_data: Mapping[str, Any] | None, outputs: Mapping[str, Any] | None):
        pass
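These `Protocol` classes type-check implementations structurally: any object with a matching `save` (or `__call__`) signature satisfies the interface, whether or not it inherits from it. A stripped-down sketch of the idea (names here are illustrative, not Dify's):

from collections.abc import Mapping
from typing import Any, Protocol


class Saver(Protocol):
    def save(self, outputs: Mapping[str, Any] | None) -> None: ...


class PrintingSaver:  # note: no inheritance from Saver needed
    def save(self, outputs: Mapping[str, Any] | None) -> None:
        print("saving", outputs)


def flush(saver: Saver, outputs: Mapping[str, Any]) -> None:
    saver.save(outputs)  # accepted by type checkers via structural subtyping


flush(PrintingSaver(), {"answer": "42"})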
@@ -27,6 +27,7 @@ from core.workflow.enums import SystemVariableKey
from core.workflow.repositories.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repositories.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_entry import WorkflowEntry
from libs.datetime_utils import naive_utc_now


@dataclass
@@ -160,12 +161,13 @@ class WorkflowCycleManager:
        exceptions_count: int = 0,
    ) -> WorkflowExecution:
        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
        now = naive_utc_now()

        workflow_execution.status = WorkflowExecutionStatus(status.value)
        workflow_execution.error_message = error_message
        workflow_execution.total_tokens = total_tokens
        workflow_execution.total_steps = total_steps
        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_execution.finished_at = now
        workflow_execution.exceptions_count = exceptions_count

        # Use the instance repository to find running executions for a workflow run
@@ -174,7 +176,6 @@ class WorkflowCycleManager:
        )

        # Update the domain models
        now = datetime.now(UTC).replace(tzinfo=None)
        for node_execution in running_node_executions:
            if node_execution.node_execution_id:
                # Update the domain model
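The refactor replaces two inline `datetime.now(UTC).replace(tzinfo=None)` expressions with a single `now = naive_utc_now()` captured once, so the execution and its node records share an identical finish timestamp. Presumably the helper is equivalent to the sketch below (an assumption about `libs.datetime_utils`, inferred from the expression it replaces):

from datetime import UTC, datetime


def naive_utc_now() -> datetime:
    # current UTC wall-clock time with tzinfo stripped, matching the
    # inline expression the diff removes
    return datetime.now(UTC).replace(tzinfo=None)


print(naive_utc_now())  # e.g. 2024-07-01 12:34:56.789012 (no timezone attached)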
@@ -12,14 +12,14 @@ def init_app(app: DifyApp):
    @app.after_request
    def after_request(response):
        """Add Version headers to the response."""
        response.headers.add("X-Version", dify_config.CURRENT_VERSION)
        response.headers.add("X-Version", dify_config.project.version)
        response.headers.add("X-Env", dify_config.DEPLOY_ENV)
        return response

    @app.route("/health")
    def health():
        return Response(
            json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
            json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.project.version}),
            status=200,
            content_type="application/json",
        )
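Both call sites now read the version from the TOML-backed `project` settings instead of the old `CURRENT_VERSION` field. For reference, the header-stamping pattern in isolation — a minimal sketch with a hard-coded version string rather than Dify's config object:

from flask import Flask

app = Flask(__name__)


@app.after_request
def add_version_header(response):
    # runs after every view (including error responses); headers are still mutable here
    response.headers.add("X-Version", "1.5.1")
    return response


with app.test_client() as client:
    assert client.get("/nonexistent").headers["X-Version"] == "1.5.1"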
@@ -49,7 +49,7 @@ def init_app(app: DifyApp):
    logging.getLogger().addHandler(exception_handler)

def init_flask_instrumentor(app: DifyApp):
    meter = get_meter("http_metrics", version=dify_config.CURRENT_VERSION)
    meter = get_meter("http_metrics", version=dify_config.project.version)
    _http_response_counter = meter.create_counter(
        "http.server.response.count",
        description="Total number of HTTP responses by status code, method and target",
@@ -163,7 +163,7 @@ def init_app(app: DifyApp):
    resource = Resource(
        attributes={
            ResourceAttributes.SERVICE_NAME: dify_config.APPLICATION_NAME,
            ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
            ResourceAttributes.SERVICE_VERSION: f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
            ResourceAttributes.PROCESS_PID: os.getpid(),
            ResourceAttributes.DEPLOYMENT_ENVIRONMENT: f"{dify_config.DEPLOY_ENV}-{dify_config.EDITION}",
            ResourceAttributes.HOST_NAME: socket.gethostname(),
@@ -35,6 +35,6 @@ def init_app(app: DifyApp):
            traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
            profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
            environment=dify_config.DEPLOY_ENV,
            release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
            release=f"dify-{dify_config.project.version}-{dify_config.COMMIT_SHA}",
            before_send=before_send,
        )
@@ -0,0 +1,30 @@
from pathlib import Path


def search_file_upwards(
    base_dir_path: Path,
    target_file_name: str,
    max_search_parent_depth: int,
) -> Path:
    """
    Find a target file in the current directory or its parent directories up to a specified depth.

    :param base_dir_path: Starting directory path to search from.
    :param target_file_name: Name of the file to search for.
    :param max_search_parent_depth: Maximum number of parent directories to search upwards.
    :return: Path of the file if found; raises ValueError otherwise.
    """
    current_path = base_dir_path.resolve()
    for _ in range(max_search_parent_depth):
        candidate_path = current_path / target_file_name
        if candidate_path.is_file():
            return candidate_path
        parent_path = current_path.parent
        if parent_path == current_path:  # reached the root directory
            break
        else:
            current_path = parent_path

    raise ValueError(
        f"File '{target_file_name}' not found in the directory '{base_dir_path.resolve()}' or its parent directories"
        f" in depth of {max_search_parent_depth}."
    )
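This is the helper the DifyConfig hunk above relies on to locate pyproject.toml from within the configs package. A usage sketch — the import path is the one this commit adds, but the directory layout assumed in the comment is an inference:

from pathlib import Path

from libs.file_utils import search_file_upwards

# from configs/, pyproject.toml is expected one level up, so a depth of 2
# (the current directory plus one parent) is enough to find it
toml_path = search_file_upwards(
    base_dir_path=Path(__file__).parent,
    target_file_name="pyproject.toml",
    max_search_parent_depth=2,
)
print(toml_path)  # .../api/pyproject.toml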
@@ -140,7 +140,7 @@ class Dataset(Base):
    def word_count(self):
        return (
            db.session.query(Document)
            .with_entities(func.coalesce(func.sum(Document.word_count)))
            .with_entities(func.coalesce(func.sum(Document.word_count), 0))
            .filter(Document.dataset_id == self.id)
            .scalar()
        )
@@ -448,7 +448,7 @@ class Document(Base):
    def hit_count(self):
        return (
            db.session.query(DocumentSegment)
            .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count)))
            .with_entities(func.coalesce(func.sum(DocumentSegment.hit_count), 0))
            .filter(DocumentSegment.document_id == self.id)
            .scalar()
        )
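Both properties previously called `coalesce` with a single argument, which is a no-op: `SUM` over zero matching rows yields SQL NULL, so the property returned `None` instead of `0`. Supplying the `0` fallback fixes that. A compact demonstration of the generated SQL (table and column here are illustrative):

from sqlalchemy import Integer, column, func, select, table

documents = table("documents", column("word_count", Integer))

# SUM over an empty set is NULL; COALESCE(..., 0) pins the result to an integer
stmt = select(func.coalesce(func.sum(documents.c.word_count), 0))
print(stmt)  # a SELECT with COALESCE wrapped around SUM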
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
dynamic = ["version"]
version = "1.5.1"
requires-python = ">=3.11,<3.13"

dependencies = [
@@ -889,7 +889,7 @@ class RegisterService:

            TenantService.create_owner_tenant_if_not_exist(account=account, is_setup=True)

            dify_setup = DifySetup(version=dify_config.CURRENT_VERSION)
            dify_setup = DifySetup(version=dify_config.project.version)
            db.session.add(dify_setup)
            db.session.commit()
        except Exception as e:
@@ -154,7 +154,7 @@ class WorkflowDraftVariableService:
        variables = (
            # Do not load the `value` field.
            query.options(orm.defer(WorkflowDraftVariable.value))
            .order_by(WorkflowDraftVariable.id.desc())
            .order_by(WorkflowDraftVariable.created_at.desc())
            .limit(limit)
            .offset((page - 1) * limit)
            .all()
@@ -168,7 +168,7 @@ class WorkflowDraftVariableService:
            WorkflowDraftVariable.node_id == node_id,
        )
        query = self._session.query(WorkflowDraftVariable).filter(*criteria)
        variables = query.order_by(WorkflowDraftVariable.id.desc()).all()
        variables = query.order_by(WorkflowDraftVariable.created_at.desc()).all()
        return WorkflowDraftVariableList(variables=variables)

    def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList:
@@ -235,7 +235,9 @@ class WorkflowDraftVariableService:
        self._session.flush()
        return variable

    def _reset_node_var(self, workflow: Workflow, variable: WorkflowDraftVariable) -> WorkflowDraftVariable | None:
    def _reset_node_var_or_sys_var(
        self, workflow: Workflow, variable: WorkflowDraftVariable
    ) -> WorkflowDraftVariable | None:
        # If a variable does not allow updating, it makes no sense to reset it.
        if not variable.editable:
            return variable
@@ -259,28 +261,35 @@ class WorkflowDraftVariableService:
            self._session.flush()
            return None

        # Get node type for proper value extraction
        node_config = workflow.get_node_config_by_id(variable.node_id)
        node_type = workflow.get_node_type_from_node_config(node_config)

        outputs_dict = node_exec.outputs_dict or {}
        # a sentinel value used to check the absence of the output variable key.
        absent = object()

        # Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
        # VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
        # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
        #
        # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
        # and `save` methods.
        if node_type == NodeType.VARIABLE_ASSIGNER:
            return variable
        if variable.get_variable_type() == DraftVariableType.NODE:
            # Get node type for proper value extraction
            node_config = workflow.get_node_config_by_id(variable.node_id)
            node_type = workflow.get_node_type_from_node_config(node_config)

        if variable.name not in outputs_dict:
            # Note: Based on the implementation in `_build_from_variable_assigner_mapping`,
            # VariableAssignerNode (both v1 and v2) can only create conversation draft variables.
            # For consistency, we should simply return when processing VARIABLE_ASSIGNER nodes.
            #
            # This implementation must remain synchronized with the `_build_from_variable_assigner_mapping`
            # and `save` methods.
            if node_type == NodeType.VARIABLE_ASSIGNER:
                return variable
            output_value = outputs_dict.get(variable.name, absent)
        else:
            output_value = outputs_dict.get(f"sys.{variable.name}", absent)

        # We cannot use `is None` to check the existence of an output variable here as
        # the value of the output may be `None`.
        if output_value is absent:
            # If variable not found in execution data, delete the variable
            self._session.delete(instance=variable)
            self._session.flush()
            return None
        value = outputs_dict[variable.name]
        value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, value)
        value_seg = WorkflowDraftVariable.build_segment_with_type(variable.value_type, output_value)
        # Extract variable value using unified logic
        variable.set_value(value_seg)
        variable.last_edited_at = None  # Reset to indicate this is a reset operation
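The `absent = object()` sentinel matters because `outputs_dict.get(name)` returning `None` is ambiguous: the key may be missing, or present with a literal `None` value. A fresh `object()` is identical to nothing else, so an `is` check cleanly separates the two cases:

absent = object()  # unique sentinel; no stored value can be `is`-identical to it

outputs = {"answer": None}  # key exists, value is genuinely None

for key in ("answer", "missing"):
    value = outputs.get(key, absent)
    if value is absent:
        print(f"{key!r}: key not present")
    else:
        print(f"{key!r}: present, value={value!r}")

# 'answer': present, value=None
# 'missing': key not present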
@@ -291,10 +300,8 @@ class WorkflowDraftVariableService:
        variable_type = variable.get_variable_type()
        if variable_type == DraftVariableType.CONVERSATION:
            return self._reset_conv_var(workflow, variable)
        elif variable_type == DraftVariableType.NODE:
            return self._reset_node_var(workflow, variable)
        else:
            raise VariableResetError(f"cannot reset system variable, variable_id={variable.id}")
        return self._reset_node_var_or_sys_var(workflow, variable)

    def delete_variable(self, variable: WorkflowDraftVariable):
        self._session.delete(variable)
@@ -439,6 +446,9 @@ def _batch_upsert_draft_varaible(
    stmt = stmt.on_conflict_do_update(
        index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(),
        set_={
            # Refresh creation timestamp to ensure updated variables
            # appear first in chronologically sorted result sets.
            "created_at": stmt.excluded.created_at,
            "updated_at": stmt.excluded.updated_at,
            "last_edited_at": stmt.excluded.last_edited_at,
            "description": stmt.excluded.description,
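This hunk (together with the `created_at`-based ordering change above) makes an upserted variable look newly created, so it sorts to the top of chronologically ordered listings. `excluded` refers to the incoming row that hit the unique constraint. A self-contained sketch of the PostgreSQL upsert idiom (the table and columns are illustrative, not Dify's model):

from datetime import datetime

from sqlalchemy import Column, DateTime, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert

metadata = MetaData()
draft_variables = Table(
    "draft_variables",
    metadata,
    Column("app_id", String, primary_key=True),
    Column("name", String, primary_key=True),
    Column("created_at", DateTime),
)

stmt = insert(draft_variables).values(app_id="app-1", name="answer", created_at=datetime.now())
# `excluded` is the conflicting incoming row; copying its created_at refreshes
# the timestamp on every upsert, mirroring the change above
stmt = stmt.on_conflict_do_update(
    index_elements=["app_id", "name"],
    set_={"created_at": stmt.excluded.created_at},
)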
@@ -525,9 +535,6 @@ class DraftVariableSaver:
    # The type of the current node (see NodeType).
    _node_type: NodeType

    # Indicates how the workflow execution was triggered (see InvokeFrom).
    _invoke_from: InvokeFrom

    #
    _node_execution_id: str
@@ -546,15 +553,16 @@ class DraftVariableSaver:
        app_id: str,
        node_id: str,
        node_type: NodeType,
        invoke_from: InvokeFrom,
        node_execution_id: str,
        enclosing_node_id: str | None = None,
    ):
        # Important: `node_execution_id` parameter refers to the primary key (`id`) of the
        # WorkflowNodeExecutionModel/WorkflowNodeExecution, not their `node_execution_id`
        # field. These are distinct database fields with different purposes.
        self._session = session
        self._app_id = app_id
        self._node_id = node_id
        self._node_type = node_type
        self._invoke_from = invoke_from
        self._node_execution_id = node_execution_id
        self._enclosing_node_id = enclosing_node_id
@@ -570,9 +578,6 @@ class DraftVariableSaver:
        )

    def _should_save_output_variables_for_draft(self) -> bool:
        # Only save output variables for debugging execution of workflow.
        if self._invoke_from != InvokeFrom.DEBUGGER:
            return False
        if self._enclosing_node_id is not None and self._node_type != NodeType.VARIABLE_ASSIGNER:
            # Currently we do not save output variables for nodes inside loop or iteration.
            return False
@@ -12,7 +12,6 @@ from sqlalchemy.orm import Session
from core.app.app_config.entities import VariableEntityType
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.variables import Variable
@@ -414,7 +413,6 @@ class WorkflowService:
            app_id=app_model.id,
            node_id=workflow_node_execution.node_id,
            node_type=NodeType(workflow_node_execution.node_type),
            invoke_from=InvokeFrom.DEBUGGER,
            enclosing_node_id=enclosing_node_id,
            node_execution_id=node_execution.id,
        )
@@ -0,0 +1,259 @@
from collections.abc import Mapping, Sequence

from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType
from core.variables.segments import ArrayFileSegment, FileSegment


class TestWorkflowResponseConverterFetchFilesFromVariableValue:
    """Test class for WorkflowResponseConverter._fetch_files_from_variable_value method"""

    def create_test_file(self, file_id: str = "test_file_1") -> File:
        """Create a test File object"""
        return File(
            id=file_id,
            tenant_id="test_tenant",
            type=FileType.DOCUMENT,
            transfer_method=FileTransferMethod.LOCAL_FILE,
            related_id="related_123",
            filename=f"{file_id}.txt",
            extension=".txt",
            mime_type="text/plain",
            size=1024,
            storage_key="storage_key_123",
        )

    def create_file_dict(self, file_id: str = "test_file_dict") -> dict:
        """Create a file dictionary with correct dify_model_identity"""
        return {
            "dify_model_identity": FILE_MODEL_IDENTITY,
            "id": file_id,
            "tenant_id": "test_tenant",
            "type": "document",
            "transfer_method": "local_file",
            "related_id": "related_456",
            "filename": f"{file_id}.txt",
            "extension": ".txt",
            "mime_type": "text/plain",
            "size": 2048,
            "url": "http://example.com/file.txt",
        }

    def test_fetch_files_from_variable_value_with_none(self):
        """Test with None input"""
        # The method signature expects Union[dict, list, Segment], but the implementation
        # also tolerates None, so we pass None directly to exercise that path
        result = WorkflowResponseConverter._fetch_files_from_variable_value(None)  # type: ignore
        assert result == []

    def test_fetch_files_from_variable_value_with_empty_dict(self):
        """Test with empty dictionary"""
        result = WorkflowResponseConverter._fetch_files_from_variable_value({})
        assert result == []

    def test_fetch_files_from_variable_value_with_empty_list(self):
        """Test with empty list"""
        result = WorkflowResponseConverter._fetch_files_from_variable_value([])
        assert result == []

    def test_fetch_files_from_variable_value_with_file_segment(self):
        """Test with valid FileSegment"""
        test_file = self.create_test_file("segment_file")
        file_segment = FileSegment(value=test_file)

        result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)

        assert len(result) == 1
        assert isinstance(result[0], dict)
        assert result[0]["id"] == "segment_file"
        assert result[0]["dify_model_identity"] == FILE_MODEL_IDENTITY

    def test_fetch_files_from_variable_value_with_array_file_segment_single(self):
        """Test with ArrayFileSegment containing single file"""
        test_file = self.create_test_file("array_file_1")
        array_segment = ArrayFileSegment(value=[test_file])

        result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)

        assert len(result) == 1
        assert isinstance(result[0], dict)
        assert result[0]["id"] == "array_file_1"

    def test_fetch_files_from_variable_value_with_array_file_segment_multiple(self):
        """Test with ArrayFileSegment containing multiple files"""
        test_file_1 = self.create_test_file("array_file_1")
        test_file_2 = self.create_test_file("array_file_2")
        array_segment = ArrayFileSegment(value=[test_file_1, test_file_2])

        result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)

        assert len(result) == 2
        assert result[0]["id"] == "array_file_1"
        assert result[1]["id"] == "array_file_2"

    def test_fetch_files_from_variable_value_with_array_file_segment_empty(self):
        """Test with ArrayFileSegment containing empty array"""
        array_segment = ArrayFileSegment(value=[])

        result = WorkflowResponseConverter._fetch_files_from_variable_value(array_segment)

        assert result == []

    def test_fetch_files_from_variable_value_with_list_of_file_dicts(self):
        """Test with list containing file dictionaries"""
        file_dict_1 = self.create_file_dict("list_file_1")
        file_dict_2 = self.create_file_dict("list_file_2")
        test_list = [file_dict_1, file_dict_2]

        result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)

        assert len(result) == 2
        assert result[0]["id"] == "list_file_1"
        assert result[1]["id"] == "list_file_2"

    def test_fetch_files_from_variable_value_with_list_of_file_objects(self):
        """Test with list containing File objects"""
        file_obj_1 = self.create_test_file("list_obj_1")
        file_obj_2 = self.create_test_file("list_obj_2")
        test_list = [file_obj_1, file_obj_2]

        result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)

        assert len(result) == 2
        assert result[0]["id"] == "list_obj_1"
        assert result[1]["id"] == "list_obj_2"

    def test_fetch_files_from_variable_value_with_list_mixed_valid_invalid(self):
        """Test with list containing mix of valid files and invalid items"""
        file_dict = self.create_file_dict("mixed_file")
        invalid_dict = {"not_a_file": "value"}
        test_list = [file_dict, invalid_dict, "string_item", 123]

        result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)

        assert len(result) == 1
        assert result[0]["id"] == "mixed_file"

    def test_fetch_files_from_variable_value_with_list_nested_structures(self):
        """Test with list containing nested structures"""
        file_dict = self.create_file_dict("nested_file")
        nested_list = [file_dict, ["inner_list"]]
        test_list = [nested_list, {"nested": "dict"}]

        result = WorkflowResponseConverter._fetch_files_from_variable_value(test_list)

        # Should not process nested structures in list items
        assert result == []

    def test_fetch_files_from_variable_value_with_dict_incorrect_identity(self):
        """Test with dictionary having incorrect dify_model_identity"""
        invalid_dict = {"dify_model_identity": "wrong_identity", "id": "invalid_file", "filename": "test.txt"}

        result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)

        assert result == []

    def test_fetch_files_from_variable_value_with_dict_missing_identity(self):
        """Test with dictionary missing dify_model_identity"""
        invalid_dict = {"id": "no_identity_file", "filename": "test.txt"}

        result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)

        assert result == []

    def test_fetch_files_from_variable_value_with_dict_file_object(self):
        """Test with dictionary containing File object"""
        file_obj = self.create_test_file("dict_obj_file")
        test_dict = {"file_key": file_obj}

        result = WorkflowResponseConverter._fetch_files_from_variable_value(test_dict)

        # Should not extract File objects from dict values
        assert result == []

    def test_fetch_files_from_variable_value_with_mixed_data_types(self):
        """Test with various mixed data types"""
        mixed_data = {"string": "text", "number": 42, "boolean": True, "null": None, "dify_model_identity": "wrong"}

        result = WorkflowResponseConverter._fetch_files_from_variable_value(mixed_data)

        assert result == []

    def test_fetch_files_from_variable_value_with_invalid_objects(self):
        """Test with invalid objects that are not supported types"""
        # Test with an invalid dict that doesn't match expected patterns
        invalid_dict = {"custom_key": "custom_value"}

        result = WorkflowResponseConverter._fetch_files_from_variable_value(invalid_dict)

        assert result == []

    def test_fetch_files_from_variable_value_with_string_input(self):
        """Test with string input (unsupported type)"""
        # Since method expects Union[dict, list, Segment], test with empty list instead
        result = WorkflowResponseConverter._fetch_files_from_variable_value([])

        assert result == []

    def test_fetch_files_from_variable_value_with_number_input(self):
        """Test with number input (unsupported type)"""
        # Test with list containing numbers (should be ignored)
        result = WorkflowResponseConverter._fetch_files_from_variable_value([42, "string", None])

        assert result == []

    def test_fetch_files_from_variable_value_return_type_is_sequence(self):
        """Test that return type is Sequence[Mapping[str, Any]]"""
        file_dict = self.create_file_dict("type_test_file")

        result = WorkflowResponseConverter._fetch_files_from_variable_value(file_dict)

        assert isinstance(result, Sequence)
        assert len(result) == 1
        assert isinstance(result[0], Mapping)
        assert all(isinstance(key, str) for key in result[0])

    def test_fetch_files_from_variable_value_preserves_file_properties(self):
        """Test that all file properties are preserved in the result"""
        original_file = self.create_test_file("property_test")
        file_segment = FileSegment(value=original_file)

        result = WorkflowResponseConverter._fetch_files_from_variable_value(file_segment)

        assert len(result) == 1
        file_dict = result[0]
        assert file_dict["id"] == "property_test"
        assert file_dict["tenant_id"] == "test_tenant"
        assert file_dict["type"] == "document"
        assert file_dict["transfer_method"] == "local_file"
        assert file_dict["filename"] == "property_test.txt"
        assert file_dict["extension"] == ".txt"
        assert file_dict["mime_type"] == "text/plain"
        assert file_dict["size"] == 1024

    def test_fetch_files_from_variable_value_with_complex_nested_scenario(self):
        """Test complex scenario with nested valid and invalid data"""
        file_dict = self.create_file_dict("complex_file")
        file_obj = self.create_test_file("complex_obj")

        # Complex nested structure
        complex_data = [
            file_dict,  # Valid file dict
            file_obj,  # Valid file object
            {  # Invalid dict
                "not_file": "data",
                "nested": {"deep": "value"},
            },
            [  # Nested list (should be ignored)
                self.create_file_dict("nested_file")
            ],
            "string",  # Invalid string
            None,  # None value
            42,  # Invalid number
        ]

        result = WorkflowResponseConverter._fetch_files_from_variable_value(complex_data)

        assert len(result) == 2
        assert result[0]["id"] == "complex_file"
        assert result[1]["id"] == "complex_obj"
@ -6,12 +6,11 @@ from unittest.mock import Mock, patch
import pytest
from sqlalchemy.orm import Session

from core.app.entities.app_invoke_entities import InvokeFrom
from core.variables.types import SegmentType
from core.variables import StringSegment
from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
from core.workflow.nodes import NodeType
from models.enums import DraftVariableType
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel
from models.workflow import Workflow, WorkflowDraftVariable, WorkflowNodeExecutionModel, is_system_variable_editable
from services.workflow_draft_variable_service import (
    DraftVariableSaver,
    VariableResetError,

@ -32,7 +31,6 @@ class TestDraftVariableSaver:
            app_id=test_app_id,
            node_id="test_node_id",
            node_type=NodeType.START,
            invoke_from=InvokeFrom.DEBUGGER,
            node_execution_id="test_execution_id",
        )
        assert saver._should_variable_be_visible("123_456", NodeType.IF_ELSE, "output") == False

@ -79,7 +77,6 @@ class TestDraftVariableSaver:
            app_id=test_app_id,
            node_id=_NODE_ID,
            node_type=NodeType.START,
            invoke_from=InvokeFrom.DEBUGGER,
            node_execution_id="test_execution_id",
        )
        for idx, c in enumerate(cases, 1):

@ -94,45 +91,70 @@ class TestWorkflowDraftVariableService:
        suffix = secrets.token_hex(6)
        return f"test_app_id_{suffix}"

    def _create_test_workflow(self, app_id: str) -> Workflow:
        """Create a real Workflow instance for testing"""
        return Workflow.new(
            tenant_id="test_tenant_id",
            app_id=app_id,
            type="workflow",
            version="draft",
            graph='{"nodes": [], "edges": []}',
            features="{}",
            created_by="test_user_id",
            environment_variables=[],
            conversation_variables=[],
        )

    def test_reset_conversation_variable(self):
        """Test resetting a conversation variable"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.CONVERSATION
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real conversation variable
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id, name="test_var", value=test_value, description="Test conversation variable"
        )

        # Mock the _reset_conv_var method
        expected_result = Mock(spec=WorkflowDraftVariable)
        expected_result = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id,
            name="test_var",
            value=StringSegment(value="reset_value"),
        )
        with patch.object(service, "_reset_conv_var", return_value=expected_result) as mock_reset_conv:
            result = service.reset_variable(mock_workflow, mock_variable)
            result = service.reset_variable(workflow, variable)

            mock_reset_conv.assert_called_once_with(mock_workflow, mock_variable)
            mock_reset_conv.assert_called_once_with(workflow, variable)
            assert result == expected_result

    def test_reset_node_variable_with_no_execution_id(self):
        """Test resetting a node variable with no execution ID - should delete variable"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with no execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = None
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        result = service._reset_node_var(mock_workflow, mock_variable)
        # Create real node variable with no execution ID
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id,
            node_id="test_node_id",
            name="test_var",
            value=test_value,
            node_execution_id="exec-id",  # Set initially
        )
        # Manually set to None to simulate the test condition
        variable.node_execution_id = None

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should delete the variable and return None
        mock_session.delete.assert_called_once_with(instance=mock_variable)
        mock_session.delete.assert_called_once_with(instance=variable)
        mock_session.flush.assert_called_once()
        assert result is None

@ -140,25 +162,25 @@ class TestWorkflowDraftVariableService:
        """Test resetting a node variable when execution record doesn't exist"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = "exec-id"
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real node variable with execution ID
        test_value = StringSegment(value="test_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
        )

        # Mock session.scalars to return None (no execution record found)
        mock_scalars = Mock()
        mock_scalars.first.return_value = None
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var(mock_workflow, mock_variable)
        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should delete the variable and return None
        mock_session.delete.assert_called_once_with(instance=mock_variable)
        mock_session.delete.assert_called_once_with(instance=variable)
        mock_session.flush.assert_called_once()
        assert result is None

@ -166,17 +188,15 @@ class TestWorkflowDraftVariableService:
        """Test resetting a node variable with valid execution record - should restore from execution"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        # Create mock variable with execution ID
        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.NODE
        mock_variable.node_execution_id = "exec-id"
        mock_variable.id = "var-id"
        mock_variable.name = "test_var"
        mock_variable.node_id = "node-id"
        mock_variable.value_type = SegmentType.STRING
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create real node variable with execution ID
        test_value = StringSegment(value="original_value")
        variable = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id, node_id="test_node_id", name="test_var", value=test_value, node_execution_id="exec-id"
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)

@ -190,33 +210,164 @@ class TestWorkflowDraftVariableService:

        # Mock workflow methods
        mock_node_config = {"type": "test_node"}
        mock_workflow.get_node_config_by_id.return_value = mock_node_config
        mock_workflow.get_node_type_from_node_config.return_value = NodeType.LLM
        with (
            patch.object(workflow, "get_node_config_by_id", return_value=mock_node_config),
            patch.object(workflow, "get_node_type_from_node_config", return_value=NodeType.LLM),
        ):
            result = service._reset_node_var_or_sys_var(workflow, variable)

        result = service._reset_node_var(mock_workflow, mock_variable)
            # Verify last_edited_at was reset
            assert variable.last_edited_at is None
            # Verify session.flush was called
            mock_session.flush.assert_called()

        # Verify variable.set_value was called with the correct value
        mock_variable.set_value.assert_called_once()
        # Verify last_edited_at was reset
        assert mock_variable.last_edited_at is None
        # Verify session.flush was called
        mock_session.flush.assert_called()
            # Should return the updated variable
            assert result == variable

        # Should return the updated variable
        assert result == mock_variable

    def test_reset_system_variable_raises_error(self):
        """Test that resetting a system variable raises an error"""
    def test_reset_non_editable_system_variable_raises_error(self):
        """Test that resetting a non-editable system variable raises an error"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)
        mock_workflow = Mock(spec=Workflow)
        mock_workflow.app_id = self._get_test_app_id()

        mock_variable = Mock(spec=WorkflowDraftVariable)
        mock_variable.get_variable_type.return_value = DraftVariableType.SYS  # Not a valid enum value for this test
        mock_variable.id = "var-id"
        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        with pytest.raises(VariableResetError) as exc_info:
            service.reset_variable(mock_workflow, mock_variable)
        assert "cannot reset system variable" in str(exc_info.value)
        assert "variable_id=var-id" in str(exc_info.value)
        # Create a non-editable system variable (workflow_id is not editable)
        test_value = StringSegment(value="test_workflow_id")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="workflow_id",  # This is not in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=False,  # Non-editable system variable
        )

        # Mock the service to properly check system variable editability
        with patch.object(service, "reset_variable") as mock_reset:

            def side_effect(wf, var):
                if var.get_variable_type() == DraftVariableType.SYS and not is_system_variable_editable(var.name):
                    raise VariableResetError(f"cannot reset system variable, variable_id={var.id}")
                return var

            mock_reset.side_effect = side_effect

            with pytest.raises(VariableResetError) as exc_info:
                service.reset_variable(workflow, variable)
            assert "cannot reset system variable" in str(exc_info.value)
            assert f"variable_id={variable.id}" in str(exc_info.value)

    def test_reset_editable_system_variable_succeeds(self):
        """Test that resetting an editable system variable succeeds"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)

        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create an editable system variable (files is editable)
        test_value = StringSegment(value="[]")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="files",  # This is in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=True,  # Editable system variable
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)
        mock_execution.outputs_dict = {"sys.files": "[]"}

        # Mock session.scalars to return the execution record
        mock_scalars = Mock()
        mock_scalars.first.return_value = mock_execution
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should succeed and return the variable
        assert result == variable
        assert variable.last_edited_at is None
        mock_session.flush.assert_called()

    def test_reset_query_system_variable_succeeds(self):
        """Test that resetting query system variable (another editable one) succeeds"""
        mock_session = Mock(spec=Session)
        service = WorkflowDraftVariableService(mock_session)

        test_app_id = self._get_test_app_id()
        workflow = self._create_test_workflow(test_app_id)

        # Create an editable system variable (query is editable)
        test_value = StringSegment(value="original query")
        variable = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id,
            name="query",  # This is in _EDITABLE_SYSTEM_VARIABLE
            value=test_value,
            node_execution_id="exec-id",
            editable=True,  # Editable system variable
        )

        # Create mock execution record
        mock_execution = Mock(spec=WorkflowNodeExecutionModel)
        mock_execution.outputs_dict = {"sys.query": "reset query"}

        # Mock session.scalars to return the execution record
        mock_scalars = Mock()
        mock_scalars.first.return_value = mock_execution
        mock_session.scalars.return_value = mock_scalars

        result = service._reset_node_var_or_sys_var(workflow, variable)

        # Should succeed and return the variable
        assert result == variable
        assert variable.last_edited_at is None
        mock_session.flush.assert_called()

    def test_system_variable_editability_check(self):
        """Test the system variable editability function directly"""
        # Test editable system variables
        assert is_system_variable_editable("files") == True
        assert is_system_variable_editable("query") == True

        # Test non-editable system variables
        assert is_system_variable_editable("workflow_id") == False
        assert is_system_variable_editable("conversation_id") == False
        assert is_system_variable_editable("user_id") == False

    def test_workflow_draft_variable_factory_methods(self):
        """Test that factory methods create proper instances"""
        test_app_id = self._get_test_app_id()
        test_value = StringSegment(value="test_value")

        # Test conversation variable factory
        conv_var = WorkflowDraftVariable.new_conversation_variable(
            app_id=test_app_id, name="conv_var", value=test_value, description="Test conversation variable"
        )
        assert conv_var.get_variable_type() == DraftVariableType.CONVERSATION
        assert conv_var.editable == True
        assert conv_var.node_execution_id is None

        # Test system variable factory
        sys_var = WorkflowDraftVariable.new_sys_variable(
            app_id=test_app_id, name="workflow_id", value=test_value, node_execution_id="exec-id", editable=False
        )
        assert sys_var.get_variable_type() == DraftVariableType.SYS
        assert sys_var.editable == False
        assert sys_var.node_execution_id == "exec-id"

        # Test node variable factory
        node_var = WorkflowDraftVariable.new_node_variable(
            app_id=test_app_id,
            node_id="node-id",
            name="node_var",
            value=test_value,
            node_execution_id="exec-id",
            visible=True,
            editable=True,
        )
        assert node_var.get_variable_type() == DraftVariableType.NODE
        assert node_var.visible == True
        assert node_var.editable == True
        assert node_var.node_execution_id == "exec-id"

api/uv.lock: 4273 changed lines — file diff suppressed because it is too large.

@ -7,4 +7,4 @@ cd "$SCRIPT_DIR/.."

# run mypy checks
uv run --directory api --dev --with pip \
    python -m mypy --install-types --non-interactive ./
    python -m mypy --install-types --non-interactive --exclude venv ./

@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -31,7 +31,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -57,7 +57,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.5.0
    image: langgenius/dify-web:1.5.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@ -517,7 +517,7 @@ x-shared-env: &shared-api-worker-env
services:
  # API service
  api:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -546,7 +546,7 @@ services:
  # worker service
  # The Celery worker for processing the queue.
  worker:
    image: langgenius/dify-api:1.5.0
    image: langgenius/dify-api:1.5.1
    restart: always
    environment:
      # Use the shared environment variables.

@ -572,7 +572,7 @@ services:

  # Frontend web application.
  web:
    image: langgenius/dify-web:1.5.0
    image: langgenius/dify-web:1.5.1
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@ -80,6 +80,8 @@ import {
import PluginDependency from '@/app/components/workflow/plugin-dependency'
import { supportFunctionCall } from '@/utils/tool-call'
import { MittProvider } from '@/context/mitt-context'
import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params'
import Toast from '@/app/components/base/toast'

type PublishConfig = {
  modelConfig: ModelConfig

@ -453,7 +455,21 @@ const Configuration: FC = () => {
      ...visionConfig,
      enabled: supportVision,
    }, true)
    setCompletionParams({})

    try {
      const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams(
        provider,
        modelId,
        completionParams,
      )
      if (Object.keys(removedDetails).length)
        Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${Object.entries(removedDetails).map(([k, reason]) => `${k} (${reason})`).join(', ')}` })
      setCompletionParams(filtered)
    }
    catch (e) {
      Toast.notify({ type: 'error', message: t('common.error') })
      setCompletionParams({})
    }
  }

  const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)

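Note on the hunk above: instead of unconditionally resetting completion params to `{}` on a model switch, the handler now merges the previous params against the new model's parameter rules via fetchAndMergeValidCompletionParams (imported earlier in this diff). A minimal sketch of the call pattern — the wrapper name and logging are invented for illustration; only the helper and its { params, removedDetails } result shape come from the diff:

// Hypothetical wrapper: keep only completion params the new model still accepts.
async function mergeParamsOnModelSwitch(
  provider: string,
  modelId: string,
  prevParams: Record<string, any>,
): Promise<Record<string, any>> {
  // removedDetails maps each dropped parameter name to the reason it was dropped.
  const { params, removedDetails } = await fetchAndMergeValidCompletionParams(provider, modelId, prevParams)
  if (Object.keys(removedDetails).length)
    console.warn('Dropped invalid completion params:', removedDetails) // the component surfaces this via Toast
  return params // previously the code fell back to {} unconditionally
}
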
@ -0,0 +1,27 @@
'use client'
import type { FC } from 'react'
import React from 'react'
import { RiRefreshLine } from '@remixicon/react'
import cn from '@/utils/classnames'
import TooltipPlus from '@/app/components/base/tooltip'

type Props = {
  className?: string,
  popupContent?: string,
  onClick: () => void
}

const SyncButton: FC<Props> = ({
  className,
  popupContent = '',
  onClick,
}) => {
  return (
    <TooltipPlus popupContent={popupContent}>
      <div className={cn(className, 'cursor-pointer select-none rounded-md p-1 hover:bg-state-base-hover')} onClick={onClick}>
        <RiRefreshLine className='h-4 w-4 text-text-tertiary' />
      </div>
    </TooltipPlus>
  )
}
export default React.memo(SyncButton)

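The new SyncButton is wired up later in this commit (in the code node panel). A minimal standalone usage sketch, with the handler invented for illustration:

import type { FC } from 'react'
import SyncButton from '@/app/components/base/button/sync-button'

// Hypothetical handler: e.g. regenerate main()'s signature from the current inputs.
const handleSync = () => console.log('sync requested')

const Example: FC = () => (
  <SyncButton popupContent='Sync function signature to code' onClick={handleSync} />
)
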
@ -11,6 +11,7 @@ export const preprocessLaTeX = (content: string) => {

  const codeBlockRegex = /```[\s\S]*?```/g
  const codeBlocks = content.match(codeBlockRegex) || []
  const escapeReplacement = (str: string) => str.replace(/\$/g, '_TMP_REPLACE_DOLLAR_')
  let processedContent = content.replace(codeBlockRegex, 'CODE_BLOCK_PLACEHOLDER')

  processedContent = flow([

@ -21,9 +22,11 @@ export const preprocessLaTeX = (content: string) => {
  ])(processedContent)

  codeBlocks.forEach((block) => {
    processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', block)
    processedContent = processedContent.replace('CODE_BLOCK_PLACEHOLDER', escapeReplacement(block))
  })

  processedContent = processedContent.replace(/_TMP_REPLACE_DOLLAR_/g, '$')

  return processedContent
}

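Why the escapeReplacement round-trip above is needed, as I read it: '$' is special in the replacement argument of String.prototype.replace ('$&' expands to the matched text, '$1' to a capture group, and so on), so splicing a code block that contains dollar signs back in verbatim could corrupt it. A self-contained illustration with made-up strings:

const before = 'text CODE_BLOCK_PLACEHOLDER text'
// Naive restore: '$&' inside the block expands to the matched substring.
before.replace('CODE_BLOCK_PLACEHOLDER', 'cost is $& today')
// -> 'text cost is CODE_BLOCK_PLACEHOLDER today text'

// The sentinel round-trip keeps the '$' literal:
const escaped = 'cost is $& today'.replace(/\$/g, '_TMP_REPLACE_DOLLAR_')
before.replace('CODE_BLOCK_PLACEHOLDER', escaped).replace(/_TMP_REPLACE_DOLLAR_/g, '$')
// -> 'text cost is $& today text'
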
@ -30,6 +30,7 @@ import useEditDocumentMetadata from '../metadata/hooks/use-edit-dataset-metadata
import DatasetMetadataDrawer from '../metadata/metadata-dataset/dataset-metadata-drawer'
import StatusWithAction from '../common/document-status-with-action/status-with-action'
import { useDocLink } from '@/context/i18n'
import { useFetchDefaultProcessRule } from '@/service/knowledge/use-create-dataset'

const FolderPlusIcon = ({ className }: React.SVGProps<SVGElement>) => {
  return <svg width="20" height="20" viewBox="0 0 20 20" fill="none" xmlns="http://www.w3.org/2000/svg" className={className ?? ''}>

@ -178,6 +179,8 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
    router.push(`/datasets/${datasetId}/documents/create`)
  }

  const fetchDefaultProcessRuleMutation = useFetchDefaultProcessRule()

  const handleSaveNotionPageSelected = async (selectedPages: NotionPage[]) => {
    const workspacesMap = groupBy(selectedPages, 'workspace_id')
    const workspaces = Object.keys(workspacesMap).map((workspaceId) => {

@ -186,6 +189,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
        pages: workspacesMap[workspaceId],
      }
    })
    const { rules } = await fetchDefaultProcessRuleMutation.mutateAsync('/datasets/process-rule')
    const params = {
      data_source: {
        type: dataset?.data_source_type,

@ -209,7 +213,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
      },
      indexing_technique: dataset?.indexing_technique,
      process_rule: {
        rules: {},
        rules,
        mode: ProcessMode.general,
      },
    } as CreateDocumentReq

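Net effect of the last hunk: Notion page imports no longer send an empty `rules: {}` but first fetch the workspace defaults. A rough sketch of the resulting request fragment — field values are assumed; only the endpoint and the `rules` wiring come from the diff:

// Assumed shape: default rules fetched once, then embedded in the create-document payload.
const { rules } = await fetchDefaultProcessRuleMutation.mutateAsync('/datasets/process-rule')
const processRule = {
  rules, // server-provided defaults (e.g. segmentation settings) instead of {}
  mode: ProcessMode.general,
}
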
@ -9,6 +9,8 @@ import { useAppContext } from '@/context/app-context'
import { fetchNotionConnection } from '@/service/common'
import NotionIcon from '@/app/components/base/notion-icon'
import { noop } from 'lodash-es'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'

const Icon: FC<{
  src: string

@ -33,6 +35,7 @@ const DataSourceNotion: FC<Props> = ({
  const { isCurrentWorkspaceManager } = useAppContext()
  const [canConnectNotion, setCanConnectNotion] = useState(false)
  const { data } = useSWR(canConnectNotion ? '/oauth/data-source/notion' : null, fetchNotionConnection)
  const { t } = useTranslation()

  const connected = !!workspaces.length

@ -51,9 +54,19 @@ const DataSourceNotion: FC<Props> = ({
  }

  useEffect(() => {
    if (data?.data)
      window.location.href = data.data
  }, [data])
    if (data && 'data' in data) {
      if (data.data && typeof data.data === 'string' && data.data.startsWith('http')) {
        window.location.href = data.data
      }
      else if (data.data === 'internal') {
        Toast.notify({
          type: 'info',
          message: t('common.dataSource.notion.integratedAlert'),
        })
      }
    }
  }, [data, t])

  return (
    <Panel
      type={DataSourceType.notion}

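The branch above encodes the response contract this commit relies on for GET /oauth/data-source/notion: the endpoint returns either an OAuth authorization URL to redirect to, or the literal string 'internal' when Notion is connected through an internal credential. A hedged sketch of that contract (the type name is invented):

// Assumed response contract, inferred from the branch above:
type NotionConnectionResponse = {
  data: string // 'http(s)://...' OAuth URL, or the literal 'internal'
}

function handleNotionConnection(res: NotionConnectionResponse) {
  if (res.data.startsWith('http'))
    window.location.href = res.data // external OAuth flow
  else if (res.data === 'internal')
    console.info('Notion integrated via internal credential; no re-authorization needed.')
}
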
@ -1,7 +1,7 @@
'use client'
import { useTheme } from 'next-themes'
import { RiArrowRightUpLine } from '@remixicon/react'
import { getPluginLinkInMarketplace } from '../utils'
import { getPluginDetailLinkInMarketplace, getPluginLinkInMarketplace } from '../utils'
import Card from '@/app/components/plugins/card'
import CardMoreInfo from '@/app/components/plugins/card/card-more-info'
import type { Plugin } from '@/app/components/plugins/types'

@ -83,7 +83,7 @@ const CardWrapper = ({
  return (
    <a
      className='group relative inline-block cursor-pointer rounded-xl'
      href={getPluginLinkInMarketplace(plugin)}
      href={getPluginDetailLinkInMarketplace(plugin)}
    >
      <Card
        key={plugin.name}

@ -38,6 +38,12 @@ export const getPluginLinkInMarketplace = (plugin: Plugin, params?: Record<strin
  return getMarketplaceUrl(`/plugins/${plugin.org}/${plugin.name}`, params)
}

export const getPluginDetailLinkInMarketplace = (plugin: Plugin) => {
  if (plugin.type === 'bundle')
    return `/bundles/${plugin.org}/${plugin.name}`
  return `/plugins/${plugin.org}/${plugin.name}`
}

export const getMarketplacePluginsByCollectionId = async (collectionId: string, query?: CollectionsAndPluginsSearchParams) => {
  let plugins = [] as Plugin[]

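A quick illustration of the routing difference the new helper introduces — bundles and single plugins resolve to different marketplace paths (the sample objects are invented):

// Hypothetical sample data, cast to Plugin for brevity:
const bundle = { type: 'bundle', org: 'acme', name: 'starter-pack' } as Plugin
const tool = { type: 'tool', org: 'acme', name: 'image-gen' } as Plugin

getPluginDetailLinkInMarketplace(bundle) // -> '/bundles/acme/starter-pack'
getPluginDetailLinkInMarketplace(tool)   // -> '/plugins/acme/image-gen'
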
@ -62,7 +62,7 @@ const AppInputsPanel = ({
      return []
    let inputFormSchema = []
    if (isBasicApp) {
      inputFormSchema = currentApp.model_config.user_input_form.filter((item: any) => !item.external_data_tool).map((item: any) => {
      inputFormSchema = currentApp.model_config?.user_input_form?.filter((item: any) => !item.external_data_tool).map((item: any) => {
        if (item.paragraph) {
          return {
            ...item.paragraph,

@ -108,10 +108,10 @@ const AppInputsPanel = ({
          type: 'text-input',
          required: false,
        }
      })
      }) || []
    }
    else {
      const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start) as any
      const startNode = currentWorkflow?.graph?.nodes.find(node => node.data.type === BlockEnum.Start) as any
      inputFormSchema = startNode?.data.variables.map((variable: any) => {
        if (variable.type === InputVarType.multiFiles) {
          return {

@ -132,7 +132,7 @@ const AppInputsPanel = ({
          ...variable,
          required: false,
        }
      })
      }) || []
    }
    if ((currentApp.mode === 'completion' || currentApp.mode === 'workflow') && basicAppFileConfig.enabled) {
      inputFormSchema.push({

@ -144,7 +144,7 @@ const AppInputsPanel = ({
      fileUploadConfig,
    })
  }
  return inputFormSchema
  return inputFormSchema || []
}, [basicAppFileConfig, currentApp, currentWorkflow, fileUploadConfig, isBasicApp])

const handleFormChange = (value: Record<string, any>) => {

@ -11,12 +11,11 @@ import {
  useEditInspectorVar,
  useInvalidateConversationVarValues,
  useInvalidateSysVarValues,
  useLastRun,
  useResetConversationVar,
  useResetToLastRunValue,
  useSysVarValues,
} from '@/service/use-workflow'
import { useCallback, useEffect, useState } from 'react'
import { useCallback } from 'react'
import { isConversationVar, isENV, isSystemVar } from '../nodes/_base/components/variable/utils'
import produce from 'immer'
import type { Node } from '@/app/components/workflow/types'

@ -118,15 +117,18 @@ const useInspectVarsCrud = () => {
      if (nodeInfo) {
        const index = draft.findIndex(node => node.nodeId === nodeId)
        if (index === -1) {
          draft.push({
          draft.unshift({
            nodeId,
            nodeType: nodeInfo.data.type,
            title: nodeInfo.data.title,
            vars: payload,
            nodePayload: nodeInfo.data,
          })
        }
        else {
          draft[index].vars = payload
          // put the node to the top
          draft.unshift(draft.splice(index, 1)[0])
        }
      }
    })

@ -180,16 +182,6 @@ const useInspectVarsCrud = () => {
    invalidateSysVarValues()
  }, [doEditInspectorVar, invalidateConversationVarValues, invalidateSysVarValues, setInspectVarValue])

  const [currNodeId, setCurrNodeId] = useState<string | null>(null)
  const [currEditVarId, setCurrEditVarId] = useState<string | null>(null)
  const { data } = useLastRun(appId, currNodeId || '', !!currNodeId)
  useEffect(() => {
    if (data && currNodeId && currEditVarId) {
      const inspectVar = getNodeInspectVars(currNodeId)?.vars?.find(item => item.id === currEditVarId)
      resetToLastRunVarInStore(currNodeId, currEditVarId, data.outputs?.[inspectVar?.selector?.[1] || ''])
    }
  }, [data, currNodeId, currEditVarId, getNodeInspectVars, editInspectVarValue, resetToLastRunVarInStore])

  const renameInspectVarName = async (nodeId: string, oldName: string, newName: string) => {
    const varId = getVarId(nodeId, oldName)
    if (!varId)

@ -212,9 +204,13 @@ const useInspectVarsCrud = () => {
  }, [getInspectVar])

  const resetToLastRunVar = async (nodeId: string, varId: string) => {
    await doResetToLastRunValue(varId)
    setCurrNodeId(nodeId)
    setCurrEditVarId(varId)
    const isSysVar = nodeId === 'sys'
    const data = await doResetToLastRunValue(varId)

    if (isSysVar)
      invalidateSysVarValues()
    else
      resetToLastRunVarInStore(nodeId, varId, data.value)
  }

  return {

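The reset flow after this refactor, restated: the hook no longer parks node/variable IDs in state and reacts in a useEffect; it awaits the reset response directly and branches on whether the variable is a system one. A sketch, with the response's `data.value` shape assumed from its use above:

// Assumed flow: doResetToLastRunValue returns the restored value as `data.value`.
async function resetToLastRun(nodeId: string, varId: string) {
  const data = await doResetToLastRunValue(varId) // server restores the last-run value
  if (nodeId === 'sys')
    invalidateSysVarValues() // system vars: refetch the whole list
  else
    resetToLastRunVarInStore(nodeId, varId, data.value) // node vars: patch the local store
}
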
@ -65,10 +65,11 @@ const VarList: FC<Props> = ({
  }, [list, onVarNameChange, onChange])

  const handleVarReferenceChange = useCallback((index: number) => {
    return (value: ValueSelector | string, varKindType: VarKindType) => {
    return (value: ValueSelector | string, varKindType: VarKindType, varInfo?: Var) => {
      const newList = produce(list, (draft) => {
        if (!isSupportConstantValue || varKindType === VarKindType.variable) {
          draft[index].value_selector = value as ValueSelector
          draft[index].value_type = varInfo?.type
          if (isSupportConstantValue)
            draft[index].variable_type = VarKindType.variable

@ -14,6 +14,7 @@ import Split from '@/app/components/workflow/nodes/_base/components/split'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import TypeSelector from '@/app/components/workflow/nodes/_base/components/selector'
import type { NodePanelProps } from '@/app/components/workflow/types'
import SyncButton from '@/app/components/base/button/sync-button'

const i18nPrefix = 'workflow.nodes.code'

const codeLanguages = [

@ -40,6 +41,7 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
    handleVarListChange,
    handleAddVariable,
    handleRemoveVariable,
    handleSyncFunctionSignature,
    handleCodeChange,
    handleCodeLanguageChange,
    handleVarsChange,

@ -68,7 +70,12 @@ const Panel: FC<NodePanelProps<CodeNodeType>> = ({
      <Field
        title={t(`${i18nPrefix}.inputVars`)}
        operations={
          !readOnly ? <AddButton onClick={handleAddVariable} /> : undefined
          !readOnly ? (
            <div className="flex gap-2">
              <SyncButton popupContent={t(`${i18nPrefix}.syncFunctionSignature`)} onClick={handleSyncFunctionSignature} />
              <AddButton onClick={handleAddVariable} />
            </div>
          ) : undefined
        }
      >
        <VarList

@ -84,6 +84,65 @@ const useConfig = (id: string, payload: CodeNodeType) => {
    setInputs(newInputs)
  }, [allLanguageDefault, inputs, setInputs])

  const handleSyncFunctionSignature = useCallback(() => {
    const generateSyncSignatureCode = (code: string) => {
      let mainDefRe
      let newMainDef
      if (inputs.code_language === CodeLanguage.javascript) {
        mainDefRe = /function\s+main\b\s*\([\s\S]*?\)/g
        newMainDef = 'function main({{var_list}})'
        let param_list = inputs.variables?.map(item => item.variable).join(', ') || ''
        param_list = param_list ? `{${param_list}}` : ''
        newMainDef = newMainDef.replace('{{var_list}}', param_list)
      }
      else if (inputs.code_language === CodeLanguage.python3) {
        mainDefRe = /def\s+main\b\s*\([\s\S]*?\)/g
        const param_list = []
        for (const item of inputs.variables) {
          let param = item.variable
          let param_type = ''
          switch (item.value_type) {
            case VarType.string:
              param_type = ': str'
              break
            case VarType.number:
              param_type = ': float'
              break
            case VarType.object:
              param_type = ': dict'
              break
            case VarType.array:
              param_type = ': list'
              break
            case VarType.arrayNumber:
              param_type = ': list[float]'
              break
            case VarType.arrayString:
              param_type = ': list[str]'
              break
            case VarType.arrayObject:
              param_type = ': list[dict]'
              break
          }
          param += param_type
          param_list.push(`${param}`)
        }

        newMainDef = `def main(${param_list.join(', ')})`
      }
      else { return code }

      const newCode = code.replace(mainDefRe, newMainDef)
      return newCode
    }

    const newInputs = produce(inputs, (draft) => {
      draft.code = generateSyncSignatureCode(draft.code)
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  const {
    handleVarsChange,
    handleAddVariable: handleAddOutputVariable,

@ -119,6 +178,7 @@ const useConfig = (id: string, payload: CodeNodeType) => {
    handleVarListChange,
    handleAddVariable,
    handleRemoveVariable,
    handleSyncFunctionSignature,
    handleCodeChange,
    handleCodeLanguageChange,
    handleVarsChange,

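What handleSyncFunctionSignature does to the source text, concretely: it rewrites only the main() signature to match the current input-variable list and leaves the function body alone. A worked example of the python3 branch (the sample state is invented):

// Hypothetical input state for the sketch:
const variables = [
  { variable: 'query', value_type: VarType.string },
  { variable: 'scores', value_type: VarType.arrayNumber },
]
const code = 'def main(old_arg):\n    return {"result": old_arg}'

// The branch builds 'def main(query: str, scores: list[float])' and applies it
// via /def\s+main\b\s*\([\s\S]*?\)/g, which matches 'def main(old_arg)', yielding:
// 'def main(query: str, scores: list[float]):\n    return {"result": old_arg}'
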
@ -19,6 +19,8 @@ import Editor from '@/app/components/workflow/nodes/_base/components/prompt/edit
import StructureOutput from './components/structure-output'
import Switch from '@/app/components/base/switch'
import { RiAlertFill, RiQuestionLine } from '@remixicon/react'
import { fetchAndMergeValidCompletionParams } from '@/utils/completion-params'
import Toast from '@/app/components/base/toast'

const i18nPrefix = 'workflow.nodes.llm'

@ -68,10 +70,27 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
    modelId: string
    mode?: string
  }) => {
    handleCompletionParamsChange({})
    handleModelChanged(model)
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])
    (async () => {
      try {
        const { params: filtered, removedDetails } = await fetchAndMergeValidCompletionParams(
          model.provider,
          model.modelId,
          inputs.model.completion_params,
        )
        const keys = Object.keys(removedDetails)
        if (keys.length)
          Toast.notify({ type: 'warning', message: `${t('common.modelProvider.parametersInvalidRemoved')}: ${keys.map(k => `${k} (${removedDetails[k]})`).join(', ')}` })
        handleCompletionParamsChange(filtered)
      }
      catch (e) {
        Toast.notify({ type: 'error', message: t('common.error') })
        handleCompletionParamsChange({})
      }
      finally {
        handleModelChanged(model)
      }
    })()
  }, [inputs.model.completion_params])

  return (
    <div className='mt-2'>

@ -136,6 +136,7 @@ export type Variable = {
    variable: string
  }
  value_selector: ValueSelector
  value_type?: VarType
  variable_type?: VarKindType
  value?: string
  options?: string[]

@ -390,6 +390,8 @@ const translation = {
    addChildChunk: 'Untergeordneten Block hinzufügen',
    regenerationConfirmTitle: 'Möchten Sie untergeordnete Chunks regenerieren?',
    searchResults_one: 'ERGEBNIS',
    keywordEmpty: 'Das Schlüsselwort darf nicht leer sein.',
    keywordDuplicate: 'Das Schlüsselwort existiert bereits',
  },
}

@ -456,6 +456,7 @@ const translation = {
    connected: 'Connected',
    disconnected: 'Disconnected',
    changeAuthorizedPages: 'Change authorized pages',
    integratedAlert: 'Notion is integrated via internal credential, no need to re-authorize.',
    pagesAuthorized: 'Pages authorized',
    sync: 'Sync',
    remove: 'Remove',

@ -549,6 +549,7 @@ const translation = {
    advancedDependencies: 'Advanced Dependencies',
    advancedDependenciesTip: 'Add some preloaded dependencies that take more time to consume or are not default built-in here',
    searchDependencies: 'Search Dependencies',
    syncFunctionSignature: 'Sync function signature to code',
  },
  templateTransform: {
    inputVars: 'Input Variables',

@ -389,6 +389,8 @@ const translation = {
    characters_one: 'carácter',
    regenerationSuccessMessage: 'Puede cerrar esta ventana.',
    regenerationConfirmTitle: '¿Desea regenerar fragmentos secundarios?',
    keywordEmpty: 'La palabra clave no puede estar vacía',
    keywordDuplicate: 'La palabra clave ya existe',
  },
}

@ -388,6 +388,8 @@ const translation = {
    regeneratingMessage: 'این ممکن است یک لحظه طول بکشد، لطفا صبر کنید...',
    regenerationConfirmTitle: 'آیا می خواهید تکه های کودک را بازسازی کنید؟',
    regenerationSuccessMessage: 'می توانید این پنجره را ببندید.',
    keywordEmpty: 'کلمه کلیدی نمیتواند خالی باشد',
    keywordDuplicate: 'این کلیدواژه قبلاً وجود دارد',
  },
}

@ -389,6 +389,8 @@ const translation = {
    searchResults_zero: 'RÉSULTAT',
    empty: 'Aucun Chunk trouvé',
    editChildChunk: 'Modifier le morceau enfant',
    keywordDuplicate: 'Le mot-clé existe déjà',
    keywordEmpty: 'Le mot-clé ne peut pas être vide.',
  },
}

@ -390,6 +390,8 @@ const translation = {
    chunkAdded: '1 हिस्सा जोड़ा गया',
    chunkDetail: 'चंक विवरण',
    regenerationConfirmMessage: 'चाइल्ड चंक्स को रीजनरेट करने से वर्तमान चाइल्ड चंक्स ओवरराइट हो जाएंगे, जिसमें संपादित चंक्स और नए जोड़े गए चंक्स शामिल हैं। पुनरुत्थान को पूर्ववत नहीं किया जा सकता है।',
    keywordDuplicate: 'कीवर्ड पहले से मौजूद है',
    keywordEmpty: 'कीवर्ड ख़ाली नहीं हो सकता',
  },
}

@ -391,6 +391,8 @@ const translation = {
    regenerationSuccessMessage: 'È possibile chiudere questa finestra.',
    childChunkAdded: '1 blocco figlio aggiunto',
    childChunks_other: 'BLOCCHI FIGLIO',
    keywordEmpty: 'La parola chiave non può essere vuota',
    keywordDuplicate: 'La parola chiave esiste già',
  },
}

@ -388,6 +388,8 @@ const translation = {
    editedAt: '編集日時',
    expandChunks: 'チャンクを展開',
    collapseChunks: 'チャンクを折りたたむ',
    keywordDuplicate: 'そのキーワードは既に存在しています',
    keywordEmpty: 'キーワードは空であってはいけません',
  },
}

@ -550,6 +550,7 @@ const translation = {
    advancedDependencies: '高度な依存関係',
    advancedDependenciesTip: '消費に時間がかかる、またはデフォルトで組み込まれていない事前ロードされた依存関係を追加します',
    searchDependencies: '依存関係を検索',
    syncFunctionSignature: 'コードの関数署名を同期',
  },
  templateTransform: {
    inputVars: '入力変数',

@ -388,6 +388,8 @@ const translation = {
    addChunk: '청크 추가 (Add Chunk)',
    characters_other: '문자',
    regeneratingMessage: '시간이 걸릴 수 있으니 잠시만 기다려 주십시오...',
    keywordDuplicate: '키워드가 이미 존재합니다.',
    keywordEmpty: '키워드는 비워둘 수 없습니다.',
  },
}

@ -390,6 +390,8 @@ const translation = {
    newChildChunk: 'Nowy fragment podrzędny',
    clearFilter: 'Wyczyść filtr',
    childChunks_one: 'FRAGMENT POTOMNY',
    keywordDuplicate: 'Słowo kluczowe już istnieje',
    keywordEmpty: 'Słowo kluczowe nie może być puste',
  },
}

@ -389,6 +389,8 @@ const translation = {
    newChildChunk: 'Novo pedaço filho',
    characters_one: 'personagem',
    parentChunk: 'Pedaço pai',
    keywordEmpty: 'A palavra-chave não pode estar vazia',
    keywordDuplicate: 'A palavra-chave já existe',
  },
}

@ -389,6 +389,8 @@ const translation = {
    regeneratingTitle: 'Regenerarea bucăților secundare',
    addChildChunk: 'Adăugați o bucată copil',
    searchResults_other: 'REZULTATELE',
    keywordDuplicate: 'Cuvântul cheie există deja',
    keywordEmpty: 'Cuvântul cheie nu poate fi gol',
  },
}

@ -389,6 +389,8 @@ const translation = {
    characters_one: 'характер',
    addChildChunk: 'Добавить дочерний чанк',
    newChildChunk: 'Новый дочерний чанк',
    keywordEmpty: 'Ключевое слово не может быть пустым',
    keywordDuplicate: 'Ключевое слово уже существует',
  },
}

@ -389,6 +389,8 @@ const translation = {
    chunk: 'Kos',
    addChunk: 'Dodajanje kosa',
    childChunkAdded: 'Dodan je 1 kos otroka',
    keywordDuplicate: 'Ključna beseda že obstaja',
    keywordEmpty: 'Ključna beseda ne more biti prazna',
  },
}

@ -388,6 +388,8 @@ const translation = {
    searchResults_other: 'ผลลัพธ์',
    regenerationSuccessMessage: 'คุณสามารถปิดหน้าต่างนี้ได้',
    childChunks_one: 'ก้อนเด็ก',
    keywordDuplicate: 'คำสำคัญมีอยู่แล้ว',
    keywordEmpty: 'คีย์เวิร์ดไม่สามารถว่างเปล่าได้',
  },
}

@ -388,6 +388,8 @@ const translation = {
    chunks_other: 'Parçalar',
    editedAt: 'Şurada düzenlendi:',
    addChildChunk: 'Alt Parça Ekle',
    keywordDuplicate: 'Anahtar kelime zaten var',
    keywordEmpty: 'Anahtar kelime boş olamaz',
  },
}

@ -389,6 +389,8 @@ const translation = {
    regenerationSuccessMessage: 'Ви можете закрити це вікно.',
    expandChunks: 'Розгортання фрагментів',
    regenerationConfirmTitle: 'Хочете регенерувати дитячі шматки?',
    keywordEmpty: 'Ключове слово не може бути порожнім',
    keywordDuplicate: 'Ключове слово вже існує',
  },
}

@ -388,6 +388,8 @@ const translation = {
    clearFilter: 'Bộ lọc rõ ràng',
    chunk: 'Khúc',
    edited: 'EDITED',
    keywordDuplicate: 'Từ khóa đã tồn tại',
    keywordEmpty: 'Từ khóa không được để trống',
  },
}

@ -387,6 +387,8 @@ const translation = {
    editedAt: '编辑于',
    expandChunks: '展开分段',
    collapseChunks: '折叠分段',
    keywordEmpty: '关键词不能为空',
    keywordDuplicate: '关键词已经存在',
  },
}

@ -550,6 +550,7 @@ const translation = {
    advancedDependencies: '高级依赖',
    advancedDependenciesTip: '在这里添加一些预加载需要消耗较多时间或非默认内置的依赖包',
    searchDependencies: '搜索依赖',
    syncFunctionSignature: '同步函数签名至代码',
  },
  templateTransform: {
    inputVars: '输入变量',

@ -388,6 +388,8 @@ const translation = {
    searchResults_zero: '結果',
    parentChunks_other: '父塊',
    newChildChunk: '新兒童塊',
    keywordEmpty: '關鍵字不能為空',
    keywordDuplicate: '關鍵字已經存在',
  },
}

@ -544,6 +544,7 @@ const translation = {
    advancedDependencies: '高級依賴',
    advancedDependenciesTip: '在這裡添加一些預加載需要消耗較多時間或非默認內置的依賴包',
    searchDependencies: '搜索依賴',
    syncFunctionSignature: '同步函數簽名至代碼',
  },
  templateTransform: {
    inputVars: '輸入變量',

@ -1,6 +1,6 @@
{
  "name": "dify-web",
  "version": "1.5.0",
  "version": "1.5.1",
  "private": true,
  "engines": {
    "node": ">=v22.11.0"

@ -103,7 +103,7 @@
    "mime": "^4.0.4",
    "mitt": "^3.0.1",
    "negotiator": "^0.6.3",
    "next": "15.2.3",
    "next": "15.2.4",
    "next-themes": "^0.4.3",
    "pinyin-pro": "^3.25.0",
    "qrcode.react": "^4.2.0",

@ -235,7 +235,11 @@
  },
  "pnpm": {
    "overrides": {
      "esbuild@<0.25.0": "0.25.0"
      "esbuild@<0.25.0": "0.25.0",
      "pbkdf2@<3.1.3": "3.1.3",
      "vite@<6.2.7": "6.2.7",
      "prismjs@<1.30.0": "1.30.0",
      "brace-expansion@<2.0.2": "2.0.2"
    }
  }
}

@ -9,6 +9,10 @@ overrides:
|
|||
'@types/react-dom': ~18.2.0
|
||||
string-width: 4.2.3
|
||||
esbuild@<0.25.0: 0.25.0
|
||||
pbkdf2@<3.1.3: 3.1.3
|
||||
vite@<6.2.7: 6.2.7
|
||||
prismjs@<1.30.0: 1.30.0
|
||||
brace-expansion@<2.0.2: 2.0.2
|
||||
|
||||
importers:
|
||||
|
||||
|
|
@ -207,8 +211,8 @@ importers:
|
|||
specifier: ^0.6.3
|
||||
version: 0.6.4
|
||||
next:
|
||||
specifier: 15.2.3
|
||||
version: 15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)
|
||||
specifier: 15.2.4
|
||||
version: 15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)
|
||||
next-themes:
|
||||
specifier: ^0.4.3
|
||||
version: 0.4.6(react-dom@19.0.0(react@19.0.0))(react@19.0.0)
|
||||
|
|
@ -392,7 +396,7 @@ importers:
|
|||
version: 8.5.0(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(storybook@8.5.0)
|
||||
'@storybook/nextjs':
|
||||
specifier: 8.5.0
|
||||
version: 8.5.0(esbuild@0.25.0)(next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))
|
||||
version: 8.5.0(esbuild@0.25.0)(next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))
|
||||
'@storybook/react':
|
||||
specifier: 8.5.0
|
||||
version: 8.5.0(@storybook/test@8.5.0(storybook@8.5.0))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(storybook@8.5.0)(typescript@4.9.5)
|
||||
|
|
@ -2086,8 +2090,8 @@ packages:
|
|||
'@napi-rs/wasm-runtime@0.2.8':
|
||||
resolution: {integrity: sha512-OBlgKdX7gin7OIq4fadsjpg+cp2ZphvAIKucHsNfTdJiqdOmOEwQd/bHi0VwNrcw5xpBJyUw6cK/QilCqy1BSg==}
|
||||
|
||||
'@next/env@15.2.3':
|
||||
resolution: {integrity: sha512-a26KnbW9DFEUsSxAxKBORR/uD9THoYoKbkpFywMN/AFvboTt94b8+g/07T8J6ACsdLag8/PDU60ov4rPxRAixw==}
|
||||
'@next/env@15.2.4':
|
||||
resolution: {integrity: sha512-+SFtMgoiYP3WoSswuNmxJOCwi06TdWE733D+WPjpXIe4LXGULwEaofiiAy6kbS0+XjM5xF5n3lKuBwN2SnqD9g==}
|
||||
|
||||
'@next/eslint-plugin-next@15.3.0':
|
||||
resolution: {integrity: sha512-511UUcpWw5GWTyKfzW58U2F/bYJyjLE9e3SlnGK/zSXq7RqLlqFO8B9bitJjumLpj317fycC96KZ2RZsjGNfBw==}
|
||||
|
|
@ -2103,50 +2107,50 @@ packages:
|
|||
'@mdx-js/react':
|
||||
optional: true
|
||||
|
||||
'@next/swc-darwin-arm64@15.2.3':
|
||||
resolution: {integrity: sha512-uaBhA8aLbXLqwjnsHSkxs353WrRgQgiFjduDpc7YXEU0B54IKx3vU+cxQlYwPCyC8uYEEX7THhtQQsfHnvv8dw==}
|
||||
'@next/swc-darwin-arm64@15.2.4':
|
||||
resolution: {integrity: sha512-1AnMfs655ipJEDC/FHkSr0r3lXBgpqKo4K1kiwfUf3iE68rDFXZ1TtHdMvf7D0hMItgDZ7Vuq3JgNMbt/+3bYw==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@next/swc-darwin-x64@15.2.3':
|
||||
resolution: {integrity: sha512-pVwKvJ4Zk7h+4hwhqOUuMx7Ib02u3gDX3HXPKIShBi9JlYllI0nU6TWLbPT94dt7FSi6mSBhfc2JrHViwqbOdw==}
|
||||
'@next/swc-darwin-x64@15.2.4':
|
||||
resolution: {integrity: sha512-3qK2zb5EwCwxnO2HeO+TRqCubeI/NgCe+kL5dTJlPldV/uwCnUgC7VbEzgmxbfrkbjehL4H9BPztWOEtsoMwew==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@next/swc-linux-arm64-gnu@15.2.3':
|
||||
resolution: {integrity: sha512-50ibWdn2RuFFkOEUmo9NCcQbbV9ViQOrUfG48zHBCONciHjaUKtHcYFiCwBVuzD08fzvzkWuuZkd4AqbvKO7UQ==}
|
||||
'@next/swc-linux-arm64-gnu@15.2.4':
|
||||
resolution: {integrity: sha512-HFN6GKUcrTWvem8AZN7tT95zPb0GUGv9v0d0iyuTb303vbXkkbHDp/DxufB04jNVD+IN9yHy7y/6Mqq0h0YVaQ==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@next/swc-linux-arm64-musl@15.2.3':
|
||||
resolution: {integrity: sha512-2gAPA7P652D3HzR4cLyAuVYwYqjG0mt/3pHSWTCyKZq/N/dJcUAEoNQMyUmwTZWCJRKofB+JPuDVP2aD8w2J6Q==}
|
||||
'@next/swc-linux-arm64-musl@15.2.4':
|
||||
resolution: {integrity: sha512-Oioa0SORWLwi35/kVB8aCk5Uq+5/ZIumMK1kJV+jSdazFm2NzPDztsefzdmzzpx5oGCJ6FkUC7vkaUseNTStNA==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@next/swc-linux-x64-gnu@15.2.3':
|
||||
resolution: {integrity: sha512-ODSKvrdMgAJOVU4qElflYy1KSZRM3M45JVbeZu42TINCMG3anp7YCBn80RkISV6bhzKwcUqLBAmOiWkaGtBA9w==}
|
||||
'@next/swc-linux-x64-gnu@15.2.4':
|
||||
resolution: {integrity: sha512-yb5WTRaHdkgOqFOZiu6rHV1fAEK0flVpaIN2HB6kxHVSy/dIajWbThS7qON3W9/SNOH2JWkVCyulgGYekMePuw==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@next/swc-linux-x64-musl@15.2.3':
|
||||
resolution: {integrity: sha512-ZR9kLwCWrlYxwEoytqPi1jhPd1TlsSJWAc+H/CJHmHkf2nD92MQpSRIURR1iNgA/kuFSdxB8xIPt4p/T78kwsg==}
|
||||
'@next/swc-linux-x64-musl@15.2.4':
|
||||
resolution: {integrity: sha512-Dcdv/ix6srhkM25fgXiyOieFUkz+fOYkHlydWCtB0xMST6X9XYI3yPDKBZt1xuhOytONsIFJFB08xXYsxUwJLw==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@next/swc-win32-arm64-msvc@15.2.3':
|
||||
resolution: {integrity: sha512-+G2FrDcfm2YDbhDiObDU/qPriWeiz/9cRR0yMWJeTLGGX6/x8oryO3tt7HhodA1vZ8r2ddJPCjtLcpaVl7TE2Q==}
|
||||
'@next/swc-win32-arm64-msvc@15.2.4':
|
||||
resolution: {integrity: sha512-dW0i7eukvDxtIhCYkMrZNQfNicPDExt2jPb9AZPpL7cfyUo7QSNl1DjsHjmmKp6qNAqUESyT8YFl/Aw91cNJJg==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@next/swc-win32-x64-msvc@15.2.3':
|
||||
resolution: {integrity: sha512-gHYS9tc+G2W0ZC8rBL+H6RdtXIyk40uLiaos0yj5US85FNhbFEndMA2nW3z47nzOWiSvXTZ5kBClc3rD0zJg0w==}
|
||||
'@next/swc-win32-x64-msvc@15.2.4':
|
||||
resolution: {integrity: sha512-SbnWkJmkS7Xl3kre8SdMF6F/XDh1DTFEhp0jRTj/uB8iPKoU2bb2NDfcu+iifv1+mxQEd1g2vvSxcZbXSKyWiQ==}
|
||||
engines: {node: '>= 10'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
|
@ -3311,7 +3315,7 @@ packages:
|
|||
resolution: {integrity: sha512-bmpJJm7Y7i9BBELlLuuM1J1Q6EQ6K5Ye4wcyOpOMXMcePYKSIYlpcrCm4l/O6ja4VJA5G2aMJiuZkZdnxlC3SA==}
|
||||
peerDependencies:
|
||||
msw: ^2.4.9
|
||||
vite: ^5.0.0 || ^6.0.0
|
||||
vite: 6.2.7
|
||||
peerDependenciesMeta:
|
||||
msw:
|
||||
optional: true
|
||||
|
|
@ -3726,11 +3730,8 @@ packages:
|
|||
boolbase@1.0.0:
|
||||
resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==}
|
||||
|
||||
brace-expansion@1.1.11:
|
||||
resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==}
|
||||
|
||||
brace-expansion@2.0.1:
|
||||
resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
|
||||
brace-expansion@2.0.2:
|
||||
resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==}
|
||||
|
||||
braces@3.0.3:
|
||||
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
|
||||
|
|
@@ -4076,9 +4077,6 @@ packages:
  compare-versions@6.1.1:
    resolution: {integrity: sha512-4hm4VPpIecmlg59CHXnRDnqGplJFrbLG4aFEl5vl6cK1u76ws3LLvX7ikFnTDl5vo39sjWD6AaDPYodJp/NNHg==}

  concat-map@0.0.1:
    resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==}

  confbox@0.1.8:
    resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==}

@@ -4134,6 +4132,9 @@ packages:
  create-ecdh@4.0.4:
    resolution: {integrity: sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==}

  create-hash@1.1.3:
    resolution: {integrity: sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==}

  create-hash@1.2.0:
    resolution: {integrity: sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==}

@@ -5383,6 +5384,9 @@ packages:
  has-unicode@2.0.1:
    resolution: {integrity: sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==}

  hash-base@2.0.2:
    resolution: {integrity: sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==}

  hash-base@3.0.5:
    resolution: {integrity: sha512-vXm0l45VbcHEVlTCzs8M+s0VeYsB2lnlAaThoLKGXr3bE/VWDOelNUnycUPEhKEaXARL2TEFjBOyUiM6+55KBg==}
    engines: {node: '>= 0.10'}
@@ -6565,8 +6569,8 @@ packages:
      react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
      react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc

  next@15.2.3:
    resolution: {integrity: sha512-x6eDkZxk2rPpu46E1ZVUWIBhYCLszmUY6fvHBFcbzJ9dD+qRX6vcHusaqqDlnY+VngKzKbAiG2iRCkPbmi8f7w==}
  next@15.2.4:
    resolution: {integrity: sha512-VwL+LAaPSxEkd3lU2xWbgEOtrM8oedmyhBqaVNmgKB+GvZlCy9rgaEc+y2on0wv+l0oSFqLtYD6dcC1eAedUaQ==}
    engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
    hasBin: true
    peerDependencies:

@@ -6859,8 +6863,8 @@ packages:
    resolution: {integrity: sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==}
    engines: {node: '>= 14.16'}

  pbkdf2@3.1.2:
    resolution: {integrity: sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==}
  pbkdf2@3.1.3:
    resolution: {integrity: sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==}
    engines: {node: '>=0.12'}

  pdfjs-dist@4.4.168:

@@ -7042,10 +7046,6 @@ packages:
    resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
    engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}

  prismjs@1.27.0:
    resolution: {integrity: sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==}
    engines: {node: '>=6'}

  prismjs@1.30.0:
    resolution: {integrity: sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==}
    engines: {node: '>=6'}
@@ -7527,6 +7527,9 @@ packages:
    deprecated: Rimraf versions prior to v4 are no longer supported
    hasBin: true

  ripemd160@2.0.1:
    resolution: {integrity: sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==}

  ripemd160@2.0.2:
    resolution: {integrity: sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==}

@@ -8042,6 +8045,10 @@ packages:
  tmpl@1.0.5:
    resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==}

  to-buffer@1.2.1:
    resolution: {integrity: sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==}
    engines: {node: '>= 0.4'}

  to-regex-range@5.0.1:
    resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==}
    engines: {node: '>=8.0'}

@@ -8367,8 +8374,8 @@ packages:
    engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
    hasBin: true

  vite@6.2.6:
    resolution: {integrity: sha512-9xpjNl3kR4rVDZgPNdTL0/c6ao4km69a/2ihNQbcANz8RuCOK3hQBmLSJf3bRKVQjVMda+YvizNE8AwvogcPbw==}
  vite@6.2.7:
    resolution: {integrity: sha512-qg3LkeuinTrZoJHHF94coSaTfIPyBYoywp+ys4qu20oSJFbKMYoIJo0FWJT9q6Vp49l6z9IsJRbHdcGtiKbGoQ==}
    engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
    hasBin: true
    peerDependencies:
@@ -10565,7 +10572,7 @@ snapshots:
      '@tybys/wasm-util': 0.9.0
    optional: true

  '@next/env@15.2.3': {}
  '@next/env@15.2.4': {}

  '@next/eslint-plugin-next@15.3.0':
    dependencies:

@@ -10578,28 +10585,28 @@ snapshots:
      '@mdx-js/loader': 3.1.0(acorn@8.14.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))
      '@mdx-js/react': 3.1.0(@types/react@18.2.79)(react@19.0.0)

  '@next/swc-darwin-arm64@15.2.3':
  '@next/swc-darwin-arm64@15.2.4':
    optional: true

  '@next/swc-darwin-x64@15.2.3':
  '@next/swc-darwin-x64@15.2.4':
    optional: true

  '@next/swc-linux-arm64-gnu@15.2.3':
  '@next/swc-linux-arm64-gnu@15.2.4':
    optional: true

  '@next/swc-linux-arm64-musl@15.2.3':
  '@next/swc-linux-arm64-musl@15.2.4':
    optional: true

  '@next/swc-linux-x64-gnu@15.2.3':
  '@next/swc-linux-x64-gnu@15.2.4':
    optional: true

  '@next/swc-linux-x64-musl@15.2.3':
  '@next/swc-linux-x64-musl@15.2.4':
    optional: true

  '@next/swc-win32-arm64-msvc@15.2.3':
  '@next/swc-win32-arm64-msvc@15.2.4':
    optional: true

  '@next/swc-win32-x64-msvc@15.2.3':
  '@next/swc-win32-x64-msvc@15.2.4':
    optional: true

  '@nodelib/fs.scandir@2.1.5':
@@ -11211,7 +11218,7 @@ snapshots:
    dependencies:
      storybook: 8.5.0

  '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))':
  '@storybook/nextjs@8.5.0(esbuild@0.25.0)(next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3))(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)(storybook@8.5.0)(type-fest@4.39.1)(typescript@4.9.5)(uglify-js@3.19.3)(webpack-hot-middleware@2.26.1)(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))':
    dependencies:
      '@babel/core': 7.26.10
      '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.26.10)

@@ -11237,7 +11244,7 @@ packages:
      find-up: 5.0.0
      image-size: 1.2.1
      loader-utils: 3.3.1
      next: 15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)
      next: 15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3)
      node-polyfill-webpack-plugin: 2.0.1(webpack@5.99.5(esbuild@0.25.0)(uglify-js@3.19.3))
      pnp-webpack-plugin: 1.7.0(typescript@4.9.5)
      postcss: 8.5.3

@@ -11956,13 +11963,13 @@ packages:
      chai: 5.2.0
      tinyrainbow: 2.0.0

  '@vitest/mocker@3.1.1(vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))':
  '@vitest/mocker@3.1.1(vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))':
    dependencies:
      '@vitest/spy': 3.1.1
      estree-walker: 3.0.3
      magic-string: 0.30.17
    optionalDependencies:
      vite: 6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
      vite: 6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)

  '@vitest/pretty-format@2.0.5':
    dependencies:
@@ -12488,12 +12495,7 @@ packages:

  boolbase@1.0.0: {}

  brace-expansion@1.1.11:
    dependencies:
      balanced-match: 1.0.2
      concat-map: 0.0.1

  brace-expansion@2.0.1:
  brace-expansion@2.0.2:
    dependencies:
      balanced-match: 1.0.2

@@ -12846,8 +12848,6 @@ packages:

  compare-versions@6.1.1: {}

  concat-map@0.0.1: {}

  confbox@0.1.8: {}

  confbox@0.2.2: {}

@@ -12905,6 +12905,13 @@ packages:
      bn.js: 4.12.1
      elliptic: 6.6.1

  create-hash@1.1.3:
    dependencies:
      cipher-base: 1.0.6
      inherits: 2.0.4
      ripemd160: 2.0.2
      sha.js: 2.4.11

  create-hash@1.2.0:
    dependencies:
      cipher-base: 1.0.6
@@ -12959,7 +12966,7 @@ packages:
      diffie-hellman: 5.0.3
      hash-base: 3.0.5
      inherits: 2.0.4
      pbkdf2: 3.1.2
      pbkdf2: 3.1.3
      public-encrypt: 4.0.3
      randombytes: 2.1.0
      randomfill: 1.0.4

@@ -14577,6 +14584,10 @@ packages:
  has-unicode@2.0.1:
    optional: true

  hash-base@2.0.2:
    dependencies:
      inherits: 2.0.4

  hash-base@3.0.5:
    dependencies:
      inherits: 2.0.4

@@ -16239,15 +16250,15 @@ packages:

  minimatch@10.0.1:
    dependencies:
      brace-expansion: 2.0.1
      brace-expansion: 2.0.2

  minimatch@3.1.2:
    dependencies:
      brace-expansion: 1.1.11
      brace-expansion: 2.0.2

  minimatch@9.0.5:
    dependencies:
      brace-expansion: 2.0.1
      brace-expansion: 2.0.2

  minimist@1.2.8: {}
@@ -16307,9 +16318,9 @@ packages:
      react: 19.0.0
      react-dom: 19.0.0(react@19.0.0)

  next@15.2.3(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3):
  next@15.2.4(@babel/core@7.26.10)(react-dom@19.0.0(react@19.0.0))(react@19.0.0)(sass@1.86.3):
    dependencies:
      '@next/env': 15.2.3
      '@next/env': 15.2.4
      '@swc/counter': 0.1.3
      '@swc/helpers': 0.5.15
      busboy: 1.6.0

@@ -16319,14 +16330,14 @@ packages:
      react-dom: 19.0.0(react@19.0.0)
      styled-jsx: 5.1.6(@babel/core@7.26.10)(react@19.0.0)
    optionalDependencies:
      '@next/swc-darwin-arm64': 15.2.3
      '@next/swc-darwin-x64': 15.2.3
      '@next/swc-linux-arm64-gnu': 15.2.3
      '@next/swc-linux-arm64-musl': 15.2.3
      '@next/swc-linux-x64-gnu': 15.2.3
      '@next/swc-linux-x64-musl': 15.2.3
      '@next/swc-win32-arm64-msvc': 15.2.3
      '@next/swc-win32-x64-msvc': 15.2.3
      '@next/swc-darwin-arm64': 15.2.4
      '@next/swc-darwin-x64': 15.2.4
      '@next/swc-linux-arm64-gnu': 15.2.4
      '@next/swc-linux-arm64-musl': 15.2.4
      '@next/swc-linux-x64-gnu': 15.2.4
      '@next/swc-linux-x64-musl': 15.2.4
      '@next/swc-win32-arm64-msvc': 15.2.4
      '@next/swc-win32-x64-msvc': 15.2.4
      sass: 1.86.3
      sharp: 0.33.5
    transitivePeerDependencies:
@@ -16563,7 +16574,7 @@ packages:
      browserify-aes: 1.2.0
      evp_bytestokey: 1.0.3
      hash-base: 3.0.5
      pbkdf2: 3.1.2
      pbkdf2: 3.1.3
      safe-buffer: 5.2.1

  parse-entities@2.0.0:

@@ -16644,13 +16655,14 @@ packages:

  pathval@2.0.0: {}

  pbkdf2@3.1.2:
  pbkdf2@3.1.3:
    dependencies:
      create-hash: 1.2.0
      create-hash: 1.1.3
      create-hmac: 1.1.7
      ripemd160: 2.0.2
      ripemd160: 2.0.1
      safe-buffer: 5.2.1
      sha.js: 2.4.11
      to-buffer: 1.2.1

  pdfjs-dist@4.4.168:
    optionalDependencies:
@@ -16831,8 +16843,6 @@ packages:
      ansi-styles: 5.2.0
      react-is: 18.3.1

  prismjs@1.27.0: {}

  prismjs@1.30.0: {}

  process-nextick-args@2.0.1: {}

@@ -17247,7 +17257,7 @@ packages:
    dependencies:
      hastscript: 6.0.0
      parse-entities: 2.0.0
      prismjs: 1.27.0
      prismjs: 1.30.0

  regenerate-unicode-properties@10.2.0:
    dependencies:

@@ -17441,6 +17451,11 @@ packages:
    dependencies:
      glob: 7.2.3

  ripemd160@2.0.1:
    dependencies:
      hash-base: 2.0.2
      inherits: 2.0.4

  ripemd160@2.0.2:
    dependencies:
      hash-base: 3.0.5
@@ -18041,6 +18056,12 @@ packages:

  tmpl@1.0.5: {}

  to-buffer@1.2.1:
    dependencies:
      isarray: 2.0.5
      safe-buffer: 5.2.1
      typed-array-buffer: 1.0.3

  to-regex-range@5.0.1:
    dependencies:
      is-number: 7.0.0

@@ -18389,7 +18410,7 @@ packages:
      debug: 4.4.0
      es-module-lexer: 1.6.0
      pathe: 2.0.3
      vite: 6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
      vite: 6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
    transitivePeerDependencies:
      - '@types/node'
      - jiti

@@ -18404,7 +18425,7 @@ packages:
      - tsx
      - yaml

  vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1):
  vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1):
    dependencies:
      esbuild: 0.25.2
      postcss: 8.5.3

@@ -18420,7 +18441,7 @@ packages:
  vitest@3.1.1(@types/debug@4.1.12)(@types/node@18.15.0)(happy-dom@17.4.4)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1):
    dependencies:
      '@vitest/expect': 3.1.1
      '@vitest/mocker': 3.1.1(vite@6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))
      '@vitest/mocker': 3.1.1(vite@6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1))
      '@vitest/pretty-format': 3.1.1
      '@vitest/runner': 3.1.1
      '@vitest/snapshot': 3.1.1

@@ -18436,7 +18457,7 @@ packages:
      tinyexec: 0.3.2
      tinypool: 1.0.2
      tinyrainbow: 2.0.0
      vite: 6.2.6(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
      vite: 6.2.7(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
      vite-node: 3.1.1(@types/node@18.15.0)(jiti@1.21.7)(sass@1.86.3)(terser@5.39.0)(yaml@2.7.1)
      why-is-node-running: 2.3.0
    optionalDependencies:
@@ -139,7 +139,7 @@ export const useResetConversationVar = (appId: string) => {
export const useResetToLastRunValue = (appId: string) => {
  return useMutation({
    mutationKey: [NAME_SPACE, 'reset to last run value', appId],
    mutationFn: async (varId: string) => {
    mutationFn: async (varId: string): Promise<{ value: any }> => {
      return put(`apps/${appId}/workflows/draft/variables/${varId}/reset`)
    },
  })
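A minimal consumer sketch for the retyped mutation above, assuming it runs inside a React component; the handler name and applyToEditor are hypothetical, only useResetToLastRunValue and its Promise<{ value: any }> result come from the diff:

const { mutateAsync: resetToLastRun } = useResetToLastRunValue(appId)

const handleReset = async (varId: string) => {
  // The explicit return type lets callers destructure `value` without casting.
  const { value } = await resetToLastRun(varId)
  applyToEditor(value) // hypothetical sink for the restored draft value
}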
@@ -0,0 +1,88 @@
import type { FormValue, ModelParameterRule } from '@/app/components/header/account-setting/model-provider-page/declarations'

export const mergeValidCompletionParams = (
  oldParams: FormValue | undefined,
  rules: ModelParameterRule[],
): { params: FormValue; removedDetails: Record<string, string> } => {
  if (!oldParams || Object.keys(oldParams).length === 0)
    return { params: {}, removedDetails: {} }

  const acceptedKeys = new Set(rules.map(r => r.name))
  const ruleMap: Record<string, ModelParameterRule> = {}
  rules.forEach((r) => {
    ruleMap[r.name] = r
  })

  const nextParams: FormValue = {}
  const removedDetails: Record<string, string> = {}

  Object.entries(oldParams).forEach(([key, value]) => {
    if (!acceptedKeys.has(key)) {
      removedDetails[key] = 'unsupported'
      return
    }

    const rule = ruleMap[key]
    if (!rule) {
      removedDetails[key] = 'unsupported'
      return
    }

    switch (rule.type) {
      case 'int':
      case 'float': {
        if (typeof value !== 'number') {
          removedDetails[key] = 'invalid type'
          return
        }
        const min = rule.min ?? Number.NEGATIVE_INFINITY
        const max = rule.max ?? Number.POSITIVE_INFINITY
        if (value < min || value > max) {
          removedDetails[key] = `out of range (${min}-${max})`
          return
        }
        nextParams[key] = value
        return
      }
      case 'boolean': {
        if (typeof value !== 'boolean') {
          removedDetails[key] = 'invalid type'
          return
        }
        nextParams[key] = value
        return
      }
      case 'string':
      case 'text': {
        if (typeof value !== 'string') {
          removedDetails[key] = 'invalid type'
          return
        }
        if (Array.isArray(rule.options) && rule.options.length) {
          if (!(rule.options as string[]).includes(value)) {
            removedDetails[key] = 'unsupported option'
            return
          }
        }
        nextParams[key] = value
        return
      }
      default: {
        removedDetails[key] = `unsupported rule type: ${(rule as any)?.type ?? 'unknown'}`
      }
    }
  })

  return { params: nextParams, removedDetails }
}

export const fetchAndMergeValidCompletionParams = async (
  provider: string,
  modelId: string,
  oldParams: FormValue | undefined,
): Promise<{ params: FormValue; removedDetails: Record<string, string> }> => {
  const { fetchModelParameterRules } = await import('@/service/common')
  const url = `/workspaces/current/model-providers/${provider}/models/parameter-rules?model=${modelId}`
  const { data: parameterRules } = await fetchModelParameterRules(url)
  return mergeValidCompletionParams(oldParams, parameterRules ?? [])
}
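A minimal usage sketch for mergeValidCompletionParams above; the rule literal and parameter values are hypothetical examples, not real provider output:

const rules = [
  { name: 'top_p', type: 'float', min: 0, max: 1 },
] as ModelParameterRule[]

const { params, removedDetails } = mergeValidCompletionParams(
  { top_p: 0.9, temperature: 1.2, format: 'xml' },
  rules,
)
// params         -> { top_p: 0.9 }  (a float inside the 0-1 range, so it is kept)
// removedDetails -> { temperature: 'unsupported', format: 'unsupported' }

fetchAndMergeValidCompletionParams wraps the same merge behind a fetch of the target model's parameter rules, so switching models drops stale completion params instead of sending values the new provider would reject.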