mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into e-300
This commit is contained in: commit e7003902b7
@@ -1,5 +1,4 @@
 FROM mcr.microsoft.com/devcontainers/python:3.12
 
-# [Optional] Uncomment this section to install additional OS packages.
-# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
-#     && apt-get -y install --no-install-recommends <your-package-list-here>
+RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+    && apt-get -y install libgmp-dev libmpfr-dev libmpc-dev

@@ -348,6 +348,7 @@ SENTRY_DSN=
 
 # DEBUG
 DEBUG=false
+ENABLE_REQUEST_LOGGING=False
 SQLALCHEMY_ECHO=false
 
 # Notion import configuration, support public and internal

@@ -54,6 +54,7 @@ def initialize_extensions(app: DifyApp):
         ext_otel,
         ext_proxy_fix,
         ext_redis,
+        ext_request_logging,
         ext_sentry,
         ext_set_secretkey,
         ext_storage,
@@ -83,6 +84,7 @@ def initialize_extensions(app: DifyApp):
         ext_blueprints,
         ext_commands,
         ext_otel,
+        ext_request_logging,
     ]
     for ext in extensions:
         short_name = ext.__name__.split(".")[-1]

@@ -17,6 +17,12 @@ class DeploymentConfig(BaseSettings):
         default=False,
     )
 
+    # Request logging configuration
+    ENABLE_REQUEST_LOGGING: bool = Field(
+        description="Enable request and response body logging",
+        default=False,
+    )
+
     EDITION: str = Field(
         description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
         default="SELF_HOSTED",

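Note: the new ENABLE_REQUEST_LOGGING flag is a standard pydantic-settings field, so it can be toggled from the environment without code changes (this is how the matching `.env` entry above takes effect). A minimal sketch of how such a field resolves; the standalone class here is illustrative, not dify's actual wiring:

```python
# Illustrative sketch: how a pydantic-settings boolean field like
# ENABLE_REQUEST_LOGGING resolves from the environment.
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class DemoDeploymentConfig(BaseSettings):
    # Mirrors the field added in the diff: defaults to False,
    # flipped on via the ENABLE_REQUEST_LOGGING env var.
    ENABLE_REQUEST_LOGGING: bool = Field(
        description="Enable request and response body logging",
        default=False,
    )


os.environ["ENABLE_REQUEST_LOGGING"] = "true"
print(DemoDeploymentConfig().ENABLE_REQUEST_LOGGING)  # True
```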
@@ -81,8 +81,7 @@ class DraftWorkflowApi(Resource):
            parser.add_argument("graph", type=dict, required=True, nullable=False, location="json")
            parser.add_argument("features", type=dict, required=True, nullable=False, location="json")
            parser.add_argument("hash", type=str, required=False, location="json")
-           # TODO: set this to required=True after frontend is updated
-           parser.add_argument("environment_variables", type=list, required=False, location="json")
+           parser.add_argument("environment_variables", type=list, required=True, location="json")
            parser.add_argument("conversation_variables", type=list, required=False, location="json")
            args = parser.parse_args()
        elif "text/plain" in content_type:

@@ -1,3 +1,6 @@
+from typing import cast
+
+from flask_login import current_user
 from flask_restful import Resource, marshal_with, reqparse
 from flask_restful.inputs import int_range
 
@@ -12,8 +15,7 @@ from fields.workflow_run_fields import (
 )
 from libs.helper import uuid_value
 from libs.login import login_required
-from models import App
-from models.model import AppMode
+from models import Account, App, AppMode, EndUser
 from services.workflow_run_service import WorkflowRunService
 
 
@@ -90,7 +92,12 @@ class WorkflowRunNodeExecutionListApi(Resource):
         run_id = str(run_id)
 
         workflow_run_service = WorkflowRunService()
-        node_executions = workflow_run_service.get_workflow_run_node_executions(app_model=app_model, run_id=run_id)
+        user = cast("Account | EndUser", current_user)
+        node_executions = workflow_run_service.get_workflow_run_node_executions(
+            app_model=app_model,
+            run_id=run_id,
+            user=user,
+        )
 
         return {"data": node_executions}
 

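The `cast("Account | EndUser", current_user)` line narrows flask_login's `current_user` proxy for the type checker only; it has no runtime effect. A hedged sketch of the pattern (the `Account`/`EndUser` names come from the diff; the stub classes and helper are illustrative):

```python
# Illustrative: typing.cast only informs the type checker; it does not
# convert or validate the object at runtime.
from typing import cast


class Account: ...
class EndUser: ...


def narrow(user: object) -> "Account | EndUser":
    # flask_login.current_user is a loosely typed LocalProxy, so call
    # sites cast it to the concrete union they expect, as the diff does.
    return cast("Account | EndUser", user)
```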
@@ -29,9 +29,7 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from factories import file_factory
-from models.account import Account
-from models.model import App, Conversation, EndUser, Message
-from models.workflow import Workflow
+from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
 from services.conversation_service import ConversationService
 from services.errors.message import MessageNotExistsError
 
@@ -165,8 +163,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )
 
         return self._generate(
@@ -231,8 +230,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
         )
 
         return self._generate(
@@ -295,8 +295,9 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
         )
 
         return self._generate(

@@ -70,7 +70,7 @@ from events.message_event import message_was_created
 from extensions.ext_database import db
 from models import Conversation, EndUser, Message, MessageFile
 from models.account import Account
-from models.enums import CreatedByRole
+from models.enums import CreatorUserRole
 from models.workflow import (
     Workflow,
     WorkflowRunStatus,
@@ -105,11 +105,11 @@ class AdvancedChatAppGenerateTaskPipeline:
         if isinstance(user, EndUser):
             self._user_id = user.id
             user_session_id = user.session_id
-            self._created_by_role = CreatedByRole.END_USER
+            self._created_by_role = CreatorUserRole.END_USER
         elif isinstance(user, Account):
             self._user_id = user.id
             user_session_id = user.id
-            self._created_by_role = CreatedByRole.ACCOUNT
+            self._created_by_role = CreatorUserRole.ACCOUNT
         else:
             raise NotImplementedError(f"User type not supported: {type(user)}")
 
@@ -739,9 +739,9 @@ class AdvancedChatAppGenerateTaskPipeline:
                     url=file["remote_url"],
                     belongs_to="assistant",
                    upload_file_id=file["related_id"],
-                    created_by_role=CreatedByRole.ACCOUNT
+                    created_by_role=CreatorUserRole.ACCOUNT
                     if message.invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER}
-                    else CreatedByRole.END_USER,
+                    else CreatorUserRole.END_USER,
                     created_by=message.from_account_id or message.from_end_user_id or "",
                 )
                 for file in self._recorded_files

@@ -25,7 +25,7 @@ from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBa
 from core.prompt.utils.prompt_template_parser import PromptTemplateParser
 from extensions.ext_database import db
 from models import Account
-from models.enums import CreatedByRole
+from models.enums import CreatorUserRole
 from models.model import App, AppMode, AppModelConfig, Conversation, EndUser, Message, MessageFile
 from services.errors.app_model_config import AppModelConfigBrokenError
 from services.errors.conversation import ConversationNotExistsError
@@ -223,7 +223,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
                 belongs_to="user",
                 url=file.remote_url,
                 upload_file_id=file.related_id,
-                created_by_role=(CreatedByRole.ACCOUNT if account_id else CreatedByRole.END_USER),
+                created_by_role=(CreatorUserRole.ACCOUNT if account_id else CreatorUserRole.END_USER),
                 created_by=account_id or end_user_id or "",
             )
             db.session.add(message_file)

@@ -27,7 +27,7 @@ from core.workflow.repository.workflow_node_execution_repository import Workflow
 from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from extensions.ext_database import db
 from factories import file_factory
-from models import Account, App, EndUser, Workflow
+from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
@@ -138,10 +138,12 @@ class WorkflowAppGenerator(BaseAppGenerator):
 
         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )
 
         return self._generate(
@@ -262,10 +264,12 @@ class WorkflowAppGenerator(BaseAppGenerator):
 
         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
         )
 
         return self._generate(
@@ -325,10 +329,12 @@ class WorkflowAppGenerator(BaseAppGenerator):
 
         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
-            tenant_id=application_generate_entity.app_config.tenant_id,
+            user=user,
             app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
         )
 
         return self._generate(

@@ -6,7 +6,7 @@ from pydantic import BaseModel, ConfigDict
 
 from core.model_runtime.entities.llm_entities import LLMResult
 from core.model_runtime.utils.encoders import jsonable_encoder
-from core.workflow.entities.node_entities import AgentNodeStrategyInit
+from core.workflow.entities.node_entities import AgentNodeStrategyInit, NodeRunMetadataKey
 from models.workflow import WorkflowNodeExecutionStatus
 
 
@@ -244,7 +244,7 @@ class NodeStartStreamResponse(StreamResponse):
        title: str
        index: int
        predecessor_node_id: Optional[str] = None
-       inputs: Optional[dict] = None
+       inputs: Optional[Mapping[str, Any]] = None
        created_at: int
        extras: dict = {}
        parallel_id: Optional[str] = None
@@ -301,13 +301,13 @@ class NodeFinishStreamResponse(StreamResponse):
        title: str
        index: int
        predecessor_node_id: Optional[str] = None
-       inputs: Optional[dict] = None
-       process_data: Optional[dict] = None
-       outputs: Optional[dict] = None
+       inputs: Optional[Mapping[str, Any]] = None
+       process_data: Optional[Mapping[str, Any]] = None
+       outputs: Optional[Mapping[str, Any]] = None
        status: str
        error: Optional[str] = None
        elapsed_time: float
-       execution_metadata: Optional[dict] = None
+       execution_metadata: Optional[Mapping[NodeRunMetadataKey, Any]] = None
        created_at: int
        finished_at: int
        files: Optional[Sequence[Mapping[str, Any]]] = []
@@ -370,13 +370,13 @@ class NodeRetryStreamResponse(StreamResponse):
        title: str
        index: int
        predecessor_node_id: Optional[str] = None
-       inputs: Optional[dict] = None
-       process_data: Optional[dict] = None
-       outputs: Optional[dict] = None
+       inputs: Optional[Mapping[str, Any]] = None
+       process_data: Optional[Mapping[str, Any]] = None
+       outputs: Optional[Mapping[str, Any]] = None
        status: str
        error: Optional[str] = None
        elapsed_time: float
-       execution_metadata: Optional[dict] = None
+       execution_metadata: Optional[Mapping[NodeRunMetadataKey, Any]] = None
        created_at: int
        finished_at: int
        files: Optional[Sequence[Mapping[str, Any]]] = []

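The dict → Mapping changes in these stream-response models (and in the trace entities below) are a typing-level switch to a read-only view: callers can pass any mapping, and the annotation signals that the pipeline will not mutate it. A small sketch of the difference, with illustrative names:

```python
from collections.abc import Mapping
from typing import Any


def consume(inputs: Mapping[str, Any]) -> int:
    # Mapping exposes read operations only; a plain dict, an immutable
    # MappingProxyType, or any other mapping type is acceptable here,
    # and type checkers reject attempts to mutate `inputs`.
    return len(inputs)


consume({"query": "hello"})  # a dict still satisfies Mapping
```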
@@ -1,3 +1,4 @@
+from collections.abc import Mapping
 from datetime import datetime
 from enum import StrEnum
 from typing import Any, Optional, Union
@@ -155,10 +156,10 @@ class LangfuseSpan(BaseModel):
         description="The status message of the span. Additional field for context of the event. E.g. the error "
         "message of an error event.",
     )
-    input: Optional[Union[str, dict[str, Any], list, None]] = Field(
+    input: Optional[Union[str, Mapping[str, Any], list, None]] = Field(
         default=None, description="The input of the span. Can be any JSON object."
     )
-    output: Optional[Union[str, dict[str, Any], list, None]] = Field(
+    output: Optional[Union[str, Mapping[str, Any], list, None]] = Field(
         default=None, description="The output of the span. Can be any JSON object."
     )
     version: Optional[str] = Field(

@@ -1,11 +1,10 @@
-import json
 import logging
 import os
 from datetime import datetime, timedelta
 from typing import Optional
 
 from langfuse import Langfuse  # type: ignore
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import Session, sessionmaker
 
 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import LangfuseConfig
@@ -30,8 +29,9 @@ from core.ops.langfuse_trace.entities.langfuse_trace_entity import (
 )
 from core.ops.utils import filter_none_values
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.workflow.nodes.enums import NodeType
 from extensions.ext_database import db
-from models.model import EndUser
+from models import Account, App, EndUser, WorkflowNodeExecutionTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
@@ -113,8 +113,29 @@ class LangFuseDataTrace(BaseTraceInstance):
 
         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
+        # Find the app's creator account
+        with Session(db.engine, expire_on_commit=False) as session:
+            # Get the app to find its creator
+            app_id = trace_info.metadata.get("app_id")
+            if not app_id:
+                raise ValueError("No app_id found in trace_info metadata")
+
+            app = session.query(App).filter(App.id == app_id).first()
+            if not app:
+                raise ValueError(f"App with id {app_id} not found")
+
+            if not app.created_by:
+                raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
+
+            service_account = session.query(Account).filter(Account.id == app.created_by).first()
+            if not service_account:
+                raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
-            session_factory=session_factory, tenant_id=trace_info.tenant_id
+            session_factory=session_factory,
+            user=service_account,
+            app_id=trace_info.metadata.get("app_id"),
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )
 
         # Get all executions for this workflow run
@@ -124,23 +145,22 @@ class LangFuseDataTrace(BaseTraceInstance):
 
         for node_execution in workflow_node_executions:
             node_execution_id = node_execution.id
-            tenant_id = node_execution.tenant_id
-            app_id = node_execution.app_id
+            tenant_id = trace_info.tenant_id  # Use from trace_info instead
+            app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
             node_name = node_execution.title
             node_type = node_execution.node_type
             status = node_execution.status
-            if node_type == "llm":
-                inputs = (
-                    json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {}
-                )
+            if node_type == NodeType.LLM:
+                inputs = node_execution.process_data.get("prompts", {}) if node_execution.process_data else {}
             else:
-                inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
-            outputs = json.loads(node_execution.outputs) if node_execution.outputs else {}
+                inputs = node_execution.inputs if node_execution.inputs else {}
+            outputs = node_execution.outputs if node_execution.outputs else {}
             created_at = node_execution.created_at or datetime.now()
             elapsed_time = node_execution.elapsed_time
             finished_at = created_at + timedelta(seconds=elapsed_time)
 
-            metadata = json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
+            execution_metadata = node_execution.metadata if node_execution.metadata else {}
+            metadata = {str(k): v for k, v in execution_metadata.items()}
             metadata.update(
                 {
                     "workflow_run_id": trace_info.workflow_run_id,
@@ -152,7 +172,7 @@ class LangFuseDataTrace(BaseTraceInstance):
                     "status": status,
                 }
             )
-            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+            process_data = node_execution.process_data if node_execution.process_data else {}
             model_provider = process_data.get("model_provider", None)
             model_name = process_data.get("model_name", None)
             if model_provider is not None and model_name is not None:

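The app-creator lookup block above is repeated near-verbatim in the LangSmith, Opik, and Weave integrations below. A hedged sketch of how it could be factored into one shared helper; the model names and queries are taken from the diff, but the helper itself is hypothetical and not part of this commit:

```python
# Hypothetical helper; this commit inlines the equivalent block in each
# trace integration rather than sharing it.
from sqlalchemy.orm import Session

from extensions.ext_database import db
from models import Account, App


def get_app_creator_account(app_id: str) -> Account:
    """Resolve the Account that created the given app, for trace attribution."""
    with Session(db.engine, expire_on_commit=False) as session:
        app = session.query(App).filter(App.id == app_id).first()
        if not app:
            raise ValueError(f"App with id {app_id} not found")
        if not app.created_by:
            raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
        account = session.query(Account).filter(Account.id == app.created_by).first()
        if not account:
            raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
        return account
```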
@@ -1,3 +1,4 @@
+from collections.abc import Mapping
 from datetime import datetime
 from enum import StrEnum
 from typing import Any, Optional, Union
@@ -30,8 +31,8 @@ class LangSmithMultiModel(BaseModel):
 
 class LangSmithRunModel(LangSmithTokenUsage, LangSmithMultiModel):
     name: Optional[str] = Field(..., description="Name of the run")
-    inputs: Optional[Union[str, dict[str, Any], list, None]] = Field(None, description="Inputs of the run")
-    outputs: Optional[Union[str, dict[str, Any], list, None]] = Field(None, description="Outputs of the run")
+    inputs: Optional[Union[str, Mapping[str, Any], list, None]] = Field(None, description="Inputs of the run")
+    outputs: Optional[Union[str, Mapping[str, Any], list, None]] = Field(None, description="Outputs of the run")
     run_type: LangSmithRunType = Field(..., description="Type of the run")
     start_time: Optional[datetime | str] = Field(None, description="Start time of the run")
     end_time: Optional[datetime | str] = Field(None, description="End time of the run")

@@ -1,4 +1,3 @@
-import json
 import logging
 import os
 import uuid
@@ -7,7 +6,7 @@ from typing import Optional, cast
 
 from langsmith import Client
 from langsmith.schemas import RunBase
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import Session, sessionmaker
 
 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import LangSmithConfig
@@ -29,8 +28,10 @@ from core.ops.langsmith_trace.entities.langsmith_trace_entity import (
 )
 from core.ops.utils import filter_none_values, generate_dotted_order
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.workflow.entities.node_entities import NodeRunMetadataKey
+from core.workflow.nodes.enums import NodeType
 from extensions.ext_database import db
-from models.model import EndUser, MessageFile
+from models import Account, App, EndUser, MessageFile, WorkflowNodeExecutionTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
@@ -137,8 +138,29 @@ class LangSmithDataTrace(BaseTraceInstance):
 
         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
+        # Find the app's creator account
+        with Session(db.engine, expire_on_commit=False) as session:
+            # Get the app to find its creator
+            app_id = trace_info.metadata.get("app_id")
+            if not app_id:
+                raise ValueError("No app_id found in trace_info metadata")
+
+            app = session.query(App).filter(App.id == app_id).first()
+            if not app:
+                raise ValueError(f"App with id {app_id} not found")
+
+            if not app.created_by:
+                raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
+
+            service_account = session.query(Account).filter(Account.id == app.created_by).first()
+            if not service_account:
+                raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
-            session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
+            session_factory=session_factory,
+            user=service_account,
+            app_id=trace_info.metadata.get("app_id"),
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )
 
         # Get all executions for this workflow run
@@ -148,27 +170,23 @@ class LangSmithDataTrace(BaseTraceInstance):
 
         for node_execution in workflow_node_executions:
             node_execution_id = node_execution.id
-            tenant_id = node_execution.tenant_id
-            app_id = node_execution.app_id
+            tenant_id = trace_info.tenant_id  # Use from trace_info instead
+            app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
             node_name = node_execution.title
             node_type = node_execution.node_type
             status = node_execution.status
-            if node_type == "llm":
-                inputs = (
-                    json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {}
-                )
+            if node_type == NodeType.LLM:
+                inputs = node_execution.process_data.get("prompts", {}) if node_execution.process_data else {}
             else:
-                inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
-            outputs = json.loads(node_execution.outputs) if node_execution.outputs else {}
+                inputs = node_execution.inputs if node_execution.inputs else {}
+            outputs = node_execution.outputs if node_execution.outputs else {}
             created_at = node_execution.created_at or datetime.now()
             elapsed_time = node_execution.elapsed_time
             finished_at = created_at + timedelta(seconds=elapsed_time)
 
-            execution_metadata = (
-                json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
-            )
-            node_total_tokens = execution_metadata.get("total_tokens", 0)
-            metadata = execution_metadata.copy()
+            execution_metadata = node_execution.metadata if node_execution.metadata else {}
+            node_total_tokens = execution_metadata.get(NodeRunMetadataKey.TOTAL_TOKENS) or 0
+            metadata = {str(key): value for key, value in execution_metadata.items()}
             metadata.update(
                 {
                     "workflow_run_id": trace_info.workflow_run_id,
@@ -181,7 +199,7 @@ class LangSmithDataTrace(BaseTraceInstance):
                 }
             )
 
-            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+            process_data = node_execution.process_data if node_execution.process_data else {}
 
             if process_data and process_data.get("model_mode") == "chat":
                 run_type = LangSmithRunType.llm
@@ -191,7 +209,7 @@ class LangSmithDataTrace(BaseTraceInstance):
                         "ls_model_name": process_data.get("model_name", ""),
                     }
                 )
-            elif node_type == "knowledge-retrieval":
+            elif node_type == NodeType.KNOWLEDGE_RETRIEVAL:
                 run_type = LangSmithRunType.retriever
             else:
                 run_type = LangSmithRunType.tool

@@ -1,4 +1,3 @@
-import json
 import logging
 import os
 import uuid
@@ -7,7 +6,7 @@ from typing import Optional, cast
 
 from opik import Opik, Trace
 from opik.id_helpers import uuid4_to_uuid7
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import Session, sessionmaker
 
 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import OpikConfig
@@ -23,8 +22,10 @@ from core.ops.entities.trace_entity import (
     WorkflowTraceInfo,
 )
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.workflow.entities.node_entities import NodeRunMetadataKey
+from core.workflow.nodes.enums import NodeType
 from extensions.ext_database import db
-from models.model import EndUser, MessageFile
+from models import Account, App, EndUser, MessageFile, WorkflowNodeExecutionTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
@@ -150,8 +151,29 @@ class OpikDataTrace(BaseTraceInstance):
 
         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
+        # Find the app's creator account
+        with Session(db.engine, expire_on_commit=False) as session:
+            # Get the app to find its creator
+            app_id = trace_info.metadata.get("app_id")
+            if not app_id:
+                raise ValueError("No app_id found in trace_info metadata")
+
+            app = session.query(App).filter(App.id == app_id).first()
+            if not app:
+                raise ValueError(f"App with id {app_id} not found")
+
+            if not app.created_by:
+                raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
+
+            service_account = session.query(Account).filter(Account.id == app.created_by).first()
+            if not service_account:
+                raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
+
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
-            session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
+            session_factory=session_factory,
+            user=service_account,
+            app_id=trace_info.metadata.get("app_id"),
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
        )
 
         # Get all executions for this workflow run
@@ -161,26 +183,22 @@ class OpikDataTrace(BaseTraceInstance):
 
         for node_execution in workflow_node_executions:
             node_execution_id = node_execution.id
-            tenant_id = node_execution.tenant_id
-            app_id = node_execution.app_id
+            tenant_id = trace_info.tenant_id  # Use from trace_info instead
+            app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
             node_name = node_execution.title
             node_type = node_execution.node_type
             status = node_execution.status
-            if node_type == "llm":
-                inputs = (
-                    json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {}
-                )
+            if node_type == NodeType.LLM:
+                inputs = node_execution.process_data.get("prompts", {}) if node_execution.process_data else {}
             else:
-                inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
-            outputs = json.loads(node_execution.outputs) if node_execution.outputs else {}
+                inputs = node_execution.inputs if node_execution.inputs else {}
+            outputs = node_execution.outputs if node_execution.outputs else {}
             created_at = node_execution.created_at or datetime.now()
             elapsed_time = node_execution.elapsed_time
             finished_at = created_at + timedelta(seconds=elapsed_time)
 
-            execution_metadata = (
-                json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
-            )
-            metadata = execution_metadata.copy()
+            execution_metadata = node_execution.metadata if node_execution.metadata else {}
+            metadata = {str(k): v for k, v in execution_metadata.items()}
             metadata.update(
                 {
                     "workflow_run_id": trace_info.workflow_run_id,
@@ -193,7 +211,7 @@ class OpikDataTrace(BaseTraceInstance):
                 }
             )
 
-            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+            process_data = node_execution.process_data if node_execution.process_data else {}
 
             provider = None
             model = None
@@ -226,7 +244,7 @@ class OpikDataTrace(BaseTraceInstance):
             parent_span_id = trace_info.workflow_app_log_id or trace_info.workflow_run_id
 
             if not total_tokens:
-                total_tokens = execution_metadata.get("total_tokens", 0)
+                total_tokens = execution_metadata.get(NodeRunMetadataKey.TOTAL_TOKENS) or 0
 
             span_data = {
                 "trace_id": opik_trace_id,

@@ -1,3 +1,4 @@
+from collections.abc import Mapping
 from typing import Any, Optional, Union
 
 from pydantic import BaseModel, Field, field_validator
@@ -19,8 +20,8 @@ class WeaveMultiModel(BaseModel):
 class WeaveTraceModel(WeaveTokenUsage, WeaveMultiModel):
     id: str = Field(..., description="ID of the trace")
     op: str = Field(..., description="Name of the operation")
-    inputs: Optional[Union[str, dict[str, Any], list, None]] = Field(None, description="Inputs of the trace")
-    outputs: Optional[Union[str, dict[str, Any], list, None]] = Field(None, description="Outputs of the trace")
+    inputs: Optional[Union[str, Mapping[str, Any], list, None]] = Field(None, description="Inputs of the trace")
+    outputs: Optional[Union[str, Mapping[str, Any], list, None]] = Field(None, description="Outputs of the trace")
     attributes: Optional[Union[str, dict[str, Any], list, None]] = Field(
         None, description="Metadata and attributes associated with trace"
     )

@@ -1,4 +1,3 @@
-import json
 import logging
 import os
 import uuid
@@ -7,6 +6,7 @@ from typing import Any, Optional, cast
 
 import wandb
 import weave
+from sqlalchemy.orm import Session, sessionmaker
 
 from core.ops.base_trace_instance import BaseTraceInstance
 from core.ops.entities.config_entity import WeaveConfig
@@ -22,9 +22,11 @@ from core.ops.entities.trace_entity import (
     WorkflowTraceInfo,
 )
 from core.ops.weave_trace.entities.weave_trace_entity import WeaveTraceModel
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.workflow.entities.node_entities import NodeRunMetadataKey
+from core.workflow.nodes.enums import NodeType
 from extensions.ext_database import db
-from models.model import EndUser, MessageFile
-from models.workflow import WorkflowNodeExecution
+from models import Account, App, EndUser, MessageFile, WorkflowNodeExecutionTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
@@ -128,58 +130,57 @@ class WeaveDataTrace(BaseTraceInstance):
 
         self.start_call(workflow_run, parent_run_id=trace_info.message_id)
 
-        # through workflow_run_id get all_nodes_execution
-        workflow_nodes_execution_id_records = (
-            db.session.query(WorkflowNodeExecution.id)
-            .filter(WorkflowNodeExecution.workflow_run_id == trace_info.workflow_run_id)
-            .all()
+        # through workflow_run_id get all_nodes_execution using repository
+        session_factory = sessionmaker(bind=db.engine)
+        # Find the app's creator account
+        with Session(db.engine, expire_on_commit=False) as session:
+            # Get the app to find its creator
+            app_id = trace_info.metadata.get("app_id")
+            if not app_id:
+                raise ValueError("No app_id found in trace_info metadata")
+
+            app = session.query(App).filter(App.id == app_id).first()
+            if not app:
+                raise ValueError(f"App with id {app_id} not found")
+
+            if not app.created_by:
+                raise ValueError(f"App with id {app_id} has no creator (created_by is None)")
+
+            service_account = session.query(Account).filter(Account.id == app.created_by).first()
+            if not service_account:
+                raise ValueError(f"Creator account with id {app.created_by} not found for app {app_id}")
+
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            user=service_account,
+            app_id=trace_info.metadata.get("app_id"),
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )
 
-        for node_execution_id_record in workflow_nodes_execution_id_records:
-            node_execution = (
-                db.session.query(
-                    WorkflowNodeExecution.id,
-                    WorkflowNodeExecution.tenant_id,
-                    WorkflowNodeExecution.app_id,
-                    WorkflowNodeExecution.title,
-                    WorkflowNodeExecution.node_type,
-                    WorkflowNodeExecution.status,
-                    WorkflowNodeExecution.inputs,
-                    WorkflowNodeExecution.outputs,
-                    WorkflowNodeExecution.created_at,
-                    WorkflowNodeExecution.elapsed_time,
-                    WorkflowNodeExecution.process_data,
-                    WorkflowNodeExecution.execution_metadata,
-                )
-                .filter(WorkflowNodeExecution.id == node_execution_id_record.id)
-                .first()
-            )
-
-            if not node_execution:
-                continue
+        # Get all executions for this workflow run
+        workflow_node_executions = workflow_node_execution_repository.get_by_workflow_run(
+            workflow_run_id=trace_info.workflow_run_id
+        )
 
+        for node_execution in workflow_node_executions:
             node_execution_id = node_execution.id
-            tenant_id = node_execution.tenant_id
-            app_id = node_execution.app_id
+            tenant_id = trace_info.tenant_id  # Use from trace_info instead
+            app_id = trace_info.metadata.get("app_id")  # Use from trace_info instead
             node_name = node_execution.title
             node_type = node_execution.node_type
             status = node_execution.status
-            if node_type == "llm":
-                inputs = (
-                    json.loads(node_execution.process_data).get("prompts", {}) if node_execution.process_data else {}
-                )
+            if node_type == NodeType.LLM:
+                inputs = node_execution.process_data.get("prompts", {}) if node_execution.process_data else {}
             else:
-                inputs = json.loads(node_execution.inputs) if node_execution.inputs else {}
-            outputs = json.loads(node_execution.outputs) if node_execution.outputs else {}
+                inputs = node_execution.inputs if node_execution.inputs else {}
+            outputs = node_execution.outputs if node_execution.outputs else {}
             created_at = node_execution.created_at or datetime.now()
             elapsed_time = node_execution.elapsed_time
             finished_at = created_at + timedelta(seconds=elapsed_time)
 
-            execution_metadata = (
-                json.loads(node_execution.execution_metadata) if node_execution.execution_metadata else {}
-            )
-            node_total_tokens = execution_metadata.get("total_tokens", 0)
-            attributes = execution_metadata.copy()
+            execution_metadata = node_execution.metadata if node_execution.metadata else {}
+            node_total_tokens = execution_metadata.get(NodeRunMetadataKey.TOTAL_TOKENS) or 0
+            attributes = {str(k): v for k, v in execution_metadata.items()}
             attributes.update(
                 {
                     "workflow_run_id": trace_info.workflow_run_id,
@@ -192,7 +193,7 @@ class WeaveDataTrace(BaseTraceInstance):
                 }
             )
 
-            process_data = json.loads(node_execution.process_data) if node_execution.process_data else {}
+            process_data = node_execution.process_data if node_execution.process_data else {}
             if process_data and process_data.get("model_mode") == "chat":
                 attributes.update(
                     {

@@ -64,9 +64,9 @@ class PluginNodeBackwardsInvocation(BaseBackwardsInvocation):
         )
 
         return {
-            "inputs": execution.inputs_dict,
-            "outputs": execution.outputs_dict,
-            "process_data": execution.process_data_dict,
+            "inputs": execution.inputs,
+            "outputs": execution.outputs,
+            "process_data": execution.process_data,
         }
 
     @classmethod
@@ -113,7 +113,7 @@ class PluginNodeBackwardsInvocation(BaseBackwardsInvocation):
         )
 
         return {
-            "inputs": execution.inputs_dict,
-            "outputs": execution.outputs_dict,
-            "process_data": execution.process_data_dict,
+            "inputs": execution.inputs,
+            "outputs": execution.outputs,
+            "process_data": execution.process_data,
         }

@@ -19,7 +19,7 @@ from core.rag.extractor.extractor_base import BaseExtractor
 from core.rag.models.document import Document
 from extensions.ext_database import db
 from extensions.ext_storage import storage
-from models.enums import CreatedByRole
+from models.enums import CreatorUserRole
 from models.model import UploadFile
 
 logger = logging.getLogger(__name__)
@@ -116,7 +116,7 @@ class WordExtractor(BaseExtractor):
                 extension=str(image_ext),
                 mime_type=mime_type or "",
                 created_by=self.user_id,
-                created_by_role=CreatedByRole.ACCOUNT,
+                created_by_role=CreatorUserRole.ACCOUNT,
                 created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 used=True,
                 used_by=self.user_id,

@@ -2,16 +2,29 @@
 SQLAlchemy implementation of the WorkflowNodeExecutionRepository.
 """
 
+import json
 import logging
 from collections.abc import Sequence
-from typing import Optional
+from typing import Optional, Union
 
 from sqlalchemy import UnaryExpression, asc, delete, desc, select
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker
 
+from core.workflow.entities.node_execution_entities import (
+    NodeExecution,
+    NodeExecutionStatus,
+)
+from core.workflow.nodes.enums import NodeType
 from core.workflow.repository.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository
-from models.workflow import WorkflowNodeExecution, WorkflowNodeExecutionStatus, WorkflowNodeExecutionTriggeredFrom
+from models import (
+    Account,
+    CreatorUserRole,
+    EndUser,
+    WorkflowNodeExecution,
+    WorkflowNodeExecutionStatus,
+    WorkflowNodeExecutionTriggeredFrom,
+)
 
 logger = logging.getLogger(__name__)
 
@@ -23,16 +36,26 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
     This implementation supports multi-tenancy by filtering operations based on tenant_id.
     Each method creates its own session, handles the transaction, and commits changes
     to the database. This prevents long-running connections in the workflow core.
+
+    This implementation also includes an in-memory cache for node executions to improve
+    performance by reducing database queries.
     """
 
-    def __init__(self, session_factory: sessionmaker | Engine, tenant_id: str, app_id: Optional[str] = None):
+    def __init__(
+        self,
+        session_factory: sessionmaker | Engine,
+        user: Union[Account, EndUser],
+        app_id: Optional[str],
+        triggered_from: Optional[WorkflowNodeExecutionTriggeredFrom],
+    ):
         """
-        Initialize the repository with a SQLAlchemy sessionmaker or engine and tenant context.
+        Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.
 
         Args:
             session_factory: SQLAlchemy sessionmaker or engine for creating sessions
-            tenant_id: Tenant ID for multi-tenancy
-            app_id: Optional app ID for filtering by application
+            user: Account or EndUser object containing tenant_id, user ID, and role information
+            app_id: App ID for filtering by application (can be None)
+            triggered_from: Source of the execution trigger (SINGLE_STEP or WORKFLOW_RUN)
         """
         # If an engine is provided, create a sessionmaker from it
        if isinstance(session_factory, Engine):
@@ -44,38 +67,163 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
                 f"Invalid session_factory type {type(session_factory).__name__}; expected sessionmaker or Engine"
             )
 
+        # Extract tenant_id from user
+        tenant_id: str | None = user.tenant_id if isinstance(user, EndUser) else user.current_tenant_id
+        if not tenant_id:
+            raise ValueError("User must have a tenant_id or current_tenant_id")
         self._tenant_id = tenant_id
+
+        # Store app context
         self._app_id = app_id
 
-    def save(self, execution: WorkflowNodeExecution) -> None:
+        # Extract user context
+        self._triggered_from = triggered_from
+        self._creator_user_id = user.id
+
+        # Determine user role based on user type
+        self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER
+
+        # Initialize in-memory cache for node executions
+        # Key: node_execution_id, Value: NodeExecution
+        self._node_execution_cache: dict[str, NodeExecution] = {}
+
+    def _to_domain_model(self, db_model: WorkflowNodeExecution) -> NodeExecution:
         """
-        Save a WorkflowNodeExecution instance and commit changes to the database.
+        Convert a database model to a domain model.
 
         Args:
-            execution: The WorkflowNodeExecution instance to save
+            db_model: The database model to convert
+
+        Returns:
+            The domain model
         """
+        # Parse JSON fields
+        inputs = db_model.inputs_dict
+        process_data = db_model.process_data_dict
+        outputs = db_model.outputs_dict
+        metadata = db_model.execution_metadata_dict
+
+        # Convert status to domain enum
+        status = NodeExecutionStatus(db_model.status)
+
+        return NodeExecution(
+            id=db_model.id,
+            node_execution_id=db_model.node_execution_id,
+            workflow_id=db_model.workflow_id,
+            workflow_run_id=db_model.workflow_run_id,
+            index=db_model.index,
+            predecessor_node_id=db_model.predecessor_node_id,
+            node_id=db_model.node_id,
+            node_type=NodeType(db_model.node_type),
+            title=db_model.title,
+            inputs=inputs,
+            process_data=process_data,
+            outputs=outputs,
+            status=status,
+            error=db_model.error,
+            elapsed_time=db_model.elapsed_time,
+            metadata=metadata,
+            created_at=db_model.created_at,
+            finished_at=db_model.finished_at,
+        )
+
+    def to_db_model(self, domain_model: NodeExecution) -> WorkflowNodeExecution:
+        """
+        Convert a domain model to a database model.
+
+        Args:
+            domain_model: The domain model to convert
+
+        Returns:
+            The database model
+        """
+        # Use values from constructor if provided
+        if not self._triggered_from:
+            raise ValueError("triggered_from is required in repository constructor")
+        if not self._creator_user_id:
+            raise ValueError("created_by is required in repository constructor")
+        if not self._creator_user_role:
+            raise ValueError("created_by_role is required in repository constructor")
+
+        db_model = WorkflowNodeExecution()
+        db_model.id = domain_model.id
+        db_model.tenant_id = self._tenant_id
+        if self._app_id is not None:
+            db_model.app_id = self._app_id
+        db_model.workflow_id = domain_model.workflow_id
+        db_model.triggered_from = self._triggered_from
+        db_model.workflow_run_id = domain_model.workflow_run_id
+        db_model.index = domain_model.index
+        db_model.predecessor_node_id = domain_model.predecessor_node_id
+        db_model.node_execution_id = domain_model.node_execution_id
+        db_model.node_id = domain_model.node_id
+        db_model.node_type = domain_model.node_type
+        db_model.title = domain_model.title
+        db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None
+        db_model.process_data = json.dumps(domain_model.process_data) if domain_model.process_data else None
+        db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None
+        db_model.status = domain_model.status
+        db_model.error = domain_model.error
+        db_model.elapsed_time = domain_model.elapsed_time
+        db_model.execution_metadata = json.dumps(domain_model.metadata) if domain_model.metadata else None
+        db_model.created_at = domain_model.created_at
+        db_model.created_by_role = self._creator_user_role
+        db_model.created_by = self._creator_user_id
+        db_model.finished_at = domain_model.finished_at
+        return db_model
+
+    def save(self, execution: NodeExecution) -> None:
+        """
+        Save or update a NodeExecution domain entity to the database.
+
+        This method serves as a domain-to-database adapter that:
+        1. Converts the domain entity to its database representation
+        2. Persists the database model using SQLAlchemy's merge operation
+        3. Maintains proper multi-tenancy by including tenant context during conversion
+        4. Updates the in-memory cache for faster subsequent lookups
+
+        The method handles both creating new records and updating existing ones through
+        SQLAlchemy's merge operation.
+
+        Args:
+            execution: The NodeExecution domain entity to persist
+        """
+        # Convert domain model to database model using tenant context and other attributes
+        db_model = self.to_db_model(execution)
+
+        # Create a new database session
         with self._session_factory() as session:
-            # Ensure tenant_id is set
-            if not execution.tenant_id:
-                execution.tenant_id = self._tenant_id
-
-            # Set app_id if provided and not already set
-            if self._app_id and not execution.app_id:
-                execution.app_id = self._app_id
-
-            session.add(execution)
+            # SQLAlchemy merge intelligently handles both insert and update operations
+            # based on the presence of the primary key
+            session.merge(db_model)
             session.commit()
 
-    def get_by_node_execution_id(self, node_execution_id: str) -> Optional[WorkflowNodeExecution]:
+        # Update the in-memory cache for faster subsequent lookups
+        # Only cache if we have a node_execution_id to use as the cache key
+        if db_model.node_execution_id:
+            logger.debug(f"Updating cache for node_execution_id: {db_model.node_execution_id}")
+            self._node_execution_cache[db_model.node_execution_id] = db_model
+
+    def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]:
         """
-        Retrieve a WorkflowNodeExecution by its node_execution_id.
+        Retrieve a NodeExecution by its node_execution_id.
+
+        First checks the in-memory cache, and if not found, queries the database.
+        If found in the database, adds it to the cache for future lookups.
 
         Args:
             node_execution_id: The node execution ID
 
         Returns:
-            The WorkflowNodeExecution instance if found, None otherwise
+            The NodeExecution instance if found, None otherwise
         """
+        # First check the cache
+        if node_execution_id in self._node_execution_cache:
+            logger.debug(f"Cache hit for node_execution_id: {node_execution_id}")
+            return self._node_execution_cache[node_execution_id]
+
+        # If not in cache, query the database
+        logger.debug(f"Cache miss for node_execution_id: {node_execution_id}, querying database")
         with self._session_factory() as session:
             stmt = select(WorkflowNodeExecution).where(
                 WorkflowNodeExecution.node_execution_id == node_execution_id,
@@ -85,15 +233,28 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             if self._app_id:
                 stmt = stmt.where(WorkflowNodeExecution.app_id == self._app_id)
 
-            return session.scalar(stmt)
+            db_model = session.scalar(stmt)
+            if db_model:
+                # Convert to domain model
+                domain_model = self._to_domain_model(db_model)
+
+                # Add to cache
+                self._node_execution_cache[node_execution_id] = domain_model
+
+                return domain_model
+
+            return None
 
     def get_by_workflow_run(
         self,
         workflow_run_id: str,
         order_config: Optional[OrderConfig] = None,
-    ) -> Sequence[WorkflowNodeExecution]:
+    ) -> Sequence[NodeExecution]:
         """
-        Retrieve all WorkflowNodeExecution instances for a specific workflow run.
+        Retrieve all NodeExecution instances for a specific workflow run.
+
+        This method always queries the database to ensure complete and ordered results,
+        but updates the cache with any retrieved executions.
 
         Args:
             workflow_run_id: The workflow run ID
@@ -102,7 +263,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
                 order_config.order_direction: Direction to order ("asc" or "desc")
 
         Returns:
-            A list of WorkflowNodeExecution instances
+            A list of NodeExecution instances
         """
         with self._session_factory() as session:
             stmt = select(WorkflowNodeExecution).where(
@@ -129,17 +290,31 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             if order_columns:
                 stmt = stmt.order_by(*order_columns)
 
-            return session.scalars(stmt).all()
+            db_models = session.scalars(stmt).all()
 
-    def get_running_executions(self, workflow_run_id: str) -> Sequence[WorkflowNodeExecution]:
+            # Convert database models to domain models and update cache
+            domain_models = []
+            for model in db_models:
+                domain_model = self._to_domain_model(model)
+                # Update cache if node_execution_id is present
+                if domain_model.node_execution_id:
+                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
+                domain_models.append(domain_model)
+
+            return domain_models
+
+    def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]:
         """
-        Retrieve all running WorkflowNodeExecution instances for a specific workflow run.
+        Retrieve all running NodeExecution instances for a specific workflow run.
+
+        This method queries the database directly and updates the cache with any
+        retrieved executions that have a node_execution_id.
 
         Args:
             workflow_run_id: The workflow run ID
 
         Returns:
-            A list of running WorkflowNodeExecution instances
+            A list of running NodeExecution instances
         """
         with self._session_factory() as session:
             stmt = select(WorkflowNodeExecution).where(
@@ -152,26 +327,17 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             if self._app_id:
                 stmt = stmt.where(WorkflowNodeExecution.app_id == self._app_id)
 
-            return session.scalars(stmt).all()
+            db_models = session.scalars(stmt).all()
+            domain_models = []
 
-    def update(self, execution: WorkflowNodeExecution) -> None:
-        """
-        Update an existing WorkflowNodeExecution instance and commit changes to the database.
+            for model in db_models:
+                domain_model = self._to_domain_model(model)
+                # Update cache if node_execution_id is present
+                if domain_model.node_execution_id:
+                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
+                domain_models.append(domain_model)
 
-        Args:
-            execution: The WorkflowNodeExecution instance to update
-        """
-        with self._session_factory() as session:
-            # Ensure tenant_id is set
-            if not execution.tenant_id:
-                execution.tenant_id = self._tenant_id
-
-            # Set app_id if provided and not already set
-            if self._app_id and not execution.app_id:
-                execution.app_id = self._app_id
-
-            session.merge(execution)
-            session.commit()
+            return domain_models
 
     def clear(self) -> None:
         """
@@ -179,6 +345,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
 
         This method deletes all WorkflowNodeExecution records that match the tenant_id
         and app_id (if provided) associated with this repository instance.
+        It also clears the in-memory cache.
         """
         with self._session_factory() as session:
             stmt = delete(WorkflowNodeExecution).where(WorkflowNodeExecution.tenant_id == self._tenant_id)
@@ -194,3 +361,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
                 f"Cleared {deleted_count} workflow node execution records for tenant {self._tenant_id}"
                 + (f" and app {self._app_id}" if self._app_id else "")
             )
+
+            # Clear the in-memory cache
+            self._node_execution_cache.clear()
+            logger.info("Cleared in-memory node execution cache")

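Taken together, the new constructor and the domain/database conversion give callers roughly this shape of usage. A sketch assembled from the signatures in this diff; the `load_run_executions` helper and its arguments are illustrative placeholders, not part of the commit:

```python
# Usage sketch based on the signatures in this diff; the caller supplies
# an Account or EndUser (tenant and creator role are derived from it).
from collections.abc import Sequence

from sqlalchemy.orm import sessionmaker

from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities.node_execution_entities import NodeExecution
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from extensions.ext_database import db
from models import Account, EndUser, WorkflowNodeExecutionTriggeredFrom


def load_run_executions(user: Account | EndUser, app_id: str, run_id: str) -> Sequence[NodeExecution]:
    # Hypothetical helper: build a per-request repository, then query it.
    repo = SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=sessionmaker(bind=db.engine, expire_on_commit=False),
        user=user,
        app_id=app_id,
        triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
    )
    # Returns domain models (NodeExecution), not DB rows.
    return repo.get_by_workflow_run(
        workflow_run_id=run_id,
        order_config=OrderConfig(order_by=["index"], order_direction="desc"),
    )
```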
@@ -32,7 +32,7 @@ from core.tools.errors import (
 from core.tools.utils.message_transformer import ToolFileMessageTransformer
 from core.tools.workflow_as_tool.tool import WorkflowTool
 from extensions.ext_database import db
-from models.enums import CreatedByRole
+from models.enums import CreatorUserRole
 from models.model import Message, MessageFile
 
 
@@ -339,9 +339,9 @@ class ToolEngine:
                     url=message.url,
                     upload_file_id=tool_file_id,
                     created_by_role=(
-                        CreatedByRole.ACCOUNT
+                        CreatorUserRole.ACCOUNT
                         if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER}
-                        else CreatedByRole.END_USER
+                        else CreatorUserRole.END_USER
                     ),
                     created_by=user_id,
                 )

@@ -0,0 +1,98 @@
"""
Domain entities for workflow node execution.

This module contains the domain model for workflow node execution, which is used
by the core workflow module. These models are independent of the storage mechanism
and don't contain implementation details like tenant_id, app_id, etc.
"""

from collections.abc import Mapping
from datetime import datetime
from enum import StrEnum
from typing import Any, Optional

from pydantic import BaseModel, Field

from core.workflow.entities.node_entities import NodeRunMetadataKey
from core.workflow.nodes.enums import NodeType


class NodeExecutionStatus(StrEnum):
    """
    Node Execution Status Enum.
    """

    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    EXCEPTION = "exception"
    RETRY = "retry"


class NodeExecution(BaseModel):
    """
    Domain model for workflow node execution.

    This model represents the core business entity of a node execution,
    without implementation details like tenant_id, app_id, etc.

    Note: User/context-specific fields (triggered_from, created_by, created_by_role)
    have been moved to the repository implementation to keep the domain model clean.
    These fields are still accepted in the constructor for backward compatibility,
    but they are not stored in the model.
    """

    # Core identification fields
    id: str  # Unique identifier for this execution record
    node_execution_id: Optional[str] = None  # Optional secondary ID for cross-referencing
    workflow_id: str  # ID of the workflow this node belongs to
    workflow_run_id: Optional[str] = None  # ID of the specific workflow run (null for single-step debugging)

    # Execution positioning and flow
    index: int  # Sequence number for ordering in trace visualization
    predecessor_node_id: Optional[str] = None  # ID of the node that executed before this one
    node_id: str  # ID of the node being executed
    node_type: NodeType  # Type of node (e.g., start, llm, knowledge)
    title: str  # Display title of the node

    # Execution data
    inputs: Optional[Mapping[str, Any]] = None  # Input variables used by this node
    process_data: Optional[Mapping[str, Any]] = None  # Intermediate processing data
    outputs: Optional[Mapping[str, Any]] = None  # Output variables produced by this node

    # Execution state
    status: NodeExecutionStatus = NodeExecutionStatus.RUNNING  # Current execution status
    error: Optional[str] = None  # Error message if execution failed
    elapsed_time: float = Field(default=0.0)  # Time taken for execution in seconds

    # Additional metadata
    metadata: Optional[Mapping[NodeRunMetadataKey, Any]] = None  # Execution metadata (tokens, cost, etc.)

    # Timing information
    created_at: datetime  # When execution started
    finished_at: Optional[datetime] = None  # When execution completed

    def update_from_mapping(
        self,
        inputs: Optional[Mapping[str, Any]] = None,
        process_data: Optional[Mapping[str, Any]] = None,
        outputs: Optional[Mapping[str, Any]] = None,
        metadata: Optional[Mapping[NodeRunMetadataKey, Any]] = None,
    ) -> None:
        """
        Update the model from mappings.

        Args:
            inputs: The inputs to update
            process_data: The process data to update
            outputs: The outputs to update
            metadata: The metadata to update
        """
        if inputs is not None:
            self.inputs = dict(inputs)
        if process_data is not None:
            self.process_data = dict(process_data)
        if outputs is not None:
            self.outputs = dict(outputs)
        if metadata is not None:
            self.metadata = dict(metadata)
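A quick usage sketch of the new domain model; every value below is invented for illustration. Construction takes only the required identity fields, and update_from_mapping copies whole mappings in one call:

from datetime import UTC, datetime

from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
from core.workflow.nodes.enums import NodeType

execution = NodeExecution(
    id="00000000-0000-0000-0000-000000000001",  # example UUID, not a real record
    workflow_id="00000000-0000-0000-0000-000000000002",
    index=1,
    node_id="start",
    node_type=NodeType.START,
    title="Start",
    created_at=datetime.now(UTC).replace(tzinfo=None),
)

# When the node finishes, fill in the run data and flip the status.
execution.status = NodeExecutionStatus.SUCCEEDED
execution.update_from_mapping(inputs={"query": "hello"}, outputs={"answer": "world"})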
@@ -2,12 +2,12 @@ from collections.abc import Sequence
from dataclasses import dataclass
from typing import Literal, Optional, Protocol

from models.workflow import WorkflowNodeExecution
from core.workflow.entities.node_execution_entities import NodeExecution


@dataclass
class OrderConfig:
    """Configuration for ordering WorkflowNodeExecution instances."""
    """Configuration for ordering NodeExecution instances."""

    order_by: list[str]
    order_direction: Optional[Literal["asc", "desc"]] = None

@@ -15,10 +15,10 @@ class OrderConfig:

class WorkflowNodeExecutionRepository(Protocol):
    """
    Repository interface for WorkflowNodeExecution.
    Repository interface for NodeExecution.

    This interface defines the contract for accessing and manipulating
    WorkflowNodeExecution data, regardless of the underlying storage mechanism.
    NodeExecution data, regardless of the underlying storage mechanism.

    Note: Domain-specific concepts like multi-tenancy (tenant_id), application context (app_id),
    and trigger sources (triggered_from) should be handled at the implementation level, not in

@@ -26,24 +26,28 @@ class WorkflowNodeExecutionRepository(Protocol):
    application domains or deployment scenarios.
    """

    def save(self, execution: WorkflowNodeExecution) -> None:
    def save(self, execution: NodeExecution) -> None:
        """
        Save a WorkflowNodeExecution instance.
        Save or update a NodeExecution instance.

        This method handles both creating new records and updating existing ones.
        The implementation should determine whether to create or update based on
        the execution's ID or other identifying fields.

        Args:
            execution: The WorkflowNodeExecution instance to save
            execution: The NodeExecution instance to save or update
        """
        ...

    def get_by_node_execution_id(self, node_execution_id: str) -> Optional[WorkflowNodeExecution]:
    def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]:
        """
        Retrieve a WorkflowNodeExecution by its node_execution_id.
        Retrieve a NodeExecution by its node_execution_id.

        Args:
            node_execution_id: The node execution ID

        Returns:
            The WorkflowNodeExecution instance if found, None otherwise
            The NodeExecution instance if found, None otherwise
        """
        ...


@@ -51,9 +55,9 @@ class WorkflowNodeExecutionRepository(Protocol):
        self,
        workflow_run_id: str,
        order_config: Optional[OrderConfig] = None,
    ) -> Sequence[WorkflowNodeExecution]:
    ) -> Sequence[NodeExecution]:
        """
        Retrieve all WorkflowNodeExecution instances for a specific workflow run.
        Retrieve all NodeExecution instances for a specific workflow run.

        Args:
            workflow_run_id: The workflow run ID

@@ -62,34 +66,25 @@ class WorkflowNodeExecutionRepository(Protocol):
            order_config.order_direction: Direction to order ("asc" or "desc")

        Returns:
            A list of WorkflowNodeExecution instances
            A list of NodeExecution instances
        """
        ...

    def get_running_executions(self, workflow_run_id: str) -> Sequence[WorkflowNodeExecution]:
    def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]:
        """
        Retrieve all running WorkflowNodeExecution instances for a specific workflow run.
        Retrieve all running NodeExecution instances for a specific workflow run.

        Args:
            workflow_run_id: The workflow run ID

        Returns:
            A list of running WorkflowNodeExecution instances
        """
        ...

    def update(self, execution: WorkflowNodeExecution) -> None:
        """
        Update an existing WorkflowNodeExecution instance.

        Args:
            execution: The WorkflowNodeExecution instance to update
            A list of running NodeExecution instances
        """
        ...

    def clear(self) -> None:
        """
        Clear all WorkflowNodeExecution records based on implementation-specific criteria.
        Clear all NodeExecution records based on implementation-specific criteria.

        This method is intended to be used for bulk deletion operations, such as removing
        all records associated with a specific app_id and tenant_id in multi-tenant implementations.
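Since the interface above is a typing.Protocol, conformance is structural: any class with matching method signatures satisfies it, with no inheritance required. A minimal in-memory stand-in (illustrative only, not part of this commit) could look like:

from collections.abc import Sequence
from typing import Optional

from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus


class InMemoryNodeExecutionRepository:
    """Illustrative in-memory implementation of the repository protocol."""

    def __init__(self) -> None:
        self._store: dict[str, NodeExecution] = {}

    def save(self, execution: NodeExecution) -> None:
        # save() doubles as update: the last write for a node_execution_id wins.
        if execution.node_execution_id:
            self._store[execution.node_execution_id] = execution

    def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]:
        return self._store.get(node_execution_id)

    def get_by_workflow_run(self, workflow_run_id: str, order_config=None) -> Sequence[NodeExecution]:
        found = [e for e in self._store.values() if e.workflow_run_id == workflow_run_id]
        if order_config and order_config.order_by == ["index"]:
            found.sort(key=lambda e: e.index, reverse=order_config.order_direction == "desc")
        return found

    def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]:
        return [e for e in self.get_by_workflow_run(workflow_run_id) if e.status == NodeExecutionStatus.RUNNING]

    def clear(self) -> None:
        self._store.clear()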
@@ -58,7 +58,7 @@ from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import WorkflowCycleManager
from extensions.ext_database import db
from models.account import Account
from models.enums import CreatedByRole
from models.enums import CreatorUserRole
from models.model import EndUser
from models.workflow import (
    Workflow,

@@ -94,11 +94,11 @@ class WorkflowAppGenerateTaskPipeline:
        if isinstance(user, EndUser):
            self._user_id = user.id
            user_session_id = user.session_id
            self._created_by_role = CreatedByRole.END_USER
            self._created_by_role = CreatorUserRole.END_USER
        elif isinstance(user, Account):
            self._user_id = user.id
            user_session_id = user.id
            self._created_by_role = CreatedByRole.ACCOUNT
            self._created_by_role = CreatorUserRole.ACCOUNT
        else:
            raise ValueError(f"Invalid user type: {type(user)}")
@@ -46,26 +46,28 @@ from core.app.entities.task_entities import (
)
from core.app.task_pipeline.exc import WorkflowRunNotFoundError
from core.file import FILE_MODEL_IDENTITY, File
from core.model_runtime.utils.encoders import jsonable_encoder
from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.tools.tool_manager import ToolManager
from core.workflow.entities.node_entities import NodeRunMetadataKey
from core.workflow.entities.node_execution_entities import (
    NodeExecution,
    NodeExecutionStatus,
)
from core.workflow.enums import SystemVariableKey
from core.workflow.nodes import NodeType
from core.workflow.nodes.tool.entities import ToolNodeData
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_entry import WorkflowEntry
from models.account import Account
from models.enums import CreatedByRole, WorkflowRunTriggeredFrom
from models.model import EndUser
from models.workflow import (
from models import (
    Account,
    CreatorUserRole,
    EndUser,
    Workflow,
    WorkflowNodeExecution,
    WorkflowNodeExecutionStatus,
    WorkflowNodeExecutionTriggeredFrom,
    WorkflowRun,
    WorkflowRunStatus,
    WorkflowRunTriggeredFrom,
)


@@ -78,7 +80,6 @@ class WorkflowCycleManager:
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
    ) -> None:
        self._workflow_run: WorkflowRun | None = None
        self._workflow_node_executions: dict[str, WorkflowNodeExecution] = {}
        self._application_generate_entity = application_generate_entity
        self._workflow_system_variables = workflow_system_variables
        self._workflow_node_execution_repository = workflow_node_execution_repository

@@ -89,7 +90,7 @@ class WorkflowCycleManager:
        session: Session,
        workflow_id: str,
        user_id: str,
        created_by_role: CreatedByRole,
        created_by_role: CreatorUserRole,
    ) -> WorkflowRun:
        workflow_stmt = select(Workflow).where(Workflow.id == workflow_id)
        workflow = session.scalar(workflow_stmt)
@@ -258,21 +259,22 @@ class WorkflowCycleManager:
        workflow_run.exceptions_count = exceptions_count

        # Use the instance repository to find running executions for a workflow run
        running_workflow_node_executions = self._workflow_node_execution_repository.get_running_executions(
        running_domain_executions = self._workflow_node_execution_repository.get_running_executions(
            workflow_run_id=workflow_run.id
        )

        # Update the cache with the retrieved executions
        for execution in running_workflow_node_executions:
            if execution.node_execution_id:
                self._workflow_node_executions[execution.node_execution_id] = execution
        # Update the domain models
        now = datetime.now(UTC).replace(tzinfo=None)
        for domain_execution in running_domain_executions:
            if domain_execution.node_execution_id:
                # Update the domain model
                domain_execution.status = NodeExecutionStatus.FAILED
                domain_execution.error = error
                domain_execution.finished_at = now
                domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds()

        for workflow_node_execution in running_workflow_node_executions:
            now = datetime.now(UTC).replace(tzinfo=None)
            workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
            workflow_node_execution.error = error
            workflow_node_execution.finished_at = now
            workflow_node_execution.elapsed_time = (now - workflow_node_execution.created_at).total_seconds()
                # Update the repository with the domain model
                self._workflow_node_execution_repository.save(domain_execution)

        if trace_manager:
            trace_manager.add_trace_task(

@@ -286,63 +288,67 @@ class WorkflowCycleManager:

        return workflow_run

    def _handle_node_execution_start(
        self, *, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
    ) -> WorkflowNodeExecution:
        workflow_node_execution = WorkflowNodeExecution()
        workflow_node_execution.id = str(uuid4())
        workflow_node_execution.tenant_id = workflow_run.tenant_id
        workflow_node_execution.app_id = workflow_run.app_id
        workflow_node_execution.workflow_id = workflow_run.workflow_id
        workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
        workflow_node_execution.workflow_run_id = workflow_run.id
        workflow_node_execution.predecessor_node_id = event.predecessor_node_id
        workflow_node_execution.index = event.node_run_index
        workflow_node_execution.node_execution_id = event.node_execution_id
        workflow_node_execution.node_id = event.node_id
        workflow_node_execution.node_type = event.node_type.value
        workflow_node_execution.title = event.node_data.title
        workflow_node_execution.status = WorkflowNodeExecutionStatus.RUNNING.value
        workflow_node_execution.created_by_role = workflow_run.created_by_role
        workflow_node_execution.created_by = workflow_run.created_by
        workflow_node_execution.execution_metadata = json.dumps(
            {
                NodeRunMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id,
                NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
                NodeRunMetadataKey.LOOP_ID: event.in_loop_id,
            }
    def _handle_node_execution_start(self, *, workflow_run: WorkflowRun, event: QueueNodeStartedEvent) -> NodeExecution:
        # Create a domain model
        created_at = datetime.now(UTC).replace(tzinfo=None)
        metadata = {
            NodeRunMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id,
            NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
            NodeRunMetadataKey.LOOP_ID: event.in_loop_id,
        }

        domain_execution = NodeExecution(
            id=str(uuid4()),
            workflow_id=workflow_run.workflow_id,
            workflow_run_id=workflow_run.id,
            predecessor_node_id=event.predecessor_node_id,
            index=event.node_run_index,
            node_execution_id=event.node_execution_id,
            node_id=event.node_id,
            node_type=event.node_type,
            title=event.node_data.title,
            status=NodeExecutionStatus.RUNNING,
            metadata=metadata,
            created_at=created_at,
        )
        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)

        # Use the instance repository to save the workflow node execution
        self._workflow_node_execution_repository.save(workflow_node_execution)
        # Use the instance repository to save the domain model
        self._workflow_node_execution_repository.save(domain_execution)

        self._workflow_node_executions[event.node_execution_id] = workflow_node_execution
        return workflow_node_execution
        return domain_execution

    def _handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:
        workflow_node_execution = self._get_workflow_node_execution(node_execution_id=event.node_execution_id)
    def _handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution:
        # Get the domain model from repository
        domain_execution = self._workflow_node_execution_repository.get_by_node_execution_id(event.node_execution_id)
        if not domain_execution:
            raise ValueError(f"Domain node execution not found: {event.node_execution_id}")

        # Process data
        inputs = WorkflowEntry.handle_special_values(event.inputs)
        process_data = WorkflowEntry.handle_special_values(event.process_data)
        outputs = WorkflowEntry.handle_special_values(event.outputs)
        execution_metadata_dict = dict(event.execution_metadata or {})
        execution_metadata = json.dumps(jsonable_encoder(execution_metadata_dict)) if execution_metadata_dict else None

        # Convert metadata keys to strings
        execution_metadata_dict = {}
        if event.execution_metadata:
            for key, value in event.execution_metadata.items():
                execution_metadata_dict[key] = value

        finished_at = datetime.now(UTC).replace(tzinfo=None)
        elapsed_time = (finished_at - event.start_at).total_seconds()

        process_data = WorkflowEntry.handle_special_values(event.process_data)
        # Update domain model
        domain_execution.status = NodeExecutionStatus.SUCCEEDED
        domain_execution.update_from_mapping(
            inputs=inputs, process_data=process_data, outputs=outputs, metadata=execution_metadata_dict
        )
        domain_execution.finished_at = finished_at
        domain_execution.elapsed_time = elapsed_time

        workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
        workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
        workflow_node_execution.process_data = json.dumps(process_data) if process_data else None
        workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
        workflow_node_execution.execution_metadata = execution_metadata
        workflow_node_execution.finished_at = finished_at
        workflow_node_execution.elapsed_time = elapsed_time
        # Update the repository with the domain model
        self._workflow_node_execution_repository.save(domain_execution)

        # Use the instance repository to update the workflow node execution
        self._workflow_node_execution_repository.update(workflow_node_execution)
        return workflow_node_execution
        return domain_execution

    def _handle_workflow_node_execution_failed(
        self,
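The start/success handlers above share one shape: fetch the domain model from the repository by node_execution_id, mutate it, then save it back (save now covers updates, so the separate update call disappears). A condensed sketch of that round trip, with simplified arguments and a hypothetical helper name:

from datetime import UTC, datetime

from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus


def finish_node_execution(repository, node_execution_id: str, outputs: dict) -> NodeExecution:
    # Hypothetical condensation of _handle_workflow_node_execution_success.
    domain_execution = repository.get_by_node_execution_id(node_execution_id)
    if not domain_execution:
        raise ValueError(f"Domain node execution not found: {node_execution_id}")

    finished_at = datetime.now(UTC).replace(tzinfo=None)
    domain_execution.status = NodeExecutionStatus.SUCCEEDED
    domain_execution.update_from_mapping(outputs=outputs)
    domain_execution.finished_at = finished_at
    domain_execution.elapsed_time = (finished_at - domain_execution.created_at).total_seconds()

    repository.save(domain_execution)  # save() persists updates as well as inserts
    return domain_execution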
@@ -351,43 +357,52 @@ class WorkflowCycleManager:
        | QueueNodeInIterationFailedEvent
        | QueueNodeInLoopFailedEvent
        | QueueNodeExceptionEvent,
    ) -> WorkflowNodeExecution:
    ) -> NodeExecution:
        """
        Workflow node execution failed
        :param event: queue node failed event
        :return:
        """
        workflow_node_execution = self._get_workflow_node_execution(node_execution_id=event.node_execution_id)
        # Get the domain model from repository
        domain_execution = self._workflow_node_execution_repository.get_by_node_execution_id(event.node_execution_id)
        if not domain_execution:
            raise ValueError(f"Domain node execution not found: {event.node_execution_id}")

        # Process data
        inputs = WorkflowEntry.handle_special_values(event.inputs)
        process_data = WorkflowEntry.handle_special_values(event.process_data)
        outputs = WorkflowEntry.handle_special_values(event.outputs)

        # Convert metadata keys to strings
        execution_metadata_dict = {}
        if event.execution_metadata:
            for key, value in event.execution_metadata.items():
                execution_metadata_dict[key] = value

        finished_at = datetime.now(UTC).replace(tzinfo=None)
        elapsed_time = (finished_at - event.start_at).total_seconds()
        execution_metadata = (
            json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
        )
        process_data = WorkflowEntry.handle_special_values(event.process_data)
        workflow_node_execution.status = (
            WorkflowNodeExecutionStatus.FAILED.value

        # Update domain model
        domain_execution.status = (
            NodeExecutionStatus.FAILED
            if not isinstance(event, QueueNodeExceptionEvent)
            else WorkflowNodeExecutionStatus.EXCEPTION.value
            else NodeExecutionStatus.EXCEPTION
        )
        workflow_node_execution.error = event.error
        workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
        workflow_node_execution.process_data = json.dumps(process_data) if process_data else None
        workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
        workflow_node_execution.finished_at = finished_at
        workflow_node_execution.elapsed_time = elapsed_time
        workflow_node_execution.execution_metadata = execution_metadata
        domain_execution.error = event.error
        domain_execution.update_from_mapping(
            inputs=inputs, process_data=process_data, outputs=outputs, metadata=execution_metadata_dict
        )
        domain_execution.finished_at = finished_at
        domain_execution.elapsed_time = elapsed_time

        self._workflow_node_execution_repository.update(workflow_node_execution)
        # Update the repository with the domain model
        self._workflow_node_execution_repository.save(domain_execution)

        return workflow_node_execution
        return domain_execution

    def _handle_workflow_node_execution_retried(
        self, *, workflow_run: WorkflowRun, event: QueueNodeRetryEvent
    ) -> WorkflowNodeExecution:
    ) -> NodeExecution:
        """
        Workflow node execution retried
        :param workflow_run: workflow run
@@ -399,47 +414,47 @@ class WorkflowCycleManager:
        elapsed_time = (finished_at - created_at).total_seconds()
        inputs = WorkflowEntry.handle_special_values(event.inputs)
        outputs = WorkflowEntry.handle_special_values(event.outputs)

        # Convert metadata keys to strings
        origin_metadata = {
            NodeRunMetadataKey.ITERATION_ID: event.in_iteration_id,
            NodeRunMetadataKey.PARALLEL_MODE_RUN_ID: event.parallel_mode_run_id,
            NodeRunMetadataKey.LOOP_ID: event.in_loop_id,
        }
        merged_metadata = (
            {**jsonable_encoder(event.execution_metadata), **origin_metadata}
            if event.execution_metadata is not None
            else origin_metadata

        # Convert execution metadata keys to strings
        execution_metadata_dict: dict[NodeRunMetadataKey, str | None] = {}
        if event.execution_metadata:
            for key, value in event.execution_metadata.items():
                execution_metadata_dict[key] = value

        merged_metadata = {**execution_metadata_dict, **origin_metadata} if execution_metadata_dict else origin_metadata

        # Create a domain model
        domain_execution = NodeExecution(
            id=str(uuid4()),
            workflow_id=workflow_run.workflow_id,
            workflow_run_id=workflow_run.id,
            predecessor_node_id=event.predecessor_node_id,
            node_execution_id=event.node_execution_id,
            node_id=event.node_id,
            node_type=event.node_type,
            title=event.node_data.title,
            status=NodeExecutionStatus.RETRY,
            created_at=created_at,
            finished_at=finished_at,
            elapsed_time=elapsed_time,
            error=event.error,
            index=event.node_run_index,
        )
        execution_metadata = json.dumps(merged_metadata)

        workflow_node_execution = WorkflowNodeExecution()
        workflow_node_execution.id = str(uuid4())
        workflow_node_execution.tenant_id = workflow_run.tenant_id
        workflow_node_execution.app_id = workflow_run.app_id
        workflow_node_execution.workflow_id = workflow_run.workflow_id
        workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value
        workflow_node_execution.workflow_run_id = workflow_run.id
        workflow_node_execution.predecessor_node_id = event.predecessor_node_id
        workflow_node_execution.node_execution_id = event.node_execution_id
        workflow_node_execution.node_id = event.node_id
        workflow_node_execution.node_type = event.node_type.value
        workflow_node_execution.title = event.node_data.title
        workflow_node_execution.status = WorkflowNodeExecutionStatus.RETRY.value
        workflow_node_execution.created_by_role = workflow_run.created_by_role
        workflow_node_execution.created_by = workflow_run.created_by
        workflow_node_execution.created_at = created_at
        workflow_node_execution.finished_at = finished_at
        workflow_node_execution.elapsed_time = elapsed_time
        workflow_node_execution.error = event.error
        workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
        workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
        workflow_node_execution.execution_metadata = execution_metadata
        workflow_node_execution.index = event.node_run_index
        # Update with mappings
        domain_execution.update_from_mapping(inputs=inputs, outputs=outputs, metadata=merged_metadata)

        # Use the instance repository to save the workflow node execution
        self._workflow_node_execution_repository.save(workflow_node_execution)
        # Use the instance repository to save the domain model
        self._workflow_node_execution_repository.save(domain_execution)

        self._workflow_node_executions[event.node_execution_id] = workflow_node_execution
        return workflow_node_execution
        return domain_execution

    def _workflow_start_to_stream_response(
        self,
@@ -469,7 +484,7 @@ class WorkflowCycleManager:
        workflow_run: WorkflowRun,
    ) -> WorkflowFinishStreamResponse:
        created_by = None
        if workflow_run.created_by_role == CreatedByRole.ACCOUNT:
        if workflow_run.created_by_role == CreatorUserRole.ACCOUNT:
            stmt = select(Account).where(Account.id == workflow_run.created_by)
            account = session.scalar(stmt)
            if account:

@@ -478,7 +493,7 @@ class WorkflowCycleManager:
                    "name": account.name,
                    "email": account.email,
                }
        elif workflow_run.created_by_role == CreatedByRole.END_USER:
        elif workflow_run.created_by_role == CreatorUserRole.END_USER:
            stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
            end_user = session.scalar(stmt)
            if end_user:

@@ -515,9 +530,9 @@ class WorkflowCycleManager:
        *,
        event: QueueNodeStartedEvent,
        task_id: str,
        workflow_node_execution: WorkflowNodeExecution,
        workflow_node_execution: NodeExecution,
    ) -> Optional[NodeStartStreamResponse]:
        if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None

@@ -532,7 +547,7 @@ class WorkflowCycleManager:
            title=workflow_node_execution.title,
            index=workflow_node_execution.index,
            predecessor_node_id=workflow_node_execution.predecessor_node_id,
            inputs=workflow_node_execution.inputs_dict,
            inputs=workflow_node_execution.inputs,
            created_at=int(workflow_node_execution.created_at.timestamp()),
            parallel_id=event.parallel_id,
            parallel_start_node_id=event.parallel_start_node_id,
@@ -565,9 +580,9 @@ class WorkflowCycleManager:
        | QueueNodeInLoopFailedEvent
        | QueueNodeExceptionEvent,
        task_id: str,
        workflow_node_execution: WorkflowNodeExecution,
        workflow_node_execution: NodeExecution,
    ) -> Optional[NodeFinishStreamResponse]:
        if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None

@@ -584,16 +599,16 @@ class WorkflowCycleManager:
            index=workflow_node_execution.index,
            title=workflow_node_execution.title,
            predecessor_node_id=workflow_node_execution.predecessor_node_id,
            inputs=workflow_node_execution.inputs_dict,
            process_data=workflow_node_execution.process_data_dict,
            outputs=workflow_node_execution.outputs_dict,
            inputs=workflow_node_execution.inputs,
            process_data=workflow_node_execution.process_data,
            outputs=workflow_node_execution.outputs,
            status=workflow_node_execution.status,
            error=workflow_node_execution.error,
            elapsed_time=workflow_node_execution.elapsed_time,
            execution_metadata=workflow_node_execution.execution_metadata_dict,
            execution_metadata=workflow_node_execution.metadata,
            created_at=int(workflow_node_execution.created_at.timestamp()),
            finished_at=int(workflow_node_execution.finished_at.timestamp()),
            files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs_dict or {}),
            files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
            parallel_id=event.parallel_id,
            parallel_start_node_id=event.parallel_start_node_id,
            parent_parallel_id=event.parent_parallel_id,
@@ -608,9 +623,9 @@ class WorkflowCycleManager:
        *,
        event: QueueNodeRetryEvent,
        task_id: str,
        workflow_node_execution: WorkflowNodeExecution,
        workflow_node_execution: NodeExecution,
    ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
        if workflow_node_execution.node_type in {NodeType.ITERATION.value, NodeType.LOOP.value}:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None

@@ -627,16 +642,16 @@ class WorkflowCycleManager:
            index=workflow_node_execution.index,
            title=workflow_node_execution.title,
            predecessor_node_id=workflow_node_execution.predecessor_node_id,
            inputs=workflow_node_execution.inputs_dict,
            process_data=workflow_node_execution.process_data_dict,
            outputs=workflow_node_execution.outputs_dict,
            inputs=workflow_node_execution.inputs,
            process_data=workflow_node_execution.process_data,
            outputs=workflow_node_execution.outputs,
            status=workflow_node_execution.status,
            error=workflow_node_execution.error,
            elapsed_time=workflow_node_execution.elapsed_time,
            execution_metadata=workflow_node_execution.execution_metadata_dict,
            execution_metadata=workflow_node_execution.metadata,
            created_at=int(workflow_node_execution.created_at.timestamp()),
            finished_at=int(workflow_node_execution.finished_at.timestamp()),
            files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs_dict or {}),
            files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
            parallel_id=event.parallel_id,
            parallel_start_node_id=event.parallel_start_node_id,
            parent_parallel_id=event.parent_parallel_id,
@@ -908,23 +923,6 @@ class WorkflowCycleManager:

        return workflow_run

    def _get_workflow_node_execution(self, node_execution_id: str) -> WorkflowNodeExecution:
        # First check the cache for performance
        if node_execution_id in self._workflow_node_executions:
            cached_execution = self._workflow_node_executions[node_execution_id]
            # No need to merge with session since expire_on_commit=False
            return cached_execution

        # If not in cache, use the instance repository to get by node_execution_id
        execution = self._workflow_node_execution_repository.get_by_node_execution_id(node_execution_id)

        if not execution:
            raise ValueError(f"Workflow node execution not found: {node_execution_id}")

        # Update cache
        self._workflow_node_executions[node_execution_id] = execution
        return execution

    def _handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
        """
        Handle agent log
@@ -0,0 +1,73 @@
import json
import logging

import flask
import werkzeug.http
from flask import Flask
from flask.signals import request_finished, request_started

from configs import dify_config

_logger = logging.getLogger(__name__)


def _is_content_type_json(content_type: str) -> bool:
    if not content_type:
        return False
    content_type_no_option, _ = werkzeug.http.parse_options_header(content_type)
    return content_type_no_option.lower() == "application/json"


def _log_request_started(_sender, **_extra):
    """Log the start of a request."""
    if not _logger.isEnabledFor(logging.DEBUG):
        return

    request = flask.request
    if not (_is_content_type_json(request.content_type) and request.data):
        _logger.debug("Received Request %s -> %s", request.method, request.path)
        return
    try:
        json_data = json.loads(request.data)
    except (TypeError, ValueError):
        _logger.exception("Failed to parse JSON request")
        return
    formatted_json = json.dumps(json_data, ensure_ascii=False, indent=2)
    _logger.debug(
        "Received Request %s -> %s, Request Body:\n%s",
        request.method,
        request.path,
        formatted_json,
    )


def _log_request_finished(_sender, response, **_extra):
    """Log the end of a request."""
    if not _logger.isEnabledFor(logging.DEBUG) or response is None:
        return

    if not _is_content_type_json(response.content_type):
        _logger.debug("Response %s %s", response.status, response.content_type)
        return

    response_data = response.get_data(as_text=True)
    try:
        json_data = json.loads(response_data)
    except (TypeError, ValueError):
        _logger.exception("Failed to parse JSON response")
        return
    formatted_json = json.dumps(json_data, ensure_ascii=False, indent=2)
    _logger.debug(
        "Response %s %s, Response Body:\n%s",
        response.status,
        response.content_type,
        formatted_json,
    )


def init_app(app: Flask):
    """Initialize the request logging extension."""
    if not dify_config.ENABLE_REQUEST_LOGGING:
        return
    request_started.connect(_log_request_started, app)
    request_finished.connect(_log_request_finished, app)
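Both guards in this extension must pass before anything is logged: ENABLE_REQUEST_LOGGING at init time and DEBUG-level logging at request time. A small standalone sketch (a plain Flask app rather than Dify's app factory, and it assumes ENABLE_REQUEST_LOGGING=true is set in the environment):

import logging

from flask import Flask, jsonify

from extensions import ext_request_logging

logging.basicConfig(level=logging.DEBUG)  # the signal handlers no-op below DEBUG

app = Flask(__name__)
ext_request_logging.init_app(app)  # no-op unless ENABLE_REQUEST_LOGGING is enabled


@app.route("/echo", methods=["POST"])
def echo():
    return jsonify({"ok": True})

# POSTing a JSON body to /echo now logs the pretty-printed request and response bodies.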
@@ -27,7 +27,7 @@ from .dataset import (
    Whitelist,
)
from .engine import db
from .enums import CreatedByRole, UserFrom, WorkflowRunTriggeredFrom
from .enums import CreatorUserRole, UserFrom, WorkflowRunTriggeredFrom
from .model import (
    ApiRequest,
    ApiToken,

@@ -112,7 +112,7 @@ __all__ = [
    "CeleryTaskSet",
    "Conversation",
    "ConversationVariable",
    "CreatedByRole",
    "CreatorUserRole",
    "DataSourceApiKeyAuthBinding",
    "DataSourceOauthBinding",
    "Dataset",
@@ -1,7 +1,7 @@
from enum import StrEnum


class CreatedByRole(StrEnum):
class CreatorUserRole(StrEnum):
    ACCOUNT = "account"
    END_USER = "end_user"
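The rename from CreatedByRole to CreatorUserRole is safe for persisted data because StrEnum members are strings: rows stored as "account" or "end_user" still round-trip through the renamed enum. A quick check:

from enum import StrEnum


class CreatorUserRole(StrEnum):
    ACCOUNT = "account"
    END_USER = "end_user"


# Members compare equal to their stored string values, so existing rows keep working.
assert CreatorUserRole.ACCOUNT == "account"
assert CreatorUserRole("end_user") is CreatorUserRole.END_USER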
@@ -29,7 +29,7 @@ from libs.helper import generate_string
from .account import Account, Tenant
from .base import Base
from .engine import db
from .enums import CreatedByRole
from .enums import CreatorUserRole
from .types import StringUUID
from .workflow import WorkflowRunStatus


@@ -1270,7 +1270,7 @@ class MessageFile(Base):
        url: str | None = None,
        belongs_to: Literal["user", "assistant"] | None = None,
        upload_file_id: str | None = None,
        created_by_role: CreatedByRole,
        created_by_role: CreatorUserRole,
        created_by: str,
    ):
        self.message_id = message_id

@@ -1417,7 +1417,7 @@ class EndUser(Base, UserMixin):
    )

    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
    tenant_id = db.Column(StringUUID, nullable=False)
    tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
    app_id = db.Column(StringUUID, nullable=True)
    type = db.Column(db.String(255), nullable=False)
    external_user_id = db.Column(db.String(255), nullable=True)

@@ -1547,7 +1547,7 @@ class UploadFile(Base):
        size: int,
        extension: str,
        mime_type: str,
        created_by_role: CreatedByRole,
        created_by_role: CreatorUserRole,
        created_by: str,
        created_at: datetime,
        used: bool,
@@ -22,7 +22,7 @@ from libs import helper
from .account import Account
from .base import Base
from .engine import db
from .enums import CreatedByRole
from .enums import CreatorUserRole
from .types import StringUUID

if TYPE_CHECKING:

@@ -429,15 +429,15 @@ class WorkflowRun(Base):

    @property
    def created_by_account(self):
        created_by_role = CreatedByRole(self.created_by_role)
        return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None
        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None

    @property
    def created_by_end_user(self):
        from models.model import EndUser

        created_by_role = CreatedByRole(self.created_by_role)
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None
        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None

    @property
    def graph_dict(self):
@@ -634,17 +634,17 @@ class WorkflowNodeExecution(Base):

    @property
    def created_by_account(self):
        created_by_role = CreatedByRole(self.created_by_role)
        created_by_role = CreatorUserRole(self.created_by_role)
        # TODO(-LAN-): Avoid using db.session.get() here.
        return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None
        return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None

    @property
    def created_by_end_user(self):
        from models.model import EndUser

        created_by_role = CreatedByRole(self.created_by_role)
        created_by_role = CreatorUserRole(self.created_by_role)
        # TODO(-LAN-): Avoid using db.session.get() here.
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None

    @property
    def inputs_dict(self):

@@ -755,15 +755,15 @@ class WorkflowAppLog(Base):

    @property
    def created_by_account(self):
        created_by_role = CreatedByRole(self.created_by_role)
        return db.session.get(Account, self.created_by) if created_by_role == CreatedByRole.ACCOUNT else None
        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(Account, self.created_by) if created_by_role == CreatorUserRole.ACCOUNT else None

    @property
    def created_by_end_user(self):
        from models.model import EndUser

        created_by_role = CreatedByRole(self.created_by_role)
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatedByRole.END_USER else None
        created_by_role = CreatorUserRole(self.created_by_role)
        return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None


class ConversationVariable(Base):
@@ -992,7 +992,7 @@ class DocumentService:
                    created_by=account.id,
                )
            else:
                logging.warn(
                logging.warning(
                    f"Invalid process rule mode: {process_rule.mode}, can not find dataset process rule"
                )
                return
@@ -19,7 +19,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.account import Account
from models.enums import CreatedByRole
from models.enums import CreatorUserRole
from models.model import EndUser, UploadFile

from .errors.file import FileTooLargeError, UnsupportedFileTypeError

@@ -81,7 +81,7 @@ class FileService:
            size=file_size,
            extension=extension,
            mime_type=mimetype,
            created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
            created_by_role=(CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER),
            created_by=user.id,
            created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            used=False,

@@ -133,7 +133,7 @@ class FileService:
            extension="txt",
            mime_type="text/plain",
            created_by=current_user.id,
            created_by_role=CreatedByRole.ACCOUNT,
            created_by_role=CreatorUserRole.ACCOUNT,
            created_at=datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
            used=True,
            used_by=current_user.id,
@@ -5,7 +5,7 @@ from sqlalchemy import and_, func, or_, select
from sqlalchemy.orm import Session

from models import App, EndUser, WorkflowAppLog, WorkflowRun
from models.enums import CreatedByRole
from models.enums import CreatorUserRole
from models.workflow import WorkflowRunStatus


@@ -58,7 +58,7 @@ class WorkflowAppService:

        stmt = stmt.outerjoin(
            EndUser,
            and_(WorkflowRun.created_by == EndUser.id, WorkflowRun.created_by_role == CreatedByRole.END_USER),
            and_(WorkflowRun.created_by == EndUser.id, WorkflowRun.created_by_role == CreatorUserRole.END_USER),
        ).where(or_(*keyword_conditions))

        if status:
@@ -1,4 +1,5 @@
import threading
from collections.abc import Sequence
from typing import Optional

import contexts

@@ -6,11 +7,13 @@ from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from extensions.ext_database import db
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from models.enums import WorkflowRunTriggeredFrom
from models.model import App
from models.workflow import (
from models import (
    Account,
    App,
    EndUser,
    WorkflowNodeExecution,
    WorkflowRun,
    WorkflowRunTriggeredFrom,
)
@@ -116,7 +119,12 @@ class WorkflowRunService:

        return workflow_run

    def get_workflow_run_node_executions(self, app_model: App, run_id: str) -> list[WorkflowNodeExecution]:
    def get_workflow_run_node_executions(
        self,
        app_model: App,
        run_id: str,
        user: Account | EndUser,
    ) -> Sequence[WorkflowNodeExecution]:
        """
        Get workflow run node execution list
        """

@@ -128,13 +136,18 @@ class WorkflowRunService:
        if not workflow_run:
            return []

        # Use the repository to get the node executions
        repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
            session_factory=db.engine,
            user=user,
            app_id=app_model.id,
            triggered_from=None,
        )

        # Use the repository to get the node executions with ordering
        order_config = OrderConfig(order_by=["index"], order_direction="desc")
        node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config)

        return list(node_executions)
        # Convert domain models to database models
        workflow_node_executions = [repository.to_db_model(node_execution) for node_execution in node_executions]

        return workflow_node_executions
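For reference, the ordering requested above, isolated as a sketch (repository construction elided; run_id is assumed to be a valid workflow run ID):

from core.workflow.repository.workflow_node_execution_repository import OrderConfig

# Newest-first ordering for the trace view, exactly as the service above requests it.
order_config = OrderConfig(order_by=["index"], order_direction="desc")
node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config)

# The API layer still speaks the database model, so domain models are converted back.
workflow_node_executions = [repository.to_db_model(e) for e in node_executions]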
@@ -10,10 +10,10 @@ from sqlalchemy.orm import Session

from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.model_runtime.utils.encoders import jsonable_encoder
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.variables import Variable
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
from core.workflow.errors import WorkflowNodeRunFailedError
from core.workflow.graph_engine.entities.event import InNodeEvent
from core.workflow.nodes import NodeType

@@ -26,7 +26,6 @@ from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
from models.account import Account
from models.enums import CreatedByRole
from models.model import App, AppMode
from models.tools import WorkflowToolProvider
from models.workflow import (
@@ -268,33 +267,37 @@ class WorkflowService:
        # run draft workflow node
        start_at = time.perf_counter()

        workflow_node_execution = self._handle_node_run_result(
            getter=lambda: WorkflowEntry.single_step_run(
        node_execution = self._handle_node_run_result(
            invoke_node_fn=lambda: WorkflowEntry.single_step_run(
                workflow=draft_workflow,
                node_id=node_id,
                user_inputs=user_inputs,
                user_id=account.id,
            ),
            start_at=start_at,
            tenant_id=app_model.tenant_id,
            node_id=node_id,
        )

        workflow_node_execution.app_id = app_model.id
        workflow_node_execution.created_by = account.id
        workflow_node_execution.workflow_id = draft_workflow.id
        # Set workflow_id on the NodeExecution
        node_execution.workflow_id = draft_workflow.id

        # Use the repository to save the workflow node execution
        # Create repository and save the node execution
        repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
            session_factory=db.engine,
            user=account,
            app_id=app_model.id,
            triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
        )
        repository.save(workflow_node_execution)
        repository.save(node_execution)

        # Convert node_execution to WorkflowNodeExecution after save
        workflow_node_execution = repository.to_db_model(node_execution)

        return workflow_node_execution

    def run_free_workflow_node(
        self, node_data: dict, tenant_id: str, user_id: str, node_id: str, user_inputs: dict[str, Any]
    ) -> WorkflowNodeExecution:
    ) -> NodeExecution:
        """
        Run draft workflow node
        """
@@ -302,7 +305,7 @@ class WorkflowService:
        start_at = time.perf_counter()

        workflow_node_execution = self._handle_node_run_result(
            getter=lambda: WorkflowEntry.run_free_node(
            invoke_node_fn=lambda: WorkflowEntry.run_free_node(
                node_id=node_id,
                node_data=node_data,
                tenant_id=tenant_id,

@@ -310,7 +313,6 @@ class WorkflowService:
                user_inputs=user_inputs,
            ),
            start_at=start_at,
            tenant_id=tenant_id,
            node_id=node_id,
        )


@@ -318,21 +320,12 @@ class WorkflowService:

    def _handle_node_run_result(
        self,
        getter: Callable[[], tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]],
        invoke_node_fn: Callable[[], tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]],
        start_at: float,
        tenant_id: str,
        node_id: str,
    ) -> WorkflowNodeExecution:
        """
        Handle node run result

        :param getter: Callable[[], tuple[BaseNode, Generator[RunEvent | InNodeEvent, None, None]]]
        :param start_at: float
        :param tenant_id: str
        :param node_id: str
        """
    ) -> NodeExecution:
        try:
            node_instance, generator = getter()
            node_instance, generator = invoke_node_fn()

            node_run_result: NodeRunResult | None = None
            for event in generator:
@@ -381,20 +374,21 @@ class WorkflowService:
            node_run_result = None
            error = e.error

        workflow_node_execution = WorkflowNodeExecution()
        workflow_node_execution.id = str(uuid4())
        workflow_node_execution.tenant_id = tenant_id
        workflow_node_execution.triggered_from = WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP.value
        workflow_node_execution.index = 1
        workflow_node_execution.node_id = node_id
        workflow_node_execution.node_type = node_instance.node_type
        workflow_node_execution.title = node_instance.node_data.title
        workflow_node_execution.elapsed_time = time.perf_counter() - start_at
        workflow_node_execution.created_by_role = CreatedByRole.ACCOUNT.value
        workflow_node_execution.created_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_node_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
        # Create a NodeExecution domain model
        node_execution = NodeExecution(
            id=str(uuid4()),
            workflow_id="",  # This is a single-step execution, so no workflow ID
            index=1,
            node_id=node_id,
            node_type=node_instance.node_type,
            title=node_instance.node_data.title,
            elapsed_time=time.perf_counter() - start_at,
            created_at=datetime.now(UTC).replace(tzinfo=None),
            finished_at=datetime.now(UTC).replace(tzinfo=None),
        )

        if run_succeeded and node_run_result:
            # create workflow node execution
            # Set inputs, process_data, and outputs as dictionaries (not JSON strings)
            inputs = WorkflowEntry.handle_special_values(node_run_result.inputs) if node_run_result.inputs else None
            process_data = (
                WorkflowEntry.handle_special_values(node_run_result.process_data)

@@ -403,23 +397,23 @@ class WorkflowService:
            )
            outputs = WorkflowEntry.handle_special_values(node_run_result.outputs) if node_run_result.outputs else None

            workflow_node_execution.inputs = json.dumps(inputs)
            workflow_node_execution.process_data = json.dumps(process_data)
            workflow_node_execution.outputs = json.dumps(outputs)
            workflow_node_execution.execution_metadata = (
                json.dumps(jsonable_encoder(node_run_result.metadata)) if node_run_result.metadata else None
            )
            if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED:
                workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
            elif node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                workflow_node_execution.status = WorkflowNodeExecutionStatus.EXCEPTION.value
                workflow_node_execution.error = node_run_result.error
        else:
            # create workflow node execution
            workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
            workflow_node_execution.error = error
            node_execution.inputs = inputs
            node_execution.process_data = process_data
            node_execution.outputs = outputs
            node_execution.metadata = node_run_result.metadata

        return workflow_node_execution
            # Map status from WorkflowNodeExecutionStatus to NodeExecutionStatus
            if node_run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED:
                node_execution.status = NodeExecutionStatus.SUCCEEDED
            elif node_run_result.status == WorkflowNodeExecutionStatus.EXCEPTION:
                node_execution.status = NodeExecutionStatus.EXCEPTION
                node_execution.error = node_run_result.error
        else:
            # Set failed status and error
            node_execution.status = NodeExecutionStatus.FAILED
            node_execution.error = error

        return node_execution

    def convert_to_workflow(self, app_model: App, account: Account, args: dict) -> App:
        """
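The tail of _handle_node_run_result maps the engine's WorkflowNodeExecutionStatus onto the domain NodeExecutionStatus. Pulled out as a standalone helper (an illustrative refactor, not code from this commit; the single error parameter merges the two error sources in the original):

from core.workflow.entities.node_execution_entities import NodeExecutionStatus
from models.workflow import WorkflowNodeExecutionStatus


def map_status(run_status, error):
    # SUCCEEDED and EXCEPTION map one-to-one; anything else is treated as FAILED.
    if run_status == WorkflowNodeExecutionStatus.SUCCEEDED:
        return NodeExecutionStatus.SUCCEEDED, None
    if run_status == WorkflowNodeExecutionStatus.EXCEPTION:
        return NodeExecutionStatus.EXCEPTION, error
    return NodeExecutionStatus.FAILED, error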
@@ -4,16 +4,19 @@ from collections.abc import Callable

import click
from celery import shared_task  # type: ignore
from sqlalchemy import delete
from sqlalchemy import delete, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session

from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db
from models.dataset import AppDatasetJoin
from models.model import (
from models import (
    Account,
    ApiToken,
    App,
    AppAnnotationHitHistory,
    AppAnnotationSetting,
    AppDatasetJoin,
    AppModelConfig,
    Conversation,
    EndUser,
@@ -188,9 +191,24 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):


def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
    # Get app's owner
    with Session(db.engine, expire_on_commit=False) as session:
        stmt = select(Account).where(Account.id == App.owner_id).where(App.id == app_id)
        user = session.scalar(stmt)

    if user is None:
        errmsg = (
            f"Failed to delete workflow node executions for tenant {tenant_id} and app {app_id}, app's owner not found"
        )
        logging.error(errmsg)
        raise ValueError(errmsg)

    # Create a repository instance for WorkflowNodeExecution
    repository = SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=db.engine, tenant_id=tenant_id, app_id=app_id
        session_factory=db.engine,
        user=user,
        app_id=app_id,
        triggered_from=None,
    )

    # Use the clear method to delete all records for this tenant_id and app_id
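The owner lookup above expresses the Account-App relationship with two chained where() clauses, which is an implicit join. An equivalent formulation with an explicit join condition, shown only for clarity (app_id comes from the surrounding task's arguments):

from sqlalchemy import select

from models import Account, App

# Match the Account whose id is the owner_id of the app being deleted.
stmt = (
    select(Account)
    .join(App, App.owner_id == Account.id)
    .where(App.id == app_id)
)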
@@ -16,10 +16,9 @@ from core.workflow.enums import SystemVariableKey
from core.workflow.nodes import NodeType
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import WorkflowCycleManager
from models.enums import CreatedByRole
from models.enums import CreatorUserRole
from models.workflow import (
    Workflow,
    WorkflowNodeExecution,
    WorkflowNodeExecutionStatus,
    WorkflowRun,
    WorkflowRunStatus,

@@ -94,7 +93,7 @@ def mock_workflow_run():
    workflow_run.app_id = "test-app-id"
    workflow_run.workflow_id = "test-workflow-id"
    workflow_run.status = WorkflowRunStatus.RUNNING
    workflow_run.created_by_role = CreatedByRole.ACCOUNT
    workflow_run.created_by_role = CreatorUserRole.ACCOUNT
    workflow_run.created_by = "test-user-id"
    workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
    workflow_run.inputs_dict = {"query": "test query"}
@@ -107,7 +106,6 @@ def test_init(
):
    """Test initialization of WorkflowCycleManager"""
    assert workflow_cycle_manager._workflow_run is None
    assert workflow_cycle_manager._workflow_node_executions == {}
    assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity
    assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables
    assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository

@@ -123,7 +121,7 @@ def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_wo
        session=mock_session,
        workflow_id="test-workflow-id",
        user_id="test-user-id",
        created_by_role=CreatedByRole.ACCOUNT,
        created_by_role=CreatorUserRole.ACCOUNT,
    )

    # Verify the result

@@ -132,7 +130,7 @@ def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_wo
    assert workflow_run.workflow_id == mock_workflow.id
    assert workflow_run.sequence_number == 6  # max_sequence + 1
    assert workflow_run.status == WorkflowRunStatus.RUNNING
    assert workflow_run.created_by_role == CreatedByRole.ACCOUNT
    assert workflow_run.created_by_role == CreatorUserRole.ACCOUNT
    assert workflow_run.created_by == "test-user-id"

    # Verify session.add was called
@ -215,24 +213,23 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
|
|||
)
|
||||
|
||||
# Verify the result
|
||||
assert result.tenant_id == mock_workflow_run.tenant_id
|
||||
assert result.app_id == mock_workflow_run.app_id
|
||||
# NodeExecution doesn't have tenant_id attribute, it's handled at repository level
|
||||
# assert result.tenant_id == mock_workflow_run.tenant_id
|
||||
# assert result.app_id == mock_workflow_run.app_id
|
||||
assert result.workflow_id == mock_workflow_run.workflow_id
|
||||
assert result.workflow_run_id == mock_workflow_run.id
|
||||
assert result.node_execution_id == event.node_execution_id
|
||||
assert result.node_id == event.node_id
|
||||
assert result.node_type == event.node_type.value
|
||||
assert result.node_type == event.node_type
|
||||
assert result.title == event.node_data.title
|
||||
assert result.status == WorkflowNodeExecutionStatus.RUNNING.value
|
||||
assert result.created_by_role == mock_workflow_run.created_by_role
|
||||
assert result.created_by == mock_workflow_run.created_by
|
||||
# NodeExecution doesn't have created_by_role and created_by attributes, they're handled at repository level
|
||||
# assert result.created_by_role == mock_workflow_run.created_by_role
|
||||
# assert result.created_by == mock_workflow_run.created_by
|
||||
|
||||
# Verify save was called
|
||||
workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result)
|
||||
|
||||
# Verify the node execution was added to the cache
|
||||
assert workflow_cycle_manager._workflow_node_executions[event.node_execution_id] == result
|
||||
|
||||
|
||||
def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run):
|
||||
"""Test _get_workflow_run method"""
|
||||
|
|
@@ -261,28 +258,24 @@ def test_handle_workflow_node_execution_success(workflow_cycle_manager):
    event.execution_metadata = {"metadata": "test metadata"}
    event.start_at = datetime.now(UTC).replace(tzinfo=None)

    # Create a mock workflow node execution
    node_execution = MagicMock(spec=WorkflowNodeExecution)
    # Create a mock node execution
    node_execution = MagicMock()
    node_execution.node_execution_id = "test-node-execution-id"

    # Mock _get_workflow_node_execution to return the mock node execution
    with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
        # Call the method
        result = workflow_cycle_manager._handle_workflow_node_execution_success(
            event=event,
        )
    # Mock the repository to return the node execution
    workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

    # Verify the result
    assert result == node_execution
    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
    assert result.inputs == json.dumps(event.inputs)
    assert result.process_data == json.dumps(event.process_data)
    assert result.outputs == json.dumps(event.outputs)
    assert result.finished_at is not None
    assert result.elapsed_time is not None
    # Call the method
    result = workflow_cycle_manager._handle_workflow_node_execution_success(
        event=event,
    )

    # Verify update was called
    workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
    # Verify the result
    assert result == node_execution
    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value

    # Verify save was called
    workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution)


def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run):

@@ -322,27 +315,22 @@ def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
    event.start_at = datetime.now(UTC).replace(tzinfo=None)
    event.error = "Test error message"

    # Create a mock workflow node execution
    node_execution = MagicMock(spec=WorkflowNodeExecution)
    # Create a mock node execution
    node_execution = MagicMock()
    node_execution.node_execution_id = "test-node-execution-id"

    # Mock _get_workflow_node_execution to return the mock node execution
    with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
        # Call the method
        result = workflow_cycle_manager._handle_workflow_node_execution_failed(
            event=event,
        )
    # Mock the repository to return the node execution
    workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

    # Verify the result
    assert result == node_execution
    assert result.status == WorkflowNodeExecutionStatus.FAILED.value
    assert result.error == "Test error message"
    assert result.inputs == json.dumps(event.inputs)
    assert result.process_data == json.dumps(event.process_data)
    assert result.outputs == json.dumps(event.outputs)
    assert result.finished_at is not None
    assert result.elapsed_time is not None
    assert result.execution_metadata == json.dumps(event.execution_metadata)
    # Call the method
    result = workflow_cycle_manager._handle_workflow_node_execution_failed(
        event=event,
    )

    # Verify update was called
    workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
    # Verify the result
    assert result == node_execution
    assert result.status == WorkflowNodeExecutionStatus.FAILED.value
    assert result.error == "Test error message"

    # Verify save was called
    workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution)
@@ -0,0 +1,265 @@
import json
import logging
from unittest import mock

import pytest
from flask import Flask, Response

from configs import dify_config
from extensions import ext_request_logging
from extensions.ext_request_logging import _is_content_type_json, _log_request_finished, init_app


def test_is_content_type_json():
    """
    Test the _is_content_type_json function.
    """

    assert _is_content_type_json("application/json") is True
    # content type header with charset option.
    assert _is_content_type_json("application/json; charset=utf-8") is True
    # content type header with charset option, in uppercase.
    assert _is_content_type_json("APPLICATION/JSON; CHARSET=UTF-8") is True
    assert _is_content_type_json("text/html") is False
    assert _is_content_type_json("") is False


_KEY_NEEDLE = "needle"
_VALUE_NEEDLE = _KEY_NEEDLE[::-1]
_RESPONSE_NEEDLE = "response"


def _get_test_app():
    app = Flask(__name__)

    @app.route("/", methods=["GET", "POST"])
    def handler():
        return _RESPONSE_NEEDLE

    return app


# NOTE(QuantumGhost): Due to the design of Flask, we need to use monkey patching to write tests.


@pytest.fixture
def mock_request_receiver(monkeypatch) -> mock.Mock:
    mock_log_request_started = mock.Mock()
    monkeypatch.setattr(ext_request_logging, "_log_request_started", mock_log_request_started)
    return mock_log_request_started


@pytest.fixture
def mock_response_receiver(monkeypatch) -> mock.Mock:
    mock_log_request_finished = mock.Mock()
    monkeypatch.setattr(ext_request_logging, "_log_request_finished", mock_log_request_finished)
    return mock_log_request_finished


@pytest.fixture
def mock_logger(monkeypatch) -> logging.Logger:
    _logger = mock.MagicMock(spec=logging.Logger)
    monkeypatch.setattr(ext_request_logging, "_logger", _logger)
    return _logger


@pytest.fixture
def enable_request_logging(monkeypatch):
    monkeypatch.setattr(dify_config, "ENABLE_REQUEST_LOGGING", True)


class TestRequestLoggingExtension:
    def test_receiver_should_not_be_invoked_if_configuration_is_disabled(
        self,
        monkeypatch,
        mock_request_receiver,
        mock_response_receiver,
    ):
        monkeypatch.setattr(dify_config, "ENABLE_REQUEST_LOGGING", False)

        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.get("/")

        mock_request_receiver.assert_not_called()
        mock_response_receiver.assert_not_called()

    def test_receiver_should_be_called_if_enabled(
        self,
        enable_request_logging,
        mock_request_receiver,
        mock_response_receiver,
    ):
        """
        Test the request logging extension with JSON data.
        """

        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post("/", json={_KEY_NEEDLE: _VALUE_NEEDLE})

        mock_request_receiver.assert_called_once()
        mock_response_receiver.assert_called_once()


class TestLoggingLevel:
    @pytest.mark.usefixtures("enable_request_logging")
    def test_logging_should_be_skipped_if_level_is_above_debug(self, enable_request_logging, mock_logger):
        mock_logger.isEnabledFor.return_value = False
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post("/", json={_KEY_NEEDLE: _VALUE_NEEDLE})
        mock_logger.debug.assert_not_called()


class TestRequestReceiverLogging:
    @pytest.mark.usefixtures("enable_request_logging")
    def test_non_json_request(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post("/", data="plain text")
        assert mock_logger.debug.call_count == 1
        call_args = mock_logger.debug.call_args[0]
        assert "Received Request" in call_args[0]
        assert call_args[1] == "POST"
        assert call_args[2] == "/"
        assert "Request Body" not in call_args[0]

    @pytest.mark.usefixtures("enable_request_logging")
    def test_json_request(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post("/", json={_KEY_NEEDLE: _VALUE_NEEDLE})
        assert mock_logger.debug.call_count == 1
        call_args = mock_logger.debug.call_args[0]
        assert "Received Request" in call_args[0]
        assert "Request Body" in call_args[0]
        assert call_args[1] == "POST"
        assert call_args[2] == "/"
        assert _KEY_NEEDLE in call_args[3]

    @pytest.mark.usefixtures("enable_request_logging")
    def test_json_request_with_empty_body(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post("/", headers={"Content-Type": "application/json"})

        assert mock_logger.debug.call_count == 1
        call_args = mock_logger.debug.call_args[0]
        assert "Received Request" in call_args[0]
        assert "Request Body" not in call_args[0]
        assert call_args[1] == "POST"
        assert call_args[2] == "/"

    @pytest.mark.usefixtures("enable_request_logging")
    def test_json_request_with_invalid_json_as_body(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            client.post(
                "/",
                headers={"Content-Type": "application/json"},
                data="{",
            )
        assert mock_logger.debug.call_count == 0
        assert mock_logger.exception.call_count == 1

        exception_call_args = mock_logger.exception.call_args[0]
        assert exception_call_args[0] == "Failed to parse JSON request"


class TestResponseReceiverLogging:
    @pytest.mark.usefixtures("enable_request_logging")
    def test_non_json_response(self, enable_request_logging, mock_logger):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        response = Response(
            "OK",
            headers={"Content-Type": "text/plain"},
        )
        _log_request_finished(app, response)
        assert mock_logger.debug.call_count == 1
        call_args = mock_logger.debug.call_args[0]
        assert "Response" in call_args[0]
        assert "200" in call_args[1]
        assert call_args[2] == "text/plain"
        assert "Response Body" not in call_args[0]

    @pytest.mark.usefixtures("enable_request_logging")
    def test_json_response(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()
        response = Response(
            json.dumps({_KEY_NEEDLE: _VALUE_NEEDLE}),
            headers={"Content-Type": "application/json"},
        )
        _log_request_finished(app, response)
        assert mock_logger.debug.call_count == 1
        call_args = mock_logger.debug.call_args[0]
        assert "Response" in call_args[0]
        assert "Response Body" in call_args[0]
        assert "200" in call_args[1]
        assert call_args[2] == "application/json"
        assert _KEY_NEEDLE in call_args[3]

    @pytest.mark.usefixtures("enable_request_logging")
    def test_json_request_with_invalid_json_as_body(self, enable_request_logging, mock_logger, mock_response_receiver):
        mock_logger.isEnabledFor.return_value = True
        app = _get_test_app()

        response = Response(
            "{",
            headers={"Content-Type": "application/json"},
        )
        _log_request_finished(app, response)
        assert mock_logger.debug.call_count == 0
        assert mock_logger.exception.call_count == 1

        exception_call_args = mock_logger.exception.call_args[0]
        assert exception_call_args[0] == "Failed to parse JSON response"


class TestResponseUnmodified:
    def test_when_request_logging_disabled(self):
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            response = client.post(
                "/",
                headers={"Content-Type": "application/json"},
                data="{",
            )
            assert response.text == _RESPONSE_NEEDLE
            assert response.status_code == 200

    @pytest.mark.usefixtures("enable_request_logging")
    def test_when_request_logging_enabled(self, enable_request_logging):
        app = _get_test_app()
        init_app(app)

        with app.test_client() as client:
            response = client.post(
                "/",
                headers={"Content-Type": "application/json"},
                data="{",
            )
            assert response.text == _RESPONSE_NEEDLE
            assert response.status_code == 200
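The tests above pin down the helper's contract: JSON detection must ignore header parameters such as `charset` and compare case-insensitively. A minimal sketch that satisfies those assertions (the real implementation in `ext_request_logging` may differ):

```python
def _is_content_type_json(content_type: str) -> bool:
    """Return True when the Content-Type header denotes JSON."""
    if not content_type:
        return False
    # Drop parameters such as "; charset=utf-8" and compare case-insensitively.
    media_type = content_type.split(";", 1)[0].strip().lower()
    return media_type == "application/json"
```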
@@ -2,15 +2,36 @@
Unit tests for the SQLAlchemy implementation of WorkflowNodeExecutionRepository.
"""

from unittest.mock import MagicMock
import json
from datetime import datetime
from unittest.mock import MagicMock, PropertyMock

import pytest
from pytest_mock import MockerFixture
from sqlalchemy.orm import Session, sessionmaker

from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.entities.node_entities import NodeRunMetadataKey
from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
from core.workflow.nodes.enums import NodeType
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from models.workflow import WorkflowNodeExecution
from models.account import Account, Tenant
from models.workflow import WorkflowNodeExecution, WorkflowNodeExecutionStatus, WorkflowNodeExecutionTriggeredFrom


def configure_mock_execution(mock_execution):
    """Configure a mock execution with proper JSON serializable values."""
    # Configure inputs, outputs, process_data, and execution_metadata to return JSON serializable values
    type(mock_execution).inputs = PropertyMock(return_value='{"key": "value"}')
    type(mock_execution).outputs = PropertyMock(return_value='{"result": "success"}')
    type(mock_execution).process_data = PropertyMock(return_value='{"process": "data"}')
    type(mock_execution).execution_metadata = PropertyMock(return_value='{"metadata": "info"}')

    # Configure status and triggered_from to be valid enum values
    mock_execution.status = "running"
    mock_execution.triggered_from = "workflow-run"

    return mock_execution


@pytest.fixture

@@ -28,13 +49,30 @@ def session():


@pytest.fixture
def repository(session):
def mock_user():
    """Create a user instance for testing."""
    user = Account()
    user.id = "test-user-id"

    tenant = Tenant()
    tenant.id = "test-tenant"
    tenant.name = "Test Workspace"
    user._current_tenant = MagicMock()
    user._current_tenant.id = "test-tenant"

    return user


@pytest.fixture
def repository(session, mock_user):
    """Create a repository instance with test data."""
    _, session_factory = session
    tenant_id = "test-tenant"
    app_id = "test-app"
    return SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=session_factory, tenant_id=tenant_id, app_id=app_id
        session_factory=session_factory,
        user=mock_user,
        app_id=app_id,
        triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
    )
@@ -45,16 +83,23 @@ def test_save(repository, session):
    execution = MagicMock(spec=WorkflowNodeExecution)
    execution.tenant_id = None
    execution.app_id = None
    execution.inputs = None
    execution.process_data = None
    execution.outputs = None
    execution.metadata = None

    # Mock the to_db_model method to return the execution itself
    # This simulates the behavior of setting tenant_id and app_id
    repository.to_db_model = MagicMock(return_value=execution)

    # Call save method
    repository.save(execution)

    # Assert tenant_id and app_id are set
    assert execution.tenant_id == repository._tenant_id
    assert execution.app_id == repository._app_id
    # Assert to_db_model was called with the execution
    repository.to_db_model.assert_called_once_with(execution)

    # Assert session.add was called
    session_obj.add.assert_called_once_with(execution)
    # Assert session.merge was called (now using merge for both save and update)
    session_obj.merge.assert_called_once_with(execution)


def test_save_with_existing_tenant_id(repository, session):

@@ -64,16 +109,27 @@ def test_save_with_existing_tenant_id(repository, session):
    execution = MagicMock(spec=WorkflowNodeExecution)
    execution.tenant_id = "existing-tenant"
    execution.app_id = None
    execution.inputs = None
    execution.process_data = None
    execution.outputs = None
    execution.metadata = None

    # Create a modified execution that will be returned by _to_db_model
    modified_execution = MagicMock(spec=WorkflowNodeExecution)
    modified_execution.tenant_id = "existing-tenant"  # Tenant ID should not change
    modified_execution.app_id = repository._app_id  # App ID should be set

    # Mock the to_db_model method to return the modified execution
    repository.to_db_model = MagicMock(return_value=modified_execution)

    # Call save method
    repository.save(execution)

    # Assert tenant_id is not changed and app_id is set
    assert execution.tenant_id == "existing-tenant"
    assert execution.app_id == repository._app_id
    # Assert to_db_model was called with the execution
    repository.to_db_model.assert_called_once_with(execution)

    # Assert session.add was called
    session_obj.add.assert_called_once_with(execution)
    # Assert session.merge was called with the modified execution (now using merge for both save and update)
    session_obj.merge.assert_called_once_with(modified_execution)


def test_get_by_node_execution_id(repository, session, mocker: MockerFixture):

@@ -84,7 +140,16 @@ def test_get_by_node_execution_id(repository, session, mocker: MockerFixture):
    mock_stmt = mocker.MagicMock()
    mock_select.return_value = mock_stmt
    mock_stmt.where.return_value = mock_stmt
    session_obj.scalar.return_value = mocker.MagicMock(spec=WorkflowNodeExecution)

    # Create a properly configured mock execution
    mock_execution = mocker.MagicMock(spec=WorkflowNodeExecution)
    configure_mock_execution(mock_execution)
    session_obj.scalar.return_value = mock_execution

    # Create a mock domain model to be returned by _to_domain_model
    mock_domain_model = mocker.MagicMock()
    # Mock the _to_domain_model method to return our mock domain model
    repository._to_domain_model = mocker.MagicMock(return_value=mock_domain_model)

    # Call method
    result = repository.get_by_node_execution_id("test-node-execution-id")

@@ -92,7 +157,10 @@ def test_get_by_node_execution_id(repository, session, mocker: MockerFixture):
    # Assert select was called with correct parameters
    mock_select.assert_called_once()
    session_obj.scalar.assert_called_once_with(mock_stmt)
    assert result is not None
    # Assert _to_domain_model was called with the mock execution
    repository._to_domain_model.assert_called_once_with(mock_execution)
    # Assert the result is our mock domain model
    assert result is mock_domain_model


def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
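For orientation, the flow the two hunks above mock out looks roughly like the following. The statement shape (select, where, scalar, then a domain-model conversion) is exactly what the assertions verify; the column name used in the filter is an assumption drawn from the test data:

```python
from sqlalchemy import select
from sqlalchemy.orm import Session

from models.workflow import WorkflowNodeExecution


def get_by_node_execution_id(session: Session, node_execution_id: str):
    stmt = select(WorkflowNodeExecution).where(
        WorkflowNodeExecution.node_execution_id == node_execution_id
    )
    db_model = session.scalar(stmt)  # None when no row matches
    # The repository then maps the DB row into a NodeExecution domain model
    # via _to_domain_model before returning it.
    return db_model
```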
@@ -104,7 +172,16 @@ def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
    mock_select.return_value = mock_stmt
    mock_stmt.where.return_value = mock_stmt
    mock_stmt.order_by.return_value = mock_stmt
    session_obj.scalars.return_value.all.return_value = [mocker.MagicMock(spec=WorkflowNodeExecution)]

    # Create a properly configured mock execution
    mock_execution = mocker.MagicMock(spec=WorkflowNodeExecution)
    configure_mock_execution(mock_execution)
    session_obj.scalars.return_value.all.return_value = [mock_execution]

    # Create a mock domain model to be returned by _to_domain_model
    mock_domain_model = mocker.MagicMock()
    # Mock the _to_domain_model method to return our mock domain model
    repository._to_domain_model = mocker.MagicMock(return_value=mock_domain_model)

    # Call method
    order_config = OrderConfig(order_by=["index"], order_direction="desc")

@@ -113,7 +190,11 @@ def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
    # Assert select was called with correct parameters
    mock_select.assert_called_once()
    session_obj.scalars.assert_called_once_with(mock_stmt)
    # Assert _to_domain_model was called with the mock execution
    repository._to_domain_model.assert_called_once_with(mock_execution)
    # Assert the result contains our mock domain model
    assert len(result) == 1
    assert result[0] is mock_domain_model


def test_get_running_executions(repository, session, mocker: MockerFixture):

@@ -124,7 +205,16 @@ def test_get_running_executions(repository, session, mocker: MockerFixture):
    mock_stmt = mocker.MagicMock()
    mock_select.return_value = mock_stmt
    mock_stmt.where.return_value = mock_stmt
    session_obj.scalars.return_value.all.return_value = [mocker.MagicMock(spec=WorkflowNodeExecution)]

    # Create a properly configured mock execution
    mock_execution = mocker.MagicMock(spec=WorkflowNodeExecution)
    configure_mock_execution(mock_execution)
    session_obj.scalars.return_value.all.return_value = [mock_execution]

    # Create a mock domain model to be returned by _to_domain_model
    mock_domain_model = mocker.MagicMock()
    # Mock the _to_domain_model method to return our mock domain model
    repository._to_domain_model = mocker.MagicMock(return_value=mock_domain_model)

    # Call method
    result = repository.get_running_executions("test-workflow-run-id")

@@ -132,25 +222,36 @@ def test_get_running_executions(repository, session, mocker: MockerFixture):
    # Assert select was called with correct parameters
    mock_select.assert_called_once()
    session_obj.scalars.assert_called_once_with(mock_stmt)
    # Assert _to_domain_model was called with the mock execution
    repository._to_domain_model.assert_called_once_with(mock_execution)
    # Assert the result contains our mock domain model
    assert len(result) == 1
    assert result[0] is mock_domain_model


def test_update(repository, session):
    """Test update method."""
def test_update_via_save(repository, session):
    """Test updating an existing record via save method."""
    session_obj, _ = session
    # Create a mock execution
    execution = MagicMock(spec=WorkflowNodeExecution)
    execution.tenant_id = None
    execution.app_id = None
    execution.inputs = None
    execution.process_data = None
    execution.outputs = None
    execution.metadata = None

    # Call update method
    repository.update(execution)
    # Mock the to_db_model method to return the execution itself
    # This simulates the behavior of setting tenant_id and app_id
    repository.to_db_model = MagicMock(return_value=execution)

    # Assert tenant_id and app_id are set
    assert execution.tenant_id == repository._tenant_id
    assert execution.app_id == repository._app_id
    # Call save method to update an existing record
    repository.save(execution)

    # Assert session.merge was called
    # Assert to_db_model was called with the execution
    repository.to_db_model.assert_called_once_with(execution)

    # Assert session.merge was called (for updates)
    session_obj.merge.assert_called_once_with(execution)
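The renamed test documents the behavioral change: there is no separate `update()` path any more, and `save()` relies on `Session.merge()`, which inserts a new row or updates the existing one keyed by primary key. A minimal sketch of that pattern, with the surrounding repository code shape assumed from the assertions above:

```python
from sqlalchemy.orm import Session


def save(session: Session, db_model) -> None:
    # merge() returns a persistent instance: it INSERTs when the primary key
    # is unseen and UPDATEs the existing row otherwise, so one method covers
    # both the create and the update case.
    session.merge(db_model)
```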
@@ -176,3 +277,118 @@ def test_clear(repository, session, mocker: MockerFixture):
    mock_stmt.where.assert_called()
    session_obj.execute.assert_called_once_with(mock_stmt)
    session_obj.commit.assert_called_once()


def test_to_db_model(repository):
    """Test to_db_model method."""
    # Create a domain model
    domain_model = NodeExecution(
        id="test-id",
        workflow_id="test-workflow-id",
        node_execution_id="test-node-execution-id",
        workflow_run_id="test-workflow-run-id",
        index=1,
        predecessor_node_id="test-predecessor-id",
        node_id="test-node-id",
        node_type=NodeType.START,
        title="Test Node",
        inputs={"input_key": "input_value"},
        process_data={"process_key": "process_value"},
        outputs={"output_key": "output_value"},
        status=NodeExecutionStatus.RUNNING,
        error=None,
        elapsed_time=1.5,
        metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100},
        created_at=datetime.now(),
        finished_at=None,
    )

    # Convert to DB model
    db_model = repository.to_db_model(domain_model)

    # Assert DB model has correct values
    assert isinstance(db_model, WorkflowNodeExecution)
    assert db_model.id == domain_model.id
    assert db_model.tenant_id == repository._tenant_id
    assert db_model.app_id == repository._app_id
    assert db_model.workflow_id == domain_model.workflow_id
    assert db_model.triggered_from == repository._triggered_from
    assert db_model.workflow_run_id == domain_model.workflow_run_id
    assert db_model.index == domain_model.index
    assert db_model.predecessor_node_id == domain_model.predecessor_node_id
    assert db_model.node_execution_id == domain_model.node_execution_id
    assert db_model.node_id == domain_model.node_id
    assert db_model.node_type == domain_model.node_type
    assert db_model.title == domain_model.title

    assert db_model.inputs_dict == domain_model.inputs
    assert db_model.process_data_dict == domain_model.process_data
    assert db_model.outputs_dict == domain_model.outputs
    assert db_model.execution_metadata_dict == domain_model.metadata

    assert db_model.status == domain_model.status
    assert db_model.error == domain_model.error
    assert db_model.elapsed_time == domain_model.elapsed_time
    assert db_model.created_at == domain_model.created_at
    assert db_model.created_by_role == repository._creator_user_role
    assert db_model.created_by == repository._creator_user_id
    assert db_model.finished_at == domain_model.finished_at


def test_to_domain_model(repository):
    """Test _to_domain_model method."""
    # Create input dictionaries
    inputs_dict = {"input_key": "input_value"}
    process_data_dict = {"process_key": "process_value"}
    outputs_dict = {"output_key": "output_value"}
    metadata_dict = {str(NodeRunMetadataKey.TOTAL_TOKENS): 100}

    # Create a DB model using our custom subclass
    db_model = WorkflowNodeExecution()
    db_model.id = "test-id"
    db_model.tenant_id = "test-tenant-id"
    db_model.app_id = "test-app-id"
    db_model.workflow_id = "test-workflow-id"
    db_model.triggered_from = "workflow-run"
    db_model.workflow_run_id = "test-workflow-run-id"
    db_model.index = 1
    db_model.predecessor_node_id = "test-predecessor-id"
    db_model.node_execution_id = "test-node-execution-id"
    db_model.node_id = "test-node-id"
    db_model.node_type = NodeType.START.value
    db_model.title = "Test Node"
    db_model.inputs = json.dumps(inputs_dict)
    db_model.process_data = json.dumps(process_data_dict)
    db_model.outputs = json.dumps(outputs_dict)
    db_model.status = WorkflowNodeExecutionStatus.RUNNING
    db_model.error = None
    db_model.elapsed_time = 1.5
    db_model.execution_metadata = json.dumps(metadata_dict)
    db_model.created_at = datetime.now()
    db_model.created_by_role = "account"
    db_model.created_by = "test-user-id"
    db_model.finished_at = None

    # Convert to domain model
    domain_model = repository._to_domain_model(db_model)

    # Assert domain model has correct values
    assert isinstance(domain_model, NodeExecution)
    assert domain_model.id == db_model.id
    assert domain_model.workflow_id == db_model.workflow_id
    assert domain_model.workflow_run_id == db_model.workflow_run_id
    assert domain_model.index == db_model.index
    assert domain_model.predecessor_node_id == db_model.predecessor_node_id
    assert domain_model.node_execution_id == db_model.node_execution_id
    assert domain_model.node_id == db_model.node_id
    assert domain_model.node_type == NodeType(db_model.node_type)
    assert domain_model.title == db_model.title
    assert domain_model.inputs == inputs_dict
    assert domain_model.process_data == process_data_dict
    assert domain_model.outputs == outputs_dict
    assert domain_model.status == NodeExecutionStatus(db_model.status)
    assert domain_model.error == db_model.error
    assert domain_model.elapsed_time == db_model.elapsed_time
    assert domain_model.metadata == metadata_dict
    assert domain_model.created_at == db_model.created_at
    assert domain_model.finished_at == db_model.finished_at
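Taken together, `test_to_db_model` and `test_to_domain_model` pin down an inverse pair: dict-valued fields are serialized to JSON strings on the way into the database and parsed back on the way out. A self-contained illustration of that contract, simplified to a single field:

```python
import json
from dataclasses import dataclass


@dataclass
class DomainModel:
    inputs: dict


@dataclass
class DbModel:
    inputs: str  # stored as a JSON string column


def to_db_model(domain: DomainModel) -> DbModel:
    return DbModel(inputs=json.dumps(domain.inputs))


def to_domain_model(db: DbModel) -> DomainModel:
    return DomainModel(inputs=json.loads(db.inputs))


# The round trip must be lossless for the repository tests above to hold.
assert to_domain_model(to_db_model(DomainModel({"input_key": "input_value"}))).inputs == {"input_key": "input_value"}
```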
@@ -74,6 +74,10 @@ DEBUG=false
# which is convenient for debugging.
FLASK_DEBUG=false

# Enable request logging, which will log request and response information.
# The log level is DEBUG.
ENABLE_REQUEST_LOGGING=False

# A secret key that is used for securely signing the session cookie
# and encrypting sensitive information on the database.
# You can generate a strong key using `openssl rand -base64 42`.
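How the flag takes effect: judging by the extension tests earlier in this diff, `init_app` connects Flask's `request_started`/`request_finished` signals only when the flag is on. A minimal sketch of that wiring; this is assumed from the test names, not Dify's actual module, and the constant stands in for `dify_config.ENABLE_REQUEST_LOGGING`:

```python
import logging

from flask import Flask, Response, request, request_finished, request_started

ENABLE_REQUEST_LOGGING = False  # stands in for dify_config.ENABLE_REQUEST_LOGGING
_logger = logging.getLogger(__name__)


def _log_request_started(_sender, **_extra):
    if not _logger.isEnabledFor(logging.DEBUG):
        return
    _logger.debug("Received Request: %s -> %s", request.method, request.path)


def _log_request_finished(_sender, response: Response, **_extra):
    if not _logger.isEnabledFor(logging.DEBUG) or response is None:
        return
    _logger.debug("Response: %s %s", response.status, response.content_type)


def init_app(app: Flask):
    if not ENABLE_REQUEST_LOGGING:
        # Receivers are never connected, so the extension adds no
        # per-request overhead when the flag is off.
        return
    request_started.connect(_log_request_started, app)
    request_finished.connect(_log_request_finished, app)
```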
@@ -19,6 +19,7 @@ x-shared-env: &shared-api-worker-env
  LOG_TZ: ${LOG_TZ:-UTC}
  DEBUG: ${DEBUG:-false}
  FLASK_DEBUG: ${FLASK_DEBUG:-false}
  ENABLE_REQUEST_LOGGING: ${ENABLE_REQUEST_LOGGING:-False}
  SECRET_KEY: ${SECRET_KEY:-sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U}
  INIT_PASSWORD: ${INIT_PASSWORD:-}
  DEPLOY_ENV: ${DEPLOY_ENV:-PRODUCTION}
@@ -6,12 +6,10 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain.
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_WEB_PREFIX=http://localhost:3000
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_PUBLIC_WEB_PREFIX=http://localhost:3000
# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1
# The URL for MARKETPLACE

@@ -31,12 +31,10 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain.
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_WEB_PREFIX=http://localhost:3000
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_PUBLIC_WEB_PREFIX=http://localhost:3000

# SENTRY
NEXT_PUBLIC_SENTRY_DSN=
@@ -17,7 +17,7 @@ import AppsContext, { useAppContext } from '@/context/app-context'
import type { HtmlContentProps } from '@/app/components/base/popover'
import CustomPopover from '@/app/components/base/popover'
import Divider from '@/app/components/base/divider'
import { WEB_PREFIX } from '@/config'
import { basePath } from '@/utils/var'
import { getRedirection } from '@/utils/app-redirection'
import { useProviderContext } from '@/context/provider-context'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'

@@ -235,7 +235,7 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
    try {
      const { installed_apps }: any = await fetchInstalledAppList(app.id) || {}
      if (installed_apps?.length > 0)
        window.open(`${WEB_PREFIX}/explore/installed/${installed_apps[0].id}`, '_blank')
        window.open(`${basePath}/explore/installed/${installed_apps[0].id}`, '_blank')
      else
        throw new Error('No app found in Explore')
    }

@@ -1,6 +1,6 @@
'use client'
import { useTranslation } from 'react-i18next'
import Link from 'next/link'
import { basePath } from '@/utils/var'
import {
  RiAddLine,
  RiArrowRightLine,

@@ -18,7 +18,7 @@ const CreateAppCard = (
    <div className='bg-background-default-dimm flex min-h-[160px] flex-col rounded-xl border-[0.5px]
      border-components-panel-border transition-all duration-200 ease-in-out'
    >
      <Link ref={ref} className='group flex grow cursor-pointer items-start p-4' href={'/datasets/create'}>
      <a ref={ref} className='group flex grow cursor-pointer items-start p-4' href={`${basePath}/datasets/create`}>
        <div className='flex items-center gap-3'>
          <div className='flex h-10 w-10 items-center justify-center rounded-lg border border-dashed border-divider-regular bg-background-default-lighter
            p-2 group-hover:border-solid group-hover:border-effects-highlight group-hover:bg-background-default-dodge'

@@ -27,12 +27,12 @@ const CreateAppCard = (
        </div>
        <div className='system-md-semibold text-text-secondary group-hover:text-text-accent'>{t('dataset.createDataset')}</div>
      </div>
    </Link>
    </a>
    <div className='system-xs-regular p-4 pt-0 text-text-tertiary'>{t('dataset.createDatasetIntro')}</div>
    <Link className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={'datasets/connect'}>
    <a className='group flex cursor-pointer items-center gap-1 rounded-b-xl border-t-[0.5px] border-divider-subtle p-4' href={`${basePath}/datasets/connect`}>
      <div className='system-xs-medium text-text-tertiary group-hover:text-text-accent'>{t('dataset.connectDataset')}</div>
      <RiArrowRightLine className='h-3.5 w-3.5 text-text-tertiary group-hover:text-text-accent' />
    </Link>
    </a>
  </div>
  )
}
@@ -68,7 +68,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom / hierarchical
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule
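For reference, a `process_rule` payload exercising the newly documented `hierarchical` mode might look like the following. This is illustrative only: the rule id shown is an example, and additional per-rule and segmentation fields exist in the full API reference.

```python
process_rule = {
    "mode": "hierarchical",  # was previously limited to "automatic" / "custom"
    "rules": {
        "pre_processing_rules": [
            {"id": "remove_extra_spaces"},  # example rule id
        ],
    },
}
```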
@@ -203,7 +203,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  - <code>doc_language</code> In Q&A mode, specify the language of the document, for example: <code>English</code>, <code>Chinese</code>

  - <code>process_rule</code> Processing rules
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom / hierarchical
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule

@@ -783,7 +783,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom / hierarchical
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule

@@ -885,7 +885,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom
    - <code>mode</code> (string) Cleaning, segmentation mode, automatic / custom / hierarchical
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule
@@ -69,7 +69,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom / hierarchical (parent-child)
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule

@@ -207,7 +207,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  - <code>doc_language</code> In Q&A mode, specify the language of the document, for example: <code>English</code>, <code>Chinese</code>

  - <code>process_rule</code> Processing rules
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom / hierarchical (parent-child)
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule

@@ -790,7 +790,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules (optional)
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom / hierarchical (parent-child)
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule

@@ -892,7 +892,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi
  </Property>
  <Property name='process_rule' type='object' key='process_rule'>
    Processing rules (optional)
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom
    - <code>mode</code> (string) Cleaning and segmentation mode: automatic / custom / hierarchical (parent-child)
    - <code>rules</code> (object) Custom rules (in automatic mode, this field is empty)
      - <code>pre_processing_rules</code> (array[object]) Preprocessing rules
        - <code>id</code> (string) Unique identifier for the preprocessing rule
@@ -248,7 +248,7 @@ const AppInfo = ({ expand }: IAppInfoProps) => {
      </div>
      {/* description */}
      {appDetail.description && (
        <div className='system-xs-regular text-text-tertiary'>{appDetail.description}</div>
        <div className='system-xs-regular overflow-wrap-anywhere w-full max-w-full whitespace-normal break-words text-text-tertiary'>{appDetail.description}</div>
      )}
      {/* operations */}
      <div className='flex flex-wrap items-center gap-1 self-stretch'>

@@ -31,7 +31,7 @@ import {
  PortalToFollowElemContent,
  PortalToFollowElemTrigger,
} from '@/app/components/base/portal-to-follow-elem'
import { WEB_PREFIX } from '@/config'
import { basePath } from '@/utils/var'
import { fetchInstalledAppList } from '@/service/explore'
import EmbeddedModal from '@/app/components/app/overview/embedded'
import { useStore as useAppStore } from '@/app/components/app/store'

@@ -89,7 +89,7 @@ const AppPublisher = ({
  const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
  const { app_base_url: appBaseURL = '', access_token: accessToken = '' } = appDetail?.site ?? {}
  const appMode = (appDetail?.mode !== 'completion' && appDetail?.mode !== 'workflow') ? 'chat' : appDetail.mode
  const appURL = `${appBaseURL}/${appMode}/${accessToken}`
  const appURL = `${appBaseURL}${basePath}/${appMode}/${accessToken}`
  const isChatApp = ['chat', 'agent-chat', 'completion'].includes(appDetail?.mode || '')
  const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
  const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)

@@ -154,7 +154,7 @@ const AppPublisher = ({
    try {
      const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
      if (installed_apps?.length > 0)
        window.open(`${WEB_PREFIX}/explore/installed/${installed_apps[0].id}`, '_blank')
        window.open(`${basePath}/explore/installed/${installed_apps[0].id}`, '_blank')
      else
        throw new Error('No app found in Explore')
    }

@@ -20,7 +20,7 @@ const SuggestedAction = ({ icon, link, disabled, children, className, onClick, .
    target='_blank'
    rel='noreferrer'
    className={classNames(
      'flex justify-start items-center gap-2 py-2 px-2.5 bg-background-section-burn rounded-lg transition-colors [&:not(:first-child)]:mt-1',
      'flex justify-start items-center gap-2 py-2 px-2.5 bg-background-section-burn rounded-lg text-text-secondary transition-colors [&:not(:first-child)]:mt-1',
      disabled ? 'shadow-xs opacity-30 cursor-not-allowed' : 'text-text-secondary hover:bg-state-accent-hover hover:text-text-accent cursor-pointer',
      className,
    )}

@@ -14,6 +14,7 @@ import Loading from '@/app/components/base/loading'
import Badge from '@/app/components/base/badge'
import { useKnowledge } from '@/hooks/use-knowledge'
import cn from '@/utils/classnames'
import { basePath } from '@/utils/var'

export type ISelectDataSetProps = {
  isShow: boolean

@@ -111,7 +112,7 @@ const SelectDataSet: FC<ISelectDataSetProps> = ({
            }}
          >
            <span className='text-text-tertiary'>{t('appDebug.feature.dataSet.noDataSet')}</span>
            <Link href={'/datasets/create'} className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link>
            <Link href={`${basePath}/datasets/create`} className='font-normal text-text-accent'>{t('appDebug.feature.dataSet.toCreate')}</Link>
          </div>
        )}
@@ -14,7 +14,7 @@ import type { AppIconSelection } from '../../base/app-icon-picker'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
import cn from '@/utils/classnames'
import { WEB_PREFIX } from '@/config'
import { basePath } from '@/utils/var'
import AppsContext, { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import { ToastContext } from '@/app/components/base/toast'

@@ -353,11 +353,11 @@ function AppScreenShot({ mode, show }: { mode: AppMode; show: boolean }) {
    'workflow': 'Workflow',
  }
  return <picture>
    <source media="(resolution: 1x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}.png`} />
    <source media="(resolution: 2x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}@2x.png`} />
    <source media="(resolution: 3x)" srcSet={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}@3x.png`} />
    <source media="(resolution: 1x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}.png`} />
    <source media="(resolution: 2x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}@2x.png`} />
    <source media="(resolution: 3x)" srcSet={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}@3x.png`} />
    <Image className={show ? '' : 'hidden'}
      src={`${WEB_PREFIX}/screenshots/${theme}/${modeToImageMap[mode]}.png`}
      src={`${basePath}/screenshots/${theme}/${modeToImageMap[mode]}.png`}
      alt='App Screen Shot'
      width={664} height={448} />
  </picture>

@@ -7,6 +7,7 @@ import { usePathname } from 'next/navigation'
import { useDebounce } from 'ahooks'
import { omit } from 'lodash-es'
import dayjs from 'dayjs'
import { basePath } from '@/utils/var'
import { Trans, useTranslation } from 'react-i18next'
import List from './list'
import Filter, { TIME_PERIOD_MAPPING } from './filter'

@@ -109,7 +110,7 @@ const Logs: FC<ILogsProps> = ({ appDetail }) => {
        ? <Loading type='app' />
        : total > 0
          ? <List logs={isChatMode ? chatConversations : completionConversations} appDetail={appDetail} onRefresh={isChatMode ? mutateChatList : mutateCompletionList} />
          : <EmptyElement appUrl={`${appDetail.site.app_base_url}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
          : <EmptyElement appUrl={`${appDetail.site.app_base_url}${basePath}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
      }
      {/* Show Pagination only if the total is more than the limit */}
      {(total && total > APP_PAGE_LIMIT)

@@ -19,6 +19,7 @@ import type { ConfigParams } from './settings'
import Tooltip from '@/app/components/base/tooltip'
import AppBasic from '@/app/components/app-sidebar/basic'
import { asyncRunSafe, randomString } from '@/utils'
import { basePath } from '@/utils/var'
import { useStore as useAppStore } from '@/app/components/app/store'
import Button from '@/app/components/base/button'
import Switch from '@/app/components/base/switch'

@@ -100,7 +101,7 @@ function AppCard({
  const runningStatus = isApp ? appInfo.enable_site : appInfo.enable_api
  const { app_base_url, access_token } = appInfo.site ?? {}
  const appMode = (appInfo.mode !== 'completion' && appInfo.mode !== 'workflow') ? 'chat' : appInfo.mode
  const appUrl = `${app_base_url}/${appMode}/${access_token}`
  const appUrl = `${app_base_url}${basePath}/${appMode}/${access_token}`
  const apiUrl = appInfo?.api_base_url

  const genClickFuncByName = (opName: string) => {

@@ -13,6 +13,7 @@ import { IS_CE_EDITION } from '@/config'
import type { SiteInfo } from '@/models/share'
import { useThemeContext } from '@/app/components/base/chat/embedded-chatbot/theme/theme-context'
import ActionButton from '@/app/components/base/action-button'
import { basePath } from '@/utils/var'
import cn from '@/utils/classnames'

type Props = {
@@ -28,7 +29,7 @@ const OPTION_MAP = {
  iframe: {
    getContent: (url: string, token: string) =>
      `<iframe
 src="${url}/chatbot/${token}"
 src="${url}${basePath}/chatbot/${token}"
 style="width: 100%; height: 100%; min-height: 700px"
 frameborder="0"
 allow="microphone">

@@ -43,7 +44,7 @@ const OPTION_MAP = {
          isDev: true`
        : ''}${IS_CE_EDITION
        ? `,
  baseUrl: '${url}'`
  baseUrl: '${url}${basePath}'`
        : ''},
  systemVariables: {
    // user_id: 'YOU CAN DEFINE USER ID HERE',

@@ -52,7 +53,7 @@ const OPTION_MAP = {
  }
</script>
<script
 src="${url}/embed.min.js"
 src="${url}${basePath}/embed.min.js"
 id="${token}"
 defer>
</script>

@@ -67,7 +68,7 @@ const OPTION_MAP = {
</style>`,
  },
  chromePlugin: {
    getContent: (url: string, token: string) => `ChatBot URL: ${url}/chatbot/${token}`,
    getContent: (url: string, token: string) => `ChatBot URL: ${url}${basePath}/chatbot/${token}`,
  },
}
const prefixEmbedded = 'appOverview.overview.appInfo.embedded'

@@ -11,6 +11,7 @@ import timezone from 'dayjs/plugin/timezone'
import { Trans, useTranslation } from 'react-i18next'
import Link from 'next/link'
import List from './list'
import { basePath } from '@/utils/var'
import Filter, { TIME_PERIOD_MAPPING } from './filter'
import Pagination from '@/app/components/base/pagination'
import Loading from '@/app/components/base/loading'

@@ -100,7 +101,7 @@ const Logs: FC<ILogsProps> = ({ appDetail }) => {
        ? <Loading type='app' />
        : total > 0
          ? <List logs={workflowLogs} appDetail={appDetail} onRefresh={mutate} />
          : <EmptyElement appUrl={`${appDetail.site.app_base_url}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
          : <EmptyElement appUrl={`${appDetail.site.app_base_url}${basePath}/${getWebAppType(appDetail.mode)}/${appDetail.site.access_token}`} />
      }
      {/* Show Pagination only if the total is more than the limit */}
      {(total && total > APP_PAGE_LIMIT)
@@ -1,10 +1,9 @@
'use client'
import type { FC } from 'react'
import { WEB_PREFIX } from '@/config'
import classNames from '@/utils/classnames'
import useTheme from '@/hooks/use-theme'
import { basePath } from '@/utils/var'
import { useGlobalPublicStore } from '@/context/global-public-context'

export type LogoStyle = 'default' | 'monochromeWhite'

export const logoPathMap: Record<LogoStyle, string> = {

@@ -35,7 +34,7 @@ const DifyLogo: FC<DifyLogoProps> = ({
  const themedStyle = (theme === 'dark' && style === 'default') ? 'monochromeWhite' : style
  const { systemFeatures } = useGlobalPublicStore()

  let src = `${WEB_PREFIX}${logoPathMap[themedStyle]}`
  let src = `${basePath}${logoPathMap[themedStyle]}`
  if (systemFeatures.branding.enabled)
    src = systemFeatures.branding.workspace_logo

@@ -1,5 +1,5 @@
import type { FC } from 'react'
import { WEB_PREFIX } from '@/config'
import { basePath } from '@/utils/var'

type LogoEmbeddedChatAvatarProps = {
  className?: string

@@ -9,7 +9,7 @@ const LogoEmbeddedChatAvatar: FC<LogoEmbeddedChatAvatarProps> = ({
}) => {
  return (
    <img
      src={`${WEB_PREFIX}/logo/logo-embedded-chat-avatar.png`}
      src={`${basePath}/logo/logo-embedded-chat-avatar.png`}
      className={`block h-10 w-10 ${className}`}
      alt='logo'
    />

@@ -1,6 +1,6 @@
import classNames from '@/utils/classnames'
import type { FC } from 'react'
import { WEB_PREFIX } from '@/config'
import { basePath } from '@/utils/var'

type LogoEmbeddedChatHeaderProps = {
  className?: string

@@ -14,7 +14,7 @@ const LogoEmbeddedChatHeader: FC<LogoEmbeddedChatHeaderProps> = ({
      <source media="(resolution: 2x)" srcSet='/logo/logo-embedded-chat-header@2x.png' />
      <source media="(resolution: 3x)" srcSet='/logo/logo-embedded-chat-header@3x.png' />
      <img
        src={`${WEB_PREFIX}/logo/logo-embedded-chat-header.png`}
        src={`${basePath}/logo/logo-embedded-chat-header.png`}
        alt='logo'
        className={classNames('block h-6 w-auto', className)}
      />
@ -0,0 +1,22 @@
|
|||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import { basePath } from '@/utils/var'
|
||||
import classNames from '@/utils/classnames'
|
||||
|
||||
type LogoSiteProps = {
|
||||
className?: string
|
||||
}
|
||||
|
||||
const LogoSite: FC<LogoSiteProps> = ({
|
||||
className,
|
||||
}) => {
|
||||
return (
|
||||
<img
|
||||
src={`${basePath}/logo/logo.png`}
|
||||
className={classNames('block w-[22.651px] h-[24.5px]', className)}
|
||||
alt='logo'
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export default LogoSite
|
||||
@@ -15,7 +15,7 @@ export default function Group({ children, value, onChange, className = '' }: TRa
     onChange?.(value)
   }
   return (
-    <div className={cn('flex items-center bg-gray-50', s.container, className)}>
+    <div className={cn('flex items-center bg-workflow-block-parma-bg text-text-secondary', s.container, className)}>
       <RadioGroupContext.Provider value={{ value, onChange: onRadioChange }}>
         {children}
       </RadioGroupContext.Provider>
@@ -248,7 +248,7 @@ The text generation application offers non-session support and is ideal for tran
 </Col>
 <Col sticky>
 ### Request Example
-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
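Editor's note: this doc fix (repeated across the localized copies below) adds the closing quote that was missing from the `--form 'file=...'` argument of the sample upload request. As a cross-check, here is roughly the same multipart upload from TypeScript — a sketch only; `API_BASE_URL` and `API_KEY` are placeholders standing in for the docs' `api_base_url` and `{api_key}`:

```ts
// Sketch of the /files/upload request the corrected cURL example describes.
const API_BASE_URL = 'https://example.com/v1' // placeholder
const API_KEY = '{api_key}'                   // placeholder

async function uploadFile(file: File, user: string) {
  const form = new FormData()
  form.append('file', file) // png, jpeg, jpg, webp or gif per the docs
  form.append('user', user)

  const res = await fetch(`${API_BASE_URL}/files/upload`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${API_KEY}` },
    body: form, // fetch sets the multipart boundary automatically
  })
  return res.json()
}
```
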
@@ -247,7 +247,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 </Col>
 <Col sticky>
 ### リクエスト例
-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -226,7 +226,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
 </Col>
 <Col sticky>

-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -352,7 +352,7 @@ Chat applications support session persistence, allowing previous chat history to
 </Col>
 <Col sticky>
 ### Request Example
-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1165,6 +1165,13 @@ Chat applications support session persistence, allowing previous chat history to
     - `enabled` (bool) Whether it is enabled
   - `speech_to_text` (object) Speech to text
     - `enabled` (bool) Whether it is enabled
+  - `text_to_speech` (object) Text to speech
+    - `enabled` (bool) Whether it is enabled
+    - `voice` (string) Voice type
+    - `language` (string) Language
+    - `autoPlay` (string) Auto play
+      - `enabled` Enabled
+      - `disabled` Disabled
   - `retriever_resource` (object) Citation and Attribution
     - `enabled` (bool) Whether it is enabled
   - `annotation_reply` (object) Annotation reply
@@ -1220,6 +1227,12 @@ Chat applications support session persistence, allowing previous chat history to
     "speech_to_text": {
         "enabled": true
     },
+    "text_to_speech": {
+        "enabled": true,
+        "voice": "sambert-zhinan-v1",
+        "language": "zh-Hans",
+        "autoPlay": "disabled"
+    },
     "retriever_resource": {
         "enabled": true
     },
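Editor's note: the newly documented `text_to_speech` block gives the parameters response a small, fixed shape. A TypeScript sketch of that shape exactly as the hunks above describe it — the interface name is made up for illustration:

```ts
// Hypothetical typing for the `text_to_speech` section of the
// /parameters response documented above.
interface TextToSpeechConfig {
  enabled: boolean
  voice: string                     // e.g. 'sambert-zhinan-v1'
  language: string                  // e.g. 'zh-Hans'
  autoPlay: 'enabled' | 'disabled'  // auto play on/off
}

const example: TextToSpeechConfig = {
  enabled: true,
  voice: 'sambert-zhinan-v1',
  language: 'zh-Hans',
  autoPlay: 'disabled',
}
```
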
@@ -352,7 +352,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 </Col>
 <Col sticky>
 ### リクエスト例
-<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1165,6 +1165,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     - `enabled` (bool) 有効かどうか
   - `speech_to_text` (object) 音声からテキストへ
     - `enabled` (bool) 有効かどうか
+  - `text_to_speech` (object) テキストから音声へ
+    - `enabled` (bool) 有効かどうか
+    - `voice` (string) 音声タイプ
+    - `language` (string) 言語
+    - `autoPlay` (string) 自動再生
+      - `enabled` 有効
+      - `disabled` 無効
   - `retriever_resource` (object) 引用と帰属
     - `enabled` (bool) 有効かどうか
   - `annotation_reply` (object) 注釈返信
@@ -1220,6 +1227,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     "speech_to_text": {
         "enabled": true
     },
+    "text_to_speech": {
+        "enabled": true,
+        "voice": "sambert-zhinan-v1",
+        "language": "zh-Hans",
+        "autoPlay": "disabled"
+    },
     "retriever_resource": {
         "enabled": true
     },
@@ -362,7 +362,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
 </Col>
 <Col sticky>

-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1199,6 +1199,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
     - `enabled` (bool) 是否开启
   - `speech_to_text` (object) 语音转文本
     - `enabled` (bool) 是否开启
+  - `text_to_speech` (object) 文本转语音
+    - `enabled` (bool) 是否开启
+    - `voice` (string) 语音类型
+    - `language` (string) 语言
+    - `autoPlay` (string) 自动播放
+      - `enabled` 开启
+      - `disabled` 关闭
   - `retriever_resource` (object) 引用和归属
     - `enabled` (bool) 是否开启
   - `annotation_reply` (object) 标记回复
@@ -315,7 +315,7 @@ Chat applications support session persistence, allowing previous chat history to
 </Col>
 <Col sticky>
 ### Request Example
-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1201,6 +1201,13 @@ Chat applications support session persistence, allowing previous chat history to
     - `enabled` (bool) Whether it is enabled
   - `speech_to_text` (object) Speech to text
     - `enabled` (bool) Whether it is enabled
+  - `text_to_speech` (object) Text to speech
+    - `enabled` (bool) Whether it is enabled
+    - `voice` (string) Voice type
+    - `language` (string) Language
+    - `autoPlay` (string) Auto play
+      - `enabled` Enabled
+      - `disabled` Disabled
   - `retriever_resource` (object) Citation and Attribution
     - `enabled` (bool) Whether it is enabled
   - `annotation_reply` (object) Annotation reply
@@ -1256,6 +1263,12 @@ Chat applications support session persistence, allowing previous chat history to
     "speech_to_text": {
         "enabled": true
     },
+    "text_to_speech": {
+        "enabled": true,
+        "voice": "sambert-zhinan-v1",
+        "language": "zh-Hans",
+        "autoPlay": "disabled"
+    },
     "retriever_resource": {
         "enabled": true
     },
@@ -315,7 +315,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 </Col>
 <Col sticky>
 ### リクエスト例
-<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1192,6 +1192,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     - `enabled` (bool) 有効かどうか
   - `speech_to_text` (object) 音声からテキストへ
     - `enabled` (bool) 有効かどうか
+  - `text_to_speech` (object) テキストから音声へ
+    - `enabled` (bool) 有効かどうか
+    - `voice` (string) 音声タイプ
+    - `language` (string) 言語
+    - `autoPlay` (string) 自動再生
+      - `enabled` 有効
+      - `disabled` 無効
   - `retriever_resource` (object) 引用と帰属
     - `enabled` (bool) 有効かどうか
   - `annotation_reply` (object) 注釈返信
@@ -1247,6 +1254,12 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     "speech_to_text": {
         "enabled": true
     },
+    "text_to_speech": {
+        "enabled": true,
+        "voice": "sambert-zhinan-v1",
+        "language": "zh-Hans",
+        "autoPlay": "disabled"
+    },
     "retriever_resource": {
         "enabled": true
     },
@@ -333,7 +333,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
 </Col>
 <Col sticky>

-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -1204,6 +1204,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty } from '../md.tsx'
     - `enabled` (bool) 是否开启
   - `speech_to_text` (object) 语音转文本
     - `enabled` (bool) 是否开启
+  - `text_to_speech` (object) 文本转语音
+    - `enabled` (bool) 是否开启
+    - `voice` (string) 语音类型
+    - `language` (string) 语言
+    - `autoPlay` (string) 自动播放
+      - `enabled` 开启
+      - `disabled` 关闭
   - `retriever_resource` (object) 引用和归属
     - `enabled` (bool) 是否开启
   - `annotation_reply` (object) 标记回复
@@ -476,7 +476,7 @@ Workflow applications offers non-session support and is ideal for translation, a
 </Col>
 <Col sticky>
 ### Request Example
-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -479,7 +479,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
 </Col>
 <Col sticky>
 ### リクエスト例
-<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="リクエスト" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -470,7 +470,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
 </Col>
 <Col sticky>

-<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif] \\\n--form 'user=abc-123'`}>
+<CodeGroup title="Request" tag="POST" label="/files/upload" targetCode={`curl -X POST '${props.appDetail.api_base_url}/files/upload' \\\n--header 'Authorization: Bearer {api_key}' \\\n--form 'file=@localfile;type=image/[png|jpeg|jpg|webp|gif]' \\\n--form 'user=abc-123'`}>

 ```bash {{ title: 'cURL' }}
 curl -X POST '${props.appDetail.api_base_url}/files/upload' \
@@ -4,7 +4,7 @@ import { useTranslation } from 'react-i18next'
 import { Menu, MenuButton, MenuItems, Transition } from '@headlessui/react'
 import { RiArrowDownSLine } from '@remixicon/react'
 import cn from '@/utils/classnames'
-import { WEB_PREFIX } from '@/config'
+import { basePath } from '@/utils/var'
 import PlanBadge from '@/app/components/header/plan-badge'
 import { switchWorkspace } from '@/service/common'
 import { useWorkspacesContext } from '@/context/workspace-context'
@@ -23,7 +23,7 @@ const WorkplaceSelector = () => {
         return
       await switchWorkspace({ url: '/workspaces/switch', body: { tenant_id } })
       notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
-      location.assign(WEB_PREFIX)
+      location.assign(`${location.origin}${basePath}`)
     }
     catch {
       notify({ type: 'error', message: t('common.provider.saveFailed') })
@@ -2,7 +2,6 @@
 import cn from '@/utils/classnames'
 import Modal from '@/app/components/base/modal'
 import Input from '@/app/components/base/input'
-import { WEB_PREFIX } from '@/config'
 import { useTranslation } from 'react-i18next'
 import { useState } from 'react'
 import { useContext } from 'use-context-selector'
@@ -34,7 +33,7 @@ const EditWorkspaceModal = ({
       },
     })
     notify({ type: 'success', message: t('common.actionMsg.modifiedSuccessfully') })
-    location.assign(WEB_PREFIX)
+    location.assign(`${location.origin}`)
   }
   catch {
     notify({ type: 'error', message: t('common.actionMsg.modifiedUnsuccessfully') })
@@ -1,5 +1,6 @@
 import type { FC } from 'react'
 import type { ModelProvider } from '../declarations'
+import { basePath } from '@/utils/var'
 import { useLanguage } from '../hooks'
 import { Openai } from '@/app/components/base/icons/src/vender/other'
 import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm'
@@ -40,7 +41,7 @@ const ProviderIcon: FC<ProviderIconProps> = ({
     <div className={cn('inline-flex items-center gap-2', className)}>
       <img
         alt='provider-icon'
-        src={renderI18nObject(provider.icon_small, language)}
+        src={basePath + renderI18nObject(provider.icon_small, language)}
         className='h-6 w-6'
       />
       <div className='system-md-semibold text-text-primary'>
@@ -14,6 +14,7 @@ import Nav from '../nav'
 import type { NavItem } from '../nav/nav-selector'
 import { fetchDatasetDetail, fetchDatasets } from '@/service/datasets'
 import type { DataSetListResponse } from '@/models/datasets'
+import { basePath } from '@/utils/var'

 const getKey = (pageIndex: number, previousPageData: DataSetListResponse) => {
   if (!pageIndex || previousPageData.has_more)
@@ -56,7 +57,7 @@ const DatasetNav = () => {
         icon_background: dataset.icon_background,
       })) as NavItem[]}
       createText={t('common.menus.newDataset')}
-      onCreate={() => router.push('/datasets/create')}
+      onCreate={() => router.push(`${basePath}/datasets/create`)}
       onLoadmore={handleLoadmore}
     />
   )
@@ -70,7 +70,7 @@ const NavSelector = ({ curNav, navs, createText, isApp, onCreate, onLoadmore }:
       <MenuItems
         className="
           absolute -left-11 right-0 mt-1.5 w-60 max-w-80
-          origin-top-right divide-y divide-gray-100 rounded-lg bg-white
+          origin-top-right divide-y divide-divider-regular rounded-lg bg-components-panel-bg-blur
           shadow-lg
         "
       >
@@ -78,7 +78,7 @@ const NavSelector = ({ curNav, navs, createText, isApp, onCreate, onLoadmore }:
         {
           navs.map(nav => (
             <MenuItem key={nav.id}>
-              <div className='flex w-full cursor-pointer items-center truncate rounded-lg px-3 py-[6px] text-[14px] font-normal text-gray-700 hover:bg-gray-100' onClick={() => {
+              <div className='flex w-full cursor-pointer items-center truncate rounded-lg px-3 py-[6px] text-[14px] font-normal text-text-secondary hover:bg-state-base-hover' onClick={() => {
                 if (curNav?.id === nav.id)
                   return
                 setAppDetail()
@@ -119,12 +119,12 @@ const NavSelector = ({ curNav, navs, createText, isApp, onCreate, onLoadmore }:
       {!isApp && isCurrentWorkspaceEditor && (
         <MenuItem as="div" className='w-full p-1'>
           <div onClick={() => onCreate('')} className={cn(
-            'flex cursor-pointer items-center gap-2 rounded-lg px-3 py-[6px] hover:bg-gray-100',
+            'flex cursor-pointer items-center gap-2 rounded-lg px-3 py-[6px] hover:bg-state-base-hover ',
           )}>
-            <div className='flex h-6 w-6 shrink-0 items-center justify-center rounded-[6px] border border-[0.5px] border-gray-200 bg-gray-50'>
-              <RiAddLine className='h-4 w-4 text-gray-500' />
+            <div className='flex h-6 w-6 shrink-0 items-center justify-center rounded-[6px] border-[0.5px] border-divider-regular bg-background-default'>
+              <RiAddLine className='h-4 w-4 text-text-primary' />
             </div>
-            <div className='grow text-left text-[14px] font-normal text-gray-700'>{createText}</div>
+            <div className='grow text-left text-[14px] font-normal text-text-secondary'>{createText}</div>
           </div>
         </MenuItem>
       )}
@@ -134,14 +134,14 @@ const NavSelector = ({ curNav, navs, createText, isApp, onCreate, onLoadmore }:
           <>
             <MenuButton className='w-full p-1'>
               <div className={cn(
-                'flex cursor-pointer items-center gap-2 rounded-lg px-3 py-[6px] hover:bg-gray-100',
-                open && '!bg-gray-100',
+                'flex cursor-pointer items-center gap-2 rounded-lg px-3 py-[6px] hover:bg-state-base-hover',
+                open && '!bg-state-base-hover',
               )}>
-                <div className='flex h-6 w-6 shrink-0 items-center justify-center rounded-[6px] border border-[0.5px] border-gray-200 bg-gray-50'>
-                  <RiAddLine className='h-4 w-4 text-gray-500' />
+                <div className='flex h-6 w-6 shrink-0 items-center justify-center rounded-[6px] border-[0.5px] border-divider-regular bg-background-default'>
+                  <RiAddLine className='h-4 w-4 text-text-primary' />
                 </div>
-                <div className='grow text-left text-[14px] font-normal text-gray-700'>{createText}</div>
-                <RiArrowRightSLine className='h-3.5 w-3.5 shrink-0 text-gray-500' />
+                <div className='grow text-left text-[14px] font-normal text-text-secondary'>{createText}</div>
+                <RiArrowRightSLine className='h-3.5 w-3.5 shrink-0 text-text-primary' />
               </div>
             </MenuButton>
             <Transition
@@ -154,21 +154,21 @@ const NavSelector = ({ curNav, navs, createText, isApp, onCreate, onLoadmore }:
               leaveTo="transform opacity-0 scale-95"
             >
               <MenuItems className={cn(
-                'absolute right-[-198px] top-[3px] z-10 min-w-[200px] rounded-lg border-[0.5px] border-gray-200 bg-white shadow-lg',
+                'absolute right-[-198px] top-[3px] z-10 min-w-[200px] rounded-lg bg-components-panel-bg-blur shadow-lg',
               )}>
                 <div className='p-1'>
-                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-gray-700 hover:bg-gray-100')} onClick={() => onCreate('blank')}>
-                    <FilePlus01 className='mr-2 h-4 w-4 shrink-0 text-gray-600' />
+                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-text-secondary hover:bg-state-base-hover')} onClick={() => onCreate('blank')}>
+                    <FilePlus01 className='mr-2 h-4 w-4 shrink-0 text-text-secondary' />
                     {t('app.newApp.startFromBlank')}
                   </div>
-                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-gray-700 hover:bg-gray-100')} onClick={() => onCreate('template')}>
-                    <FilePlus02 className='mr-2 h-4 w-4 shrink-0 text-gray-600' />
+                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-text-secondary hover:bg-state-base-hover')} onClick={() => onCreate('template')}>
+                    <FilePlus02 className='mr-2 h-4 w-4 shrink-0 text-text-secondary' />
                     {t('app.newApp.startFromTemplate')}
                   </div>
                 </div>
-                <div className='border-t border-gray-100 p-1'>
-                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-gray-700 hover:bg-gray-100')} onClick={() => onCreate('dsl')}>
-                    <FileArrow01 className='mr-2 h-4 w-4 shrink-0 text-gray-600' />
+                <div className='border-t border-divider-regular p-1'>
+                  <div className={cn('flex cursor-pointer items-center rounded-lg px-3 py-[6px] font-normal text-text-secondary hover:bg-state-base-hover')} onClick={() => onCreate('dsl')}>
+                    <FileArrow01 className='mr-2 h-4 w-4 shrink-0 text-text-secondary' />
                     {t('app.importDSL')}
                   </div>
                 </div>
@@ -2,7 +2,7 @@ import {
   memo,
   useCallback,
 } from 'react'
-import Link from 'next/link'
+import { basePath } from '@/utils/var'
 import { useTranslation } from 'react-i18next'
 import {
   RiAddLine,
@@ -54,7 +54,7 @@ const Blocks = ({
         >
           <div className='flex h-[22px] w-full items-center justify-between pl-3 pr-1 text-xs font-medium text-gray-500'>
             {toolWithProvider.label[language]}
-            <Link className='hidden cursor-pointer items-center group-hover:flex' href={`/tools?category=${toolWithProvider.type}`} target='_blank'>{t('tools.addToolModal.manageInTools')}<ArrowUpRight className='ml-0.5 h-3 w-3' /></Link>
+            <a className='hidden cursor-pointer items-center group-hover:flex' href={`${basePath}/tools?category=${toolWithProvider.type}`} target='_blank'>{t('tools.addToolModal.manageInTools')}<ArrowUpRight className='ml-0.5 h-3 w-3' /></a>
           </div>
           {list.map((tool) => {
             const labelContent = (() => {
@@ -6,7 +6,7 @@ import {
   RiCloseLine,
 } from '@remixicon/react'
 import { AuthHeaderPrefix, AuthType, CollectionType } from '../types'
-import Link from 'next/link'
+import { basePath } from '@/utils/var'
 import type { Collection, CustomCollectionBackend, Tool, WorkflowToolProviderRequest, WorkflowToolProviderResponse } from '../types'
 import ToolItem from './tool-item'
 import cn from '@/utils/classnames'
@@ -279,10 +279,10 @@ const ProviderDetail = ({
               variant='primary'
               className={cn('my-3 w-[183px] shrink-0')}
             >
-              <Link className='flex items-center' href={`/app/${(customCollection as WorkflowToolProviderResponse).workflow_app_id}/workflow`} rel='noreferrer' target='_blank'>
+              <a className='flex items-center' href={`${basePath}/app/${(customCollection as WorkflowToolProviderResponse).workflow_app_id}/workflow`} rel='noreferrer' target='_blank'>
                 <div className='system-sm-medium'>{t('tools.openInStudio')}</div>
                 <LinkExternal02 className='ml-1 h-4 w-4' />
-              </Link>
+              </a>
             </Button>
             <Button
               className={cn('my-3 w-[183px] shrink-0')}
@@ -3,7 +3,6 @@ import type { FC } from 'react'
 import Editor, { loader } from '@monaco-editor/react'
 import React, { useEffect, useMemo, useRef, useState } from 'react'
 import Base from '../base'
-import { WEB_PREFIX } from '@/config'
 import cn from '@/utils/classnames'
 import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
 import {
@@ -13,9 +12,10 @@ import { Theme } from '@/types/app'
 import useTheme from '@/hooks/use-theme'
 import './style.css'
 import { noop } from 'lodash-es'
+import { basePath } from '@/utils/var'

 // load file from local instead of cdn https://github.com/suren-atoyan/monaco-react/issues/482
-loader.config({ paths: { vs: `${WEB_PREFIX}/vs` } })
+loader.config({ paths: { vs: `${basePath}/vs` } })

 const CODE_EDITOR_LINE_HEIGHT = 18

@@ -50,7 +50,7 @@ const ConstantField: FC<Props> = ({
       {schema.type === FormTypeEnum.textNumber && (
         <input
           type='number'
-          className='h-8 w-full overflow-hidden rounded-lg bg-gray-100 p-2 text-[13px] font-normal leading-8 text-gray-900 placeholder:text-gray-400 focus:outline-none'
+          className='h-8 w-full overflow-hidden rounded-lg bg-workflow-block-parma-bg p-2 text-[13px] font-normal leading-8 text-text-secondary placeholder:text-gray-400 focus:outline-none'
           value={value}
           onChange={handleStaticChange}
           readOnly={readonly}
@@ -124,11 +124,11 @@ const ConditionItem = ({
       )}>
         <div className='flex items-center p-1'>
           <div className='w-0 grow'>
-            <div className='inline-flex h-6 items-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-components-badge-white-to-dark pl-1 pr-1.5 shadow-xs'>
+            <div className='flex h-6 min-w-0 items-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-components-badge-white-to-dark pl-1 pr-1.5 shadow-xs'>
               <div className='mr-0.5 p-[1px]'>
                 <MetadataIcon type={currentMetadata?.type} className='h-3 w-3' />
               </div>
-              <div className='system-xs-medium mr-0.5 text-text-secondary'>{currentMetadata?.name}</div>
+              <div className='system-xs-medium mr-0.5 min-w-0 flex-1 truncate text-text-secondary'>{currentMetadata?.name}</div>
               <div className='system-xs-regular text-text-tertiary'>{currentMetadata?.type}</div>
             </div>
           </div>
@@ -3,7 +3,7 @@ import { useCallback, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import useSWR from 'swr'
 import { useSearchParams } from 'next/navigation'
-import Link from 'next/link'
+import { basePath } from '@/utils/var'
 import cn from 'classnames'
 import { CheckCircleIcon } from '@heroicons/react/24/solid'
 import Input from '../components/base/input'
@@ -164,7 +164,7 @@ const ChangePasswordForm = () => {
           </div>
           <div className="mx-auto mt-6 w-full">
             <Button variant='primary' className='w-full'>
-              <Link href={'/signin'}>{t('login.passwordChanged')}</Link>
+              <a href={`${basePath}/signin`}>{t('login.passwordChanged')}</a>
             </Button>
           </div>
         </div>
@@ -10,7 +10,7 @@ import { zodResolver } from '@hookform/resolvers/zod'
 import Loading from '../components/base/loading'
 import Input from '../components/base/input'
 import Button from '@/app/components/base/button'
-import { WEB_PREFIX } from '@/config'
+import { basePath } from '@/utils/var'

 import {
   fetchInitValidateStatus,
@@ -71,7 +71,7 @@ const ForgotPasswordForm = () => {
     fetchSetupStatus().then(() => {
       fetchInitValidateStatus().then((res: InitValidateStatusResponse) => {
         if (res.status === 'not_started')
-          window.location.href = `${WEB_PREFIX}/init`
+          window.location.href = `${basePath}/init`
       })

       setLoading(false)
@@ -5,7 +5,7 @@ import { useRouter } from 'next/navigation'
 import Toast from '../components/base/toast'
 import Loading from '../components/base/loading'
 import Button from '@/app/components/base/button'
-import { WEB_PREFIX } from '@/config'
+import { basePath } from '@/utils/var'
 import { fetchInitValidateStatus, initValidate } from '@/service/common'
 import type { InitValidateStatusResponse } from '@/models/common'
 import useDocumentTitle from '@/hooks/use-document-title'
@@ -44,7 +44,7 @@ const InitPasswordPopup = () => {
   useEffect(() => {
     fetchInitValidateStatus().then((res: InitValidateStatusResponse) => {
       if (res.status === 'finished')
-        window.location.href = `${WEB_PREFIX}/install`
+        window.location.href = `${basePath}/install`
       else
         setLoading(false)
     })
@@ -36,9 +36,7 @@ const LocaleLayout = async ({
       <body
         className="color-scheme h-full select-auto"
         data-api-prefix={process.env.NEXT_PUBLIC_API_PREFIX}
-        data-web-prefix={process.env.NEXT_PUBLIC_WEB_PREFIX}
         data-pubic-api-prefix={process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX}
-        data-pubic-web-prefix={process.env.NEXT_PUBLIC_PUBLIC_WEB_PREFIX}
         data-marketplace-api-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX}
         data-marketplace-url-prefix={process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX}
         data-public-edition={process.env.NEXT_PUBLIC_EDITION}
@@ -3,23 +3,14 @@ import { AgentStrategy } from '@/types/app'
 import { PromptRole } from '@/models/debug'

 export let apiPrefix = ''
-export let webPrefix = ''
 export let publicApiPrefix = ''
-export let publicWebPrefix = ''
 export let marketplaceApiPrefix = ''
 export let marketplaceUrlPrefix = ''

 // NEXT_PUBLIC_API_PREFIX=/console/api NEXT_PUBLIC_PUBLIC_API_PREFIX=/api npm run start
-if (
-  process.env.NEXT_PUBLIC_API_PREFIX
-  && process.env.NEXT_PUBLIC_WEB_PREFIX
-  && process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX
-  && process.env.NEXT_PUBLIC_PUBLIC_WEB_PREFIX
-) {
+if (process.env.NEXT_PUBLIC_API_PREFIX && process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX) {
   apiPrefix = process.env.NEXT_PUBLIC_API_PREFIX
-  webPrefix = process.env.NEXT_PUBLIC_WEB_PREFIX
   publicApiPrefix = process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX
-  publicWebPrefix = process.env.NEXT_PUBLIC_PUBLIC_WEB_PREFIX
 }
 else if (
   globalThis.document?.body?.getAttribute('data-api-prefix')
@@ -27,18 +18,14 @@ else if (
 ) {
   // Not build can not get env from process.env.NEXT_PUBLIC_ in browser https://nextjs.org/docs/basic-features/environment-variables#exposing-environment-variables-to-the-browser
   apiPrefix = globalThis.document.body.getAttribute('data-api-prefix') as string
-  webPrefix = (globalThis.document.body.getAttribute('data-web-prefix') as string || globalThis.location.origin)
   publicApiPrefix = globalThis.document.body.getAttribute('data-pubic-api-prefix') as string
-  publicWebPrefix = (globalThis.document.body.getAttribute('data-pubic-web-prefix') as string || globalThis.location.origin)
 }
 else {
   // const domainParts = globalThis.location?.host?.split('.');
   // in production env, the host is dify.app . In other env, the host is [dev].dify.app
   // const env = domainParts.length === 2 ? 'ai' : domainParts?.[0];
   apiPrefix = 'http://localhost:5001/console/api'
-  webPrefix = 'http://localhost:3000'
   publicApiPrefix = 'http://localhost:5001/api' // avoid browser private mode api cross origin
-  publicWebPrefix = 'http://localhost:3000'
   marketplaceApiPrefix = 'http://localhost:5002/api'
 }

@@ -52,9 +39,7 @@ else {
 }

 export const API_PREFIX: string = apiPrefix
-export const WEB_PREFIX: string = webPrefix
 export const PUBLIC_API_PREFIX: string = publicApiPrefix
-export const PUBLIC_WEB_PREFIX: string = publicWebPrefix
 export const MARKETPLACE_API_PREFIX: string = marketplaceApiPrefix
 export const MARKETPLACE_URL_PREFIX: string = marketplaceUrlPrefix
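Editor's note: with `WEB_PREFIX` and `PUBLIC_WEB_PREFIX` dropped from the config module, callers that need an absolute web URL now compose it from the current origin plus `basePath`, as the service-layer hunks further down show. A before/after sketch — `toWebUrl` is a hypothetical helper for illustration, not a function added by this commit:

```ts
// Sketch of the refactor: absolute web URLs are no longer read from
// config; they are derived from the current origin plus basePath.
import { basePath } from '@/utils/var'

// Before (removed in this commit):
//   import { WEB_PREFIX } from '@/config'
//   globalThis.location.href = `${WEB_PREFIX}/init`

// After — hypothetical helper showing the new composition:
function toWebUrl(path: string): string {
  return `${globalThis.location.origin}${basePath}${path}`
}

// e.g. toWebUrl('/init') or toWebUrl('/signin')
```
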
@@ -15,9 +15,7 @@ set -e
 export NEXT_PUBLIC_DEPLOY_ENV=${DEPLOY_ENV}
 export NEXT_PUBLIC_EDITION=${EDITION}
 export NEXT_PUBLIC_API_PREFIX=${CONSOLE_API_URL}/console/api
-export NEXT_PUBLIC_WEB_PREFIX=${CONSOLE_WEB_URL}
 export NEXT_PUBLIC_PUBLIC_API_PREFIX=${APP_API_URL}/api
-export NEXT_PUBLIC_PUBLIC_WEB_PREFIX=${APP_WEB_URL}
 export NEXT_PUBLIC_MARKETPLACE_API_PREFIX=${MARKETPLACE_API_URL}/api/v1
 export NEXT_PUBLIC_MARKETPLACE_URL_PREFIX=${MARKETPLACE_URL}

@@ -7,7 +7,7 @@ const translation = {
   nameError: 'Name cannot be empty',
   desc: 'Knowledge Description',
   descInfo: 'Please write a clear textual description to outline the content of the Knowledge. This description will be used as a basis for matching when selecting from multiple Knowledge for inference.',
-  descPlaceholder: 'Describe what is in this data set. A detailed description allows AI to access the content of the data set in a timely manner. If empty, LangGenius will use the default hit strategy.',
+  descPlaceholder: 'Describe what is in this data set. A detailed description allows AI to access the content of the data set in a timely manner. If empty, Dify will use the default hit strategy.',
   helpText: 'Learn how to write a good dataset description.',
   descWrite: 'Learn how to write a good Knowledge description.',
   permissions: 'Permissions',
@@ -7,7 +7,7 @@ const translation = {
   nameError: '名称不能为空',
   desc: '知识库描述',
   descInfo: '请写出清楚的文字描述来概述知识库的内容。当从多个知识库中进行选择匹配时,该描述将用作匹配的基础。',
-  descPlaceholder: '描述该数据集的内容。详细描述可以让 AI 更快地访问数据集的内容。如果为空,LangGenius 将使用默认的命中策略。',
+  descPlaceholder: '描述该数据集的内容。详细描述可以让 AI 更快地访问数据集的内容。如果为空,Dify 将使用默认的命中策略。',
   helpText: '学习如何编写一份优秀的数据集描述。',
   descWrite: '了解如何编写更好的知识库描述。',
   permissions: '可见权限',
@@ -37,7 +37,7 @@ export function middleware(request: NextRequest) {
     style-src 'self' 'unsafe-inline' ${scheme_source} ${whiteList};
     worker-src 'self' ${scheme_source} ${csp} ${whiteList};
     media-src 'self' ${scheme_source} ${csp} ${whiteList};
-    img-src * data:;
+    img-src * data: blob:;
     font-src 'self';
     object-src 'none';
     base-uri 'self';
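Editor's note: the CSP change above adds `blob:` to `img-src`, which is what lets images created client-side via `URL.createObjectURL` render. An illustrative sketch of the kind of code this unblocks (not taken from this commit):

```ts
// Why `img-src ... blob:` matters: object URLs created from in-memory
// data render as <img src="blob:..."> and are blocked by the CSP
// unless the img-src directive allows the blob: scheme.
function showPreview(file: File, img: HTMLImageElement) {
  const url = URL.createObjectURL(file) // yields a blob: URL
  img.src = url
  img.onload = () => URL.revokeObjectURL(url) // release the object URL
}
```
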
@@ -1,6 +1,7 @@
-import { API_PREFIX, IS_CE_EDITION, PUBLIC_API_PREFIX, WEB_PREFIX } from '@/config'
+import { API_PREFIX, IS_CE_EDITION, PUBLIC_API_PREFIX } from '@/config'
 import { refreshAccessTokenOrRelogin } from './refresh-token'
 import Toast from '@/app/components/base/toast'
+import { basePath } from '@/utils/var'
 import type { AnnotationReply, MessageEnd, MessageReplace, ThoughtItem } from '@/app/components/base/chat/chat/type'
 import type { VisionFile } from '@/types/app'
 import type {
@@ -473,7 +474,7 @@ export const request = async<T>(url: string, options = {}, otherOptions?: IOther
     const errResp: Response = err as any
     if (errResp.status === 401) {
       const [parseErr, errRespData] = await asyncRunSafe<ResponseError>(errResp.json())
-      const loginUrl = `${WEB_PREFIX}/signin`
+      const loginUrl = `${globalThis.location.origin}${basePath}/signin`
       if (parseErr) {
         globalThis.location.href = loginUrl
         return Promise.reject(err)
@@ -509,11 +510,11 @@ export const request = async<T>(url: string, options = {}, otherOptions?: IOther
         return Promise.reject(err)
       }
       if (code === 'not_init_validated' && IS_CE_EDITION) {
-        globalThis.location.href = `${WEB_PREFIX}/init`
+        globalThis.location.href = `${globalThis.location.origin}${basePath}/init`
         return Promise.reject(err)
       }
       if (code === 'not_setup' && IS_CE_EDITION) {
-        globalThis.location.href = `${WEB_PREFIX}/install`
+        globalThis.location.href = `${globalThis.location.origin}${basePath}/install`
         return Promise.reject(err)
       }

@@ -521,7 +522,7 @@ export const request = async<T>(url: string, options = {}, otherOptions?: IOther
       const [refreshErr] = await asyncRunSafe(refreshAccessTokenOrRelogin(TIME_OUT))
       if (refreshErr === null)
         return baseFetch<T>(url, options, otherOptionsForBaseFetch)
-      if (!location.pathname.includes('/signin') || !IS_CE_EDITION) {
+      if (location.pathname !== `${basePath}/signin` || !IS_CE_EDITION) {
         globalThis.location.href = loginUrl
         return Promise.reject(err)
       }
@@ -2,7 +2,7 @@ import type { AfterResponseHook, BeforeErrorHook, BeforeRequestHook, Hooks } fro
 import ky from 'ky'
 import type { IOtherOptions } from './base'
 import Toast from '@/app/components/base/toast'
-import { API_PREFIX, MARKETPLACE_API_PREFIX, PUBLIC_API_PREFIX, WEB_PREFIX } from '@/config'
+import { API_PREFIX, MARKETPLACE_API_PREFIX, PUBLIC_API_PREFIX } from '@/config'
 import { getInitialTokenV2, isTokenV1 } from '@/app/components/share/utils'
 import { getProcessedSystemVariablesFromUrlParams } from '@/app/components/base/chat/utils'

@@ -44,7 +44,7 @@ const afterResponseErrorCode = (otherOptions: IOtherOptions): AfterResponseHook
         if (!otherOptions.silent)
           Toast.notify({ type: 'error', message: data.message })
         if (data.code === 'already_setup')
-          globalThis.location.href = `${WEB_PREFIX}/signin`
+          globalThis.location.href = `${globalThis.location.origin}/signin`
       })
       break
     case 401: