mirror of https://github.com/langgenius/dify.git
Merge branch 'main' into feat/r2
This commit is contained in:
commit 35be8721b9
@@ -269,6 +269,7 @@ OPENSEARCH_PORT=9200
 OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin
 OPENSEARCH_SECURE=true
+OPENSEARCH_VERIFY_CERTS=true

 # Baidu configuration
 BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Literal, Optional

 from pydantic import Field
 from pydantic_settings import BaseSettings
@@ -34,7 +34,7 @@ class S3StorageConfig(BaseSettings):
         default=None,
     )

-    S3_ADDRESS_STYLE: str = Field(
+    S3_ADDRESS_STYLE: Literal["auto", "virtual", "path"] = Field(
         description="S3 addressing style: 'auto', 'path', or 'virtual'",
         default="auto",
     )
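For context on the S3_ADDRESS_STYLE change above: narrowing the annotation from str to Literal["auto", "virtual", "path"] means pydantic rejects an unsupported addressing style when settings are loaded, instead of letting a typo travel on to the S3 client. A minimal sketch of that behavior (the standalone model below is illustrative, not the full config class):

    from typing import Literal

    from pydantic import Field, ValidationError
    from pydantic_settings import BaseSettings


    class S3StorageConfig(BaseSettings):
        S3_ADDRESS_STYLE: Literal["auto", "virtual", "path"] = Field(default="auto")


    S3StorageConfig(S3_ADDRESS_STYLE="path")  # accepted
    try:
        S3StorageConfig(S3_ADDRESS_STYLE="vhost")  # now fails fast with a validation error
    except ValidationError as exc:
        print(exc)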
@@ -33,6 +33,11 @@ class OpenSearchConfig(BaseSettings):
         default=False,
     )

+    OPENSEARCH_VERIFY_CERTS: bool = Field(
+        description="Whether to verify SSL certificates for HTTPS connections (recommended to set True in production)",
+        default=True,
+    )
+
     OPENSEARCH_AUTH_METHOD: AuthMethod = Field(
         description="Authentication method for OpenSearch connection (default is 'basic')",
         default=AuthMethod.BASIC,
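OPENSEARCH_SECURE and the new OPENSEARCH_VERIFY_CERTS are independent knobs: the first decides whether the connection uses TLS at all, the second whether the server certificate is actually validated. A hedged sketch of how such settings typically reach an opensearch-py client; the wiring below is illustrative, not the exact code in this repository:

    from opensearchpy import OpenSearch


    def build_client(host: str, port: int, user: str, password: str,
                     secure: bool, verify_certs: bool) -> OpenSearch:
        # use_ssl toggles HTTPS; verify_certs controls certificate validation.
        # Disabling verification is convenient for self-signed dev clusters but
        # leaves production traffic open to man-in-the-middle attacks.
        return OpenSearch(
            hosts=[{"host": host, "port": port}],
            http_auth=(user, password),
            use_ssl=secure,
            verify_certs=verify_certs,
        )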
@@ -202,18 +202,18 @@ class EmailCodeLoginApi(Resource):
         except AccountRegisterError as are:
             raise AccountInFreezeError()
         if account:
-            tenant = TenantService.get_join_tenants(account)
-            if not tenant:
+            tenants = TenantService.get_join_tenants(account)
+            if not tenants:
                 workspaces = FeatureService.get_system_features().license.workspaces
                 if not workspaces.is_available():
                     raise WorkspacesLimitExceeded()
                 if not FeatureService.get_system_features().is_allow_create_workspace:
                     raise NotAllowedCreateWorkspace()
                 else:
-                    tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
-                    TenantService.create_tenant_member(tenant, account, role="owner")
-                    account.current_tenant = tenant
-                    tenant_was_created.send(tenant)
+                    new_tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                    TenantService.create_tenant_member(new_tenant, account, role="owner")
+                    account.current_tenant = new_tenant
+                    tenant_was_created.send(new_tenant)

         if account is None:
             try:
@@ -148,15 +148,15 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
     account = _get_account_by_openid_or_email(provider, user_info)

     if account:
-        tenant = TenantService.get_join_tenants(account)
-        if not tenant:
+        tenants = TenantService.get_join_tenants(account)
+        if not tenants:
             if not FeatureService.get_system_features().is_allow_create_workspace:
                 raise WorkSpaceNotAllowedCreateError()
             else:
-                tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
-                TenantService.create_tenant_member(tenant, account, role="owner")
-                account.current_tenant = tenant
-                tenant_was_created.send(tenant)
+                new_tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                TenantService.create_tenant_member(new_tenant, account, role="owner")
+                account.current_tenant = new_tenant
+                tenant_was_created.send(new_tenant)

     if not account:
         if not FeatureService.get_system_features().is_allow_register:
@@ -540,9 +540,22 @@ class DatasetIndexingStatusApi(Resource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data

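The change above (and the matching ones in the following hunks) swaps ad-hoc attribute assignment on SQLAlchemy models for an explicit dictionary. Flask-RESTful's marshal() resolves fields by key or attribute, so a plain dict serializes exactly like a model instance, and the ORM object is no longer mutated with values (completed_segments, total_segments) that have no backing column. A small self-contained sketch of the idea, with the field set abbreviated:

    from flask_restful import fields, marshal

    document_status_fields = {
        "id": fields.String,
        "indexing_status": fields.String,
        "completed_segments": fields.Integer,
        "total_segments": fields.Integer,
    }

    # A dict with matching keys marshals the same way a model instance would,
    # without writing transient attributes onto an ORM-mapped object.
    document_dict = {
        "id": "doc-1",
        "indexing_status": "indexing",
        "completed_segments": 42,
        "total_segments": 100,
    }
    print(marshal(document_dict, document_status_fields))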
@@ -583,11 +583,22 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            if document.is_paused:
-                document.indexing_status = "paused"
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": "paused" if document.is_paused else document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data

@@ -616,11 +627,22 @@ class DocumentIndexingStatusApi(DocumentResource):
             .count()
         )

-        document.completed_segments = completed_segments
-        document.total_segments = total_segments
-        if document.is_paused:
-            document.indexing_status = "paused"
-        return marshal(document, document_status_fields)
+        # Create a dictionary with document attributes and additional fields
+        document_dict = {
+            "id": document.id,
+            "indexing_status": "paused" if document.is_paused else document.indexing_status,
+            "processing_started_at": document.processing_started_at,
+            "parsing_completed_at": document.parsing_completed_at,
+            "cleaning_completed_at": document.cleaning_completed_at,
+            "splitting_completed_at": document.splitting_completed_at,
+            "completed_at": document.completed_at,
+            "paused_at": document.paused_at,
+            "error": document.error,
+            "stopped_at": document.stopped_at,
+            "completed_segments": completed_segments,
+            "total_segments": total_segments,
+        }
+        return marshal(document_dict, document_status_fields)


 class DocumentDetailApi(DocumentResource):
@@ -68,16 +68,24 @@ class TenantListApi(Resource):
     @account_initialization_required
     def get(self):
         tenants = TenantService.get_join_tenants(current_user)
+        tenant_dicts = []

         for tenant in tenants:
             features = FeatureService.get_features(tenant.id)
-            if features.billing.enabled:
-                tenant.plan = features.billing.subscription.plan
-            else:
-                tenant.plan = "sandbox"
-            if tenant.id == current_user.current_tenant_id:
-                tenant.current = True  # Set current=True for current tenant
-        return {"workspaces": marshal(tenants, tenants_fields)}, 200
+
+            # Create a dictionary with tenant attributes
+            tenant_dict = {
+                "id": tenant.id,
+                "name": tenant.name,
+                "status": tenant.status,
+                "created_at": tenant.created_at,
+                "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox",
+                "current": tenant.id == current_user.current_tenant_id,
+            }
+
+            tenant_dicts.append(tenant_dict)
+
+        return {"workspaces": marshal(tenant_dicts, tenants_fields)}, 200


 class WorkspaceListApi(Resource):
@@ -64,9 +64,24 @@ class PluginUploadFileApi(Resource):

             extension = guess_extension(tool_file.mimetype) or ".bin"
             preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension)
-            tool_file.mime_type = mimetype
-            tool_file.extension = extension
-            tool_file.preview_url = preview_url
+
+            # Create a dictionary with all the necessary attributes
+            result = {
+                "id": tool_file.id,
+                "user_id": tool_file.user_id,
+                "tenant_id": tool_file.tenant_id,
+                "conversation_id": tool_file.conversation_id,
+                "file_key": tool_file.file_key,
+                "mimetype": tool_file.mimetype,
+                "original_url": tool_file.original_url,
+                "name": tool_file.name,
+                "size": tool_file.size,
+                "mime_type": mimetype,
+                "extension": extension,
+                "preview_url": preview_url,
+            }
+
+            return result, 201
         except services.errors.file.FileTooLargeError as file_too_large_error:
             raise FileTooLargeError(file_too_large_error.description)
         except services.errors.file.UnsupportedFileTypeError:
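One detail in this hunk worth keeping in mind is the guess_extension(tool_file.mimetype) or ".bin" fallback. Assuming guess_extension here is (or behaves like) the standard-library helper, it returns None for MIME types it does not recognize, so the or supplies a usable default:

    from mimetypes import guess_extension

    print(guess_extension("image/png"))                        # ".png"
    print(guess_extension("application/x-unknown"))            # None for unknown types...
    print(guess_extension("application/x-unknown") or ".bin")  # ...so fall back to ".bin"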
@@ -388,11 +388,22 @@ class DocumentIndexingStatusApi(DatasetApiResource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            if document.is_paused:
-                document.indexing_status = "paused"
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": "paused" if document.is_paused else document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data

@@ -109,6 +109,7 @@ class VariableEntity(BaseModel):
     description: str = ""
     type: VariableEntityType
     required: bool = False
+    hide: bool = False
     max_length: Optional[int] = None
     options: Sequence[str] = Field(default_factory=list)
     allowed_file_types: Sequence[FileType] = Field(default_factory=list)
@@ -26,10 +26,13 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from factories import file_factory
 from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
+from models.enums import WorkflowRunTriggeredFrom
 from services.conversation_service import ConversationService
 from services.errors.message import MessageNotExistsError

@@ -159,8 +162,22 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        if invoke_from == InvokeFrom.DEBUGGER:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING
+        else:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=workflow_triggered_from,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,
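Both repositories share a single sessionmaker bound to the existing engine, created with expire_on_commit=False so objects handed across the repository boundary remain readable after their transaction commits. A reduced sketch of that wiring under stated assumptions (the repository class below is a stand-in, not the real implementation):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite:///:memory:")  # stands in for db.engine

    # expire_on_commit=False keeps attribute access on returned objects from
    # triggering a refresh (or DetachedInstanceError) once the session commits.
    session_factory = sessionmaker(bind=engine, expire_on_commit=False)


    class SQLAlchemyWorkflowExecutionRepository:  # simplified stand-in
        def __init__(self, session_factory, **kwargs):
            self._session_factory = session_factory

        def save(self, execution) -> None:
            with self._session_factory() as session, session.begin():
                session.merge(execution)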
@@ -173,6 +190,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             user=user,
             invoke_from=invoke_from,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             conversation=conversation,
             stream=streaming,
@@ -226,8 +244,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,
@@ -240,6 +268,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             user=user,
             invoke_from=InvokeFrom.DEBUGGER,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             conversation=None,
             stream=streaming,
|
@ -291,8 +320,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
|
|||
contexts.plugin_tool_providers.set({})
|
||||
contexts.plugin_tool_providers_lock.set(threading.Lock())
|
||||
|
||||
# Create workflow node execution repository
|
||||
# Create repositories
|
||||
#
|
||||
# Create session factory
|
||||
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
|
||||
# Create workflow execution(aka workflow run) repository
|
||||
workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
|
||||
session_factory=session_factory,
|
||||
user=user,
|
||||
app_id=application_generate_entity.app_config.app_id,
|
||||
triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
|
||||
)
|
||||
# Create workflow node execution repository
|
||||
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
|
||||
session_factory=session_factory,
|
||||
user=user,
|
||||
|
|
@ -305,6 +344,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
|
|||
user=user,
|
||||
invoke_from=InvokeFrom.DEBUGGER,
|
||||
application_generate_entity=application_generate_entity,
|
||||
workflow_execution_repository=workflow_execution_repository,
|
||||
workflow_node_execution_repository=workflow_node_execution_repository,
|
||||
conversation=None,
|
||||
stream=streaming,
|
||||
|
|
@ -317,6 +357,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
|
|||
user: Union[Account, EndUser],
|
||||
invoke_from: InvokeFrom,
|
||||
application_generate_entity: AdvancedChatAppGenerateEntity,
|
||||
workflow_execution_repository: WorkflowExecutionRepository,
|
||||
workflow_node_execution_repository: WorkflowNodeExecutionRepository,
|
||||
conversation: Optional[Conversation] = None,
|
||||
stream: bool = True,
|
||||
|
|
@ -381,6 +422,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
|
|||
conversation=conversation,
|
||||
message=message,
|
||||
user=user,
|
||||
workflow_execution_repository=workflow_execution_repository,
|
||||
workflow_node_execution_repository=workflow_node_execution_repository,
|
||||
stream=stream,
|
||||
)
|
||||
|
|
@@ -453,6 +495,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         conversation: Conversation,
         message: Message,
         user: Union[Account, EndUser],
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         stream: bool = False,
     ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
@@ -476,9 +519,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             conversation=conversation,
             message=message,
             user=user,
-            stream=stream,
             dialogue_count=self._dialogue_count,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
+            stream=stream,
         )

         try:
@@ -10,6 +10,7 @@ from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
+from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
 from core.app.entities.app_invoke_entities import (
     AdvancedChatAppGenerateEntity,
     InvokeFrom,
@@ -64,6 +65,7 @@ from core.ops.ops_trace_manager import TraceQueueManager
 from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
 from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from events.message_event import message_was_created
@@ -94,6 +96,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         user: Union[Account, EndUser],
         stream: bool,
         dialogue_count: int,
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
     ) -> None:
         self._base_task_pipeline = BasedGenerateTaskPipeline(
@@ -125,9 +128,14 @@ class AdvancedChatAppGenerateTaskPipeline:
                 SystemVariableKey.WORKFLOW_ID: workflow.id,
                 SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id,
             },
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
         )

+        self._workflow_response_converter = WorkflowResponseConverter(
+            application_generate_entity=application_generate_entity,
+        )
+
         self._task_state = WorkflowTaskState()
         self._message_cycle_manager = MessageCycleManage(
             application_generate_entity=application_generate_entity, task_state=self._task_state
@@ -294,21 +302,19 @@ class AdvancedChatAppGenerateTaskPipeline:

             with Session(db.engine, expire_on_commit=False) as session:
                 # init workflow run
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_start(
+                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start(
                     session=session,
                     workflow_id=self._workflow_id,
                     user_id=self._user_id,
                     created_by_role=self._created_by_role,
                 )
-                self._workflow_run_id = workflow_run.id
+                self._workflow_run_id = workflow_execution.id
                 message = self._get_message(session=session)
                 if not message:
                     raise ValueError(f"Message not found: {self._message_id}")
-                message.workflow_run_id = workflow_run.id
-                workflow_start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response(
-                    session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
-                )
+                message.workflow_run_id = workflow_execution.id
+                workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution=workflow_execution,
+                )
                 session.commit()

                 yield workflow_start_resp
         elif isinstance(
@@ -319,13 +325,10 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried(
-                    workflow_run=workflow_run, event=event
-                )
-                node_retry_resp = self._workflow_cycle_manager._workflow_node_retry_to_stream_response(
+            workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
+                workflow_execution_id=self._workflow_run_id, event=event
+            )
+            node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
@@ -338,20 +341,15 @@ class AdvancedChatAppGenerateTaskPipeline:
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start(
-                    workflow_run=workflow_run, event=event
-                )
+            workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start(
+                workflow_execution_id=self._workflow_run_id, event=event
+            )

-                node_start_resp = self._workflow_cycle_manager._workflow_node_start_to_stream_response(
-                    event=event,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_node_execution=workflow_node_execution,
-                )
-                session.commit()
+            node_start_resp = self._workflow_response_converter.workflow_node_start_to_stream_response(
+                event=event,
+                task_id=self._application_generate_entity.task_id,
+                workflow_node_execution=workflow_node_execution,
+            )

             if node_start_resp:
                 yield node_start_resp
@@ -359,15 +357,15 @@ class AdvancedChatAppGenerateTaskPipeline:
             # Record files if it's an answer node or end node
             if event.node_type in [NodeType.ANSWER, NodeType.END]:
                 self._recorded_files.extend(
-                    self._workflow_cycle_manager._fetch_files_from_node_outputs(event.outputs or {})
+                    self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {})
                 )

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success(
-                    event=event
-                )
+            workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(
+                event=event
+            )

-                node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+            node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
@@ -383,11 +381,11 @@ class AdvancedChatAppGenerateTaskPipeline:
             | QueueNodeInLoopFailedEvent
             | QueueNodeExceptionEvent,
         ):
-            workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed(
+            workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(
                 event=event
             )

-            node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+            node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
@@ -399,132 +397,92 @@ class AdvancedChatAppGenerateTaskPipeline:
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                parallel_start_resp = (
-                    self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
-                )
+            parallel_start_resp = (
+                self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )
+            )

             yield parallel_start_resp
         elif isinstance(event, QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                parallel_finish_resp = (
-                    self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
-                )
+            parallel_finish_resp = (
+                self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )
+            )

             yield parallel_finish_resp
         elif isinstance(event, QueueIterationStartEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_start_resp
         elif isinstance(event, QueueIterationNextEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_next_resp
         elif isinstance(event, QueueIterationCompletedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_finish_resp
         elif isinstance(event, QueueLoopStartEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_start_resp
         elif isinstance(event, QueueLoopNextEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_next_resp
         elif isinstance(event, QueueLoopCompletedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_finish_resp
         elif isinstance(event, QueueWorkflowSucceededEvent):
@@ -535,10 +493,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_success(
-                    session=session,
+            workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success(
                 workflow_run_id=self._workflow_run_id,
                 start_at=graph_runtime_state.start_at,
                 total_tokens=graph_runtime_state.total_tokens,
                 total_steps=graph_runtime_state.node_run_steps,
                 outputs=event.outputs,
@@ -546,10 +502,11 @@ class AdvancedChatAppGenerateTaskPipeline:
                 trace_manager=trace_manager,
             )

-            workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
-            )
+            workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                session=session,
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution=workflow_execution,
+            )
+            session.commit()

             yield workflow_finish_resp
             self._base_task_pipeline._queue_manager.publish(
@@ -562,10 +519,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("graph runtime state not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success(
-                    session=session,
+            workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success(
                 workflow_run_id=self._workflow_run_id,
                 start_at=graph_runtime_state.start_at,
                 total_tokens=graph_runtime_state.total_tokens,
                 total_steps=graph_runtime_state.node_run_steps,
                 outputs=event.outputs,
@@ -573,10 +528,11 @@ class AdvancedChatAppGenerateTaskPipeline:
                 conversation_id=None,
                 trace_manager=trace_manager,
             )
-            workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
-            )
+            workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                session=session,
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution=workflow_execution,
+            )
             session.commit()

             yield workflow_finish_resp
             self._base_task_pipeline._queue_manager.publish(
@@ -589,26 +545,25 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("graph runtime state not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
-                    session=session,
+            workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                 workflow_run_id=self._workflow_run_id,
                 start_at=graph_runtime_state.start_at,
                 total_tokens=graph_runtime_state.total_tokens,
                 total_steps=graph_runtime_state.node_run_steps,
                 status=WorkflowRunStatus.FAILED,
-                error=event.error,
+                error_message=event.error,
                 conversation_id=self._conversation_id,
                 trace_manager=trace_manager,
                 exceptions_count=event.exceptions_count,
             )
-            workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
-            )
+            workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                session=session,
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution=workflow_execution,
+            )
-            err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_run.error}"))
+            err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_execution.error_message}"))
             err = self._base_task_pipeline._handle_error(
                 event=err_event, session=session, message_id=self._message_id
             )
             session.commit()

             yield workflow_finish_resp
             yield self._base_task_pipeline._error_to_stream_response(err)
@@ -616,21 +571,19 @@ class AdvancedChatAppGenerateTaskPipeline:
         elif isinstance(event, QueueStopEvent):
             if self._workflow_run_id and graph_runtime_state:
                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
-                        session=session,
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                         workflow_run_id=self._workflow_run_id,
                         start_at=graph_runtime_state.start_at,
                         total_tokens=graph_runtime_state.total_tokens,
                         total_steps=graph_runtime_state.node_run_steps,
                         status=WorkflowRunStatus.STOPPED,
-                        error=event.get_stop_reason(),
+                        error_message=event.get_stop_reason(),
                         conversation_id=self._conversation_id,
                         trace_manager=trace_manager,
                     )
-                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
+                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                         session=session,
                         task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
+                        workflow_execution=workflow_execution,
                     )
                     # Save message
                     self._save_message(session=session, graph_runtime_state=graph_runtime_state)
@@ -711,7 +664,7 @@ class AdvancedChatAppGenerateTaskPipeline:

             yield self._message_end_to_stream_response()
         elif isinstance(event, QueueAgentLogEvent):
-            yield self._workflow_cycle_manager._handle_agent_log(
+            yield self._workflow_response_converter.handle_agent_log(
                 task_id=self._application_generate_entity.task_id, event=event
             )
         else:
@ -0,0 +1,564 @@
|
|||
import time
|
||||
from collections.abc import Mapping, Sequence
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Optional, Union, cast
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
|
||||
from core.app.entities.queue_entities import (
|
||||
QueueAgentLogEvent,
|
||||
QueueIterationCompletedEvent,
|
||||
QueueIterationNextEvent,
|
||||
QueueIterationStartEvent,
|
||||
QueueLoopCompletedEvent,
|
||||
QueueLoopNextEvent,
|
||||
QueueLoopStartEvent,
|
||||
QueueNodeExceptionEvent,
|
||||
QueueNodeFailedEvent,
|
||||
QueueNodeInIterationFailedEvent,
|
||||
QueueNodeInLoopFailedEvent,
|
||||
QueueNodeRetryEvent,
|
||||
QueueNodeStartedEvent,
|
||||
QueueNodeSucceededEvent,
|
||||
QueueParallelBranchRunFailedEvent,
|
||||
QueueParallelBranchRunStartedEvent,
|
||||
QueueParallelBranchRunSucceededEvent,
|
||||
)
|
||||
from core.app.entities.task_entities import (
|
||||
AgentLogStreamResponse,
|
||||
IterationNodeCompletedStreamResponse,
|
||||
IterationNodeNextStreamResponse,
|
||||
IterationNodeStartStreamResponse,
|
||||
LoopNodeCompletedStreamResponse,
|
||||
LoopNodeNextStreamResponse,
|
||||
LoopNodeStartStreamResponse,
|
||||
NodeFinishStreamResponse,
|
||||
NodeRetryStreamResponse,
|
||||
NodeStartStreamResponse,
|
||||
ParallelBranchFinishedStreamResponse,
|
||||
ParallelBranchStartStreamResponse,
|
||||
WorkflowFinishStreamResponse,
|
||||
WorkflowStartStreamResponse,
|
||||
)
|
||||
from core.file import FILE_MODEL_IDENTITY, File
|
||||
from core.tools.tool_manager import ToolManager
|
||||
from core.workflow.entities.node_execution_entities import NodeExecution
|
||||
from core.workflow.entities.workflow_execution_entities import WorkflowExecution
|
||||
from core.workflow.nodes import NodeType
|
||||
from core.workflow.nodes.tool.entities import ToolNodeData
|
||||
from models import (
|
||||
Account,
|
||||
CreatorUserRole,
|
||||
EndUser,
|
||||
WorkflowNodeExecutionStatus,
|
||||
WorkflowRun,
|
||||
)
|
||||
|
||||
|
||||
class WorkflowResponseConverter:
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
|
||||
) -> None:
|
||||
self._application_generate_entity = application_generate_entity
|
||||
|
||||
def workflow_start_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
workflow_execution: WorkflowExecution,
|
||||
) -> WorkflowStartStreamResponse:
|
||||
return WorkflowStartStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_execution.id,
|
||||
data=WorkflowStartStreamResponse.Data(
|
||||
id=workflow_execution.id,
|
||||
workflow_id=workflow_execution.workflow_id,
|
||||
sequence_number=workflow_execution.sequence_number,
|
||||
inputs=workflow_execution.inputs,
|
||||
created_at=int(workflow_execution.started_at.timestamp()),
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_finish_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
session: Session,
|
||||
task_id: str,
|
||||
workflow_execution: WorkflowExecution,
|
||||
) -> WorkflowFinishStreamResponse:
|
||||
created_by = None
|
||||
workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id))
|
||||
assert workflow_run is not None
|
||||
if workflow_run.created_by_role == CreatorUserRole.ACCOUNT:
|
||||
stmt = select(Account).where(Account.id == workflow_run.created_by)
|
||||
account = session.scalar(stmt)
|
||||
if account:
|
||||
created_by = {
|
||||
"id": account.id,
|
||||
"name": account.name,
|
||||
"email": account.email,
|
||||
}
|
||||
elif workflow_run.created_by_role == CreatorUserRole.END_USER:
|
||||
stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
|
||||
end_user = session.scalar(stmt)
|
||||
if end_user:
|
||||
created_by = {
|
||||
"id": end_user.id,
|
||||
"user": end_user.session_id,
|
||||
}
|
||||
else:
|
||||
raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}")
|
||||
|
||||
# Handle the case where finished_at is None by using current time as default
|
||||
finished_at_timestamp = (
|
||||
int(workflow_execution.finished_at.timestamp())
|
||||
if workflow_execution.finished_at
|
||||
else int(datetime.now(UTC).timestamp())
|
||||
)
|
||||
|
||||
return WorkflowFinishStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_execution.id,
|
||||
data=WorkflowFinishStreamResponse.Data(
|
||||
id=workflow_execution.id,
|
||||
workflow_id=workflow_execution.workflow_id,
|
||||
sequence_number=workflow_execution.sequence_number,
|
||||
status=workflow_execution.status,
|
||||
outputs=workflow_execution.outputs,
|
||||
error=workflow_execution.error_message,
|
||||
elapsed_time=workflow_execution.elapsed_time,
|
||||
total_tokens=workflow_execution.total_tokens,
|
||||
total_steps=workflow_execution.total_steps,
|
||||
created_by=created_by,
|
||||
created_at=int(workflow_execution.started_at.timestamp()),
|
||||
finished_at=finished_at_timestamp,
|
||||
files=self.fetch_files_from_node_outputs(workflow_execution.outputs),
|
||||
exceptions_count=workflow_execution.exceptions_count,
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_node_start_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
event: QueueNodeStartedEvent,
|
||||
task_id: str,
|
||||
workflow_node_execution: NodeExecution,
|
||||
) -> Optional[NodeStartStreamResponse]:
|
||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
||||
return None
|
||||
if not workflow_node_execution.workflow_run_id:
|
||||
return None
|
||||
|
||||
response = NodeStartStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
||||
data=NodeStartStreamResponse.Data(
|
||||
id=workflow_node_execution.id,
|
||||
node_id=workflow_node_execution.node_id,
|
||||
node_type=workflow_node_execution.node_type,
|
||||
title=workflow_node_execution.title,
|
||||
index=workflow_node_execution.index,
|
||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
||||
inputs=workflow_node_execution.inputs,
|
||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_start_node_id=event.parallel_start_node_id,
|
||||
parent_parallel_id=event.parent_parallel_id,
|
||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||
iteration_id=event.in_iteration_id,
|
||||
loop_id=event.in_loop_id,
|
||||
parallel_run_id=event.parallel_mode_run_id,
|
||||
agent_strategy=event.agent_strategy,
|
||||
),
|
||||
)
|
||||
|
||||
# extras logic
|
||||
if event.node_type == NodeType.TOOL:
|
||||
node_data = cast(ToolNodeData, event.node_data)
|
||||
response.data.extras["icon"] = ToolManager.get_tool_icon(
|
||||
tenant_id=self._application_generate_entity.app_config.tenant_id,
|
||||
provider_type=node_data.provider_type,
|
||||
provider_id=node_data.provider_id,
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
def workflow_node_finish_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
event: QueueNodeSucceededEvent
|
||||
| QueueNodeFailedEvent
|
||||
| QueueNodeInIterationFailedEvent
|
||||
| QueueNodeInLoopFailedEvent
|
||||
| QueueNodeExceptionEvent,
|
||||
task_id: str,
|
||||
workflow_node_execution: NodeExecution,
|
||||
) -> Optional[NodeFinishStreamResponse]:
|
||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
||||
return None
|
||||
if not workflow_node_execution.workflow_run_id:
|
||||
return None
|
||||
if not workflow_node_execution.finished_at:
|
||||
return None
|
||||
|
||||
return NodeFinishStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
||||
data=NodeFinishStreamResponse.Data(
|
||||
id=workflow_node_execution.id,
|
||||
node_id=workflow_node_execution.node_id,
|
||||
node_type=workflow_node_execution.node_type,
|
||||
index=workflow_node_execution.index,
|
||||
title=workflow_node_execution.title,
|
||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
||||
inputs=workflow_node_execution.inputs,
|
||||
process_data=workflow_node_execution.process_data,
|
||||
outputs=workflow_node_execution.outputs,
|
||||
status=workflow_node_execution.status,
|
||||
error=workflow_node_execution.error,
|
||||
elapsed_time=workflow_node_execution.elapsed_time,
|
||||
execution_metadata=workflow_node_execution.metadata,
|
||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
||||
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
||||
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_start_node_id=event.parallel_start_node_id,
|
||||
parent_parallel_id=event.parent_parallel_id,
|
||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||
iteration_id=event.in_iteration_id,
|
||||
loop_id=event.in_loop_id,
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_node_retry_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
event: QueueNodeRetryEvent,
|
||||
task_id: str,
|
||||
workflow_node_execution: NodeExecution,
|
||||
) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
|
||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
||||
return None
|
||||
if not workflow_node_execution.workflow_run_id:
|
||||
return None
|
||||
if not workflow_node_execution.finished_at:
|
||||
return None
|
||||
|
||||
return NodeRetryStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
||||
data=NodeRetryStreamResponse.Data(
|
||||
id=workflow_node_execution.id,
|
||||
node_id=workflow_node_execution.node_id,
|
||||
node_type=workflow_node_execution.node_type,
|
||||
index=workflow_node_execution.index,
|
||||
title=workflow_node_execution.title,
|
||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
||||
inputs=workflow_node_execution.inputs,
|
||||
process_data=workflow_node_execution.process_data,
|
||||
outputs=workflow_node_execution.outputs,
|
||||
status=workflow_node_execution.status,
|
||||
error=workflow_node_execution.error,
|
||||
elapsed_time=workflow_node_execution.elapsed_time,
|
||||
execution_metadata=workflow_node_execution.metadata,
|
||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
||||
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
||||
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_start_node_id=event.parallel_start_node_id,
|
||||
parent_parallel_id=event.parent_parallel_id,
|
||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||
iteration_id=event.in_iteration_id,
|
||||
loop_id=event.in_loop_id,
|
||||
retry_index=event.retry_index,
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_parallel_branch_start_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
workflow_execution_id: str,
|
||||
event: QueueParallelBranchRunStartedEvent,
|
||||
) -> ParallelBranchStartStreamResponse:
|
||||
return ParallelBranchStartStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_execution_id,
|
||||
data=ParallelBranchStartStreamResponse.Data(
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_branch_id=event.parallel_start_node_id,
|
||||
parent_parallel_id=event.parent_parallel_id,
|
||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||
iteration_id=event.in_iteration_id,
|
||||
loop_id=event.in_loop_id,
|
||||
created_at=int(time.time()),
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_parallel_branch_finished_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
workflow_execution_id: str,
|
||||
event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
|
||||
) -> ParallelBranchFinishedStreamResponse:
|
||||
return ParallelBranchFinishedStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_execution_id,
|
||||
data=ParallelBranchFinishedStreamResponse.Data(
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_branch_id=event.parallel_start_node_id,
|
||||
parent_parallel_id=event.parent_parallel_id,
|
||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||
iteration_id=event.in_iteration_id,
|
||||
loop_id=event.in_loop_id,
|
||||
status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed",
|
||||
error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None,
|
||||
created_at=int(time.time()),
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_iteration_start_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
workflow_execution_id: str,
|
||||
event: QueueIterationStartEvent,
|
||||
) -> IterationNodeStartStreamResponse:
|
||||
return IterationNodeStartStreamResponse(
|
||||
task_id=task_id,
|
||||
workflow_run_id=workflow_execution_id,
|
||||
data=IterationNodeStartStreamResponse.Data(
|
||||
id=event.node_id,
|
||||
node_id=event.node_id,
|
||||
node_type=event.node_type.value,
|
||||
title=event.node_data.title,
|
||||
created_at=int(time.time()),
|
||||
extras={},
|
||||
inputs=event.inputs or {},
|
||||
metadata=event.metadata or {},
|
||||
parallel_id=event.parallel_id,
|
||||
parallel_start_node_id=event.parallel_start_node_id,
|
||||
),
|
||||
)
|
||||
|
||||
def workflow_iteration_next_to_stream_response(
|
||||
self,
|
||||
*,
|
||||
task_id: str,
|
||||
workflow_execution_id: str,
|
||||
event: QueueIterationNextEvent,
|
||||
) -> IterationNodeNextStreamResponse:
|
||||
return IterationNodeNextStreamResponse(
|
||||
task_id=task_id,
|
||||
            workflow_run_id=workflow_execution_id,
            data=IterationNodeNextStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                index=event.index,
                pre_iteration_output=event.output,
                created_at=int(time.time()),
                extras={},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parallel_mode_run_id=event.parallel_mode_run_id,
                duration=event.duration,
            ),
        )

    def workflow_iteration_completed_to_stream_response(
        self,
        *,
        task_id: str,
        workflow_execution_id: str,
        event: QueueIterationCompletedEvent,
    ) -> IterationNodeCompletedStreamResponse:
        return IterationNodeCompletedStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_execution_id,
            data=IterationNodeCompletedStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                outputs=event.outputs,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                status=WorkflowNodeExecutionStatus.SUCCEEDED
                if event.error is None
                else WorkflowNodeExecutionStatus.FAILED,
                error=None,
                elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
                total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
                execution_metadata=event.metadata,
                finished_at=int(time.time()),
                steps=event.steps,
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def workflow_loop_start_to_stream_response(
        self, *, task_id: str, workflow_execution_id: str, event: QueueLoopStartEvent
    ) -> LoopNodeStartStreamResponse:
        return LoopNodeStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_execution_id,
            data=LoopNodeStartStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                metadata=event.metadata or {},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def workflow_loop_next_to_stream_response(
        self,
        *,
        task_id: str,
        workflow_execution_id: str,
        event: QueueLoopNextEvent,
    ) -> LoopNodeNextStreamResponse:
        return LoopNodeNextStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_execution_id,
            data=LoopNodeNextStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                index=event.index,
                pre_loop_output=event.output,
                created_at=int(time.time()),
                extras={},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parallel_mode_run_id=event.parallel_mode_run_id,
                duration=event.duration,
            ),
        )

    def workflow_loop_completed_to_stream_response(
        self,
        *,
        task_id: str,
        workflow_execution_id: str,
        event: QueueLoopCompletedEvent,
    ) -> LoopNodeCompletedStreamResponse:
        return LoopNodeCompletedStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_execution_id,
            data=LoopNodeCompletedStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                outputs=event.outputs,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                status=WorkflowNodeExecutionStatus.SUCCEEDED
                if event.error is None
                else WorkflowNodeExecutionStatus.FAILED,
                error=None,
                elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
                total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
                execution_metadata=event.metadata,
                finished_at=int(time.time()),
                steps=event.steps,
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any] | None) -> Sequence[Mapping[str, Any]]:
        """
        Fetch files from node outputs
        :param outputs_dict: node outputs dict
        :return:
        """
        if not outputs_dict:
            return []

        files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()]
        # Remove None
        files = [file for file in files if file]
        # Flatten list
        # Flatten the list of sequences into a single list of mappings
        flattened_files = [file for sublist in files if sublist for file in sublist]

        # Convert to tuple to match Sequence type
        return tuple(flattened_files)

    def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
        """
        Fetch files from variable value
        :param value: variable value
        :return:
        """
        if not value:
            return []

        files = []
        if isinstance(value, list):
            for item in value:
                file = self._get_file_var_from_value(item)
                if file:
                    files.append(file)
        elif isinstance(value, dict):
            file = self._get_file_var_from_value(value)
            if file:
                files.append(file)

        return files

    def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
        """
        Get file var from value
        :param value: variable value
        :return:
        """
        if not value:
            return None

        if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
            return value
        elif isinstance(value, File):
            return value.to_dict()

        return None

    def handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
        """
        Handle agent log
        :param task_id: task id
        :param event: agent log event
        :return:
        """
        return AgentLogStreamResponse(
            task_id=task_id,
            data=AgentLogStreamResponse.Data(
                node_execution_id=event.node_execution_id,
                id=event.id,
                parent_id=event.parent_id,
                label=event.label,
                error=event.error,
                status=event.status,
                data=event.data,
                metadata=event.metadata,
                node_id=event.node_id,
            ),
        )
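For orientation, a minimal sketch of how a consumer might drive one of these converter methods; the `converter` instance, the event object, and the `send_sse_chunk` helper are illustrative, not part of the diff:

    # Translate a queue event into a stream response and serialize it.
    resp = converter.handle_agent_log(task_id="task-123", event=agent_log_event)
    payload = resp.model_dump()  # pydantic v2 serialization of the stream response
    send_sse_chunk(payload)      # hypothetical transport helper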

@@ -18,16 +18,19 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.app.apps.workflow.app_queue_manager import WorkflowAppQueueManager
from core.app.apps.workflow.app_runner import WorkflowAppRunner
from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter
from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from extensions.ext_database import db
from factories import file_factory
from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom
from models.enums import WorkflowRunTriggeredFrom

logger = logging.getLogger(__name__)

@@ -136,9 +139,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())

        # Create workflow node execution repository
        # Create repositories
        #
        # Create session factory
        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)

        # Create workflow execution(aka workflow run) repository
        if invoke_from == InvokeFrom.DEBUGGER:
            workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING
        else:
            workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN
        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
            session_factory=session_factory,
            user=user,
            app_id=application_generate_entity.app_config.app_id,
            triggered_from=workflow_triggered_from,
        )
        # Create workflow node execution repository
        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
            session_factory=session_factory,
            user=user,

@@ -152,6 +168,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            user=user,
            application_generate_entity=application_generate_entity,
            invoke_from=invoke_from,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            streaming=streaming,
            workflow_thread_pool_id=workflow_thread_pool_id,

@@ -165,6 +182,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        user: Union[Account, EndUser],
        application_generate_entity: WorkflowAppGenerateEntity,
        invoke_from: InvokeFrom,
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        streaming: bool = True,
        workflow_thread_pool_id: Optional[str] = None,

@@ -209,6 +227,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            workflow=workflow,
            queue_manager=queue_manager,
            user=user,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            stream=streaming,
        )
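The wiring above reads as: build one session factory, derive both repositories from it, and inject them into the pipeline. A condensed sketch, with argument names taken from the diff and the remaining node-execution-repository arguments elided because they are not shown here:

    session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
    workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
        session_factory=session_factory,
        user=user,
        app_id=app_id,
        triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
    )
    workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=session_factory,
        user=user,
        # remaining constructor arguments follow the same pattern and are elided here
    )

Sharing a single `expire_on_commit=False` session factory lets both repositories open short-lived sessions on demand instead of holding one connection for the whole workflow run.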

@@ -262,6 +281,17 @@ class WorkflowAppGenerator(BaseAppGenerator):
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())

        # Create repositories
        #
        # Create session factory
        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
        # Create workflow execution(aka workflow run) repository
        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
            session_factory=session_factory,
            user=user,
            app_id=application_generate_entity.app_config.app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
        )
        # Create workflow node execution repository
        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)

@@ -278,6 +308,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            user=user,
            invoke_from=InvokeFrom.DEBUGGER,
            application_generate_entity=application_generate_entity,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            streaming=streaming,
        )

@@ -327,6 +358,17 @@ class WorkflowAppGenerator(BaseAppGenerator):
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())

        # Create repositories
        #
        # Create session factory
        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
        # Create workflow execution(aka workflow run) repository
        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
            session_factory=session_factory,
            user=user,
            app_id=application_generate_entity.app_config.app_id,
            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
        )
        # Create workflow node execution repository
        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)

@@ -343,6 +385,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            user=user,
            invoke_from=InvokeFrom.DEBUGGER,
            application_generate_entity=application_generate_entity,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            streaming=streaming,
        )

@@ -400,6 +443,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        workflow: Workflow,
        queue_manager: AppQueueManager,
        user: Union[Account, EndUser],
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
        stream: bool = False,
    ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:

@@ -419,8 +463,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
            workflow=workflow,
            queue_manager=queue_manager,
            user=user,
            stream=stream,
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
            stream=stream,
        )

        try:

@@ -3,10 +3,12 @@ import time
from collections.abc import Generator
from typing import Optional, Union

from sqlalchemy import select
from sqlalchemy.orm import Session

from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
from core.app.entities.app_invoke_entities import (
    InvokeFrom,
    WorkflowAppGenerateEntity,

@@ -53,7 +55,9 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.entities.workflow_execution_entities import WorkflowExecution
from core.workflow.enums import SystemVariableKey
from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_cycle_manager import WorkflowCycleManager
from extensions.ext_database import db

@@ -83,6 +87,7 @@ class WorkflowAppGenerateTaskPipeline:
        queue_manager: AppQueueManager,
        user: Union[Account, EndUser],
        stream: bool,
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
    ) -> None:
        self._base_task_pipeline = BasedGenerateTaskPipeline(

@@ -111,9 +116,14 @@ class WorkflowAppGenerateTaskPipeline:
                SystemVariableKey.WORKFLOW_ID: workflow.id,
                SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id,
            },
            workflow_execution_repository=workflow_execution_repository,
            workflow_node_execution_repository=workflow_node_execution_repository,
        )

        self._workflow_response_converter = WorkflowResponseConverter(
            application_generate_entity=application_generate_entity,
        )

        self._application_generate_entity = application_generate_entity
        self._workflow_id = workflow.id
        self._workflow_features_dict = workflow.features_dict
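After this constructor change the pipeline splits responsibilities: the `WorkflowCycleManager` owns state transitions and persistence through the injected repositories, while the `WorkflowResponseConverter` only turns execution state into stream responses. A simplified sketch of that division of labor (names follow the diff; arguments abbreviated):

    # persistence: the cycle manager mutates state through the injected repositories
    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start(...)
    # presentation: the converter only shapes stream responses from that state
    start_resp = self._workflow_response_converter.workflow_start_to_stream_response(
        task_id=task_id, workflow_execution=workflow_execution
    )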

@@ -258,17 +268,15 @@ class WorkflowAppGenerateTaskPipeline:

                with Session(db.engine, expire_on_commit=False) as session:
                    # init workflow run
                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_start(
                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start(
                        session=session,
                    workflow_id=self._workflow_id,
                    user_id=self._user_id,
                    created_by_role=self._created_by_role,
                )
                    self._workflow_run_id = workflow_run.id
                    start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response(
                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
                self._workflow_run_id = workflow_execution.id
                start_resp = self._workflow_response_converter.workflow_start_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution=workflow_execution,
                )
                    session.commit()

                yield start_resp
            elif isinstance(

@@ -278,13 +286,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")
                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )
                    workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried(
                        workflow_run=workflow_run, event=event
                    )
                    response = self._workflow_cycle_manager._workflow_node_retry_to_stream_response(
                response = self._workflow_response_converter.workflow_node_retry_to_stream_response(
                    event=event,
                    task_id=self._application_generate_entity.task_id,
                    workflow_node_execution=workflow_node_execution,

@@ -297,27 +303,22 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start(
                        workflow_run=workflow_run, event=event
                    )
                    node_start_response = self._workflow_cycle_manager._workflow_node_start_to_stream_response(
                        event=event,
                        task_id=self._application_generate_entity.task_id,
                        workflow_node_execution=workflow_node_execution,
                    )
                    session.commit()
                workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start(
                    workflow_execution_id=self._workflow_run_id, event=event
                )
                node_start_response = self._workflow_response_converter.workflow_node_start_to_stream_response(
                    event=event,
                    task_id=self._application_generate_entity.task_id,
                    workflow_node_execution=workflow_node_execution,
                )

                if node_start_response:
                    yield node_start_response
            elif isinstance(event, QueueNodeSucceededEvent):
                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success(
                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(
                    event=event
                )
                node_success_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
                node_success_response = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                    event=event,
                    task_id=self._application_generate_entity.task_id,
                    workflow_node_execution=workflow_node_execution,
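The same mechanical pattern repeats through the rest of this hunk series: each private `_handle_*` / `_workflow_*_to_stream_response` pair becomes a public `handle_*` on the cycle manager plus a `workflow_*_to_stream_response` on the converter, and the per-event `Session` plus `_get_workflow_run` lookup is replaced by passing the stored run id. Schematically, with `workflow_x` standing in for any of the start/next/completed variants:

    # before: every event handler re-read the run inside its own session
    with Session(db.engine, expire_on_commit=False) as session:
        workflow_run = self._workflow_cycle_manager._get_workflow_run(
            session=session, workflow_run_id=self._workflow_run_id
        )
        resp = self._workflow_cycle_manager._workflow_x_to_stream_response(
            session=session, task_id=task_id, workflow_run=workflow_run, event=event
        )

    # after: the converter receives only the execution id, no session required
    resp = self._workflow_response_converter.workflow_x_to_stream_response(
        task_id=task_id, workflow_execution_id=self._workflow_run_id, event=event
    )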

@@ -332,10 +333,10 @@ class WorkflowAppGenerateTaskPipeline:
                | QueueNodeInLoopFailedEvent
                | QueueNodeExceptionEvent,
            ):
                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed(
                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(
                    event=event,
                )
                node_failed_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
                node_failed_response = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                    event=event,
                    task_id=self._application_generate_entity.task_id,
                    workflow_node_execution=workflow_node_execution,

@@ -348,18 +349,13 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    parallel_start_resp = (
                        self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response(
                            session=session,
                            task_id=self._application_generate_entity.task_id,
                            workflow_run=workflow_run,
                            event=event,
                        )
                parallel_start_resp = (
                    self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
                        task_id=self._application_generate_entity.task_id,
                        workflow_execution_id=self._workflow_run_id,
                        event=event,
                    )
                )

                yield parallel_start_resp

@@ -367,18 +363,13 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    parallel_finish_resp = (
                        self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response(
                            session=session,
                            task_id=self._application_generate_entity.task_id,
                            workflow_run=workflow_run,
                            event=event,
                        )
                parallel_finish_resp = (
                    self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
                        task_id=self._application_generate_entity.task_id,
                        workflow_execution_id=self._workflow_run_id,
                        event=event,
                    )
                )

                yield parallel_finish_resp

@@ -386,16 +377,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield iter_start_resp

@@ -403,16 +389,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield iter_next_resp

@@ -420,16 +401,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield iter_finish_resp

@@ -437,16 +413,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield loop_start_resp

@@ -454,16 +425,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield loop_next_resp

@@ -471,16 +437,11 @@ class WorkflowAppGenerateTaskPipeline:
                if not self._workflow_run_id:
                    raise ValueError("workflow run not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
                        session=session, workflow_run_id=self._workflow_run_id
                    )
                    loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        event=event,
                    )
                loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response(
                    task_id=self._application_generate_entity.task_id,
                    workflow_execution_id=self._workflow_run_id,
                    event=event,
                )

                yield loop_finish_resp

@@ -491,10 +452,8 @@ class WorkflowAppGenerateTaskPipeline:
                    raise ValueError("graph runtime state not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_success(
                        session=session,
                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success(
                        workflow_run_id=self._workflow_run_id,
                        start_at=graph_runtime_state.start_at,
                        total_tokens=graph_runtime_state.total_tokens,
                        total_steps=graph_runtime_state.node_run_steps,
                        outputs=event.outputs,

@@ -503,12 +462,12 @@ class WorkflowAppGenerateTaskPipeline:
                    )

                    # save workflow app log
                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_run=workflow_run,
                        workflow_execution=workflow_execution,
                    )
                    session.commit()

@@ -520,10 +479,8 @@ class WorkflowAppGenerateTaskPipeline:
                    raise ValueError("graph runtime state not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success(
                        session=session,
                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success(
                        workflow_run_id=self._workflow_run_id,
                        start_at=graph_runtime_state.start_at,
                        total_tokens=graph_runtime_state.total_tokens,
                        total_steps=graph_runtime_state.node_run_steps,
                        outputs=event.outputs,

@@ -533,10 +490,12 @@ class WorkflowAppGenerateTaskPipeline:
                    )

                    # save workflow app log
                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_execution=workflow_execution,
                    )
                    session.commit()

@@ -548,26 +507,28 @@ class WorkflowAppGenerateTaskPipeline:
                    raise ValueError("graph runtime state not initialized.")

                with Session(db.engine, expire_on_commit=False) as session:
                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
                        session=session,
                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                        workflow_run_id=self._workflow_run_id,
                        start_at=graph_runtime_state.start_at,
                        total_tokens=graph_runtime_state.total_tokens,
                        total_steps=graph_runtime_state.node_run_steps,
                        status=WorkflowRunStatus.FAILED
                        if isinstance(event, QueueWorkflowFailedEvent)
                        else WorkflowRunStatus.STOPPED,
                        error=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(),
                        error_message=event.error
                        if isinstance(event, QueueWorkflowFailedEvent)
                        else event.get_stop_reason(),
                        conversation_id=None,
                        trace_manager=trace_manager,
                        exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0,
                    )

                    # save workflow app log
                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                        session=session,
                        task_id=self._application_generate_entity.task_id,
                        workflow_execution=workflow_execution,
                    )
                    session.commit()

@@ -586,7 +547,7 @@ class WorkflowAppGenerateTaskPipeline:
                        delta_text, from_variable_selector=event.from_variable_selector
                    )
            elif isinstance(event, QueueAgentLogEvent):
                yield self._workflow_cycle_manager._handle_agent_log(
                yield self._workflow_response_converter.handle_agent_log(
                    task_id=self._application_generate_entity.task_id, event=event
                )
            else:

@@ -595,11 +556,9 @@ class WorkflowAppGenerateTaskPipeline:
        if tts_publisher:
            tts_publisher.publish(None)

    def _save_workflow_app_log(self, *, session: Session, workflow_run: WorkflowRun) -> None:
        """
        Save workflow app log.
        :return:
        """
    def _save_workflow_app_log(self, *, session: Session, workflow_execution: WorkflowExecution) -> None:
        workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id))
        assert workflow_run is not None
        invoke_from = self._application_generate_entity.invoke_from
        if invoke_from == InvokeFrom.SERVICE_API:
            created_from = WorkflowAppLogCreatedFrom.SERVICE_API
@@ -190,7 +190,7 @@ class WorkflowStartStreamResponse(StreamResponse):
        id: str
        workflow_id: str
        sequence_number: int
        inputs: dict
        inputs: Mapping[str, Any]
        created_at: int

    event: StreamEvent = StreamEvent.WORKFLOW_STARTED

@@ -212,7 +212,7 @@ class WorkflowFinishStreamResponse(StreamResponse):
        workflow_id: str
        sequence_number: int
        status: str
        outputs: Optional[dict] = None
        outputs: Optional[Mapping[str, Any]] = None
        error: Optional[str] = None
        elapsed_time: float
        total_tokens: int

@@ -788,7 +788,7 @@ class WorkflowAppBlockingResponse(AppBlockingResponse):
        id: str
        workflow_id: str
        status: str
        outputs: Optional[dict] = None
        outputs: Optional[Mapping[str, Any]] = None
        error: Optional[str] = None
        elapsed_time: float
        total_tokens: int
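Widening `dict` to `Mapping[str, Any]` makes the declared interface read-only: any mapping (a plain dict, a `MappingProxyType`, an immutable wrapper) is accepted, and callers are signalled not to mutate it. A quick standalone illustration of the difference:

    from collections.abc import Mapping
    from types import MappingProxyType
    from typing import Any

    inputs: Mapping[str, Any] = MappingProxyType({"query": "hi"})
    print(inputs["query"])   # reads work exactly as with a plain dict
    inputs["query"] = "bye"  # TypeError: 'mappingproxy' object does not support item assignment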

@@ -754,7 +754,7 @@ class ProviderConfiguration(BaseModel):
        :param only_active: return active model only
        :return:
        """
        provider_models = self.get_provider_models(model_type, only_active)
        provider_models = self.get_provider_models(model_type, only_active, model)

        for provider_model in provider_models:
            if provider_model.model == model:

@@ -763,12 +763,13 @@ class ProviderConfiguration(BaseModel):
        return None

    def get_provider_models(
        self, model_type: Optional[ModelType] = None, only_active: bool = False
        self, model_type: Optional[ModelType] = None, only_active: bool = False, model: Optional[str] = None
    ) -> list[ModelWithProviderEntity]:
        """
        Get provider models.
        :param model_type: model type
        :param only_active: only active models
        :param model: model name
        :return:
        """
        model_provider_factory = ModelProviderFactory(self.tenant_id)

@@ -791,7 +792,10 @@ class ProviderConfiguration(BaseModel):
            )
        else:
            provider_models = self._get_custom_provider_models(
                model_types=model_types, provider_schema=provider_schema, model_setting_map=model_setting_map
                model_types=model_types,
                provider_schema=provider_schema,
                model_setting_map=model_setting_map,
                model=model,
            )

        if only_active:

@@ -943,6 +947,7 @@ class ProviderConfiguration(BaseModel):
        model_types: Sequence[ModelType],
        provider_schema: ProviderEntity,
        model_setting_map: dict[ModelType, dict[str, ModelSettings]],
        model: Optional[str] = None,
    ) -> list[ModelWithProviderEntity]:
        """
        Get custom provider models.

@@ -995,7 +1000,8 @@ class ProviderConfiguration(BaseModel):
        for model_configuration in self.custom_configuration.models:
            if model_configuration.model_type not in model_types:
                continue

            if model and model != model_configuration.model:
                continue
            try:
                custom_model_schema = self.get_model_schema(
                    model_type=model_configuration.model_type,
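With the new optional `model` parameter, a caller that needs a single model can push the filter down instead of building schemas for every configured model and scanning the result. Hypothetical usage (the `provider_configuration` instance and model name are illustrative):

    # Only the matching custom model entry is built and returned;
    # all other configured models are skipped early in the loop.
    models = provider_configuration.get_provider_models(
        model_type=ModelType.LLM,
        only_active=True,
        model="gpt-4o",
    )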

@@ -115,6 +115,7 @@ class OpikDataTrace(BaseTraceInstance):
            "metadata": workflow_metadata,
            "input": wrap_dict("input", trace_info.workflow_run_inputs),
            "output": wrap_dict("output", trace_info.workflow_run_outputs),
            "thread_id": trace_info.conversation_id,
            "tags": ["message", "workflow"],
            "project_name": self.project,
        }

@@ -144,6 +145,7 @@ class OpikDataTrace(BaseTraceInstance):
            "metadata": workflow_metadata,
            "input": wrap_dict("input", trace_info.workflow_run_inputs),
            "output": wrap_dict("output", trace_info.workflow_run_outputs),
            "thread_id": trace_info.conversation_id,
            "tags": ["workflow"],
            "project_name": self.project,
        }

@@ -306,6 +308,7 @@ class OpikDataTrace(BaseTraceInstance):
            "metadata": wrap_metadata(metadata),
            "input": trace_info.inputs,
            "output": message_data.answer,
            "thread_id": message_data.conversation_id,
            "tags": ["message", str(trace_info.conversation_mode)],
            "project_name": self.project,
        }

@@ -420,6 +423,7 @@ class OpikDataTrace(BaseTraceInstance):
            "metadata": wrap_metadata(trace_info.metadata),
            "input": trace_info.inputs,
            "output": trace_info.outputs,
            "thread_id": trace_info.conversation_id,
            "tags": ["generate_name"],
            "project_name": self.project,
        }

@@ -30,6 +30,7 @@ from core.ops.entities.trace_entity import (
    WorkflowTraceInfo,
)
from core.ops.utils import get_message_data
from core.workflow.entities.workflow_execution_entities import WorkflowExecution
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig

@@ -234,7 +235,11 @@ class OpsTraceManager:
            return None

        tracing_provider = app_ops_trace_config.get("tracing_provider")
        if tracing_provider is None or tracing_provider not in provider_config_map:
        if tracing_provider is None:
            return None
        try:
            provider_config_map[tracing_provider]
        except KeyError:
            return None

        # decrypt_token

@@ -287,10 +292,14 @@ class OpsTraceManager:
        :return:
        """
        # auth check
        try:
            provider_config_map[tracing_provider]
        except KeyError:
            raise ValueError(f"Invalid tracing provider: {tracing_provider}")
        if enabled == True:
            try:
                provider_config_map[tracing_provider]
            except KeyError:
                raise ValueError(f"Invalid tracing provider: {tracing_provider}")
        else:
            if tracing_provider is not None:
                raise ValueError(f"Invalid tracing provider: {tracing_provider}")

        app_config: Optional[App] = db.session.query(App).filter(App.id == app_id).first()
        if not app_config:
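The `try/except KeyError` probe is just a membership test on `provider_config_map`; the new branching only enforces it while tracing is being enabled, and requires `tracing_provider` to be None when tracing is being disabled. An equivalent, more direct formulation (a sketch, not the committed code):

    if enabled:
        if tracing_provider not in provider_config_map:
            raise ValueError(f"Invalid tracing provider: {tracing_provider}")
    elif tracing_provider is not None:
        raise ValueError(f"Invalid tracing provider: {tracing_provider}")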

@@ -369,7 +378,7 @@ class TraceTask:
        self,
        trace_type: Any,
        message_id: Optional[str] = None,
        workflow_run: Optional[WorkflowRun] = None,
        workflow_execution: Optional[WorkflowExecution] = None,
        conversation_id: Optional[str] = None,
        user_id: Optional[str] = None,
        timer: Optional[Any] = None,

@@ -377,7 +386,7 @@ class TraceTask:
    ):
        self.trace_type = trace_type
        self.message_id = message_id
        self.workflow_run_id = workflow_run.id if workflow_run else None
        self.workflow_run_id = workflow_execution.id if workflow_execution else None
        self.conversation_id = conversation_id
        self.user_id = user_id
        self.timer = timer

@@ -405,7 +405,29 @@ class RetrievalService:
                record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks")  # type: ignore
                record["score"] = segment_child_map[record["segment"].id]["max_score"]

            return [RetrievalSegments(**record) for record in records]
            result = []
            for record in records:
                # Extract segment
                segment = record["segment"]

                # Extract child_chunks, ensuring it's a list or None
                child_chunks = record.get("child_chunks")
                if not isinstance(child_chunks, list):
                    child_chunks = None

                # Extract score, ensuring it's a float or None
                score_value = record.get("score")
                score = (
                    float(score_value)
                    if score_value is not None and isinstance(score_value, int | float | str)
                    else None
                )

                # Create RetrievalSegments object
                retrieval_segment = RetrievalSegments(segment=segment, child_chunks=child_chunks, score=score)
                result.append(retrieval_segment)

            return result
        except Exception as e:
            db.session.rollback()
            raise e
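The rewritten loop normalizes loosely typed records before constructing `RetrievalSegments`, instead of splatting raw dicts. For example, with an illustrative record:

    record = {"segment": segment, "child_chunks": "not-a-list", "score": "0.83"}
    # child_chunks is not a list           -> coerced to None
    # score is a numeric str               -> float("0.83") == 0.83
    # score missing or of another type     -> stays None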

@@ -23,7 +23,8 @@ logger = logging.getLogger(__name__)
class OpenSearchConfig(BaseModel):
    host: str
    port: int
    secure: bool = False
    secure: bool = False  # use_ssl
    verify_certs: bool = True
    auth_method: Literal["basic", "aws_managed_iam"] = "basic"
    user: Optional[str] = None
    password: Optional[str] = None

@@ -42,6 +43,8 @@ class OpenSearchConfig(BaseModel):
                raise ValueError("config OPENSEARCH_AWS_REGION is required for AWS_MANAGED_IAM auth method")
            if not values.get("aws_service"):
                raise ValueError("config OPENSEARCH_AWS_SERVICE is required for AWS_MANAGED_IAM auth method")
        if not values.get("secure") and values.get("verify_certs"):
            raise ValueError("verify_certs=True requires secure (HTTPS) connection")
        return values

    def create_aws_managed_iam_auth(self) -> Urllib3AWSV4SignerAuth:

@@ -57,7 +60,7 @@ class OpenSearchConfig(BaseModel):
        params = {
            "hosts": [{"host": self.host, "port": self.port}],
            "use_ssl": self.secure,
            "verify_certs": self.verify_certs,
            "verify_certs": self.verify_certs,
            "connection_class": Urllib3HttpConnection,
            "pool_maxsize": 20,
        }
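With the new flag, TLS transport and certificate verification are decoupled: previously `verify_certs` simply mirrored `secure`. For a production cluster both should stay on; for a development cluster with self-signed certificates one might keep TLS but disable verification. Assuming such a dev setup (host name illustrative), the resulting client kwargs would look like:

    params = {
        "hosts": [{"host": "opensearch.dev.internal", "port": 9200}],
        "use_ssl": True,        # secure=True: keep TLS transport
        "verify_certs": False,  # tolerate self-signed certificates in dev
        "connection_class": Urllib3HttpConnection,
        "pool_maxsize": 20,
    }

The validator above rules out the one inconsistent combination: asking for certificate verification over a plain-HTTP connection.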

@@ -279,6 +282,7 @@ class OpenSearchVectorFactory(AbstractVectorFactory):
            host=dify_config.OPENSEARCH_HOST or "localhost",
            port=dify_config.OPENSEARCH_PORT,
            secure=dify_config.OPENSEARCH_SECURE,
            verify_certs=dify_config.OPENSEARCH_VERIFY_CERTS,
            auth_method=dify_config.OPENSEARCH_AUTH_METHOD.value,
            user=dify_config.OPENSEARCH_USER,
            password=dify_config.OPENSEARCH_PASSWORD,

@@ -271,12 +271,15 @@ class TencentVector(BaseVector):

        for result in res[0]:
            meta = result.get(self.field_metadata)
            if isinstance(meta, str):
                # Compatible with version 1.1.3 and below.
                meta = json.loads(meta)
            score = 1 - result.get("score", 0.0)
            score = result.get("score", 0.0)
            if score > score_threshold:
                meta["score"] = score
                doc = Document(page_content=result.get(self.field_text), metadata=meta)
                docs.append(doc)

        return docs
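The comparison semantics change here: the raw value was previously treated as a distance and flipped (`1 - score`), so smaller raw values passed the threshold; now it is treated as a similarity and compared directly. Concretely, with `score_threshold = 0.5` and a hit returned with a raw score of 0.8:

    old: 1 - 0.8 = 0.2  ->  0.2 > 0.5 is False, the hit is dropped
    new:       0.8      ->  0.8 > 0.5 is True,  the hit is kept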

    def delete(self) -> None:

@@ -190,7 +190,7 @@ class DatasetRetrieval:
            retrieve_config.rerank_mode or "reranking_model",
            retrieve_config.reranking_model,
            retrieve_config.weights,
            retrieve_config.reranking_enabled or True,
            True if retrieve_config.reranking_enabled is None else retrieve_config.reranking_enabled,
            message_id,
            metadata_filter_document_ids,
            metadata_condition,
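The old expression `retrieve_config.reranking_enabled or True` always evaluates to True: when the flag is explicitly False, `False or True` is still True, so an explicit opt-out was silently ignored. The replacement only applies the default when the flag is unset:

    flag = False
    flag or True                    # -> True  (bug: the explicit False is overridden)
    True if flag is None else flag  # -> False (explicit value survives; None still defaults to True)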

@@ -0,0 +1,242 @@
"""
SQLAlchemy implementation of the WorkflowExecutionRepository.
"""

import json
import logging
from typing import Optional, Union

from sqlalchemy import select
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker

from core.workflow.entities.workflow_execution_entities import (
    WorkflowExecution,
    WorkflowExecutionStatus,
    WorkflowType,
)
from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
from models import (
    Account,
    CreatorUserRole,
    EndUser,
    WorkflowRun,
)
from models.enums import WorkflowRunTriggeredFrom

logger = logging.getLogger(__name__)


class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
    """
    SQLAlchemy implementation of the WorkflowExecutionRepository interface.

    This implementation supports multi-tenancy by filtering operations based on tenant_id.
    Each method creates its own session, handles the transaction, and commits changes
    to the database. This prevents long-running connections in the workflow core.

    This implementation also includes an in-memory cache for workflow executions to improve
    performance by reducing database queries.
    """

    def __init__(
        self,
        session_factory: sessionmaker | Engine,
        user: Union[Account, EndUser],
        app_id: Optional[str],
        triggered_from: Optional[WorkflowRunTriggeredFrom],
    ):
        """
        Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.

        Args:
            session_factory: SQLAlchemy sessionmaker or engine for creating sessions
            user: Account or EndUser object containing tenant_id, user ID, and role information
            app_id: App ID for filtering by application (can be None)
            triggered_from: Source of the execution trigger (DEBUGGING or APP_RUN)
        """
        # If an engine is provided, create a sessionmaker from it
        if isinstance(session_factory, Engine):
            self._session_factory = sessionmaker(bind=session_factory, expire_on_commit=False)
        elif isinstance(session_factory, sessionmaker):
            self._session_factory = session_factory
        else:
            raise ValueError(
                f"Invalid session_factory type {type(session_factory).__name__}; expected sessionmaker or Engine"
            )

        # Extract tenant_id from user
        tenant_id: str | None = user.tenant_id if isinstance(user, EndUser) else user.current_tenant_id
        if not tenant_id:
            raise ValueError("User must have a tenant_id or current_tenant_id")
        self._tenant_id = tenant_id

        # Store app context
        self._app_id = app_id

        # Extract user context
        self._triggered_from = triggered_from
        self._creator_user_id = user.id

        # Determine user role based on user type
        self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER

        # Initialize in-memory cache for workflow executions
        # Key: execution_id, Value: WorkflowRun (DB model)
        self._execution_cache: dict[str, WorkflowRun] = {}

    def _to_domain_model(self, db_model: WorkflowRun) -> WorkflowExecution:
        """
        Convert a database model to a domain model.

        Args:
            db_model: The database model to convert

        Returns:
            The domain model
        """
        # Parse JSON fields
        inputs = db_model.inputs_dict
        outputs = db_model.outputs_dict
        graph = db_model.graph_dict

        # Convert status to domain enum
        status = WorkflowExecutionStatus(db_model.status)

        return WorkflowExecution(
            id=db_model.id,
            workflow_id=db_model.workflow_id,
            sequence_number=db_model.sequence_number,
            type=WorkflowType(db_model.type),
            workflow_version=db_model.version,
            graph=graph,
            inputs=inputs,
            outputs=outputs,
            status=status,
            error_message=db_model.error or "",
            total_tokens=db_model.total_tokens,
            total_steps=db_model.total_steps,
            exceptions_count=db_model.exceptions_count,
            started_at=db_model.created_at,
            finished_at=db_model.finished_at,
        )

    def _to_db_model(self, domain_model: WorkflowExecution) -> WorkflowRun:
        """
        Convert a domain model to a database model.

        Args:
            domain_model: The domain model to convert

        Returns:
            The database model
        """
        # Use values from constructor if provided
        if not self._triggered_from:
            raise ValueError("triggered_from is required in repository constructor")
        if not self._creator_user_id:
            raise ValueError("created_by is required in repository constructor")
        if not self._creator_user_role:
            raise ValueError("created_by_role is required in repository constructor")

        db_model = WorkflowRun()
        db_model.id = domain_model.id
        db_model.tenant_id = self._tenant_id
        if self._app_id is not None:
            db_model.app_id = self._app_id
        db_model.workflow_id = domain_model.workflow_id
        db_model.triggered_from = self._triggered_from
        db_model.sequence_number = domain_model.sequence_number
        db_model.type = domain_model.type
        db_model.version = domain_model.workflow_version
        db_model.graph = json.dumps(domain_model.graph) if domain_model.graph else None
        db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None
        db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None
        db_model.status = domain_model.status
        db_model.error = domain_model.error_message if domain_model.error_message else None
        db_model.total_tokens = domain_model.total_tokens
        db_model.total_steps = domain_model.total_steps
        db_model.exceptions_count = domain_model.exceptions_count
        db_model.created_by_role = self._creator_user_role
        db_model.created_by = self._creator_user_id
        db_model.created_at = domain_model.started_at
        db_model.finished_at = domain_model.finished_at

        # Calculate elapsed time if finished_at is available
        if domain_model.finished_at:
            db_model.elapsed_time = (domain_model.finished_at - domain_model.started_at).total_seconds()
        else:
            db_model.elapsed_time = 0

        return db_model

    def save(self, execution: WorkflowExecution) -> None:
        """
        Save or update a WorkflowExecution domain entity to the database.

        This method serves as a domain-to-database adapter that:
        1. Converts the domain entity to its database representation
        2. Persists the database model using SQLAlchemy's merge operation
        3. Maintains proper multi-tenancy by including tenant context during conversion
        4. Updates the in-memory cache for faster subsequent lookups

        The method handles both creating new records and updating existing ones through
        SQLAlchemy's merge operation.

        Args:
            execution: The WorkflowExecution domain entity to persist
        """
        # Convert domain model to database model using tenant context and other attributes
        db_model = self._to_db_model(execution)

        # Create a new database session
        with self._session_factory() as session:
            # SQLAlchemy merge intelligently handles both insert and update operations
            # based on the presence of the primary key
            session.merge(db_model)
            session.commit()

        # Update the in-memory cache for faster subsequent lookups
        logger.debug(f"Updating cache for execution_id: {db_model.id}")
        self._execution_cache[db_model.id] = db_model

    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
        """
        Retrieve a WorkflowExecution by its ID.

        First checks the in-memory cache, and if not found, queries the database.
        If found in the database, adds it to the cache for future lookups.

        Args:
            execution_id: The workflow execution ID

        Returns:
            The WorkflowExecution instance if found, None otherwise
        """
        # First check the cache
        if execution_id in self._execution_cache:
            logger.debug(f"Cache hit for execution_id: {execution_id}")
            # Convert cached DB model to domain model
            cached_db_model = self._execution_cache[execution_id]
            return self._to_domain_model(cached_db_model)

        # If not in cache, query the database
        logger.debug(f"Cache miss for execution_id: {execution_id}, querying database")
        with self._session_factory() as session:
            stmt = select(WorkflowRun).where(
                WorkflowRun.id == execution_id,
                WorkflowRun.tenant_id == self._tenant_id,
            )

            if self._app_id:
                stmt = stmt.where(WorkflowRun.app_id == self._app_id)

            db_model = session.scalar(stmt)
            if db_model:
                # Add DB model to cache
                self._execution_cache[execution_id] = db_model

                # Convert to domain model and return
                return self._to_domain_model(db_model)

        return None
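A minimal usage sketch of the new repository; the account, app id, and `execution` object are hypothetical placeholders for a populated `WorkflowExecution`:

    repo = SQLAlchemyWorkflowExecutionRepository(
        session_factory=sessionmaker(bind=db.engine, expire_on_commit=False),
        user=current_account,
        app_id="app-1234",
        triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
    )
    repo.save(execution)           # insert-or-update via session.merge(), then cached
    same = repo.get(execution.id)  # second lookup is served from the in-memory cache

Note that the cache stores DB models rather than domain models, so reads convert on the way out; this keeps one canonical representation and avoids the two drifting apart.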

@@ -4,13 +4,14 @@ SQLAlchemy implementation of the WorkflowNodeExecutionRepository.

import json
import logging
from collections.abc import Mapping, Sequence
from typing import Any, Optional, Union, cast
from collections.abc import Sequence
from typing import Optional, Union

from sqlalchemy import UnaryExpression, asc, delete, desc, select
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker

from core.model_runtime.utils.encoders import jsonable_encoder
from core.workflow.entities.node_entities import NodeRunMetadataKey
from core.workflow.entities.node_execution_entities import (
    NodeExecution,

@@ -85,8 +86,8 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
        self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER

        # Initialize in-memory cache for node executions
        # Key: node_execution_id, Value: NodeExecution
        self._node_execution_cache: dict[str, NodeExecution] = {}
        # Key: node_execution_id, Value: WorkflowNodeExecution (DB model)
        self._node_execution_cache: dict[str, WorkflowNodeExecution] = {}

    def _to_domain_model(self, db_model: WorkflowNodeExecution) -> NodeExecution:
        """

@@ -102,7 +103,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
        inputs = db_model.inputs_dict
        process_data = db_model.process_data_dict
        outputs = db_model.outputs_dict
        metadata = db_model.execution_metadata_dict
        metadata = {NodeRunMetadataKey(k): v for k, v in db_model.execution_metadata_dict.items()}

        # Convert status to domain enum
        status = NodeExecutionStatus(db_model.status)

@@ -123,12 +124,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
            status=status,
            error=db_model.error,
            elapsed_time=db_model.elapsed_time,
            # FIXME(QuantumGhost): a temporary workaround for the following type check failure in Python 3.11.
            # However, this problem does not occur in Python 3.12.
            #
            # A case of this error is:
            # https://github.com/langgenius/dify/actions/runs/15112698604/job/42475659482?pr=19737#step:9:24
            metadata=cast(Mapping[NodeRunMetadataKey, Any] | None, metadata),
            metadata=metadata,
            created_at=db_model.created_at,
            finished_at=db_model.finished_at,
        )

@@ -171,7 +167,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
        db_model.status = domain_model.status
        db_model.error = domain_model.error
        db_model.elapsed_time = domain_model.elapsed_time
        db_model.execution_metadata = json.dumps(domain_model.metadata) if domain_model.metadata else None
        db_model.execution_metadata = (
            json.dumps(jsonable_encoder(domain_model.metadata)) if domain_model.metadata else None
        )
        db_model.created_at = domain_model.created_at
        db_model.created_by_role = self._creator_user_role
        db_model.created_by = self._creator_user_id
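The `jsonable_encoder` pass matters because node metadata values are not guaranteed to be JSON-primitive: they may hold Decimals, datetimes, or nested models, which plain `json.dumps` rejects. A sketch of the failure mode with an illustrative value:

    import json
    from decimal import Decimal

    payload = {"total_price": Decimal("0.004")}
    json.dumps(payload)                     # TypeError: Object of type Decimal is not JSON serializable
    json.dumps(jsonable_encoder(payload))   # '{"total_price": 0.004}'

Encoding first converts every key and value to a JSON-safe type before serialization.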
|
||||
|
|
@ -208,7 +206,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
|
|||
# Only cache if we have a node_execution_id to use as the cache key
|
||||
if db_model.node_execution_id:
|
||||
logger.debug(f"Updating cache for node_execution_id: {db_model.node_execution_id}")
|
||||
self._node_execution_cache[db_model.node_execution_id] = execution
|
||||
self._node_execution_cache[db_model.node_execution_id] = db_model
|
||||
|
||||
def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]:
|
||||
"""
|
||||
|
|
@ -226,7 +224,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
|
|||
# First check the cache
|
||||
if node_execution_id in self._node_execution_cache:
|
||||
logger.debug(f"Cache hit for node_execution_id: {node_execution_id}")
|
||||
return self._node_execution_cache[node_execution_id]
|
||||
# Convert cached DB model to domain model
|
||||
cached_db_model = self._node_execution_cache[node_execution_id]
|
||||
return self._to_domain_model(cached_db_model)
|
||||
|
||||
# If not in cache, query the database
|
||||
logger.debug(f"Cache miss for node_execution_id: {node_execution_id}, querying database")
|
||||
|
|
@@ -241,26 +241,25 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
            db_model = session.scalar(stmt)
            if db_model:
                # Convert to domain model
                domain_model = self._to_domain_model(db_model)
                # Add DB model to cache
                self._node_execution_cache[node_execution_id] = db_model

                # Add to cache
                self._node_execution_cache[node_execution_id] = domain_model

                return domain_model
                # Convert to domain model and return
                return self._to_domain_model(db_model)

            return None

    def get_by_workflow_run(
    def get_db_models_by_workflow_run(
        self,
        workflow_run_id: str,
        order_config: Optional[OrderConfig] = None,
    ) -> Sequence[NodeExecution]:
    ) -> Sequence[WorkflowNodeExecution]:
        """
        Retrieve all NodeExecution instances for a specific workflow run.
        Retrieve all WorkflowNodeExecution database models for a specific workflow run.

        This method always queries the database to ensure complete and ordered results,
        but updates the cache with any retrieved executions.
        This method directly returns database models without converting to domain models,
        which is useful when you need to access database-specific fields like triggered_from.
        It also updates the in-memory cache with the retrieved models.

        Args:
            workflow_run_id: The workflow run ID

@@ -269,7 +268,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
            order_config.order_direction: Direction to order ("asc" or "desc")

        Returns:
            A list of NodeExecution instances
            A list of WorkflowNodeExecution database models
        """
        with self._session_factory() as session:
            stmt = select(WorkflowNodeExecution).where(

@@ -298,16 +297,43 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
            db_models = session.scalars(stmt).all()

            # Convert database models to domain models and update cache
            domain_models = []
            # Update the cache with the retrieved DB models
            for model in db_models:
                domain_model = self._to_domain_model(model)
                # Update cache if node_execution_id is present
                if domain_model.node_execution_id:
                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
                domain_models.append(domain_model)
                if model.node_execution_id:
                    self._node_execution_cache[model.node_execution_id] = model

            return domain_models
            return db_models

    def get_by_workflow_run(
        self,
        workflow_run_id: str,
        order_config: Optional[OrderConfig] = None,
    ) -> Sequence[NodeExecution]:
        """
        Retrieve all NodeExecution instances for a specific workflow run.

        This method always queries the database to ensure complete and ordered results,
        but updates the cache with any retrieved executions.

        Args:
            workflow_run_id: The workflow run ID
            order_config: Optional configuration for ordering results
            order_config.order_by: List of fields to order by (e.g., ["index", "created_at"])
            order_config.order_direction: Direction to order ("asc" or "desc")

        Returns:
            A list of NodeExecution instances
        """
        # Get the database models using the new method
        db_models = self.get_db_models_by_workflow_run(workflow_run_id, order_config)

        # Convert database models to domain models
        domain_models = []
        for model in db_models:
            domain_model = self._to_domain_model(model)
            domain_models.append(domain_model)

        return domain_models

    def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]:
        """
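Taken together, these hunks flip the repository cache from holding converted domain models to holding raw WorkflowNodeExecution rows, with conversion deferred to read time. A self-contained sketch of that convert-on-read pattern; the class names below are illustrative stand-ins, not Dify's actual types:

from dataclasses import dataclass
from typing import Optional


@dataclass
class DBRow:  # stand-in for the WorkflowNodeExecution DB model
    node_execution_id: str
    status: str


@dataclass
class DomainModel:  # stand-in for the NodeExecution domain entity
    node_execution_id: str
    status: str


class Repo:
    def __init__(self) -> None:
        # Cache raw DB rows, not converted domain objects.
        self._cache: dict[str, DBRow] = {}

    def _to_domain_model(self, row: DBRow) -> DomainModel:
        return DomainModel(row.node_execution_id, row.status)

    def put(self, row: DBRow) -> None:
        self._cache[row.node_execution_id] = row

    def get(self, node_execution_id: str) -> Optional[DomainModel]:
        # Convert on read: callers that need DB-only fields (e.g. triggered_from)
        # can take the raw rows, while domain callers still get domain objects.
        row = self._cache.get(node_execution_id)
        return self._to_domain_model(row) if row else None


repo = Repo()
repo.put(DBRow("n1", "running"))
print(repo.get("n1"))  # DomainModel(node_execution_id='n1', status='running')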
@@ -337,10 +363,12 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
            domain_models = []

            for model in db_models:
                domain_model = self._to_domain_model(model)
                # Update cache if node_execution_id is present
                if domain_model.node_execution_id:
                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
                if model.node_execution_id:
                    self._node_execution_cache[model.node_execution_id] = model

                # Convert to domain model
                domain_model = self._to_domain_model(model)
                domain_models.append(domain_model)

            return domain_models
@@ -528,7 +528,7 @@ class ToolManager:
                    yield provider

                except Exception:
                    logger.exception(f"load builtin provider {provider}")
                    logger.exception(f"load builtin provider {provider_path}")
                    continue
        # set builtin providers loaded
        cls._builtin_providers_loaded = True
@@ -644,10 +644,10 @@ class ToolManager:
        )

        workflow_provider_controllers: list[WorkflowToolProviderController] = []
        for provider in workflow_providers:
        for workflow_provider in workflow_providers:
            try:
                workflow_provider_controllers.append(
                    ToolTransformService.workflow_provider_to_controller(db_provider=provider)
                    ToolTransformService.workflow_provider_to_controller(db_provider=workflow_provider)
                )
            except Exception:
                # app has been deleted
@@ -125,6 +125,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
            return ""
        # get retrieval model , if the model is not setting , using default
        retrieval_model: dict[str, Any] = dataset.retrieval_model or default_retrieval_model
        retrieval_resource_list = []
        if dataset.indexing_technique == "economy":
            # use keyword table query
            documents = RetrievalService.retrieve(
@@ -181,7 +182,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
                        score=record.score,
                    )
                )
        retrieval_resource_list = []

        if self.return_resource:
            for record in records:
                segment = record.segment
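These two hunks hoist the retrieval_resource_list = [] initialization to the top of the method instead of creating it midway through, so every code path sees a bound name. A small, hypothetical repro of the hazard and the fixed shape:

# Hazard: the name is only bound on one path.
def collect(records, return_resource: bool):
    if return_resource:
        resource_list = list(records)
    return resource_list  # UnboundLocalError when return_resource is False


# Fixed shape, mirroring the diff: initialize first, fill conditionally.
def collect_fixed(records, return_resource: bool):
    resource_list = []
    if return_resource:
        resource_list = list(records)
    return resource_list


print(collect_fixed(["a", "b"], return_resource=False))  # []
try:
    collect(["a", "b"], return_resource=False)
except UnboundLocalError as exc:
    print(f"unfixed version raises: {exc}")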
@@ -1,7 +1,9 @@
import json
import logging
from collections.abc import Generator
from typing import Any, Optional, Union, cast
from typing import Any, Optional, cast

from flask_login import current_user

from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod
from core.tools.__base.tool import Tool

@@ -87,7 +89,7 @@ class WorkflowTool(Tool):
        result = generator.generate(
            app_model=app,
            workflow=workflow,
            user=self._get_user(user_id),
            user=cast("Account | EndUser", current_user),
            args={"inputs": tool_parameters, "files": files},
            invoke_from=self.runtime.invoke_from,
            streaming=False,
@@ -111,20 +113,6 @@ class WorkflowTool(Tool):
            yield self.create_text_message(json.dumps(outputs, ensure_ascii=False))
            yield self.create_json_message(outputs)

    def _get_user(self, user_id: str) -> Union[EndUser, Account]:
        """
        get the user by user id
        """

        user = db.session.query(EndUser).filter(EndUser.id == user_id).first()
        if not user:
            user = db.session.query(Account).filter(Account.id == user_id).first()

        if not user:
            raise ValueError("user not found")

        return user

    def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool":
        """
        fork a new tool with metadata
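WorkflowTool now takes the already-authenticated flask_login.current_user instead of re-querying EndUser/Account by id, and the deleted _get_user helper goes with it. current_user is a loosely typed werkzeug LocalProxy, so the diff narrows it with typing.cast, which is purely a static-typing hint. A tiny sketch of that no-op-at-runtime behavior (the classes below are stand-ins):

from typing import cast


class Account: ...


class EndUser: ...


proxy: object = Account()  # stands in for flask_login.current_user (a LocalProxy)
user = cast("Account | EndUser", proxy)  # informs the type checker only
assert user is proxy  # cast performs no conversion or validation at runtime
print(type(user).__name__)  # Account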
@@ -0,0 +1,91 @@
"""
Domain entities for workflow execution.

Models are independent of the storage mechanism and don't contain
implementation details like tenant_id, app_id, etc.
"""

from collections.abc import Mapping
from datetime import UTC, datetime
from enum import StrEnum
from typing import Any, Optional

from pydantic import BaseModel, Field


class WorkflowType(StrEnum):
    """
    Workflow Type Enum for domain layer
    """

    WORKFLOW = "workflow"
    CHAT = "chat"


class WorkflowExecutionStatus(StrEnum):
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    STOPPED = "stopped"
    PARTIAL_SUCCEEDED = "partial-succeeded"


class WorkflowExecution(BaseModel):
    """
    Domain model for workflow execution based on WorkflowRun but without
    user, tenant, and app attributes.
    """

    id: str = Field(...)
    workflow_id: str = Field(...)
    workflow_version: str = Field(...)
    sequence_number: int = Field(...)

    type: WorkflowType = Field(...)
    graph: Mapping[str, Any] = Field(...)

    inputs: Mapping[str, Any] = Field(...)
    outputs: Optional[Mapping[str, Any]] = None

    status: WorkflowExecutionStatus = WorkflowExecutionStatus.RUNNING
    error_message: str = Field(default="")
    total_tokens: int = Field(default=0)
    total_steps: int = Field(default=0)
    exceptions_count: int = Field(default=0)

    started_at: datetime = Field(...)
    finished_at: Optional[datetime] = None

    @property
    def elapsed_time(self) -> float:
        """
        Calculate elapsed time in seconds.
        If workflow is not finished, use current time.
        """
        end_time = self.finished_at or datetime.now(UTC).replace(tzinfo=None)
        return (end_time - self.started_at).total_seconds()

    @classmethod
    def new(
        cls,
        *,
        id: str,
        workflow_id: str,
        sequence_number: int,
        type: WorkflowType,
        workflow_version: str,
        graph: Mapping[str, Any],
        inputs: Mapping[str, Any],
        started_at: datetime,
    ) -> "WorkflowExecution":
        return WorkflowExecution(
            id=id,
            workflow_id=workflow_id,
            sequence_number=sequence_number,
            type=type,
            workflow_version=workflow_version,
            graph=graph,
            inputs=inputs,
            status=WorkflowExecutionStatus.RUNNING,
            started_at=started_at,
        )
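A usage sketch for the new domain entity, runnable inside Dify's api/ package (the literal ids and graph below are made up). Note that elapsed_time is a derived property, so no stopwatch value ever needs to be stored:

from datetime import UTC, datetime

from core.workflow.entities.workflow_execution_entities import (
    WorkflowExecution,
    WorkflowExecutionStatus,
    WorkflowType,
)

execution = WorkflowExecution.new(
    id="run-1",
    workflow_id="wf-1",
    sequence_number=1,
    type=WorkflowType.WORKFLOW,
    workflow_version="draft",
    graph={"nodes": [], "edges": []},
    inputs={"sys.query": "hello"},
    started_at=datetime.now(UTC).replace(tzinfo=None),
)

print(execution.status is WorkflowExecutionStatus.RUNNING)  # True
print(execution.elapsed_time >= 0)  # True; falls back to now() until finished_at is set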
@@ -0,0 +1,42 @@
from typing import Optional, Protocol

from core.workflow.entities.workflow_execution_entities import WorkflowExecution


class WorkflowExecutionRepository(Protocol):
    """
    Repository interface for WorkflowExecution.

    This interface defines the contract for accessing and manipulating
    WorkflowExecution data, regardless of the underlying storage mechanism.

    Note: Domain-specific concepts like multi-tenancy (tenant_id), application context (app_id),
    and other implementation details should be handled at the implementation level, not in
    the core interface. This keeps the core domain model clean and independent of specific
    application domains or deployment scenarios.
    """

    def save(self, execution: WorkflowExecution) -> None:
        """
        Save or update a WorkflowExecution instance.

        This method handles both creating new records and updating existing ones.
        The implementation should determine whether to create or update based on
        the execution's ID or other identifying fields.

        Args:
            execution: The WorkflowExecution instance to save or update
        """
        ...

    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
        """
        Retrieve a WorkflowExecution by its ID.

        Args:
            execution_id: The workflow execution ID

        Returns:
            The WorkflowExecution instance if found, None otherwise
        """
        ...
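Since the contract is a typing.Protocol, implementations satisfy it structurally, with no inheritance required. A minimal in-memory sketch, runnable inside the api/ package; names beyond the two imports are hypothetical:

from typing import Optional

from core.workflow.entities.workflow_execution_entities import WorkflowExecution
from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository


class InMemoryWorkflowExecutionRepository:
    """Toy implementation for illustration; Dify's real one is SQLAlchemy-backed."""

    def __init__(self) -> None:
        self._store: dict[str, WorkflowExecution] = {}

    def save(self, execution: WorkflowExecution) -> None:
        self._store[execution.id] = execution  # save() doubles as upsert, per the docstring

    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
        return self._store.get(execution_id)


# Because the interface is a Protocol, this assignment type-checks by
# structure alone; no subclassing of WorkflowExecutionRepository is needed.
repo: WorkflowExecutionRepository = InMemoryWorkflowExecutionRepository()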
@@ -1,22 +1,13 @@
import json
import time
from collections.abc import Mapping, Sequence
from collections.abc import Mapping
from datetime import UTC, datetime
from typing import Any, Optional, Union, cast
from typing import Any, Optional, Union
from uuid import uuid4

from sqlalchemy import func, select
from sqlalchemy.orm import Session

from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
from core.app.entities.queue_entities import (
    QueueAgentLogEvent,
    QueueIterationCompletedEvent,
    QueueIterationNextEvent,
    QueueIterationStartEvent,
    QueueLoopCompletedEvent,
    QueueLoopNextEvent,
    QueueLoopStartEvent,
    QueueNodeExceptionEvent,
    QueueNodeFailedEvent,
    QueueNodeInIterationFailedEvent,
@@ -24,50 +15,24 @@ from core.app.entities.queue_entities import (
    QueueNodeRetryEvent,
    QueueNodeStartedEvent,
    QueueNodeSucceededEvent,
    QueueParallelBranchRunFailedEvent,
    QueueParallelBranchRunStartedEvent,
    QueueParallelBranchRunSucceededEvent,
)
from core.app.entities.task_entities import (
    AgentLogStreamResponse,
    IterationNodeCompletedStreamResponse,
    IterationNodeNextStreamResponse,
    IterationNodeStartStreamResponse,
    LoopNodeCompletedStreamResponse,
    LoopNodeNextStreamResponse,
    LoopNodeStartStreamResponse,
    NodeFinishStreamResponse,
    NodeRetryStreamResponse,
    NodeStartStreamResponse,
    ParallelBranchFinishedStreamResponse,
    ParallelBranchStartStreamResponse,
    WorkflowFinishStreamResponse,
    WorkflowStartStreamResponse,
)
from core.app.task_pipeline.exc import WorkflowRunNotFoundError
from core.file import FILE_MODEL_IDENTITY, File
from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.tools.tool_manager import ToolManager
from core.workflow.entities.node_entities import NodeRunMetadataKey
from core.workflow.entities.node_execution_entities import (
    NodeExecution,
    NodeExecutionStatus,
)
from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
from core.workflow.enums import SystemVariableKey
from core.workflow.nodes import NodeType
from core.workflow.nodes.tool.entities import ToolNodeData
from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from core.workflow.workflow_entry import WorkflowEntry
from models import (
    Account,
    CreatorUserRole,
    EndUser,
    Workflow,
    WorkflowNodeExecutionStatus,
    WorkflowRun,
    WorkflowRunStatus,
    WorkflowRunTriggeredFrom,
)
@@ -77,21 +42,20 @@ class WorkflowCycleManager:
        *,
        application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
        workflow_system_variables: dict[SystemVariableKey, Any],
        workflow_execution_repository: WorkflowExecutionRepository,
        workflow_node_execution_repository: WorkflowNodeExecutionRepository,
    ) -> None:
        self._workflow_run: WorkflowRun | None = None
        self._application_generate_entity = application_generate_entity
        self._workflow_system_variables = workflow_system_variables
        self._workflow_execution_repository = workflow_execution_repository
        self._workflow_node_execution_repository = workflow_node_execution_repository

    def _handle_workflow_run_start(
    def handle_workflow_run_start(
        self,
        *,
        session: Session,
        workflow_id: str,
        user_id: str,
        created_by_role: CreatorUserRole,
    ) -> WorkflowRun:
    ) -> WorkflowExecution:
        workflow_stmt = select(Workflow).where(Workflow.id == workflow_id)
        workflow = session.scalar(workflow_stmt)
        if not workflow:
@@ -110,157 +74,116 @@ class WorkflowCycleManager:
                continue
            inputs[f"sys.{key.value}"] = value

        triggered_from = (
            WorkflowRunTriggeredFrom.DEBUGGING
            if self._application_generate_entity.invoke_from == InvokeFrom.DEBUGGER
            else WorkflowRunTriggeredFrom.APP_RUN
        )

        # handle special values
        inputs = dict(WorkflowEntry.handle_special_values(inputs) or {})

        # init workflow run
        # TODO: This workflow_run_id should always not be None, maybe we can use a more elegant way to handle this
        workflow_run_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4())
        execution_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4())
        execution = WorkflowExecution.new(
            id=execution_id,
            workflow_id=workflow.id,
            sequence_number=new_sequence_number,
            type=WorkflowType(workflow.type),
            workflow_version=workflow.version,
            graph=workflow.graph_dict,
            inputs=inputs,
            started_at=datetime.now(UTC).replace(tzinfo=None),
        )

        workflow_run = WorkflowRun()
        workflow_run.id = workflow_run_id
        workflow_run.tenant_id = workflow.tenant_id
        workflow_run.app_id = workflow.app_id
        workflow_run.sequence_number = new_sequence_number
        workflow_run.workflow_id = workflow.id
        workflow_run.type = workflow.type
        workflow_run.triggered_from = triggered_from.value
        workflow_run.version = workflow.version
        workflow_run.graph = workflow.graph
        workflow_run.inputs = json.dumps(inputs)
        workflow_run.status = WorkflowRunStatus.RUNNING
        workflow_run.created_by_role = created_by_role
        workflow_run.created_by = user_id
        workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
        self._workflow_execution_repository.save(execution)

        session.add(workflow_run)
        return execution

        return workflow_run

    def _handle_workflow_run_success(
    def handle_workflow_run_success(
        self,
        *,
        session: Session,
        workflow_run_id: str,
        start_at: float,
        total_tokens: int,
        total_steps: int,
        outputs: Mapping[str, Any] | None = None,
        conversation_id: Optional[str] = None,
        trace_manager: Optional[TraceQueueManager] = None,
    ) -> WorkflowRun:
        """
        Workflow run success
        :param workflow_run_id: workflow run id
        :param start_at: start time
        :param total_tokens: total tokens
        :param total_steps: total steps
        :param outputs: outputs
        :param conversation_id: conversation id
        :return:
        """
        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
    ) -> WorkflowExecution:
        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)

        outputs = WorkflowEntry.handle_special_values(outputs)

        workflow_run.status = WorkflowRunStatus.SUCCEEDED
        workflow_run.outputs = json.dumps(outputs or {})
        workflow_run.elapsed_time = time.perf_counter() - start_at
        workflow_run.total_tokens = total_tokens
        workflow_run.total_steps = total_steps
        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_execution.status = WorkflowExecutionStatus.SUCCEEDED
        workflow_execution.outputs = outputs or {}
        workflow_execution.total_tokens = total_tokens
        workflow_execution.total_steps = total_steps
        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)

        if trace_manager:
            trace_manager.add_trace_task(
                TraceTask(
                    TraceTaskName.WORKFLOW_TRACE,
                    workflow_run=workflow_run,
                    workflow_execution=workflow_execution,
                    conversation_id=conversation_id,
                    user_id=trace_manager.user_id,
                )
            )

        return workflow_run
        return workflow_execution

    def _handle_workflow_run_partial_success(
    def handle_workflow_run_partial_success(
        self,
        *,
        session: Session,
        workflow_run_id: str,
        start_at: float,
        total_tokens: int,
        total_steps: int,
        outputs: Mapping[str, Any] | None = None,
        exceptions_count: int = 0,
        conversation_id: Optional[str] = None,
        trace_manager: Optional[TraceQueueManager] = None,
    ) -> WorkflowRun:
        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
    ) -> WorkflowExecution:
        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
        outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None)

        workflow_run.status = WorkflowRunStatus.PARTIAL_SUCCEEDED.value
        workflow_run.outputs = json.dumps(outputs or {})
        workflow_run.elapsed_time = time.perf_counter() - start_at
        workflow_run.total_tokens = total_tokens
        workflow_run.total_steps = total_steps
        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_run.exceptions_count = exceptions_count
        execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED
        execution.outputs = outputs or {}
        execution.total_tokens = total_tokens
        execution.total_steps = total_steps
        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
        execution.exceptions_count = exceptions_count

        if trace_manager:
            trace_manager.add_trace_task(
                TraceTask(
                    TraceTaskName.WORKFLOW_TRACE,
                    workflow_run=workflow_run,
                    workflow_execution=execution,
                    conversation_id=conversation_id,
                    user_id=trace_manager.user_id,
                )
            )

        return workflow_run
        return execution

    def _handle_workflow_run_failed(
    def handle_workflow_run_failed(
        self,
        *,
        session: Session,
        workflow_run_id: str,
        start_at: float,
        total_tokens: int,
        total_steps: int,
        status: WorkflowRunStatus,
        error: str,
        error_message: str,
        conversation_id: Optional[str] = None,
        trace_manager: Optional[TraceQueueManager] = None,
        exceptions_count: int = 0,
    ) -> WorkflowRun:
        """
        Workflow run failed
        :param workflow_run_id: workflow run id
        :param start_at: start time
        :param total_tokens: total tokens
        :param total_steps: total steps
        :param status: status
        :param error: error message
        :return:
        """
        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
    ) -> WorkflowExecution:
        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)

        workflow_run.status = status.value
        workflow_run.error = error
        workflow_run.elapsed_time = time.perf_counter() - start_at
        workflow_run.total_tokens = total_tokens
        workflow_run.total_steps = total_steps
        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
        workflow_run.exceptions_count = exceptions_count
        execution.status = WorkflowExecutionStatus(status.value)
        execution.error_message = error_message
        execution.total_tokens = total_tokens
        execution.total_steps = total_steps
        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
        execution.exceptions_count = exceptions_count

        # Use the instance repository to find running executions for a workflow run
        running_domain_executions = self._workflow_node_execution_repository.get_running_executions(
            workflow_run_id=workflow_run.id
            workflow_run_id=execution.id
        )

        # Update the domain models
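handle_workflow_run_failed maps the storage-layer status into the domain enum with WorkflowExecutionStatus(status.value). That works because both StrEnums carry the same wire values, and calling an enum class with a value performs a by-value lookup. A reduced sketch (subset of members, stand-in classes):

from enum import StrEnum


class WorkflowRunStatus(StrEnum):        # stand-in for the storage-layer enum (subset)
    FAILED = "failed"
    STOPPED = "stopped"


class WorkflowExecutionStatus(StrEnum):  # stand-in for the domain-layer enum (subset)
    FAILED = "failed"
    STOPPED = "stopped"


status = WorkflowRunStatus.STOPPED
converted = WorkflowExecutionStatus(status.value)  # by-value lookup across enums
print(converted is WorkflowExecutionStatus.STOPPED)  # True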
@@ -269,7 +192,7 @@ class WorkflowCycleManager:
            if domain_execution.node_execution_id:
                # Update the domain model
                domain_execution.status = NodeExecutionStatus.FAILED
                domain_execution.error = error
                domain_execution.error = error_message
                domain_execution.finished_at = now
                domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds()
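Note the elapsed-time strategy change running through these hunks: the old code stored time.perf_counter() - start_at on the run, while the new code derives elapsed time from datetime pairs, as in the line above. A small sketch contrasting the two styles, with a deliberate sleep so both produce a nonzero value:

import time
from datetime import UTC, datetime

start_at = time.perf_counter()                        # old style: monotonic stopwatch
started_at = datetime.now(UTC).replace(tzinfo=None)   # new style: naive-UTC timestamps

time.sleep(0.01)

legacy_elapsed = time.perf_counter() - start_at
derived_elapsed = (datetime.now(UTC).replace(tzinfo=None) - started_at).total_seconds()

# Both agree to well within wall-clock noise for short spans.
print(abs(legacy_elapsed - derived_elapsed) < 0.1)  # True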
@@ -280,15 +203,22 @@ class WorkflowCycleManager:
            trace_manager.add_trace_task(
                TraceTask(
                    TraceTaskName.WORKFLOW_TRACE,
                    workflow_run=workflow_run,
                    workflow_execution=execution,
                    conversation_id=conversation_id,
                    user_id=trace_manager.user_id,
                )
            )

        return workflow_run
        return execution

    def handle_node_execution_start(
        self,
        *,
        workflow_execution_id: str,
        event: QueueNodeStartedEvent,
    ) -> NodeExecution:
        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id)

    def _handle_node_execution_start(self, *, workflow_run: WorkflowRun, event: QueueNodeStartedEvent) -> NodeExecution:
        # Create a domain model
        created_at = datetime.now(UTC).replace(tzinfo=None)
        metadata = {
@@ -299,8 +229,8 @@ class WorkflowCycleManager:

        domain_execution = NodeExecution(
            id=str(uuid4()),
            workflow_id=workflow_run.workflow_id,
            workflow_run_id=workflow_run.id,
            workflow_id=workflow_execution.workflow_id,
            workflow_run_id=workflow_execution.id,
            predecessor_node_id=event.predecessor_node_id,
            index=event.node_run_index,
            node_execution_id=event.node_execution_id,
@@ -317,7 +247,7 @@ class WorkflowCycleManager:

        return domain_execution

    def _handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution:
    def handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution:
        # Get the domain model from repository
        domain_execution = self._workflow_node_execution_repository.get_by_node_execution_id(event.node_execution_id)
        if not domain_execution:
@@ -350,7 +280,7 @@ class WorkflowCycleManager:

        return domain_execution

    def _handle_workflow_node_execution_failed(
    def handle_workflow_node_execution_failed(
        self,
        *,
        event: QueueNodeFailedEvent
@@ -400,15 +330,10 @@ class WorkflowCycleManager:

        return domain_execution

    def _handle_workflow_node_execution_retried(
        self, *, workflow_run: WorkflowRun, event: QueueNodeRetryEvent
    def handle_workflow_node_execution_retried(
        self, *, workflow_execution_id: str, event: QueueNodeRetryEvent
    ) -> NodeExecution:
        """
        Workflow node execution failed
        :param workflow_run: workflow run
        :param event: queue node failed event
        :return:
        """
        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id)
        created_at = event.start_at
        finished_at = datetime.now(UTC).replace(tzinfo=None)
        elapsed_time = (finished_at - created_at).total_seconds()
@@ -433,8 +358,8 @@ class WorkflowCycleManager:
        # Create a domain model
        domain_execution = NodeExecution(
            id=str(uuid4()),
            workflow_id=workflow_run.workflow_id,
            workflow_run_id=workflow_run.id,
            workflow_id=workflow_execution.workflow_id,
            workflow_run_id=workflow_execution.id,
            predecessor_node_id=event.predecessor_node_id,
            node_execution_id=event.node_execution_id,
            node_id=event.node_id,
@@ -456,491 +381,8 @@ class WorkflowCycleManager:

        return domain_execution

    def _workflow_start_to_stream_response(
        self,
        *,
        session: Session,
        task_id: str,
        workflow_run: WorkflowRun,
    ) -> WorkflowStartStreamResponse:
        _ = session
        return WorkflowStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=WorkflowStartStreamResponse.Data(
                id=workflow_run.id,
                workflow_id=workflow_run.workflow_id,
                sequence_number=workflow_run.sequence_number,
                inputs=dict(workflow_run.inputs_dict or {}),
                created_at=int(workflow_run.created_at.timestamp()),
            ),
        )

    def _workflow_finish_to_stream_response(
        self,
        *,
        session: Session,
        task_id: str,
        workflow_run: WorkflowRun,
    ) -> WorkflowFinishStreamResponse:
        created_by = None
        if workflow_run.created_by_role == CreatorUserRole.ACCOUNT:
            stmt = select(Account).where(Account.id == workflow_run.created_by)
            account = session.scalar(stmt)
            if account:
                created_by = {
                    "id": account.id,
                    "name": account.name,
                    "email": account.email,
                }
        elif workflow_run.created_by_role == CreatorUserRole.END_USER:
            stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
            end_user = session.scalar(stmt)
            if end_user:
                created_by = {
                    "id": end_user.id,
                    "user": end_user.session_id,
                }
        else:
            raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}")

        return WorkflowFinishStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=WorkflowFinishStreamResponse.Data(
                id=workflow_run.id,
                workflow_id=workflow_run.workflow_id,
                sequence_number=workflow_run.sequence_number,
                status=workflow_run.status,
                outputs=dict(workflow_run.outputs_dict) if workflow_run.outputs_dict else None,
                error=workflow_run.error,
                elapsed_time=workflow_run.elapsed_time,
                total_tokens=workflow_run.total_tokens,
                total_steps=workflow_run.total_steps,
                created_by=created_by,
                created_at=int(workflow_run.created_at.timestamp()),
                finished_at=int(workflow_run.finished_at.timestamp()),
                files=self._fetch_files_from_node_outputs(dict(workflow_run.outputs_dict)),
                exceptions_count=workflow_run.exceptions_count,
            ),
        )

    def _workflow_node_start_to_stream_response(
        self,
        *,
        event: QueueNodeStartedEvent,
        task_id: str,
        workflow_node_execution: NodeExecution,
    ) -> Optional[NodeStartStreamResponse]:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None

        response = NodeStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_node_execution.workflow_run_id,
            data=NodeStartStreamResponse.Data(
                id=workflow_node_execution.id,
                node_id=workflow_node_execution.node_id,
                node_type=workflow_node_execution.node_type,
                title=workflow_node_execution.title,
                index=workflow_node_execution.index,
                predecessor_node_id=workflow_node_execution.predecessor_node_id,
                inputs=workflow_node_execution.inputs,
                created_at=int(workflow_node_execution.created_at.timestamp()),
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parent_parallel_id=event.parent_parallel_id,
                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
                iteration_id=event.in_iteration_id,
                loop_id=event.in_loop_id,
                parallel_run_id=event.parallel_mode_run_id,
                agent_strategy=event.agent_strategy,
            ),
        )

        # extras logic
        if event.node_type == NodeType.TOOL:
            node_data = cast(ToolNodeData, event.node_data)
            response.data.extras["icon"] = ToolManager.get_tool_icon(
                tenant_id=self._application_generate_entity.app_config.tenant_id,
                provider_type=node_data.provider_type,
                provider_id=node_data.provider_id,
            )

        return response

    def _workflow_node_finish_to_stream_response(
        self,
        *,
        event: QueueNodeSucceededEvent
        | QueueNodeFailedEvent
        | QueueNodeInIterationFailedEvent
        | QueueNodeInLoopFailedEvent
        | QueueNodeExceptionEvent,
        task_id: str,
        workflow_node_execution: NodeExecution,
    ) -> Optional[NodeFinishStreamResponse]:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None
        if not workflow_node_execution.finished_at:
            return None

        return NodeFinishStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_node_execution.workflow_run_id,
            data=NodeFinishStreamResponse.Data(
                id=workflow_node_execution.id,
                node_id=workflow_node_execution.node_id,
                node_type=workflow_node_execution.node_type,
                index=workflow_node_execution.index,
                title=workflow_node_execution.title,
                predecessor_node_id=workflow_node_execution.predecessor_node_id,
                inputs=workflow_node_execution.inputs,
                process_data=workflow_node_execution.process_data,
                outputs=workflow_node_execution.outputs,
                status=workflow_node_execution.status,
                error=workflow_node_execution.error,
                elapsed_time=workflow_node_execution.elapsed_time,
                execution_metadata=workflow_node_execution.metadata,
                created_at=int(workflow_node_execution.created_at.timestamp()),
                finished_at=int(workflow_node_execution.finished_at.timestamp()),
                files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parent_parallel_id=event.parent_parallel_id,
                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
                iteration_id=event.in_iteration_id,
                loop_id=event.in_loop_id,
            ),
        )

    def _workflow_node_retry_to_stream_response(
        self,
        *,
        event: QueueNodeRetryEvent,
        task_id: str,
        workflow_node_execution: NodeExecution,
    ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
            return None
        if not workflow_node_execution.workflow_run_id:
            return None
        if not workflow_node_execution.finished_at:
            return None

        return NodeRetryStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_node_execution.workflow_run_id,
            data=NodeRetryStreamResponse.Data(
                id=workflow_node_execution.id,
                node_id=workflow_node_execution.node_id,
                node_type=workflow_node_execution.node_type,
                index=workflow_node_execution.index,
                title=workflow_node_execution.title,
                predecessor_node_id=workflow_node_execution.predecessor_node_id,
                inputs=workflow_node_execution.inputs,
                process_data=workflow_node_execution.process_data,
                outputs=workflow_node_execution.outputs,
                status=workflow_node_execution.status,
                error=workflow_node_execution.error,
                elapsed_time=workflow_node_execution.elapsed_time,
                execution_metadata=workflow_node_execution.metadata,
                created_at=int(workflow_node_execution.created_at.timestamp()),
                finished_at=int(workflow_node_execution.finished_at.timestamp()),
                files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parent_parallel_id=event.parent_parallel_id,
                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
                iteration_id=event.in_iteration_id,
                loop_id=event.in_loop_id,
                retry_index=event.retry_index,
            ),
        )

    def _workflow_parallel_branch_start_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueParallelBranchRunStartedEvent
    ) -> ParallelBranchStartStreamResponse:
        _ = session
        return ParallelBranchStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=ParallelBranchStartStreamResponse.Data(
                parallel_id=event.parallel_id,
                parallel_branch_id=event.parallel_start_node_id,
                parent_parallel_id=event.parent_parallel_id,
                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
                iteration_id=event.in_iteration_id,
                loop_id=event.in_loop_id,
                created_at=int(time.time()),
            ),
        )

    def _workflow_parallel_branch_finished_to_stream_response(
        self,
        *,
        session: Session,
        task_id: str,
        workflow_run: WorkflowRun,
        event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
    ) -> ParallelBranchFinishedStreamResponse:
        _ = session
        return ParallelBranchFinishedStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=ParallelBranchFinishedStreamResponse.Data(
                parallel_id=event.parallel_id,
                parallel_branch_id=event.parallel_start_node_id,
                parent_parallel_id=event.parent_parallel_id,
                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
                iteration_id=event.in_iteration_id,
                loop_id=event.in_loop_id,
                status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed",
                error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None,
                created_at=int(time.time()),
            ),
        )

    def _workflow_iteration_start_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationStartEvent
    ) -> IterationNodeStartStreamResponse:
        _ = session
        return IterationNodeStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=IterationNodeStartStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                metadata=event.metadata or {},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def _workflow_iteration_next_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationNextEvent
    ) -> IterationNodeNextStreamResponse:
        _ = session
        return IterationNodeNextStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=IterationNodeNextStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                index=event.index,
                pre_iteration_output=event.output,
                created_at=int(time.time()),
                extras={},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parallel_mode_run_id=event.parallel_mode_run_id,
                duration=event.duration,
            ),
        )

    def _workflow_iteration_completed_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationCompletedEvent
    ) -> IterationNodeCompletedStreamResponse:
        _ = session
        return IterationNodeCompletedStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=IterationNodeCompletedStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                outputs=event.outputs,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                status=WorkflowNodeExecutionStatus.SUCCEEDED
                if event.error is None
                else WorkflowNodeExecutionStatus.FAILED,
                error=None,
                elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
                total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
                execution_metadata=event.metadata,
                finished_at=int(time.time()),
                steps=event.steps,
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def _workflow_loop_start_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopStartEvent
    ) -> LoopNodeStartStreamResponse:
        _ = session
        return LoopNodeStartStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=LoopNodeStartStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                metadata=event.metadata or {},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def _workflow_loop_next_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopNextEvent
    ) -> LoopNodeNextStreamResponse:
        _ = session
        return LoopNodeNextStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=LoopNodeNextStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                index=event.index,
                pre_loop_output=event.output,
                created_at=int(time.time()),
                extras={},
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
                parallel_mode_run_id=event.parallel_mode_run_id,
                duration=event.duration,
            ),
        )

    def _workflow_loop_completed_to_stream_response(
        self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopCompletedEvent
    ) -> LoopNodeCompletedStreamResponse:
        _ = session
        return LoopNodeCompletedStreamResponse(
            task_id=task_id,
            workflow_run_id=workflow_run.id,
            data=LoopNodeCompletedStreamResponse.Data(
                id=event.node_id,
                node_id=event.node_id,
                node_type=event.node_type.value,
                title=event.node_data.title,
                outputs=event.outputs,
                created_at=int(time.time()),
                extras={},
                inputs=event.inputs or {},
                status=WorkflowNodeExecutionStatus.SUCCEEDED
                if event.error is None
                else WorkflowNodeExecutionStatus.FAILED,
                error=None,
                elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
                total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
                execution_metadata=event.metadata,
                finished_at=int(time.time()),
                steps=event.steps,
                parallel_id=event.parallel_id,
                parallel_start_node_id=event.parallel_start_node_id,
            ),
        )

    def _fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any]) -> Sequence[Mapping[str, Any]]:
        """
        Fetch files from node outputs
        :param outputs_dict: node outputs dict
        :return:
        """
        if not outputs_dict:
            return []

        files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()]
        # Remove None
        files = [file for file in files if file]
        # Flatten list
        # Flatten the list of sequences into a single list of mappings
        flattened_files = [file for sublist in files if sublist for file in sublist]

        # Convert to tuple to match Sequence type
        return tuple(flattened_files)

    def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
        """
        Fetch files from variable value
        :param value: variable value
        :return:
        """
        if not value:
            return []

        files = []
        if isinstance(value, list):
            for item in value:
                file = self._get_file_var_from_value(item)
                if file:
                    files.append(file)
        elif isinstance(value, dict):
            file = self._get_file_var_from_value(value)
            if file:
                files.append(file)

        return files

    def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
        """
        Get file var from value
        :param value: variable value
        :return:
        """
        if not value:
            return None

        if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
            return value
        elif isinstance(value, File):
            return value.to_dict()

        return None

    def _get_workflow_run(self, *, session: Session, workflow_run_id: str) -> WorkflowRun:
        if self._workflow_run and self._workflow_run.id == workflow_run_id:
            cached_workflow_run = self._workflow_run
            cached_workflow_run = session.merge(cached_workflow_run)
            return cached_workflow_run
        stmt = select(WorkflowRun).where(WorkflowRun.id == workflow_run_id)
        workflow_run = session.scalar(stmt)
        if not workflow_run:
            raise WorkflowRunNotFoundError(workflow_run_id)
        self._workflow_run = workflow_run

        return workflow_run

    def _handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
        """
        Handle agent log
        :param task_id: task id
        :param event: agent log event
        :return:
        """
        return AgentLogStreamResponse(
            task_id=task_id,
            data=AgentLogStreamResponse.Data(
                node_execution_id=event.node_execution_id,
                id=event.id,
                parent_id=event.parent_id,
                label=event.label,
                error=event.error,
                status=event.status,
                data=event.data,
                metadata=event.metadata,
                node_id=event.node_id,
            ),
        )

    def _get_workflow_execution_or_raise_error(self, id: str, /) -> WorkflowExecution:
        execution = self._workflow_execution_repository.get(id)
        if not execution:
            raise WorkflowRunNotFoundError(id)
        return execution
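The one addition in that large removal hunk, _get_workflow_execution_or_raise_error, declares its parameter as "id: str, /". A small sketch of what the positional-only marker buys; the rationale is an inference from the code, not stated in the commit:

def get_or_raise(id: str, /) -> str:
    # "/" makes `id` positional-only: callers can never pass it by keyword,
    # so the parameter name (which shadows the builtin) stays out of the API.
    return id


print(get_or_raise("run-1"))  # run-1
try:
    get_or_raise(id="run-1")
except TypeError as exc:
    print(f"keyword call rejected: {exc}")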
@@ -3,11 +3,14 @@ import json
import flask_login # type: ignore
from flask import Response, request
from flask_login import user_loaded_from_request, user_logged_in
from werkzeug.exceptions import Unauthorized
from werkzeug.exceptions import NotFound, Unauthorized

import contexts
from dify_app import DifyApp
from extensions.ext_database import db
from libs.passport import PassportService
from models.account import Account
from models.model import EndUser
from services.account_service import AccountService

login_manager = flask_login.LoginManager()
@@ -17,34 +20,48 @@ login_manager = flask_login.LoginManager()
@login_manager.request_loader
def load_user_from_request(request_from_flask_login):
    """Load user based on the request."""
    if request.blueprint not in {"console", "inner_api"}:
        return None
    # Check if the user_id contains a dot, indicating the old format
    auth_header = request.headers.get("Authorization", "")
    if not auth_header:
        auth_token = request.args.get("_token")
        if not auth_token:
            raise Unauthorized("Invalid Authorization token.")
    else:
    auth_token: str | None = None
    if auth_header:
        if " " not in auth_header:
            raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
        auth_scheme, auth_token = auth_header.split(None, 1)
        auth_scheme, auth_token = auth_header.split(maxsplit=1)
        auth_scheme = auth_scheme.lower()
        if auth_scheme != "bearer":
            raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
    else:
        auth_token = request.args.get("_token")

    decoded = PassportService().verify(auth_token)
    user_id = decoded.get("user_id")
    if request.blueprint in {"console", "inner_api"}:
        if not auth_token:
            raise Unauthorized("Invalid Authorization token.")
        decoded = PassportService().verify(auth_token)
        user_id = decoded.get("user_id")
        if not user_id:
            raise Unauthorized("Invalid Authorization token.")

    logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
    return logged_in_account
        logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
        return logged_in_account
    elif request.blueprint == "web":
        decoded = PassportService().verify(auth_token)
        end_user_id = decoded.get("end_user_id")
        if not end_user_id:
            raise Unauthorized("Invalid Authorization token.")
        end_user = db.session.query(EndUser).filter(EndUser.id == decoded["end_user_id"]).first()
        if not end_user:
            raise NotFound("End user not found.")
        return end_user


@user_logged_in.connect
@user_loaded_from_request.connect
def on_user_logged_in(_sender, user):
    """Called when a user logged in."""
    if user:
    """Called when a user logged in.

    Note: AccountService.load_logged_in_account will populate user.current_tenant_id
    through the load_user method, which calls account.set_tenant_id().
    """
    if user and isinstance(user, Account) and user.current_tenant_id:
        contexts.tenant_id.set(user.current_tenant_id)
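Two things happen in this hunk: token extraction is hoisted ahead of the blueprint dispatch, and split(None, 1) becomes the clearer keyword form split(maxsplit=1), a pure readability change since both split at most once on any run of whitespace. A standalone sketch of the resulting parse:

def parse_bearer(auth_header: str) -> str:
    """Mirror of the new header-parsing flow (simplified, standalone)."""
    if " " not in auth_header:
        raise ValueError("Expected 'Bearer <api-key>' format.")
    scheme, token = auth_header.split(maxsplit=1)
    if scheme.lower() != "bearer":
        raise ValueError("Expected 'Bearer <api-key>' format.")
    return token


print(parse_bearer("Bearer abc.def.ghi"))  # abc.def.ghi
# The two split spellings are equivalent:
print("Bearer  x".split(None, 1) == "Bearer  x".split(maxsplit=1))  # True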
@@ -1,10 +1,10 @@
import enum
import json
from typing import cast
from typing import Optional, cast

from flask_login import UserMixin # type: ignore
from sqlalchemy import func
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy.orm import Mapped, mapped_column, reconstructor

from models.base import Base
@@ -12,6 +12,66 @@ from .engine import db
from .types import StringUUID


class TenantAccountRole(enum.StrEnum):
    OWNER = "owner"
    ADMIN = "admin"
    EDITOR = "editor"
    NORMAL = "normal"
    DATASET_OPERATOR = "dataset_operator"

    @staticmethod
    def is_valid_role(role: str) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.OWNER,
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.NORMAL,
            TenantAccountRole.DATASET_OPERATOR,
        }

    @staticmethod
    def is_privileged_role(role: Optional["TenantAccountRole"]) -> bool:
        if not role:
            return False
        return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN}

    @staticmethod
    def is_admin_role(role: Optional["TenantAccountRole"]) -> bool:
        if not role:
            return False
        return role == TenantAccountRole.ADMIN

    @staticmethod
    def is_non_owner_role(role: Optional["TenantAccountRole"]) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.NORMAL,
            TenantAccountRole.DATASET_OPERATOR,
        }

    @staticmethod
    def is_editing_role(role: Optional["TenantAccountRole"]) -> bool:
        if not role:
            return False
        return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}

    @staticmethod
    def is_dataset_edit_role(role: Optional["TenantAccountRole"]) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.OWNER,
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.DATASET_OPERATOR,
        }


class AccountStatus(enum.StrEnum):
    PENDING = "pending"
    UNINITIALIZED = "uninitialized"
@@ -41,24 +101,27 @@ class Account(UserMixin, Base):
    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())

    @reconstructor
    def init_on_load(self):
        self.role: Optional[TenantAccountRole] = None
        self._current_tenant: Optional[Tenant] = None

    @property
    def is_password_set(self):
        return self.password is not None

    @property
    def current_tenant(self):
        return self._current_tenant  # type: ignore
        return self._current_tenant

    @current_tenant.setter
    def current_tenant(self, value: "Tenant"):
        tenant = value
    def current_tenant(self, tenant: "Tenant"):
        ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first()
        if ta:
            tenant.current_role = ta.role
        else:
            tenant = None  # type: ignore

        self._current_tenant = tenant
            self.role = TenantAccountRole(ta.role)
            self._current_tenant = tenant
            return
        self._current_tenant = None

    @property
    def current_tenant_id(self) -> str | None:
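The new @reconstructor hook is the load-time counterpart of __init__: SQLAlchemy does not call __init__ when it materializes rows from the database, so plain Python attributes such as Account.role and Account._current_tenant must be re-established in init_on_load. A runnable sketch of that behavior (SQLAlchemy 2.x, in-memory SQLite, hypothetical minimal model):

from sqlalchemy import create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column, reconstructor


class Base(DeclarativeBase):
    pass


class Account(Base):
    __tablename__ = "accounts"
    id: Mapped[int] = mapped_column(primary_key=True)

    @reconstructor
    def init_on_load(self) -> None:
        self.role = None  # plain Python attribute, not a mapped column


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Account(id=1))
    session.commit()
with Session(engine) as session:
    loaded = session.get(Account, 1)  # __init__ is skipped on load
    assert loaded is not None
    print(loaded.role)  # None, courtesy of init_on_load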
@@ -80,12 +143,12 @@ class Account(UserMixin, Base):
            return

        tenant, join = tenant_account_join
        tenant.current_role = join.role
        self.role = join.role
        self._current_tenant = tenant

    @property
    def current_role(self):
        return self._current_tenant.current_role
        return self.role

    def get_status(self) -> AccountStatus:
        status_str = self.status
@@ -105,23 +168,23 @@ class Account(UserMixin, Base):
    # check current_user.current_tenant.current_role in ['admin', 'owner']
    @property
    def is_admin_or_owner(self):
        return TenantAccountRole.is_privileged_role(self._current_tenant.current_role)
        return TenantAccountRole.is_privileged_role(self.role)

    @property
    def is_admin(self):
        return TenantAccountRole.is_admin_role(self._current_tenant.current_role)
        return TenantAccountRole.is_admin_role(self.role)

    @property
    def is_editor(self):
        return TenantAccountRole.is_editing_role(self._current_tenant.current_role)
        return TenantAccountRole.is_editing_role(self.role)

    @property
    def is_dataset_editor(self):
        return TenantAccountRole.is_dataset_edit_role(self._current_tenant.current_role)
        return TenantAccountRole.is_dataset_edit_role(self.role)

    @property
    def is_dataset_operator(self):
        return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR
        return self.role == TenantAccountRole.DATASET_OPERATOR


class TenantStatus(enum.StrEnum):
@@ -129,66 +192,6 @@ class TenantStatus(enum.StrEnum):
    ARCHIVE = "archive"


class TenantAccountRole(enum.StrEnum):
    OWNER = "owner"
    ADMIN = "admin"
    EDITOR = "editor"
    NORMAL = "normal"
    DATASET_OPERATOR = "dataset_operator"

    @staticmethod
    def is_valid_role(role: str) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.OWNER,
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.NORMAL,
            TenantAccountRole.DATASET_OPERATOR,
        }

    @staticmethod
    def is_privileged_role(role: str) -> bool:
        if not role:
            return False
        return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN}

    @staticmethod
    def is_admin_role(role: str) -> bool:
        if not role:
            return False
        return role == TenantAccountRole.ADMIN

    @staticmethod
    def is_non_owner_role(role: str) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.NORMAL,
            TenantAccountRole.DATASET_OPERATOR,
        }

    @staticmethod
    def is_editing_role(role: str) -> bool:
        if not role:
            return False
        return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}

    @staticmethod
    def is_dataset_edit_role(role: str) -> bool:
        if not role:
            return False
        return role in {
            TenantAccountRole.OWNER,
            TenantAccountRole.ADMIN,
            TenantAccountRole.EDITOR,
            TenantAccountRole.DATASET_OPERATOR,
        }


class Tenant(Base):
    __tablename__ = "tenants"
    __table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),)
@@ -1,5 +1,7 @@
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import DeclarativeBase

from models.engine import metadata

Base = declarative_base(metadata=metadata)

class Base(DeclarativeBase):
    metadata = metadata
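Both spellings produce a declarative base bound to the same MetaData; the class form is the SQLAlchemy 2.x idiom and cooperates better with type checkers. A side-by-side sketch:

from sqlalchemy import MetaData
from sqlalchemy.orm import DeclarativeBase, declarative_base

metadata = MetaData()

OldBase = declarative_base(metadata=metadata)  # 1.x-style factory


class NewBase(DeclarativeBase):  # 2.x-style class, as in the diff
    metadata = metadata


print(OldBase.metadata is NewBase.metadata)  # True: both share one MetaData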
@@ -206,10 +206,6 @@ class WorkflowToolProvider(Base):
        db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
    )

    @property
    def schema_type(self) -> ApiProviderSchemaType:
        return ApiProviderSchemaType.value_of(self.schema_type_str)

    @property
    def user(self) -> Account | None:
        return db.session.query(Account).filter(Account.id == self.user_id).first()
@@ -3,7 +3,7 @@ import logging
 from collections.abc import Mapping, Sequence
 from datetime import UTC, datetime
 from enum import Enum, StrEnum
-from typing import TYPE_CHECKING, Any, Optional, Self, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import uuid4

 from core.variables import utils as variable_utils

@@ -154,7 +154,7 @@ class Workflow(Base):
         conversation_variables: Sequence[Variable],
         marked_name: str = "",
         marked_comment: str = "",
-    ) -> Self:
+    ) -> "Workflow":
         workflow = Workflow()
         workflow.id = str(uuid4())
         workflow.tenant_id = tenant_id

@@ -203,7 +203,9 @@ class Workflow(Base):
             features["file_upload"]["number_limits"] = image_number_limits
             features["file_upload"]["allowed_file_upload_methods"] = image_transfer_methods
             features["file_upload"]["allowed_file_types"] = features["file_upload"].get("allowed_file_types", ["image"])
-            features["file_upload"]["allowed_file_extensions"] = []
+            features["file_upload"]["allowed_file_extensions"] = features["file_upload"].get(
+                "allowed_file_extensions", []
+            )
             del features["file_upload"]["image"]
             self._features = json.dumps(features)
             return self._features

@@ -447,14 +449,14 @@ class WorkflowRun(Base):
     status: Mapped[str] = mapped_column(db.String(255))  # running, succeeded, failed, stopped, partial-succeeded
     outputs: Mapped[Optional[str]] = mapped_column(sa.Text, default="{}")
     error: Mapped[Optional[str]] = mapped_column(db.Text)
-    elapsed_time = db.Column(db.Float, nullable=False, server_default=sa.text("0"))
+    elapsed_time: Mapped[float] = mapped_column(db.Float, nullable=False, server_default=sa.text("0"))
     total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0"))
-    total_steps = db.Column(db.Integer, server_default=db.text("0"))
+    total_steps: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))
     created_by_role: Mapped[str] = mapped_column(db.String(255))  # account, end_user
-    created_by = db.Column(StringUUID, nullable=False)
-    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
-    finished_at = db.Column(db.DateTime)
-    exceptions_count = db.Column(db.Integer, server_default=db.text("0"))
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+    finished_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime)
+    exceptions_count: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))

     @property
     def created_by_account(self):

@@ -469,7 +471,7 @@ class WorkflowRun(Base):
         return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None

     @property
-    def graph_dict(self):
+    def graph_dict(self) -> Mapping[str, Any]:
         return json.loads(self.graph) if self.graph else {}

     @property

@@ -688,8 +690,11 @@ class WorkflowNodeExecution(Base):
         return json.loads(self.process_data) if self.process_data else None

     @property
-    def execution_metadata_dict(self) -> dict[str, Any] | None:
-        return json.loads(self.execution_metadata) if self.execution_metadata else None
+    def execution_metadata_dict(self) -> dict[str, Any]:
+        # When the metadata is unset, we return an empty dictionary instead of `None`.
+        # This approach streamlines the logic for the caller, making it easier to handle
+        # cases where metadata is absent.
+        return json.loads(self.execution_metadata) if self.execution_metadata else {}

     @property
     def extras(self):

@@ -771,12 +776,12 @@ class WorkflowAppLog(Base):
     id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
     tenant_id: Mapped[str] = mapped_column(StringUUID)
     app_id: Mapped[str] = mapped_column(StringUUID)
-    workflow_id = db.Column(StringUUID, nullable=False)
+    workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
     workflow_run_id: Mapped[str] = mapped_column(StringUUID)
-    created_from = db.Column(db.String(255), nullable=False)
-    created_by_role = db.Column(db.String(255), nullable=False)
-    created_by = db.Column(StringUUID, nullable=False)
-    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+    created_from: Mapped[str] = mapped_column(db.String(255), nullable=False)
+    created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False)
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())

     @property
     def workflow_run(self):

@@ -801,9 +806,11 @@ class ConversationVariable(Base):
     id: Mapped[str] = mapped_column(StringUUID, primary_key=True)
     conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True, index=True)
     app_id: Mapped[str] = mapped_column(StringUUID, nullable=False, index=True)
-    data = mapped_column(db.Text, nullable=False)
-    created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True)
-    updated_at = mapped_column(
+    data: Mapped[str] = mapped_column(db.Text, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(
+        db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True
+    )
+    updated_at: Mapped[datetime] = mapped_column(
         db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
     )

@@ -851,14 +858,14 @@ class WorkflowDraftVariable(Base):
     # id is the unique identifier of a draft variable.
     id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))

-    created_at = mapped_column(
+    created_at: Mapped[datetime] = mapped_column(
         db.DateTime,
         nullable=False,
         default=_naive_utc_datetime,
         server_default=func.current_timestamp(),
     )

-    updated_at = mapped_column(
+    updated_at: Mapped[datetime] = mapped_column(
         db.DateTime,
         nullable=False,
         default=_naive_utc_datetime,

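The `Mapped[...]` annotations introduced throughout these hunks are what lets a type checker distinguish nullable columns from required ones; `nullable=` alone is invisible to mypy. A small self-contained sketch of the payoff, using a hypothetical model:

    from datetime import datetime
    from typing import Optional

    import sqlalchemy as sa
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Run(Base):  # hypothetical model, not from the diff
        __tablename__ = "runs"
        id: Mapped[str] = mapped_column(sa.String(36), primary_key=True)
        # The checker now knows finished_at may be None...
        finished_at: Mapped[Optional[datetime]] = mapped_column(sa.DateTime)
        # ...and that total_steps is a non-optional int.
        total_steps: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"))

    def describe(run: Run) -> str:
        # Without the annotation this attribute would type as Any.
        return "running" if run.finished_at is None else run.finished_at.isoformat()
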
@@ -39,7 +39,7 @@ dependencies = [
     "oci~=2.135.1",
     "openai~=1.61.0",
     "openpyxl~=3.1.5",
-    "opik~=1.3.4",
+    "opik~=1.7.25",
     "opentelemetry-api==1.27.0",
     "opentelemetry-distro==0.48b0",
     "opentelemetry-exporter-otlp==1.27.0",

@@ -148,6 +148,7 @@ dev = [
     "types-tensorflow~=2.18.0",
     "types-tqdm~=4.67.0",
     "types-ujson~=5.10.0",
+    "boto3-stubs>=1.38.20",
 ]

 ############################################################

@@ -1044,11 +1044,11 @@ class DocumentService:
                 "score_threshold_enabled": False,
             }

-            dataset.retrieval_model = (
-                knowledge_config.retrieval_model.model_dump()
-                if knowledge_config.retrieval_model
-                else default_retrieval_model
-            )  # type: ignore
+        dataset.retrieval_model = (
+            knowledge_config.retrieval_model.model_dump()
+            if knowledge_config.retrieval_model
+            else default_retrieval_model
+        )  # type: ignore

         documents = []
         if knowledge_config.original_document_id:

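Read as a pure dedent (the extraction dropped indentation, so this is an inference), the DocumentService hunk makes the `dataset.retrieval_model` assignment run on every call rather than only on the branch that just built `default_retrieval_model`. A stripped-down illustration of the difference, with placeholder names:

    def configure(dataset, retrieval_model, default_retrieval_model):
        # Buggy shape: the assignment lives inside the branch, so an
        # explicitly supplied retrieval_model is never applied:
        #
        #     if retrieval_model is None:
        #         dataset.retrieval_model = default_retrieval_model
        #
        # Fixed shape: assign once, after the branch, covering both cases.
        dataset.retrieval_model = (
            retrieval_model if retrieval_model is not None else default_retrieval_model
        )
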
@@ -23,11 +23,10 @@ class VectorService:
     ):
         documents: list[Document] = []

-        document: Document | None = None
         for segment in segments:
             if doc_form == IndexType.PARENT_CHILD_INDEX:
-                document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
-                if not document:
+                dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
+                if not dataset_document:
                     _logger.warning(
                         "Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s",
                         segment.document_id,

@@ -37,7 +36,7 @@ class VectorService:
                 # get the process rule
                 processing_rule = (
                     db.session.query(DatasetProcessRule)
-                    .filter(DatasetProcessRule.id == document.dataset_process_rule_id)
+                    .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id)
                     .first()
                 )
                 if not processing_rule:

@@ -61,9 +60,11 @@ class VectorService:
                     )
                 else:
                     raise ValueError("The knowledge base index technique is not high quality!")
-                cls.generate_child_chunks(segment, document, dataset, embedding_model_instance, processing_rule, False)
+                cls.generate_child_chunks(
+                    segment, dataset_document, dataset, embedding_model_instance, processing_rule, False
+                )
             else:
-                document = Document(
+                rag_document = Document(
                     page_content=segment.content,
                     metadata={
                         "doc_id": segment.index_node_id,

@@ -72,7 +73,7 @@ class VectorService:
                         "dataset_id": segment.dataset_id,
                     },
                 )
-                documents.append(document)
+                documents.append(rag_document)
         if len(documents) > 0:
             index_processor = IndexProcessorFactory(doc_form).init_index_processor()
             index_processor.load(dataset, documents, with_keywords=True, keywords_list=keywords_list)

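The renames in the VectorService hunks split one reused `document` variable into `dataset_document` (the ORM row) and `rag_document` (the retrieval payload), so neither branch can shadow the other. A self-contained sketch of the hazard the split removes, with hypothetical stand-in types:

    from dataclasses import dataclass

    @dataclass
    class DatasetDocument:          # stand-in for the ORM record
        dataset_process_rule_id: str

    @dataclass
    class RagDocument:              # stand-in for the retrieval payload
        page_content: str

    def build_payloads(segment_texts: list[str]) -> list[RagDocument]:
        payloads: list[RagDocument] = []
        for text in segment_texts:
            # With one shared name, an access like document.dataset_process_rule_id
            # could hit either type depending on which branch ran last;
            # distinct names make each access unambiguous.
            dataset_document = DatasetDocument(dataset_process_rule_id="rule-1")
            rag_document = RagDocument(page_content=text)
            assert dataset_document.dataset_process_rule_id == "rule-1"
            payloads.append(rag_document)
        return payloads
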
@@ -15,6 +15,7 @@ from models (
     WorkflowRun,
     WorkflowRunTriggeredFrom,
 )
+from models.workflow import WorkflowNodeExecutionTriggeredFrom


 class WorkflowRunService:

@@ -140,14 +141,13 @@ class WorkflowRunService:
             session_factory=db.engine,
             user=user,
             app_id=app_model.id,
-            triggered_from=None,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )

-        # Use the repository to get the node executions with ordering
+        # Use the repository to get the database models directly
         order_config = OrderConfig(order_by=["index"], order_direction="desc")
-        node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config)
-
-        # Convert domain models to database models
-        workflow_node_executions = [repository.to_db_model(node_execution) for node_execution in node_executions]
+        workflow_node_executions = repository.get_db_models_by_workflow_run(
+            workflow_run_id=run_id, order_config=order_config
+        )

         return workflow_node_executions

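The service-side change above trades a fetch-then-convert loop for one repository call that returns database models already ordered. A runnable sketch of the contract such a method implements, with a fake in-memory repository standing in for the real SQLAlchemy one:

    from dataclasses import dataclass

    @dataclass
    class OrderConfig:
        order_by: list[str]
        order_direction: str  # "asc" or "desc"

    class FakeNodeExecutionRepository:
        """In-memory stand-in; the real repository queries the database."""

        def __init__(self, rows: list[dict]):
            self._rows = rows

        def get_db_models_by_workflow_run(self, workflow_run_id: str, order_config: OrderConfig) -> list[dict]:
            key = order_config.order_by[0]
            matches = [r for r in self._rows if r["workflow_run_id"] == workflow_run_id]
            return sorted(matches, key=lambda r: r[key], reverse=order_config.order_direction == "desc")

    repo = FakeNodeExecutionRepository([
        {"workflow_run_id": "run-1", "index": 1},
        {"workflow_run_id": "run-1", "index": 2},
    ])
    result = repo.get_db_models_by_workflow_run("run-1", OrderConfig(["index"], "desc"))
    assert [r["index"] for r in result] == [2, 1]
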
@@ -507,11 +507,11 @@ class WorkflowService:
             raise DraftWorkflowDeletionError("Cannot delete draft workflow versions")

         # Check if this workflow is currently referenced by an app
-        stmt = select(App).where(App.workflow_id == workflow_id)
-        app = session.scalar(stmt)
+        app_stmt = select(App).where(App.workflow_id == workflow_id)
+        app = session.scalar(app_stmt)
         if app:
             # Cannot delete a workflow that's currently in use by an app
-            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.name}'")
+            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'")

         # Don't use workflow.tool_published as it's not accurate for specific workflow versions
         # Check if there's a tool provider using this specific workflow version

@@ -111,7 +111,7 @@ def add_document_to_index_task(dataset_document_id: str):
         logging.exception("add document to index failed")
         dataset_document.enabled = False
         dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
-        dataset_document.status = "error"
+        dataset_document.indexing_status = "error"
         dataset_document.error = str(e)
         db.session.commit()
     finally:

@@ -193,7 +193,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
     # Get app's owner
     with Session(db.engine, expire_on_commit=False) as session:
-        stmt = select(Account).where(Account.id == App.owner_id).where(App.id == app_id)
+        stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id)
         user = session.scalar(stmt)

         if user is None:

@@ -34,13 +34,13 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel
     # needs to patch those methods to avoid database access.
     monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None)
     monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None)
-    monkeypatch.setattr(tool, "_get_user", lambda *args, **kwargs: None)

     # replace `WorkflowAppGenerator.generate` 's return value.
     monkeypatch.setattr(
         "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate",
         lambda *args, **kwargs: {"data": {"error": "oops"}},
     )
+    monkeypatch.setattr("flask_login.current_user", lambda *args, **kwargs: None)

     with pytest.raises(ToolInvokeError) as exc_info:
         # WorkflowTool always returns a generator, so we need to iterate to

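The test above leans on pytest's `monkeypatch` fixture to stub every collaborator that would otherwise touch the database or Flask's request context. The same technique on a toy class, self-contained and runnable under pytest:

    import pytest

    class Tool:
        def _get_user(self):
            raise RuntimeError("would hit the database")

        def run(self):
            return self._get_user()

    def test_run_without_database(monkeypatch: pytest.MonkeyPatch):
        tool = Tool()
        # Replace the bound method on this instance only, mirroring how
        # the diff patches _get_app/_get_workflow on the tool under test.
        monkeypatch.setattr(tool, "_get_user", lambda *args, **kwargs: "stub-user")
        assert tool.run() == "stub-user"
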
@@ -1,45 +1,73 @@
 import json
-import time
 from datetime import UTC, datetime
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock

 import pytest
-from sqlalchemy.orm import Session

+from core.app.app_config.entities import AppAdditionalFeatures, WorkflowUIBasedAppConfig
 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
 from core.app.entities.queue_entities import (
     QueueNodeFailedEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
 )
+from core.workflow.entities.node_entities import NodeRunMetadataKey
+from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
 from core.workflow.enums import SystemVariableKey
+from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from models.enums import CreatorUserRole
+from models.model import AppMode
 from models.workflow import (
     Workflow,
-    WorkflowNodeExecutionStatus,
     WorkflowRun,
     WorkflowRunStatus,
 )


 @pytest.fixture
-def mock_app_generate_entity():
-    entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
-    entity.inputs = {"query": "test query"}
-    entity.invoke_from = InvokeFrom.WEB_APP
-    # Create app_config as a separate mock
-    app_config = MagicMock()
-    app_config.tenant_id = "test-tenant-id"
-    app_config.app_id = "test-app-id"
-    entity.app_config = app_config
+def real_app_generate_entity():
+    additional_features = AppAdditionalFeatures(
+        file_upload=None,
+        opening_statement=None,
+        suggested_questions=[],
+        suggested_questions_after_answer=False,
+        show_retrieve_source=False,
+        more_like_this=False,
+        speech_to_text=False,
+        text_to_speech=None,
+        trace_config=None,
+    )
+
+    app_config = WorkflowUIBasedAppConfig(
+        tenant_id="test-tenant-id",
+        app_id="test-app-id",
+        app_mode=AppMode.WORKFLOW,
+        additional_features=additional_features,
+        workflow_id="test-workflow-id",
+    )
+
+    entity = AdvancedChatAppGenerateEntity(
+        task_id="test-task-id",
+        app_config=app_config,
+        inputs={"query": "test query"},
+        files=[],
+        user_id="test-user-id",
+        stream=False,
+        invoke_from=InvokeFrom.WEB_APP,
+        query="test query",
+        conversation_id="test-conversation-id",
+    )
+
     return entity


 @pytest.fixture
-def mock_workflow_system_variables():
+def real_workflow_system_variables():
     return {
         SystemVariableKey.QUERY: "test query",
         SystemVariableKey.CONVERSATION_ID: "test-conversation-id",

@@ -59,10 +87,23 @@ def mock_node_execution_repository():


 @pytest.fixture
-def workflow_cycle_manager(mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository):
+def mock_workflow_execution_repository():
+    repo = MagicMock(spec=WorkflowExecutionRepository)
+    repo.get.return_value = None
+    return repo
+
+
+@pytest.fixture
+def workflow_cycle_manager(
+    real_app_generate_entity,
+    real_workflow_system_variables,
+    mock_workflow_execution_repository,
+    mock_node_execution_repository,
+):
     return WorkflowCycleManager(
-        application_generate_entity=mock_app_generate_entity,
-        workflow_system_variables=mock_workflow_system_variables,
+        application_generate_entity=real_app_generate_entity,
+        workflow_system_variables=real_workflow_system_variables,
+        workflow_execution_repository=mock_workflow_execution_repository,
         workflow_node_execution_repository=mock_node_execution_repository,
     )

@@ -74,121 +115,173 @@ def mock_session():


 @pytest.fixture
-def mock_workflow():
-    workflow = MagicMock(spec=Workflow)
+def real_workflow():
+    workflow = Workflow()
     workflow.id = "test-workflow-id"
     workflow.tenant_id = "test-tenant-id"
     workflow.app_id = "test-app-id"
     workflow.type = "chat"
     workflow.version = "1.0"
-    workflow.graph = json.dumps({"nodes": [], "edges": []})
+
+    graph_data = {"nodes": [], "edges": []}
+    workflow.graph = json.dumps(graph_data)
     workflow.features = json.dumps({"file_upload": {"enabled": False}})
+    workflow.created_by = "test-user-id"
+    workflow.created_at = datetime.now(UTC).replace(tzinfo=None)
+    workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
+    workflow._environment_variables = "{}"
+    workflow._conversation_variables = "{}"

     return workflow


 @pytest.fixture
-def mock_workflow_run():
-    workflow_run = MagicMock(spec=WorkflowRun)
+def real_workflow_run():
+    workflow_run = WorkflowRun()
     workflow_run.id = "test-workflow-run-id"
     workflow_run.tenant_id = "test-tenant-id"
     workflow_run.app_id = "test-app-id"
     workflow_run.workflow_id = "test-workflow-id"
     workflow_run.sequence_number = 1
     workflow_run.type = "chat"
     workflow_run.triggered_from = "app-run"
     workflow_run.version = "1.0"
     workflow_run.graph = json.dumps({"nodes": [], "edges": []})
     workflow_run.inputs = json.dumps({"query": "test query"})
     workflow_run.status = WorkflowRunStatus.RUNNING
     workflow_run.outputs = json.dumps({"answer": "test answer"})
     workflow_run.created_by_role = CreatorUserRole.ACCOUNT
     workflow_run.created_by = "test-user-id"
     workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
-    workflow_run.inputs_dict = {"query": "test query"}
-    workflow_run.outputs_dict = {"answer": "test answer"}

     return workflow_run


 def test_init(
-    workflow_cycle_manager, mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository
+    workflow_cycle_manager,
+    real_app_generate_entity,
+    real_workflow_system_variables,
+    mock_workflow_execution_repository,
+    mock_node_execution_repository,
 ):
     """Test initialization of WorkflowCycleManager"""
-    assert workflow_cycle_manager._workflow_run is None
-    assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity
-    assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables
+    assert workflow_cycle_manager._application_generate_entity == real_app_generate_entity
+    assert workflow_cycle_manager._workflow_system_variables == real_workflow_system_variables
+    assert workflow_cycle_manager._workflow_execution_repository == mock_workflow_execution_repository
    assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository


-def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_workflow):
-    """Test _handle_workflow_run_start method"""
+def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, real_workflow):
+    """Test handle_workflow_run_start method"""
     # Mock session.scalar to return the workflow and max sequence
-    mock_session.scalar.side_effect = [mock_workflow, 5]
+    mock_session.scalar.side_effect = [real_workflow, 5]

     # Call the method
-    workflow_run = workflow_cycle_manager._handle_workflow_run_start(
+    workflow_execution = workflow_cycle_manager.handle_workflow_run_start(
         session=mock_session,
         workflow_id="test-workflow-id",
         user_id="test-user-id",
         created_by_role=CreatorUserRole.ACCOUNT,
     )

     # Verify the result
-    assert workflow_run.tenant_id == mock_workflow.tenant_id
-    assert workflow_run.app_id == mock_workflow.app_id
-    assert workflow_run.workflow_id == mock_workflow.id
-    assert workflow_run.sequence_number == 6  # max_sequence + 1
-    assert workflow_run.status == WorkflowRunStatus.RUNNING
-    assert workflow_run.created_by_role == CreatorUserRole.ACCOUNT
-    assert workflow_run.created_by == "test-user-id"
+    assert workflow_execution.workflow_id == real_workflow.id
+    assert workflow_execution.sequence_number == 6  # max_sequence + 1

-    # Verify session.add was called
-    mock_session.add.assert_called_once_with(workflow_run)
+    # Verify the workflow_execution_repository.save was called
+    workflow_cycle_manager._workflow_execution_repository.save.assert_called_once_with(workflow_execution)


-def test_handle_workflow_run_success(workflow_cycle_manager, mock_session, mock_workflow_run):
-    """Test _handle_workflow_run_success method"""
-    # Mock _get_workflow_run to return the mock_workflow_run
-    with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-        # Call the method
-        result = workflow_cycle_manager._handle_workflow_run_success(
-            session=mock_session,
-            workflow_run_id="test-workflow-run-id",
-            start_at=time.perf_counter() - 10,  # 10 seconds ago
-            total_tokens=100,
-            total_steps=5,
-            outputs={"answer": "test answer"},
-        )
-
-        # Verify the result
-        assert result == mock_workflow_run
-        assert result.status == WorkflowRunStatus.SUCCEEDED
-        assert result.outputs == json.dumps({"answer": "test answer"})
-        assert result.total_tokens == 100
-        assert result.total_steps == 5
-        assert result.finished_at is not None
+def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execution_repository):
+    """Test handle_workflow_run_success method"""
+    # Create a real WorkflowExecution
+    workflow_execution = WorkflowExecution(
+        id="test-workflow-run-id",
+        workflow_id="test-workflow-id",
+        workflow_version="1.0",
+        sequence_number=1,
+        type=WorkflowType.CHAT,
+        graph={"nodes": [], "edges": []},
+        inputs={"query": "test query"},
+        started_at=datetime.now(UTC).replace(tzinfo=None),
+    )
+
+    # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+    # Call the method
+    result = workflow_cycle_manager.handle_workflow_run_success(
+        workflow_run_id="test-workflow-run-id",
+        total_tokens=100,
+        total_steps=5,
+        outputs={"answer": "test answer"},
+    )
+
+    # Verify the result
+    assert result == workflow_execution
+    assert result.status == WorkflowExecutionStatus.SUCCEEDED
+    assert result.outputs == {"answer": "test answer"}
+    assert result.total_tokens == 100
+    assert result.total_steps == 5
+    assert result.finished_at is not None


-def test_handle_workflow_run_failed(workflow_cycle_manager, mock_session, mock_workflow_run):
-    """Test _handle_workflow_run_failed method"""
-    # Mock _get_workflow_run to return the mock_workflow_run
-    with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-        # Mock get_running_executions to return an empty list
-        workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
-
-        # Call the method
-        result = workflow_cycle_manager._handle_workflow_run_failed(
-            session=mock_session,
-            workflow_run_id="test-workflow-run-id",
-            start_at=time.perf_counter() - 10,  # 10 seconds ago
-            total_tokens=50,
-            total_steps=3,
-            status=WorkflowRunStatus.FAILED,
-            error="Test error message",
-        )
-
-        # Verify the result
-        assert result == mock_workflow_run
-        assert result.status == WorkflowRunStatus.FAILED.value
-        assert result.error == "Test error message"
-        assert result.total_tokens == 50
-        assert result.total_steps == 3
-        assert result.finished_at is not None
+def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execution_repository):
+    """Test handle_workflow_run_failed method"""
+    # Create a real WorkflowExecution
+    workflow_execution = WorkflowExecution(
+        id="test-workflow-run-id",
+        workflow_id="test-workflow-id",
+        workflow_version="1.0",
+        sequence_number=1,
+        type=WorkflowType.CHAT,
+        graph={"nodes": [], "edges": []},
+        inputs={"query": "test query"},
+        started_at=datetime.now(UTC).replace(tzinfo=None),
+    )
+
+    # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+    # Mock get_running_executions to return an empty list
+    workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
+
+    # Call the method
+    result = workflow_cycle_manager.handle_workflow_run_failed(
+        workflow_run_id="test-workflow-run-id",
+        total_tokens=50,
+        total_steps=3,
+        status=WorkflowRunStatus.FAILED,
+        error_message="Test error message",
+    )
+
+    # Verify the result
+    assert result == workflow_execution
+    assert result.status == WorkflowExecutionStatus(WorkflowRunStatus.FAILED.value)
+    assert result.error_message == "Test error message"
+    assert result.total_tokens == 50
+    assert result.total_steps == 3
+    assert result.finished_at is not None


-def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
-    """Test _handle_node_execution_start method"""
+def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execution_repository):
+    """Test handle_node_execution_start method"""
+    # Create a real WorkflowExecution
+    workflow_execution = WorkflowExecution(
+        id="test-workflow-execution-id",
+        workflow_id="test-workflow-id",
+        workflow_version="1.0",
+        sequence_number=1,
+        type=WorkflowType.CHAT,
+        graph={"nodes": [], "edges": []},
+        inputs={"query": "test query"},
+        started_at=datetime.now(UTC).replace(tzinfo=None),
+    )
+
+    # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
     # Create a mock event
     event = MagicMock(spec=QueueNodeStartedEvent)
     event.node_execution_id = "test-node-execution-id"

@@ -207,129 +300,171 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
     event.in_loop_id = "test-loop-id"

     # Call the method
-    result = workflow_cycle_manager._handle_node_execution_start(
-        workflow_run=mock_workflow_run,
+    result = workflow_cycle_manager.handle_node_execution_start(
+        workflow_execution_id=workflow_execution.id,
         event=event,
     )

     # Verify the result
-    # NodeExecution doesn't have tenant_id attribute, it's handled at repository level
-    # assert result.tenant_id == mock_workflow_run.tenant_id
-    # assert result.app_id == mock_workflow_run.app_id
-    assert result.workflow_id == mock_workflow_run.workflow_id
-    assert result.workflow_run_id == mock_workflow_run.id
+    assert result.workflow_id == workflow_execution.workflow_id
+    assert result.workflow_run_id == workflow_execution.id
     assert result.node_execution_id == event.node_execution_id
     assert result.node_id == event.node_id
     assert result.node_type == event.node_type
     assert result.title == event.node_data.title
-    assert result.status == WorkflowNodeExecutionStatus.RUNNING.value
-    # NodeExecution doesn't have created_by_role and created_by attributes, they're handled at repository level
-    # assert result.created_by_role == mock_workflow_run.created_by_role
-    # assert result.created_by == mock_workflow_run.created_by
+    assert result.status == NodeExecutionStatus.RUNNING

     # Verify save was called
     workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result)


-def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run):
-    """Test _get_workflow_run method"""
-    # Mock session.scalar to return the workflow run
-    mock_session.scalar.return_value = mock_workflow_run
-
-    # Call the method
-    result = workflow_cycle_manager._get_workflow_run(
-        session=mock_session,
-        workflow_run_id="test-workflow-run-id",
-    )
+def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_workflow_execution_repository):
+    """Test _get_workflow_execution_or_raise_error method"""
+    # Create a real WorkflowExecution
+    workflow_execution = WorkflowExecution(
+        id="test-workflow-run-id",
+        workflow_id="test-workflow-id",
+        workflow_version="1.0",
+        sequence_number=1,
+        type=WorkflowType.CHAT,
+        graph={"nodes": [], "edges": []},
+        inputs={"query": "test query"},
+        started_at=datetime.now(UTC).replace(tzinfo=None),
+    )
+
+    # Mock the repository get method to return the real execution
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+    # Call the method
+    result = workflow_cycle_manager._get_workflow_execution_or_raise_error("test-workflow-run-id")

     # Verify the result
-    assert result == mock_workflow_run
-    assert workflow_cycle_manager._workflow_run == mock_workflow_run
+    assert result == workflow_execution
+
+    # Test error case
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = None
+
+    # Expect an error when execution is not found
+    with pytest.raises(ValueError):
+        workflow_cycle_manager._get_workflow_execution_or_raise_error("non-existent-id")


 def test_handle_workflow_node_execution_success(workflow_cycle_manager):
-    """Test _handle_workflow_node_execution_success method"""
+    """Test handle_workflow_node_execution_success method"""
     # Create a mock event
     event = MagicMock(spec=QueueNodeSucceededEvent)
     event.node_execution_id = "test-node-execution-id"
     event.inputs = {"input": "test input"}
     event.process_data = {"process": "test process"}
     event.outputs = {"output": "test output"}
-    event.execution_metadata = {"metadata": "test metadata"}
+    event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100}
     event.start_at = datetime.now(UTC).replace(tzinfo=None)

-    # Create a mock node execution
-    node_execution = MagicMock()
-    node_execution.node_execution_id = "test-node-execution-id"
+    # Create a real node execution
+    node_execution = NodeExecution(
+        id="test-node-execution-record-id",
+        node_execution_id="test-node-execution-id",
+        workflow_id="test-workflow-id",
+        workflow_run_id="test-workflow-run-id",
+        index=1,
+        node_id="test-node-id",
+        node_type=NodeType.LLM,
+        title="Test Node",
+        created_at=datetime.now(UTC).replace(tzinfo=None),
+    )

     # Mock the repository to return the node execution
     workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

     # Call the method
-    result = workflow_cycle_manager._handle_workflow_node_execution_success(
+    result = workflow_cycle_manager.handle_workflow_node_execution_success(
         event=event,
     )

     # Verify the result
     assert result == node_execution
-    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
+    assert result.status == NodeExecutionStatus.SUCCEEDED

     # Verify save was called
     workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution)


-def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run):
-    """Test _handle_workflow_run_partial_success method"""
-    # Mock _get_workflow_run to return the mock_workflow_run
-    with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-        # Call the method
-        result = workflow_cycle_manager._handle_workflow_run_partial_success(
-            session=mock_session,
-            workflow_run_id="test-workflow-run-id",
-            start_at=time.perf_counter() - 10,  # 10 seconds ago
-            total_tokens=75,
-            total_steps=4,
-            outputs={"partial_answer": "test partial answer"},
-            exceptions_count=2,
-        )
-
-        # Verify the result
-        assert result == mock_workflow_run
-        assert result.status == WorkflowRunStatus.PARTIAL_SUCCEEDED.value
-        assert result.outputs == json.dumps({"partial_answer": "test partial answer"})
-        assert result.total_tokens == 75
-        assert result.total_steps == 4
-        assert result.exceptions_count == 2
-        assert result.finished_at is not None
+def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workflow_execution_repository):
+    """Test handle_workflow_run_partial_success method"""
+    # Create a real WorkflowExecution
+    workflow_execution = WorkflowExecution(
+        id="test-workflow-run-id",
+        workflow_id="test-workflow-id",
+        workflow_version="1.0",
+        sequence_number=1,
+        type=WorkflowType.CHAT,
+        graph={"nodes": [], "edges": []},
+        inputs={"query": "test query"},
+        started_at=datetime.now(UTC).replace(tzinfo=None),
+    )
+
+    # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+    workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+    # Call the method
+    result = workflow_cycle_manager.handle_workflow_run_partial_success(
+        workflow_run_id="test-workflow-run-id",
+        total_tokens=75,
+        total_steps=4,
+        outputs={"partial_answer": "test partial answer"},
+        exceptions_count=2,
+    )
+
+    # Verify the result
+    assert result == workflow_execution
+    assert result.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED
+    assert result.outputs == {"partial_answer": "test partial answer"}
+    assert result.total_tokens == 75
+    assert result.total_steps == 4
+    assert result.exceptions_count == 2
+    assert result.finished_at is not None


 def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
-    """Test _handle_workflow_node_execution_failed method"""
+    """Test handle_workflow_node_execution_failed method"""
     # Create a mock event
     event = MagicMock(spec=QueueNodeFailedEvent)
     event.node_execution_id = "test-node-execution-id"
     event.inputs = {"input": "test input"}
     event.process_data = {"process": "test process"}
     event.outputs = {"output": "test output"}
-    event.execution_metadata = {"metadata": "test metadata"}
+    event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100}
     event.start_at = datetime.now(UTC).replace(tzinfo=None)
     event.error = "Test error message"

-    # Create a mock node execution
-    node_execution = MagicMock()
-    node_execution.node_execution_id = "test-node-execution-id"
+    # Create a real node execution
+    node_execution = NodeExecution(
+        id="test-node-execution-record-id",
+        node_execution_id="test-node-execution-id",
+        workflow_id="test-workflow-id",
+        workflow_run_id="test-workflow-run-id",
+        index=1,
+        node_id="test-node-id",
+        node_type=NodeType.LLM,
+        title="Test Node",
+        created_at=datetime.now(UTC).replace(tzinfo=None),
+    )

     # Mock the repository to return the node execution
     workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

     # Call the method
-    result = workflow_cycle_manager._handle_workflow_node_execution_failed(
+    result = workflow_cycle_manager.handle_workflow_node_execution_failed(
         event=event,
     )

     # Verify the result
     assert result == node_execution
-    assert result.status == WorkflowNodeExecutionStatus.FAILED.value
+    assert result.status == NodeExecutionStatus.FAILED
     assert result.error == "Test error message"

     # Verify save was called

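The rewrite of this test module follows a single pattern: fixtures that used `MagicMock(spec=...)` now construct real domain objects, and only the repositories at the process boundary stay mocked. A minimal sketch of that trade-off with hypothetical classes — real objects exercise real invariants, mocks only record calls:

    from unittest.mock import MagicMock

    class Execution:
        def __init__(self, id: str):
            self.id = id
            self.finished = False

        def finish(self) -> None:
            self.finished = True

    class Repo:
        def save(self, execution: Execution) -> None:
            raise NotImplementedError  # a real implementation would persist

    def test_finish_marks_execution():
        execution = Execution(id="run-1")   # real object: real behavior under test
        repo = MagicMock(spec=Repo)         # mocked edge: no database required
        execution.finish()
        repo.save(execution)
        assert execution.finished is True
        repo.save.assert_called_once_with(execution)
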
@@ -1,10 +1,11 @@
+import json
 from unittest import mock
 from uuid import uuid4

 import contexts
 from constants import HIDDEN_VALUE
 from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable
-from models.workflow import Workflow
+from models.workflow import Workflow, WorkflowNodeExecution


 def test_environment_variables():

@@ -137,3 +138,14 @@ def test_to_dict():
     workflow_dict = workflow.to_dict(include_secret=True)
     assert workflow_dict["environment_variables"][0]["value"] == "secret"
     assert workflow_dict["environment_variables"][1]["value"] == "text"
+
+
+class TestWorkflowNodeExecution:
+    def test_execution_metadata_dict(self):
+        node_exec = WorkflowNodeExecution()
+        node_exec.execution_metadata = None
+        assert node_exec.execution_metadata_dict == {}
+
+        original = {"a": 1, "b": ["2"]}
+        node_exec.execution_metadata = json.dumps(original)
+        assert node_exec.execution_metadata_dict == original

@@ -4,12 +4,14 @@ Unit tests for the SQLAlchemy implementation of WorkflowNodeExecutionRepository.

 import json
 from datetime import datetime
+from decimal import Decimal
 from unittest.mock import MagicMock, PropertyMock

 import pytest
 from pytest_mock import MockerFixture
 from sqlalchemy.orm import Session, sessionmaker

+from core.model_runtime.utils.encoders import jsonable_encoder
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.entities.node_entities import NodeRunMetadataKey
 from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus

@@ -298,7 +300,7 @@ def test_to_db_model(repository):
         status=NodeExecutionStatus.RUNNING,
         error=None,
         elapsed_time=1.5,
-        metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100},
+        metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100, NodeRunMetadataKey.TOTAL_PRICE: Decimal("0.0")},
         created_at=datetime.now(),
         finished_at=None,
     )

@@ -324,7 +326,7 @@ def test_to_db_model(repository):
     assert db_model.inputs_dict == domain_model.inputs
     assert db_model.process_data_dict == domain_model.process_data
     assert db_model.outputs_dict == domain_model.outputs
-    assert db_model.execution_metadata_dict == domain_model.metadata
+    assert db_model.execution_metadata_dict == jsonable_encoder(domain_model.metadata)

     assert db_model.status == domain_model.status
     assert db_model.error == domain_model.error

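The encoder appears in the last assertion because `Decimal` cannot pass through `json.dumps` unchanged, and the DB model stores execution metadata as a JSON string. A standard-library sketch of the problem and the shape of the fix (`to_jsonable` below is a hypothetical stand-in for a helper like `jsonable_encoder`):

    import json
    from decimal import Decimal

    metadata = {"total_tokens": 100, "total_price": Decimal("0.0")}

    try:
        json.dumps(metadata)
    except TypeError as exc:
        print(exc)  # Object of type Decimal is not JSON serializable

    def to_jsonable(value: dict) -> dict:
        # Downcast Decimal to float so the dict survives a JSON round trip;
        # the domain side of the assertion must be encoded the same way.
        return {k: float(v) if isinstance(v, Decimal) else v for k, v in value.items()}

    print(json.dumps(to_jsonable(metadata)))  # {"total_tokens": 100, "total_price": 0.0}
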
api/uv.lock (73 changes)

@@ -540,6 +540,25 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178, upload-time = "2025-01-14T20:20:25.48Z" },
 ]

+[[package]]
+name = "boto3-stubs"
+version = "1.38.20"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "botocore-stubs" },
+    { name = "types-s3transfer" },
+    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4e/89/824fb0a9bebf9f1d6df70bb145f8e9c24fc4d918d4050b5d4dca075cc292/boto3_stubs-1.38.20.tar.gz", hash = "sha256:7f1d7bfff7355eb4d17e7984fbf27f44709cd8484abb54bd6ba34ec73a552605", size = 99063, upload-time = "2025-05-20T23:30:19.84Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/57/69/cfc45dfce3b4ea417f9aec708ade1eda7f280fe8ae7feca796b036619587/boto3_stubs-1.38.20-py3-none-any.whl", hash = "sha256:5406da868980a3854cc9b57db150c6f2e39a4fe4a58f2872e61ac5a3d46f734e", size = 68667, upload-time = "2025-05-20T23:30:12.393Z" },
+]
+
+[package.optional-dependencies]
+bedrock-runtime = [
+    { name = "mypy-boto3-bedrock-runtime" },
+]
+
 [[package]]
 name = "botocore"
 version = "1.35.99"

@@ -554,6 +573,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216, upload-time = "2025-01-14T20:20:06.427Z" },
 ]

+[[package]]
+name = "botocore-stubs"
+version = "1.38.19"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "types-awscrt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/43/70/6204c97f8d8362364f11c16085566abdcaa114c264d3a4d709ff697b203b/botocore_stubs-1.38.19.tar.gz", hash = "sha256:84f67a42bb240a8ea0c5fe4f05d497cc411177db600bc7012182e499ac24bf19", size = 42269, upload-time = "2025-05-19T20:18:13.556Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/b4/ce/28b143452c22b678678d832bf8b41218e3d319bf94062b48c28fe5d81163/botocore_stubs-1.38.19-py3-none-any.whl", hash = "sha256:66fd7d231c21134a12acbe313ef7a6b152cbf9bfd7bfa12a62f8c33e94737e26", size = 65603, upload-time = "2025-05-19T20:18:10.445Z" },
+]
+
 [[package]]
 name = "bottleneck"
 version = "1.4.2"

@@ -1260,6 +1291,7 @@ dependencies = [

 [package.dev-dependencies]
 dev = [
+    { name = "boto3-stubs" },
     { name = "coverage" },
     { name = "dotenv-linter" },
     { name = "faker" },

@@ -1399,7 +1431,7 @@ requires-dist = [
     { name = "opentelemetry-sdk", specifier = "==1.27.0" },
     { name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" },
     { name = "opentelemetry-util-http", specifier = "==0.48b0" },
-    { name = "opik", specifier = "~=1.3.4" },
+    { name = "opik", specifier = "~=1.7.25" },
     { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
     { name = "pandas-stubs", specifier = "~=2.2.3.241009" },
     { name = "pandoc", specifier = "~=2.4" },

@@ -1430,6 +1462,7 @@ requires-dist = [

 [package.metadata.requires-dev]
 dev = [
+    { name = "boto3-stubs", specifier = ">=1.38.20" },
     { name = "coverage", specifier = "~=7.2.4" },
     { name = "dotenv-linter", specifier = "~=0.5.0" },
     { name = "faker", specifier = "~=32.1.0" },

@@ -3201,6 +3234,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload-time = "2025-02-05T03:50:08.348Z" },
 ]

+[[package]]
+name = "mypy-boto3-bedrock-runtime"
+version = "1.38.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7d/55/56ce6f23d7fb98ce5b8a4261f089890bc94250666ea7089539dab55f6c25/mypy_boto3_bedrock_runtime-1.38.4.tar.gz", hash = "sha256:315a5f84c014c54e5406fdb80b030aba5cc79eb27982aff3d09ef331fb2cdd4d", size = 26169, upload-time = "2025-04-28T19:26:13.437Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/43/eb/3015c6504540ca4888789ee14f47590c0340b748a33b059eeb6a48b406bb/mypy_boto3_bedrock_runtime-1.38.4-py3-none-any.whl", hash = "sha256:af14320532e9b798095129a3307f4b186ba80258917bb31410cda7f423592d72", size = 31858, upload-time = "2025-04-28T19:26:09.667Z" },
+]
+
 [[package]]
 name = "mypy-extensions"
 version = "1.1.0"

@@ -3692,11 +3737,13 @@ wheels = [

 [[package]]
 name = "opik"
-version = "1.3.6"
+version = "1.7.25"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
+    { name = "boto3-stubs", extra = ["bedrock-runtime"] },
     { name = "click" },
     { name = "httpx" },
+    { name = "jinja2" },
     { name = "levenshtein" },
     { name = "litellm" },
     { name = "openai" },

@@ -3709,9 +3756,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "uuid6" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/d8/16/b37208d6a77f3cc74750cff4e0970e6f596aef0f491a675a40aa879157e6/opik-1.3.6.tar.gz", hash = "sha256:25d6fa8b7aa1ef23d10d598040e539440912c12b765eabfc75c8780bbbfc8ad3", size = 177174, upload-time = "2025-01-15T17:20:48.71Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/5c/dd/313895410761ee3eb36c1141fa339254c093b3cdfceb79b111c80eb396be/opik-1.7.25.tar.gz", hash = "sha256:5fcdb05bbc98e995f3eea2f94096f98c5ff7a2aca2c895d50636c44d00a07d4b", size = 286950, upload-time = "2025-05-20T13:51:16.6Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/d4/3f/e9d14a97f85d34505770b7c7715bd72bbfc40a778163818f0d3e871264bb/opik-1.3.6-py3-none-any.whl", hash = "sha256:888973c2a1276d68c9b3cf26d8078db8aa675d2c907edda328cdab4995a8e29b", size = 341630, upload-time = "2025-01-15T17:20:45.983Z" },
+    { url = "https://files.pythonhosted.org/packages/63/0a/daee58db3cdd56681672dbc62e5a71200af6d41f34bac2425d1556d3e004/opik-1.7.25-py3-none-any.whl", hash = "sha256:595fc2e6794e35d87449f64dc5d6092705645575d2c34469d04dc2bbe44dd32f", size = 547198, upload-time = "2025-05-20T13:51:14.964Z" },
 ]

@@ -5557,6 +5604,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/0e/18/1016ffd4c7775f24371f6a0309483dc5597e8245b5add67924e54ea3b83a/types_aiofiles-24.1.0.20250326-py3-none-any.whl", hash = "sha256:dfb58c9aa18bd449e80fb5d7f49dc3dd20d31de920a46223a61798ee4a521a70", size = 14344, upload-time = "2025-03-26T02:53:31.856Z" },
 ]

+[[package]]
+name = "types-awscrt"
+version = "0.27.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/36/6c/583522cfb3c330e92e726af517a91c13247e555e021791a60f1b03c6ff16/types_awscrt-0.27.2.tar.gz", hash = "sha256:acd04f57119eb15626ab0ba9157fc24672421de56e7bd7b9f61681fedee44e91", size = 16304, upload-time = "2025-05-16T03:10:08.712Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4c/82/1ee2e5c9d28deac086ab3a6ff07c8bc393ef013a083f546c623699881715/types_awscrt-0.27.2-py3-none-any.whl", hash = "sha256:49a045f25bbd5ad2865f314512afced933aed35ddbafc252e2268efa8a787e4e", size = 37761, upload-time = "2025-05-16T03:10:07.466Z" },
+]
+
 [[package]]
 name = "types-beautifulsoup4"
 version = "4.12.0.20250204"

@@ -5854,6 +5910,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/54/b1/f4ba392a3341cd9d613f2dce855e82471073c5ec34996fe84ac3857956d0/types_requests_oauthlib-2.0.0.20250306-py3-none-any.whl", hash = "sha256:37707de81d9ce54894afcccd70d4a845dbe4c59e747908faaeba59a96453d993", size = 14446, upload-time = "2025-03-06T02:49:24.364Z" },
 ]

+[[package]]
+name = "types-s3transfer"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fb/d5/830e9efe91a26601a2bebde6f299239d2d26e542f5d4b3bc7e8c23c81a3f/types_s3transfer-0.12.0.tar.gz", hash = "sha256:f8f59201481e904362873bf0be3267f259d60ad946ebdfcb847d092a1fa26f98", size = 14096, upload-time = "2025-04-23T00:38:19.131Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/fc/43/6097275152463ac9bacf1e00aab30bc6682bf45f6a031be8bf029c030ba2/types_s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:101bbc5b7f00b71512374df881f480fc6bf63c948b5098ab024bf3370fbfb0e8", size = 19553, upload-time = "2025-04-23T00:38:17.865Z" },
+]
+
 [[package]]
 name = "types-shapely"
 version = "2.0.0.20250404"

@@ -531,6 +531,7 @@ RELYT_DATABASE=postgres
 OPENSEARCH_HOST=opensearch
 OPENSEARCH_PORT=9200
 OPENSEARCH_SECURE=true
+OPENSEARCH_VERIFY_CERTS=true
 OPENSEARCH_AUTH_METHOD=basic
 OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin

@@ -1055,7 +1056,7 @@ PLUGIN_MAX_EXECUTION_TIMEOUT=600
 PIP_MIRROR_URL=

 # https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example
-# Plugin storage type, local aws_s3 tencent_cos azure_blob
+# Plugin storage type, local aws_s3 tencent_cos azure_blob aliyun_oss
 PLUGIN_STORAGE_TYPE=local
 PLUGIN_STORAGE_LOCAL_ROOT=/app/storage
 PLUGIN_WORKING_PATH=/app/storage/cwd

@@ -1078,6 +1079,13 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
 PLUGIN_TENCENT_COS_SECRET_KEY=
 PLUGIN_TENCENT_COS_SECRET_ID=
 PLUGIN_TENCENT_COS_REGION=
+# Plugin oss aliyun oss
+PLUGIN_ALIYUN_OSS_REGION=
+PLUGIN_ALIYUN_OSS_ENDPOINT=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
+PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
+PLUGIN_ALIYUN_OSS_PATH=

 # ------------------------------
 # OTLP Collector Configuration

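`OPENSEARCH_SECURE` and the new `OPENSEARCH_VERIFY_CERTS` map onto two separate client knobs: whether to speak TLS at all, and whether to validate the server's certificate. A hedged sketch of how such settings typically reach an opensearch-py client (the constructor parameters are opensearch-py's; the env wiring is illustrative):

    import os

    from opensearchpy import OpenSearch  # pip install opensearch-py

    def build_client() -> OpenSearch:
        secure = os.environ.get("OPENSEARCH_SECURE", "true").lower() == "true"
        verify = os.environ.get("OPENSEARCH_VERIFY_CERTS", "true").lower() == "true"
        return OpenSearch(
            hosts=[{
                "host": os.environ.get("OPENSEARCH_HOST", "opensearch"),
                "port": int(os.environ.get("OPENSEARCH_PORT", "9200")),
            }],
            http_auth=(
                os.environ.get("OPENSEARCH_USER", "admin"),
                os.environ.get("OPENSEARCH_PASSWORD", "admin"),
            ),
            use_ssl=secure,        # speak TLS to the cluster
            verify_certs=verify,   # validate the certificate; keep true in production
        )
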
@@ -178,6 +178,12 @@ services:
       TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
       TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
       TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+      ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+      ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+      ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+      ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+      ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+      ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
     ports:
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
     volumes:

@@ -438,6 +444,7 @@ services:
       OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
       OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
       OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
+      OB_SERVER_IP: 127.0.0.1
       MODE: MINI
     ports:
       - "${OCEANBASE_VECTOR_PORT:-2881}:2881"

@@ -115,6 +115,12 @@ services:
       TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
       TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
       TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+      ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+      ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+      ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+      ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+      ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+      ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
     ports:
       - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}"
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"

@@ -227,6 +227,7 @@ x-shared-env: &shared-api-worker-env
  OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
  OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
  OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true}
+  OPENSEARCH_VERIFY_CERTS: ${OPENSEARCH_VERIFY_CERTS:-true}
  OPENSEARCH_AUTH_METHOD: ${OPENSEARCH_AUTH_METHOD:-basic}
  OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
  OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin}

@@ -476,6 +477,12 @@ x-shared-env: &shared-api-worker-env
  PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
  PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
  PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+  PLUGIN_ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+  PLUGIN_ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+  PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+  PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+  PLUGIN_ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+  PLUGIN_ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
  ENABLE_OTEL: ${ENABLE_OTEL:-false}
  OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318}
  OTLP_API_KEY: ${OTLP_API_KEY:-}

@@ -669,6 +676,12 @@ services:
       TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
       TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
       TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+      ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+      ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+      ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+      ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+      ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+      ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
     ports:
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
     volumes:

@@ -929,6 +942,7 @@ services:
       OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
       OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
       OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
+      OB_SERVER_IP: 127.0.0.1
       MODE: MINI
     ports:
       - "${OCEANBASE_VECTOR_PORT:-2881}:2881"


@ -145,3 +145,10 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
PLUGIN_TENCENT_COS_SECRET_KEY=
PLUGIN_TENCENT_COS_SECRET_ID=
PLUGIN_TENCENT_COS_REGION=
# Plugin OSS: Aliyun OSS
PLUGIN_ALIYUN_OSS_REGION=
PLUGIN_ALIYUN_OSS_ENDPOINT=
PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
PLUGIN_ALIYUN_OSS_PATH=

@ -31,126 +31,98 @@ type Props = {
  appDetail: App
}

const Annotation: FC<Props> = ({
  appDetail,
}) => {
const Annotation: FC<Props> = (props) => {
  const { appDetail } = props
  const { t } = useTranslation()
  const [isShowEdit, setIsShowEdit] = React.useState(false)
  const [isShowEdit, setIsShowEdit] = useState(false)
  const [annotationConfig, setAnnotationConfig] = useState<AnnotationReplyConfig | null>(null)
  const [isChatApp, setIsChatApp] = useState(false)
  const [isChatApp] = useState(appDetail.mode !== 'completion')
  const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now())
  const { plan, enableBilling } = useProviderContext()
  const isAnnotationFull = enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse
  const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false)
  const [queryParams, setQueryParams] = useState<QueryParam>({})
  const [currPage, setCurrPage] = useState(0)
  const [limit, setLimit] = useState(APP_PAGE_LIMIT)
  const [list, setList] = useState<AnnotationItem[]>([])
  const [total, setTotal] = useState(0)
  const [isLoading, setIsLoading] = useState(false)
  const [controlUpdateList, setControlUpdateList] = useState(Date.now())
  const [currItem, setCurrItem] = useState<AnnotationItem | null>(null)
  const [isShowViewModal, setIsShowViewModal] = useState(false)
  const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })

  const fetchAnnotationConfig = async () => {
    const res = await doFetchAnnotationConfig(appDetail.id)
    setAnnotationConfig(res as AnnotationReplyConfig)
    return (res as AnnotationReplyConfig).id
  }
  useEffect(() => {
    const isChatApp = appDetail.mode !== 'completion'
    setIsChatApp(isChatApp)
    if (isChatApp)
      fetchAnnotationConfig()
  }, [])
  const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now())
  const { plan, enableBilling } = useProviderContext()
  const isAnnotationFull = (enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse)
  const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false)
  const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => {
    let isCompleted = false
    while (!isCompleted) {
      const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId)
      isCompleted = res.job_status === JobStatus.completed
      if (isCompleted)
        break

  useEffect(() => {
    if (isChatApp) fetchAnnotationConfig()
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])

  const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => {
    while (true) {
      const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId)
      if (res.job_status === JobStatus.completed) break
      await sleep(2000)
    }
  }

  const [queryParams, setQueryParams] = useState<QueryParam>({})
  const [currPage, setCurrPage] = React.useState<number>(0)
  const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })
  const [limit, setLimit] = React.useState<number>(APP_PAGE_LIMIT)
  const query = {
    page: currPage + 1,
    limit,
    keyword: debouncedQueryParams.keyword || '',
  }

  const [controlUpdateList, setControlUpdateList] = useState(Date.now())
  const [list, setList] = useState<AnnotationItem[]>([])
  const [total, setTotal] = useState(10)
  const [isLoading, setIsLoading] = useState(false)
  const fetchList = async (page = 1) => {
    setIsLoading(true)
    try {
      const { data, total }: any = await fetchAnnotationList(appDetail.id, {
        ...query,
        page,
        limit,
        keyword: debouncedQueryParams.keyword || '',
      })
      setList(data as AnnotationItem[])
      setTotal(total)
    }
    catch {

    finally {
      setIsLoading(false)
    }
    setIsLoading(false)
  }

  useEffect(() => {
    fetchList(currPage + 1)
  }, [currPage])

  useEffect(() => {
    fetchList(1)
    setControlUpdateList(Date.now())
  }, [queryParams])
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [currPage, limit, debouncedQueryParams])

  const handleAdd = async (payload: AnnotationItemBasic) => {
    await addAnnotation(appDetail.id, {
      ...payload,
    })
    Toast.notify({
      message: t('common.api.actionSuccess'),
      type: 'success',
    })
    await addAnnotation(appDetail.id, payload)
    Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
    fetchList()
    setControlUpdateList(Date.now())
  }

  const handleRemove = async (id: string) => {
    await delAnnotation(appDetail.id, id)
    Toast.notify({
      message: t('common.api.actionSuccess'),
      type: 'success',
    })
    Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
    fetchList()
    setControlUpdateList(Date.now())
  }

  const [currItem, setCurrItem] = useState<AnnotationItem | null>(list[0])
  const [isShowViewModal, setIsShowViewModal] = useState(false)
  useEffect(() => {
    if (!isShowEdit)
      setControlRefreshSwitch(Date.now())
  }, [isShowEdit])
  const handleView = (item: AnnotationItem) => {
    setCurrItem(item)
    setIsShowViewModal(true)
  }

  const handleSave = async (question: string, answer: string) => {
    await editAnnotation(appDetail.id, (currItem as AnnotationItem).id, {
      question,
      answer,
    })
    Toast.notify({
      message: t('common.api.actionSuccess'),
      type: 'success',
    })
    if (!currItem) return
    await editAnnotation(appDetail.id, currItem.id, { question, answer })
    Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
    fetchList()
    setControlUpdateList(Date.now())
  }

  useEffect(() => {
    if (!isShowEdit) setControlRefreshSwitch(Date.now())
  }, [isShowEdit])

  return (
    <div className='flex h-full flex-col'>
      <p className='system-sm-regular text-text-tertiary'>{t('appLog.description')}</p>

@ -211,6 +183,7 @@ const Annotation: FC<Props> = ({
      </Filter>
      {isLoading
        ? <Loading type='app' />
        // eslint-disable-next-line sonarjs/no-nested-conditional
        : total > 0
          ? <List
            list={list}
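
The ensureJobCompleted rewrite above drops the isCompleted flag in favour of a plain while (true) loop with an early break. The same polling pattern, generalized as a standalone sketch (pollUntil and its parameters are illustrative names, not part of the Dify codebase):

const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms))

// Poll `check` every `intervalMs` until it reports completion or `maxAttempts` is hit.
async function pollUntil(check: () => Promise<boolean>, intervalMs = 2000, maxAttempts = 150): Promise<void> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    if (await check())
      return
    await sleep(intervalMs)
  }
  throw new Error('polling timed out')
}

// Usage, mirroring the annotation job status check:
// await pollUntil(async () => {
//   const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId)
//   return res.job_status === JobStatus.completed
// })

Unlike the committed loop, this sketch bounds the number of attempts, which avoids spinning forever if a job never reaches the completed state.
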

@ -234,9 +234,14 @@ const ConfigModal: FC<IConfigModalProps> = ({
        )}

        <div className='!mt-5 flex h-6 items-center space-x-2'>
          <Checkbox checked={tempPayload.required} onCheck={() => handlePayloadChange('required')(!tempPayload.required)} />
          <Checkbox checked={tempPayload.required} disabled={tempPayload.hide} onCheck={() => handlePayloadChange('required')(!tempPayload.required)} />
          <span className='system-sm-semibold text-text-secondary'>{t('appDebug.variableConfig.required')}</span>
        </div>

        <div className='!mt-5 flex h-6 items-center space-x-2'>
          <Checkbox checked={tempPayload.hide} disabled={tempPayload.required} onCheck={() => handlePayloadChange('hide')(!tempPayload.hide)} />
          <span className='system-sm-semibold text-text-secondary'>{t('appDebug.variableConfig.hide')}</span>
        </div>
      </div>
    </div>
    <ModalFoot
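
Each checkbox disables the other, so a variable can be required or hidden but never both. Reduced to plain state, the invariant looks like this (a sketch; the VariableFlags type and toggle helper are illustrative, not the component's actual handlePayloadChange implementation):

type VariableFlags = { required: boolean; hide: boolean }

// Toggling one flag is only allowed while the other is off,
// mirroring the `disabled={...}` props on the two checkboxes.
function toggle(flags: VariableFlags, key: keyof VariableFlags): VariableFlags {
  const other: keyof VariableFlags = key === 'required' ? 'hide' : 'required'
  if (flags[other])
    return flags // the checkbox is disabled; ignore the click
  return { ...flags, [key]: !flags[key] }
}

// toggle({ required: false, hide: true }, 'required') -> unchanged
// toggle({ required: false, hide: false }, 'required') -> { required: true, hide: false }
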

@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next'

import { useRouter, useSearchParams } from 'next/navigation'
import { useContext, useContextSelector } from 'use-context-selector'
import { RiArrowRightLine, RiCommandLine, RiCornerDownLeftLine, RiExchange2Fill } from '@remixicon/react'
import { RiArrowRightLine, RiArrowRightSLine, RiCommandLine, RiCornerDownLeftLine, RiExchange2Fill } from '@remixicon/react'
import Link from 'next/link'
import { useDebounceFn, useKeyPress } from 'ahooks'
import Image from 'next/image'

@ -43,11 +43,12 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
  const { notify } = useContext(ToastContext)
  const mutateApps = useContextSelector(AppsContext, state => state.mutateApps)

  const [appMode, setAppMode] = useState<AppMode>('chat')
  const [appMode, setAppMode] = useState<AppMode>('advanced-chat')
  const [appIcon, setAppIcon] = useState<AppIconSelection>({ type: 'emoji', icon: '🤖', background: '#FFEAD5' })
  const [showAppIconPicker, setShowAppIconPicker] = useState(false)
  const [name, setName] = useState('')
  const [description, setDescription] = useState('')
  const [isAppTypeExpanded, setIsAppTypeExpanded] = useState(false)

  const { plan, enableBilling } = useProviderContext()
  const isAppsFull = (enableBilling && plan.usage.buildApps >= plan.total.buildApps)

@ -116,57 +117,7 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
        </div>
        <div className='flex w-[660px] flex-col gap-4'>
          <div>
            <div className='mb-2'>
              <span className='system-2xs-medium-uppercase text-text-tertiary'>{t('app.newApp.forBeginners')}</span>
            </div>
            <div className='flex flex-row gap-2'>
              <AppTypeCard
                active={appMode === 'chat'}
                title={t('app.types.chatbot')}
                description={t('app.newApp.chatbotShortDescription')}
                icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-blue-solid'>
                  <ChatBot className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                </div>}
                onClick={() => {
                  setAppMode('chat')
                }} />
              <AppTypeCard
                active={appMode === 'agent-chat'}
                title={t('app.types.agent')}
                description={t('app.newApp.agentShortDescription')}
                icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-violet-solid'>
                  <Logic className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                </div>}
                onClick={() => {
                  setAppMode('agent-chat')
                }} />
              <AppTypeCard
                active={appMode === 'completion'}
                title={t('app.newApp.completeApp')}
                description={t('app.newApp.completionShortDescription')}
                icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-teal-solid'>
                  <ListSparkle className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                </div>}
                onClick={() => {
                  setAppMode('completion')
                }} />
            </div>
          </div>
          <div>
            <div className='mb-2'>
              <span className='system-2xs-medium-uppercase text-text-tertiary'>{t('app.newApp.forAdvanced')}</span>
            </div>
            <div className='flex flex-row gap-2'>
              <AppTypeCard
                active={appMode === 'advanced-chat'}
                title={t('app.types.advanced')}
                description={t('app.newApp.advancedShortDescription')}
                icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-blue-light-solid'>
                  <BubbleTextMod className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                </div>}
                onClick={() => {
                  setAppMode('advanced-chat')
                }} />
              <AppTypeCard
                active={appMode === 'workflow'}
                title={t('app.types.workflow')}

@ -177,8 +128,63 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
                onClick={() => {
                  setAppMode('workflow')
                }} />
              <AppTypeCard
                active={appMode === 'advanced-chat'}
                title={t('app.types.advanced')}
                description={t('app.newApp.advancedShortDescription')}
                icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-blue-light-solid'>
                  <BubbleTextMod className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                </div>}
                onClick={() => {
                  setAppMode('advanced-chat')
                }} />
            </div>
          </div>
          <div>
            <div className='mb-2 flex items-center'>
              <button
                className='flex cursor-pointer items-center border-0 bg-transparent p-0'
                onClick={() => setIsAppTypeExpanded(!isAppTypeExpanded)}
              >
                <span className='system-2xs-medium-uppercase text-text-tertiary'>{t('app.newApp.forBeginners')}</span>
                <RiArrowRightSLine className={`ml-1 h-4 w-4 text-text-tertiary transition-transform ${isAppTypeExpanded ? 'rotate-90' : ''}`} />
              </button>
            </div>
            {isAppTypeExpanded && (
              <div className='flex flex-row gap-2'>
                <AppTypeCard
                  active={appMode === 'chat'}
                  title={t('app.types.chatbot')}
                  description={t('app.newApp.chatbotShortDescription')}
                  icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-blue-solid'>
                    <ChatBot className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                  </div>}
                  onClick={() => {
                    setAppMode('chat')
                  }} />
                <AppTypeCard
                  active={appMode === 'agent-chat'}
                  title={t('app.types.agent')}
                  description={t('app.newApp.agentShortDescription')}
                  icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-violet-solid'>
                    <Logic className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                  </div>}
                  onClick={() => {
                    setAppMode('agent-chat')
                  }} />
                <AppTypeCard
                  active={appMode === 'completion'}
                  title={t('app.newApp.completeApp')}
                  description={t('app.newApp.completionShortDescription')}
                  icon={<div className='flex h-6 w-6 items-center justify-center rounded-md bg-components-icon-bg-teal-solid'>
                    <ListSparkle className='h-4 w-4 text-components-avatar-shape-fill-stop-100' />
                  </div>}
                  onClick={() => {
                    setAppMode('completion')
                  }} />
              </div>
            )}
          </div>
          <Divider style={{ margin: 0 }} />
          <div className='flex items-center space-x-3'>
            <div className='flex-1'>

@ -18,7 +18,7 @@ import style from './style.module.css'
import type { ConfigParams } from './settings'
import Tooltip from '@/app/components/base/tooltip'
import AppBasic from '@/app/components/app-sidebar/basic'
import { asyncRunSafe, randomString } from '@/utils'
import { asyncRunSafe } from '@/utils'
import { basePath } from '@/utils/var'
import { useStore as useAppStore } from '@/app/components/app/store'
import Button from '@/app/components/base/button'

@ -184,7 +184,7 @@ function AppCard({
            : t('appOverview.overview.apiInfo.explanation')
        }
      />
      <div className='flex items-center gap-1'>
      <div className='flex shrink-0 items-center gap-1'>
        <Indicator color={runningStatus ? 'green' : 'yellow'} />
        <div className={`${runningStatus ? 'text-text-success' : 'text-text-warning'} system-xs-semibold-uppercase`}>
          {runningStatus

@ -210,7 +210,7 @@ function AppCard({
          content={isApp ? appUrl : apiUrl}
          className={'!size-6'}
        />
        {isApp && <ShareQRCode content={isApp ? appUrl : apiUrl} className='z-50 !size-6 rounded-md hover:bg-state-base-hover' selectorId={randomString(8)} />}
        {isApp && <ShareQRCode content={isApp ? appUrl : apiUrl} />}
        {isApp && <Divider type="vertical" className="!mx-0.5 !h-3.5 shrink-0" />}
        {/* button copy link/ button regenerate */}
        {showConfirmDelete && (

@ -94,7 +94,7 @@ const ImageInput: FC<UploaderProps> = ({
    <div
      className={classNames(
        isDragActive && 'border-primary-600',
        'relative aspect-square bg-gray-50 border-[1.5px] border-gray-200 border-dashed rounded-lg flex flex-col justify-center items-center text-gray-500')}
        'relative aspect-square border-[1.5px] border-dashed rounded-lg flex flex-col justify-center items-center text-gray-500')}
      onDragEnter={handleDragEnter}
      onDragOver={handleDragOver}
      onDragLeave={handleDragLeave}

@ -115,7 +115,7 @@ const AppIconPicker: FC<AppIconPickerProps> = ({
      className={cn(s.container, '!w-[362px] !p-0')}
    >
      {!DISABLE_UPLOAD_IMAGE_AS_ICON && <div className="w-full p-2 pb-0">
        <div className='flex items-center justify-center gap-2 rounded-xl bg-background-body p-1'>
        <div className='flex items-center justify-center gap-2 rounded-xl bg-background-body p-1 text-text-primary'>
          {tabs.map(tab => (
            <button
              key={tab.key}

@ -4,9 +4,6 @@
  align-items: flex-start;
  width: 362px;
  max-height: 552px;

  border: 0.5px solid #EAECF0;
  box-shadow: 0px 12px 16px -4px rgba(16, 24, 40, 0.08), 0px 4px 6px -2px rgba(16, 24, 40, 0.03);
  border-radius: 12px;
  background: #fff;
}

@ -47,6 +47,7 @@ const ChatWrapper = () => {
    clearChatList,
    setClearChatList,
    setIsResponding,
    allInputsHidden,
  } = useChatWithHistoryContext()
  const appConfig = useMemo(() => {
    const config = appParams || {}

@ -81,6 +82,9 @@ const ChatWrapper = () => {
  )
  const inputsFormValue = currentConversationId ? currentConversationInputs : newConversationInputsRef?.current
  const inputDisabled = useMemo(() => {
    if (allInputsHidden)
      return false

    let hasEmptyInput = ''
    let fileIsUploading = false
    const requiredVars = inputsForms.filter(({ required }) => required)

@ -110,7 +114,7 @@ const ChatWrapper = () => {
    if (fileIsUploading)
      return true
    return false
  }, [inputsFormValue, inputsForms])
  }, [inputsFormValue, inputsForms, allInputsHidden])

  useEffect(() => {
    if (currentChatInstanceRef.current)

@ -161,7 +165,7 @@ const ChatWrapper = () => {
  const [collapsed, setCollapsed] = useState(!!currentConversationId)

  const chatNode = useMemo(() => {
    if (!inputsForms.length)
    if (allInputsHidden || !inputsForms.length)
      return null
    if (isMobile) {
      if (!currentConversationId)

@ -171,7 +175,7 @@ const ChatWrapper = () => {
    else {
      return <InputsForm collapsed={collapsed} setCollapsed={setCollapsed} />
    }
  }, [inputsForms.length, isMobile, currentConversationId, collapsed])
  }, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])

  const welcome = useMemo(() => {
    const welcomeMessage = chatList.find(item => item.isOpeningStatement)

@ -181,7 +185,7 @@ const ChatWrapper = () => {
      return null
    if (!welcomeMessage)
      return null
    if (!collapsed && inputsForms.length > 0)
    if (!collapsed && inputsForms.length > 0 && !allInputsHidden)
      return null
    if (welcomeMessage.suggestedQuestions && welcomeMessage.suggestedQuestions?.length > 0) {
      return (

@ -218,7 +222,7 @@ const ChatWrapper = () => {
        </div>
      </div>
    )
  }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState])
  }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])

  const answerIcon = (appData?.site && appData.site.use_icon_as_answer_icon)
    ? <AnswerIcon
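
The inputDisabled memo short-circuits to false when every input form is hidden; only the head and tail of the callback are visible in these hunks. A self-contained sketch of the full check, with the elided middle reconstructed from the visible variables (hasEmptyInput, fileIsUploading, requiredVars) — the loop body and field names like 'file-list' and upload_file_id are assumptions, not the committed code:

type FormValue = string | number | Array<{ upload_file_id?: string }> | undefined

function computeInputDisabled(
  forms: { variable: string; label: string; required: boolean; hide: boolean; type?: string }[],
  values: Record<string, FormValue>,
): boolean {
  // All inputs hidden: the user cannot fill anything in, so never block sending.
  if (forms.length > 0 && forms.every(f => f.hide))
    return false

  let hasEmptyInput = ''
  let fileIsUploading = false
  const requiredVars = forms.filter(({ required }) => required)
  for (const { variable, label, type } of requiredVars) {
    const value = values[variable]
    if (value === undefined || value === '')
      hasEmptyInput = label // remember the first unfilled required field
    if (type === 'file-list' && Array.isArray(value))
      fileIsUploading = value.some(file => !file.upload_file_id) // upload not finished yet
  }
  return Boolean(hasEmptyInput) || fileIsUploading
}
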

@ -60,6 +60,7 @@ export type ChatWithHistoryContextValue = {
  setIsResponding: (state: boolean) => void,
  currentConversationInputs: Record<string, any> | null,
  setCurrentConversationInputs: (v: Record<string, any>) => void,
  allInputsHidden: boolean,
}

export const ChatWithHistoryContext = createContext<ChatWithHistoryContextValue>({

@ -95,5 +96,6 @@ export const ChatWithHistoryContext = createContext<ChatWithHistoryContextValue>
  setIsResponding: noop,
  currentConversationInputs: {},
  setCurrentConversationInputs: noop,
  allInputsHidden: false,
})
export const useChatWithHistoryContext = () => useContext(ChatWithHistoryContext)

@ -240,6 +240,11 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
      }
    })
  }, [appParams])

  const allInputsHidden = useMemo(() => {
    return inputsForms.length > 0 && inputsForms.every(item => item.hide === true)
  }, [inputsForms])

  useEffect(() => {
    const conversationInputs: Record<string, any> = {}

@ -304,6 +309,9 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {

  const { notify } = useToastContext()
  const checkInputsRequired = useCallback((silent?: boolean) => {
    if (allInputsHidden)
      return true

    let hasEmptyInput = ''
    let fileIsUploading = false
    const requiredVars = inputsForms.filter(({ required }) => required)

@ -339,7 +347,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
    }

    return true
  }, [inputsForms, notify, t])
  }, [inputsForms, notify, t, allInputsHidden])
  const handleStartChat = useCallback((callback: any) => {
    if (checkInputsRequired()) {
      setShowNewConversationItemInList(true)

@ -507,5 +515,6 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
    setIsResponding,
    currentConversationInputs,
    setCurrentConversationInputs,
    allInputsHidden,
  }
}
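
Note that allInputsHidden is only true when there is at least one form and every form opts in to hide; an app with no input forms at all is treated as "nothing hidden". The same derivation as a standalone function, with a hypothetical InputFormLike shape:

type InputFormLike = { hide?: boolean }

const areAllInputsHidden = (forms: InputFormLike[]): boolean =>
  forms.length > 0 && forms.every(form => form.hide === true)

// areAllInputsHidden([]) === false: "no forms" is not the same as "all hidden",
// so the empty-form UI paths stay governed by the existing inputsForms.length checks.
// areAllInputsHidden([{ hide: true }, { hide: true }]) === true
// areAllInputsHidden([{ hide: true }, {}]) === false
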

@ -161,6 +161,7 @@ const ChatWithHistoryWrap: FC<ChatWithHistoryWrapProps> = ({
    setIsResponding,
    currentConversationInputs,
    setCurrentConversationInputs,
    allInputsHidden,
  } = useChatWithHistory(installedAppInfo)

  return (

@ -206,6 +207,7 @@ const ChatWithHistoryWrap: FC<ChatWithHistoryWrapProps> = ({
      setIsResponding,
      currentConversationInputs,
      setCurrentConversationInputs,
      allInputsHidden,
    }}>
      <ChatWithHistory className={className} />
    </ChatWithHistoryContext.Provider>
|
|
@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => {
|
|||
})
|
||||
}, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs])
|
||||
|
||||
const visibleInputsForms = inputsForms.filter(form => form.hide !== true)
|
||||
|
||||
return (
|
||||
<div className='space-y-4'>
|
||||
{inputsForms.map(form => (
|
||||
{visibleInputsForms.map(form => (
|
||||
<div key={form.variable} className='space-y-1'>
|
||||
<div className='flex h-6 items-center gap-1'>
|
||||
<div className='system-md-semibold text-text-secondary'>{form.label}</div>
|
||||
|
|
|
|||
|
|

@ -21,9 +21,14 @@ const InputsFormNode = ({
    isMobile,
    currentConversationId,
    handleStartChat,
    allInputsHidden,
    themeBuilder,
    inputsForms,
  } = useChatWithHistoryContext()

  if (allInputsHidden || inputsForms.length === 0)
    return null

  return (
    <div className={cn('flex flex-col items-center px-4 pt-6', isMobile && 'pt-4')}>
      <div className={cn(

@ -143,5 +143,6 @@ export type InputForm = {
  label: string
  variable: any
  required: boolean
  hide: boolean
  [key: string]: any
}

@ -48,6 +48,7 @@ const ChatWrapper = () => {
    clearChatList,
    setClearChatList,
    setIsResponding,
    allInputsHidden,
  } = useEmbeddedChatbotContext()
  const appConfig = useMemo(() => {
    const config = appParams || {}

@ -82,6 +83,9 @@ const ChatWrapper = () => {
  )
  const inputsFormValue = currentConversationId ? currentConversationInputs : newConversationInputsRef?.current
  const inputDisabled = useMemo(() => {
    if (allInputsHidden)
      return false

    let hasEmptyInput = ''
    let fileIsUploading = false
    const requiredVars = inputsForms.filter(({ required }) => required)

@ -111,7 +115,7 @@ const ChatWrapper = () => {
    if (fileIsUploading)
      return true
    return false
  }, [inputsFormValue, inputsForms])
  }, [inputsFormValue, inputsForms, allInputsHidden])

  useEffect(() => {
    if (currentChatInstanceRef.current)

@ -160,7 +164,7 @@ const ChatWrapper = () => {
  const [collapsed, setCollapsed] = useState(!!currentConversationId)

  const chatNode = useMemo(() => {
    if (!inputsForms.length)
    if (allInputsHidden || !inputsForms.length)
      return null
    if (isMobile) {
      if (!currentConversationId)

@ -170,7 +174,7 @@ const ChatWrapper = () => {
    else {
      return <InputsForm collapsed={collapsed} setCollapsed={setCollapsed} />
    }
  }, [inputsForms.length, isMobile, currentConversationId, collapsed])
  }, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])

  const welcome = useMemo(() => {
    const welcomeMessage = chatList.find(item => item.isOpeningStatement)

@ -180,7 +184,7 @@ const ChatWrapper = () => {
      return null
    if (!welcomeMessage)
      return null
    if (!collapsed && inputsForms.length > 0)
    if (!collapsed && inputsForms.length > 0 && !allInputsHidden)
      return null
    if (welcomeMessage.suggestedQuestions && welcomeMessage.suggestedQuestions?.length > 0) {
      return (

@ -215,7 +219,7 @@ const ChatWrapper = () => {
        </div>
      </div>
    )
  }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState])
  }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])

  const answerIcon = isDify()
    ? <LogoAvatar className='relative shrink-0' />

@ -53,6 +53,7 @@ export type EmbeddedChatbotContextValue = {
  setIsResponding: (state: boolean) => void,
  currentConversationInputs: Record<string, any> | null,
  setCurrentConversationInputs: (v: Record<string, any>) => void,
  allInputsHidden: boolean
}

export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>({

@ -82,5 +83,6 @@ export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>
  setIsResponding: noop,
  currentConversationInputs: {},
  setCurrentConversationInputs: noop,
  allInputsHidden: false,
})
export const useEmbeddedChatbotContext = () => useContext(EmbeddedChatbotContext)

@ -235,6 +235,10 @@ export const useEmbeddedChatbot = () => {
    })
  }, [initInputs, appParams])

  const allInputsHidden = useMemo(() => {
    return inputsForms.length > 0 && inputsForms.every(item => item.hide === true)
  }, [inputsForms])

  useEffect(() => {
    // init inputs from url params
    (async () => {

@ -306,6 +310,9 @@ export const useEmbeddedChatbot = () => {

  const { notify } = useToastContext()
  const checkInputsRequired = useCallback((silent?: boolean) => {
    if (allInputsHidden)
      return true

    let hasEmptyInput = ''
    let fileIsUploading = false
    const requiredVars = inputsForms.filter(({ required }) => required)

@ -341,7 +348,7 @@ export const useEmbeddedChatbot = () => {
    }

    return true
  }, [inputsForms, notify, t])
  }, [inputsForms, notify, t, allInputsHidden])
  const handleStartChat = useCallback((callback?: any) => {
    if (checkInputsRequired()) {
      setShowNewConversationItemInList(true)

@ -417,5 +424,6 @@ export const useEmbeddedChatbot = () => {
    setIsResponding,
    currentConversationInputs,
    setCurrentConversationInputs,
    allInputsHidden,
  }
}

@ -168,6 +168,7 @@ const EmbeddedChatbotWrapper = () => {
    setIsResponding,
    currentConversationInputs,
    setCurrentConversationInputs,
    allInputsHidden,
  } = useEmbeddedChatbot()

  return <EmbeddedChatbotContext.Provider value={{

@ -206,6 +207,7 @@ const EmbeddedChatbotWrapper = () => {
    setIsResponding,
    currentConversationInputs,
    setCurrentConversationInputs,
    allInputsHidden,
  }}>
    <Chatbot />
  </EmbeddedChatbotContext.Provider>

@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => {
    })
  }, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs])

  const visibleInputsForms = inputsForms.filter(form => form.hide !== true)

  return (
    <div className='space-y-4'>
      {inputsForms.map(form => (
      {visibleInputsForms.map(form => (
        <div key={form.variable} className='space-y-1'>
          <div className='flex h-6 items-center gap-1'>
            <div className='system-md-semibold text-text-secondary'>{form.label}</div>

@ -22,8 +22,13 @@ const InputsFormNode = ({
    currentConversationId,
    themeBuilder,
    handleStartChat,
    allInputsHidden,
    inputsForms,
  } = useEmbeddedChatbotContext()

  if (allInputsHidden || inputsForms.length === 0)
    return null

  return (
    <div className={cn('mb-6 flex flex-col items-center px-4 pt-6', isMobile && 'mb-4 pt-4')}>
      <div className={cn(

@ -33,16 +33,17 @@ const DifyLogo: FC<DifyLogoProps> = ({
  const { theme } = useTheme()
  const themedStyle = (theme === 'dark' && style === 'default') ? 'monochromeWhite' : style
  const { systemFeatures } = useGlobalPublicStore()
  const hasBrandingLogo = Boolean(systemFeatures.branding.enabled && systemFeatures.branding.workspace_logo)

  let src = `${basePath}${logoPathMap[themedStyle]}`
  if (systemFeatures.branding.enabled)
  if (hasBrandingLogo)
    src = systemFeatures.branding.workspace_logo

  return (
    <img
      src={src}
      className={classNames('block object-contain', logoSizeMap[size], className)}
      alt='Dify logo'
      className={classNames('block object-contain', logoSizeMap[size], hasBrandingLogo && 'w-auto', className)}
      alt={hasBrandingLogo ? 'Logo' : 'Dify logo'}
    />
  )
}

@ -11,7 +11,7 @@ import {
  atelierHeathDark,
  atelierHeathLight,
} from 'react-syntax-highlighter/dist/esm/styles/hljs'
import { Component, memo, useMemo, useRef, useState } from 'react'
import { Component, memo, useEffect, useMemo, useRef, useState } from 'react'
import { flow } from 'lodash-es'
import ActionButton from '@/app/components/base/action-button'
import CopyIcon from '@/app/components/base/copy-icon'

@ -74,7 +74,7 @@ const preprocessLaTeX = (content: string) => {

  processedContent = flow([
    (str: string) => str.replace(/\\\[(.*?)\\\]/g, (_, equation) => `$$${equation}$$`),
    (str: string) => str.replace(/\\\[(.*?)\\\]/gs, (_, equation) => `$$${equation}$$`),
    (str: string) => str.replace(/\\\[([\s\S]*?)\\\]/g, (_, equation) => `$$${equation}$$`),
    (str: string) => str.replace(/\\\((.*?)\\\)/g, (_, equation) => `$$${equation}$$`),
    (str: string) => str.replace(/(^|[^\\])\$(.+?)\$/g, (_, prefix, equation) => `${prefix}$${equation}$`),
  ])(processedContent)
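
Both spellings make the bracket-delimited match span newlines: the /s (dotall) flag lets '.' match '\n', while the [\s\S] character class matches any character without needing the flag. A quick check in plain TypeScript, illustrative only:

const multiline = 'before \\[a\nb\\] after'

// dotall flag: '.' also matches newlines
multiline.replace(/\\\[(.*?)\\\]/gs, (_, eq) => `$$${eq}$$`) // 'before $$a\nb$$ after'

// character-class spelling: [\s\S] matches anything, no flag needed,
// which keeps the behaviour identical on engines without dotall support
multiline.replace(/\\\[([\s\S]*?)\\\]/g, (_, eq) => `$$${eq}$$`) // 'before $$a\nb$$ after'
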

@ -124,23 +124,161 @@ export function PreCode(props: { children: any }) {
const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any) => {
  const { theme } = useTheme()
  const [isSVG, setIsSVG] = useState(true)
  const [chartState, setChartState] = useState<'loading' | 'success' | 'error'>('loading')
  const [finalChartOption, setFinalChartOption] = useState<any>(null)
  const echartsRef = useRef<any>(null)
  const contentRef = useRef<string>('')
  const processedRef = useRef<boolean>(false) // Track if content was successfully processed
  const match = /language-(\w+)/.exec(className || '')
  const language = match?.[1]
  const languageShowName = getCorrectCapitalizationLanguageName(language || '')
  const chartData = useMemo(() => {
    const str = String(children).replace(/\n$/, '')
    if (language === 'echarts') {
      try {
        return JSON.parse(str)
      }
      catch { }
      try {
        // eslint-disable-next-line no-new-func, sonarjs/code-eval
        return new Function(`return ${str}`)()
      }
      catch { }
  const isDarkMode = theme === Theme.dark

  const echartsStyle = useMemo(() => ({
    height: '350px',
    width: '100%',
  }), [])

  const echartsOpts = useMemo(() => ({
    renderer: 'canvas',
    width: 'auto',
  }) as any, [])

  const echartsOnEvents = useMemo(() => ({
    finished: () => {
      const instance = echartsRef.current?.getEchartsInstance?.()
      if (instance)
        instance.resize()
    },
  }), [echartsRef]) // echartsRef is stable, so this effectively runs once.

  // Handle container resize for echarts
  useEffect(() => {
    if (language !== 'echarts' || !echartsRef.current) return

    const handleResize = () => {
      // This gets the echarts instance from the component
      const instance = echartsRef.current?.getEchartsInstance?.()
      if (instance)
        instance.resize()
    }

    window.addEventListener('resize', handleResize)

    // Also manually trigger resize after a short delay to ensure proper sizing
    const resizeTimer = setTimeout(handleResize, 200)

    return () => {
      window.removeEventListener('resize', handleResize)
      clearTimeout(resizeTimer)
    }
  }, [language, echartsRef.current])

  // Process chart data when content changes
  useEffect(() => {
    // Only process echarts content
    if (language !== 'echarts') return

    // Reset state when new content is detected
    if (!contentRef.current) {
      setChartState('loading')
      processedRef.current = false
    }

    const newContent = String(children).replace(/\n$/, '')

    // Skip if content hasn't changed
    if (contentRef.current === newContent) return
    contentRef.current = newContent

    const trimmedContent = newContent.trim()
    if (!trimmedContent) return

    // Detect if this is historical data (already complete)
    // Historical data typically comes as a complete code block with complete JSON
    const isCompleteJson
      = (trimmedContent.startsWith('{') && trimmedContent.endsWith('}')
        && trimmedContent.split('{').length === trimmedContent.split('}').length)
      || (trimmedContent.startsWith('[') && trimmedContent.endsWith(']')
        && trimmedContent.split('[').length === trimmedContent.split(']').length)

    // If the JSON structure looks complete, try to parse it right away
    if (isCompleteJson && !processedRef.current) {
      try {
        const parsed = JSON.parse(trimmedContent)
        if (typeof parsed === 'object' && parsed !== null) {
          setFinalChartOption(parsed)
          setChartState('success')
          processedRef.current = true
          return
        }
      }
      catch {
        try {
          // eslint-disable-next-line no-new-func, sonarjs/code-eval
          const result = new Function(`return ${trimmedContent}`)()
          if (typeof result === 'object' && result !== null) {
            setFinalChartOption(result)
            setChartState('success')
            processedRef.current = true
            return
          }
        }
        catch {
          // If we have a complete JSON structure but it doesn't parse,
          // it's likely an error rather than incomplete data
          setChartState('error')
          processedRef.current = true
          return
        }
      }
    }

    // If we get here, either the JSON isn't complete yet, or we failed to parse it
    // Check more conditions for streaming data
    const isIncomplete
      = trimmedContent.length < 5
      || (trimmedContent.startsWith('{')
        && (!trimmedContent.endsWith('}')
          || trimmedContent.split('{').length !== trimmedContent.split('}').length))
      || (trimmedContent.startsWith('[')
        && (!trimmedContent.endsWith(']')
          || trimmedContent.split('[').length !== trimmedContent.split(']').length))
      || (trimmedContent.split('"').length % 2 !== 1)
      || (trimmedContent.includes('{"') && !trimmedContent.includes('"}'))

    // Only try to parse streaming data if it looks complete and hasn't been processed
    if (!isIncomplete && !processedRef.current) {
      let isValidOption = false

      try {
        const parsed = JSON.parse(trimmedContent)
        if (typeof parsed === 'object' && parsed !== null) {
          setFinalChartOption(parsed)
          isValidOption = true
        }
      }
      catch {
        try {
          // eslint-disable-next-line no-new-func, sonarjs/code-eval
          const result = new Function(`return ${trimmedContent}`)()
          if (typeof result === 'object' && result !== null) {
            setFinalChartOption(result)
            isValidOption = true
          }
        }
        catch {
          // Both parsing methods failed, but content looks complete
          setChartState('error')
          processedRef.current = true
        }
      }

      if (isValidOption) {
        setChartState('success')
        processedRef.current = true
      }
    }
    return JSON.parse('{"title":{"text":"ECharts error - Wrong option."}}')
  }, [language, children])

  const renderCodeContent = useMemo(() => {
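
The streaming heuristic above decides whether a partially received echarts block is worth parsing by balancing brace/bracket counts and checking for an odd number of quote marks. Extracted as a standalone predicate (illustrative; the committed code inlines this logic, and the naive split-count balance check can be fooled by braces inside string literals):

function looksCompleteJson(raw: string): boolean {
  const s = raw.trim()
  if (s.length < 5)
    return false
  // Equal numbers of openers and closers <=> equal split lengths.
  const balanced = (open: string, close: string) =>
    s.split(open).length === s.split(close).length
  if (s.startsWith('{'))
    return s.endsWith('}') && balanced('{', '}')
  if (s.startsWith('['))
    return s.endsWith(']') && balanced('[', ']')
  return false
}

// looksCompleteJson('{"title": {"text": "ok"}}') === true
// looksCompleteJson('{"title": {"text": "ok"')   === false (still streaming)
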

@ -150,14 +288,106 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
      if (isSVG)
        return <Flowchart PrimitiveCode={content} />
      break
    case 'echarts':
    case 'echarts': {
      // Loading state: show loading indicator
      if (chartState === 'loading') {
        return (
          <div style={{
            minHeight: '350px',
            width: '100%',
            display: 'flex',
            flexDirection: 'column',
            alignItems: 'center',
            justifyContent: 'center',
            borderBottomLeftRadius: '10px',
            borderBottomRightRadius: '10px',
            backgroundColor: isDarkMode ? 'var(--color-components-input-bg-normal)' : 'transparent',
            color: 'var(--color-text-secondary)',
          }}>
            <div style={{
              marginBottom: '12px',
              width: '24px',
              height: '24px',
            }}>
              {/* Rotating spinner that works in both light and dark modes */}
              <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg" style={{ animation: 'spin 1.5s linear infinite' }}>
                <style>
                  {`
                    @keyframes spin {
                      0% { transform: rotate(0deg); }
                      100% { transform: rotate(360deg); }
                    }
                  `}
                </style>
                <circle opacity="0.2" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="2" />
                <path d="M12 2C6.47715 2 2 6.47715 2 12" stroke="currentColor" strokeWidth="2" strokeLinecap="round" />
              </svg>
            </div>
            <div style={{
              fontFamily: 'var(--font-family)',
              fontSize: '14px',
            }}>Chart loading...</div>
          </div>
        )
      }

      // Success state: show the chart
      if (chartState === 'success' && finalChartOption) {
        return (
          <div style={{
            minWidth: '300px',
            minHeight: '350px',
            width: '100%',
            overflowX: 'auto',
            borderBottomLeftRadius: '10px',
            borderBottomRightRadius: '10px',
            transition: 'background-color 0.3s ease',
          }}>
            <ErrorBoundary>
              <ReactEcharts
                ref={echartsRef}
                option={finalChartOption}
                style={echartsStyle}
                theme={isDarkMode ? 'dark' : undefined}
                opts={echartsOpts}
                notMerge={true}
                onEvents={echartsOnEvents}
              />
            </ErrorBoundary>
          </div>
        )
      }

      // Error state: show error message
      const errorOption = {
        title: {
          text: 'ECharts error - Wrong option.',
        },
      }

      return (
        <div style={{ minHeight: '350px', minWidth: '100%', overflowX: 'scroll' }}>
        <div style={{
          minWidth: '300px',
          minHeight: '350px',
          width: '100%',
          overflowX: 'auto',
          borderBottomLeftRadius: '10px',
          borderBottomRightRadius: '10px',
          transition: 'background-color 0.3s ease',
        }}>
          <ErrorBoundary>
            <ReactEcharts option={chartData} style={{ minWidth: '700px' }} />
            <ReactEcharts
              ref={echartsRef}
              option={errorOption}
              style={echartsStyle}
              theme={isDarkMode ? 'dark' : undefined}
              opts={echartsOpts}
              notMerge={true}
            />
          </ErrorBoundary>
        </div>
      )
    }
    case 'svg':
      if (isSVG) {
        return (

@ -192,7 +422,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
        </SyntaxHighlighter>
      )
  }
  }, [children, language, isSVG, chartData, props, theme, match])
  }, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, echartsOnEvents])

  if (inline || !match)
    return <code {...props} className={className}>{children}</code>

@ -216,16 +446,24 @@ CodeBlock.displayName = 'CodeBlock'

const VideoBlock: any = memo(({ node }: any) => {
  const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src)
  if (srcs.length === 0)
  if (srcs.length === 0) {
    const src = node.properties?.src
    if (src)
      return <VideoGallery key={src} srcs={[src]} />
    return null
  }
  return <VideoGallery key={srcs.join()} srcs={srcs} />
})
VideoBlock.displayName = 'VideoBlock'

const AudioBlock: any = memo(({ node }: any) => {
  const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src)
  if (srcs.length === 0)
  if (srcs.length === 0) {
    const src = node.properties?.src
    if (src)
      return <AudioGallery key={src} srcs={[src]} />
    return null
  }
  return <AudioGallery key={srcs.join()} srcs={srcs} />
})
AudioBlock.displayName = 'AudioBlock'

@ -93,7 +93,7 @@ const TagInput: FC<TagInputProps> = ({
      <div className={cn('group/tag-add mt-1 flex items-center gap-x-0.5', !isSpecialMode ? 'rounded-md border border-dashed border-divider-deep px-1.5' : '')}>
        {!isSpecialMode && !focused && <RiAddLine className='h-3.5 w-3.5 text-text-placeholder group-hover/tag-add:text-text-secondary' />}
        <AutosizeInput
          inputClassName={cn('appearance-none caret-[#295EFF] outline-none placeholder:text-text-placeholder group-hover/tag-add:placeholder:text-text-secondary', isSpecialMode ? 'bg-transparent' : '')}
          inputClassName={cn('appearance-none text-text-primary caret-[#295EFF] outline-none placeholder:text-text-placeholder group-hover/tag-add:placeholder:text-text-secondary', isSpecialMode ? 'bg-transparent' : '')}
          className={cn(
            !isInWorkflow && 'max-w-[300px]',
            isInWorkflow && 'max-w-[146px]',

@ -70,7 +70,7 @@ const EmptyElement: FC<{ canAdd: boolean; onClick: () => void; type?: 'upload' |
      <div className={s.emptyTip}>
        {t(`datasetDocuments.list.empty.${type}.tip`)}
      </div>
      {type === 'upload' && canAdd && <Button onClick={onClick} className={s.addFileBtn}>
      {type === 'upload' && canAdd && <Button onClick={onClick} className={s.addFileBtn} variant='secondary-accent'>
        <PlusIcon className={s.plusIcon} />{t('datasetDocuments.list.addFile')}
      </Button>}
    </div>

@ -267,7 +267,7 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
            ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
            : 'https://docs.dify.ai/en/guides/knowledge-base/integrate-knowledge-within-application'
        }
        >
      >
        <span>{t('datasetDocuments.list.learnMore')}</span>
        <RiExternalLinkLine className='h-3 w-3' />
      </a>

@ -26,7 +26,7 @@
  @apply text-text-secondary text-sm;
}
.addFileBtn {
  @apply mt-4 w-fit !text-[13px] text-primary-600 font-medium bg-white border-[0.5px];
  @apply mt-4 w-fit !text-[13px] font-medium border-[0.5px];
}
.plusIcon {
  @apply w-4 h-4 mr-2 stroke-current stroke-[1.5px];

@ -35,16 +35,16 @@
  @apply flex items-center justify-center h-full;
}
.emptyElement {
  @apply bg-gray-50 w-[560px] h-fit box-border px-5 py-4 rounded-2xl;
  @apply bg-components-panel-on-panel-item-bg border-divider-subtle w-[560px] h-fit box-border px-5 py-4 rounded-2xl;
}
.emptyTitle {
  @apply text-gray-700 font-semibold;
  @apply text-text-secondary font-semibold;
}
.emptyTip {
  @apply mt-2 text-gray-500 text-sm font-normal;
  @apply mt-2 text-text-primary text-sm font-normal;
}
.emptySymbolIconWrapper {
  @apply w-[44px] h-[44px] border border-solid border-gray-100 rounded-lg flex items-center justify-center mb-2;
  @apply w-[44px] h-[44px] border border-solid border-components-button-secondary-border rounded-lg flex items-center justify-center mb-2;
}
.commonIcon {
  @apply w-4 h-4 inline-block align-middle;

@ -151,7 +151,7 @@ const PermissionSelector = ({ disabled, permission, value, memberList, onChange,
      </div>
      {isPartialMembers && (
        <div className='max-h-[360px] overflow-y-auto border-t-[1px] border-divider-regular pb-1 pl-1 pr-1'>
          <div className='sticky left-0 top-0 z-10 bg-white p-2 pb-1'>
          <div className='sticky left-0 top-0 z-10 bg-components-panel-on-panel-item-bg p-2 pb-1'>
            <Input
              showLeftIcon
              showClearIcon

@ -42,7 +42,7 @@ const InvitationLink = ({
        <Tooltip
          popupContent={isCopied ? `${t('appApi.copied')}` : `${t('appApi.copy')}`}
        >
          <div className='r-0 absolute left-0 top-0 w-full cursor-pointer truncate pl-2 pr-2' onClick={copyHandle}>{value.url}</div>
          <div className='r-0 absolute left-0 top-0 w-full cursor-pointer truncate pl-2 pr-2 text-text-primary' onClick={copyHandle}>{value.url}</div>
        </Tooltip>
      </div>
      <div className="h-4 shrink-0 border bg-divider-regular" />

@ -2,6 +2,11 @@
import { useQuery } from '@tanstack/react-query'
import type { FC } from 'react'
import type { GithubRepo } from '@/models/common'
import { RiLoader2Line } from '@remixicon/react'

const defaultData = {
  stargazers_count: 98570,
}

const getStar = async () => {
  const res = await fetch('https://api.github.com/repos/langgenius/dify')

@ -13,15 +18,21 @@ const getStar = async () => {
}

const GithubStar: FC<{ className: string }> = (props) => {
  const { isFetching, data } = useQuery<GithubRepo>({
  const { isFetching, isError, data } = useQuery<GithubRepo>({
    queryKey: ['github-star'],
    queryFn: getStar,
    enabled: process.env.NODE_ENV !== 'development',
    initialData: { stargazers_count: 81204 },
    retry: false,
    placeholderData: defaultData,
  })

  if (isFetching)
    return null
  return <span {...props}>{data.stargazers_count.toLocaleString()}</span>
    return <RiLoader2Line className='size-3 shrink-0 animate-spin text-text-tertiary' />

  if (isError)
    return <span {...props}>{defaultData.stargazers_count.toLocaleString()}</span>

  return <span {...props}>{data?.stargazers_count.toLocaleString()}</span>
}

export default GithubStar
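
Swapping initialData for placeholderData changes the cache semantics: initialData is written into the query cache as if it were real data, while placeholderData is only shown in-memory and the query still fetches and reports isFetching. That is what lets the component render a spinner during the fetch and fall back to the hard-coded count only on error. A condensed sketch of the same options (TanStack Query v4/v5 style, mirroring the key and shape in the diff):

import { useQuery } from '@tanstack/react-query'

type GithubRepo = { stargazers_count: number }

const fallback: GithubRepo = { stargazers_count: 98570 }

function useGithubStars() {
  return useQuery<GithubRepo>({
    queryKey: ['github-star'],
    queryFn: async () => {
      const res = await fetch('https://api.github.com/repos/langgenius/dify')
      if (!res.ok)
        throw new Error(`GitHub API: ${res.status}`) // let react-query surface isError
      return res.json()
    },
    retry: false, // one failed request is enough; show the fallback count instead
    placeholderData: fallback, // displayed while fetching, never written to the cache
  })
}
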
@ -60,7 +60,7 @@ const Header = () => {
|
|||
{
|
||||
!isMobile
|
||||
&& <div className='flex shrink-0 items-center gap-1.5 self-stretch pl-3'>
|
||||
<Link href="/apps" className='flex h-8 w-[52px] shrink-0 items-center justify-center gap-2'>
|
||||
<Link href="/apps" className='flex h-8 shrink-0 items-center justify-center gap-2 px-0.5'>
|
||||
<DifyLogo />
|
||||
</Link>
|
||||
<div className='font-light text-divider-deep'>/</div>
|
||||
|
|
|
|||
|
|

@ -1,17 +1,39 @@
'use client'
import useTheme from '@/hooks/use-theme'

type LineProps = {
  className?: string
}

const Line = ({
  className,
}: LineProps) => {
  const { theme } = useTheme()
  const isDarkMode = theme === 'dark'

  if (isDarkMode) {
    return (
      <svg xmlns='http://www.w3.org/2000/svg' width='2' height='240' viewBox='0 0 2 240' fill='none' className={className}>
        <path d='M1 0L1 240' stroke='url(#paint0_linear_6295_52176)' />
        <defs>
          <linearGradient id='paint0_linear_6295_52176' x1='-7.99584' y1='240' x2='-7.88094' y2='3.95539e-05' gradientUnits='userSpaceOnUse'>
            <stop stopOpacity='0.01' />
            <stop offset='0.503965' stopColor='#C8CEDA' stopOpacity='0.14' />
            <stop offset='1' stopOpacity='0.01' />
          </linearGradient>
        </defs>
      </svg>
    )
  }

  return (
    <svg xmlns="http://www.w3.org/2000/svg" width="2" height="241" viewBox="0 0 2 241" fill="none" className={className}>
      <path d="M1 0.5L1 240.5" stroke="url(#paint0_linear_1989_74474)"/>
    <svg xmlns='http://www.w3.org/2000/svg' width='2' height='241' viewBox='0 0 2 241' fill='none' className={className}>
      <path d='M1 0.5L1 240.5' stroke='url(#paint0_linear_1989_74474)' />
      <defs>
        <linearGradient id="paint0_linear_1989_74474" x1="-7.99584" y1="240.5" x2="-7.88094" y2="0.50004" gradientUnits="userSpaceOnUse">
          <stop stopColor="white" stopOpacity="0.01"/>
          <stop offset="0.503965" stopColor="#101828" stopOpacity="0.08"/>
          <stop offset="1" stopColor="white" stopOpacity="0.01"/>
        <linearGradient id='paint0_linear_1989_74474' x1='-7.99584' y1='240.5' x2='-7.88094' y2='0.50004' gradientUnits='userSpaceOnUse'>
          <stop stopColor='white' stopOpacity='0.01' />
          <stop offset='0.503965' stopColor='#101828' stopOpacity='0.08' />
          <stop offset='1' stopColor='white' stopOpacity='0.01' />
        </linearGradient>
      </defs>
    </svg>

@ -12,6 +12,7 @@ import { useTranslation } from 'react-i18next'
import { SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config'
import { noop } from 'lodash-es'
import { useGlobalPublicStore } from '@/context/global-public-context'
import Button from '@/app/components/base/button'

const Empty = () => {
  const { t } = useTranslation()

@ -43,14 +44,14 @@ const Empty = () => {
      {/* skeleton */}
      <div className='absolute top-0 z-10 grid h-full w-full grid-cols-2 gap-2 overflow-hidden px-12'>
        {Array.from({ length: 20 }).fill(0).map((_, i) => (
          <div key={i} className='h-[100px] rounded-xl bg-components-card-bg' />
          <div key={i} className='h-24 rounded-xl bg-components-card-bg' />
        ))}
      </div>
      {/* mask */}
      <div className='absolute z-20 h-full w-full bg-gradient-to-b from-components-panel-bg-transparent to-components-panel-bg' />
      <div className='relative z-30 flex h-full items-center justify-center'>
        <div className='flex flex-col items-center gap-y-3'>
          <div className='relative -z-10 flex h-[52px] w-[52px] items-center justify-center rounded-xl
          <div className='relative -z-10 flex size-14 items-center justify-center rounded-xl
            border-[1px] border-dashed border-divider-deep bg-components-card-bg shadow-xl shadow-shadow-shadow-5'>
            <Group className='h-5 w-5 text-text-tertiary' />
            <Line className='absolute right-[-1px] top-1/2 -translate-y-1/2' />

@ -58,10 +59,10 @@ const Empty = () => {
            <Line className='absolute left-1/2 top-0 -translate-x-1/2 -translate-y-1/2 rotate-90' />
            <Line className='absolute left-1/2 top-full -translate-x-1/2 -translate-y-1/2 rotate-90' />
          </div>
          <div className='text-sm font-normal text-text-tertiary'>
          <div className='system-md-regular text-text-tertiary'>
            {text}
          </div>
          <div className='flex w-[240px] flex-col'>
          <div className='flex w-[236px] flex-col'>
            <input
              type='file'
              ref={fileInputRef}

@ -79,10 +80,9 @@ const Empty = () => {
              { icon: Github, text: t('plugin.list.source.github'), action: 'github' },
              { icon: FileZip, text: t('plugin.list.source.local'), action: 'local' },
            ].map(({ icon: Icon, text, action }) => (
              <div
              <Button
                key={action}
                className='flex cursor-pointer items-center gap-x-1 rounded-lg border-[0.5px] bg-components-button-secondary-bg
                  px-3 py-2 shadow-xs shadow-shadow-shadow-3 hover:bg-state-base-hover'
                className='justify-start gap-x-0.5 px-3'
                onClick={() => {
                  if (action === 'local')
                    fileInputRef.current?.click()

@ -92,9 +92,9 @@ const Empty = () => {
                  setSelectedAction(action)
                }}
              >
                <Icon className="h-4 w-4 text-text-tertiary" />
                <span className='system-md-regular text-text-secondary'>{text}</span>
              </div>
                <Icon className='size-4' />
                <span className='px-0.5'>{text}</span>
              </Button>
            ))}
          </div>
        </div>
@@ -186,6 +186,17 @@ const PluginPage = ({
{
isExploringMarketplace && (
<>
<Link
href='https://github.com/langgenius/dify-plugins/issues/new?template=plugin_request.yaml'
target='_blank'
>
<Button
variant='ghost'
className='text-text-tertiary'
>
{t('plugin.requestAPlugin')}
</Button>
</Link>
<Link
href={getDocsUrl(locale, '/plugins/publish-plugins/publish-to-dify-marketplace/README')}
target='_blank'

@@ -198,7 +209,7 @@ const PluginPage = ({
{t('plugin.submitPlugin')}
</Button>
</Link>
<div className='mx-2 h-3.5 w-[1px] bg-divider-regular'></div>
<div className='mx-1 h-3.5 w-[1px] shrink-0 bg-divider-regular'></div>
</>
)
}

@@ -24,7 +24,7 @@ const HelpLink = ({
<a
href={link}
target='_blank'
className='mr-1 flex h-6 w-6 items-center justify-center'
className='mr-1 flex h-6 w-6 items-center justify-center rounded-md hover:bg-state-base-hover'
>
<RiBookOpenLine className='h-4 w-4 text-gray-500' />
</a>

@@ -0,0 +1,54 @@
import { memo } from 'react'
import { useTranslation } from 'react-i18next'
import { RiCrosshairLine } from '@remixicon/react'
import type { XYPosition } from 'reactflow'
import { useReactFlow, useStoreApi } from 'reactflow'
import TooltipPlus from '@/app/components/base/tooltip'
import { useNodesSyncDraft } from '@/app/components/workflow-app/hooks'

type NodePositionProps = {
  nodePosition: XYPosition,
  nodeWidth?: number | null,
  nodeHeight?: number | null,
}
const NodePosition = ({
  nodePosition,
  nodeWidth,
  nodeHeight,
}: NodePositionProps) => {
  const { t } = useTranslation()
  const reactflow = useReactFlow()
  const store = useStoreApi()
  const { doSyncWorkflowDraft } = useNodesSyncDraft()

  if (!nodePosition || !nodeWidth || !nodeHeight) return null

  const workflowContainer = document.getElementById('workflow-container')
  const { transform } = store.getState()
  const zoom = transform[2]

  const { clientWidth, clientHeight } = workflowContainer!
  const { setViewport } = reactflow

  return (
    <TooltipPlus
      popupContent={t('workflow.panel.moveToThisNode')}
    >
      <div
        className='mr-1 flex h-6 w-6 cursor-pointer items-center justify-center rounded-md hover:bg-state-base-hover'
        onClick={() => {
          setViewport({
            x: (clientWidth - 400 - nodeWidth * zoom) / 2 - nodePosition.x * zoom,
            y: (clientHeight - nodeHeight * zoom) / 2 - nodePosition.y * zoom,
            zoom: transform[2],
          })
          doSyncWorkflowDraft()
        }}
      >
        <RiCrosshairLine className='h-4 w-4 text-text-tertiary' />
      </div>
    </TooltipPlus>
  )
}

export default memo(NodePosition)

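The setViewport call above is the whole trick: reactflow maps graph coordinates to screen coordinates as screen = graph * zoom + translate, so solving for the translate that puts the node's midpoint at the midpoint of the usable canvas yields the two expressions in the hunk. The fixed 400 compensates for the node panel docked on the right. A minimal standalone sketch of the same math (the helper name and trimmed types are illustrative, not part of the codebase):

// Sketch: compute the viewport translate that centers a node of the given
// size at graph position (x, y), assuming screenX = graphX * zoom + viewport.x.
// The 400px offset mirrors the right-hand node panel in the component above.
type Viewport = { x: number, y: number, zoom: number }

const centerNodeViewport = (
  node: { x: number, y: number, width: number, height: number },
  container: { clientWidth: number, clientHeight: number },
  zoom: number,
): Viewport => ({
  x: (container.clientWidth - 400 - node.width * zoom) / 2 - node.x * zoom,
  y: (container.clientHeight - node.height * zoom) / 2 - node.y * zoom,
  zoom,
})
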
@@ -16,6 +16,7 @@ import { useTranslation } from 'react-i18next'
import NextStep from './components/next-step'
import PanelOperator from './components/panel-operator'
import HelpLink from './components/help-link'
import NodePosition from './components/node-position'
import {
DescriptionInput,
TitleInput,

@@ -55,6 +56,9 @@ const BasePanel: FC<BasePanelProps> = ({
id,
data,
children,
position,
width,
height,
}) => {
const { t } = useTranslation()
const { showMessageLogModal } = useAppStore(useShallow(state => ({

@@ -150,6 +154,7 @@ const BasePanel: FC<BasePanelProps> = ({
</Tooltip>
)
}
<NodePosition nodePosition={position} nodeWidth={width} nodeHeight={height}></NodePosition>
<HelpLink nodeType={data.type} />
<PanelOperator id={id} data={data} showHelpLink={false} />
<div className='mx-3 h-3.5 w-[1px] bg-divider-regular' />

@@ -39,6 +39,7 @@ const DebugAndPreview = () => {
const nodes = useNodes<StartNodeType>()
const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
const variables = startNode?.data.variables || []
const visibleVariables = variables.filter(v => v.hide !== true)

const [showConversationVariableModal, setShowConversationVariableModal] = useState(false)

@@ -107,7 +108,7 @@ const DebugAndPreview = () => {
</ActionButton>
</Tooltip>
)}
{variables.length > 0 && (
{visibleVariables.length > 0 && (
<div className='relative'>
<Tooltip
popupContent={t('workflow.panel.userInputField')}

@@ -17,6 +17,7 @@ const UserInput = () => {
const nodes = useNodes<StartNodeType>()
const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
const variables = startNode?.data.variables || []
const visibleVariables = variables.filter(v => v.hide !== true)

const handleValueChange = (variable: string, v: string) => {
const {

@@ -29,13 +30,13 @@ const UserInput = () => {
})
}

if (!variables.length)
if (!visibleVariables.length)
return null

return (
<div className={cn('sticky top-0 z-[1] rounded-xl border-[0.5px] border-components-panel-border-subtle bg-components-panel-on-panel-item-bg shadow-xs')}>
<div className='px-4 pb-4 pt-3'>
{variables.map((variable, index) => (
{visibleVariables.map((variable, index) => (
<div
key={variable.variable}
className='mb-4 last-of-type:mb-0'

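Both DebugAndPreview and UserInput above now derive a visibleVariables list and key every guard and render off it, so a start-node variable flagged hide stays out of the form while still being carried in the workflow data. A minimal sketch of the pattern, with the variable type trimmed to the fields used here (the real InputVar type carries many more, including the hide: boolean added further below):

// Sketch: hide-aware filtering of start-node variables; trimmed type.
type InputVarLite = { variable: string, hide: boolean }

const getVisibleVariables = (variables: InputVarLite[]): InputVarLite[] =>
  variables.filter(v => v.hide !== true)

// Render guards then use the filtered list, never the raw one:
// if (!getVisibleVariables(variables).length) return null
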
@@ -28,7 +28,7 @@ const IterationLogTrigger = ({
if (parallelNodes.length > 0)
return parallelNodes

const serialIndex = parseInt(key, 10)
const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.iteration_id === nodeInfo.node_id

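The parseInt swap in both log triggers is purely stylistic: since ES2015, Number.parseInt is the very same function object as the global parseInt, and the change likely just satisfies a lint preference such as unicorn/prefer-number-properties (an assumption; the repo's lint config is not shown in this diff). Behavior, including the explicit radix, is unchanged:

// Number.parseInt is an alias of the global parseInt (same function object).
console.log(Number.parseInt === parseInt) // true
const serialIndex = Number.parseInt('07', 10) // 7, radix kept explicit
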
@@ -49,7 +49,6 @@ const IterationLogTrigger = ({
const iterDurationMap = nodeInfo?.iterDurationMap || iterationNodeMeta?.iteration_duration_map || {}

let structuredList: NodeTracing[][] = []

if (iterationNodeMeta?.iteration_duration_map) {
const instanceKeys = Object.keys(iterationNodeMeta.iteration_duration_map)
structuredList = instanceKeys

@@ -29,7 +29,7 @@ const LoopLogTrigger = ({
if (parallelNodes.length > 0)
return parallelNodes

const serialIndex = parseInt(key, 10)
const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.loop_id === nodeInfo.node_id

@@ -51,16 +51,15 @@ const LoopLogTrigger = ({
const loopVarMap = loopNodeMeta?.loop_variable_map || {}

let structuredList: NodeTracing[][] = []

if (loopNodeMeta?.loop_duration_map) {
if (nodeInfo.details?.length) {
structuredList = nodeInfo.details
}
else if (loopNodeMeta?.loop_duration_map) {
const instanceKeys = Object.keys(loopNodeMeta.loop_duration_map)
structuredList = instanceKeys
.map(key => filterNodesForInstance(key))
.filter(branchNodes => branchNodes.length > 0)
}
else if (nodeInfo.details?.length) {
structuredList = nodeInfo.details
}

onShowLoopResultList(
structuredList,

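The reshuffle above is a precedence change, not a pure refactor: pre-structured nodeInfo.details now wins over reconstructing loop instances from loop_duration_map, where previously it was only the fallback. A sketch of the resulting decision chain, with types narrowed for illustration (buildStructuredList is a hypothetical name, not in the codebase):

// Sketch: precedence after the change - explicit details first, then
// reconstruction from the duration map's instance keys, else empty.
type NodeTracingLite = { node_id: string }

const buildStructuredList = (
  details: NodeTracingLite[][] | undefined,
  durationMap: Record<string, number> | undefined,
  filterNodesForInstance: (key: string) => NodeTracingLite[],
): NodeTracingLite[][] => {
  if (details?.length)
    return details
  if (durationMap) {
    return Object.keys(durationMap)
      .map(filterNodesForInstance)
      .filter(branch => branch.length > 0)
  }
  return []
}
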
@@ -21,4 +21,6 @@
z-index: -1000 !important;
}

#workflow-container .react-flow {}
#workflow-container .react-flow__attribution {
background: none !important;
}

@@ -198,6 +198,7 @@ export type InputVar = {
hint?: string
options?: string[]
value_selector?: ValueSelector
hide: boolean
} & Partial<UploadFileSetting>

export type ModelConfig = {

@@ -77,20 +77,20 @@ const translation = {
learnMore: 'Weitere Informationen',
optional: 'Wahlfrei',
noTemplateFound: 'Keine Vorlagen gefunden',
workflowUserDescription: 'Workflow-Orchestrierung für Aufgaben in einer einzigen Runde wie Automatisierung und Stapelverarbeitung.',
workflowUserDescription: 'Autonome KI-Arbeitsabläufe visuell per Drag-and-Drop erstellen.',
foundResults: '{{Anzahl}} Befund',
chatbotShortDescription: 'LLM-basierter Chatbot mit einfacher Einrichtung',
completionUserDescription: 'Erstellen Sie schnell einen KI-Assistenten für Textgenerierungsaufgaben mit einfacher Konfiguration.',
noAppsFound: 'Keine Apps gefunden',
advancedShortDescription: 'Workflow für komplexe Dialoge mit mehreren Durchläufen mit Speicher',
advancedShortDescription: 'Workflow optimiert für mehrstufige Chats',
forAdvanced: 'FÜR FORTGESCHRITTENE',
chooseAppType: 'App-Typ auswählen',
completionShortDescription: 'KI-Assistent für Textgenerierungsaufgaben',
forBeginners: 'FÜR ANFÄNGER',
forBeginners: 'Einfachere App-Typen',
noIdeaTip: 'Keine Ideen? Schauen Sie sich unsere Vorlagen an',
workflowShortDescription: 'Orchestrierung für Single-Turn-Automatisierungsaufgaben',
workflowShortDescription: 'Agentischer Ablauf für intelligente Automatisierungen',
noTemplateFoundTip: 'Versuchen Sie, mit verschiedenen Schlüsselwörtern zu suchen.',
advancedUserDescription: 'Workflow-Orchestrierung für komplexe Dialogaufgaben mit mehreren Runden und Speicherkapazitäten.',
advancedUserDescription: 'Workflow mit Speicherfunktionen und Chatbot-Oberfläche.',
chatbotUserDescription: 'Erstellen Sie schnell einen LLM-basierten Chatbot mit einfacher Konfiguration. Sie können später zu Chatflow wechseln.',
foundResult: '{{Anzahl}} Ergebnis',
agentUserDescription: 'Ein intelligenter Agent, der in der Lage ist, iteratives Denken zu führen und autonome Werkzeuge zu verwenden, um Aufgabenziele zu erreichen.',

@@ -302,6 +302,7 @@ const translation = {
organizeBlocks: 'Blöcke organisieren',
change: 'Ändern',
optional: '(optional)',
moveToThisNode: 'Bewege zu diesem Knoten',
},
nodes: {
common: {

@@ -368,6 +368,7 @@ const translation = {
'inputPlaceholder': 'Please input',
'content': 'Content',
'required': 'Required',
'hide': 'Hide',
'file': {
supportFileTypes: 'Support File Types',
image: {

@@ -47,13 +47,13 @@ const translation = {
completionUserDescription: 'Quickly build an AI assistant for text generation tasks with simple configuration.',
agentShortDescription: 'Intelligent agent with reasoning and autonomous tool use',
agentUserDescription: 'An intelligent agent capable of iterative reasoning and autonomous tool use to achieve task goals.',
workflowShortDescription: 'Orchestration for single-turn automation tasks',
workflowUserDescription: 'Workflow orchestration for single-round tasks like automation and batch processing.',
workflowShortDescription: 'Agentic flow for intelligent automations',
workflowUserDescription: 'Visually build autonomous AI workflows with drag-and-drop simplicity.',
workflowWarning: 'Currently in beta',
advancedShortDescription: 'Workflow for complex multi-turn dialogues with memory',
advancedUserDescription: 'Workflow orchestration for multi-round complex dialogue tasks with memory capabilities.',
chooseAppType: 'Choose App Type',
forBeginners: 'FOR BEGINNERS',
advancedShortDescription: 'Workflow enhanced for multi-turn chats',
advancedUserDescription: 'Workflow with additional memory features and a chatbot interface.',
chooseAppType: 'Choose an App Type',
forBeginners: 'More basic app types',
forAdvanced: 'FOR ADVANCED USERS',
noIdeaTip: 'No ideas? Check out our templates',
captionName: 'App Name & Icon',