Merge branch 'main' into feature/tool-invocation-add-inputs-param

Zino 2025-11-19 17:27:57 +08:00 committed by GitHub
commit 03049edeea
33 changed files with 381 additions and 195 deletions

View File

@ -1,7 +1,10 @@
name: Run VDB Tests
on:
workflow_call:
push:
branches: [main]
paths:
- 'api/core/rag/*.py'
concurrency:
group: vdb-tests-${{ github.head_ref || github.run_id }}

View File

@ -159,8 +159,7 @@ SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
COOKIE_DOMAIN=
# Vector database configuration
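
The "leading dots are optional" note holds because RFC 6265 ignores a leading dot on a cookie's Domain attribute. A minimal sketch of how a backend might normalize the value (helper name hypothetical, not Dify's actual code):

```python
import os

def cookie_domain() -> str | None:
    # Hypothetical helper: ".example.com" and "example.com" are equivalent
    # per RFC 6265, so strip the dot; an empty value means host-only cookies.
    domain = os.environ.get("COOKIE_DOMAIN", "").strip().lstrip(".")
    return domain or None
```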

View File

@ -26,6 +26,10 @@
cp .env.example .env
```
> [!IMPORTANT]
>
> When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). The frontend and backend must be under the same top-level domain to share authentication cookies.
1. Generate a `SECRET_KEY` in the `.env` file.
bash for Linux
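
For context on why the domains must align: a cookie issued with `Domain=example.com` is sent to every subdomain of example.com, while a cookie issued without a Domain attribute stays host-only. A hedged Flask sketch, illustrative rather than Dify's actual login handler:

```python
from flask import Flask, make_response

app = Flask(__name__)

@app.get("/login")
def login():
    resp = make_response("ok")
    # With domain="example.com", both console.example.com and
    # api.example.com receive this cookie; omitting domain would
    # scope it to the issuing host only.
    resp.set_cookie("session_id", "demo", domain="example.com",
                    secure=True, httponly=True, samesite="Lax")
    return resp
```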

View File

@ -192,7 +192,6 @@ class GraphEngine:
self._dispatcher = Dispatcher(
event_queue=self._event_queue,
event_handler=self._event_handler_registry,
event_collector=self._event_manager,
execution_coordinator=self._execution_coordinator,
event_emitter=self._event_manager,
)

View File

@ -43,7 +43,6 @@ class Dispatcher:
self,
event_queue: queue.Queue[GraphNodeEventBase],
event_handler: "EventHandler",
event_collector: EventManager,
execution_coordinator: ExecutionCoordinator,
event_emitter: EventManager | None = None,
) -> None:
@ -53,13 +52,11 @@ class Dispatcher:
Args:
event_queue: Queue of events from workers
event_handler: Event handler registry for processing events
event_collector: Event manager for collecting unhandled events
execution_coordinator: Coordinator for execution flow
event_emitter: Optional event manager to signal completion
"""
self._event_queue = event_queue
self._event_handler = event_handler
self._event_collector = event_collector
self._execution_coordinator = execution_coordinator
self._event_emitter = event_emitter
@ -86,37 +83,31 @@ class Dispatcher:
def _dispatcher_loop(self) -> None:
"""Main dispatcher loop."""
try:
self._process_commands()
while not self._stop_event.is_set():
commands_checked = False
should_check_commands = False
should_break = False
if (
self._execution_coordinator.aborted
or self._execution_coordinator.paused
or self._execution_coordinator.execution_complete
):
break
if self._execution_coordinator.is_execution_complete():
should_check_commands = True
should_break = True
else:
# Check for scaling
self._execution_coordinator.check_scaling()
self._execution_coordinator.check_scaling()
try:
event = self._event_queue.get(timeout=0.1)
self._event_handler.dispatch(event)
self._event_queue.task_done()
self._process_commands(event)
except queue.Empty:
time.sleep(0.1)
# Process events
try:
event = self._event_queue.get(timeout=0.1)
# Route to the event handler
self._event_handler.dispatch(event)
should_check_commands = self._should_check_commands(event)
self._event_queue.task_done()
except queue.Empty:
# Process commands even when no new events arrive so abort requests are not missed
should_check_commands = True
time.sleep(0.1)
if should_check_commands and not commands_checked:
self._execution_coordinator.check_commands()
commands_checked = True
if should_break:
if not commands_checked:
self._execution_coordinator.check_commands()
self._process_commands()
while True:
try:
event = self._event_queue.get(block=False)
self._event_handler.dispatch(event)
self._event_queue.task_done()
except queue.Empty:
break
except Exception as e:
@ -129,6 +120,6 @@ class Dispatcher:
if self._event_emitter:
self._event_emitter.mark_complete()
def _should_check_commands(self, event: GraphNodeEventBase) -> bool:
"""Return True if the event represents a node completion."""
return isinstance(event, self._COMMAND_TRIGGER_EVENTS)
def _process_commands(self, event: GraphNodeEventBase | None = None):
if event is None or isinstance(event, self._COMMAND_TRIGGER_EVENTS):
self._execution_coordinator.process_commands()
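
Net effect of the refactor: all command polling funnels through `_process_commands`, which runs unconditionally when called with no event (loop entry and just before the final drain) and otherwise only for completion-style events. A condensed, self-contained sketch of that gating; the event classes here are stubs, and the real membership of `_COMMAND_TRIGGER_EVENTS` is not shown in this diff:

```python
class _Event: ...
class _CompletionEvent(_Event): ...  # stand-in for node-completion events

class _DispatcherSketch:
    _COMMAND_TRIGGER_EVENTS = (_CompletionEvent,)

    def __init__(self, coordinator) -> None:
        self._coordinator = coordinator

    def _process_commands(self, event: _Event | None = None) -> None:
        # No event: loop entry or pre-drain sweep, so always poll and
        # abort/pause commands are not missed. With an event: only
        # completion-style events warrant a command-channel check.
        if event is None or isinstance(event, self._COMMAND_TRIGGER_EVENTS):
            self._coordinator.process_commands()
```

This matches the test expectations later in this commit: a started event yields two checks (entry plus pre-drain), a succeeded event three (entry, the event itself, pre-drain).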

View File

@ -40,7 +40,7 @@ class ExecutionCoordinator:
self._command_processor = command_processor
self._worker_pool = worker_pool
def check_commands(self) -> None:
def process_commands(self) -> None:
"""Process any pending commands."""
self._command_processor.process_commands()
@ -48,24 +48,16 @@ class ExecutionCoordinator:
"""Check and perform worker scaling if needed."""
self._worker_pool.check_and_scale()
def is_execution_complete(self) -> bool:
"""
Check if execution is complete.
Returns:
True if execution is complete
"""
# Treat paused, aborted, or failed executions as terminal states
if self._graph_execution.is_paused:
return True
if self._graph_execution.aborted or self._graph_execution.has_error:
return True
@property
def execution_complete(self):
return self._state_manager.is_execution_complete()
@property
def is_paused(self) -> bool:
def aborted(self):
return self._graph_execution.aborted or self._graph_execution.has_error
@property
def paused(self) -> bool:
"""Expose whether the underlying graph execution is paused."""
return self._graph_execution.is_paused
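
Where the old `is_execution_complete()` folded paused, aborted, and finished states into one boolean, callers can now name each terminal condition. A hedged sketch of the resulting polling pattern, mirroring the dispatcher loop above:

```python
def should_stop(coordinator) -> bool:
    """Sketch only: distinguish the coordinator's terminal states."""
    if coordinator.aborted:      # aborted, or a node reported an error
        return True
    if coordinator.paused:       # pause requested; execution state is kept
        return True
    return coordinator.execution_complete  # all scheduled work finished
```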

View File

@ -9,7 +9,7 @@ from core.rag.index_processor.index_processor_factory import IndexProcessorFacto
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, DocumentSegment
from models.dataset import Dataset, DatasetMetadataBinding, DocumentSegment
from models.model import UploadFile
logger = logging.getLogger(__name__)
@ -37,6 +37,11 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not dataset:
raise Exception("Document has no dataset")
db.session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
segments = db.session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
@ -71,7 +76,8 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
db.session.delete(file)
db.session.commit()
db.session.commit()
end_at = time.perf_counter()
logger.info(
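
A note on the added cleanup: `synchronize_session=False` skips reconciling objects already loaded in the session, which is safe here because the deleted bindings are never read back before the commit. An equivalent SQLAlchemy 2.0-style formulation, as a sketch reusing the names from the surrounding file:

```python
from sqlalchemy import delete

# One bulk DELETE, with no ORM-object loading or session synchronization pass.
stmt = delete(DatasetMetadataBinding).where(
    DatasetMetadataBinding.dataset_id == dataset_id,
    DatasetMetadataBinding.document_id.in_(document_ids),
)
db.session.execute(stmt)
```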

View File

@ -0,0 +1,189 @@
"""Tests for dispatcher command checking behavior."""
from __future__ import annotations
import queue
from datetime import datetime
from unittest import mock
from core.workflow.entities.pause_reason import SchedulingPause
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.graph_engine.event_management.event_handlers import EventHandler
from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher
from core.workflow.graph_engine.orchestration.execution_coordinator import ExecutionCoordinator
from core.workflow.graph_events import (
GraphNodeEventBase,
NodeRunPauseRequestedEvent,
NodeRunStartedEvent,
NodeRunSucceededEvent,
)
from core.workflow.node_events import NodeRunResult
def test_dispatcher_should_consume_remains_events_after_pause():
event_queue = queue.Queue()
event_queue.put(
GraphNodeEventBase(
id="test",
node_id="test",
node_type=NodeType.START,
)
)
event_handler = mock.Mock(spec=EventHandler)
execution_coordinator = mock.Mock(spec=ExecutionCoordinator)
execution_coordinator.paused.return_value = True
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=execution_coordinator,
)
dispatcher._dispatcher_loop()
assert event_queue.empty()
class _StubExecutionCoordinator:
"""Stub execution coordinator that tracks command checks."""
def __init__(self) -> None:
self.command_checks = 0
self.scaling_checks = 0
self.execution_complete = False
self.failed = False
self._paused = False
def process_commands(self) -> None:
self.command_checks += 1
def check_scaling(self) -> None:
self.scaling_checks += 1
@property
def paused(self) -> bool:
return self._paused
@property
def aborted(self) -> bool:
return False
def mark_complete(self) -> None:
self.execution_complete = True
def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests
self.failed = True
class _StubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
self._coordinator.mark_complete()
def _run_dispatcher_for_event(event) -> int:
"""Run the dispatcher loop for a single event and return command check count."""
event_queue: queue.Queue = queue.Queue()
event_queue.put(event)
coordinator = _StubExecutionCoordinator()
event_handler = _StubEventHandler(coordinator)
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
return coordinator.command_checks
def _make_started_event() -> NodeRunStartedEvent:
return NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
)
def _make_succeeded_event() -> NodeRunSucceededEvent:
return NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
)
def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None:
"""Dispatcher polls commands when idle and after completion events."""
started_checks = _run_dispatcher_for_event(_make_started_event())
succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event())
assert started_checks == 2
assert succeeded_checks == 3
class _PauseStubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
if isinstance(event, NodeRunPauseRequestedEvent):
self._coordinator.mark_complete()
def test_dispatcher_drain_event_queue():
events = [
NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Code",
start_at=datetime.utcnow(),
),
NodeRunPauseRequestedEvent(
id="pause-event",
node_id="node-1",
node_type=NodeType.CODE,
reason=SchedulingPause(message="test pause"),
),
NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
),
]
event_queue: queue.Queue = queue.Queue()
for e in events:
event_queue.put(e)
coordinator = _StubExecutionCoordinator()
event_handler = _PauseStubEventHandler(coordinator)
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
# ensure all events are drained.
assert event_queue.empty()

View File

@ -3,13 +3,17 @@
import time
from unittest.mock import MagicMock
from core.app.entities.app_invoke_entities import InvokeFrom
from core.workflow.entities.graph_init_params import GraphInitParams
from core.workflow.entities.pause_reason import SchedulingPause
from core.workflow.graph import Graph
from core.workflow.graph_engine import GraphEngine
from core.workflow.graph_engine.command_channels import InMemoryChannel
from core.workflow.graph_engine.entities.commands import AbortCommand, CommandType, PauseCommand
from core.workflow.graph_events import GraphRunAbortedEvent, GraphRunPausedEvent, GraphRunStartedEvent
from core.workflow.nodes.start.start_node import StartNode
from core.workflow.runtime import GraphRuntimeState, VariablePool
from models.enums import UserFrom
def test_abort_command():
@ -26,11 +30,23 @@ def test_abort_command():
mock_graph.root_node.id = "start"
# Create mock nodes with required attributes - using shared runtime state
mock_start_node = MagicMock()
mock_start_node.state = None
mock_start_node.id = "start"
mock_start_node.graph_runtime_state = shared_runtime_state # Use shared instance
mock_graph.nodes["start"] = mock_start_node
start_node = StartNode(
id="start",
config={"id": "start"},
graph_init_params=GraphInitParams(
tenant_id="test_tenant",
app_id="test_app",
workflow_id="test_workflow",
graph_config={},
user_id="test_user",
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.DEBUGGER,
call_depth=0,
),
graph_runtime_state=shared_runtime_state,
)
start_node.init_node_data({"title": "start", "variables": []})
mock_graph.nodes["start"] = start_node
# Mock graph methods
mock_graph.get_outgoing_edges = MagicMock(return_value=[])
@ -124,11 +140,23 @@ def test_pause_command():
mock_graph.root_node = MagicMock()
mock_graph.root_node.id = "start"
mock_start_node = MagicMock()
mock_start_node.state = None
mock_start_node.id = "start"
mock_start_node.graph_runtime_state = shared_runtime_state
mock_graph.nodes["start"] = mock_start_node
start_node = StartNode(
id="start",
config={"id": "start"},
graph_init_params=GraphInitParams(
tenant_id="test_tenant",
app_id="test_app",
workflow_id="test_workflow",
graph_config={},
user_id="test_user",
user_from=UserFrom.ACCOUNT,
invoke_from=InvokeFrom.DEBUGGER,
call_depth=0,
),
graph_runtime_state=shared_runtime_state,
)
start_node.init_node_data({"title": "start", "variables": []})
mock_graph.nodes["start"] = start_node
mock_graph.get_outgoing_edges = MagicMock(return_value=[])
mock_graph.get_incoming_edges = MagicMock(return_value=[])
@ -153,5 +181,5 @@ def test_pause_command():
assert pause_events[0].reason == SchedulingPause(message="User requested pause")
graph_execution = engine.graph_runtime_state.graph_execution
assert graph_execution.is_paused
assert graph_execution.paused
assert graph_execution.pause_reason == SchedulingPause(message="User requested pause")

View File

@ -1,109 +0,0 @@
"""Tests for dispatcher command checking behavior."""
from __future__ import annotations
import queue
from datetime import datetime
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.graph_engine.event_management.event_manager import EventManager
from core.workflow.graph_engine.orchestration.dispatcher import Dispatcher
from core.workflow.graph_events import NodeRunStartedEvent, NodeRunSucceededEvent
from core.workflow.node_events import NodeRunResult
class _StubExecutionCoordinator:
"""Stub execution coordinator that tracks command checks."""
def __init__(self) -> None:
self.command_checks = 0
self.scaling_checks = 0
self._execution_complete = False
self.mark_complete_called = False
self.failed = False
self._paused = False
def check_commands(self) -> None:
self.command_checks += 1
def check_scaling(self) -> None:
self.scaling_checks += 1
@property
def is_paused(self) -> bool:
return self._paused
def is_execution_complete(self) -> bool:
return self._execution_complete
def mark_complete(self) -> None:
self.mark_complete_called = True
def mark_failed(self, error: Exception) -> None: # pragma: no cover - defensive, not triggered in tests
self.failed = True
def set_execution_complete(self) -> None:
self._execution_complete = True
class _StubEventHandler:
"""Minimal event handler that marks execution complete after handling an event."""
def __init__(self, coordinator: _StubExecutionCoordinator) -> None:
self._coordinator = coordinator
self.events = []
def dispatch(self, event) -> None:
self.events.append(event)
self._coordinator.set_execution_complete()
def _run_dispatcher_for_event(event) -> int:
"""Run the dispatcher loop for a single event and return command check count."""
event_queue: queue.Queue = queue.Queue()
event_queue.put(event)
coordinator = _StubExecutionCoordinator()
event_handler = _StubEventHandler(coordinator)
event_manager = EventManager()
dispatcher = Dispatcher(
event_queue=event_queue,
event_handler=event_handler,
event_collector=event_manager,
execution_coordinator=coordinator,
)
dispatcher._dispatcher_loop()
return coordinator.command_checks
def _make_started_event() -> NodeRunStartedEvent:
return NodeRunStartedEvent(
id="start-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
)
def _make_succeeded_event() -> NodeRunSucceededEvent:
return NodeRunSucceededEvent(
id="success-event",
node_id="node-1",
node_type=NodeType.CODE,
node_title="Test Node",
start_at=datetime.utcnow(),
node_run_result=NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED),
)
def test_dispatcher_checks_commands_during_idle_and_on_completion() -> None:
"""Dispatcher polls commands when idle and after completion events."""
started_checks = _run_dispatcher_for_event(_make_started_event())
succeeded_checks = _run_dispatcher_for_event(_make_succeeded_event())
assert started_checks == 1
assert succeeded_checks == 2

View File

@ -48,15 +48,3 @@ def test_handle_pause_noop_when_execution_running() -> None:
worker_pool.stop.assert_not_called()
state_manager.clear_executing.assert_not_called()
def test_is_execution_complete_when_paused() -> None:
"""Paused execution should be treated as complete."""
graph_execution = GraphExecution(workflow_id="workflow")
graph_execution.start()
graph_execution.pause("Awaiting input")
coordinator, state_manager, _worker_pool = _build_coordinator(graph_execution)
state_manager.is_execution_complete.return_value = False
assert coordinator.is_execution_complete()

View File

@ -365,10 +365,9 @@ WEB_API_CORS_ALLOW_ORIGINS=*
# Specifies the allowed origins for cross-origin requests to the console API,
# e.g. https://cloud.dify.ai or * for all origins.
CONSOLE_CORS_ALLOW_ORIGINS=*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
COOKIE_DOMAIN=
# The frontend reads NEXT_PUBLIC_COOKIE_DOMAIN to align cookie handling with the API.
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# ------------------------------
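
Taken together, a split deployment sets both knobs shown in this hunk; a hedged pairing, hostnames illustrative:

```
# Backend: registrable domain shared by both subdomains
COOKIE_DOMAIN=example.com
# Frontend: opt-in flag for cross-subdomain cookie handling
NEXT_PUBLIC_COOKIE_DOMAIN=1
```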

View File

@ -55,7 +55,8 @@ services:
- ./volumes/data:/data
- ./volumes/logs:/logs
command:
- --config=/tiflash.toml
- server
- --config-file=/tiflash.toml
depends_on:
- "tikv"
- "tidb"

View File

@ -12,6 +12,9 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
# console or api domain.
# example: http://udify.app/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1
# The URL for MARKETPLACE
@ -34,9 +37,6 @@ NEXT_PUBLIC_CSP_WHITELIST=
# Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
NEXT_PUBLIC_ALLOW_EMBED=
# Shared cookie domain when console UI and API use different subdomains (e.g. example.com)
NEXT_PUBLIC_COOKIE_DOMAIN=
# Allow rendering unsafe URLs which have "data:" scheme.
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME=false

View File

@ -32,6 +32,7 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# different from api or web app domain.
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_COOKIE_DOMAIN=
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api
@ -41,6 +42,11 @@ NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
NEXT_PUBLIC_SENTRY_DSN=
```
> [!IMPORTANT]
>
> 1. When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1. The frontend and backend must be under the same top-level domain in order to share authentication cookies.
> 1. NEXT_PUBLIC_API_PREFIX and NEXT_PUBLIC_PUBLIC_API_PREFIX must point to the correct backend API URLs.
Finally, run the development server:
```bash

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Abschießen',
enableTooltip: {},
},
apiInfo: {
title: 'Backend-Service-API',
@ -125,6 +126,10 @@ const translation = {
running: 'In Betrieb',
disable: 'Deaktivieren',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Die Funktion {{feature}} wird im Trigger-Knoten-Modus nicht unterstützt.',
},
},
analysis: {
title: 'Analyse',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Lanzar',
enableTooltip: {},
},
apiInfo: {
title: 'API del servicio backend',
@ -125,6 +126,10 @@ const translation = {
running: 'En servicio',
disable: 'Deshabilitar',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'La función {{feature}} no es compatible en el modo Nodo de disparo.',
},
},
analysis: {
title: 'Análisis',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'راه اندازی',
enableTooltip: {},
},
apiInfo: {
title: 'API سرویس بک‌اند',
@ -125,6 +126,10 @@ const translation = {
running: 'در حال سرویس‌دهی',
disable: 'غیرفعال',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'ویژگی {{feature}} در حالت گره تریگر پشتیبانی نمی‌شود.',
},
},
analysis: {
title: 'تحلیل',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Lancer',
enableTooltip: {},
},
apiInfo: {
title: 'API de service Backend',
@ -125,6 +126,10 @@ const translation = {
running: 'En service',
disable: 'Désactiver',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'La fonctionnalité {{feature}} n\'est pas prise en charge en mode Nœud Déclencheur.',
},
},
analysis: {
title: 'Analyse',

View File

@ -125,6 +125,7 @@ const translation = {
},
},
launch: 'लॉन्च',
enableTooltip: {},
},
apiInfo: {
title: 'बैकएंड सेवा एपीआई',
@ -136,6 +137,10 @@ const translation = {
running: 'सेवा में',
disable: 'अक्षम करें',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'ट्रिगर नोड मोड में {{feature}} फ़ीचर समर्थित नहीं है।',
},
},
analysis: {
title: 'विश्लेषण',

View File

@ -111,6 +111,7 @@ const translation = {
preUseReminder: 'Harap aktifkan aplikasi web sebelum melanjutkan.',
regenerateNotice: 'Apakah Anda ingin membuat ulang URL publik?',
explanation: 'Aplikasi web AI siap pakai',
enableTooltip: {},
},
apiInfo: {
accessibleAddress: 'Titik Akhir API Layanan',
@ -123,6 +124,10 @@ const translation = {
running: 'Berjalan',
},
title: 'Ikhtisar',
triggerInfo: {},
disableTooltip: {
triggerMode: 'Fitur {{feature}} tidak didukung dalam mode Node Pemicu.',
},
},
analysis: {
totalMessages: {

View File

@ -127,6 +127,7 @@ const translation = {
},
},
launch: 'Lanciare',
enableTooltip: {},
},
apiInfo: {
title: 'API del servizio backend',
@ -138,6 +139,10 @@ const translation = {
running: 'In servizio',
disable: 'Disabilita',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'La funzionalità {{feature}} non è supportata in modalità Nodo Trigger.',
},
},
analysis: {
title: 'Analisi',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: '발사',
enableTooltip: {},
},
apiInfo: {
title: '백엔드 서비스 API',
@ -125,6 +126,10 @@ const translation = {
running: '서비스 중',
disable: '비활성',
},
triggerInfo: {},
disableTooltip: {
triggerMode: '트리거 노드 모드에서는 {{feature}} 기능이 지원되지 않습니다.',
},
},
analysis: {
title: '분석',

View File

@ -125,6 +125,7 @@ const translation = {
},
},
launch: 'Uruchomić',
enableTooltip: {},
},
apiInfo: {
title: 'API usługi w tle',
@ -136,6 +137,10 @@ const translation = {
running: 'W usłudze',
disable: 'Wyłącz',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Funkcja {{feature}} nie jest obsługiwana w trybie węzła wyzwalającego.',
},
},
analysis: {
title: 'Analiza',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Lançar',
enableTooltip: {},
},
apiInfo: {
title: 'API de Serviço de Back-end',
@ -125,6 +126,10 @@ const translation = {
running: 'Em serviço',
disable: 'Desabilitar',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'O recurso {{feature}} não é compatível no modo Nó de Gatilho.',
},
},
analysis: {
title: 'Análise',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Lansa',
enableTooltip: {},
},
apiInfo: {
title: 'API serviciu backend',
@ -125,6 +126,10 @@ const translation = {
running: 'În service',
disable: 'Dezactivat',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Funcționalitatea {{feature}} nu este suportată în modul Nod Trigger.',
},
},
analysis: {
title: 'Analiză',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Баркас',
enableTooltip: {},
},
apiInfo: {
title: 'API серверной части',
@ -125,6 +126,10 @@ const translation = {
running: 'В работе',
disable: 'Отключено',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Функция {{feature}} не поддерживается в режиме узла триггера.',
},
},
analysis: {
title: 'Анализ',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Začetek',
enableTooltip: {},
},
apiInfo: {
title: 'API storitev v ozadju',
@ -125,6 +126,10 @@ const translation = {
running: 'V storitvi',
disable: 'Onemogočeno',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Funkcija {{feature}} ni podprta v načinu vozlišča sprožilca.',
},
},
analysis: {
title: 'Analiza',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'เรือยนต์',
enableTooltip: {},
},
apiInfo: {
title: 'API บริการแบ็กเอนด์',
@ -125,6 +126,10 @@ const translation = {
running: 'ให้บริการ',
disable: 'พิการ',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'โหมดโหนดทริกเกอร์ไม่รองรับฟีเจอร์ {{feature}}.',
},
},
analysis: {
title: 'การวิเคราะห์',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Başlat',
enableTooltip: {},
},
apiInfo: {
title: 'Arka Uç Servis API\'si',
@ -125,6 +126,10 @@ const translation = {
running: 'Hizmette',
disable: 'Devre Dışı',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Trigger Düğümü modunda {{feature}} özelliği desteklenmiyor.',
},
},
analysis: {
title: 'Analiz',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Запуску',
enableTooltip: {},
},
apiInfo: {
title: 'API сервісу Backend',
@ -125,6 +126,10 @@ const translation = {
running: 'У роботі',
disable: 'Вимкнути',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Функція {{feature}} не підтримується в режимі вузла тригера.',
},
},
analysis: {
title: 'Аналіз',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: 'Phóng',
enableTooltip: {},
},
apiInfo: {
title: 'API dịch vụ backend',
@ -125,6 +126,10 @@ const translation = {
running: 'Đang hoạt động',
disable: 'Đã tắt',
},
triggerInfo: {},
disableTooltip: {
triggerMode: 'Tính năng {{feature}} không được hỗ trợ trong chế độ Nút Kích hoạt.',
},
},
analysis: {
title: 'Phân tích',

View File

@ -114,6 +114,7 @@ const translation = {
},
},
launch: '發射',
enableTooltip: {},
},
apiInfo: {
title: '後端服務 API',
@ -125,6 +126,10 @@ const translation = {
running: '執行中',
disable: '已停用',
},
triggerInfo: {},
disableTooltip: {
triggerMode: '觸發節點模式不支援 {{feature}} 功能。',
},
},
analysis: {
title: '分析',