Merge branch 'main' into feat/rag-plugin-recommendation-optimization

This commit is contained in:
twwu 2025-10-27 11:49:05 +08:00
commit 4590f7daa5
63 changed files with 644 additions and 325 deletions

View File

@ -11,7 +11,7 @@
"nodeGypDependencies": true,
"version": "lts"
},
"ghcr.io/devcontainers-contrib/features/npm-package:1": {
"ghcr.io/devcontainers-extra/features/npm-package:1": {
"package": "typescript",
"version": "latest"
},

View File

@ -63,7 +63,7 @@ Dify is an open-source platform for developing LLM applications. Its intuitive i
> - CPU >= 2 Core
> - RAM >= 4 GiB
</br>
<br/>
The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
@ -109,15 +109,15 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly
## Using Dify
- **Cloud </br>**
- **Cloud <br/>**
We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
- **Self-hosting Dify Community Edition</br>**
- **Self-hosting Dify Community Edition<br/>**
Quickly get Dify running in your environment with this [starter guide](#quick-start).
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations</br>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. </br>
- **Dify for enterprise / organizations<br/>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. <br/>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

View File

@ -80,7 +80,7 @@
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
uv run celery -A app.celery worker -P gevent -c 2 --loglevel INFO -Q dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation
```
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:

View File

@ -29,6 +29,7 @@ from libs.token import (
clear_access_token_from_cookie,
clear_csrf_token_from_cookie,
clear_refresh_token_from_cookie,
extract_refresh_token,
set_access_token_to_cookie,
set_csrf_token_to_cookie,
set_refresh_token_to_cookie,
@ -270,7 +271,7 @@ class EmailCodeLoginApi(Resource):
class RefreshTokenApi(Resource):
def post(self):
# Get refresh token from cookie instead of request body
refresh_token = request.cookies.get("refresh_token")
refresh_token = extract_refresh_token(request)
if not refresh_token:
return {"result": "fail", "message": "No refresh token provided"}, 401

View File

@ -22,7 +22,7 @@ from core.errors.error import (
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from libs import helper
from libs.login import current_user as current_user_
from libs.login import current_account_with_tenant
from models.model import AppMode, InstalledApp
from services.app_generate_service import AppGenerateService
from services.errors.llm import InvokeRateLimitError
@ -31,8 +31,6 @@ from .. import console_ns
logger = logging.getLogger(__name__)
current_user = current_user_._get_current_object() # type: ignore
@console_ns.route("/installed-apps/<uuid:installed_app_id>/workflows/run")
class InstalledAppWorkflowRunApi(InstalledAppResource):
@ -40,6 +38,7 @@ class InstalledAppWorkflowRunApi(InstalledAppResource):
"""
Run workflow
"""
current_user, _ = current_account_with_tenant()
app_model = installed_app.app
if not app_model:
raise NotWorkflowAppError()
@ -53,7 +52,6 @@ class InstalledAppWorkflowRunApi(InstalledAppResource):
.add_argument("files", type=list, required=False, location="json")
)
args = parser.parse_args()
assert current_user is not None
try:
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
@ -89,7 +87,6 @@ class InstalledAppWorkflowTaskStopApi(InstalledAppResource):
app_mode = AppMode.value_of(app_model.mode)
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
assert current_user is not None
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)

View File

@ -14,10 +14,25 @@ from services.file_service import FileService
@files_ns.route("/<uuid:file_id>/image-preview")
class ImagePreviewApi(Resource):
"""
Deprecated
"""
"""Deprecated endpoint for retrieving image previews."""
@files_ns.doc("get_image_preview")
@files_ns.doc(description="Retrieve a signed image preview for a file")
@files_ns.doc(
params={
"file_id": "ID of the file to preview",
"timestamp": "Unix timestamp used in the signature",
"nonce": "Random string used in the signature",
"sign": "HMAC signature verifying the request",
}
)
@files_ns.doc(
responses={
200: "Image preview returned successfully",
400: "Missing or invalid signature parameters",
415: "Unsupported file type",
}
)
def get(self, file_id):
file_id = str(file_id)
@ -43,6 +58,25 @@ class ImagePreviewApi(Resource):
@files_ns.route("/<uuid:file_id>/file-preview")
class FilePreviewApi(Resource):
@files_ns.doc("get_file_preview")
@files_ns.doc(description="Download a file preview or attachment using signed parameters")
@files_ns.doc(
params={
"file_id": "ID of the file to preview",
"timestamp": "Unix timestamp used in the signature",
"nonce": "Random string used in the signature",
"sign": "HMAC signature verifying the request",
"as_attachment": "Whether to download the file as an attachment",
}
)
@files_ns.doc(
responses={
200: "File stream returned successfully",
400: "Missing or invalid signature parameters",
404: "File not found",
415: "Unsupported file type",
}
)
def get(self, file_id):
file_id = str(file_id)
@ -101,6 +135,20 @@ class FilePreviewApi(Resource):
@files_ns.route("/workspaces/<uuid:workspace_id>/webapp-logo")
class WorkspaceWebappLogoApi(Resource):
@files_ns.doc("get_workspace_webapp_logo")
@files_ns.doc(description="Fetch the custom webapp logo for a workspace")
@files_ns.doc(
params={
"workspace_id": "Workspace identifier",
}
)
@files_ns.doc(
responses={
200: "Logo returned successfully",
404: "Webapp logo not configured",
415: "Unsupported file type",
}
)
def get(self, workspace_id):
workspace_id = str(workspace_id)

View File

@ -13,6 +13,26 @@ from extensions.ext_database import db as global_db
@files_ns.route("/tools/<uuid:file_id>.<string:extension>")
class ToolFileApi(Resource):
@files_ns.doc("get_tool_file")
@files_ns.doc(description="Download a tool file by ID using signed parameters")
@files_ns.doc(
params={
"file_id": "Tool file identifier",
"extension": "Expected file extension",
"timestamp": "Unix timestamp used in the signature",
"nonce": "Random string used in the signature",
"sign": "HMAC signature verifying the request",
"as_attachment": "Whether to download the file as an attachment",
}
)
@files_ns.doc(
responses={
200: "Tool file stream returned successfully",
403: "Forbidden - invalid signature",
404: "File not found",
415: "Unsupported file type",
}
)
def get(self, file_id, extension):
file_id = str(file_id)

View File

@ -415,7 +415,6 @@ class IndexingRunner:
document_id=dataset_document.id,
after_indexing_status="splitting",
extra_update_params={
DatasetDocument.word_count: sum(len(text_doc.page_content) for text_doc in text_docs),
DatasetDocument.parsing_completed_at: naive_utc_now(),
},
)
@ -755,6 +754,7 @@ class IndexingRunner:
extra_update_params={
DatasetDocument.cleaning_completed_at: cur_time,
DatasetDocument.splitting_completed_at: cur_time,
DatasetDocument.word_count: sum(len(doc.page_content) for doc in documents),
},
)

View File

@ -161,7 +161,7 @@ class OpenSearchVector(BaseVector):
logger.exception("Error deleting document: %s", error)
def delete(self):
    """Delete the OpenSearch index backing this collection.

    ``ignore_unavailable=True`` turns the call into a no-op when the
    index does not exist, so ``delete()`` is safe to call repeatedly
    (the duplicate unguarded delete call, which raised NotFoundError
    for a missing index, is removed).
    """
    self._client.indices.delete(index=self._collection_name.lower(), ignore_unavailable=True)
def text_exists(self, id: str) -> bool:
try:

View File

@ -31,6 +31,7 @@ VARIABLE_TO_PARAMETER_TYPE_MAPPING = {
VariableEntityType.PARAGRAPH: ToolParameter.ToolParameterType.STRING,
VariableEntityType.SELECT: ToolParameter.ToolParameterType.SELECT,
VariableEntityType.NUMBER: ToolParameter.ToolParameterType.NUMBER,
VariableEntityType.CHECKBOX: ToolParameter.ToolParameterType.BOOLEAN,
VariableEntityType.FILE: ToolParameter.ToolParameterType.FILE,
VariableEntityType.FILE_LIST: ToolParameter.ToolParameterType.FILES,
}

View File

@ -24,6 +24,7 @@ from core.workflow.graph_events import (
NodeRunLoopStartedEvent,
NodeRunLoopSucceededEvent,
NodeRunPauseRequestedEvent,
NodeRunRetrieverResourceEvent,
NodeRunRetryEvent,
NodeRunStartedEvent,
NodeRunStreamChunkEvent,
@ -112,6 +113,7 @@ class EventHandler:
@_dispatch.register(NodeRunLoopSucceededEvent)
@_dispatch.register(NodeRunLoopFailedEvent)
@_dispatch.register(NodeRunAgentLogEvent)
@_dispatch.register(NodeRunRetrieverResourceEvent)
def _(self, event: GraphNodeEventBase) -> None:
self._event_collector.collect(event)

View File

@ -193,15 +193,19 @@ class QuestionClassifierNode(Node):
finish_reason = event.finish_reason
break
category_name = node_data.classes[0].name
category_id = node_data.classes[0].id
rendered_classes = [
c.model_copy(update={"name": variable_pool.convert_template(c.name).text}) for c in node_data.classes
]
category_name = rendered_classes[0].name
category_id = rendered_classes[0].id
if "<think>" in result_text:
result_text = re.sub(r"<think[^>]*>[\s\S]*?</think>", "", result_text, flags=re.IGNORECASE)
result_text_json = parse_and_check_json_markdown(result_text, [])
# result_text_json = json.loads(result_text.strip('```JSON\n'))
if "category_name" in result_text_json and "category_id" in result_text_json:
category_id_result = result_text_json["category_id"]
classes = node_data.classes
classes = rendered_classes
classes_map = {class_.id: class_.name for class_ in classes}
category_ids = [_class.id for _class in classes]
if category_id_result in category_ids:

View File

@ -5,6 +5,7 @@ import json
from collections.abc import Mapping, Sequence
from collections.abc import Mapping as TypingMapping
from copy import deepcopy
from dataclasses import dataclass
from typing import Any, Protocol
from pydantic.json import pydantic_encoder
@ -106,6 +107,23 @@ class GraphProtocol(Protocol):
def get_outgoing_edges(self, node_id: str) -> Sequence[object]: ...
@dataclass(slots=True)
class _GraphRuntimeStateSnapshot:
    """Immutable view of a serialized runtime state snapshot."""

    # Execution start time (float, as stored under "start_at" in the payload).
    start_at: float
    # Cumulative token count; validated non-negative during parsing.
    total_tokens: int
    # Number of node run steps; validated non-negative during parsing.
    node_run_steps: int
    # Aggregated usage restored via LLMUsage.model_validate.
    llm_usage: LLMUsage
    # Workflow outputs mapping (deep-copied from the payload).
    outputs: dict[str, Any]
    # Restored pool; a fresh empty VariablePool when the payload had none.
    variable_pool: VariablePool
    # Whether the payload actually contained a "variable_pool" entry.
    has_variable_pool: bool
    # Raw serialized dumps, rehydrated lazily by the _restore_* helpers.
    ready_queue_dump: str | None
    graph_execution_dump: str | None
    response_coordinator_dump: str | None
    # Node IDs recorded as paused, to resume when execution continues.
    paused_nodes: tuple[str, ...]
class GraphRuntimeState:
"""Mutable runtime state shared across graph execution components."""
@ -293,69 +311,28 @@ class GraphRuntimeState:
return json.dumps(snapshot, default=pydantic_encoder)
def loads(self, data: str | Mapping[str, Any]) -> None:
@classmethod
def from_snapshot(cls, data: str | Mapping[str, Any]) -> GraphRuntimeState:
"""Restore runtime state from a serialized snapshot."""
payload: dict[str, Any]
if isinstance(data, str):
payload = json.loads(data)
else:
payload = dict(data)
snapshot = cls._parse_snapshot_payload(data)
version = payload.get("version")
if version != "1.0":
raise ValueError(f"Unsupported GraphRuntimeState snapshot version: {version}")
state = cls(
variable_pool=snapshot.variable_pool,
start_at=snapshot.start_at,
total_tokens=snapshot.total_tokens,
llm_usage=snapshot.llm_usage,
outputs=snapshot.outputs,
node_run_steps=snapshot.node_run_steps,
)
state._apply_snapshot(snapshot)
return state
self._start_at = float(payload.get("start_at", 0.0))
total_tokens = int(payload.get("total_tokens", 0))
if total_tokens < 0:
raise ValueError("total_tokens must be non-negative")
self._total_tokens = total_tokens
def loads(self, data: str | Mapping[str, Any]) -> None:
"""Restore runtime state from a serialized snapshot (legacy API)."""
node_run_steps = int(payload.get("node_run_steps", 0))
if node_run_steps < 0:
raise ValueError("node_run_steps must be non-negative")
self._node_run_steps = node_run_steps
llm_usage_payload = payload.get("llm_usage", {})
self._llm_usage = LLMUsage.model_validate(llm_usage_payload)
self._outputs = deepcopy(payload.get("outputs", {}))
variable_pool_payload = payload.get("variable_pool")
if variable_pool_payload is not None:
self._variable_pool = VariablePool.model_validate(variable_pool_payload)
ready_queue_payload = payload.get("ready_queue")
if ready_queue_payload is not None:
self._ready_queue = self._build_ready_queue()
self._ready_queue.loads(ready_queue_payload)
else:
self._ready_queue = None
graph_execution_payload = payload.get("graph_execution")
self._graph_execution = None
self._pending_graph_execution_workflow_id = None
if graph_execution_payload is not None:
try:
execution_payload = json.loads(graph_execution_payload)
self._pending_graph_execution_workflow_id = execution_payload.get("workflow_id")
except (json.JSONDecodeError, TypeError, AttributeError):
self._pending_graph_execution_workflow_id = None
self.graph_execution.loads(graph_execution_payload)
response_payload = payload.get("response_coordinator")
if response_payload is not None:
if self._graph is not None:
self.response_coordinator.loads(response_payload)
else:
self._pending_response_coordinator_dump = response_payload
else:
self._pending_response_coordinator_dump = None
self._response_coordinator = None
paused_nodes_payload = payload.get("paused_nodes", [])
self._paused_nodes = set(map(str, paused_nodes_payload))
snapshot = self._parse_snapshot_payload(data)
self._apply_snapshot(snapshot)
def register_paused_node(self, node_id: str) -> None:
"""Record a node that should resume when execution is continued."""
@ -391,3 +368,106 @@ class GraphRuntimeState:
module = importlib.import_module("core.workflow.graph_engine.response_coordinator")
coordinator_cls = module.ResponseStreamCoordinator
return coordinator_cls(variable_pool=self.variable_pool, graph=graph)
# ------------------------------------------------------------------
# Snapshot helpers
# ------------------------------------------------------------------
@classmethod
def _parse_snapshot_payload(cls, data: str | Mapping[str, Any]) -> _GraphRuntimeStateSnapshot:
    """Validate and decode a serialized snapshot into a structured view.

    Args:
        data: JSON string or mapping produced by ``dumps()``.

    Returns:
        A ``_GraphRuntimeStateSnapshot`` with validated scalar fields and
        the still-opaque dumps of the queue/execution/coordinator sub-states.

    Raises:
        ValueError: on an unsupported snapshot version or negative counters.
    """
    payload: dict[str, Any]
    if isinstance(data, str):
        payload = json.loads(data)
    else:
        # Shallow-copy so mutating the payload never touches the caller's mapping.
        payload = dict(data)

    version = payload.get("version")
    if version != "1.0":
        raise ValueError(f"Unsupported GraphRuntimeState snapshot version: {version}")

    start_at = float(payload.get("start_at", 0.0))

    total_tokens = int(payload.get("total_tokens", 0))
    if total_tokens < 0:
        raise ValueError("total_tokens must be non-negative")

    node_run_steps = int(payload.get("node_run_steps", 0))
    if node_run_steps < 0:
        raise ValueError("node_run_steps must be non-negative")

    llm_usage_payload = payload.get("llm_usage", {})
    llm_usage = LLMUsage.model_validate(llm_usage_payload)
    # Deep copy so the snapshot's outputs never alias the payload's objects.
    outputs_payload = deepcopy(payload.get("outputs", {}))

    variable_pool_payload = payload.get("variable_pool")
    has_variable_pool = variable_pool_payload is not None
    variable_pool = VariablePool.model_validate(variable_pool_payload) if has_variable_pool else VariablePool()

    # Sub-state dumps stay raw here; _apply_snapshot rehydrates them later.
    ready_queue_payload = payload.get("ready_queue")
    graph_execution_payload = payload.get("graph_execution")
    response_payload = payload.get("response_coordinator")
    paused_nodes_payload = payload.get("paused_nodes", [])

    return _GraphRuntimeStateSnapshot(
        start_at=start_at,
        total_tokens=total_tokens,
        node_run_steps=node_run_steps,
        llm_usage=llm_usage,
        outputs=outputs_payload,
        variable_pool=variable_pool,
        has_variable_pool=has_variable_pool,
        ready_queue_dump=ready_queue_payload,
        graph_execution_dump=graph_execution_payload,
        response_coordinator_dump=response_payload,
        paused_nodes=tuple(map(str, paused_nodes_payload)),
    )
def _apply_snapshot(self, snapshot: _GraphRuntimeStateSnapshot) -> None:
    """Copy every field of *snapshot* into this instance's private state."""
    self._start_at = snapshot.start_at
    self._total_tokens = snapshot.total_tokens
    self._node_run_steps = snapshot.node_run_steps
    # model_copy keeps this state's usage independent of the snapshot's object.
    self._llm_usage = snapshot.llm_usage.model_copy()
    self._outputs = deepcopy(snapshot.outputs)
    # Keep the existing pool when the payload carried none and one is present.
    if snapshot.has_variable_pool or self._variable_pool is None:
        self._variable_pool = snapshot.variable_pool
    self._restore_ready_queue(snapshot.ready_queue_dump)
    self._restore_graph_execution(snapshot.graph_execution_dump)
    self._restore_response_coordinator(snapshot.response_coordinator_dump)
    self._paused_nodes = set(snapshot.paused_nodes)
def _restore_ready_queue(self, payload: str | None) -> None:
    """Rebuild the ready queue from its serialized dump, or clear it."""
    if payload is None:
        self._ready_queue = None
        return
    queue = self._build_ready_queue()
    queue.loads(payload)
    self._ready_queue = queue
def _restore_graph_execution(self, payload: str | None) -> None:
    """Rehydrate the graph execution from its serialized dump.

    The workflow_id is peeked out of the raw JSON *before* loading, so it
    is recorded as pending even when full restoration happens lazily;
    malformed payloads simply leave no pending workflow id.
    """
    self._graph_execution = None
    self._pending_graph_execution_workflow_id = None
    if payload is None:
        return
    try:
        execution_payload = json.loads(payload)
        self._pending_graph_execution_workflow_id = execution_payload.get("workflow_id")
    except (json.JSONDecodeError, TypeError, AttributeError):
        self._pending_graph_execution_workflow_id = None
    # NOTE(review): accessing the property after clearing _graph_execution
    # presumably constructs a fresh execution object on demand — confirm.
    self.graph_execution.loads(payload)
def _restore_response_coordinator(self, payload: str | None) -> None:
    """Restore the coordinator now if a graph is attached, else defer the dump."""
    if payload is None:
        # Nothing to restore: clear both the deferred dump and the instance.
        self._pending_response_coordinator_dump = None
        self._response_coordinator = None
    elif self._graph is not None:
        # Graph available: load into the live coordinator immediately.
        self.response_coordinator.loads(payload)
        self._pending_response_coordinator_dump = None
    else:
        # No graph yet: stash the dump for application at attach time.
        self._pending_response_coordinator_dump = payload
        self._response_coordinator = None

View File

@ -32,7 +32,7 @@ if [[ "${MODE}" == "worker" ]]; then
exec celery -A celery_entrypoint.celery worker -P ${CELERY_WORKER_CLASS:-gevent} $CONCURRENCY_OPTION \
--max-tasks-per-child ${MAX_TASKS_PER_CHILD:-50} --loglevel ${LOG_LEVEL:-INFO} \
-Q ${CELERY_QUEUES:-dataset,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation} \
-Q ${CELERY_QUEUES:-dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation} \
--prefetch-multiplier=1
elif [[ "${MODE}" == "beat" ]]; then

View File

@ -6,10 +6,11 @@ from flask_login import user_loaded_from_request, user_logged_in
from werkzeug.exceptions import NotFound, Unauthorized
from configs import dify_config
from constants import HEADER_NAME_APP_CODE
from dify_app import DifyApp
from extensions.ext_database import db
from libs.passport import PassportService
from libs.token import extract_access_token
from libs.token import extract_access_token, extract_webapp_passport
from models import Account, Tenant, TenantAccountJoin
from models.model import AppMCPServer, EndUser
from services.account_service import AccountService
@ -61,14 +62,30 @@ def load_user_from_request(request_from_flask_login):
logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
return logged_in_account
elif request.blueprint == "web":
decoded = PassportService().verify(auth_token)
end_user_id = decoded.get("end_user_id")
if not end_user_id:
raise Unauthorized("Invalid Authorization token.")
end_user = db.session.query(EndUser).where(EndUser.id == decoded["end_user_id"]).first()
if not end_user:
raise NotFound("End user not found.")
return end_user
app_code = request.headers.get(HEADER_NAME_APP_CODE)
webapp_token = extract_webapp_passport(app_code, request) if app_code else None
if webapp_token:
decoded = PassportService().verify(webapp_token)
end_user_id = decoded.get("end_user_id")
if not end_user_id:
raise Unauthorized("Invalid Authorization token.")
end_user = db.session.query(EndUser).where(EndUser.id == end_user_id).first()
if not end_user:
raise NotFound("End user not found.")
return end_user
else:
if not auth_token:
raise Unauthorized("Invalid Authorization token.")
decoded = PassportService().verify(auth_token)
end_user_id = decoded.get("end_user_id")
if end_user_id:
end_user = db.session.query(EndUser).where(EndUser.id == end_user_id).first()
if not end_user:
raise NotFound("End user not found.")
return end_user
else:
raise Unauthorized("Invalid Authorization token for web API.")
elif request.blueprint == "mcp":
server_code = request.view_args.get("server_code") if request.view_args else None
if not server_code:

View File

@ -38,9 +38,6 @@ def _real_cookie_name(cookie_name: str) -> str:
def _try_extract_from_header(request: Request) -> str | None:
"""
Try to extract access token from header
"""
auth_header = request.headers.get("Authorization")
if auth_header:
if " " not in auth_header:
@ -55,27 +52,19 @@ def _try_extract_from_header(request: Request) -> str | None:
return None
def extract_refresh_token(request: Request) -> str | None:
    """Read the refresh token from its (environment-prefixed) cookie."""
    cookie_name = _real_cookie_name(COOKIE_NAME_REFRESH_TOKEN)
    return request.cookies.get(cookie_name)
def extract_csrf_token(request: Request) -> str | None:
    """
    Try to extract CSRF token from the request header.

    Note: despite the original wording ("header or cookie"), this reads the
    header only; the cookie path is extract_csrf_token_from_cookie.
    """
    return request.headers.get(HEADER_NAME_CSRF_TOKEN)
def extract_csrf_token_from_cookie(request: Request) -> str | None:
    """
    Try to extract CSRF token from cookie.
    """
    # _real_cookie_name applies the deployment-specific cookie name prefix.
    return request.cookies.get(_real_cookie_name(COOKIE_NAME_CSRF_TOKEN))
def extract_access_token(request: Request) -> str | None:
"""
Try to extract access token from cookie, header or params.
Access token is either for console session or webapp passport exchange.
"""
def _try_extract_from_cookie(request: Request) -> str | None:
    """Return the console access token stored in its cookie, if present."""
    name = _real_cookie_name(COOKIE_NAME_ACCESS_TOKEN)
    return request.cookies.get(name)
@ -83,20 +72,10 @@ def extract_access_token(request: Request) -> str | None:
def extract_webapp_access_token(request: Request) -> str | None:
    """
    Try to extract webapp access token from cookie, then header.
    """
    # Cookie wins; the Authorization header is only a fallback.
    return request.cookies.get(_real_cookie_name(COOKIE_NAME_WEBAPP_ACCESS_TOKEN)) or _try_extract_from_header(request)
def extract_webapp_passport(app_code: str, request: Request) -> str | None:
"""
Try to extract app token from header or params.
Webapp access token (part of passport) is only used for webapp session.
"""
def _try_extract_passport_token_from_cookie(request: Request) -> str | None:
return request.cookies.get(_real_cookie_name(COOKIE_NAME_PASSPORT + "-" + app_code))

View File

@ -117,7 +117,7 @@ dev = [
"pytest-cov~=4.1.0",
"pytest-env~=1.1.3",
"pytest-mock~=3.14.0",
"testcontainers~=4.10.0",
"testcontainers~=4.13.2",
"types-aiofiles~=24.1.0",
"types-beautifulsoup4~=4.12.0",
"types-cachetools~=5.5.0",

View File

@ -82,54 +82,51 @@ class AudioService:
message_id: str | None = None,
is_draft: bool = False,
):
from app import app
def invoke_tts(text_content: str, app_model: App, voice: str | None = None, is_draft: bool = False):
with app.app_context():
if voice is None:
if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
if is_draft:
workflow = WorkflowService().get_draft_workflow(app_model=app_model)
else:
workflow = app_model.workflow
if (
workflow is None
or "text_to_speech" not in workflow.features_dict
or not workflow.features_dict["text_to_speech"].get("enabled")
):
if voice is None:
if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
if is_draft:
workflow = WorkflowService().get_draft_workflow(app_model=app_model)
else:
workflow = app_model.workflow
if (
workflow is None
or "text_to_speech" not in workflow.features_dict
or not workflow.features_dict["text_to_speech"].get("enabled")
):
raise ValueError("TTS is not enabled")
voice = workflow.features_dict["text_to_speech"].get("voice")
else:
if not is_draft:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")
voice = workflow.features_dict["text_to_speech"].get("voice")
else:
if not is_draft:
if app_model.app_model_config is None:
raise ValueError("AppModelConfig not found")
text_to_speech_dict = app_model.app_model_config.text_to_speech_dict
voice = text_to_speech_dict.get("voice")
if not text_to_speech_dict.get("enabled"):
raise ValueError("TTS is not enabled")
voice = text_to_speech_dict.get("voice")
model_manager = ModelManager()
model_instance = model_manager.get_default_model_instance(
tenant_id=app_model.tenant_id, model_type=ModelType.TTS
)
try:
if not voice:
voices = model_instance.get_tts_voices()
if voices:
voice = voices[0].get("value")
if not voice:
raise ValueError("Sorry, no voice available.")
else:
model_manager = ModelManager()
model_instance = model_manager.get_default_model_instance(
tenant_id=app_model.tenant_id, model_type=ModelType.TTS
)
try:
if not voice:
voices = model_instance.get_tts_voices()
if voices:
voice = voices[0].get("value")
if not voice:
raise ValueError("Sorry, no voice available.")
else:
raise ValueError("Sorry, no voice available.")
return model_instance.invoke_tts(
content_text=text_content.strip(), user=end_user, tenant_id=app_model.tenant_id, voice=voice
)
except Exception as e:
raise e
return model_instance.invoke_tts(
content_text=text_content.strip(), user=end_user, tenant_id=app_model.tenant_id, voice=voice
)
except Exception as e:
raise e
if message_id:
try:

View File

@ -283,7 +283,7 @@ class VariableTruncator:
break
remaining_budget = target_size - used_size
if item is None or isinstance(item, (str, list, dict, bool, int, float)):
if item is None or isinstance(item, (str, list, dict, bool, int, float, UpdatedVariable)):
part_result = self._truncate_json_primitives(item, remaining_budget)
else:
raise UnknownTypeError(f"got unknown type {type(item)} in array truncation")
@ -373,6 +373,11 @@ class VariableTruncator:
return _PartResult(truncated_obj, used_size, truncated)
@overload
def _truncate_json_primitives(
self, val: UpdatedVariable, target_size: int
) -> _PartResult[Mapping[str, object]]: ...
@overload
def _truncate_json_primitives(self, val: str, target_size: int) -> _PartResult[str]: ...

View File

@ -182,6 +182,28 @@ class TestOpenSearchVector:
assert len(ids) == 1
assert ids[0] == "mock_id"
def test_delete_nonexistent_index(self):
    """Test deleting a non-existent index."""
    # Create a vector instance with a non-existent collection name
    # (self.vector._client is a mock; simulate the index being absent).
    self.vector._client.indices.exists.return_value = False
    # Should not raise an exception
    self.vector.delete()
    # Verify that exists was called but delete was not
    # NOTE(review): assumes delete() consults indices.exists before deleting —
    # confirm against the current OpenSearchVector.delete implementation.
    self.vector._client.indices.exists.assert_called_once_with(index=self.collection_name.lower())
    self.vector._client.indices.delete.assert_not_called()
def test_delete_existing_index(self):
    """Test deleting an existing index."""
    # Mocked client reports the index as present.
    self.vector._client.indices.exists.return_value = True
    self.vector.delete()
    # Verify both exists and delete were called
    # NOTE(review): assumes delete() performs an exists check — confirm.
    self.vector._client.indices.exists.assert_called_once_with(index=self.collection_name.lower())
    self.vector._client.indices.delete.assert_called_once_with(index=self.collection_name.lower())
@pytest.mark.usefixtures("setup_mock_redis")
class TestOpenSearchVectorWithRedis:

View File

@ -8,6 +8,18 @@ from core.model_runtime.entities.llm_entities import LLMUsage
from core.workflow.runtime import GraphRuntimeState, ReadOnlyGraphRuntimeStateWrapper, VariablePool
class StubCoordinator:
    """Minimal response-coordinator double that round-trips its state as JSON."""

    def __init__(self) -> None:
        # Mirrors the real coordinator's serializable state with one field.
        self.state = "initial"

    def dumps(self) -> str:
        """Serialize the stub's state to a JSON string."""
        return json.dumps({"state": self.state})

    def loads(self, data: str) -> None:
        """Restore the stub's state from a JSON string produced by dumps()."""
        self.state = json.loads(data)["state"]
class TestGraphRuntimeState:
def test_property_getters_and_setters(self):
# FIXME(-LAN-): Mock VariablePool if needed
@ -191,17 +203,6 @@ class TestGraphRuntimeState:
graph_execution.exceptions_count = 4
graph_execution.started = True
class StubCoordinator:
def __init__(self) -> None:
self.state = "initial"
def dumps(self) -> str:
return json.dumps({"state": self.state})
def loads(self, data: str) -> None:
payload = json.loads(data)
self.state = payload["state"]
mock_graph = MagicMock()
stub = StubCoordinator()
with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=stub):
@ -211,8 +212,7 @@ class TestGraphRuntimeState:
snapshot = state.dumps()
restored = GraphRuntimeState(variable_pool=VariablePool(), start_at=0.0)
restored.loads(snapshot)
restored = GraphRuntimeState.from_snapshot(snapshot)
assert restored.total_tokens == 10
assert restored.node_run_steps == 3
@ -235,3 +235,47 @@ class TestGraphRuntimeState:
restored.attach_graph(mock_graph)
assert new_stub.state == "configured"
def test_loads_rehydrates_existing_instance(self):
    """loads() on an already-constructed state must restore every field from
    a dumps() snapshot: counters, outputs, ready queue, variable pool,
    graph execution, and the attached response coordinator."""
    variable_pool = VariablePool()
    variable_pool.add(("node", "key"), "value")
    state = GraphRuntimeState(variable_pool=variable_pool, start_at=time())
    state.total_tokens = 7
    state.node_run_steps = 2
    state.set_output("foo", "bar")
    state.ready_queue.put("node-1")
    execution = state.graph_execution
    execution.workflow_id = "wf-456"
    execution.started = True
    mock_graph = MagicMock()
    original_stub = StubCoordinator()
    with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=original_stub):
        state.attach_graph(mock_graph)
    # Mutate after attach so the snapshot captures a non-default value.
    original_stub.state = "configured"
    snapshot = state.dumps()
    # Restore into a fresh instance that already has a graph attached, so the
    # coordinator payload is applied immediately rather than deferred.
    new_stub = StubCoordinator()
    with patch.object(GraphRuntimeState, "_build_response_coordinator", return_value=new_stub):
        restored = GraphRuntimeState(variable_pool=VariablePool(), start_at=0.0)
        restored.attach_graph(mock_graph)
        restored.loads(snapshot)
    assert restored.total_tokens == 7
    assert restored.node_run_steps == 2
    assert restored.get_output("foo") == "bar"
    assert restored.ready_queue.qsize() == 1
    assert restored.ready_queue.get(timeout=0.01) == "node-1"
    restored_segment = restored.variable_pool.get(("node", "key"))
    assert restored_segment is not None
    assert restored_segment.value == "value"
    restored_execution = restored.graph_execution
    assert restored_execution.workflow_id == "wf-456"
    assert restored_execution.started is True
    assert new_stub.state == "configured"

View File

@ -1590,7 +1590,7 @@ dev = [
{ name = "ruff", specifier = "~=0.14.0" },
{ name = "scipy-stubs", specifier = ">=1.15.3.0" },
{ name = "sseclient-py", specifier = ">=1.8.0" },
{ name = "testcontainers", specifier = "~=4.10.0" },
{ name = "testcontainers", specifier = "~=4.13.2" },
{ name = "ty", specifier = "~=0.0.1a19" },
{ name = "types-aiofiles", specifier = "~=24.1.0" },
{ name = "types-beautifulsoup4", specifier = "~=4.12.0" },
@ -5907,7 +5907,7 @@ wheels = [
[[package]]
name = "testcontainers"
version = "4.10.0"
version = "4.13.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docker" },
@ -5916,9 +5916,9 @@ dependencies = [
{ name = "urllib3" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/49/9c618aff1c50121d183cdfbc3a4a5cf2727a2cde1893efe6ca55c7009196/testcontainers-4.10.0.tar.gz", hash = "sha256:03f85c3e505d8b4edeb192c72a961cebbcba0dd94344ae778b4a159cb6dcf8d3", size = 63327, upload-time = "2025-04-02T16:13:27.582Z" }
sdist = { url = "https://files.pythonhosted.org/packages/18/51/edac83edab339d8b4dce9a7b659163afb1ea7e011bfed1d5573d495a4485/testcontainers-4.13.2.tar.gz", hash = "sha256:2315f1e21b059427a9d11e8921f85fef322fbe0d50749bcca4eaa11271708ba4", size = 78692, upload-time = "2025-10-07T21:53:07.531Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1c/0a/824b0c1ecf224802125279c3effff2e25ed785ed046e67da6e53d928de4c/testcontainers-4.10.0-py3-none-any.whl", hash = "sha256:31ed1a81238c7e131a2a29df6db8f23717d892b592fa5a1977fd0dcd0c23fc23", size = 107414, upload-time = "2025-04-02T16:13:25.785Z" },
{ url = "https://files.pythonhosted.org/packages/2a/5e/73aa94770f1df0595364aed526f31d54440db5492911e2857318ed326e51/testcontainers-4.13.2-py3-none-any.whl", hash = "sha256:0209baf8f4274b568cde95bef2cadf7b1d33b375321f793790462e235cd684ee", size = 124771, upload-time = "2025-10-07T21:53:05.937Z" },
]
[[package]]

View File

@ -201,6 +201,10 @@ ENABLE_WEBSITE_JINAREADER=true
ENABLE_WEBSITE_FIRECRAWL=true
ENABLE_WEBSITE_WATERCRAWL=true
# Enable inline LaTeX rendering with single dollar signs ($...$) in the web frontend
# Default is false for security reasons to prevent conflicts with regular text
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
# ------------------------------
# Database Configuration
# The database uses PostgreSQL. Please use the public schema.
@ -260,16 +264,18 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB
POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
# Sets the maximum allowed duration of any statement before termination.
# Default is 60000 milliseconds.
# Default is 0 (no timeout).
#
# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-STATEMENT-TIMEOUT
POSTGRES_STATEMENT_TIMEOUT=60000
# A value of 0 prevents the server from timing out statements.
POSTGRES_STATEMENT_TIMEOUT=0
# Sets the maximum allowed duration of any idle in-transaction session before termination.
# Default is 60000 milliseconds.
# Default is 0 (no timeout).
#
# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=60000
# A value of 0 prevents the server from terminating idle sessions.
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
# ------------------------------
# Redis Configuration
@ -314,7 +320,7 @@ REDIS_CLUSTERS_PASSWORD=
# Celery Configuration
# ------------------------------
# Use standalone redis as the broker, and redis db 1 for celery broker. (redis_username is usually set by defualt as empty)
# Use standalone redis as the broker, and redis db 1 for celery broker. (redis_username is usually set by default as empty)
# Format as follows: `redis://<redis_username>:<redis_password>@<redis_host>:<redis_port>/<redis_database>`.
# Example: redis://:difyai123456@redis:6379/1
# If use Redis Sentinel, format as follows: `sentinel://<redis_username>:<redis_password>@<sentinel_host1>:<sentinel_port>/<redis_database>`

View File

@ -115,8 +115,8 @@ services:
-c 'work_mem=${POSTGRES_WORK_MEM:-4MB}'
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-60000}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-60000}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}'
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
healthcheck:

View File

@ -15,8 +15,8 @@ services:
-c 'work_mem=${POSTGRES_WORK_MEM:-4MB}'
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-60000}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-60000}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}'
volumes:
- ${PGDATA_HOST_VOLUME:-./volumes/db/data}:/var/lib/postgresql/data
ports:

View File

@ -51,6 +51,7 @@ x-shared-env: &shared-api-worker-env
ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true}
ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true}
ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true}
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false}
DB_USERNAME: ${DB_USERNAME:-postgres}
DB_PASSWORD: ${DB_PASSWORD:-difyai123456}
DB_HOST: ${DB_HOST:-db}
@ -68,8 +69,8 @@ x-shared-env: &shared-api-worker-env
POSTGRES_WORK_MEM: ${POSTGRES_WORK_MEM:-4MB}
POSTGRES_MAINTENANCE_WORK_MEM: ${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}
POSTGRES_EFFECTIVE_CACHE_SIZE: ${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}
POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-60000}
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-60000}
POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-0}
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}
REDIS_HOST: ${REDIS_HOST:-redis}
REDIS_PORT: ${REDIS_PORT:-6379}
REDIS_USERNAME: ${REDIS_USERNAME:-}
@ -724,8 +725,8 @@ services:
-c 'work_mem=${POSTGRES_WORK_MEM:-4MB}'
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-60000}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-60000}'
-c 'statement_timeout=${POSTGRES_STATEMENT_TIMEOUT:-0}'
-c 'idle_in_transaction_session_timeout=${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0}'
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
healthcheck:

View File

@ -41,16 +41,18 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB
POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
# Sets the maximum allowed duration of any statement before termination.
# Default is 60000 milliseconds.
# Default is 0 (no timeout).
#
# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-STATEMENT-TIMEOUT
POSTGRES_STATEMENT_TIMEOUT=60000
# A value of 0 prevents the server from timing out statements.
POSTGRES_STATEMENT_TIMEOUT=0
# Sets the maximum allowed duration of any idle in-transaction session before termination.
# Default is 60000 milliseconds.
# Default is 0 (no timeout).
#
# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=60000
# A value of 0 prevents the server from terminating idle sessions.
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
# -----------------------------
# Environment Variables for redis Service

View File

@ -61,5 +61,9 @@ NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER=true
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL=true
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL=true
# Enable inline LaTeX rendering with single dollar signs ($...$)
# Default is false for security reasons to prevent conflicts with regular text
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
# The maximum number of tree node depth for workflow
NEXT_PUBLIC_MAX_TREE_DEPTH=50

View File

@ -132,8 +132,6 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
if (onClose)
onClose()
setTimeout(() => {
setShowErrorModal(true)
}, 300)

View File

@ -14,7 +14,6 @@ import timezone from 'dayjs/plugin/timezone'
import { createContext, useContext } from 'use-context-selector'
import { useShallow } from 'zustand/react/shallow'
import { useTranslation } from 'react-i18next'
import { usePathname, useRouter, useSearchParams } from 'next/navigation'
import type { ChatItemInTree } from '../../base/chat/types'
import Indicator from '../../header/indicator'
import VarPanel from './var-panel'
@ -43,10 +42,6 @@ import cn from '@/utils/classnames'
import { noop } from 'lodash-es'
import PromptLogModal from '../../base/prompt-log-modal'
type AppStoreState = ReturnType<typeof useAppStore.getState>
type ConversationListItem = ChatConversationGeneralDetail | CompletionConversationGeneralDetail
type ConversationSelection = ConversationListItem | { id: string; isPlaceholder?: true }
dayjs.extend(utc)
dayjs.extend(timezone)
@ -206,7 +201,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
const { formatTime } = useTimestamp()
const { onClose, appDetail } = useContext(DrawerContext)
const { notify } = useContext(ToastContext)
const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, showPromptLogModal, setShowPromptLogModal, currentLogModalActiveTab } = useAppStore(useShallow((state: AppStoreState) => ({
const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, showPromptLogModal, setShowPromptLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({
currentLogItem: state.currentLogItem,
setCurrentLogItem: state.setCurrentLogItem,
showMessageLogModal: state.showMessageLogModal,
@ -898,113 +893,20 @@ const ChatConversationDetailComp: FC<{ appId?: string; conversationId?: string }
const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh }) => {
const { t } = useTranslation()
const { formatTime } = useTimestamp()
const router = useRouter()
const pathname = usePathname()
const searchParams = useSearchParams()
const conversationIdInUrl = searchParams.get('conversation_id') ?? undefined
const media = useBreakpoints()
const isMobile = media === MediaType.mobile
const [showDrawer, setShowDrawer] = useState<boolean>(false) // Whether to display the chat details drawer
const [currentConversation, setCurrentConversation] = useState<ConversationSelection | undefined>() // Currently selected conversation
const closingConversationIdRef = useRef<string | null>(null)
const pendingConversationIdRef = useRef<string | null>(null)
const pendingConversationCacheRef = useRef<ConversationSelection | undefined>(undefined)
const [currentConversation, setCurrentConversation] = useState<ChatConversationGeneralDetail | CompletionConversationGeneralDetail | undefined>() // Currently selected conversation
const isChatMode = appDetail.mode !== 'completion' // Whether the app is a chat app
const isChatflow = appDetail.mode === 'advanced-chat' // Whether the app is a chatflow app
const { setShowPromptLogModal, setShowAgentLogModal, setShowMessageLogModal } = useAppStore(useShallow((state: AppStoreState) => ({
const { setShowPromptLogModal, setShowAgentLogModal, setShowMessageLogModal } = useAppStore(useShallow(state => ({
setShowPromptLogModal: state.setShowPromptLogModal,
setShowAgentLogModal: state.setShowAgentLogModal,
setShowMessageLogModal: state.setShowMessageLogModal,
})))
const activeConversationId = conversationIdInUrl ?? pendingConversationIdRef.current ?? currentConversation?.id
const buildUrlWithConversation = useCallback((conversationId?: string) => {
const params = new URLSearchParams(searchParams.toString())
if (conversationId)
params.set('conversation_id', conversationId)
else
params.delete('conversation_id')
const queryString = params.toString()
return queryString ? `${pathname}?${queryString}` : pathname
}, [pathname, searchParams])
const handleRowClick = useCallback((log: ConversationListItem) => {
if (conversationIdInUrl === log.id) {
if (!showDrawer)
setShowDrawer(true)
if (!currentConversation || currentConversation.id !== log.id)
setCurrentConversation(log)
return
}
pendingConversationIdRef.current = log.id
pendingConversationCacheRef.current = log
if (!showDrawer)
setShowDrawer(true)
if (currentConversation?.id !== log.id)
setCurrentConversation(undefined)
router.push(buildUrlWithConversation(log.id), { scroll: false })
}, [buildUrlWithConversation, conversationIdInUrl, currentConversation, router, showDrawer])
const currentConversationId = currentConversation?.id
useEffect(() => {
if (!conversationIdInUrl) {
if (pendingConversationIdRef.current)
return
if (showDrawer || currentConversationId) {
setShowDrawer(false)
setCurrentConversation(undefined)
}
closingConversationIdRef.current = null
pendingConversationCacheRef.current = undefined
return
}
if (closingConversationIdRef.current === conversationIdInUrl)
return
if (pendingConversationIdRef.current === conversationIdInUrl)
pendingConversationIdRef.current = null
const matchedConversation = logs?.data?.find((item: ConversationListItem) => item.id === conversationIdInUrl)
const nextConversation: ConversationSelection = matchedConversation
?? pendingConversationCacheRef.current
?? { id: conversationIdInUrl, isPlaceholder: true }
if (!showDrawer)
setShowDrawer(true)
if (!currentConversation || currentConversation.id !== conversationIdInUrl || (matchedConversation && currentConversation !== matchedConversation))
setCurrentConversation(nextConversation)
if (pendingConversationCacheRef.current?.id === conversationIdInUrl || matchedConversation)
pendingConversationCacheRef.current = undefined
}, [conversationIdInUrl, currentConversation, isChatMode, logs?.data, showDrawer])
const onCloseDrawer = useCallback(() => {
onRefresh()
setShowDrawer(false)
setCurrentConversation(undefined)
setShowPromptLogModal(false)
setShowAgentLogModal(false)
setShowMessageLogModal(false)
pendingConversationIdRef.current = null
pendingConversationCacheRef.current = undefined
closingConversationIdRef.current = conversationIdInUrl ?? null
if (conversationIdInUrl)
router.replace(buildUrlWithConversation(), { scroll: false })
}, [buildUrlWithConversation, conversationIdInUrl, onRefresh, router, setShowAgentLogModal, setShowMessageLogModal, setShowPromptLogModal])
// Annotated data needs to be highlighted
const renderTdValue = (value: string | number | null, isEmptyStyle: boolean, isHighlight = false, annotation?: LogAnnotation) => {
return (
@ -1023,6 +925,15 @@ const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh })
)
}
const onCloseDrawer = () => {
onRefresh()
setShowDrawer(false)
setCurrentConversation(undefined)
setShowPromptLogModal(false)
setShowAgentLogModal(false)
setShowMessageLogModal(false)
}
if (!logs)
return <Loading />
@ -1049,8 +960,11 @@ const ConversationList: FC<IConversationList> = ({ logs, appDetail, onRefresh })
const rightValue = get(log, isChatMode ? 'message_count' : 'message.answer')
return <tr
key={log.id}
className={cn('cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover', activeConversationId !== log.id ? '' : 'bg-background-default-hover')}
onClick={() => handleRowClick(log)}>
className={cn('cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover', currentConversation?.id !== log.id ? '' : 'bg-background-default-hover')}
onClick={() => {
setShowDrawer(true)
setCurrentConversation(log)
}}>
<td className='h-4'>
{!log.read_at && (
<div className='flex items-center p-3 pr-0.5'>

View File

@ -4,6 +4,7 @@ import RemarkBreaks from 'remark-breaks'
import RehypeKatex from 'rehype-katex'
import RemarkGfm from 'remark-gfm'
import RehypeRaw from 'rehype-raw'
import { ENABLE_SINGLE_DOLLAR_LATEX } from '@/config'
import AudioBlock from '@/app/components/base/markdown-blocks/audio-block'
import Img from '@/app/components/base/markdown-blocks/img'
import Link from '@/app/components/base/markdown-blocks/link'
@ -34,7 +35,7 @@ export const ReactMarkdownWrapper: FC<ReactMarkdownWrapperProps> = (props) => {
<ReactMarkdown
remarkPlugins={[
RemarkGfm,
[RemarkMath, { singleDollarTextMath: false }],
[RemarkMath, { singleDollarTextMath: ENABLE_SINGLE_DOLLAR_LATEX }],
RemarkBreaks,
]}
rehypePlugins={[

View File

@ -324,7 +324,7 @@ const FileUploader = ({
<div>{t('datasetCreation.stepOne.uploader.tip', {
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit,
})}</div>
{dragging && <div ref={dragRef} className='absolute left-0 top-0 h-full w-full' />}
</div>

View File

@ -287,7 +287,7 @@ const LocalFile = ({
<RiUploadCloud2Line className='mr-2 size-5' />
<span>
{t('datasetCreation.stepOne.uploader.button')}
{notSupportBatchUpload ? t('datasetCreation.stepOne.uploader.buttonSingleFile') : t('datasetCreation.stepOne.uploader.button')}
{allowedExtensions.length > 0 && (
<label className='ml-1 cursor-pointer text-text-accent' onClick={selectHandle}>{t('datasetCreation.stepOne.uploader.browse')}</label>
)}
@ -296,7 +296,7 @@ const LocalFile = ({
<div>{t('datasetCreation.stepOne.uploader.tip', {
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
batchCount: notSupportBatchUpload ? 1 : fileUploadConfig.batch_count_limit,
})}</div>
{dragging && <div ref={dragRef} className='absolute left-0 top-0 h-full w-full' />}
</div>

View File

@ -56,10 +56,10 @@ const SwrInitializer = ({
}
const redirectUrl = resolvePostLoginRedirect(searchParams)
if (redirectUrl)
if (redirectUrl) {
location.replace(redirectUrl)
else
router.replace(pathname)
return
}
setInit(true)
}

View File

@ -57,6 +57,7 @@ const LocaleLayout = async ({
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER,
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
[DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX]: process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_WIDGET_KEY]: process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,

View File

@ -135,8 +135,8 @@ const NormalForm = () => {
{!systemFeatures.branding.enabled && <p className='body-md-regular mt-2 text-text-tertiary'>{t('login.joinTipStart')}{workspaceName}{t('login.joinTipEnd')}</p>}
</div>
: <div className="mx-auto w-full">
<h2 className="title-4xl-semi-bold text-text-primary">{t('login.pageTitle')}</h2>
{!systemFeatures.branding.enabled && <p className='body-md-regular mt-2 text-text-tertiary'>{t('login.welcome')}</p>}
<h2 className="title-4xl-semi-bold text-text-primary">{systemFeatures.branding.enabled ? t('login.pageTitleForE') : t('login.pageTitle')}</h2>
<p className='body-md-regular mt-2 text-text-tertiary'>{t('login.welcome')}</p>
</div>}
<div className="relative">
<div className="mt-6 flex flex-col gap-3">

View File

@ -375,6 +375,11 @@ export const ENABLE_WEBSITE_WATERCRAWL = getBooleanConfig(
DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
false,
)
export const ENABLE_SINGLE_DOLLAR_LATEX = getBooleanConfig(
process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
false,
)
export const VALUE_SELECTOR_DELIMITER = '@@@'

View File

@ -34,6 +34,7 @@ export NEXT_PUBLIC_MAX_TOOLS_NUM=${MAX_TOOLS_NUM}
export NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER=${ENABLE_WEBSITE_JINAREADER:-true}
export NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL=${ENABLE_WEBSITE_FIRECRAWL:-true}
export NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL=${ENABLE_WEBSITE_WATERCRAWL:-true}
export NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false}
export NEXT_PUBLIC_LOOP_NODE_MAX_COUNT=${LOOP_NODE_MAX_COUNT}
export NEXT_PUBLIC_MAX_PARALLEL_LIMIT=${MAX_PARALLEL_LIMIT}
export NEXT_PUBLIC_MAX_ITERATIONS_NUM=${MAX_ITERATIONS_NUM}

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: 'Haben Sie kein Konto?',
verifyMail: 'Fahren Sie mit dem Bestätigungscode fort',
},
pageTitleForE: 'Hey, lass uns anfangen!',
}
export default translation

View File

@ -1,5 +1,6 @@
const translation = {
pageTitle: 'Log in to Dify',
pageTitleForE: 'Hey, let\'s get started!',
welcome: '👋 Welcome! Please log in to get started.',
email: 'Email address',
emailPlaceholder: 'Your email',

View File

@ -120,6 +120,7 @@ const translation = {
welcome: '👋 ¡Bienvenido! Por favor, completa los detalles para comenzar.',
verifyMail: 'Continuar con el código de verificación',
},
pageTitleForE: '¡Hola, vamos a empezar!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: 'حساب کاربری ندارید؟',
verifyMail: 'ادامه با کد تأیید',
},
pageTitleForE: 'هی، بیا شروع کنیم!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
verifyMail: 'Continuez avec le code de vérification',
createAccount: 'Créez votre compte',
},
pageTitleForE: 'Hé, commençons !',
}
export default translation

View File

@ -125,6 +125,7 @@ const translation = {
welcome: '👋 स्वागत है! कृपया शुरू करने के लिए विवरण भरें।',
haveAccount: 'क्या आपका पहले से एक खाता है?',
},
pageTitleForE: 'अरे, चलो शुरू करें!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: 'Tidak punya akun?',
welcome: '👋 Selamat datang! Silakan isi detail untuk memulai.',
},
pageTitleForE: 'Hei, ayo kita mulai!',
}
export default translation

View File

@ -130,6 +130,7 @@ const translation = {
signUp: 'Iscriviti',
welcome: '👋 Benvenuto! Per favore compila i dettagli per iniziare.',
},
pageTitleForE: 'Ehi, cominciamo!',
}
export default translation

View File

@ -1,5 +1,6 @@
const translation = {
pageTitle: 'Dify にログイン',
pageTitleForE: 'はじめましょう!',
welcome: '👋 ようこそ!まずはログインしてご利用ください。',
email: 'メールアドレス',
emailPlaceholder: 'メールアドレスを入力してください',

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: '계정이 없으신가요?',
welcome: '👋 환영합니다! 시작하려면 세부 정보를 입력해 주세요.',
},
pageTitleForE: '이봐, 시작하자!',
}
export default translation

View File

@ -125,6 +125,7 @@ const translation = {
haveAccount: 'Masz już konto?',
welcome: '👋 Witaj! Proszę wypełnić szczegóły, aby rozpocząć.',
},
pageTitleForE: 'Hej, zaczynajmy!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
signUp: 'Inscreva-se',
welcome: '👋 Bem-vindo! Por favor, preencha os detalhes para começar.',
},
pageTitleForE: 'Ei, vamos começar!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
createAccount: 'Creează-ți contul',
welcome: '👋 Buna! Te rugăm să completezi detaliile pentru a începe.',
},
pageTitleForE: 'Hei, hai să începem!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
verifyMail: 'Продолжите с кодом проверки',
welcome: '👋 Добро пожаловать! Пожалуйста, заполните данные, чтобы начать.',
},
pageTitleForE: 'Привет, давай начнем!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: 'Nimate računa?',
welcome: '👋 Dobrodošli! Prosimo, izpolnite podatke, da začnete.',
},
pageTitleForE: 'Hej, začnimo!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
verifyMail: 'โปรดดำเนินการต่อด้วยรหัสการตรวจสอบ',
haveAccount: 'มีบัญชีอยู่แล้วใช่ไหม?',
},
pageTitleForE: 'เฮ้ เรามาเริ่มกันเถอะ!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
haveAccount: 'Zaten bir hesabınız var mı?',
welcome: '👋 Hoş geldiniz! Başlamak için lütfen detayları doldurun.',
},
pageTitleForE: 'Hey, haydi başlayalım!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
noAccount: 'Не маєте облікового запису?',
welcome: '👋 Ласкаво просимо! Будь ласка, заповніть деталі, щоб почати.',
},
pageTitleForE: 'Гей, давай почнемо!',
}
export default translation

View File

@ -120,6 +120,7 @@ const translation = {
verifyMail: 'Tiếp tục với mã xác minh',
welcome: '👋 Chào mừng! Vui lòng điền vào các chi tiết để bắt đầu.',
},
pageTitleForE: 'Này, hãy bắt đầu nào!',
}
export default translation

View File

@ -1,5 +1,6 @@
const translation = {
pageTitle: '登录 Dify',
pageTitleForE: '嗨,近来可好',
welcome: '👋 欢迎!请登录以开始使用。',
email: '邮箱',
emailPlaceholder: '输入邮箱地址',

View File

@ -1,5 +1,6 @@
const translation = {
pageTitle: '嗨,近來可好',
pageTitleForE: '嗨,近來可好',
welcome: '👋 歡迎來到 Dify, 登入以繼續',
email: '郵箱',
emailPlaceholder: '輸入郵箱地址',

View File

@ -2,7 +2,7 @@
"name": "dify-web",
"version": "1.9.2",
"private": true,
"packageManager": "pnpm@10.18.3+sha512.bbd16e6d7286fd7e01f6b3c0b3c932cda2965c06a908328f74663f10a9aea51f1129eea615134bf992831b009eabe167ecb7008b597f40ff9bc75946aadfb08d",
"packageManager": "pnpm@10.19.0+sha512.c9fc7236e92adf5c8af42fd5bf1612df99c2ceb62f27047032f4720b33f8eacdde311865e91c411f2774f618d82f320808ecb51718bfa82c060c4ba7c76a32b8",
"engines": {
"node": ">=v22.11.0"
},
@ -22,7 +22,7 @@
"dev": "cross-env NODE_OPTIONS='--inspect' next dev --turbopack",
"build": "next build",
"build:docker": "next build && node scripts/optimize-standalone.js",
"start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
"start": "node ./scripts/copy-and-start.mjs",
"lint": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"lint:fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix",
"lint:quiet": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet",
@ -144,7 +144,7 @@
"@babel/core": "^7.28.4",
"@chromatic-com/storybook": "^4.1.1",
"@eslint-react/eslint-plugin": "^1.53.1",
"@happy-dom/jest-environment": "^20.0.7",
"@happy-dom/jest-environment": "^20.0.8",
"@mdx-js/loader": "^3.1.1",
"@mdx-js/react": "^3.1.1",
"@next/bundle-analyzer": "15.5.4",

View File

@ -356,8 +356,8 @@ importers:
specifier: ^1.53.1
version: 1.53.1(eslint@9.38.0(jiti@1.21.7))(ts-api-utils@2.1.0(typescript@5.9.3))(typescript@5.9.3)
'@happy-dom/jest-environment':
specifier: ^20.0.7
version: 20.0.7(@jest/environment@29.7.0)(@jest/fake-timers@29.7.0)(@jest/types@29.6.3)(jest-mock@29.7.0)(jest-util@29.7.0)
specifier: ^20.0.8
version: 20.0.8(@jest/environment@29.7.0)(@jest/fake-timers@29.7.0)(@jest/types@29.6.3)(jest-mock@29.7.0)(jest-util@29.7.0)
'@mdx-js/loader':
specifier: ^3.1.1
version: 3.1.1(webpack@5.102.1(esbuild@0.25.0)(uglify-js@3.19.3))
@ -688,6 +688,10 @@ packages:
resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==}
engines: {node: '>=6.9.0'}
'@babel/helper-validator-identifier@7.28.5':
resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==}
engines: {node: '>=6.9.0'}
'@babel/helper-validator-option@7.27.1':
resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==}
engines: {node: '>=6.9.0'}
@ -705,6 +709,11 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
'@babel/parser@7.28.5':
resolution: {integrity: sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==}
engines: {node: '>=6.0.0'}
hasBin: true
'@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.27.1':
resolution: {integrity: sha512-QPG3C9cCVRQLxAVwmefEmwdTanECuUBMQZ/ym5kiw3XKCGA7qkuQLcjWWHcrD/GKbn/WmJwaezfuuAOcyKlRPA==}
engines: {node: '>=6.9.0'}
@ -1230,6 +1239,10 @@ packages:
resolution: {integrity: sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==}
engines: {node: '>=6.9.0'}
'@babel/types@7.28.5':
resolution: {integrity: sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==}
engines: {node: '>=6.9.0'}
'@bcoe/v8-coverage@0.2.3':
resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==}
@ -1747,8 +1760,8 @@ packages:
'@formatjs/intl-localematcher@0.5.10':
resolution: {integrity: sha512-af3qATX+m4Rnd9+wHcjJ4w2ijq+rAVP3CCinJQvFv1kgSu1W6jypUmvleJxcewdxmutM8dmIRZFxO/IQBZmP2Q==}
'@happy-dom/jest-environment@20.0.7':
resolution: {integrity: sha512-f7cvUghxPIUS8L21uSNab1GYXPr6+7FvltpsWyzrSzhSbjhDWr5Ixcy5bv2DqaQEhAKIQ7SYBYD5n4+SSHwfig==}
'@happy-dom/jest-environment@20.0.8':
resolution: {integrity: sha512-e8/c1EW+vUF7MFTZZtPbWrD3rStPnx3X8M4pAaOU++x+1lsXr/bsdoLoHs6bQ2kEZyPRhate3sC6MnpVD/O/9A==}
engines: {node: '>=20.0.0'}
peerDependencies:
'@jest/environment': '>=25.0.0'
@ -3525,8 +3538,8 @@ packages:
'@types/node@18.15.0':
resolution: {integrity: sha512-z6nr0TTEOBGkzLGmbypWOGnpSpSIBorEhC4L+4HeQ2iezKCi4f77kyslRwvHeNitymGQ+oFyIWGP96l/DPSV9w==}
'@types/node@20.19.22':
resolution: {integrity: sha512-hRnu+5qggKDSyWHlnmThnUqg62l29Aj/6vcYgUaSFL9oc7DVjeWEQN3PRgdSc6F8d9QRMWkf36CLMch1Do/+RQ==}
'@types/node@20.19.23':
resolution: {integrity: sha512-yIdlVVVHXpmqRhtyovZAcSy0MiPcYWGkoO4CGe/+jpP0hmNuihm4XhHbADpK++MsiLHP5MVlv+bcgdF99kSiFQ==}
'@types/papaparse@5.3.16':
resolution: {integrity: sha512-T3VuKMC2H0lgsjI9buTB3uuKj3EMD2eap1MOuEQuBQ44EnDx/IkGhU6EwiTf9zG3za4SKlmwKAImdDKdNnCsXg==}
@ -5575,8 +5588,8 @@ packages:
hachure-fill@0.5.2:
resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==}
happy-dom@20.0.7:
resolution: {integrity: sha512-CywLfzmYxP5OYpuAG0usFY0CpxJtwYR+w8Mms5J8W29Y2Pzf6rbfQS2M523tRZTb0oLA+URopPtnAQX2fupHZQ==}
happy-dom@20.0.8:
resolution: {integrity: sha512-TlYaNQNtzsZ97rNMBAm8U+e2cUQXNithgfCizkDgc11lgmN4j9CKMhO3FPGKWQYPwwkFcPpoXYF/CqEPLgzfOg==}
engines: {node: '>=20.0.0'}
has-flag@4.0.0:
@ -5803,6 +5816,7 @@ packages:
intersection-observer@0.12.2:
resolution: {integrity: sha512-7m1vEcPCxXYI8HqnL8CKI6siDyD+eIWSwgB3DZA+ZTogxk9I4CDnj4wilt9x/+/QbHI4YG5YZNmC6458/e9Ktg==}
deprecated: The Intersection Observer polyfill is no longer needed and can safely be removed. Intersection Observer has been Baseline since 2019.
is-alphabetical@1.0.4:
resolution: {integrity: sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==}
@ -6354,6 +6368,9 @@ packages:
magic-string@0.30.19:
resolution: {integrity: sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==}
magic-string@0.30.21:
resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
magicast@0.3.5:
resolution: {integrity: sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==}
@ -8932,6 +8949,8 @@ snapshots:
'@babel/helper-validator-identifier@7.27.1': {}
'@babel/helper-validator-identifier@7.28.5': {}
'@babel/helper-validator-option@7.27.1': {}
'@babel/helper-wrap-function@7.28.3':
@ -8951,6 +8970,10 @@ snapshots:
dependencies:
'@babel/types': 7.28.4
'@babel/parser@7.28.5':
dependencies:
'@babel/types': 7.28.5
'@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.27.1(@babel/core@7.28.4)':
dependencies:
'@babel/core': 7.28.4
@ -9607,6 +9630,11 @@ snapshots:
'@babel/helper-string-parser': 7.27.1
'@babel/helper-validator-identifier': 7.27.1
'@babel/types@7.28.5':
dependencies:
'@babel/helper-string-parser': 7.27.1
'@babel/helper-validator-identifier': 7.28.5
'@bcoe/v8-coverage@0.2.3': {}
'@braintree/sanitize-url@7.1.1': {}
@ -10099,12 +10127,12 @@ snapshots:
dependencies:
tslib: 2.8.1
'@happy-dom/jest-environment@20.0.7(@jest/environment@29.7.0)(@jest/fake-timers@29.7.0)(@jest/types@29.6.3)(jest-mock@29.7.0)(jest-util@29.7.0)':
'@happy-dom/jest-environment@20.0.8(@jest/environment@29.7.0)(@jest/fake-timers@29.7.0)(@jest/types@29.6.3)(jest-mock@29.7.0)(jest-util@29.7.0)':
dependencies:
'@jest/environment': 29.7.0
'@jest/fake-timers': 29.7.0
'@jest/types': 29.6.3
happy-dom: 20.0.7
happy-dom: 20.0.8
jest-mock: 29.7.0
jest-util: 29.7.0
@ -12091,7 +12119,7 @@ snapshots:
'@types/node@18.15.0': {}
'@types/node@20.19.22':
'@types/node@20.19.23':
dependencies:
undici-types: 6.21.0
@ -12292,7 +12320,7 @@ snapshots:
'@vue/compiler-core@3.5.17':
dependencies:
'@babel/parser': 7.28.4
'@babel/parser': 7.28.5
'@vue/shared': 3.5.17
entities: 4.5.0
estree-walker: 2.0.2
@ -12318,13 +12346,13 @@ snapshots:
'@vue/compiler-sfc@3.5.17':
dependencies:
'@babel/parser': 7.28.4
'@babel/parser': 7.28.5
'@vue/compiler-core': 3.5.17
'@vue/compiler-dom': 3.5.17
'@vue/compiler-ssr': 3.5.17
'@vue/shared': 3.5.17
estree-walker: 2.0.2
magic-string: 0.30.19
magic-string: 0.30.21
postcss: 8.5.6
source-map-js: 1.2.1
@ -14504,9 +14532,9 @@ snapshots:
hachure-fill@0.5.2: {}
happy-dom@20.0.7:
happy-dom@20.0.8:
dependencies:
'@types/node': 20.19.22
'@types/node': 20.19.23
'@types/whatwg-mimetype': 3.0.2
whatwg-mimetype: 3.0.0
@ -15518,6 +15546,10 @@ snapshots:
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
magic-string@0.30.21:
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
magicast@0.3.5:
dependencies:
'@babel/parser': 7.28.4

View File

@ -0,0 +1,115 @@
#!/usr/bin/env node
/**
* This script copies static files to the target directory and starts the server.
* It is intended to be used as a replacement for `next start`.
*/
import { cp, mkdir, stat } from 'node:fs/promises'
import { spawn } from 'node:child_process'
import path from 'node:path'
// Configuration for directories to copy
// Configuration for directories to copy.
// NOTE(review): presumably the Next.js `standalone` output omits `.next/static`
// and `public/`, so both must be copied next to the standalone server before
// it starts — confirm against the Next.js build config.
const DIRS_TO_COPY = [
  {
    // Client-side build assets emitted by `next build`.
    src: path.join('.next', 'static'),
    dest: path.join('.next', 'standalone', '.next', 'static'),
  },
  {
    // Static files served from the site root.
    src: 'public',
    dest: path.join('.next', 'standalone', 'public'),
  },
]
// Path to the server script (entry point of the standalone build).
const SERVER_SCRIPT_PATH = path.join('.next', 'standalone', 'server.js')
// Function to check if a path exists
const pathExists = async (path) => {
try {
console.debug(`Checking if path exists: ${path}`)
await stat(path)
console.debug(`Path exists: ${path}`)
return true
}
catch (err) {
if (err.code === 'ENOENT') {
console.warn(`Path does not exist: ${path}`)
return false
}
throw err
}
}
// Function to recursively copy directories
const copyDir = async (src, dest) => {
console.debug(`Copying directory from ${src} to ${dest}`)
await cp(src, dest, { recursive: true })
console.info(`Successfully copied ${src} to ${dest}`)
}
// Process each directory copy operation
const copyAllDirs = async () => {
console.debug('Starting directory copy operations')
for (const { src, dest } of DIRS_TO_COPY) {
try {
// Instead of pre-creating destination directory, we ensure parent directory exists
const destParent = path.dirname(dest)
console.debug(`Ensuring destination parent directory exists: ${destParent}`)
await mkdir(destParent, { recursive: true })
if (await pathExists(src)) {
await copyDir(src, dest)
}
else {
console.error(`Error: ${src} directory does not exist. This is a required build artifact.`)
process.exit(1)
}
}
catch (err) {
console.error(`Error processing ${src}:`, err.message)
process.exit(1)
}
}
console.debug('Finished directory copy operations')
}
// Run copy operations and start server
const main = async () => {
console.debug('Starting copy-and-start script')
await copyAllDirs()
// Start server
const port = process.env.npm_config_port || process.env.PORT || '3000'
const host = process.env.npm_config_host || process.env.HOSTNAME || '0.0.0.0'
console.info(`Starting server on ${host}:${port}`)
console.debug(`Server script path: ${SERVER_SCRIPT_PATH}`)
console.debug(`Environment variables - PORT: ${port}, HOSTNAME: ${host}`)
const server = spawn(
process.execPath,
[SERVER_SCRIPT_PATH],
{
env: {
...process.env,
PORT: port,
HOSTNAME: host,
},
stdio: 'inherit',
},
)
server.on('error', (err) => {
console.error('Failed to start server:', err)
process.exit(1)
})
server.on('exit', (code) => {
console.debug(`Server exited with code: ${code}`)
process.exit(code || 0)
})
}
// Entry point: run the script and make any unhandled failure fatal.
;(async () => {
  try {
    await main()
  }
  catch (err) {
    console.error('Unexpected error:', err)
    process.exit(1)
  }
})()

View File

@ -122,6 +122,7 @@ export enum DatasetAttr {
DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER = 'data-public-enable-website-jinareader',
DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL = 'data-public-enable-website-firecrawl',
DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL = 'data-public-enable-website-watercrawl',
DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX = 'data-public-enable-single-dollar-latex',
NEXT_PUBLIC_ZENDESK_WIDGET_KEY = 'next-public-zendesk-widget-key',
NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT = 'next-public-zendesk-field-id-environment',
NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION = 'next-public-zendesk-field-id-version',