Merge branch 'main' into fix/29745-persian-conversation-title

Nour Zakhma 2025-12-23 16:41:21 +01:00 committed by GitHub
commit 97f750fa22
3465 changed files with 94612 additions and 82764 deletions

View File

@ -68,25 +68,4 @@ jobs:
run: |
uvx --python 3.13 mdformat . --exclude ".claude/skills/**/SKILL.md"
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
uses: actions/setup-node@v4
with:
node-version: 22
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Web dependencies
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: oxlint
working-directory: ./web
run: pnpm exec oxlint --config .oxlintrc.json --fix .
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27

.gitignore
View File

@ -139,7 +139,6 @@ pyrightconfig.json
.idea/'
.DS_Store
web/.vscode/settings.json
# Intellij IDEA Files
.idea/*
@ -205,7 +204,6 @@ sdks/python-client/dify_client.egg-info
!.vscode/launch.json.template
!.vscode/README.md
api/.vscode
web/.vscode
# vscode Code History Extension
.history
@ -220,15 +218,6 @@ plugins.jsonl
# mise
mise.toml
# Next.js build output
.next/
# PWA generated files
web/public/sw.js
web/public/sw.js.map
web/public/workbox-*.js
web/public/workbox-*.js.map
web/public/fallback-*.js
# AI Assistant
.roo/

View File

@ -7,9 +7,9 @@ from controllers.console import console_ns
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import EmailStr, extract_remote_ip, timezone
from libs.helper import EmailStr, timezone
from models import AccountStatus
from services.account_service import AccountService, RegisterService
from services.account_service import RegisterService
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
@ -93,7 +93,6 @@ class ActivateApi(Resource):
"ActivationResponse",
{
"result": fields.String(description="Operation result"),
"data": fields.Raw(description="Login token data"),
},
),
)
@ -117,6 +116,4 @@ class ActivateApi(Resource):
account.initialized_at = naive_utc_now()
db.session.commit()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))
return {"result": "success", "data": token_pair.model_dump()}
return {"result": "success"}

View File

@ -1,7 +1,8 @@
import urllib.parse
import httpx
from flask_restx import marshal_with, reqparse
from flask_restx import marshal_with
from pydantic import BaseModel, Field, HttpUrl
import services
from controllers.common import helpers
@ -10,14 +11,23 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.web import web_ns
from controllers.web.wraps import WebApiResource
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model
from services.file_service import FileService
from ..common.schema import register_schema_models
from . import web_ns
from .wraps import WebApiResource
class RemoteFileUploadPayload(BaseModel):
url: HttpUrl = Field(description="Remote file URL")
register_schema_models(web_ns, RemoteFileUploadPayload)
@web_ns.route("/remote-files/<path:url>")
class RemoteFileInfoApi(WebApiResource):
@ -97,10 +107,8 @@ class RemoteFileUploadApi(WebApiResource):
FileTooLargeError: File exceeds size limit
UnsupportedFileTypeError: File type not supported
"""
parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
args = parser.parse_args()
url = args["url"]
payload = RemoteFileUploadPayload.model_validate(web_ns.payload or {})
url = str(payload.url)
try:
resp = ssrf_proxy.head(url=url)
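
The switch from `reqparse` to a Pydantic model means the URL is validated before any network call is made. A minimal sketch of that validation in isolation, assuming Pydantic v2 (the payload values below are hypothetical):

```python
# Sketch: Pydantic validation of the remote-file payload, mirroring the
# RemoteFileUploadPayload model introduced above.
from pydantic import BaseModel, Field, HttpUrl, ValidationError

class RemoteFileUploadPayload(BaseModel):
    url: HttpUrl = Field(description="Remote file URL")

payload = RemoteFileUploadPayload.model_validate({"url": "https://example.com/report.pdf"})
print(str(payload.url))  # the HttpUrl is converted back to a plain string before fetching

try:
    RemoteFileUploadPayload.model_validate({"url": "not-a-url"})
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # invalid URLs are rejected before any request is made
```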

View File

@ -72,6 +72,22 @@ def _get_ssrf_client(ssl_verify_enabled: bool) -> httpx.Client:
)
def _get_user_provided_host_header(headers: dict | None) -> str | None:
"""
Extract the user-provided Host header from the headers dict.
This is needed because when using a forward proxy, httpx may override the Host header.
We preserve the user's explicit Host header to support virtual hosting and other use cases.
"""
if not headers:
return None
# Case-insensitive lookup for Host header
for key, value in headers.items():
if key.lower() == "host":
return value
return None
def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
if "allow_redirects" in kwargs:
allow_redirects = kwargs.pop("allow_redirects")
@ -90,10 +106,26 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
verify_option = kwargs.pop("ssl_verify", dify_config.HTTP_REQUEST_NODE_SSL_VERIFY)
client = _get_ssrf_client(verify_option)
# Preserve user-provided Host header
# When using a forward proxy, httpx may override the Host header based on the URL.
# We extract and preserve any explicitly set Host header to support virtual hosting.
headers = kwargs.get("headers", {})
user_provided_host = _get_user_provided_host_header(headers)
retries = 0
while retries <= max_retries:
try:
response = client.request(method=method, url=url, **kwargs)
# Build the request manually to preserve the Host header
# httpx may override the Host header when using a proxy, so we use
# the request API to explicitly set headers before sending
request = client.build_request(method=method, url=url, **kwargs)
# If user explicitly provided a Host header, ensure it's preserved
if user_provided_host is not None:
request.headers["Host"] = user_provided_host
response = client.send(request)
# Check for SSRF protection by Squid proxy
if response.status_code in (401, 403):
# Check if this is a Squid SSRF rejection
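
As a hedged illustration of the behaviour this enables, a caller could pass an explicit `Host` header through `make_request` and rely on it surviving the proxy hop (the address and host name below are made up):

```python
# Sketch: calling make_request with an explicit Host header so a forward proxy
# does not rewrite it (the virtual-hosting use case described above).
from core.helper.ssrf_proxy import make_request

response = make_request(
    "GET",
    "http://203.0.113.10/health",              # hypothetical origin address
    headers={"Host": "internal.example.com"},  # preserved on the built request
)
print(response.status_code)
```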

View File

@ -61,6 +61,7 @@ class SSETransport:
self.timeout = timeout
self.sse_read_timeout = sse_read_timeout
self.endpoint_url: str | None = None
self.event_source: EventSource | None = None
def _validate_endpoint_url(self, endpoint_url: str) -> bool:
"""Validate that the endpoint URL matches the connection origin.
@ -237,6 +238,9 @@ class SSETransport:
write_queue: WriteQueue = queue.Queue()
status_queue: StatusQueue = queue.Queue()
# Store event_source for graceful shutdown
self.event_source = event_source
# Start SSE reader thread
executor.submit(self.sse_reader, event_source, read_queue, status_queue)
@ -296,6 +300,13 @@ def sse_client(
logger.exception("Error connecting to SSE endpoint")
raise
finally:
# Close the SSE connection to unblock the reader thread
if transport.event_source is not None:
try:
transport.event_source.response.close()
except RuntimeError:
pass
# Clean up queues
if read_queue:
read_queue.put(None)

View File

@ -8,6 +8,7 @@ and session management.
import logging
import queue
import threading
from collections.abc import Callable, Generator
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
@ -103,6 +104,9 @@ class StreamableHTTPTransport:
CONTENT_TYPE: JSON,
**self.headers,
}
self.stop_event = threading.Event()
self._active_responses: list[httpx.Response] = []
self._lock = threading.Lock()
def _update_headers_with_session(self, base_headers: dict[str, str]) -> dict[str, str]:
"""Update headers with session ID if available."""
@ -111,6 +115,30 @@ class StreamableHTTPTransport:
headers[MCP_SESSION_ID] = self.session_id
return headers
def _register_response(self, response: httpx.Response):
"""Register a response for cleanup on shutdown."""
with self._lock:
self._active_responses.append(response)
def _unregister_response(self, response: httpx.Response):
"""Unregister a response after it's closed."""
with self._lock:
try:
self._active_responses.remove(response)
except ValueError as e:
logger.debug("Ignoring error during response unregister: %s", e)
def close_active_responses(self):
"""Close all active SSE connections to unblock threads."""
with self._lock:
responses_to_close = list(self._active_responses)
self._active_responses.clear()
for response in responses_to_close:
try:
response.close()
except RuntimeError as e:
logger.debug("Ignoring error during active response close: %s", e)
def _is_initialization_request(self, message: JSONRPCMessage) -> bool:
"""Check if the message is an initialization request."""
return isinstance(message.root, JSONRPCRequest) and message.root.method == "initialize"
@ -195,11 +223,21 @@ class StreamableHTTPTransport:
event_source.response.raise_for_status()
logger.debug("GET SSE connection established")
for sse in event_source.iter_sse():
self._handle_sse_event(sse, server_to_client_queue)
# Register response for cleanup
self._register_response(event_source.response)
try:
for sse in event_source.iter_sse():
if self.stop_event.is_set():
logger.debug("GET stream received stop signal")
break
self._handle_sse_event(sse, server_to_client_queue)
finally:
self._unregister_response(event_source.response)
except Exception as exc:
logger.debug("GET stream error (non-fatal): %s", exc)
if not self.stop_event.is_set():
logger.debug("GET stream error (non-fatal): %s", exc)
def _handle_resumption_request(self, ctx: RequestContext):
"""Handle a resumption request using GET with SSE."""
@ -224,15 +262,24 @@ class StreamableHTTPTransport:
event_source.response.raise_for_status()
logger.debug("Resumption GET SSE connection established")
for sse in event_source.iter_sse():
is_complete = self._handle_sse_event(
sse,
ctx.server_to_client_queue,
original_request_id,
ctx.metadata.on_resumption_token_update if ctx.metadata else None,
)
if is_complete:
break
# Register response for cleanup
self._register_response(event_source.response)
try:
for sse in event_source.iter_sse():
if self.stop_event.is_set():
logger.debug("Resumption stream received stop signal")
break
is_complete = self._handle_sse_event(
sse,
ctx.server_to_client_queue,
original_request_id,
ctx.metadata.on_resumption_token_update if ctx.metadata else None,
)
if is_complete:
break
finally:
self._unregister_response(event_source.response)
def _handle_post_request(self, ctx: RequestContext):
"""Handle a POST request with response processing."""
@ -295,17 +342,27 @@ class StreamableHTTPTransport:
def _handle_sse_response(self, response: httpx.Response, ctx: RequestContext):
"""Handle SSE response from the server."""
try:
# Register response for cleanup
self._register_response(response)
event_source = EventSource(response)
for sse in event_source.iter_sse():
is_complete = self._handle_sse_event(
sse,
ctx.server_to_client_queue,
resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None),
)
if is_complete:
break
try:
for sse in event_source.iter_sse():
if self.stop_event.is_set():
logger.debug("SSE response stream received stop signal")
break
is_complete = self._handle_sse_event(
sse,
ctx.server_to_client_queue,
resumption_callback=(ctx.metadata.on_resumption_token_update if ctx.metadata else None),
)
if is_complete:
break
finally:
self._unregister_response(response)
except Exception as e:
ctx.server_to_client_queue.put(e)
if not self.stop_event.is_set():
ctx.server_to_client_queue.put(e)
def _handle_unexpected_content_type(
self,
@ -345,6 +402,11 @@ class StreamableHTTPTransport:
"""
while True:
try:
# Check if we should stop
if self.stop_event.is_set():
logger.debug("Post writer received stop signal")
break
# Read message from client queue with timeout to check stop_event periodically
session_message = client_to_server_queue.get(timeout=DEFAULT_QUEUE_READ_TIMEOUT)
if session_message is None:
@ -381,7 +443,8 @@ class StreamableHTTPTransport:
except queue.Empty:
continue
except Exception as exc:
server_to_client_queue.put(exc)
if not self.stop_event.is_set():
server_to_client_queue.put(exc)
def terminate_session(self, client: httpx.Client):
"""Terminate the session by sending a DELETE request."""
@ -465,6 +528,12 @@ def streamablehttp_client(
transport.get_session_id,
)
finally:
# Set stop event to signal all threads to stop
transport.stop_event.set()
# Close all active SSE connections to unblock threads
transport.close_active_responses()
if transport.session_id and terminate_on_close:
transport.terminate_session(client)
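
The shutdown sequence above follows a common pattern: set a stop flag first, then close any blocking responses so reader threads wake up, see the flag, and exit. A minimal, generic sketch of that pattern, not the transport itself (`resp` stands for any httpx-style streaming response):

```python
# Sketch: stop-flag plus response.close() to unblock a thread stuck on a stream.
import queue
import threading

stop_event = threading.Event()
output: queue.Queue = queue.Queue()

def reader(resp):
    """Read lines until the stream ends or the stop flag is set."""
    try:
        for line in resp.iter_lines():  # blocks on the socket
            if stop_event.is_set():
                break
            output.put(line)
    except Exception:
        if not stop_event.is_set():     # errors during shutdown are expected noise
            raise

def shutdown(resp):
    stop_event.set()  # 1) signal all threads to stop
    resp.close()      # 2) closing the response unblocks a reader stuck in iter_lines()
```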

View File

@ -54,7 +54,7 @@ def generate_dotted_order(run_id: str, start_time: Union[str, datetime], parent_
generate dotted_order for langsmith
"""
start_time = datetime.fromisoformat(start_time) if isinstance(start_time, str) else start_time
timestamp = start_time.strftime("%Y%m%dT%H%M%S%f")[:-3] + "Z"
timestamp = start_time.strftime("%Y%m%dT%H%M%S%f") + "Z"
current_segment = f"{timestamp}{run_id}"
if parent_dotted_order is None:
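
To make the change concrete, here is a small worked example of the timestamp this line now produces, using the same values as the tests added later in this diff:

```python
# Sketch: the timestamp portion of a LangSmith dotted_order, with full
# 6-digit microseconds instead of the previous 3-digit truncation.
from datetime import datetime

start_time = datetime(2025, 1, 15, 10, 30, 45, 123456)
timestamp = start_time.strftime("%Y%m%dT%H%M%S%f") + "Z"
assert timestamp == "20250115T103045123456Z"  # ffffff, not fff
```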

View File

@ -289,7 +289,8 @@ class OracleVector(BaseVector):
words = pseg.cut(query)
current_entity = ""
for word, pos in words:
if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}: # nr: 人名ns: 地名nt: 机构名
# `nr`: Person, `ns`: Location, `nt`: Organization
if pos in {"nr", "Ng", "eng", "nz", "n", "ORG", "v"}:
current_entity += word
else:
if current_entity:

View File

@ -213,7 +213,7 @@ class VastbaseVector(BaseVector):
with self._get_cursor() as cur:
cur.execute(SQL_CREATE_TABLE.format(table_name=self.table_name, dimension=dimension))
# Vastbase 支持的向量维度取值范围为 [1,16000]
# Vastbase supports vector dimensions in the range [1, 16,000]
if dimension <= 16000:
cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name))
redis_client.set(collection_exist_cache_key, 1, ex=3600)

View File

@ -231,7 +231,7 @@ class BaseIndexProcessor(ABC):
if not filename:
parsed_url = urlparse(image_url)
# unquote 处理 URL 中的中文
# Decode percent-encoded characters in the URL path.
path = unquote(parsed_url.path)
filename = os.path.basename(path)
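
As a quick illustration of what the decoding handles, consider a filename with non-ASCII characters that arrives percent-encoded in the URL (the URL below is hypothetical):

```python
# Sketch: recovering a non-ASCII filename from a percent-encoded URL path,
# mirroring the unquote + os.path.basename logic above.
import os
from urllib.parse import unquote, urlparse

image_url = "https://example.com/files/%E5%9B%BE%E7%89%87.png"  # hypothetical URL
path = unquote(urlparse(image_url).path)
filename = os.path.basename(path)
assert filename == "图片.png"
```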

View File

@ -155,6 +155,7 @@ class AppDslService:
parsed_url.scheme == "https"
and parsed_url.netloc == "github.com"
and parsed_url.path.endswith((".yml", ".yaml"))
and "/blob/" in parsed_url.path
):
yaml_url = yaml_url.replace("https://github.com", "https://raw.githubusercontent.com")
yaml_url = yaml_url.replace("/blob/", "/")
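
The added `"/blob/" in parsed_url.path` guard means only GitHub blob URLs are rewritten; other `github.com` URLs (for example user-attachments links) are fetched unchanged. A small worked example of the rewrite, using the URLs from the tests added later in this diff:

```python
# Sketch: rewriting a GitHub blob URL to its raw.githubusercontent.com form,
# as done above when the path contains "/blob/".
yaml_url = "https://github.com/acme/repo/blob/main/app.yml"
if "/blob/" in yaml_url:
    yaml_url = yaml_url.replace("https://github.com", "https://raw.githubusercontent.com")
    yaml_url = yaml_url.replace("/blob/", "/")
assert yaml_url == "https://raw.githubusercontent.com/acme/repo/main/app.yml"
```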

View File

@ -163,34 +163,17 @@ class TestActivateApi:
"account": mock_account,
}
@pytest.fixture
def mock_token_pair(self):
"""Create mock token pair object."""
token_pair = MagicMock()
token_pair.access_token = "access_token"
token_pair.refresh_token = "refresh_token"
token_pair.csrf_token = "csrf_token"
token_pair.model_dump.return_value = {
"access_token": "access_token",
"refresh_token": "refresh_token",
"csrf_token": "csrf_token",
}
return token_pair
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
@patch("controllers.console.auth.activate.RegisterService.revoke_token")
@patch("controllers.console.auth.activate.db")
@patch("controllers.console.auth.activate.AccountService.login")
def test_successful_account_activation(
self,
mock_login,
mock_db,
mock_revoke_token,
mock_get_invitation,
app,
mock_invitation,
mock_account,
mock_token_pair,
):
"""
Test successful account activation.
@ -198,12 +181,10 @@ class TestActivateApi:
Verifies that:
- Account is activated with user preferences
- Account status is set to ACTIVE
- User is logged in after activation
- Invitation token is revoked
"""
# Arrange
mock_get_invitation.return_value = mock_invitation
mock_login.return_value = mock_token_pair
# Act
with app.test_request_context(
@ -230,7 +211,6 @@ class TestActivateApi:
assert mock_account.initialized_at is not None
mock_revoke_token.assert_called_once_with("workspace-123", "invitee@example.com", "valid_token")
mock_db.session.commit.assert_called_once()
mock_login.assert_called_once()
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
def test_activation_with_invalid_token(self, mock_get_invitation, app):
@ -264,17 +244,14 @@ class TestActivateApi:
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
@patch("controllers.console.auth.activate.RegisterService.revoke_token")
@patch("controllers.console.auth.activate.db")
@patch("controllers.console.auth.activate.AccountService.login")
def test_activation_sets_interface_theme(
self,
mock_login,
mock_db,
mock_revoke_token,
mock_get_invitation,
app,
mock_invitation,
mock_account,
mock_token_pair,
):
"""
Test that activation sets default interface theme.
@ -284,7 +261,6 @@ class TestActivateApi:
"""
# Arrange
mock_get_invitation.return_value = mock_invitation
mock_login.return_value = mock_token_pair
# Act
with app.test_request_context(
@ -317,17 +293,14 @@ class TestActivateApi:
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
@patch("controllers.console.auth.activate.RegisterService.revoke_token")
@patch("controllers.console.auth.activate.db")
@patch("controllers.console.auth.activate.AccountService.login")
def test_activation_with_different_locales(
self,
mock_login,
mock_db,
mock_revoke_token,
mock_get_invitation,
app,
mock_invitation,
mock_account,
mock_token_pair,
language,
timezone,
):
@ -341,7 +314,6 @@ class TestActivateApi:
"""
# Arrange
mock_get_invitation.return_value = mock_invitation
mock_login.return_value = mock_token_pair
# Act
with app.test_request_context(
@ -367,27 +339,23 @@ class TestActivateApi:
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
@patch("controllers.console.auth.activate.RegisterService.revoke_token")
@patch("controllers.console.auth.activate.db")
@patch("controllers.console.auth.activate.AccountService.login")
def test_activation_returns_token_data(
def test_activation_returns_success_response(
self,
mock_login,
mock_db,
mock_revoke_token,
mock_get_invitation,
app,
mock_invitation,
mock_token_pair,
):
"""
Test that activation returns authentication tokens.
Test that activation returns a success response without authentication tokens.
Verifies that:
- Token pair is returned in response
- All token types are included (access, refresh, csrf)
- Response contains a success result
- No token data is returned
"""
# Arrange
mock_get_invitation.return_value = mock_invitation
mock_login.return_value = mock_token_pair
# Act
with app.test_request_context(
@ -406,24 +374,18 @@ class TestActivateApi:
response = api.post()
# Assert
assert "data" in response
assert response["data"]["access_token"] == "access_token"
assert response["data"]["refresh_token"] == "refresh_token"
assert response["data"]["csrf_token"] == "csrf_token"
assert response == {"result": "success"}
@patch("controllers.console.auth.activate.RegisterService.get_invitation_if_token_valid")
@patch("controllers.console.auth.activate.RegisterService.revoke_token")
@patch("controllers.console.auth.activate.db")
@patch("controllers.console.auth.activate.AccountService.login")
def test_activation_without_workspace_id(
self,
mock_login,
mock_db,
mock_revoke_token,
mock_get_invitation,
app,
mock_invitation,
mock_token_pair,
):
"""
Test account activation without workspace_id.
@ -434,7 +396,6 @@ class TestActivateApi:
"""
# Arrange
mock_get_invitation.return_value = mock_invitation
mock_login.return_value = mock_token_pair
# Act
with app.test_request_context(

View File

@ -3,50 +3,160 @@ from unittest.mock import MagicMock, patch
import pytest
from core.helper.ssrf_proxy import SSRF_DEFAULT_MAX_RETRIES, STATUS_FORCELIST, make_request
from core.helper.ssrf_proxy import (
SSRF_DEFAULT_MAX_RETRIES,
STATUS_FORCELIST,
_get_user_provided_host_header,
make_request,
)
@patch("httpx.Client.request")
def test_successful_request(mock_request):
@patch("core.helper.ssrf_proxy._get_ssrf_client")
def test_successful_request(mock_get_client):
mock_client = MagicMock()
mock_request = MagicMock()
mock_response = MagicMock()
mock_response.status_code = 200
mock_request.return_value = mock_response
mock_client.send.return_value = mock_response
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
response = make_request("GET", "http://example.com")
assert response.status_code == 200
@patch("httpx.Client.request")
def test_retry_exceed_max_retries(mock_request):
@patch("core.helper.ssrf_proxy._get_ssrf_client")
def test_retry_exceed_max_retries(mock_get_client):
mock_client = MagicMock()
mock_request = MagicMock()
mock_response = MagicMock()
mock_response.status_code = 500
side_effects = [mock_response] * SSRF_DEFAULT_MAX_RETRIES
mock_request.side_effect = side_effects
mock_client.send.return_value = mock_response
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
with pytest.raises(Exception) as e:
make_request("GET", "http://example.com", max_retries=SSRF_DEFAULT_MAX_RETRIES - 1)
assert str(e.value) == f"Reached maximum retries ({SSRF_DEFAULT_MAX_RETRIES - 1}) for URL http://example.com"
@patch("httpx.Client.request")
def test_retry_logic_success(mock_request):
side_effects = []
@patch("core.helper.ssrf_proxy._get_ssrf_client")
def test_retry_logic_success(mock_get_client):
mock_client = MagicMock()
mock_request = MagicMock()
mock_response = MagicMock()
mock_response.status_code = 200
side_effects = []
for _ in range(SSRF_DEFAULT_MAX_RETRIES):
status_code = secrets.choice(STATUS_FORCELIST)
mock_response = MagicMock()
mock_response.status_code = status_code
side_effects.append(mock_response)
retry_response = MagicMock()
retry_response.status_code = status_code
side_effects.append(retry_response)
mock_response_200 = MagicMock()
mock_response_200.status_code = 200
side_effects.append(mock_response_200)
mock_request.side_effect = side_effects
side_effects.append(mock_response)
mock_client.send.side_effect = side_effects
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
response = make_request("GET", "http://example.com", max_retries=SSRF_DEFAULT_MAX_RETRIES)
assert response.status_code == 200
assert mock_request.call_count == SSRF_DEFAULT_MAX_RETRIES + 1
assert mock_request.call_args_list[0][1].get("method") == "GET"
assert mock_client.send.call_count == SSRF_DEFAULT_MAX_RETRIES + 1
assert mock_client.build_request.call_count == SSRF_DEFAULT_MAX_RETRIES + 1
class TestGetUserProvidedHostHeader:
"""Tests for _get_user_provided_host_header function."""
def test_returns_none_when_headers_is_none(self):
assert _get_user_provided_host_header(None) is None
def test_returns_none_when_headers_is_empty(self):
assert _get_user_provided_host_header({}) is None
def test_returns_none_when_host_header_not_present(self):
headers = {"Content-Type": "application/json", "Authorization": "Bearer token"}
assert _get_user_provided_host_header(headers) is None
def test_returns_host_header_lowercase(self):
headers = {"host": "example.com"}
assert _get_user_provided_host_header(headers) == "example.com"
def test_returns_host_header_uppercase(self):
headers = {"HOST": "example.com"}
assert _get_user_provided_host_header(headers) == "example.com"
def test_returns_host_header_mixed_case(self):
headers = {"HoSt": "example.com"}
assert _get_user_provided_host_header(headers) == "example.com"
def test_returns_host_header_from_multiple_headers(self):
headers = {"Content-Type": "application/json", "Host": "api.example.com", "Authorization": "Bearer token"}
assert _get_user_provided_host_header(headers) == "api.example.com"
def test_returns_first_host_header_when_duplicates(self):
headers = {"host": "first.com", "Host": "second.com"}
# Should return the first one encountered (iteration order is preserved in dict)
result = _get_user_provided_host_header(headers)
assert result in ("first.com", "second.com")
@patch("core.helper.ssrf_proxy._get_ssrf_client")
def test_host_header_preservation_without_user_header(mock_get_client):
"""Test that when no Host header is provided, the default behavior is maintained."""
mock_client = MagicMock()
mock_request = MagicMock()
mock_request.headers = {}
mock_response = MagicMock()
mock_response.status_code = 200
mock_client.send.return_value = mock_response
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
response = make_request("GET", "http://example.com")
assert response.status_code == 200
# build_request should be called without headers dict containing Host
mock_client.build_request.assert_called_once()
# Host should not be set if not provided by user
assert "Host" not in mock_request.headers or mock_request.headers.get("Host") is None
@patch("core.helper.ssrf_proxy._get_ssrf_client")
def test_host_header_preservation_with_user_header(mock_get_client):
"""Test that user-provided Host header is preserved in the request."""
mock_client = MagicMock()
mock_request = MagicMock()
mock_request.headers = {}
mock_response = MagicMock()
mock_response.status_code = 200
mock_client.send.return_value = mock_response
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
custom_host = "custom.example.com:8080"
response = make_request("GET", "http://example.com", headers={"Host": custom_host})
assert response.status_code == 200
# Verify build_request was called
mock_client.build_request.assert_called_once()
# Verify the Host header was set on the request object
assert mock_request.headers.get("Host") == custom_host
mock_client.send.assert_called_once_with(mock_request)
@patch("core.helper.ssrf_proxy._get_ssrf_client")
@pytest.mark.parametrize("host_key", ["host", "HOST"])
def test_host_header_preservation_case_insensitive(mock_get_client, host_key):
"""Test that Host header is preserved regardless of case."""
mock_client = MagicMock()
mock_request = MagicMock()
mock_request.headers = {}
mock_response = MagicMock()
mock_response.status_code = 200
mock_client.send.return_value = mock_response
mock_client.build_request.return_value = mock_request
mock_get_client.return_value = mock_client
response = make_request("GET", "http://example.com", headers={host_key: "api.example.com"})
assert mock_request.headers.get("Host") == "api.example.com"

View File

@ -1,6 +1,9 @@
import re
from datetime import datetime
import pytest
from core.ops.utils import validate_project_name, validate_url, validate_url_with_path
from core.ops.utils import generate_dotted_order, validate_project_name, validate_url, validate_url_with_path
class TestValidateUrl:
@ -136,3 +139,51 @@ class TestValidateProjectName:
"""Test custom default name"""
result = validate_project_name("", "Custom Default")
assert result == "Custom Default"
class TestGenerateDottedOrder:
"""Test cases for generate_dotted_order function"""
def test_dotted_order_has_6_digit_microseconds(self):
"""Test that timestamp includes full 6-digit microseconds for LangSmith API compatibility.
LangSmith API expects timestamps in format: YYYYMMDDTHHMMSSffffffZ (6-digit microseconds).
Previously, the code truncated to 3 digits which caused API errors:
'cannot parse .111 as .000000'
"""
start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
run_id = "test-run-id"
result = generate_dotted_order(run_id, start_time)
# Extract timestamp portion (before the run_id)
timestamp_match = re.match(r"^(\d{8}T\d{6})(\d+)Z", result)
assert timestamp_match is not None, "Timestamp format should match YYYYMMDDTHHMMSSffffffZ"
microseconds = timestamp_match.group(2)
assert len(microseconds) == 6, f"Microseconds should be 6 digits, got {len(microseconds)}: {microseconds}"
def test_dotted_order_format_matches_langsmith_expected(self):
"""Test that dotted_order format matches LangSmith API expected format."""
start_time = datetime(2025, 1, 15, 10, 30, 45, 123456)
run_id = "abc123"
result = generate_dotted_order(run_id, start_time)
# LangSmith expects: YYYYMMDDTHHMMSSffffffZ followed by run_id
assert result == "20250115T103045123456Zabc123"
def test_dotted_order_with_parent(self):
"""Test dotted_order generation with parent order uses dot separator."""
start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
run_id = "child-run-id"
parent_order = "20251223T041955000000Zparent-run-id"
result = generate_dotted_order(run_id, start_time, parent_order)
assert result == "20251223T041955000000Zparent-run-id.20251223T041955111000Zchild-run-id"
def test_dotted_order_without_parent_has_no_dot(self):
"""Test dotted_order generation without parent has no dot separator."""
start_time = datetime(2025, 12, 23, 4, 19, 55, 111000)
run_id = "test-run-id"
result = generate_dotted_order(run_id, start_time, None)
assert "." not in result

View File

@ -0,0 +1,71 @@
from unittest.mock import MagicMock
import httpx
from models import Account
from services import app_dsl_service
from services.app_dsl_service import AppDslService, ImportMode, ImportStatus
def _build_response(url: str, status_code: int, content: bytes = b"") -> httpx.Response:
request = httpx.Request("GET", url)
return httpx.Response(status_code=status_code, request=request, content=content)
def _pending_yaml_content(version: str = "99.0.0") -> bytes:
return (f'version: "{version}"\nkind: app\napp:\n name: Loop Test\n mode: workflow\n').encode()
def _account_mock() -> MagicMock:
account = MagicMock(spec=Account)
account.current_tenant_id = "tenant-1"
return account
def test_import_app_yaml_url_user_attachments_keeps_original_url(monkeypatch):
yaml_url = "https://github.com/user-attachments/files/24290802/loop-test.yml"
raw_url = "https://raw.githubusercontent.com/user-attachments/files/24290802/loop-test.yml"
yaml_bytes = _pending_yaml_content()
def fake_get(url: str, **kwargs):
if url == raw_url:
return _build_response(url, status_code=404)
assert url == yaml_url
return _build_response(url, status_code=200, content=yaml_bytes)
monkeypatch.setattr(app_dsl_service.ssrf_proxy, "get", fake_get)
service = AppDslService(MagicMock())
result = service.import_app(
account=_account_mock(),
import_mode=ImportMode.YAML_URL,
yaml_url=yaml_url,
)
assert result.status == ImportStatus.PENDING
assert result.imported_dsl_version == "99.0.0"
def test_import_app_yaml_url_github_blob_rewrites_to_raw(monkeypatch):
yaml_url = "https://github.com/acme/repo/blob/main/app.yml"
raw_url = "https://raw.githubusercontent.com/acme/repo/main/app.yml"
yaml_bytes = _pending_yaml_content()
requested_urls: list[str] = []
def fake_get(url: str, **kwargs):
requested_urls.append(url)
assert url == raw_url
return _build_response(url, status_code=200, content=yaml_bytes)
monkeypatch.setattr(app_dsl_service.ssrf_proxy, "get", fake_get)
service = AppDslService(MagicMock())
result = service.import_app(
account=_account_mock(),
import_mode=ImportMode.YAML_URL,
yaml_url=yaml_url,
)
assert result.status == ImportStatus.PENDING
assert requested_urls == [raw_url]

View File

@ -1,48 +1,40 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# Dependencies
node_modules/
# dependencies
/node_modules
/.pnp
.pnp.js
# Build output
dist/
# testing
/coverage
# Testing
coverage/
# next.js
/.next/
/out/
# IDE
.idea/
.vscode/
*.swp
*.swo
# production
/build
# misc
# OS
.DS_Store
*.pem
Thumbs.db
# debug
# Debug logs
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
pnpm-debug.log*
# local env files
.env*.local
# Environment
.env
.env.local
.env.*.local
# vercel
.vercel
# typescript
# TypeScript
*.tsbuildinfo
next-env.d.ts
# npm
# Lock files (use pnpm-lock.yaml in CI if needed)
package-lock.json
yarn.lock
# yarn
.pnp.cjs
.pnp.loader.mjs
.yarn/
.yarnrc.yml
# pmpm
pnpm-lock.yaml
# Misc
*.pem
*.tgz

View File

@ -0,0 +1,22 @@
MIT License
Copyright (c) 2023 LangGenius
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -13,54 +13,92 @@ npm install dify-client
After installing the SDK, you can use it in your project like this:
```js
import { DifyClient, ChatClient, CompletionClient } from 'dify-client'
import {
DifyClient,
ChatClient,
CompletionClient,
WorkflowClient,
KnowledgeBaseClient,
WorkspaceClient
} from 'dify-client'
const API_KEY = 'your-api-key-here'
const user = `random-user-id`
const API_KEY = 'your-app-api-key'
const DATASET_API_KEY = 'your-dataset-api-key'
const user = 'random-user-id'
const query = 'Please tell me a short story in 10 words or less.'
const remote_url_files = [{
type: 'image',
transfer_method: 'remote_url',
url: 'your_url_address'
}]
// Create a completion client
const completionClient = new CompletionClient(API_KEY)
// Create a completion message
completionClient.createCompletionMessage({'query': query}, user)
// Create a completion message with vision model
completionClient.createCompletionMessage({'query': 'Describe the picture.'}, user, false, remote_url_files)
// Create a chat client
const chatClient = new ChatClient(API_KEY)
// Create a chat message in stream mode
const response = await chatClient.createChatMessage({}, query, user, true, null)
const stream = response.data;
stream.on('data', data => {
console.log(data);
});
stream.on('end', () => {
console.log('stream done');
});
// Create a chat message with vision model
chatClient.createChatMessage({}, 'Describe the picture.', user, false, null, remote_url_files)
// Fetch conversations
chatClient.getConversations(user)
// Fetch conversation messages
chatClient.getConversationMessages(conversationId, user)
// Rename conversation
chatClient.renameConversation(conversationId, name, user)
const completionClient = new CompletionClient(API_KEY)
const workflowClient = new WorkflowClient(API_KEY)
const kbClient = new KnowledgeBaseClient(DATASET_API_KEY)
const workspaceClient = new WorkspaceClient(DATASET_API_KEY)
const client = new DifyClient(API_KEY)
// Fetch application parameters
client.getApplicationParameters(user)
// Provide feedback for a message
client.messageFeedback(messageId, rating, user)
// App core
await client.getApplicationParameters(user)
await client.messageFeedback('message-id', 'like', user)
// Completion (blocking)
await completionClient.createCompletionMessage({
inputs: { query },
user,
response_mode: 'blocking'
})
// Chat (streaming)
const stream = await chatClient.createChatMessage({
inputs: {},
query,
user,
response_mode: 'streaming'
})
for await (const event of stream) {
console.log(event.event, event.data)
}
// Chatflow (advanced chat via workflow_id)
await chatClient.createChatMessage({
inputs: {},
query,
user,
workflow_id: 'workflow-id',
response_mode: 'blocking'
})
// Workflow run (blocking or streaming)
await workflowClient.run({
inputs: { query },
user,
response_mode: 'blocking'
})
// Knowledge base (dataset token required)
await kbClient.listDatasets({ page: 1, limit: 20 })
await kbClient.createDataset({ name: 'KB', indexing_technique: 'economy' })
// RAG pipeline (may require service API route registration)
const pipelineStream = await kbClient.runPipeline('dataset-id', {
inputs: {},
datasource_type: 'online_document',
datasource_info_list: [],
start_node_id: 'start-node-id',
is_published: true,
response_mode: 'streaming'
})
for await (const event of pipelineStream) {
console.log(event.data)
}
// Workspace models (dataset token required)
await workspaceClient.getModelsByType('text-embedding')
```
Replace 'your-app-api-key' and 'your-dataset-api-key' with your actual Dify app API key and dataset API key.
Notes:
- App endpoints use an app API token; knowledge base and workspace endpoints use a dataset API token.
- Chat/completion require a stable `user` identifier in the request payload.
- For streaming responses, iterate the returned AsyncIterable. Use `stream.toText()` to collect text.
## License

View File

@ -1,12 +0,0 @@
module.exports = {
presets: [
[
"@babel/preset-env",
{
targets: {
node: "current",
},
},
],
],
};

View File

@ -0,0 +1,45 @@
import js from "@eslint/js";
import tsParser from "@typescript-eslint/parser";
import tsPlugin from "@typescript-eslint/eslint-plugin";
import { fileURLToPath } from "node:url";
import path from "node:path";
const tsconfigRootDir = path.dirname(fileURLToPath(import.meta.url));
const typeCheckedRules =
tsPlugin.configs["recommended-type-checked"]?.rules ??
tsPlugin.configs.recommendedTypeChecked?.rules ??
{};
export default [
{
ignores: ["dist", "node_modules", "scripts", "tests", "**/*.test.*", "**/*.spec.*"],
},
js.configs.recommended,
{
files: ["src/**/*.ts"],
languageOptions: {
parser: tsParser,
ecmaVersion: "latest",
parserOptions: {
project: "./tsconfig.json",
tsconfigRootDir,
sourceType: "module",
},
},
plugins: {
"@typescript-eslint": tsPlugin,
},
rules: {
...tsPlugin.configs.recommended.rules,
...typeCheckedRules,
"no-undef": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unsafe-call": "error",
"@typescript-eslint/no-unsafe-return": "error",
"@typescript-eslint/consistent-type-imports": [
"error",
{ prefer: "type-imports", fixStyle: "separate-type-imports" },
],
},
},
];

View File

@ -1,107 +0,0 @@
// Types.d.ts
export const BASE_URL: string;
export type RequestMethods = 'GET' | 'POST' | 'PATCH' | 'DELETE';
interface Params {
[key: string]: any;
}
interface HeaderParams {
[key: string]: string;
}
interface User {
}
interface DifyFileBase {
type: "image"
}
export interface DifyRemoteFile extends DifyFileBase {
transfer_method: "remote_url"
url: string
}
export interface DifyLocalFile extends DifyFileBase {
transfer_method: "local_file"
upload_file_id: string
}
export type DifyFile = DifyRemoteFile | DifyLocalFile;
export declare class DifyClient {
constructor(apiKey: string, baseUrl?: string);
updateApiKey(apiKey: string): void;
sendRequest(
method: RequestMethods,
endpoint: string,
data?: any,
params?: Params,
stream?: boolean,
headerParams?: HeaderParams
): Promise<any>;
messageFeedback(message_id: string, rating: number, user: User): Promise<any>;
getApplicationParameters(user: User): Promise<any>;
fileUpload(data: FormData): Promise<any>;
textToAudio(text: string ,user: string, streaming?: boolean): Promise<any>;
getMeta(user: User): Promise<any>;
}
export declare class CompletionClient extends DifyClient {
createCompletionMessage(
inputs: any,
user: User,
stream?: boolean,
files?: DifyFile[] | null
): Promise<any>;
}
export declare class ChatClient extends DifyClient {
createChatMessage(
inputs: any,
query: string,
user: User,
stream?: boolean,
conversation_id?: string | null,
files?: DifyFile[] | null
): Promise<any>;
getSuggested(message_id: string, user: User): Promise<any>;
stopMessage(task_id: string, user: User) : Promise<any>;
getConversations(
user: User,
first_id?: string | null,
limit?: number | null,
pinned?: boolean | null
): Promise<any>;
getConversationMessages(
user: User,
conversation_id?: string,
first_id?: string | null,
limit?: number | null
): Promise<any>;
renameConversation(conversation_id: string, name: string, user: User,auto_generate:boolean): Promise<any>;
deleteConversation(conversation_id: string, user: User): Promise<any>;
audioToText(data: FormData): Promise<any>;
}
export declare class WorkflowClient extends DifyClient {
run(inputs: any, user: User, stream?: boolean,): Promise<any>;
stop(task_id: string, user: User): Promise<any>;
}

View File

@ -1,351 +0,0 @@
import axios from "axios";
export const BASE_URL = "https://api.dify.ai/v1";
export const routes = {
// app's
feedback: {
method: "POST",
url: (message_id) => `/messages/${message_id}/feedbacks`,
},
application: {
method: "GET",
url: () => `/parameters`,
},
fileUpload: {
method: "POST",
url: () => `/files/upload`,
},
textToAudio: {
method: "POST",
url: () => `/text-to-audio`,
},
getMeta: {
method: "GET",
url: () => `/meta`,
},
// completion's
createCompletionMessage: {
method: "POST",
url: () => `/completion-messages`,
},
// chat's
createChatMessage: {
method: "POST",
url: () => `/chat-messages`,
},
getSuggested:{
method: "GET",
url: (message_id) => `/messages/${message_id}/suggested`,
},
stopChatMessage: {
method: "POST",
url: (task_id) => `/chat-messages/${task_id}/stop`,
},
getConversations: {
method: "GET",
url: () => `/conversations`,
},
getConversationMessages: {
method: "GET",
url: () => `/messages`,
},
renameConversation: {
method: "POST",
url: (conversation_id) => `/conversations/${conversation_id}/name`,
},
deleteConversation: {
method: "DELETE",
url: (conversation_id) => `/conversations/${conversation_id}`,
},
audioToText: {
method: "POST",
url: () => `/audio-to-text`,
},
// workflows
runWorkflow: {
method: "POST",
url: () => `/workflows/run`,
},
stopWorkflow: {
method: "POST",
url: (task_id) => `/workflows/tasks/${task_id}/stop`,
}
};
export class DifyClient {
constructor(apiKey, baseUrl = BASE_URL) {
this.apiKey = apiKey;
this.baseUrl = baseUrl;
}
updateApiKey(apiKey) {
this.apiKey = apiKey;
}
async sendRequest(
method,
endpoint,
data = null,
params = null,
stream = false,
headerParams = {}
) {
const isFormData =
(typeof FormData !== "undefined" && data instanceof FormData) ||
(data && data.constructor && data.constructor.name === "FormData");
const headers = {
Authorization: `Bearer ${this.apiKey}`,
...(isFormData ? {} : { "Content-Type": "application/json" }),
...headerParams,
};
const url = `${this.baseUrl}${endpoint}`;
let response;
if (stream) {
response = await axios({
method,
url,
data,
params,
headers,
responseType: "stream",
});
} else {
response = await axios({
method,
url,
...(method !== "GET" && { data }),
params,
headers,
responseType: "json",
});
}
return response;
}
messageFeedback(message_id, rating, user) {
const data = {
rating,
user,
};
return this.sendRequest(
routes.feedback.method,
routes.feedback.url(message_id),
data
);
}
getApplicationParameters(user) {
const params = { user };
return this.sendRequest(
routes.application.method,
routes.application.url(),
null,
params
);
}
fileUpload(data) {
return this.sendRequest(
routes.fileUpload.method,
routes.fileUpload.url(),
data
);
}
textToAudio(text, user, streaming = false) {
const data = {
text,
user,
streaming
};
return this.sendRequest(
routes.textToAudio.method,
routes.textToAudio.url(),
data,
null,
streaming
);
}
getMeta(user) {
const params = { user };
return this.sendRequest(
routes.getMeta.method,
routes.getMeta.url(),
null,
params
);
}
}
export class CompletionClient extends DifyClient {
createCompletionMessage(inputs, user, stream = false, files = null) {
const data = {
inputs,
user,
response_mode: stream ? "streaming" : "blocking",
files,
};
return this.sendRequest(
routes.createCompletionMessage.method,
routes.createCompletionMessage.url(),
data,
null,
stream
);
}
runWorkflow(inputs, user, stream = false, files = null) {
const data = {
inputs,
user,
response_mode: stream ? "streaming" : "blocking",
};
return this.sendRequest(
routes.runWorkflow.method,
routes.runWorkflow.url(),
data,
null,
stream
);
}
}
export class ChatClient extends DifyClient {
createChatMessage(
inputs,
query,
user,
stream = false,
conversation_id = null,
files = null
) {
const data = {
inputs,
query,
user,
response_mode: stream ? "streaming" : "blocking",
files,
};
if (conversation_id) data.conversation_id = conversation_id;
return this.sendRequest(
routes.createChatMessage.method,
routes.createChatMessage.url(),
data,
null,
stream
);
}
getSuggested(message_id, user) {
const data = { user };
return this.sendRequest(
routes.getSuggested.method,
routes.getSuggested.url(message_id),
data
);
}
stopMessage(task_id, user) {
const data = { user };
return this.sendRequest(
routes.stopChatMessage.method,
routes.stopChatMessage.url(task_id),
data
);
}
getConversations(user, first_id = null, limit = null, pinned = null) {
const params = { user, first_id: first_id, limit, pinned };
return this.sendRequest(
routes.getConversations.method,
routes.getConversations.url(),
null,
params
);
}
getConversationMessages(
user,
conversation_id = "",
first_id = null,
limit = null
) {
const params = { user };
if (conversation_id) params.conversation_id = conversation_id;
if (first_id) params.first_id = first_id;
if (limit) params.limit = limit;
return this.sendRequest(
routes.getConversationMessages.method,
routes.getConversationMessages.url(),
null,
params
);
}
renameConversation(conversation_id, name, user, auto_generate) {
const data = { name, user, auto_generate };
return this.sendRequest(
routes.renameConversation.method,
routes.renameConversation.url(conversation_id),
data
);
}
deleteConversation(conversation_id, user) {
const data = { user };
return this.sendRequest(
routes.deleteConversation.method,
routes.deleteConversation.url(conversation_id),
data
);
}
audioToText(data) {
return this.sendRequest(
routes.audioToText.method,
routes.audioToText.url(),
data
);
}
}
export class WorkflowClient extends DifyClient {
run(inputs,user,stream) {
const data = {
inputs,
response_mode: stream ? "streaming" : "blocking",
user
};
return this.sendRequest(
routes.runWorkflow.method,
routes.runWorkflow.url(),
data,
null,
stream
);
}
stop(task_id, user) {
const data = { user };
return this.sendRequest(
routes.stopWorkflow.method,
routes.stopWorkflow.url(task_id),
data
);
}
}

View File

@ -1,141 +0,0 @@
import { DifyClient, WorkflowClient, BASE_URL, routes } from ".";
import axios from 'axios'
jest.mock('axios')
afterEach(() => {
jest.resetAllMocks()
})
describe('Client', () => {
let difyClient
beforeEach(() => {
difyClient = new DifyClient('test')
})
test('should create a client', () => {
expect(difyClient).toBeDefined();
})
// test updateApiKey
test('should update the api key', () => {
difyClient.updateApiKey('test2');
expect(difyClient.apiKey).toBe('test2');
})
});
describe('Send Requests', () => {
let difyClient
beforeEach(() => {
difyClient = new DifyClient('test')
})
it('should make a successful request to the application parameter', async () => {
const method = 'GET'
const endpoint = routes.application.url()
const expectedResponse = { data: 'response' }
axios.mockResolvedValue(expectedResponse)
await difyClient.sendRequest(method, endpoint)
expect(axios).toHaveBeenCalledWith({
method,
url: `${BASE_URL}${endpoint}`,
params: null,
headers: {
Authorization: `Bearer ${difyClient.apiKey}`,
'Content-Type': 'application/json',
},
responseType: 'json',
})
})
it('should handle errors from the API', async () => {
const method = 'GET'
const endpoint = '/test-endpoint'
const errorMessage = 'Request failed with status code 404'
axios.mockRejectedValue(new Error(errorMessage))
await expect(difyClient.sendRequest(method, endpoint)).rejects.toThrow(
errorMessage
)
})
it('uses the getMeta route configuration', async () => {
axios.mockResolvedValue({ data: 'ok' })
await difyClient.getMeta('end-user')
expect(axios).toHaveBeenCalledWith({
method: routes.getMeta.method,
url: `${BASE_URL}${routes.getMeta.url()}`,
params: { user: 'end-user' },
headers: {
Authorization: `Bearer ${difyClient.apiKey}`,
'Content-Type': 'application/json',
},
responseType: 'json',
})
})
})
describe('File uploads', () => {
let difyClient
const OriginalFormData = global.FormData
beforeAll(() => {
global.FormData = class FormDataMock {}
})
afterAll(() => {
global.FormData = OriginalFormData
})
beforeEach(() => {
difyClient = new DifyClient('test')
})
it('does not override multipart boundary headers for FormData', async () => {
const form = new FormData()
axios.mockResolvedValue({ data: 'ok' })
await difyClient.fileUpload(form)
expect(axios).toHaveBeenCalledWith({
method: routes.fileUpload.method,
url: `${BASE_URL}${routes.fileUpload.url()}`,
data: form,
params: null,
headers: {
Authorization: `Bearer ${difyClient.apiKey}`,
},
responseType: 'json',
})
})
})
describe('Workflow client', () => {
let workflowClient
beforeEach(() => {
workflowClient = new WorkflowClient('test')
})
it('uses tasks stop path for workflow stop', async () => {
axios.mockResolvedValue({ data: 'stopped' })
await workflowClient.stop('task-1', 'end-user')
expect(axios).toHaveBeenCalledWith({
method: routes.stopWorkflow.method,
url: `${BASE_URL}${routes.stopWorkflow.url('task-1')}`,
data: { user: 'end-user' },
params: null,
headers: {
Authorization: `Bearer ${workflowClient.apiKey}`,
'Content-Type': 'application/json',
},
responseType: 'json',
})
})
})

View File

@ -1,6 +0,0 @@
module.exports = {
testEnvironment: "node",
transform: {
"^.+\\.[tj]sx?$": "babel-jest",
},
};

View File

@ -1,30 +1,70 @@
{
"name": "dify-client",
"version": "2.3.2",
"version": "3.0.0",
"description": "This is the Node.js SDK for the Dify.AI API, which allows you to easily integrate Dify.AI into your Node.js applications.",
"main": "index.js",
"type": "module",
"types":"index.d.ts",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.js"
}
},
"engines": {
"node": ">=18.0.0"
},
"files": [
"dist",
"README.md",
"LICENSE"
],
"keywords": [
"Dify",
"Dify.AI",
"LLM"
"LLM",
"AI",
"SDK",
"API"
],
"author": "Joel",
"author": "LangGenius",
"contributors": [
"<crazywoola> <<427733928@qq.com>> (https://github.com/crazywoola)"
"Joel <iamjoel007@gmail.com> (https://github.com/iamjoel)",
"lyzno1 <yuanyouhuilyz@gmail.com> (https://github.com/lyzno1)",
"crazywoola <427733928@qq.com> (https://github.com/crazywoola)"
],
"repository": {
"type": "git",
"url": "https://github.com/langgenius/dify.git",
"directory": "sdks/nodejs-client"
},
"bugs": {
"url": "https://github.com/langgenius/dify/issues"
},
"homepage": "https://dify.ai",
"license": "MIT",
"scripts": {
"test": "jest"
"build": "tsup",
"lint": "eslint",
"lint:fix": "eslint --fix",
"type-check": "tsc -p tsconfig.json --noEmit",
"test": "vitest run",
"test:coverage": "vitest run --coverage",
"publish:check": "./scripts/publish.sh --dry-run",
"publish:npm": "./scripts/publish.sh"
},
"dependencies": {
"axios": "^1.3.5"
},
"devDependencies": {
"@babel/core": "^7.21.8",
"@babel/preset-env": "^7.21.5",
"babel-jest": "^29.5.0",
"jest": "^29.5.0"
"@eslint/js": "^9.2.0",
"@types/node": "^20.11.30",
"@typescript-eslint/eslint-plugin": "^8.50.1",
"@typescript-eslint/parser": "^8.50.1",
"@vitest/coverage-v8": "1.6.1",
"eslint": "^9.2.0",
"tsup": "^8.5.1",
"typescript": "^5.4.5",
"vitest": "^1.5.0"
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,261 @@
#!/usr/bin/env bash
#
# Dify Node.js SDK Publish Script
# ================================
# A beautiful and reliable script to publish the SDK to npm
#
# Usage:
# ./scripts/publish.sh # Normal publish
# ./scripts/publish.sh --dry-run # Test without publishing
# ./scripts/publish.sh --skip-tests # Skip tests (not recommended)
#
set -euo pipefail
# ============================================================================
# Colors and Formatting
# ============================================================================
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
MAGENTA='\033[0;35m'
CYAN='\033[0;36m'
BOLD='\033[1m'
DIM='\033[2m'
NC='\033[0m' # No Color
# ============================================================================
# Helper Functions
# ============================================================================
print_banner() {
echo -e "${CYAN}"
echo "╔═══════════════════════════════════════════════════════════════╗"
echo "║ ║"
echo "║ 🚀 Dify Node.js SDK Publish Script 🚀 ║"
echo "║ ║"
echo "╚═══════════════════════════════════════════════════════════════╝"
echo -e "${NC}"
}
info() {
echo -e "${BLUE} ${NC}$1"
}
success() {
echo -e "${GREEN}${NC}$1"
}
warning() {
echo -e "${YELLOW}${NC}$1"
}
error() {
echo -e "${RED}${NC}$1"
}
step() {
echo -e "\n${MAGENTA}${BOLD}$1${NC}"
}
divider() {
echo -e "${DIM}─────────────────────────────────────────────────────────────────${NC}"
}
# ============================================================================
# Configuration
# ============================================================================
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
DRY_RUN=false
SKIP_TESTS=false
# Parse arguments
for arg in "$@"; do
case $arg in
--dry-run)
DRY_RUN=true
;;
--skip-tests)
SKIP_TESTS=true
;;
--help|-h)
echo "Usage: $0 [options]"
echo ""
echo "Options:"
echo " --dry-run Run without actually publishing"
echo " --skip-tests Skip running tests (not recommended)"
echo " --help, -h Show this help message"
exit 0
;;
esac
done
# ============================================================================
# Main Script
# ============================================================================
main() {
print_banner
cd "$PROJECT_DIR"
# Show mode
if [[ "$DRY_RUN" == true ]]; then
warning "Running in DRY-RUN mode - no actual publish will occur"
divider
fi
# ========================================================================
# Step 1: Environment Check
# ========================================================================
step "Step 1/6: Checking environment..."
# Check Node.js
if ! command -v node &> /dev/null; then
error "Node.js is not installed"
exit 1
fi
NODE_VERSION=$(node -v)
success "Node.js: $NODE_VERSION"
# Check npm
if ! command -v npm &> /dev/null; then
error "npm is not installed"
exit 1
fi
NPM_VERSION=$(npm -v)
success "npm: v$NPM_VERSION"
# Check pnpm (optional, for local dev)
if command -v pnpm &> /dev/null; then
PNPM_VERSION=$(pnpm -v)
success "pnpm: v$PNPM_VERSION"
else
info "pnpm not found (optional)"
fi
# Check npm login status
if ! npm whoami &> /dev/null; then
error "Not logged in to npm. Run 'npm login' first."
exit 1
fi
NPM_USER=$(npm whoami)
success "Logged in as: ${BOLD}$NPM_USER${NC}"
# ========================================================================
# Step 2: Read Package Info
# ========================================================================
step "Step 2/6: Reading package info..."
PACKAGE_NAME=$(node -p "require('./package.json').name")
PACKAGE_VERSION=$(node -p "require('./package.json').version")
success "Package: ${BOLD}$PACKAGE_NAME${NC}"
success "Version: ${BOLD}$PACKAGE_VERSION${NC}"
# Check if version already exists on npm
if npm view "$PACKAGE_NAME@$PACKAGE_VERSION" version &> /dev/null; then
error "Version $PACKAGE_VERSION already exists on npm!"
echo ""
info "Current published versions:"
npm view "$PACKAGE_NAME" versions --json 2>/dev/null | tail -5
echo ""
warning "Please update the version in package.json before publishing."
exit 1
fi
success "Version $PACKAGE_VERSION is available"
# ========================================================================
# Step 3: Install Dependencies
# ========================================================================
step "Step 3/6: Installing dependencies..."
if command -v pnpm &> /dev/null; then
pnpm install --frozen-lockfile 2>/dev/null || pnpm install
else
npm ci 2>/dev/null || npm install
fi
success "Dependencies installed"
# ========================================================================
# Step 4: Run Tests
# ========================================================================
step "Step 4/6: Running tests..."
if [[ "$SKIP_TESTS" == true ]]; then
warning "Skipping tests (--skip-tests flag)"
else
if command -v pnpm &> /dev/null; then
pnpm test
else
npm test
fi
success "All tests passed"
fi
# ========================================================================
# Step 5: Build
# ========================================================================
step "Step 5/6: Building package..."
# Clean previous build
rm -rf dist
if command -v pnpm &> /dev/null; then
pnpm run build
else
npm run build
fi
success "Build completed"
# Verify build output
if [[ ! -f "dist/index.js" ]]; then
error "Build failed - dist/index.js not found"
exit 1
fi
if [[ ! -f "dist/index.d.ts" ]]; then
error "Build failed - dist/index.d.ts not found"
exit 1
fi
success "Build output verified"
# ========================================================================
# Step 6: Publish
# ========================================================================
step "Step 6/6: Publishing to npm..."
divider
echo -e "${CYAN}Package contents:${NC}"
npm pack --dry-run 2>&1 | head -30
divider
if [[ "$DRY_RUN" == true ]]; then
warning "DRY-RUN: Skipping actual publish"
echo ""
info "To publish for real, run without --dry-run flag"
else
echo ""
echo -e "${YELLOW}About to publish ${BOLD}$PACKAGE_NAME@$PACKAGE_VERSION${NC}${YELLOW} to npm${NC}"
echo -e "${DIM}Press Enter to continue, or Ctrl+C to cancel...${NC}"
read -r
npm publish --access public
echo ""
success "🎉 Successfully published ${BOLD}$PACKAGE_NAME@$PACKAGE_VERSION${NC} to npm!"
echo ""
echo -e "${GREEN}Install with:${NC}"
echo -e " ${CYAN}npm install $PACKAGE_NAME${NC}"
echo -e " ${CYAN}pnpm add $PACKAGE_NAME${NC}"
echo -e " ${CYAN}yarn add $PACKAGE_NAME${NC}"
echo ""
echo -e "${GREEN}View on npm:${NC}"
echo -e " ${CYAN}https://www.npmjs.com/package/$PACKAGE_NAME${NC}"
fi
divider
echo -e "${GREEN}${BOLD}✨ All done!${NC}"
}
# Run main function
main "$@"

View File

@ -0,0 +1,175 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { DifyClient } from "./base";
import { ValidationError } from "../errors/dify-error";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("DifyClient base", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("getRoot calls root endpoint", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.getRoot();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/",
});
});
it("getApplicationParameters includes optional user", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.getApplicationParameters();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/parameters",
query: undefined,
});
await dify.getApplicationParameters("user-1");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/parameters",
query: { user: "user-1" },
});
});
it("getMeta includes optional user", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.getMeta("user-1");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/meta",
query: { user: "user-1" },
});
});
it("getInfo and getSite support optional user", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.getInfo();
await dify.getSite("user");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/info",
query: undefined,
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/site",
query: { user: "user" },
});
});
it("messageFeedback builds payload from request object", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.messageFeedback({
messageId: "msg",
user: "user",
rating: "like",
content: "good",
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/messages/msg/feedbacks",
data: { user: "user", rating: "like", content: "good" },
});
});
it("fileUpload appends user to form data", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
const form = { append: vi.fn(), getHeaders: () => ({}) };
await dify.fileUpload(form, "user");
expect(form.append).toHaveBeenCalledWith("user", "user");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/files/upload",
data: form,
});
});
it("filePreview uses arraybuffer response", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.filePreview("file", "user", true);
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/files/file/preview",
query: { user: "user", as_attachment: "true" },
responseType: "arraybuffer",
});
});
it("audioToText appends user and sends form", async () => {
const { client, request } = createHttpClientWithSpies();
const dify = new DifyClient(client);
const form = { append: vi.fn(), getHeaders: () => ({}) };
await dify.audioToText(form, "user");
expect(form.append).toHaveBeenCalledWith("user", "user");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/audio-to-text",
data: form,
});
});
it("textToAudio supports streaming and message id", async () => {
const { client, request, requestBinaryStream } = createHttpClientWithSpies();
const dify = new DifyClient(client);
await dify.textToAudio({
user: "user",
message_id: "msg",
streaming: true,
});
expect(requestBinaryStream).toHaveBeenCalledWith({
method: "POST",
path: "/text-to-audio",
data: {
user: "user",
message_id: "msg",
streaming: true,
},
});
await dify.textToAudio("hello", "user", false, "voice");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/text-to-audio",
data: {
text: "hello",
user: "user",
streaming: false,
voice: "voice",
},
responseType: "arraybuffer",
});
});
it("textToAudio requires text or message id", async () => {
const { client } = createHttpClientWithSpies();
const dify = new DifyClient(client);
expect(() => dify.textToAudio({ user: "user" })).toThrow(ValidationError);
});
});

View File

@ -0,0 +1,284 @@
import type {
BinaryStream,
DifyClientConfig,
DifyResponse,
MessageFeedbackRequest,
QueryParams,
RequestMethod,
TextToAudioRequest,
} from "../types/common";
import { HttpClient } from "../http/client";
import { ensureNonEmptyString, ensureRating } from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import { isFormData } from "../http/form-data";
const toConfig = (
init: string | DifyClientConfig,
baseUrl?: string
): DifyClientConfig => {
if (typeof init === "string") {
return {
apiKey: init,
baseUrl,
};
}
return init;
};
const appendUserToFormData = (form: unknown, user: string): void => {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
if (typeof form.append === "function") {
form.append("user", user);
}
};
export class DifyClient {
protected http: HttpClient;
constructor(config: string | DifyClientConfig | HttpClient, baseUrl?: string) {
if (config instanceof HttpClient) {
this.http = config;
} else {
this.http = new HttpClient(toConfig(config, baseUrl));
}
}
updateApiKey(apiKey: string): void {
ensureNonEmptyString(apiKey, "apiKey");
this.http.updateApiKey(apiKey);
}
getHttpClient(): HttpClient {
return this.http;
}
sendRequest(
method: RequestMethod,
endpoint: string,
data: unknown = null,
params: QueryParams | null = null,
stream = false,
headerParams: Record<string, string> = {}
): ReturnType<HttpClient["requestRaw"]> {
return this.http.requestRaw({
method,
path: endpoint,
data,
query: params ?? undefined,
headers: headerParams,
responseType: stream ? "stream" : "json",
});
}
getRoot(): Promise<DifyResponse<unknown>> {
return this.http.request({
method: "GET",
path: "/",
});
}
getApplicationParameters(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
return this.http.request({
method: "GET",
path: "/parameters",
query: user ? { user } : undefined,
});
}
async getParameters(user?: string): Promise<DifyResponse<unknown>> {
return this.getApplicationParameters(user);
}
getMeta(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
return this.http.request({
method: "GET",
path: "/meta",
query: user ? { user } : undefined,
});
}
messageFeedback(
request: MessageFeedbackRequest
): Promise<DifyResponse<Record<string, unknown>>>;
messageFeedback(
messageId: string,
rating: "like" | "dislike" | null,
user: string,
content?: string
): Promise<DifyResponse<Record<string, unknown>>>;
messageFeedback(
messageIdOrRequest: string | MessageFeedbackRequest,
rating?: "like" | "dislike" | null,
user?: string,
content?: string
): Promise<DifyResponse<Record<string, unknown>>> {
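    // Support both the positional (messageId, rating, user, content) form and a single MessageFeedbackRequest object.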
let messageId: string;
const payload: Record<string, unknown> = {};
if (typeof messageIdOrRequest === "string") {
messageId = messageIdOrRequest;
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(user, "user");
payload.user = user;
if (rating !== undefined && rating !== null) {
ensureRating(rating);
payload.rating = rating;
}
if (content !== undefined) {
payload.content = content;
}
} else {
const request = messageIdOrRequest;
messageId = request.messageId;
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(request.user, "user");
payload.user = request.user;
if (request.rating !== undefined && request.rating !== null) {
ensureRating(request.rating);
payload.rating = request.rating;
}
if (request.content !== undefined) {
payload.content = request.content;
}
}
return this.http.request({
method: "POST",
path: `/messages/${messageId}/feedbacks`,
data: payload,
});
}
getInfo(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
return this.http.request({
method: "GET",
path: "/info",
query: user ? { user } : undefined,
});
}
getSite(user?: string): Promise<DifyResponse<unknown>> {
if (user) {
ensureNonEmptyString(user, "user");
}
return this.http.request({
method: "GET",
path: "/site",
query: user ? { user } : undefined,
});
}
fileUpload(form: unknown, user: string): Promise<DifyResponse<unknown>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for file uploads");
}
ensureNonEmptyString(user, "user");
appendUserToFormData(form, user);
return this.http.request({
method: "POST",
path: "/files/upload",
data: form,
});
}
filePreview(
fileId: string,
user: string,
asAttachment?: boolean
): Promise<DifyResponse<Buffer>> {
ensureNonEmptyString(fileId, "fileId");
ensureNonEmptyString(user, "user");
return this.http.request<Buffer>({
method: "GET",
path: `/files/${fileId}/preview`,
query: {
user,
as_attachment: asAttachment ? "true" : undefined,
},
responseType: "arraybuffer",
});
}
audioToText(form: unknown, user: string): Promise<DifyResponse<unknown>> {
if (!isFormData(form)) {
throw new FileUploadError("FormData is required for audio uploads");
}
ensureNonEmptyString(user, "user");
appendUserToFormData(form, user);
return this.http.request({
method: "POST",
path: "/audio-to-text",
data: form,
});
}
textToAudio(
request: TextToAudioRequest
): Promise<DifyResponse<Buffer> | BinaryStream>;
textToAudio(
text: string,
user: string,
streaming?: boolean,
voice?: string
): Promise<DifyResponse<Buffer> | BinaryStream>;
textToAudio(
textOrRequest: string | TextToAudioRequest,
user?: string,
streaming = false,
voice?: string
): Promise<DifyResponse<Buffer> | BinaryStream> {
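    // Support both the positional (text, user, streaming, voice) form and a single TextToAudioRequest object.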
let payload: TextToAudioRequest;
if (typeof textOrRequest === "string") {
ensureNonEmptyString(textOrRequest, "text");
ensureNonEmptyString(user, "user");
payload = {
text: textOrRequest,
user,
streaming,
};
if (voice) {
payload.voice = voice;
}
} else {
payload = { ...textOrRequest };
ensureNonEmptyString(payload.user, "user");
if (payload.text !== undefined && payload.text !== null) {
ensureNonEmptyString(payload.text, "text");
}
if (payload.message_id !== undefined && payload.message_id !== null) {
ensureNonEmptyString(payload.message_id, "messageId");
}
if (!payload.text && !payload.message_id) {
throw new ValidationError("text or message_id is required");
}
payload.streaming = payload.streaming ?? false;
}
if (payload.streaming) {
return this.http.requestBinaryStream({
method: "POST",
path: "/text-to-audio",
data: payload,
});
}
return this.http.request<Buffer>({
method: "POST",
path: "/text-to-audio",
data: payload,
responseType: "arraybuffer",
});
}
}
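A minimal usage sketch for the base client. The import path, API key, and base URL below are illustrative placeholders, not values taken from this change:

import { DifyClient } from "./base";

// Hypothetical credentials; substitute a real API key and base URL.
const dify = new DifyClient({ apiKey: "app-xxxxxxxx", baseUrl: "https://api.dify.ai/v1" });

// Fetch app parameters for an end user; the user argument is optional.
const params = await dify.getApplicationParameters("end-user-1");
console.log(params);

// new DifyClient("app-xxxxxxxx", "https://api.dify.ai/v1") is the equivalent positional form.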

View File

@ -0,0 +1,239 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { ChatClient } from "./chat";
import { ValidationError } from "../errors/dify-error";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("ChatClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("creates chat messages in blocking mode", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.createChatMessage({ input: "x" }, "hello", "user", false, null);
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/chat-messages",
data: {
inputs: { input: "x" },
query: "hello",
user: "user",
response_mode: "blocking",
files: undefined,
},
});
});
it("creates chat messages in streaming mode", async () => {
const { client, requestStream } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.createChatMessage({
inputs: { input: "x" },
query: "hello",
user: "user",
response_mode: "streaming",
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/chat-messages",
data: {
inputs: { input: "x" },
query: "hello",
user: "user",
response_mode: "streaming",
},
});
});
it("stops chat messages", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.stopChatMessage("task", "user");
await chat.stopMessage("task", "user");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/chat-messages/task/stop",
data: { user: "user" },
});
});
it("gets suggested questions", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.getSuggested("msg", "user");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/messages/msg/suggested",
query: { user: "user" },
});
});
it("submits message feedback", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.messageFeedback("msg", "like", "user", "good");
await chat.messageFeedback({
messageId: "msg",
user: "user",
rating: "dislike",
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/messages/msg/feedbacks",
data: { user: "user", rating: "like", content: "good" },
});
});
it("lists app feedbacks", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.getAppFeedbacks(2, 5);
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/app/feedbacks",
query: { page: 2, limit: 5 },
});
});
it("lists conversations and messages", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.getConversations("user", "last", 10, "-updated_at");
await chat.getConversationMessages("user", "conv", "first", 5);
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/conversations",
query: {
user: "user",
last_id: "last",
limit: 10,
sort_by: "-updated_at",
},
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/messages",
query: {
user: "user",
conversation_id: "conv",
first_id: "first",
limit: 5,
},
});
});
it("renames conversations with optional auto-generate", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.renameConversation("conv", "name", "user", false);
await chat.renameConversation("conv", "user", { autoGenerate: true });
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/conversations/conv/name",
data: { user: "user", auto_generate: false, name: "name" },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/conversations/conv/name",
data: { user: "user", auto_generate: true },
});
});
it("requires name when autoGenerate is false", async () => {
const { client } = createHttpClientWithSpies();
const chat = new ChatClient(client);
expect(() =>
chat.renameConversation("conv", "", "user", false)
).toThrow(ValidationError);
});
it("deletes conversations", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.deleteConversation("conv", "user");
expect(request).toHaveBeenCalledWith({
method: "DELETE",
path: "/conversations/conv",
data: { user: "user" },
});
});
it("manages conversation variables", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.getConversationVariables("conv", "user", "last", 10, "name");
await chat.updateConversationVariable("conv", "var", "user", "value");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/conversations/conv/variables",
query: {
user: "user",
last_id: "last",
limit: 10,
variable_name: "name",
},
});
expect(request).toHaveBeenCalledWith({
method: "PUT",
path: "/conversations/conv/variables/var",
data: { user: "user", value: "value" },
});
});
it("handles annotation APIs", async () => {
const { client, request } = createHttpClientWithSpies();
const chat = new ChatClient(client);
await chat.annotationReplyAction("enable", {
score_threshold: 0.5,
embedding_provider_name: "prov",
embedding_model_name: "model",
});
await chat.getAnnotationReplyStatus("enable", "job");
await chat.listAnnotations({ page: 1, limit: 10, keyword: "k" });
await chat.createAnnotation({ question: "q", answer: "a" });
await chat.updateAnnotation("id", { question: "q", answer: "a" });
await chat.deleteAnnotation("id");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/apps/annotation-reply/enable",
data: {
score_threshold: 0.5,
embedding_provider_name: "prov",
embedding_model_name: "model",
},
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/apps/annotation-reply/enable/status/job",
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/apps/annotations",
query: { page: 1, limit: 10, keyword: "k" },
});
});
});

View File

@ -0,0 +1,377 @@
import { DifyClient } from "./base";
import type { ChatMessageRequest, ChatMessageResponse } from "../types/chat";
import type {
AnnotationCreateRequest,
AnnotationListOptions,
AnnotationReplyActionRequest,
AnnotationResponse,
} from "../types/annotation";
import type {
DifyResponse,
DifyStream,
QueryParams,
} from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalInt,
ensureOptionalString,
} from "./validation";
export class ChatClient extends DifyClient {
createChatMessage(
request: ChatMessageRequest
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputs: Record<string, unknown>,
query: string,
user: string,
stream?: boolean,
conversationId?: string | null,
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>>;
createChatMessage(
inputOrRequest: ChatMessageRequest | Record<string, unknown>,
query?: string,
user?: string,
stream = false,
conversationId?: string | null,
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<ChatMessageResponse> | DifyStream<ChatMessageResponse>> {
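    // A lone ChatMessageRequest (no positional query) carries its own response_mode; otherwise the payload is built from positional arguments.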
let payload: ChatMessageRequest;
let shouldStream = stream;
if (query === undefined && "user" in (inputOrRequest as ChatMessageRequest)) {
payload = inputOrRequest as ChatMessageRequest;
shouldStream = payload.response_mode === "streaming";
} else {
ensureNonEmptyString(query, "query");
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
query,
user,
response_mode: stream ? "streaming" : "blocking",
files,
};
if (conversationId) {
payload.conversation_id = conversationId;
}
}
ensureNonEmptyString(payload.user, "user");
ensureNonEmptyString(payload.query, "query");
if (shouldStream) {
return this.http.requestStream<ChatMessageResponse>({
method: "POST",
path: "/chat-messages",
data: payload,
});
}
return this.http.request<ChatMessageResponse>({
method: "POST",
path: "/chat-messages",
data: payload,
});
}
stopChatMessage(
taskId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<ChatMessageResponse>({
method: "POST",
path: `/chat-messages/${taskId}/stop`,
data: { user },
});
}
stopMessage(
taskId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
return this.stopChatMessage(taskId, user);
}
getSuggested(
messageId: string,
user: string
): Promise<DifyResponse<ChatMessageResponse>> {
ensureNonEmptyString(messageId, "messageId");
ensureNonEmptyString(user, "user");
return this.http.request<ChatMessageResponse>({
method: "GET",
path: `/messages/${messageId}/suggested`,
query: { user },
});
}
// Note: messageFeedback is inherited from DifyClient
getAppFeedbacks(
page?: number,
limit?: number
): Promise<DifyResponse<Record<string, unknown>>> {
ensureOptionalInt(page, "page");
ensureOptionalInt(limit, "limit");
return this.http.request({
method: "GET",
path: "/app/feedbacks",
query: {
page,
limit,
},
});
}
getConversations(
user: string,
lastId?: string | null,
limit?: number | null,
sortByOrPinned?: string | boolean | null
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
ensureOptionalInt(limit, "limit");
const params: QueryParams = { user };
if (lastId) {
params.last_id = lastId;
}
if (limit) {
params.limit = limit;
}
if (typeof sortByOrPinned === "string") {
params.sort_by = sortByOrPinned;
} else if (typeof sortByOrPinned === "boolean") {
params.pinned = sortByOrPinned;
}
return this.http.request({
method: "GET",
path: "/conversations",
query: params,
});
}
getConversationMessages(
user: string,
conversationId: string,
firstId?: string | null,
limit?: number | null
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(user, "user");
ensureNonEmptyString(conversationId, "conversationId");
ensureOptionalString(firstId, "firstId");
ensureOptionalInt(limit, "limit");
const params: QueryParams = { user };
params.conversation_id = conversationId;
if (firstId) {
params.first_id = firstId;
}
if (limit) {
params.limit = limit;
}
return this.http.request({
method: "GET",
path: "/messages",
query: params,
});
}
renameConversation(
conversationId: string,
name: string,
user: string,
autoGenerate?: boolean
): Promise<DifyResponse<Record<string, unknown>>>;
renameConversation(
conversationId: string,
user: string,
options?: { name?: string | null; autoGenerate?: boolean }
): Promise<DifyResponse<Record<string, unknown>>>;
renameConversation(
conversationId: string,
nameOrUser: string,
userOrOptions?: string | { name?: string | null; autoGenerate?: boolean },
autoGenerate?: boolean
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
let name: string | null | undefined;
let user: string;
let resolvedAutoGenerate: boolean;
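    // Distinguish the positional (name, user, autoGenerate) form from the (user, options) form.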
if (typeof userOrOptions === "string" || userOrOptions === undefined) {
name = nameOrUser;
user = userOrOptions ?? "";
resolvedAutoGenerate = autoGenerate ?? false;
} else {
user = nameOrUser;
name = userOrOptions.name;
resolvedAutoGenerate = userOrOptions.autoGenerate ?? false;
}
ensureNonEmptyString(user, "user");
if (!resolvedAutoGenerate) {
ensureNonEmptyString(name, "name");
}
const payload: Record<string, unknown> = {
user,
auto_generate: resolvedAutoGenerate,
};
if (typeof name === "string" && name.trim().length > 0) {
payload.name = name;
}
return this.http.request({
method: "POST",
path: `/conversations/${conversationId}/name`,
data: payload,
});
}
deleteConversation(
conversationId: string,
user: string
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
return this.http.request({
method: "DELETE",
path: `/conversations/${conversationId}`,
data: { user },
});
}
getConversationVariables(
conversationId: string,
user: string,
lastId?: string | null,
limit?: number | null,
variableName?: string | null
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(user, "user");
ensureOptionalString(lastId, "lastId");
ensureOptionalInt(limit, "limit");
ensureOptionalString(variableName, "variableName");
return this.http.request({
method: "GET",
path: `/conversations/${conversationId}/variables`,
query: {
user,
last_id: lastId ?? undefined,
limit: limit ?? undefined,
variable_name: variableName ?? undefined,
},
});
}
updateConversationVariable(
conversationId: string,
variableId: string,
user: string,
value: unknown
): Promise<DifyResponse<Record<string, unknown>>> {
ensureNonEmptyString(conversationId, "conversationId");
ensureNonEmptyString(variableId, "variableId");
ensureNonEmptyString(user, "user");
return this.http.request({
method: "PUT",
path: `/conversations/${conversationId}/variables/${variableId}`,
data: {
user,
value,
},
});
}
annotationReplyAction(
action: "enable" | "disable",
request: AnnotationReplyActionRequest
): Promise<DifyResponse<AnnotationResponse>> {
ensureNonEmptyString(action, "action");
ensureNonEmptyString(request.embedding_provider_name, "embedding_provider_name");
ensureNonEmptyString(request.embedding_model_name, "embedding_model_name");
return this.http.request({
method: "POST",
path: `/apps/annotation-reply/${action}`,
data: request,
});
}
getAnnotationReplyStatus(
action: "enable" | "disable",
jobId: string
): Promise<DifyResponse<AnnotationResponse>> {
ensureNonEmptyString(action, "action");
ensureNonEmptyString(jobId, "jobId");
return this.http.request({
method: "GET",
path: `/apps/annotation-reply/${action}/status/${jobId}`,
});
}
listAnnotations(
options?: AnnotationListOptions
): Promise<DifyResponse<AnnotationResponse>> {
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
ensureOptionalString(options?.keyword, "keyword");
return this.http.request({
method: "GET",
path: "/apps/annotations",
query: {
page: options?.page,
limit: options?.limit,
keyword: options?.keyword ?? undefined,
},
});
}
createAnnotation(
request: AnnotationCreateRequest
): Promise<DifyResponse<AnnotationResponse>> {
ensureNonEmptyString(request.question, "question");
ensureNonEmptyString(request.answer, "answer");
return this.http.request({
method: "POST",
path: "/apps/annotations",
data: request,
});
}
updateAnnotation(
annotationId: string,
request: AnnotationCreateRequest
): Promise<DifyResponse<AnnotationResponse>> {
ensureNonEmptyString(annotationId, "annotationId");
ensureNonEmptyString(request.question, "question");
ensureNonEmptyString(request.answer, "answer");
return this.http.request({
method: "PUT",
path: `/apps/annotations/${annotationId}`,
data: request,
});
}
deleteAnnotation(
annotationId: string
): Promise<DifyResponse<AnnotationResponse>> {
ensureNonEmptyString(annotationId, "annotationId");
return this.http.request({
method: "DELETE",
path: `/apps/annotations/${annotationId}`,
});
}
// Note: audioToText is inherited from DifyClient
}
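A short sketch of typical ChatClient calls; the ids and user names are placeholders:

import { ChatClient } from "./chat";

const chat = new ChatClient("app-xxxxxxxx", "https://api.dify.ai/v1");

// Blocking chat message using the positional form (inputs, query, user, stream, conversationId).
const reply = await chat.createChatMessage({}, "Hello!", "end-user-1", false, null);
console.log(reply);

// Ask the server to auto-generate a conversation title (options-object form).
await chat.renameConversation("conversation-id", "end-user-1", { autoGenerate: true });

// List the user's conversations, newest first.
await chat.getConversations("end-user-1", undefined, 20, "-updated_at");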

View File

@ -0,0 +1,83 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { CompletionClient } from "./completion";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("CompletionClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("creates completion messages in blocking mode", async () => {
const { client, request } = createHttpClientWithSpies();
const completion = new CompletionClient(client);
await completion.createCompletionMessage({ input: "x" }, "user", false);
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/completion-messages",
data: {
inputs: { input: "x" },
user: "user",
files: undefined,
response_mode: "blocking",
},
});
});
it("creates completion messages in streaming mode", async () => {
const { client, requestStream } = createHttpClientWithSpies();
const completion = new CompletionClient(client);
await completion.createCompletionMessage({
inputs: { input: "x" },
user: "user",
response_mode: "streaming",
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/completion-messages",
data: {
inputs: { input: "x" },
user: "user",
response_mode: "streaming",
},
});
});
it("stops completion messages", async () => {
const { client, request } = createHttpClientWithSpies();
const completion = new CompletionClient(client);
await completion.stopCompletionMessage("task", "user");
await completion.stop("task", "user");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/completion-messages/task/stop",
data: { user: "user" },
});
});
it("supports deprecated runWorkflow", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const completion = new CompletionClient(client);
const warn = vi.spyOn(console, "warn").mockImplementation(() => {});
await completion.runWorkflow({ input: "x" }, "user", false);
await completion.runWorkflow({ input: "x" }, "user", true);
expect(warn).toHaveBeenCalled();
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/run",
data: { inputs: { input: "x" }, user: "user", response_mode: "blocking" },
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/run",
data: { inputs: { input: "x" }, user: "user", response_mode: "streaming" },
});
});
});

View File

@ -0,0 +1,111 @@
import { DifyClient } from "./base";
import type { CompletionRequest, CompletionResponse } from "../types/completion";
import type { DifyResponse, DifyStream } from "../types/common";
import { ensureNonEmptyString } from "./validation";
const warned = new Set<string>();
const warnOnce = (message: string): void => {
if (warned.has(message)) {
return;
}
warned.add(message);
console.warn(message);
};
export class CompletionClient extends DifyClient {
createCompletionMessage(
request: CompletionRequest
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputs: Record<string, unknown>,
user: string,
stream?: boolean,
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>>;
createCompletionMessage(
inputOrRequest: CompletionRequest | Record<string, unknown>,
user?: string,
stream = false,
files?: Array<Record<string, unknown>> | null
): Promise<DifyResponse<CompletionResponse> | DifyStream<CompletionResponse>> {
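    // A lone CompletionRequest (no positional user) carries its own response_mode; otherwise the payload is built from positional arguments.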
let payload: CompletionRequest;
let shouldStream = stream;
if (user === undefined && "user" in (inputOrRequest as CompletionRequest)) {
payload = inputOrRequest as CompletionRequest;
shouldStream = payload.response_mode === "streaming";
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
user,
files,
response_mode: stream ? "streaming" : "blocking",
};
}
ensureNonEmptyString(payload.user, "user");
if (shouldStream) {
return this.http.requestStream<CompletionResponse>({
method: "POST",
path: "/completion-messages",
data: payload,
});
}
return this.http.request<CompletionResponse>({
method: "POST",
path: "/completion-messages",
data: payload,
});
}
stopCompletionMessage(
taskId: string,
user: string
): Promise<DifyResponse<CompletionResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<CompletionResponse>({
method: "POST",
path: `/completion-messages/${taskId}/stop`,
data: { user },
});
}
stop(
taskId: string,
user: string
): Promise<DifyResponse<CompletionResponse>> {
return this.stopCompletionMessage(taskId, user);
}
runWorkflow(
inputs: Record<string, unknown>,
user: string,
stream = false
): Promise<DifyResponse<Record<string, unknown>> | DifyStream<Record<string, unknown>>> {
warnOnce(
"CompletionClient.runWorkflow is deprecated. Use WorkflowClient.run instead."
);
ensureNonEmptyString(user, "user");
const payload = {
inputs,
user,
response_mode: stream ? "streaming" : "blocking",
};
if (stream) {
return this.http.requestStream<Record<string, unknown>>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
return this.http.request<Record<string, unknown>>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
}
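A brief CompletionClient sketch; the inputs and user id are placeholders:

import { CompletionClient } from "./completion";

const completion = new CompletionClient("app-xxxxxxxx", "https://api.dify.ai/v1");

// Blocking completion using the positional form (inputs, user, stream).
const result = await completion.createCompletionMessage({ query: "Summarize this text" }, "end-user-1", false);
console.log(result);

// Request-object form; response_mode: "streaming" switches to the streaming transport.
await completion.createCompletionMessage({
  inputs: { query: "Summarize this text" },
  user: "end-user-1",
  response_mode: "streaming",
});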

View File

@ -0,0 +1,249 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { KnowledgeBaseClient } from "./knowledge-base";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("KnowledgeBaseClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("handles dataset and tag operations", async () => {
const { client, request } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
await kb.listDatasets({
page: 1,
limit: 2,
keyword: "k",
includeAll: true,
tagIds: ["t1"],
});
await kb.createDataset({ name: "dataset" });
await kb.getDataset("ds");
await kb.updateDataset("ds", { name: "new" });
await kb.deleteDataset("ds");
await kb.updateDocumentStatus("ds", "enable", ["doc1"]);
await kb.listTags();
await kb.createTag({ name: "tag" });
await kb.updateTag({ tag_id: "tag", name: "name" });
await kb.deleteTag({ tag_id: "tag" });
await kb.bindTags({ tag_ids: ["tag"], target_id: "doc" });
await kb.unbindTags({ tag_id: "tag", target_id: "doc" });
await kb.getDatasetTags("ds");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets",
query: {
page: 1,
limit: 2,
keyword: "k",
include_all: true,
tag_ids: ["t1"],
},
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets",
data: { name: "dataset" },
});
expect(request).toHaveBeenCalledWith({
method: "PATCH",
path: "/datasets/ds",
data: { name: "new" },
});
expect(request).toHaveBeenCalledWith({
method: "PATCH",
path: "/datasets/ds/documents/status/enable",
data: { document_ids: ["doc1"] },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/tags/binding",
data: { tag_ids: ["tag"], target_id: "doc" },
});
});
it("handles document operations", async () => {
const { client, request } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
const form = { append: vi.fn(), getHeaders: () => ({}) };
await kb.createDocumentByText("ds", { name: "doc", text: "text" });
await kb.updateDocumentByText("ds", "doc", { name: "doc2" });
await kb.createDocumentByFile("ds", form);
await kb.updateDocumentByFile("ds", "doc", form);
await kb.listDocuments("ds", { page: 1, limit: 20, keyword: "k" });
await kb.getDocument("ds", "doc", { metadata: "all" });
await kb.deleteDocument("ds", "doc");
await kb.getDocumentIndexingStatus("ds", "batch");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/document/create_by_text",
data: { name: "doc", text: "text" },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/documents/doc/update_by_text",
data: { name: "doc2" },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/document/create_by_file",
data: form,
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/documents",
query: { page: 1, limit: 20, keyword: "k", status: undefined },
});
});
it("handles segments and child chunks", async () => {
const { client, request } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
await kb.createSegments("ds", "doc", { segments: [{ content: "x" }] });
await kb.listSegments("ds", "doc", { page: 1, limit: 10, keyword: "k" });
await kb.getSegment("ds", "doc", "seg");
await kb.updateSegment("ds", "doc", "seg", {
segment: { content: "y" },
});
await kb.deleteSegment("ds", "doc", "seg");
await kb.createChildChunk("ds", "doc", "seg", { content: "c" });
await kb.listChildChunks("ds", "doc", "seg", { page: 1, limit: 10 });
await kb.updateChildChunk("ds", "doc", "seg", "child", {
content: "c2",
});
await kb.deleteChildChunk("ds", "doc", "seg", "child");
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/documents/doc/segments",
data: { segments: [{ content: "x" }] },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/documents/doc/segments/seg",
data: { segment: { content: "y" } },
});
expect(request).toHaveBeenCalledWith({
method: "PATCH",
path: "/datasets/ds/documents/doc/segments/seg/child_chunks/child",
data: { content: "c2" },
});
});
it("handles metadata and retrieval", async () => {
const { client, request } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
await kb.listMetadata("ds");
await kb.createMetadata("ds", { name: "m", type: "string" });
await kb.updateMetadata("ds", "mid", { name: "m2" });
await kb.deleteMetadata("ds", "mid");
await kb.listBuiltInMetadata("ds");
await kb.updateBuiltInMetadata("ds", "enable");
await kb.updateDocumentsMetadata("ds", {
operation_data: [
{ document_id: "doc", metadata_list: [{ id: "m", name: "n" }] },
],
});
await kb.hitTesting("ds", { query: "q" });
await kb.retrieve("ds", { query: "q" });
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/metadata",
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/metadata",
data: { name: "m", type: "string" },
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/hit-testing",
data: { query: "q" },
});
});
it("handles pipeline operations", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const kb = new KnowledgeBaseClient(client);
const warn = vi.spyOn(console, "warn").mockImplementation(() => {});
const form = { append: vi.fn(), getHeaders: () => ({}) };
await kb.listDatasourcePlugins("ds", { isPublished: true });
await kb.runDatasourceNode("ds", "node", {
inputs: { input: "x" },
datasource_type: "custom",
is_published: true,
});
await kb.runPipeline("ds", {
inputs: { input: "x" },
datasource_type: "custom",
datasource_info_list: [],
start_node_id: "start",
is_published: true,
response_mode: "streaming",
});
await kb.runPipeline("ds", {
inputs: { input: "x" },
datasource_type: "custom",
datasource_info_list: [],
start_node_id: "start",
is_published: true,
response_mode: "blocking",
});
await kb.uploadPipelineFile(form);
expect(warn).toHaveBeenCalled();
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/datasets/ds/pipeline/datasource-plugins",
query: { is_published: true },
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/pipeline/datasource/nodes/node/run",
data: {
inputs: { input: "x" },
datasource_type: "custom",
is_published: true,
},
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/pipeline/run",
data: {
inputs: { input: "x" },
datasource_type: "custom",
datasource_info_list: [],
start_node_id: "start",
is_published: true,
response_mode: "streaming",
},
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/ds/pipeline/run",
data: {
inputs: { input: "x" },
datasource_type: "custom",
datasource_info_list: [],
start_node_id: "start",
is_published: true,
response_mode: "blocking",
},
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/datasets/pipeline/file-upload",
data: form,
});
});
});

View File

@ -0,0 +1,706 @@
import { DifyClient } from "./base";
import type {
DatasetCreateRequest,
DatasetListOptions,
DatasetTagBindingRequest,
DatasetTagCreateRequest,
DatasetTagDeleteRequest,
DatasetTagUnbindingRequest,
DatasetTagUpdateRequest,
DatasetUpdateRequest,
DocumentGetOptions,
DocumentListOptions,
DocumentStatusAction,
DocumentTextCreateRequest,
DocumentTextUpdateRequest,
SegmentCreateRequest,
SegmentListOptions,
SegmentUpdateRequest,
ChildChunkCreateRequest,
ChildChunkListOptions,
ChildChunkUpdateRequest,
MetadataCreateRequest,
MetadataOperationRequest,
MetadataUpdateRequest,
HitTestingRequest,
DatasourcePluginListOptions,
DatasourceNodeRunRequest,
PipelineRunRequest,
KnowledgeBaseResponse,
PipelineStreamEvent,
} from "../types/knowledge-base";
import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalBoolean,
ensureOptionalInt,
ensureOptionalString,
ensureStringArray,
} from "./validation";
import { FileUploadError, ValidationError } from "../errors/dify-error";
import { isFormData } from "../http/form-data";
const warned = new Set<string>();
const warnOnce = (message: string): void => {
if (warned.has(message)) {
return;
}
warned.add(message);
console.warn(message);
};
const ensureFormData = (form: unknown, context: string): void => {
if (!isFormData(form)) {
throw new FileUploadError(`${context} requires FormData`);
}
};
const ensureNonEmptyArray = (value: unknown, name: string): void => {
if (!Array.isArray(value) || value.length === 0) {
throw new ValidationError(`${name} must be a non-empty array`);
}
};
const warnPipelineRoutes = (): void => {
warnOnce(
"RAG pipeline endpoints may be unavailable unless the service API registers dataset/rag_pipeline routes."
);
};
export class KnowledgeBaseClient extends DifyClient {
async listDatasets(
options?: DatasetListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
ensureOptionalString(options?.keyword, "keyword");
ensureOptionalBoolean(options?.includeAll, "includeAll");
const query: QueryParams = {
page: options?.page,
limit: options?.limit,
keyword: options?.keyword ?? undefined,
include_all: options?.includeAll ?? undefined,
};
if (options?.tagIds && options.tagIds.length > 0) {
ensureStringArray(options.tagIds, "tagIds");
query.tag_ids = options.tagIds;
}
return this.http.request({
method: "GET",
path: "/datasets",
query,
});
}
async createDataset(
request: DatasetCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(request.name, "name");
return this.http.request({
method: "POST",
path: "/datasets",
data: request,
});
}
async getDataset(datasetId: string): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}`,
});
}
async updateDataset(
datasetId: string,
request: DatasetUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
if (request.name !== undefined && request.name !== null) {
ensureNonEmptyString(request.name, "name");
}
return this.http.request({
method: "PATCH",
path: `/datasets/${datasetId}`,
data: request,
});
}
async deleteDataset(datasetId: string): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "DELETE",
path: `/datasets/${datasetId}`,
});
}
async updateDocumentStatus(
datasetId: string,
action: DocumentStatusAction,
documentIds: string[]
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(action, "action");
ensureStringArray(documentIds, "documentIds");
return this.http.request({
method: "PATCH",
path: `/datasets/${datasetId}/documents/status/${action}`,
data: {
document_ids: documentIds,
},
});
}
async listTags(): Promise<DifyResponse<KnowledgeBaseResponse>> {
return this.http.request({
method: "GET",
path: "/datasets/tags",
});
}
async createTag(
request: DatasetTagCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(request.name, "name");
return this.http.request({
method: "POST",
path: "/datasets/tags",
data: request,
});
}
async updateTag(
request: DatasetTagUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(request.tag_id, "tag_id");
ensureNonEmptyString(request.name, "name");
return this.http.request({
method: "PATCH",
path: "/datasets/tags",
data: request,
});
}
async deleteTag(
request: DatasetTagDeleteRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(request.tag_id, "tag_id");
return this.http.request({
method: "DELETE",
path: "/datasets/tags",
data: request,
});
}
async bindTags(
request: DatasetTagBindingRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureStringArray(request.tag_ids, "tag_ids");
ensureNonEmptyString(request.target_id, "target_id");
return this.http.request({
method: "POST",
path: "/datasets/tags/binding",
data: request,
});
}
async unbindTags(
request: DatasetTagUnbindingRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(request.tag_id, "tag_id");
ensureNonEmptyString(request.target_id, "target_id");
return this.http.request({
method: "POST",
path: "/datasets/tags/unbinding",
data: request,
});
}
async getDatasetTags(
datasetId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/tags`,
});
}
async createDocumentByText(
datasetId: string,
request: DocumentTextCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.name, "name");
ensureNonEmptyString(request.text, "text");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/document/create_by_text`,
data: request,
});
}
async updateDocumentByText(
datasetId: string,
documentId: string,
request: DocumentTextUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
if (request.name !== undefined && request.name !== null) {
ensureNonEmptyString(request.name, "name");
}
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/${documentId}/update_by_text`,
data: request,
});
}
async createDocumentByFile(
datasetId: string,
form: unknown
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureFormData(form, "createDocumentByFile");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/document/create_by_file`,
data: form,
});
}
async updateDocumentByFile(
datasetId: string,
documentId: string,
form: unknown
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureFormData(form, "updateDocumentByFile");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/${documentId}/update_by_file`,
data: form,
});
}
async listDocuments(
datasetId: string,
options?: DocumentListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
ensureOptionalString(options?.keyword, "keyword");
ensureOptionalString(options?.status, "status");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents`,
query: {
page: options?.page,
limit: options?.limit,
keyword: options?.keyword ?? undefined,
status: options?.status ?? undefined,
},
});
}
async getDocument(
datasetId: string,
documentId: string,
options?: DocumentGetOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
if (options?.metadata) {
const allowed = new Set(["all", "only", "without"]);
if (!allowed.has(options.metadata)) {
throw new ValidationError("metadata must be one of all, only, without");
}
}
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents/${documentId}`,
query: {
metadata: options?.metadata ?? undefined,
},
});
}
async deleteDocument(
datasetId: string,
documentId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
return this.http.request({
method: "DELETE",
path: `/datasets/${datasetId}/documents/${documentId}`,
});
}
async getDocumentIndexingStatus(
datasetId: string,
batch: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(batch, "batch");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents/${batch}/indexing-status`,
});
}
async createSegments(
datasetId: string,
documentId: string,
request: SegmentCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyArray(request.segments, "segments");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/${documentId}/segments`,
data: request,
});
}
async listSegments(
datasetId: string,
documentId: string,
options?: SegmentListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
ensureOptionalString(options?.keyword, "keyword");
if (options?.status && options.status.length > 0) {
ensureStringArray(options.status, "status");
}
const query: QueryParams = {
page: options?.page,
limit: options?.limit,
keyword: options?.keyword ?? undefined,
};
if (options?.status && options.status.length > 0) {
query.status = options.status;
}
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents/${documentId}/segments`,
query,
});
}
async getSegment(
datasetId: string,
documentId: string,
segmentId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`,
});
}
async updateSegment(
datasetId: string,
documentId: string,
segmentId: string,
request: SegmentUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`,
data: request,
});
}
async deleteSegment(
datasetId: string,
documentId: string,
segmentId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
return this.http.request({
method: "DELETE",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`,
});
}
async createChildChunk(
datasetId: string,
documentId: string,
segmentId: string,
request: ChildChunkCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
ensureNonEmptyString(request.content, "content");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`,
data: request,
});
}
async listChildChunks(
datasetId: string,
documentId: string,
segmentId: string,
options?: ChildChunkListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
ensureOptionalString(options?.keyword, "keyword");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`,
query: {
page: options?.page,
limit: options?.limit,
keyword: options?.keyword ?? undefined,
},
});
}
async updateChildChunk(
datasetId: string,
documentId: string,
segmentId: string,
childChunkId: string,
request: ChildChunkUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
ensureNonEmptyString(childChunkId, "childChunkId");
ensureNonEmptyString(request.content, "content");
return this.http.request({
method: "PATCH",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`,
data: request,
});
}
async deleteChildChunk(
datasetId: string,
documentId: string,
segmentId: string,
childChunkId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(documentId, "documentId");
ensureNonEmptyString(segmentId, "segmentId");
ensureNonEmptyString(childChunkId, "childChunkId");
return this.http.request({
method: "DELETE",
path: `/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`,
});
}
async listMetadata(
datasetId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/metadata`,
});
}
async createMetadata(
datasetId: string,
request: MetadataCreateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.name, "name");
ensureNonEmptyString(request.type, "type");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/metadata`,
data: request,
});
}
async updateMetadata(
datasetId: string,
metadataId: string,
request: MetadataUpdateRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(metadataId, "metadataId");
ensureNonEmptyString(request.name, "name");
return this.http.request({
method: "PATCH",
path: `/datasets/${datasetId}/metadata/${metadataId}`,
data: request,
});
}
async deleteMetadata(
datasetId: string,
metadataId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(metadataId, "metadataId");
return this.http.request({
method: "DELETE",
path: `/datasets/${datasetId}/metadata/${metadataId}`,
});
}
async listBuiltInMetadata(
datasetId: string
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/metadata/built-in`,
});
}
async updateBuiltInMetadata(
datasetId: string,
action: "enable" | "disable"
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(action, "action");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/metadata/built-in/${action}`,
});
}
async updateDocumentsMetadata(
datasetId: string,
request: MetadataOperationRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyArray(request.operation_data, "operation_data");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/documents/metadata`,
data: request,
});
}
async hitTesting(
datasetId: string,
request: HitTestingRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
if (request.query !== undefined && request.query !== null) {
ensureOptionalString(request.query, "query");
}
if (request.attachment_ids && request.attachment_ids.length > 0) {
ensureStringArray(request.attachment_ids, "attachment_ids");
}
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/hit-testing`,
data: request,
});
}
async retrieve(
datasetId: string,
request: HitTestingRequest
): Promise<DifyResponse<KnowledgeBaseResponse>> {
ensureNonEmptyString(datasetId, "datasetId");
return this.http.request({
method: "POST",
path: `/datasets/${datasetId}/retrieve`,
data: request,
});
}
async listDatasourcePlugins(
datasetId: string,
options?: DatasourcePluginListOptions
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureOptionalBoolean(options?.isPublished, "isPublished");
return this.http.request({
method: "GET",
path: `/datasets/${datasetId}/pipeline/datasource-plugins`,
query: {
is_published: options?.isPublished ?? undefined,
},
});
}
async runDatasourceNode(
datasetId: string,
nodeId: string,
request: DatasourceNodeRunRequest
): Promise<DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(nodeId, "nodeId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
return this.http.requestStream<PipelineStreamEvent>({
method: "POST",
path: `/datasets/${datasetId}/pipeline/datasource/nodes/${nodeId}/run`,
data: request,
});
}
async runPipeline(
datasetId: string,
request: PipelineRunRequest
): Promise<DifyResponse<KnowledgeBaseResponse> | DifyStream<PipelineStreamEvent>> {
warnPipelineRoutes();
ensureNonEmptyString(datasetId, "datasetId");
ensureNonEmptyString(request.datasource_type, "datasource_type");
ensureNonEmptyString(request.start_node_id, "start_node_id");
const shouldStream = request.response_mode === "streaming";
if (shouldStream) {
return this.http.requestStream<PipelineStreamEvent>({
method: "POST",
path: `/datasets/${datasetId}/pipeline/run`,
data: request,
});
}
return this.http.request<KnowledgeBaseResponse>({
method: "POST",
path: `/datasets/${datasetId}/pipeline/run`,
data: request,
});
}
async uploadPipelineFile(
form: unknown
): Promise<DifyResponse<KnowledgeBaseResponse>> {
warnPipelineRoutes();
ensureFormData(form, "uploadPipelineFile");
return this.http.request({
method: "POST",
path: "/datasets/pipeline/file-upload",
data: form,
});
}
}
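A condensed KnowledgeBaseClient sketch; the dataset key, ids, and text are placeholders:

import { KnowledgeBaseClient } from "./knowledge-base";

const kb = new KnowledgeBaseClient("dataset-api-key", "https://api.dify.ai/v1");

// Create a dataset, add a text document to it, then run a retrieval hit test.
const dataset = await kb.createDataset({ name: "product-docs" });
console.log(dataset);

await kb.createDocumentByText("dataset-id", {
  name: "getting-started",
  text: "Install the SDK and configure your API key.",
});

const hits = await kb.hitTesting("dataset-id", { query: "How do I install the SDK?" });
console.log(hits);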

View File

@ -0,0 +1,91 @@
import { describe, expect, it } from "vitest";
import {
ensureNonEmptyString,
ensureOptionalBoolean,
ensureOptionalInt,
ensureOptionalString,
ensureOptionalStringArray,
ensureRating,
ensureStringArray,
validateParams,
} from "./validation";
const makeLongString = (length: number) => "a".repeat(length);
describe("validation utilities", () => {
it("ensureNonEmptyString throws on empty or whitespace", () => {
expect(() => ensureNonEmptyString("", "name")).toThrow();
expect(() => ensureNonEmptyString(" ", "name")).toThrow();
});
it("ensureNonEmptyString throws on overly long strings", () => {
expect(() =>
ensureNonEmptyString(makeLongString(10001), "name")
).toThrow();
});
it("ensureOptionalString ignores undefined and validates when set", () => {
expect(() => ensureOptionalString(undefined, "opt")).not.toThrow();
expect(() => ensureOptionalString("", "opt")).toThrow();
});
it("ensureOptionalString throws on overly long strings", () => {
expect(() => ensureOptionalString(makeLongString(10001), "opt")).toThrow();
});
it("ensureOptionalInt validates integer", () => {
expect(() => ensureOptionalInt(undefined, "limit")).not.toThrow();
expect(() => ensureOptionalInt(1.2, "limit")).toThrow();
});
it("ensureOptionalBoolean validates boolean", () => {
expect(() => ensureOptionalBoolean(undefined, "flag")).not.toThrow();
expect(() => ensureOptionalBoolean("yes", "flag")).toThrow();
});
it("ensureStringArray enforces size and content", () => {
expect(() => ensureStringArray([], "items")).toThrow();
expect(() => ensureStringArray([""], "items")).toThrow();
expect(() =>
ensureStringArray(Array.from({ length: 1001 }, () => "a"), "items")
).toThrow();
expect(() => ensureStringArray(["ok"], "items")).not.toThrow();
});
it("ensureOptionalStringArray ignores undefined", () => {
expect(() => ensureOptionalStringArray(undefined, "tags")).not.toThrow();
});
it("ensureOptionalStringArray validates when set", () => {
expect(() => ensureOptionalStringArray(["valid"], "tags")).not.toThrow();
expect(() => ensureOptionalStringArray([], "tags")).toThrow();
expect(() => ensureOptionalStringArray([""], "tags")).toThrow();
});
it("ensureRating validates allowed values", () => {
expect(() => ensureRating(undefined)).not.toThrow();
expect(() => ensureRating("like")).not.toThrow();
expect(() => ensureRating("bad")).toThrow();
});
it("validateParams enforces generic rules", () => {
expect(() => validateParams({ user: 123 })).toThrow();
expect(() => validateParams({ rating: "bad" })).toThrow();
expect(() => validateParams({ page: 1.1 })).toThrow();
expect(() => validateParams({ files: "bad" })).toThrow();
// Empty strings are allowed for optional params (e.g., keyword: "" means no filter)
expect(() => validateParams({ keyword: "" })).not.toThrow();
expect(() => validateParams({ name: makeLongString(10001) })).toThrow();
expect(() =>
validateParams({ items: Array.from({ length: 1001 }, () => "a") })
).toThrow();
expect(() =>
validateParams({
data: Object.fromEntries(
Array.from({ length: 101 }, (_, i) => [String(i), i])
),
})
).toThrow();
expect(() => validateParams({ user: "u", page: 1 })).not.toThrow();
});
});

View File

@ -0,0 +1,136 @@
import { ValidationError } from "../errors/dify-error";
const MAX_STRING_LENGTH = 10000;
const MAX_LIST_LENGTH = 1000;
const MAX_DICT_LENGTH = 100;
export function ensureNonEmptyString(
value: unknown,
name: string
): asserts value is string {
if (typeof value !== "string" || value.trim().length === 0) {
throw new ValidationError(`${name} must be a non-empty string`);
}
if (value.length > MAX_STRING_LENGTH) {
throw new ValidationError(
`${name} exceeds maximum length of ${MAX_STRING_LENGTH} characters`
);
}
}
/**
* Validates optional string fields that must be non-empty when provided.
* Use this for fields like `name` that are optional but should not be empty strings.
*
* For filter parameters that accept empty strings (e.g., `keyword: ""`),
* use `validateParams` which allows empty strings for optional params.
*/
export function ensureOptionalString(value: unknown, name: string): void {
if (value === undefined || value === null) {
return;
}
if (typeof value !== "string" || value.trim().length === 0) {
throw new ValidationError(`${name} must be a non-empty string when set`);
}
if (value.length > MAX_STRING_LENGTH) {
throw new ValidationError(
`${name} exceeds maximum length of ${MAX_STRING_LENGTH} characters`
);
}
}
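// Usage sketch (comments only): ensureOptionalString rejects empty strings when a value
// is provided, whereas validateParams (below) lets optional filter params be "".
//
//   ensureOptionalString(undefined, "name"); // ok - value not set
//   ensureOptionalString("", "name");        // throws ValidationError
//   validateParams({ keyword: "" });         // ok - empty filter means "no filter"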
export function ensureOptionalInt(value: unknown, name: string): void {
if (value === undefined || value === null) {
return;
}
if (!Number.isInteger(value)) {
throw new ValidationError(`${name} must be an integer when set`);
}
}
export function ensureOptionalBoolean(value: unknown, name: string): void {
if (value === undefined || value === null) {
return;
}
if (typeof value !== "boolean") {
throw new ValidationError(`${name} must be a boolean when set`);
}
}
export function ensureStringArray(value: unknown, name: string): void {
if (!Array.isArray(value) || value.length === 0) {
throw new ValidationError(`${name} must be a non-empty string array`);
}
if (value.length > MAX_LIST_LENGTH) {
throw new ValidationError(
`${name} exceeds maximum size of ${MAX_LIST_LENGTH} items`
);
}
value.forEach((item) => {
if (typeof item !== "string" || item.trim().length === 0) {
throw new ValidationError(`${name} must contain non-empty strings`);
}
});
}
export function ensureOptionalStringArray(value: unknown, name: string): void {
if (value === undefined || value === null) {
return;
}
ensureStringArray(value, name);
}
export function ensureRating(value: unknown): void {
if (value === undefined || value === null) {
return;
}
if (value !== "like" && value !== "dislike") {
throw new ValidationError("rating must be either 'like' or 'dislike'");
}
}
export function validateParams(params: Record<string, unknown>): void {
Object.entries(params).forEach(([key, value]) => {
if (value === undefined || value === null) {
return;
}
// Only check max length for strings; empty strings are allowed for optional params
// Required fields are validated at method level via ensureNonEmptyString
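// For example, validateParams({ name: "a".repeat(10001) }) and
// validateParams({ data: { /* more than 100 keys */ } }) both throw ValidationError,
// while validateParams({ keyword: "" }) passes.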
if (typeof value === "string") {
if (value.length > MAX_STRING_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum length of ${MAX_STRING_LENGTH} characters`
);
}
} else if (Array.isArray(value)) {
if (value.length > MAX_LIST_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum size of ${MAX_LIST_LENGTH} items`
);
}
} else if (typeof value === "object") {
if (Object.keys(value as Record<string, unknown>).length > MAX_DICT_LENGTH) {
throw new ValidationError(
`Parameter '${key}' exceeds maximum size of ${MAX_DICT_LENGTH} items`
);
}
}
if (key === "user" && typeof value !== "string") {
throw new ValidationError(`Parameter '${key}' must be a string`);
}
if (
(key === "page" || key === "limit" || key === "page_size") &&
!Number.isInteger(value)
) {
throw new ValidationError(`Parameter '${key}' must be an integer`);
}
if (key === "files" && !Array.isArray(value) && typeof value !== "object") {
throw new ValidationError(`Parameter '${key}' must be a list or dict`);
}
if (key === "rating" && value !== "like" && value !== "dislike") {
throw new ValidationError(`Parameter '${key}' must be 'like' or 'dislike'`);
}
});
}

View File

@ -0,0 +1,119 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { WorkflowClient } from "./workflow";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("WorkflowClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("runs workflows with blocking and streaming modes", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
await workflow.run({ inputs: { input: "x" }, user: "user" });
await workflow.run({ input: "x" }, "user", true);
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/run",
data: {
inputs: { input: "x" },
user: "user",
},
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/run",
data: {
inputs: { input: "x" },
user: "user",
response_mode: "streaming",
},
});
});
it("runs workflow by id", async () => {
const { client, request, requestStream } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
await workflow.runById("wf", {
inputs: { input: "x" },
user: "user",
response_mode: "blocking",
});
await workflow.runById("wf", {
inputs: { input: "x" },
user: "user",
response_mode: "streaming",
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/wf/run",
data: {
inputs: { input: "x" },
user: "user",
response_mode: "blocking",
},
});
expect(requestStream).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/wf/run",
data: {
inputs: { input: "x" },
user: "user",
response_mode: "streaming",
},
});
});
it("gets run details and stops workflow", async () => {
const { client, request } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
await workflow.getRun("run");
await workflow.stop("task", "user");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/workflows/run/run",
});
expect(request).toHaveBeenCalledWith({
method: "POST",
path: "/workflows/tasks/task/stop",
data: { user: "user" },
});
});
it("fetches workflow logs", async () => {
const { client, request } = createHttpClientWithSpies();
const workflow = new WorkflowClient(client);
// Use createdByEndUserSessionId to filter by user session (backend API parameter)
await workflow.getLogs({
keyword: "k",
status: "succeeded",
startTime: "2024-01-01",
endTime: "2024-01-02",
createdByEndUserSessionId: "session-123",
page: 1,
limit: 20,
});
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/workflows/logs",
query: {
keyword: "k",
status: "succeeded",
created_at__before: "2024-01-02",
created_at__after: "2024-01-01",
created_by_end_user_session_id: "session-123",
created_by_account: undefined,
page: 1,
limit: 20,
},
});
});
});

View File

@ -0,0 +1,165 @@
import { DifyClient } from "./base";
import type { WorkflowRunRequest, WorkflowRunResponse } from "../types/workflow";
import type { DifyResponse, DifyStream, QueryParams } from "../types/common";
import {
ensureNonEmptyString,
ensureOptionalInt,
ensureOptionalString,
} from "./validation";
export class WorkflowClient extends DifyClient {
run(
request: WorkflowRunRequest
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputs: Record<string, unknown>,
user: string,
stream?: boolean
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>>;
run(
inputOrRequest: WorkflowRunRequest | Record<string, unknown>,
user?: string,
stream = false
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>> {
let payload: WorkflowRunRequest;
let shouldStream = stream;
if (user === undefined && "user" in (inputOrRequest as WorkflowRunRequest)) {
payload = inputOrRequest as WorkflowRunRequest;
shouldStream = payload.response_mode === "streaming";
} else {
ensureNonEmptyString(user, "user");
payload = {
inputs: inputOrRequest as Record<string, unknown>,
user,
response_mode: stream ? "streaming" : "blocking",
};
}
ensureNonEmptyString(payload.user, "user");
if (shouldStream) {
return this.http.requestStream<WorkflowRunResponse>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
return this.http.request<WorkflowRunResponse>({
method: "POST",
path: "/workflows/run",
data: payload,
});
}
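// Both call styles are equivalent (illustrative, mirroring the accompanying tests):
//
//   await workflow.run({ inputs: { input: "x" }, user: "user", response_mode: "streaming" });
//   await workflow.run({ input: "x" }, "user", true); // (inputs, user, stream)
//
// When streaming is requested the result is a DifyStream<WorkflowRunResponse>;
// otherwise a DifyResponse<WorkflowRunResponse> is returned.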
runById(
workflowId: string,
request: WorkflowRunRequest
): Promise<DifyResponse<WorkflowRunResponse> | DifyStream<WorkflowRunResponse>> {
ensureNonEmptyString(workflowId, "workflowId");
ensureNonEmptyString(request.user, "user");
if (request.response_mode === "streaming") {
return this.http.requestStream<WorkflowRunResponse>({
method: "POST",
path: `/workflows/${workflowId}/run`,
data: request,
});
}
return this.http.request<WorkflowRunResponse>({
method: "POST",
path: `/workflows/${workflowId}/run`,
data: request,
});
}
getRun(workflowRunId: string): Promise<DifyResponse<WorkflowRunResponse>> {
ensureNonEmptyString(workflowRunId, "workflowRunId");
return this.http.request({
method: "GET",
path: `/workflows/run/${workflowRunId}`,
});
}
stop(
taskId: string,
user: string
): Promise<DifyResponse<WorkflowRunResponse>> {
ensureNonEmptyString(taskId, "taskId");
ensureNonEmptyString(user, "user");
return this.http.request<WorkflowRunResponse>({
method: "POST",
path: `/workflows/tasks/${taskId}/stop`,
data: { user },
});
}
/**
* Get workflow execution logs with filtering options.
*
* Note: the backend API filters by end-user session ID (`created_by_end_user_session_id`)
* or account ID (`created_by_account`), not by a generic `user` parameter; the camelCase
* options `createdByEndUserSessionId` and `createdByAccount` are mapped onto those query params.
*/
getLogs(options?: {
keyword?: string;
status?: string;
createdAtBefore?: string;
createdAtAfter?: string;
createdByEndUserSessionId?: string;
createdByAccount?: string;
page?: number;
limit?: number;
startTime?: string;
endTime?: string;
}): Promise<DifyResponse<Record<string, unknown>>> {
if (options?.keyword) {
ensureOptionalString(options.keyword, "keyword");
}
if (options?.status) {
ensureOptionalString(options.status, "status");
}
if (options?.createdAtBefore) {
ensureOptionalString(options.createdAtBefore, "createdAtBefore");
}
if (options?.createdAtAfter) {
ensureOptionalString(options.createdAtAfter, "createdAtAfter");
}
if (options?.createdByEndUserSessionId) {
ensureOptionalString(
options.createdByEndUserSessionId,
"createdByEndUserSessionId"
);
}
if (options?.createdByAccount) {
ensureOptionalString(options.createdByAccount, "createdByAccount");
}
if (options?.startTime) {
ensureOptionalString(options.startTime, "startTime");
}
if (options?.endTime) {
ensureOptionalString(options.endTime, "endTime");
}
ensureOptionalInt(options?.page, "page");
ensureOptionalInt(options?.limit, "limit");
const createdAtAfter = options?.createdAtAfter ?? options?.startTime;
const createdAtBefore = options?.createdAtBefore ?? options?.endTime;
const query: QueryParams = {
keyword: options?.keyword,
status: options?.status,
created_at__before: createdAtBefore,
created_at__after: createdAtAfter,
created_by_end_user_session_id: options?.createdByEndUserSessionId,
created_by_account: options?.createdByAccount,
page: options?.page,
limit: options?.limit,
};
return this.http.request({
method: "GET",
path: "/workflows/logs",
query,
});
}
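// Illustrative mapping from camelCase options to service API query params:
//
//   await workflow.getLogs({
//     keyword: "k",
//     createdAtAfter: "2024-01-01T00:00:00Z",
//     createdByEndUserSessionId: "session-123",
//     page: 1,
//     limit: 20,
//   });
//   // -> GET /workflows/logs with query:
//   //    { keyword: "k", created_at__after: "2024-01-01T00:00:00Z",
//   //      created_by_end_user_session_id: "session-123", page: 1, limit: 20 }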
}

View File

@ -0,0 +1,21 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import { WorkspaceClient } from "./workspace";
import { createHttpClientWithSpies } from "../../tests/test-utils";
describe("WorkspaceClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("gets models by type", async () => {
const { client, request } = createHttpClientWithSpies();
const workspace = new WorkspaceClient(client);
await workspace.getModelsByType("llm");
expect(request).toHaveBeenCalledWith({
method: "GET",
path: "/workspaces/current/models/model-types/llm",
});
});
});

View File

@ -0,0 +1,16 @@
import { DifyClient } from "./base";
import type { WorkspaceModelType, WorkspaceModelsResponse } from "../types/workspace";
import type { DifyResponse } from "../types/common";
import { ensureNonEmptyString } from "./validation";
export class WorkspaceClient extends DifyClient {
async getModelsByType(
modelType: WorkspaceModelType
): Promise<DifyResponse<WorkspaceModelsResponse>> {
ensureNonEmptyString(modelType, "modelType");
return this.http.request({
method: "GET",
path: `/workspaces/current/models/model-types/${modelType}`,
});
}
}

View File

@ -0,0 +1,37 @@
import { describe, expect, it } from "vitest";
import {
APIError,
AuthenticationError,
DifyError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "./dify-error";
describe("Dify errors", () => {
it("sets base error fields", () => {
const err = new DifyError("base", {
statusCode: 400,
responseBody: { message: "bad" },
requestId: "req",
retryAfter: 1,
});
expect(err.name).toBe("DifyError");
expect(err.statusCode).toBe(400);
expect(err.responseBody).toEqual({ message: "bad" });
expect(err.requestId).toBe("req");
expect(err.retryAfter).toBe(1);
});
it("creates specific error types", () => {
expect(new APIError("api").name).toBe("APIError");
expect(new AuthenticationError("auth").name).toBe("AuthenticationError");
expect(new RateLimitError("rate").name).toBe("RateLimitError");
expect(new ValidationError("val").name).toBe("ValidationError");
expect(new NetworkError("net").name).toBe("NetworkError");
expect(new TimeoutError("timeout").name).toBe("TimeoutError");
expect(new FileUploadError("upload").name).toBe("FileUploadError");
});
});

View File

@ -0,0 +1,75 @@
export type DifyErrorOptions = {
statusCode?: number;
responseBody?: unknown;
requestId?: string;
retryAfter?: number;
cause?: unknown;
};
export class DifyError extends Error {
statusCode?: number;
responseBody?: unknown;
requestId?: string;
retryAfter?: number;
constructor(message: string, options: DifyErrorOptions = {}) {
super(message);
this.name = "DifyError";
this.statusCode = options.statusCode;
this.responseBody = options.responseBody;
this.requestId = options.requestId;
this.retryAfter = options.retryAfter;
if (options.cause) {
(this as { cause?: unknown }).cause = options.cause;
}
}
}
export class APIError extends DifyError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "APIError";
}
}
export class AuthenticationError extends APIError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "AuthenticationError";
}
}
export class RateLimitError extends APIError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "RateLimitError";
}
}
export class ValidationError extends APIError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "ValidationError";
}
}
export class NetworkError extends DifyError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "NetworkError";
}
}
export class TimeoutError extends DifyError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "TimeoutError";
}
}
export class FileUploadError extends DifyError {
constructor(message: string, options: DifyErrorOptions = {}) {
super(message, options);
this.name = "FileUploadError";
}
}
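// Consumers can branch on the error subclass (sketch, assuming a `client` instance):
//
//   try {
//     await client.request({ method: "GET", path: "/meta" });
//   } catch (err) {
//     if (err instanceof RateLimitError) {
//       console.warn(`rate limited, retry after ${err.retryAfter ?? "?"}s`);
//     } else if (err instanceof AuthenticationError) {
//       // invalid or expired API key
//     }
//   }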

View File

@ -0,0 +1,304 @@
import axios from "axios";
import { Readable } from "node:stream";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { HttpClient } from "./client";
describe("HttpClient", () => {
beforeEach(() => {
vi.restoreAllMocks();
});
it("builds requests with auth headers and JSON content type", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: { ok: true },
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const response = await client.request({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(response.requestId).toBe("req");
const config = mockRequest.mock.calls[0][0];
expect(config.headers.Authorization).toBe("Bearer test");
expect(config.headers["Content-Type"]).toBe("application/json");
expect(config.responseType).toBe("json");
});
it("serializes array query params", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: "ok",
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
await client.requestRaw({
method: "GET",
path: "/datasets",
query: { tag_ids: ["a", "b"], limit: 2 },
});
const config = mockRequest.mock.calls[0][0];
const queryString = config.paramsSerializer.serialize({
tag_ids: ["a", "b"],
limit: 2,
});
expect(queryString).toBe("tag_ids=a&tag_ids=b&limit=2");
});
it("returns SSE stream helpers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: Readable.from(["data: {\"text\":\"hi\"}\n\n"]),
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestStream({
method: "POST",
path: "/chat-messages",
data: { user: "u" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
await expect(stream.toText()).resolves.toBe("hi");
});
it("returns binary stream helpers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: Readable.from(["chunk"]),
headers: { "x-request-id": "req" },
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const stream = await client.requestBinaryStream({
method: "POST",
path: "/text-to-audio",
data: { user: "u", text: "hi" },
});
expect(stream.status).toBe(200);
expect(stream.requestId).toBe("req");
});
it("respects form-data headers", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: "ok",
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
const form = {
append: () => {},
getHeaders: () => ({ "content-type": "multipart/form-data; boundary=abc" }),
};
await client.requestRaw({
method: "POST",
path: "/files/upload",
data: form,
});
const config = mockRequest.mock.calls[0][0];
expect(config.headers["content-type"]).toBe(
"multipart/form-data; boundary=abc"
);
expect(config.headers["Content-Type"]).toBeUndefined();
});
it("maps 401 and 429 errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 401,
data: { message: "unauthorized" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(AuthenticationError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 429,
data: { message: "rate" },
headers: { "retry-after": "2" },
},
});
const error = await client
.requestRaw({ method: "GET", path: "/meta" })
.catch((err) => err);
expect(error).toBeInstanceOf(RateLimitError);
expect(error.retryAfter).toBe(2);
});
it("maps validation and upload errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: {
status: 422,
data: { message: "invalid" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "POST", path: "/chat-messages", data: { user: "u" } })
).rejects.toBeInstanceOf(ValidationError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
config: { url: "/files/upload" },
response: {
status: 400,
data: { message: "bad upload" },
headers: {},
},
});
await expect(
client.requestRaw({ method: "POST", path: "/files/upload", data: { user: "u" } })
).rejects.toBeInstanceOf(FileUploadError);
});
it("maps timeout and network errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(TimeoutError);
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
message: "network",
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(NetworkError);
});
it("retries on timeout errors", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 1, retryDelay: 0 });
mockRequest
.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
})
.mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(mockRequest).toHaveBeenCalledTimes(2);
});
it("validates query parameters before request", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test" });
await expect(
client.requestRaw({ method: "GET", path: "/meta", query: { user: 1 } })
).rejects.toBeInstanceOf(ValidationError);
expect(mockRequest).not.toHaveBeenCalled();
});
it("returns APIError for other http failures", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", maxRetries: 0 });
mockRequest.mockRejectedValueOnce({
isAxiosError: true,
response: { status: 500, data: { message: "server" }, headers: {} },
});
await expect(
client.requestRaw({ method: "GET", path: "/meta" })
).rejects.toBeInstanceOf(APIError);
});
it("logs requests and responses when enableLogging is true", async () => {
const mockRequest = vi.fn().mockResolvedValue({
status: 200,
data: { ok: true },
headers: {},
});
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({ apiKey: "test", enableLogging: true });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node response 200 GET")
);
consoleInfo.mockRestore();
});
it("logs retry attempts when enableLogging is true", async () => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const consoleInfo = vi.spyOn(console, "info").mockImplementation(() => {});
const client = new HttpClient({
apiKey: "test",
maxRetries: 1,
retryDelay: 0,
enableLogging: true,
});
mockRequest
.mockRejectedValueOnce({
isAxiosError: true,
code: "ECONNABORTED",
message: "timeout",
})
.mockResolvedValueOnce({ status: 200, data: "ok", headers: {} });
await client.requestRaw({ method: "GET", path: "/meta" });
expect(consoleInfo).toHaveBeenCalledWith(
expect.stringContaining("dify-client-node retry")
);
consoleInfo.mockRestore();
});
});

View File

@ -0,0 +1,368 @@
import axios from "axios";
import type {
AxiosError,
AxiosInstance,
AxiosRequestConfig,
AxiosResponse,
} from "axios";
import type { Readable } from "node:stream";
import {
DEFAULT_BASE_URL,
DEFAULT_MAX_RETRIES,
DEFAULT_RETRY_DELAY_SECONDS,
DEFAULT_TIMEOUT_SECONDS,
} from "../types/common";
import type {
DifyClientConfig,
DifyResponse,
Headers,
QueryParams,
RequestMethod,
} from "../types/common";
import type { DifyError } from "../errors/dify-error";
import {
APIError,
AuthenticationError,
FileUploadError,
NetworkError,
RateLimitError,
TimeoutError,
ValidationError,
} from "../errors/dify-error";
import { getFormDataHeaders, isFormData } from "./form-data";
import { createBinaryStream, createSseStream } from "./sse";
import { getRetryDelayMs, shouldRetry, sleep } from "./retry";
import { validateParams } from "../client/validation";
const DEFAULT_USER_AGENT = "dify-client-node";
export type RequestOptions = {
method: RequestMethod;
path: string;
query?: QueryParams;
data?: unknown;
headers?: Headers;
responseType?: AxiosRequestConfig["responseType"];
};
export type HttpClientSettings = Required<
Omit<DifyClientConfig, "apiKey">
> & {
apiKey: string;
};
const normalizeSettings = (config: DifyClientConfig): HttpClientSettings => ({
apiKey: config.apiKey,
baseUrl: config.baseUrl ?? DEFAULT_BASE_URL,
timeout: config.timeout ?? DEFAULT_TIMEOUT_SECONDS,
maxRetries: config.maxRetries ?? DEFAULT_MAX_RETRIES,
retryDelay: config.retryDelay ?? DEFAULT_RETRY_DELAY_SECONDS,
enableLogging: config.enableLogging ?? false,
});
const normalizeHeaders = (headers: AxiosResponse["headers"]): Headers => {
const result: Headers = {};
if (!headers) {
return result;
}
Object.entries(headers).forEach(([key, value]) => {
if (Array.isArray(value)) {
result[key.toLowerCase()] = value.join(", ");
} else if (typeof value === "string") {
result[key.toLowerCase()] = value;
} else if (typeof value === "number") {
result[key.toLowerCase()] = value.toString();
}
});
return result;
};
const resolveRequestId = (headers: Headers): string | undefined =>
headers["x-request-id"] ?? headers["x-requestid"];
const buildRequestUrl = (baseUrl: string, path: string): string => {
const trimmed = baseUrl.replace(/\/+$/, "");
return `${trimmed}${path}`;
};
const buildQueryString = (params?: QueryParams): string => {
if (!params) {
return "";
}
const searchParams = new URLSearchParams();
Object.entries(params).forEach(([key, value]) => {
if (value === undefined || value === null) {
return;
}
if (Array.isArray(value)) {
value.forEach((item) => {
searchParams.append(key, String(item));
});
return;
}
searchParams.append(key, String(value));
});
return searchParams.toString();
};
const parseRetryAfterSeconds = (headerValue?: string): number | undefined => {
if (!headerValue) {
return undefined;
}
const asNumber = Number.parseInt(headerValue, 10);
if (!Number.isNaN(asNumber)) {
return asNumber;
}
const asDate = Date.parse(headerValue);
if (!Number.isNaN(asDate)) {
const diff = asDate - Date.now();
return diff > 0 ? Math.ceil(diff / 1000) : 0;
}
return undefined;
};
const isReadableStream = (value: unknown): value is Readable => {
if (!value || typeof value !== "object") {
return false;
}
return typeof (value as { pipe?: unknown }).pipe === "function";
};
const isUploadLikeRequest = (config?: AxiosRequestConfig): boolean => {
const url = (config?.url ?? "").toLowerCase();
if (!url) {
return false;
}
return (
url.includes("upload") ||
url.includes("/files/") ||
url.includes("audio-to-text") ||
url.includes("create_by_file") ||
url.includes("update_by_file")
);
};
const resolveErrorMessage = (status: number, responseBody: unknown): string => {
if (typeof responseBody === "string" && responseBody.trim().length > 0) {
return responseBody;
}
if (
responseBody &&
typeof responseBody === "object" &&
"message" in responseBody
) {
const message = (responseBody as Record<string, unknown>).message;
if (typeof message === "string" && message.trim().length > 0) {
return message;
}
}
return `Request failed with status code ${status}`;
};
const mapAxiosError = (error: unknown): DifyError => {
if (axios.isAxiosError(error)) {
const axiosError = error as AxiosError;
if (axiosError.response) {
const status = axiosError.response.status;
const headers = normalizeHeaders(axiosError.response.headers);
const requestId = resolveRequestId(headers);
const responseBody = axiosError.response.data;
const message = resolveErrorMessage(status, responseBody);
if (status === 401) {
return new AuthenticationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (status === 429) {
const retryAfter = parseRetryAfterSeconds(headers["retry-after"]);
return new RateLimitError(message, {
statusCode: status,
responseBody,
requestId,
retryAfter,
});
}
if (status === 422) {
return new ValidationError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (status === 400) {
if (isUploadLikeRequest(axiosError.config)) {
return new FileUploadError(message, {
statusCode: status,
responseBody,
requestId,
});
}
}
return new APIError(message, {
statusCode: status,
responseBody,
requestId,
});
}
if (axiosError.code === "ECONNABORTED") {
return new TimeoutError("Request timed out", { cause: axiosError });
}
return new NetworkError(axiosError.message, { cause: axiosError });
}
if (error instanceof Error) {
return new NetworkError(error.message, { cause: error });
}
return new NetworkError("Unexpected network error", { cause: error });
};
export class HttpClient {
private axios: AxiosInstance;
private settings: HttpClientSettings;
constructor(config: DifyClientConfig) {
this.settings = normalizeSettings(config);
this.axios = axios.create({
baseURL: this.settings.baseUrl,
timeout: this.settings.timeout * 1000,
});
}
updateApiKey(apiKey: string): void {
this.settings.apiKey = apiKey;
}
getSettings(): HttpClientSettings {
return { ...this.settings };
}
async request<T>(options: RequestOptions): Promise<DifyResponse<T>> {
const response = await this.requestRaw(options);
const headers = normalizeHeaders(response.headers);
return {
data: response.data as T,
status: response.status,
headers,
requestId: resolveRequestId(headers),
};
}
async requestStream<T>(options: RequestOptions) {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
const headers = normalizeHeaders(response.headers);
return createSseStream<T>(response.data as Readable, {
status: response.status,
headers,
requestId: resolveRequestId(headers),
});
}
async requestBinaryStream(options: RequestOptions) {
const response = await this.requestRaw({
...options,
responseType: "stream",
});
const headers = normalizeHeaders(response.headers);
return createBinaryStream(response.data as Readable, {
status: response.status,
headers,
requestId: resolveRequestId(headers),
});
}
async requestRaw(options: RequestOptions): Promise<AxiosResponse> {
const { method, path, query, data, headers, responseType } = options;
const { apiKey, enableLogging, maxRetries, retryDelay, timeout } =
this.settings;
if (query) {
validateParams(query as Record<string, unknown>);
}
if (
data &&
typeof data === "object" &&
!Array.isArray(data) &&
!isFormData(data) &&
!isReadableStream(data)
) {
validateParams(data as Record<string, unknown>);
}
const requestHeaders: Headers = {
Authorization: `Bearer ${apiKey}`,
...headers,
};
if (
typeof process !== "undefined" &&
!!process.versions?.node &&
!requestHeaders["User-Agent"] &&
!requestHeaders["user-agent"]
) {
requestHeaders["User-Agent"] = DEFAULT_USER_AGENT;
}
if (isFormData(data)) {
Object.assign(requestHeaders, getFormDataHeaders(data));
} else if (data && method !== "GET") {
requestHeaders["Content-Type"] = "application/json";
}
const url = buildRequestUrl(this.settings.baseUrl, path);
if (enableLogging) {
console.info(`dify-client-node request ${method} ${url}`);
}
const axiosConfig: AxiosRequestConfig = {
method,
url: path,
params: query,
paramsSerializer: {
serialize: (params) => buildQueryString(params as QueryParams),
},
headers: requestHeaders,
responseType: responseType ?? "json",
timeout: timeout * 1000,
};
if (method !== "GET" && data !== undefined) {
axiosConfig.data = data;
}
let attempt = 0;
// `attempt` counts retries that have already been made (zero-based).
// Total attempts = 1 (initial request) + maxRetries retries;
// e.g. maxRetries = 3 allows the initial attempt plus up to three retries.
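// For example, with maxRetries = 3 and retryDelay = 1 second, a request that keeps
// failing with retryable errors (timeout, network, rate limit) is sent up to 4 times,
// with backoff of roughly 1s, 2s and 4s plus jitter between attempts; a RateLimitError's
// Retry-After value, when present, overrides the computed delay.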
while (true) {
try {
const response = await this.axios.request(axiosConfig);
if (enableLogging) {
console.info(
`dify-client-node response ${response.status} ${method} ${url}`
);
}
return response;
} catch (error) {
const mapped = mapAxiosError(error);
if (!shouldRetry(mapped, attempt, maxRetries)) {
throw mapped;
}
const retryAfterSeconds =
mapped instanceof RateLimitError ? mapped.retryAfter : undefined;
const delay = getRetryDelayMs(attempt + 1, retryDelay, retryAfterSeconds);
if (enableLogging) {
console.info(
`dify-client-node retry ${attempt + 1} in ${delay}ms for ${method} ${url}`
);
}
attempt += 1;
await sleep(delay);
}
}
}
}

View File

@ -0,0 +1,23 @@
import { describe, expect, it } from "vitest";
import { getFormDataHeaders, isFormData } from "./form-data";
describe("form-data helpers", () => {
it("detects form-data like objects", () => {
const formLike = {
append: () => {},
getHeaders: () => ({ "content-type": "multipart/form-data" }),
};
expect(isFormData(formLike)).toBe(true);
expect(isFormData({})).toBe(false);
});
it("returns headers from form-data", () => {
const formLike = {
append: () => {},
getHeaders: () => ({ "content-type": "multipart/form-data" }),
};
expect(getFormDataHeaders(formLike)).toEqual({
"content-type": "multipart/form-data",
});
});
});

View File

@ -0,0 +1,31 @@
import type { Headers } from "../types/common";
export type FormDataLike = {
append: (...args: unknown[]) => void;
getHeaders?: () => Headers;
constructor?: { name?: string };
};
export const isFormData = (value: unknown): value is FormDataLike => {
if (!value || typeof value !== "object") {
return false;
}
if (typeof FormData !== "undefined" && value instanceof FormData) {
return true;
}
const candidate = value as FormDataLike;
if (typeof candidate.append !== "function") {
return false;
}
if (typeof candidate.getHeaders === "function") {
return true;
}
return candidate.constructor?.name === "FormData";
};
export const getFormDataHeaders = (form: FormDataLike): Headers => {
if (typeof form.getHeaders === "function") {
return form.getHeaders();
}
return {};
};

View File

@ -0,0 +1,38 @@
import { describe, expect, it } from "vitest";
import { getRetryDelayMs, shouldRetry } from "./retry";
import { NetworkError, RateLimitError, TimeoutError } from "../errors/dify-error";
const withMockedRandom = (value: number, fn: () => void) => {
const original = Math.random;
Math.random = () => value;
try {
fn();
} finally {
Math.random = original;
}
};
describe("retry helpers", () => {
it("getRetryDelayMs honors retry-after header", () => {
expect(getRetryDelayMs(1, 1, 3)).toBe(3000);
});
it("getRetryDelayMs uses exponential backoff with jitter", () => {
withMockedRandom(0, () => {
expect(getRetryDelayMs(1, 1)).toBe(1000);
expect(getRetryDelayMs(2, 1)).toBe(2000);
expect(getRetryDelayMs(3, 1)).toBe(4000);
});
});
it("shouldRetry respects max retries", () => {
expect(shouldRetry(new TimeoutError("timeout"), 3, 3)).toBe(false);
});
it("shouldRetry retries on network, timeout, and rate limit", () => {
expect(shouldRetry(new TimeoutError("timeout"), 0, 3)).toBe(true);
expect(shouldRetry(new NetworkError("network"), 0, 3)).toBe(true);
expect(shouldRetry(new RateLimitError("limit"), 0, 3)).toBe(true);
expect(shouldRetry(new Error("other"), 0, 3)).toBe(false);
});
});

View File

@ -0,0 +1,40 @@
import { RateLimitError, NetworkError, TimeoutError } from "../errors/dify-error";
export const sleep = (ms: number): Promise<void> =>
new Promise((resolve) => {
setTimeout(resolve, ms);
});
export const getRetryDelayMs = (
attempt: number,
retryDelaySeconds: number,
retryAfterSeconds?: number
): number => {
if (retryAfterSeconds && retryAfterSeconds > 0) {
return retryAfterSeconds * 1000;
}
const base = retryDelaySeconds * 1000;
const exponential = base * Math.pow(2, Math.max(0, attempt - 1));
const jitter = Math.random() * base;
return exponential + jitter;
};
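// e.g. with retryDelaySeconds = 1: attempt 1 -> 1000ms, attempt 2 -> 2000ms, attempt 3 -> 4000ms,
// each plus up to 1000ms of random jitter; an explicit retryAfterSeconds always takes precedence.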
export const shouldRetry = (
error: unknown,
attempt: number,
maxRetries: number
): boolean => {
if (attempt >= maxRetries) {
return false;
}
if (error instanceof TimeoutError) {
return true;
}
if (error instanceof NetworkError) {
return true;
}
if (error instanceof RateLimitError) {
return true;
}
return false;
};

View File

@ -0,0 +1,76 @@
import { Readable } from "node:stream";
import { describe, expect, it } from "vitest";
import { createBinaryStream, createSseStream, parseSseStream } from "./sse";
describe("sse parsing", () => {
it("parses event and data lines", async () => {
const stream = Readable.from([
"event: message\n",
"data: {\"answer\":\"hi\"}\n",
"\n",
]);
const events = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
expect(events).toHaveLength(1);
expect(events[0].event).toBe("message");
expect(events[0].data).toEqual({ answer: "hi" });
});
it("handles multi-line data payloads", async () => {
const stream = Readable.from(["data: line1\n", "data: line2\n", "\n"]);
const events = [];
for await (const event of parseSseStream(stream)) {
events.push(event);
}
expect(events[0].raw).toBe("line1\nline2");
expect(events[0].data).toBe("line1\nline2");
});
it("createSseStream exposes toText", async () => {
const stream = Readable.from([
"data: {\"answer\":\"hello\"}\n\n",
"data: {\"delta\":\" world\"}\n\n",
]);
const sseStream = createSseStream(stream, {
status: 200,
headers: {},
requestId: "req",
});
const text = await sseStream.toText();
expect(text).toBe("hello world");
});
it("toText extracts text from string data", async () => {
const stream = Readable.from(["data: plain text\n\n"]);
const sseStream = createSseStream(stream, { status: 200, headers: {} });
const text = await sseStream.toText();
expect(text).toBe("plain text");
});
it("toText extracts text field from object", async () => {
const stream = Readable.from(['data: {"text":"hello"}\n\n']);
const sseStream = createSseStream(stream, { status: 200, headers: {} });
const text = await sseStream.toText();
expect(text).toBe("hello");
});
it("toText returns empty for invalid data", async () => {
const stream = Readable.from(["data: null\n\n", "data: 123\n\n"]);
const sseStream = createSseStream(stream, { status: 200, headers: {} });
const text = await sseStream.toText();
expect(text).toBe("");
});
it("createBinaryStream exposes metadata", () => {
const stream = Readable.from(["chunk"]);
const binary = createBinaryStream(stream, {
status: 200,
headers: { "content-type": "audio/mpeg" },
requestId: "req",
});
expect(binary.status).toBe(200);
expect(binary.headers["content-type"]).toBe("audio/mpeg");
});
});

View File

@ -0,0 +1,133 @@
import type { Readable } from "node:stream";
import { StringDecoder } from "node:string_decoder";
import type { BinaryStream, DifyStream, Headers, StreamEvent } from "../types/common";
const readLines = async function* (stream: Readable): AsyncIterable<string> {
const decoder = new StringDecoder("utf8");
let buffered = "";
for await (const chunk of stream) {
buffered += decoder.write(chunk as Buffer);
let index = buffered.indexOf("\n");
while (index >= 0) {
let line = buffered.slice(0, index);
buffered = buffered.slice(index + 1);
if (line.endsWith("\r")) {
line = line.slice(0, -1);
}
yield line;
index = buffered.indexOf("\n");
}
}
buffered += decoder.end();
if (buffered) {
yield buffered;
}
};
const parseMaybeJson = (value: string): unknown => {
if (!value) {
return null;
}
try {
return JSON.parse(value);
} catch {
return value;
}
};
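/**
 * Minimal sketch of the wire format parseSseStream handles (taken from the tests):
 *
 *   event: message
 *   data: {"answer":"hi"}
 *
 * A blank line terminates an event; lines starting with ":" are comments and are
 * skipped; consecutive `data:` lines are joined with "\n" before JSON parsing is
 * attempted (falling back to the raw string when the payload is not valid JSON).
 */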
export const parseSseStream = async function* <T>(
stream: Readable
): AsyncIterable<StreamEvent<T>> {
let eventName: string | undefined;
const dataLines: string[] = [];
const emitEvent = function* (): Iterable<StreamEvent<T>> {
if (!eventName && dataLines.length === 0) {
return;
}
const raw = dataLines.join("\n");
const parsed = parseMaybeJson(raw) as T | string | null;
yield {
event: eventName,
data: parsed,
raw,
};
eventName = undefined;
dataLines.length = 0;
};
for await (const line of readLines(stream)) {
if (!line) {
yield* emitEvent();
continue;
}
if (line.startsWith(":")) {
continue;
}
if (line.startsWith("event:")) {
eventName = line.slice("event:".length).trim();
continue;
}
if (line.startsWith("data:")) {
dataLines.push(line.slice("data:".length).trimStart());
continue;
}
}
yield* emitEvent();
};
const extractTextFromEvent = (data: unknown): string => {
if (typeof data === "string") {
return data;
}
if (!data || typeof data !== "object") {
return "";
}
const record = data as Record<string, unknown>;
if (typeof record.answer === "string") {
return record.answer;
}
if (typeof record.text === "string") {
return record.text;
}
if (typeof record.delta === "string") {
return record.delta;
}
return "";
};
export const createSseStream = <T>(
stream: Readable,
meta: { status: number; headers: Headers; requestId?: string }
): DifyStream<T> => {
const iterator = parseSseStream<T>(stream)[Symbol.asyncIterator]();
const iterable = {
[Symbol.asyncIterator]: () => iterator,
data: stream,
status: meta.status,
headers: meta.headers,
requestId: meta.requestId,
toReadable: () => stream,
toText: async () => {
let text = "";
for await (const event of iterable) {
text += extractTextFromEvent(event.data);
}
return text;
},
} satisfies DifyStream<T>;
return iterable;
};
export const createBinaryStream = (
stream: Readable,
meta: { status: number; headers: Headers; requestId?: string }
): BinaryStream => ({
data: stream,
status: meta.status,
headers: meta.headers,
requestId: meta.requestId,
toReadable: () => stream,
});

View File

@ -0,0 +1,227 @@
import { afterAll, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { ChatClient, DifyClient, WorkflowClient, BASE_URL, routes } from "./index";
import axios from "axios";
const mockRequest = vi.fn();
const setupAxiosMock = () => {
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
};
beforeEach(() => {
vi.restoreAllMocks();
mockRequest.mockReset();
setupAxiosMock();
});
describe("Client", () => {
it("should create a client", () => {
new DifyClient("test");
expect(axios.create).toHaveBeenCalledWith({
baseURL: BASE_URL,
timeout: 60000,
});
});
it("should update the api key", () => {
const difyClient = new DifyClient("test");
difyClient.updateApiKey("test2");
expect(difyClient.getHttpClient().getSettings().apiKey).toBe("test2");
});
});
describe("Send Requests", () => {
it("should make a successful request to the application parameter", async () => {
const difyClient = new DifyClient("test");
const method = "GET";
const endpoint = routes.application.url();
mockRequest.mockResolvedValue({
status: 200,
data: "response",
headers: {},
});
await difyClient.sendRequest(method, endpoint);
const requestConfig = mockRequest.mock.calls[0][0];
expect(requestConfig).toMatchObject({
method,
url: endpoint,
params: undefined,
responseType: "json",
timeout: 60000,
});
expect(requestConfig.headers.Authorization).toBe("Bearer test");
});
it("uses the getMeta route configuration", async () => {
const difyClient = new DifyClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await difyClient.getMeta("end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getMeta.method,
url: routes.getMeta.url(),
params: { user: "end-user" },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});
describe("File uploads", () => {
const OriginalFormData = globalThis.FormData;
beforeAll(() => {
globalThis.FormData = class FormDataMock {
append() {}
getHeaders() {
return {
"content-type": "multipart/form-data; boundary=test",
};
}
};
});
afterAll(() => {
globalThis.FormData = OriginalFormData;
});
it("does not override multipart boundary headers for FormData", async () => {
const difyClient = new DifyClient("test");
const form = new globalThis.FormData();
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await difyClient.fileUpload(form, "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.fileUpload.method,
url: routes.fileUpload.url(),
params: undefined,
headers: expect.objectContaining({
Authorization: "Bearer test",
"content-type": "multipart/form-data; boundary=test",
}),
responseType: "json",
timeout: 60000,
data: form,
}));
});
});
describe("Workflow client", () => {
it("uses tasks stop path for workflow stop", async () => {
const workflowClient = new WorkflowClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "stopped", headers: {} });
await workflowClient.stop("task-1", "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.stopWorkflow.method,
url: routes.stopWorkflow.url("task-1"),
params: undefined,
headers: expect.objectContaining({
Authorization: "Bearer test",
"Content-Type": "application/json",
}),
responseType: "json",
timeout: 60000,
data: { user: "end-user" },
}));
});
it("maps workflow log filters to service api params", async () => {
const workflowClient = new WorkflowClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await workflowClient.getLogs({
createdAtAfter: "2024-01-01T00:00:00Z",
createdAtBefore: "2024-01-02T00:00:00Z",
createdByEndUserSessionId: "sess-1",
createdByAccount: "acc-1",
page: 2,
limit: 10,
});
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: "GET",
url: "/workflows/logs",
params: {
created_at__after: "2024-01-01T00:00:00Z",
created_at__before: "2024-01-02T00:00:00Z",
created_by_end_user_session_id: "sess-1",
created_by_account: "acc-1",
page: 2,
limit: 10,
},
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});
describe("Chat client", () => {
it("places user in query for suggested messages", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getSuggested("msg-1", "end-user");
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getSuggested.method,
url: routes.getSuggested.url("msg-1"),
params: { user: "end-user" },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
it("uses last_id when listing conversations", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getConversations("end-user", "last-1", 10);
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: routes.getConversations.method,
url: routes.getConversations.url(),
params: { user: "end-user", last_id: "last-1", limit: 10 },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
it("lists app feedbacks without user params", async () => {
const chatClient = new ChatClient("test");
mockRequest.mockResolvedValue({ status: 200, data: "ok", headers: {} });
await chatClient.getAppFeedbacks(1, 20);
expect(mockRequest).toHaveBeenCalledWith(expect.objectContaining({
method: "GET",
url: "/app/feedbacks",
params: { page: 1, limit: 20 },
headers: expect.objectContaining({
Authorization: "Bearer test",
}),
responseType: "json",
timeout: 60000,
}));
});
});

View File

@ -0,0 +1,103 @@
import { DEFAULT_BASE_URL } from "./types/common";
export const BASE_URL = DEFAULT_BASE_URL;
export const routes = {
feedback: {
method: "POST",
url: (messageId: string) => `/messages/${messageId}/feedbacks`,
},
application: {
method: "GET",
url: () => "/parameters",
},
fileUpload: {
method: "POST",
url: () => "/files/upload",
},
filePreview: {
method: "GET",
url: (fileId: string) => `/files/${fileId}/preview`,
},
textToAudio: {
method: "POST",
url: () => "/text-to-audio",
},
audioToText: {
method: "POST",
url: () => "/audio-to-text",
},
getMeta: {
method: "GET",
url: () => "/meta",
},
getInfo: {
method: "GET",
url: () => "/info",
},
getSite: {
method: "GET",
url: () => "/site",
},
createCompletionMessage: {
method: "POST",
url: () => "/completion-messages",
},
stopCompletionMessage: {
method: "POST",
url: (taskId: string) => `/completion-messages/${taskId}/stop`,
},
createChatMessage: {
method: "POST",
url: () => "/chat-messages",
},
getSuggested: {
method: "GET",
url: (messageId: string) => `/messages/${messageId}/suggested`,
},
stopChatMessage: {
method: "POST",
url: (taskId: string) => `/chat-messages/${taskId}/stop`,
},
getConversations: {
method: "GET",
url: () => "/conversations",
},
getConversationMessages: {
method: "GET",
url: () => "/messages",
},
renameConversation: {
method: "POST",
url: (conversationId: string) => `/conversations/${conversationId}/name`,
},
deleteConversation: {
method: "DELETE",
url: (conversationId: string) => `/conversations/${conversationId}`,
},
runWorkflow: {
method: "POST",
url: () => "/workflows/run",
},
stopWorkflow: {
method: "POST",
url: (taskId: string) => `/workflows/tasks/${taskId}/stop`,
},
};
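// Route entries pair an HTTP method with a URL builder, e.g. (illustrative):
//   routes.getSuggested.url("msg-1")   // "/messages/msg-1/suggested"
//   routes.stopWorkflow.url("task-1")  // "/workflows/tasks/task-1/stop"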
export { DifyClient } from "./client/base";
export { ChatClient } from "./client/chat";
export { CompletionClient } from "./client/completion";
export { WorkflowClient } from "./client/workflow";
export { KnowledgeBaseClient } from "./client/knowledge-base";
export { WorkspaceClient } from "./client/workspace";
export * from "./errors/dify-error";
export * from "./types/common";
export * from "./types/annotation";
export * from "./types/chat";
export * from "./types/completion";
export * from "./types/knowledge-base";
export * from "./types/workflow";
export * from "./types/workspace";
export { HttpClient } from "./http/client";

View File

@ -0,0 +1,18 @@
export type AnnotationCreateRequest = {
question: string;
answer: string;
};
export type AnnotationReplyActionRequest = {
score_threshold: number;
embedding_provider_name: string;
embedding_model_name: string;
};
export type AnnotationListOptions = {
page?: number;
limit?: number;
keyword?: string;
};
export type AnnotationResponse = Record<string, unknown>;

View File

@ -0,0 +1,17 @@
import type { StreamEvent } from "./common";
export type ChatMessageRequest = {
inputs?: Record<string, unknown>;
query: string;
user: string;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
conversation_id?: string;
auto_generate_name?: boolean;
workflow_id?: string;
retriever_from?: "app" | "dataset";
};
export type ChatMessageResponse = Record<string, unknown>;
export type ChatStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@ -0,0 +1,71 @@
export const DEFAULT_BASE_URL = "https://api.dify.ai/v1";
export const DEFAULT_TIMEOUT_SECONDS = 60;
export const DEFAULT_MAX_RETRIES = 3;
export const DEFAULT_RETRY_DELAY_SECONDS = 1;
export type RequestMethod = "GET" | "POST" | "PATCH" | "PUT" | "DELETE";
export type QueryParamValue =
| string
| number
| boolean
| Array<string | number | boolean>
| undefined;
export type QueryParams = Record<string, QueryParamValue>;
export type Headers = Record<string, string>;
export type DifyClientConfig = {
apiKey: string;
baseUrl?: string;
timeout?: number;
maxRetries?: number;
retryDelay?: number;
enableLogging?: boolean;
};
export type DifyResponse<T> = {
data: T;
status: number;
headers: Headers;
requestId?: string;
};
export type MessageFeedbackRequest = {
messageId: string;
user: string;
rating?: "like" | "dislike" | null;
content?: string | null;
};
export type TextToAudioRequest = {
user: string;
text?: string;
message_id?: string;
streaming?: boolean;
voice?: string;
};
export type StreamEvent<T = unknown> = {
event?: string;
data: T | string | null;
raw: string;
};
export type DifyStream<T = unknown> = AsyncIterable<StreamEvent<T>> & {
data: NodeJS.ReadableStream;
status: number;
headers: Headers;
requestId?: string;
toText(): Promise<string>;
toReadable(): NodeJS.ReadableStream;
};
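// A DifyStream can be consumed incrementally or drained to text (illustrative):
//
//   for await (const event of stream) { /* event.data is the parsed SSE payload */ }
//   const text = await stream.toText(); // concatenates answer/text/delta fields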
export type BinaryStream = {
data: NodeJS.ReadableStream;
status: number;
headers: Headers;
requestId?: string;
toReadable(): NodeJS.ReadableStream;
};

View File

@ -0,0 +1,13 @@
import type { StreamEvent } from "./common";
export type CompletionRequest = {
inputs?: Record<string, unknown>;
response_mode?: "blocking" | "streaming";
user: string;
files?: Array<Record<string, unknown>> | null;
retriever_from?: "app" | "dataset";
};
export type CompletionResponse = Record<string, unknown>;
export type CompletionStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@ -0,0 +1,184 @@
export type DatasetListOptions = {
page?: number;
limit?: number;
keyword?: string | null;
tagIds?: string[];
includeAll?: boolean;
};
export type DatasetCreateRequest = {
name: string;
description?: string;
indexing_technique?: "high_quality" | "economy";
permission?: string | null;
external_knowledge_api_id?: string | null;
provider?: string;
external_knowledge_id?: string | null;
retrieval_model?: Record<string, unknown> | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
export type DatasetUpdateRequest = {
name?: string;
description?: string | null;
indexing_technique?: "high_quality" | "economy" | null;
permission?: string | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
retrieval_model?: Record<string, unknown> | null;
partial_member_list?: Array<Record<string, string>> | null;
external_retrieval_model?: Record<string, unknown> | null;
external_knowledge_id?: string | null;
external_knowledge_api_id?: string | null;
};
export type DocumentStatusAction = "enable" | "disable" | "archive" | "un_archive";
export type DatasetTagCreateRequest = {
name: string;
};
export type DatasetTagUpdateRequest = {
tag_id: string;
name: string;
};
export type DatasetTagDeleteRequest = {
tag_id: string;
};
export type DatasetTagBindingRequest = {
tag_ids: string[];
target_id: string;
};
export type DatasetTagUnbindingRequest = {
tag_id: string;
target_id: string;
};
export type DocumentTextCreateRequest = {
name: string;
text: string;
process_rule?: Record<string, unknown> | null;
original_document_id?: string | null;
doc_form?: string;
doc_language?: string;
indexing_technique?: string | null;
retrieval_model?: Record<string, unknown> | null;
embedding_model?: string | null;
embedding_model_provider?: string | null;
};
export type DocumentTextUpdateRequest = {
name?: string | null;
text?: string | null;
process_rule?: Record<string, unknown> | null;
doc_form?: string;
doc_language?: string;
retrieval_model?: Record<string, unknown> | null;
};
export type DocumentListOptions = {
page?: number;
limit?: number;
keyword?: string | null;
status?: string | null;
};
export type DocumentGetOptions = {
metadata?: "all" | "only" | "without";
};
export type SegmentCreateRequest = {
segments: Array<Record<string, unknown>>;
};
export type SegmentUpdateRequest = {
segment: {
content?: string | null;
answer?: string | null;
keywords?: string[] | null;
regenerate_child_chunks?: boolean;
enabled?: boolean | null;
attachment_ids?: string[] | null;
};
};
export type SegmentListOptions = {
page?: number;
limit?: number;
status?: string[];
keyword?: string | null;
};
export type ChildChunkCreateRequest = {
content: string;
};
export type ChildChunkUpdateRequest = {
content: string;
};
export type ChildChunkListOptions = {
page?: number;
limit?: number;
keyword?: string | null;
};
export type MetadataCreateRequest = {
type: "string" | "number" | "time";
name: string;
};
export type MetadataUpdateRequest = {
name: string;
value?: string | number | null;
};
export type DocumentMetadataDetail = {
id: string;
name: string;
value?: string | number | null;
};
export type DocumentMetadataOperation = {
document_id: string;
metadata_list: DocumentMetadataDetail[];
partial_update?: boolean;
};
export type MetadataOperationRequest = {
operation_data: DocumentMetadataOperation[];
};
export type HitTestingRequest = {
query?: string | null;
retrieval_model?: Record<string, unknown> | null;
external_retrieval_model?: Record<string, unknown> | null;
attachment_ids?: string[] | null;
};
export type DatasourcePluginListOptions = {
isPublished?: boolean;
};
export type DatasourceNodeRunRequest = {
inputs: Record<string, unknown>;
datasource_type: string;
credential_id?: string | null;
is_published: boolean;
};
export type PipelineRunRequest = {
inputs: Record<string, unknown>;
datasource_type: string;
datasource_info_list: Array<Record<string, unknown>>;
start_node_id: string;
is_published: boolean;
response_mode: "streaming" | "blocking";
};
export type KnowledgeBaseResponse = Record<string, unknown>;
export type PipelineStreamEvent = Record<string, unknown>;
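
To make the dataset shapes above concrete, a small sketch follows; the module path is assumed and the values are invented placeholders.

import type {
  DatasetCreateRequest,
  MetadataOperationRequest,
} from "./dataset"; // path assumed

const createRequest: DatasetCreateRequest = {
  name: "support-articles",
  description: "Indexed help-centre content",
  indexing_technique: "high_quality",
};

// Attach one metadata value to a single document; `partial_update` presumably
// leaves existing keys on the document untouched.
const metadataOperation: MetadataOperationRequest = {
  operation_data: [
    {
      document_id: "doc-123",
      metadata_list: [{ id: "meta-1", name: "language", value: "fa-IR" }],
      partial_update: true,
    },
  ],
};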

View File

@ -0,0 +1,12 @@
import type { StreamEvent } from "./common";
export type WorkflowRunRequest = {
inputs?: Record<string, unknown>;
user: string;
response_mode?: "blocking" | "streaming";
files?: Array<Record<string, unknown>> | null;
};
export type WorkflowRunResponse = Record<string, unknown>;
export type WorkflowStreamEvent = StreamEvent<Record<string, unknown>>;

View File

@ -0,0 +1,2 @@
export type WorkspaceModelType = string;
export type WorkspaceModelsResponse = Record<string, unknown>;

View File

@ -0,0 +1,30 @@
import axios from "axios";
import { vi } from "vitest";
import { HttpClient } from "../src/http/client";
export const createHttpClient = (configOverrides = {}) => {
const mockRequest = vi.fn();
vi.spyOn(axios, "create").mockReturnValue({ request: mockRequest });
const client = new HttpClient({ apiKey: "test", ...configOverrides });
return { client, mockRequest };
};
export const createHttpClientWithSpies = (configOverrides = {}) => {
const { client, mockRequest } = createHttpClient(configOverrides);
const request = vi
.spyOn(client, "request")
.mockResolvedValue({ data: "ok", status: 200, headers: {} });
const requestStream = vi
.spyOn(client, "requestStream")
.mockResolvedValue({ data: null });
const requestBinaryStream = vi
.spyOn(client, "requestBinaryStream")
.mockResolvedValue({ data: null });
return {
client,
mockRequest,
request,
requestStream,
requestBinaryStream,
};
};
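
As a usage note for these factories, a spec built on them might look like the sketch below. The helper module name and the assumption that HttpClient's constructor obtains its transport via axios.create (which is why the factory stubs it) are mine, not part of this diff.

import axios from "axios";
import { afterEach, describe, expect, it, vi } from "vitest";
import { HttpClient } from "../src/http/client";
import { createHttpClient } from "./helpers"; // helper module name assumed

describe("createHttpClient test factory", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  it("returns a real HttpClient wired to a mocked axios instance", () => {
    const { client, mockRequest } = createHttpClient({ enableLogging: false });

    expect(client).toBeInstanceOf(HttpClient);
    // The factory replaced axios.create with a spy, so no real transport was built.
    expect(axios.create).toHaveBeenCalled();
    expect(mockRequest).not.toHaveBeenCalled();
  });
});

Note that the vitest config in this diff only picks up `**/*.test.js` files, so in practice such a spec would live as plain JavaScript.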

View File

@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "Bundler",
"rootDir": "src",
"outDir": "dist",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"strict": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts"]
}

View File

@ -0,0 +1,12 @@
import { defineConfig } from "tsup";
export default defineConfig({
entry: ["src/index.ts"],
format: ["esm"],
dts: true,
clean: true,
sourcemap: true,
splitting: false,
treeshake: true,
outDir: "dist",
});

View File

@ -0,0 +1,14 @@
import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
environment: "node",
include: ["**/*.test.js"],
coverage: {
provider: "v8",
reporter: ["text", "text-summary"],
include: ["src/**/*.ts"],
exclude: ["src/**/*.test.*", "src/**/*.spec.*"],
},
},
});

10
web/.gitignore vendored
View File

@ -54,3 +54,13 @@ package-lock.json
# mise
mise.toml
# PWA generated files
public/sw.js
public/sw.js.map
public/workbox-*.js
public/workbox-*.js.map
public/fallback-*.js
.vscode/settings.json
.vscode/mcp.json

View File

@ -1,144 +0,0 @@
{
"plugins": [
"unicorn",
"typescript",
"oxc"
],
"categories": {},
"rules": {
"for-direction": "error",
"no-async-promise-executor": "error",
"no-caller": "error",
"no-class-assign": "error",
"no-compare-neg-zero": "error",
"no-cond-assign": "warn",
"no-const-assign": "warn",
"no-constant-binary-expression": "error",
"no-constant-condition": "warn",
"no-control-regex": "warn",
"no-debugger": "warn",
"no-delete-var": "warn",
"no-dupe-class-members": "warn",
"no-dupe-else-if": "warn",
"no-dupe-keys": "warn",
"no-duplicate-case": "warn",
"no-empty-character-class": "warn",
"no-empty-pattern": "warn",
"no-empty-static-block": "warn",
"no-eval": "warn",
"no-ex-assign": "warn",
"no-extra-boolean-cast": "warn",
"no-func-assign": "warn",
"no-global-assign": "warn",
"no-import-assign": "warn",
"no-invalid-regexp": "warn",
"no-irregular-whitespace": "warn",
"no-loss-of-precision": "warn",
"no-new-native-nonconstructor": "warn",
"no-nonoctal-decimal-escape": "warn",
"no-obj-calls": "warn",
"no-self-assign": "warn",
"no-setter-return": "warn",
"no-shadow-restricted-names": "warn",
"no-sparse-arrays": "warn",
"no-this-before-super": "warn",
"no-unassigned-vars": "warn",
"no-unsafe-finally": "warn",
"no-unsafe-negation": "warn",
"no-unsafe-optional-chaining": "error",
"no-unused-labels": "warn",
"no-unused-private-class-members": "warn",
"no-unused-vars": "warn",
"no-useless-backreference": "warn",
"no-useless-catch": "error",
"no-useless-escape": "warn",
"no-useless-rename": "warn",
"no-with": "warn",
"require-yield": "warn",
"use-isnan": "warn",
"valid-typeof": "warn",
"oxc/bad-array-method-on-arguments": "warn",
"oxc/bad-char-at-comparison": "warn",
"oxc/bad-comparison-sequence": "warn",
"oxc/bad-min-max-func": "warn",
"oxc/bad-object-literal-comparison": "warn",
"oxc/bad-replace-all-arg": "warn",
"oxc/const-comparisons": "warn",
"oxc/double-comparisons": "warn",
"oxc/erasing-op": "warn",
"oxc/missing-throw": "warn",
"oxc/number-arg-out-of-range": "warn",
"oxc/only-used-in-recursion": "warn",
"oxc/uninvoked-array-callback": "warn",
"typescript/await-thenable": "warn",
"typescript/no-array-delete": "warn",
"typescript/no-base-to-string": "warn",
"typescript/no-confusing-void-expression": "warn",
"typescript/no-duplicate-enum-values": "warn",
"typescript/no-duplicate-type-constituents": "warn",
"typescript/no-extra-non-null-assertion": "warn",
"typescript/no-floating-promises": "warn",
"typescript/no-for-in-array": "warn",
"typescript/no-implied-eval": "warn",
"typescript/no-meaningless-void-operator": "warn",
"typescript/no-misused-new": "warn",
"typescript/no-misused-spread": "warn",
"typescript/no-non-null-asserted-optional-chain": "warn",
"typescript/no-redundant-type-constituents": "warn",
"typescript/no-this-alias": "warn",
"typescript/no-unnecessary-parameter-property-assignment": "warn",
"typescript/no-unsafe-declaration-merging": "warn",
"typescript/no-unsafe-unary-minus": "warn",
"typescript/no-useless-empty-export": "warn",
"typescript/no-wrapper-object-types": "warn",
"typescript/prefer-as-const": "warn",
"typescript/require-array-sort-compare": "warn",
"typescript/restrict-template-expressions": "warn",
"typescript/triple-slash-reference": "warn",
"typescript/unbound-method": "warn",
"unicorn/no-await-in-promise-methods": "warn",
"unicorn/no-empty-file": "warn",
"unicorn/no-invalid-fetch-options": "warn",
"unicorn/no-invalid-remove-event-listener": "warn",
"unicorn/no-new-array": "warn",
"unicorn/no-single-promise-in-promise-methods": "warn",
"unicorn/no-thenable": "warn",
"unicorn/no-unnecessary-await": "warn",
"unicorn/no-useless-fallback-in-spread": "warn",
"unicorn/no-useless-length-check": "warn",
"unicorn/no-useless-spread": "warn",
"unicorn/prefer-set-size": "warn",
"unicorn/prefer-string-starts-ends-with": "warn"
},
"settings": {
"jsx-a11y": {
"polymorphicPropName": null,
"components": {},
"attributes": {}
},
"next": {
"rootDir": []
},
"react": {
"formComponents": [],
"linkComponents": []
},
"jsdoc": {
"ignorePrivate": false,
"ignoreInternal": false,
"ignoreReplacesDocs": true,
"overrideReplacesDocs": true,
"augmentsExtendsReplacesDocs": false,
"implementsReplacesDocs": false,
"exemptDestructuredRootsFromChecks": false,
"tagNamePreference": {}
}
},
"env": {
"builtin": true
},
"globals": {},
"ignorePatterns": [
"**/*.js"
]
}

View File

@ -1,8 +1,8 @@
import type { Preview } from '@storybook/react'
import { withThemeByDataAttribute } from '@storybook/addon-themes'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import I18N from '../app/components/i18n'
import { ToastProvider } from '../app/components/base/toast'
import I18N from '../app/components/i18n'
import '../app/styles/globals.css'
import '../app/styles/markdown.scss'

View File

@ -1,6 +1,6 @@
import { useState } from 'react'
import type { ReactNode } from 'react'
import { useStore } from '@tanstack/react-form'
import { useState } from 'react'
import { useAppForm } from '@/app/components/base/form'
type UseAppFormOptions = Parameters<typeof useAppForm>[0]
@ -49,7 +49,12 @@ export const FormStoryWrapper = ({
<aside className="w-full max-w-sm rounded-xl border border-divider-subtle bg-components-panel-bg p-4 text-xs text-text-secondary shadow-sm">
<div className="flex items-center justify-between text-[11px] uppercase tracking-wide text-text-tertiary">
<span>Form State</span>
<span>{submitCount} submit{submitCount === 1 ? '' : 's'}</span>
<span>
{submitCount}
{' '}
submit
{submitCount === 1 ? '' : 's'}
</span>
</div>
<dl className="mt-2 space-y-1">
<div className="flex items-center justify-between rounded-md bg-components-button-tertiary-bg px-2 py-1">

View File

@ -1,15 +1,15 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "chrome",
"request": "launch",
"name": "Launch Chrome against localhost",
"url": "http://localhost:3000",
"webRoot": "${workspaceFolder}"
}
]
}
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "chrome",
"request": "launch",
"name": "Launch Chrome against localhost",
"url": "http://localhost:3000",
"webRoot": "${workspaceFolder}"
}
]
}

View File

@ -1,26 +1,50 @@
{
// Disable the default formatter, use eslint instead
"prettier.enable": false,
"editor.formatOnSave": true,
"editor.formatOnSave": false,
// Auto fix
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
"source.fixAll.eslint": "explicit",
"source.organizeImports": "never"
},
"eslint.format.enable": true,
"[python]": {
"editor.formatOnType": true
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
},
"[typescriptreact]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"[javascriptreact]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"[jsonc]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"typescript.tsdk": "node_modules/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true,
"npm.packageManager": "pnpm"
// Silent the stylistic rules in your IDE, but still auto fix them
"eslint.rules.customizations": [
{ "rule": "style/*", "severity": "off", "fixable": true },
{ "rule": "format/*", "severity": "off", "fixable": true },
{ "rule": "*-indent", "severity": "off", "fixable": true },
{ "rule": "*-spacing", "severity": "off", "fixable": true },
{ "rule": "*-spaces", "severity": "off", "fixable": true },
{ "rule": "*-order", "severity": "off", "fixable": true },
{ "rule": "*-dangle", "severity": "off", "fixable": true },
{ "rule": "*-newline", "severity": "off", "fixable": true },
{ "rule": "*quotes", "severity": "off", "fixable": true },
{ "rule": "*semi", "severity": "off", "fixable": true }
],
// Enable eslint for all supported languages
"eslint.validate": [
"javascript",
"javascriptreact",
"typescript",
"typescriptreact",
"vue",
"html",
"markdown",
"json",
"jsonc",
"yaml",
"toml",
"xml",
"gql",
"graphql",
"astro",
"svelte",
"css",
"less",
"scss",
"pcss",
"postcss"
]
}

View File

@ -1,7 +1,7 @@
import type { Plan, UsagePlanInfo } from '@/app/components/billing/type'
import type { ProviderContextState } from '@/context/provider-context'
import { merge, noop } from 'lodash-es'
import { defaultPlan } from '@/app/components/billing/config'
import type { ProviderContextState } from '@/context/provider-context'
import type { Plan, UsagePlanInfo } from '@/app/components/billing/type'
// Avoid being mocked in tests
export const baseProviderContextValue: ProviderContextState = {

View File

@ -1,9 +1,7 @@
import fs from 'node:fs'
import path from 'node:path'
// Mock functions to simulate the check-i18n functionality
const vm = require('node:vm')
const transpile = require('typescript').transpile
import vm from 'node:vm'
import { transpile } from 'typescript'
describe('check-i18n script functionality', () => {
const testDir = path.join(__dirname, '../i18n-test')
@ -33,8 +31,7 @@ describe('check-i18n script functionality', () => {
const filePath = path.join(folderPath, file)
const fileName = file.replace(/\.[^/.]+$/, '')
const camelCaseFileName = fileName.replace(/[-_](.)/g, (_, c) =>
c.toUpperCase(),
)
c.toUpperCase())
try {
const content = fs.readFileSync(filePath, 'utf8')
@ -617,9 +614,10 @@ export default translation
// Check if this line ends the value (ends with quote and comma/no comma)
if ((trimmed.endsWith('\',') || trimmed.endsWith('",') || trimmed.endsWith('`,')
|| trimmed.endsWith('\'') || trimmed.endsWith('"') || trimmed.endsWith('`'))
&& !trimmed.startsWith('//'))
|| trimmed.endsWith('\'') || trimmed.endsWith('"') || trimmed.endsWith('`'))
&& !trimmed.startsWith('//')) {
break
}
}
else {
break

View File

@ -15,19 +15,19 @@ describe('Description Validation Logic', () => {
}
describe('Backend Validation Function', () => {
test('allows description within 400 characters', () => {
it('allows description within 400 characters', () => {
const validDescription = 'x'.repeat(400)
expect(() => validateDescriptionLength(validDescription)).not.toThrow()
expect(validateDescriptionLength(validDescription)).toBe(validDescription)
})
test('allows empty description', () => {
it('allows empty description', () => {
expect(() => validateDescriptionLength('')).not.toThrow()
expect(() => validateDescriptionLength(null)).not.toThrow()
expect(() => validateDescriptionLength(undefined)).not.toThrow()
})
test('rejects description exceeding 400 characters', () => {
it('rejects description exceeding 400 characters', () => {
const invalidDescription = 'x'.repeat(401)
expect(() => validateDescriptionLength(invalidDescription)).toThrow(
'Description cannot exceed 400 characters.',
@ -36,7 +36,7 @@ describe('Description Validation Logic', () => {
})
describe('Backend Validation Consistency', () => {
test('App and Dataset have consistent validation limits', () => {
it('App and Dataset have consistent validation limits', () => {
const maxLength = 400
const validDescription = 'x'.repeat(maxLength)
const invalidDescription = 'x'.repeat(maxLength + 1)
@ -50,7 +50,7 @@ describe('Description Validation Logic', () => {
expect(() => validateDescriptionLength(invalidDescription)).toThrow()
})
test('validation error messages are consistent', () => {
it('validation error messages are consistent', () => {
const expectedErrorMessage = 'Description cannot exceed 400 characters.'
// This would be the error message from both App and Dataset backend validation
@ -78,7 +78,7 @@ describe('Description Validation Logic', () => {
]
testCases.forEach(({ length, shouldPass, description }) => {
test(`handles ${description} correctly`, () => {
it(`handles ${description} correctly`, () => {
const testDescription = length > 0 ? 'x'.repeat(length) : ''
expect(testDescription.length).toBe(length)

View File

@ -40,7 +40,7 @@ vi.mock('@/service/knowledge/use-segment', () => ({
}))
// Create a minimal version of the DocumentDetail component that includes our fix
const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string; documentId: string }) => {
const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string, documentId: string }) => {
const router = useRouter()
// This is the FIXED implementation from detail/index.tsx
@ -59,7 +59,12 @@ const DocumentDetailWithFix = ({ datasetId, documentId }: { datasetId: string; d
Back to Documents
</button>
<div data-testid="document-info">
Dataset: {datasetId}, Document: {documentId}
Dataset:
{' '}
{datasetId}
, Document:
{' '}
{documentId}
</div>
</div>
)
@ -88,7 +93,7 @@ describe('Document Detail Navigation Fix Verification', () => {
})
describe('Query Parameter Preservation', () => {
test('preserves pagination state (page 3, limit 25)', () => {
it('preserves pagination state (page 3, limit 25)', () => {
// Simulate user coming from page 3 with 25 items per page
Object.defineProperty(window, 'location', {
value: {
@ -108,7 +113,7 @@ describe('Document Detail Navigation Fix Verification', () => {
console.log('✅ Pagination state preserved: page=3&limit=25')
})
test('preserves search keyword and filters', () => {
it('preserves search keyword and filters', () => {
// Simulate user with search and filters applied
Object.defineProperty(window, 'location', {
value: {
@ -127,7 +132,7 @@ describe('Document Detail Navigation Fix Verification', () => {
console.log('✅ Search and filters preserved')
})
test('handles complex query parameters with special characters', () => {
it('handles complex query parameters with special characters', () => {
// Test with complex query string including encoded characters
Object.defineProperty(window, 'location', {
value: {
@ -152,7 +157,7 @@ describe('Document Detail Navigation Fix Verification', () => {
console.log('✅ Complex query parameters handled:', expectedCall)
})
test('handles empty query parameters gracefully', () => {
it('handles empty query parameters gracefully', () => {
// No query parameters in URL
Object.defineProperty(window, 'location', {
value: {
@ -173,7 +178,7 @@ describe('Document Detail Navigation Fix Verification', () => {
})
describe('Different Dataset IDs', () => {
test('works with different dataset identifiers', () => {
it('works with different dataset identifiers', () => {
Object.defineProperty(window, 'location', {
value: {
search: '?page=5&limit=10',
@ -193,7 +198,7 @@ describe('Document Detail Navigation Fix Verification', () => {
})
describe('Real User Scenarios', () => {
test('scenario: user searches, goes to page 3, views document, clicks back', () => {
it('scenario: user searches, goes to page 3, views document, clicks back', () => {
// User searched for "API" and navigated to page 3
Object.defineProperty(window, 'location', {
value: {
@ -213,7 +218,7 @@ describe('Document Detail Navigation Fix Verification', () => {
console.log('✅ Real user scenario: search + pagination preserved')
})
test('scenario: user applies multiple filters, goes to document, returns', () => {
it('scenario: user applies multiple filters, goes to document, returns', () => {
// User has applied multiple filters and is on page 2
Object.defineProperty(window, 'location', {
value: {
@ -234,7 +239,7 @@ describe('Document Detail Navigation Fix Verification', () => {
})
describe('Error Handling and Edge Cases', () => {
test('handles malformed query parameters gracefully', () => {
it('handles malformed query parameters gracefully', () => {
// Test with potentially problematic query string
Object.defineProperty(window, 'location', {
value: {
@ -258,7 +263,7 @@ describe('Document Detail Navigation Fix Verification', () => {
console.log('✅ Malformed parameters handled gracefully:', navigationPath)
})
test('handles very long query strings', () => {
it('handles very long query strings', () => {
// Test with a very long query string
const longKeyword = 'a'.repeat(1000)
Object.defineProperty(window, 'location', {
@ -281,7 +286,7 @@ describe('Document Detail Navigation Fix Verification', () => {
})
describe('Performance Verification', () => {
test('navigation function executes quickly', () => {
it('navigation function executes quickly', () => {
Object.defineProperty(window, 'location', {
value: {
search: '?page=1&limit=10&keyword=test',

View File

@ -46,32 +46,32 @@ describe('Document List Sorting', () => {
})
}
test('sorts by name descending (default for UI consistency)', () => {
it('sorts by name descending (default for UI consistency)', () => {
const sorted = sortDocuments(mockDocuments, 'name', 'desc')
expect(sorted.map(doc => doc.name)).toEqual(['Gamma.docx', 'Beta.pdf', 'Alpha.txt'])
})
test('sorts by name ascending (after toggle)', () => {
it('sorts by name ascending (after toggle)', () => {
const sorted = sortDocuments(mockDocuments, 'name', 'asc')
expect(sorted.map(doc => doc.name)).toEqual(['Alpha.txt', 'Beta.pdf', 'Gamma.docx'])
})
test('sorts by word_count descending', () => {
it('sorts by word_count descending', () => {
const sorted = sortDocuments(mockDocuments, 'word_count', 'desc')
expect(sorted.map(doc => doc.word_count)).toEqual([800, 500, 200])
})
test('sorts by hit_count descending', () => {
it('sorts by hit_count descending', () => {
const sorted = sortDocuments(mockDocuments, 'hit_count', 'desc')
expect(sorted.map(doc => doc.hit_count)).toEqual([25, 10, 5])
})
test('sorts by created_at descending (newest first)', () => {
it('sorts by created_at descending (newest first)', () => {
const sorted = sortDocuments(mockDocuments, 'created_at', 'desc')
expect(sorted.map(doc => doc.created_at)).toEqual([1699123500, 1699123456, 1699123400])
})
test('handles empty values correctly', () => {
it('handles empty values correctly', () => {
const docsWithEmpty = [
{ id: '1', name: 'Test', word_count: 100, hit_count: 5, created_at: 1699123456 },
{ id: '2', name: 'Empty', word_count: 0, hit_count: 0, created_at: 1699123400 },

View File

@ -1,8 +1,8 @@
import React from 'react'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import MailAndPasswordAuth from '@/app/(shareLayout)/webapp-signin/components/mail-and-password-auth'
import CheckCode from '@/app/(shareLayout)/webapp-signin/check-code/page'
import MailAndPasswordAuth from '@/app/(shareLayout)/webapp-signin/components/mail-and-password-auth'
const replaceMock = vi.fn()
const backMock = vi.fn()

View File

@ -1,9 +1,9 @@
import React from 'react'
import { render, screen, waitFor } from '@testing-library/react'
import { AccessMode } from '@/models/access-control'
import * as React from 'react'
import WebAppStoreProvider, { useWebAppStore } from '@/context/web-app-context'
import { AccessMode } from '@/models/access-control'
vi.mock('next/navigation', () => ({
usePathname: vi.fn(() => '/chatbot/sample-app'),
useSearchParams: vi.fn(() => {

View File

@ -1,7 +1,7 @@
import React from 'react'
import { fireEvent, render, screen } from '@testing-library/react'
import CommandSelector from '../../app/components/goto-anything/command-selector'
import type { ActionItem } from '../../app/components/goto-anything/actions/types'
import { fireEvent, render, screen } from '@testing-library/react'
import * as React from 'react'
import CommandSelector from '../../app/components/goto-anything/command-selector'
vi.mock('cmdk', () => ({
Command: {

View File

@ -1,6 +1,10 @@
import type { Mock } from 'vitest'
import type { ActionItem } from '../../app/components/goto-anything/actions/types'
// Import after mocking to get mocked version
import { matchAction } from '../../app/components/goto-anything/actions'
import { slashCommandRegistry } from '../../app/components/goto-anything/actions/commands/registry'
// Mock the entire actions module to avoid import issues
vi.mock('../../app/components/goto-anything/actions', () => ({
matchAction: vi.fn(),
@ -8,10 +12,6 @@ vi.mock('../../app/components/goto-anything/actions', () => ({
vi.mock('../../app/components/goto-anything/actions/commands/registry')
// Import after mocking to get mocked version
import { matchAction } from '../../app/components/goto-anything/actions'
import { slashCommandRegistry } from '../../app/components/goto-anything/actions/commands/registry'
// Implement the actual matchAction logic for testing
const actualMatchAction = (query: string, actions: Record<string, ActionItem>) => {
const result = Object.values(actions).find((action) => {

View File

@ -1,12 +1,13 @@
import React from 'react'
import { render, screen } from '@testing-library/react'
import * as React from 'react'
// Type alias for search mode
type SearchMode = 'scopes' | 'commands' | null
// Mock component to test tag display logic
const TagDisplay: React.FC<{ searchMode: SearchMode }> = ({ searchMode }) => {
if (!searchMode) return null
if (!searchMode)
return null
return (
<div className="flex items-center gap-1 text-xs text-text-tertiary">
@ -37,8 +38,10 @@ describe('Scope and Command Tags', () => {
describe('Search Mode Detection', () => {
const getSearchMode = (query: string): SearchMode => {
if (query.startsWith('@')) return 'scopes'
if (query.startsWith('/')) return 'commands'
if (query.startsWith('@'))
return 'scopes'
if (query.startsWith('/'))
return 'commands'
return null
}
@ -90,8 +93,10 @@ describe('Scope and Command Tags', () => {
const SearchComponent: React.FC<{ query: string }> = ({ query }) => {
let searchMode: SearchMode = null
if (query.startsWith('@')) searchMode = 'scopes'
else if (query.startsWith('/')) searchMode = 'commands'
if (query.startsWith('@'))
searchMode = 'scopes'
else if (query.startsWith('/'))
searchMode = 'commands'
return (
<div>

View File

@ -10,8 +10,8 @@ import type { MockedFunction } from 'vitest'
*/
import { Actions, searchAnything } from '@/app/components/goto-anything/actions'
import { postMarketplace } from '@/service/base'
import { fetchAppList } from '@/service/apps'
import { postMarketplace } from '@/service/base'
import { fetchDatasets } from '@/service/datasets'
// Mock API functions

View File

@ -1,5 +1,5 @@
import { slashCommandRegistry } from '../../app/components/goto-anything/actions/commands/registry'
import type { SlashCommandHandler } from '../../app/components/goto-anything/actions/commands/types'
import { slashCommandRegistry } from '../../app/components/goto-anything/actions/commands/registry'
// Mock the registry
vi.mock('../../app/components/goto-anything/actions/commands/registry')
@ -50,8 +50,10 @@ describe('Slash Command Dual-Mode System', () => {
beforeEach(() => {
vi.clearAllMocks()
;(slashCommandRegistry as any).findCommand = vi.fn((name: string) => {
if (name === 'docs') return mockDirectCommand
if (name === 'theme') return mockSubmenuCommand
if (name === 'docs')
return mockDirectCommand
if (name === 'theme')
return mockSubmenuCommand
return null
})
;(slashCommandRegistry as any).getAllCommands = vi.fn(() => [

View File

@ -27,11 +27,11 @@ const loadTranslationContent = (locale: string): string => {
// Helper function to check if upload features exist
const hasUploadFeatures = (content: string): { [key: string]: boolean } => {
return {
fileUpload: /fileUpload\s*:\s*{/.test(content),
imageUpload: /imageUpload\s*:\s*{/.test(content),
documentUpload: /documentUpload\s*:\s*{/.test(content),
audioUpload: /audioUpload\s*:\s*{/.test(content),
featureBar: /bar\s*:\s*{/.test(content),
fileUpload: /fileUpload\s*:\s*\{/.test(content),
imageUpload: /imageUpload\s*:\s*\{/.test(content),
documentUpload: /documentUpload\s*:\s*\{/.test(content),
audioUpload: /audioUpload\s*:\s*\{/.test(content),
featureBar: /bar\s*:\s*\{/.test(content),
}
}
@ -43,14 +43,14 @@ describe('Upload Features i18n Translations - Issue #23062', () => {
console.log(`Testing ${supportedLocales.length} locales for upload features`)
})
test('all locales should have translation files', () => {
it('all locales should have translation files', () => {
supportedLocales.forEach((locale) => {
const filePath = path.join(I18N_DIR, locale, 'app-debug.ts')
expect(fs.existsSync(filePath)).toBe(true)
})
})
test('all locales should have required upload features', () => {
it('all locales should have required upload features', () => {
const results: { [locale: string]: { [feature: string]: boolean } } = {}
supportedLocales.forEach((locale) => {
@ -69,7 +69,7 @@ describe('Upload Features i18n Translations - Issue #23062', () => {
console.log('✅ All locales have complete upload features')
})
test('previously missing locales should now have audioUpload - Issue #23062', () => {
it('previously missing locales should now have audioUpload - Issue #23062', () => {
// These locales were specifically missing audioUpload
const previouslyMissingLocales = ['fa-IR', 'hi-IN', 'ro-RO', 'sl-SI', 'th-TH', 'uk-UA', 'vi-VN']
@ -77,7 +77,7 @@ describe('Upload Features i18n Translations - Issue #23062', () => {
const content = loadTranslationContent(locale)
// Verify audioUpload exists
expect(/audioUpload\s*:\s*{/.test(content)).toBe(true)
expect(/audioUpload\s*:\s*\{/.test(content)).toBe(true)
// Verify it has title and description
expect(/audioUpload[^}]*title\s*:/.test(content)).toBe(true)
@ -87,30 +87,30 @@ describe('Upload Features i18n Translations - Issue #23062', () => {
})
})
test('upload features should have required properties', () => {
it('upload features should have required properties', () => {
supportedLocales.forEach((locale) => {
const content = loadTranslationContent(locale)
// Check fileUpload has required properties
if (/fileUpload\s*:\s*{/.test(content)) {
if (/fileUpload\s*:\s*\{/.test(content)) {
expect(/fileUpload[^}]*title\s*:/.test(content)).toBe(true)
expect(/fileUpload[^}]*description\s*:/.test(content)).toBe(true)
}
// Check imageUpload has required properties
if (/imageUpload\s*:\s*{/.test(content)) {
if (/imageUpload\s*:\s*\{/.test(content)) {
expect(/imageUpload[^}]*title\s*:/.test(content)).toBe(true)
expect(/imageUpload[^}]*description\s*:/.test(content)).toBe(true)
}
// Check documentUpload has required properties
if (/documentUpload\s*:\s*{/.test(content)) {
if (/documentUpload\s*:\s*\{/.test(content)) {
expect(/documentUpload[^}]*title\s*:/.test(content)).toBe(true)
expect(/documentUpload[^}]*description\s*:/.test(content)).toBe(true)
}
// Check audioUpload has required properties
if (/audioUpload\s*:\s*{/.test(content)) {
if (/audioUpload\s*:\s*\{/.test(content)) {
expect(/audioUpload[^}]*title\s*:/.test(content)).toBe(true)
expect(/audioUpload[^}]*description\s*:/.test(content)).toBe(true)
}

View File

@ -24,7 +24,7 @@ describe('Navigation Utilities', () => {
})
describe('createNavigationPath', () => {
test('preserves query parameters by default', () => {
it('preserves query parameters by default', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10&keyword=test' },
writable: true,
@ -34,7 +34,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents?page=3&limit=10&keyword=test')
})
test('returns clean path when preserveParams is false', () => {
it('returns clean path when preserveParams is false', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10' },
writable: true,
@ -44,7 +44,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents')
})
test('handles empty query parameters', () => {
it('handles empty query parameters', () => {
Object.defineProperty(window, 'location', {
value: { search: '' },
writable: true,
@ -54,7 +54,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents')
})
test('handles errors gracefully', () => {
it('handles errors gracefully', () => {
// Mock window.location to throw an error
Object.defineProperty(window, 'location', {
get: () => {
@ -74,7 +74,7 @@ describe('Navigation Utilities', () => {
})
describe('createBackNavigation', () => {
test('creates function that navigates with preserved params', () => {
it('creates function that navigates with preserved params', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=2&limit=25' },
writable: true,
@ -86,7 +86,7 @@ describe('Navigation Utilities', () => {
expect(mockPush).toHaveBeenCalledWith('/datasets/123/documents?page=2&limit=25')
})
test('creates function that navigates without params when specified', () => {
it('creates function that navigates without params when specified', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=2&limit=25' },
writable: true,
@ -100,7 +100,7 @@ describe('Navigation Utilities', () => {
})
describe('extractQueryParams', () => {
test('extracts specified parameters', () => {
it('extracts specified parameters', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10&keyword=test&other=value' },
writable: true,
@ -114,7 +114,7 @@ describe('Navigation Utilities', () => {
})
})
test('handles missing parameters', () => {
it('handles missing parameters', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3' },
writable: true,
@ -126,7 +126,7 @@ describe('Navigation Utilities', () => {
})
})
test('handles errors gracefully', () => {
it('handles errors gracefully', () => {
Object.defineProperty(window, 'location', {
get: () => {
throw new Error('Location access denied')
@ -145,7 +145,7 @@ describe('Navigation Utilities', () => {
})
describe('createNavigationPathWithParams', () => {
test('creates path with specified parameters', () => {
it('creates path with specified parameters', () => {
const path = createNavigationPathWithParams('/datasets/123/documents', {
page: 1,
limit: 25,
@ -155,7 +155,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents?page=1&limit=25&keyword=search+term')
})
test('filters out empty values', () => {
it('filters out empty values', () => {
const path = createNavigationPathWithParams('/datasets/123/documents', {
page: 1,
limit: '',
@ -166,7 +166,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents?page=1&keyword=test')
})
test('handles errors gracefully', () => {
it('handles errors gracefully', () => {
// Mock URLSearchParams to throw an error
const originalURLSearchParams = globalThis.URLSearchParams
globalThis.URLSearchParams = vi.fn(() => {
@ -185,7 +185,7 @@ describe('Navigation Utilities', () => {
})
describe('mergeQueryParams', () => {
test('merges new params with existing ones', () => {
it('merges new params with existing ones', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10' },
writable: true,
@ -199,7 +199,7 @@ describe('Navigation Utilities', () => {
expect(result).toContain('keyword=test') // added
})
test('removes parameters when value is null', () => {
it('removes parameters when value is null', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10&keyword=test' },
writable: true,
@ -214,7 +214,7 @@ describe('Navigation Utilities', () => {
expect(result).toContain('filter=active')
})
test('creates fresh params when preserveExisting is false', () => {
it('creates fresh params when preserveExisting is false', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=3&limit=10' },
writable: true,
@ -228,7 +228,7 @@ describe('Navigation Utilities', () => {
})
describe('datasetNavigation', () => {
test('backToDocuments creates correct navigation function', () => {
it('backToDocuments creates correct navigation function', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=2&limit=25' },
writable: true,
@ -240,14 +240,14 @@ describe('Navigation Utilities', () => {
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-123/documents?page=2&limit=25')
})
test('toDocumentDetail creates correct navigation function', () => {
it('toDocumentDetail creates correct navigation function', () => {
const detailFn = datasetNavigation.toDocumentDetail(mockRouter, 'dataset-123', 'doc-456')
detailFn()
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-123/documents/doc-456')
})
test('toDocumentSettings creates correct navigation function', () => {
it('toDocumentSettings creates correct navigation function', () => {
const settingsFn = datasetNavigation.toDocumentSettings(mockRouter, 'dataset-123', 'doc-456')
settingsFn()
@ -256,7 +256,7 @@ describe('Navigation Utilities', () => {
})
describe('Real-world Integration Scenarios', () => {
test('complete user workflow: list -> detail -> back', () => {
it('complete user workflow: list -> detail -> back', () => {
// User starts on page 3 with search
Object.defineProperty(window, 'location', {
value: { search: '?page=3&keyword=API&limit=25' },
@ -273,7 +273,7 @@ describe('Navigation Utilities', () => {
expect(mockPush).toHaveBeenCalledWith('/datasets/main-dataset/documents?page=3&keyword=API&limit=25')
})
test('user applies filters then views document', () => {
it('user applies filters then views document', () => {
// Complex filter state
Object.defineProperty(window, 'location', {
value: { search: '?page=1&limit=50&status=active&type=pdf&sort=created_at&order=desc' },
@ -288,7 +288,7 @@ describe('Navigation Utilities', () => {
})
describe('Edge Cases and Error Handling', () => {
test('handles special characters in query parameters', () => {
it('handles special characters in query parameters', () => {
Object.defineProperty(window, 'location', {
value: { search: '?keyword=hello%20world&filter=type%3Apdf&tag=%E4%B8%AD%E6%96%87' },
writable: true,
@ -300,7 +300,7 @@ describe('Navigation Utilities', () => {
expect(path).toContain('%E4%B8%AD%E6%96%87')
})
test('handles duplicate query parameters', () => {
it('handles duplicate query parameters', () => {
Object.defineProperty(window, 'location', {
value: { search: '?tag=tag1&tag=tag2&tag=tag3' },
writable: true,
@ -311,7 +311,7 @@ describe('Navigation Utilities', () => {
expect(params.tag).toBe('tag1')
})
test('handles very long query strings', () => {
it('handles very long query strings', () => {
const longValue = 'a'.repeat(1000)
Object.defineProperty(window, 'location', {
value: { search: `?data=${longValue}` },
@ -323,7 +323,7 @@ describe('Navigation Utilities', () => {
expect(path.length).toBeGreaterThan(1000)
})
test('handles empty string values in query parameters', () => {
it('handles empty string values in query parameters', () => {
const path = createNavigationPathWithParams('/datasets/123/documents', {
page: 1,
keyword: '',
@ -336,7 +336,7 @@ describe('Navigation Utilities', () => {
expect(path).not.toContain('filter=')
})
test('handles null and undefined values in mergeQueryParams', () => {
it('handles null and undefined values in mergeQueryParams', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=1&limit=10&keyword=test' },
writable: true,
@ -355,7 +355,7 @@ describe('Navigation Utilities', () => {
expect(result).toContain('sort=name')
})
test('handles navigation with hash fragments', () => {
it('handles navigation with hash fragments', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=1', hash: '#section-2' },
writable: true,
@ -366,7 +366,7 @@ describe('Navigation Utilities', () => {
expect(path).toBe('/datasets/123/documents?page=1')
})
test('handles malformed query strings gracefully', () => {
it('handles malformed query strings gracefully', () => {
Object.defineProperty(window, 'location', {
value: { search: '?page=1&invalid&limit=10&=value&key=' },
writable: true,
@ -382,7 +382,7 @@ describe('Navigation Utilities', () => {
})
describe('Performance Tests', () => {
test('handles large number of query parameters efficiently', () => {
it('handles large number of query parameters efficiently', () => {
const manyParams = Array.from({ length: 50 }, (_, i) => `param${i}=value${i}`).join('&')
Object.defineProperty(window, 'location', {
value: { search: `?${manyParams}` },

View File

@ -10,8 +10,8 @@
import { render, screen, waitFor } from '@testing-library/react'
import { ThemeProvider } from 'next-themes'
import useTheme from '@/hooks/use-theme'
import { useEffect, useState } from 'react'
import useTheme from '@/hooks/use-theme'
const DARK_MODE_MEDIA_QUERY = /prefers-color-scheme:\s*dark/i
@ -81,9 +81,9 @@ const setupMockEnvironment = (storedTheme: string | null, systemPrefersDark = fa
// Helper function to create timing page component
const createTimingPageComponent = (
timingData: Array<{ phase: string; timestamp: number; styles: { backgroundColor: string; color: string } }>,
timingData: Array<{ phase: string, timestamp: number, styles: { backgroundColor: string, color: string } }>,
) => {
const recordTiming = (phase: string, styles: { backgroundColor: string; color: string }) => {
const recordTiming = (phase: string, styles: { backgroundColor: string, color: string }) => {
timingData.push({
phase,
timestamp: performance.now(),
@ -113,7 +113,17 @@ const createTimingPageComponent = (
style={currentStyles}
>
<div data-testid="timing-status">
Phase: {mounted ? 'CSR' : 'Initial'} | Theme: {theme} | Visual: {isDark ? 'dark' : 'light'}
Phase:
{' '}
{mounted ? 'CSR' : 'Initial'}
{' '}
| Theme:
{' '}
{theme}
{' '}
| Visual:
{' '}
{isDark ? 'dark' : 'light'}
</div>
</div>
)
@ -124,7 +134,7 @@ const createTimingPageComponent = (
// Helper function to create CSS test component
const createCSSTestComponent = (
cssStates: Array<{ className: string; timestamp: number }>,
cssStates: Array<{ className: string, timestamp: number }>,
) => {
const recordCSSState = (className: string) => {
cssStates.push({
@ -151,7 +161,10 @@ const createCSSTestComponent = (
data-testid="css-component"
className={className}
>
<div data-testid="css-classes">Classes: {className}</div>
<div data-testid="css-classes">
Classes:
{className}
</div>
</div>
)
}
@ -161,7 +174,7 @@ const createCSSTestComponent = (
// Helper function to create performance test component
const createPerformanceTestComponent = (
performanceMarks: Array<{ event: string; timestamp: number }>,
performanceMarks: Array<{ event: string, timestamp: number }>,
) => {
const recordPerformanceMark = (event: string) => {
performanceMarks.push({ event, timestamp: performance.now() })
@ -186,7 +199,13 @@ const createPerformanceTestComponent = (
return (
<div data-testid="performance-test">
Mounted: {mounted.toString()} | Theme: {theme || 'loading'}
Mounted:
{' '}
{mounted.toString()}
{' '}
| Theme:
{' '}
{theme || 'loading'}
</div>
)
}
@ -216,10 +235,14 @@ const PageComponent = () => {
Dify Application
</h1>
<div data-testid="theme-indicator">
Current Theme: {mounted ? theme : 'unknown'}
Current Theme:
{' '}
{mounted ? theme : 'unknown'}
</div>
<div data-testid="visual-appearance">
Appearance: {isDark ? 'dark' : 'light'}
Appearance:
{' '}
{isDark ? 'dark' : 'light'}
</div>
</div>
</div>
@ -254,7 +277,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
describe('Page Refresh Scenario Simulation', () => {
test('simulates complete page loading process with dark theme', async () => {
it('simulates complete page loading process with dark theme', async () => {
// Setup: User previously selected dark mode
setupMockEnvironment('dark')
@ -286,7 +309,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
console.log('State change detection: Initial -> Final')
})
test('handles light theme correctly', async () => {
it('handles light theme correctly', async () => {
setupMockEnvironment('light')
render(
@ -302,7 +325,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light')
})
test('handles system theme with dark preference', async () => {
it('handles system theme with dark preference', async () => {
setupMockEnvironment('system', true) // system theme, dark preference
render(
@ -318,7 +341,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: dark')
})
test('handles system theme with light preference', async () => {
it('handles system theme with light preference', async () => {
setupMockEnvironment('system', false) // system theme, light preference
render(
@ -334,7 +357,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
expect(screen.getByTestId('visual-appearance')).toHaveTextContent('Appearance: light')
})
test('handles no stored theme (defaults to system)', async () => {
it('handles no stored theme (defaults to system)', async () => {
setupMockEnvironment(null, false) // no stored theme, system prefers light
render(
@ -348,10 +371,10 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
})
test('measures timing window of style changes', async () => {
it('measures timing window of style changes', async () => {
setupMockEnvironment('dark')
const timingData: Array<{ phase: string; timestamp: number; styles: any }> = []
const timingData: Array<{ phase: string, timestamp: number, styles: any }> = []
const TimingPageComponent = createTimingPageComponent(timingData)
render(
@ -384,10 +407,10 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
describe('CSS Application Timing Tests', () => {
test('checks CSS class changes causing flicker', async () => {
it('checks CSS class changes causing flicker', async () => {
setupMockEnvironment('dark')
const cssStates: Array<{ className: string; timestamp: number }> = []
const cssStates: Array<{ className: string, timestamp: number }> = []
const CSSTestComponent = createCSSTestComponent(cssStates)
render(
@ -420,7 +443,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
describe('Edge Cases and Error Handling', () => {
test('handles localStorage access errors gracefully', async () => {
it('handles localStorage access errors gracefully', async () => {
setupMockEnvironment(null)
const mockStorage = {
@ -457,7 +480,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
}
})
test('handles invalid theme values in localStorage', async () => {
it('handles invalid theme values in localStorage', async () => {
setupMockEnvironment('invalid-theme-value')
render(
@ -477,8 +500,8 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
describe('Performance and Regression Tests', () => {
test('verifies ThemeProvider position fix reduces initialization delay', async () => {
const performanceMarks: Array<{ event: string; timestamp: number }> = []
it('verifies ThemeProvider position fix reduces initialization delay', async () => {
const performanceMarks: Array<{ event: string, timestamp: number }> = []
setupMockEnvironment('dark')
@ -507,7 +530,7 @@ describe('Real Browser Environment Dark Mode Flicker Test', () => {
})
describe('Solution Requirements Definition', () => {
test('defines technical requirements to eliminate flicker', () => {
it('defines technical requirements to eliminate flicker', () => {
const technicalRequirements = {
ssrConsistency: 'SSR and CSR must render identical initial styles',
synchronousDetection: 'Theme detection must complete synchronously before first render',

View File

@ -14,8 +14,8 @@ describe('Unified Tags Editing - Pure Logic Tests', () => {
// This is the valueNotChanged logic from TagSelector component
const valueNotChanged
= currentValue.length === newSelectedTagIDs.length
&& currentValue.every(v => newSelectedTagIDs.includes(v))
&& newSelectedTagIDs.every(v => currentValue.includes(v))
&& currentValue.every(v => newSelectedTagIDs.includes(v))
&& newSelectedTagIDs.every(v => currentValue.includes(v))
expect(valueNotChanged).toBe(false)
})
@ -26,8 +26,8 @@ describe('Unified Tags Editing - Pure Logic Tests', () => {
const valueNotChanged
= currentValue.length === newSelectedTagIDs.length
&& currentValue.every(v => newSelectedTagIDs.includes(v))
&& newSelectedTagIDs.every(v => currentValue.includes(v))
&& currentValue.every(v => newSelectedTagIDs.includes(v))
&& newSelectedTagIDs.every(v => currentValue.includes(v))
expect(valueNotChanged).toBe(true)
})
@ -70,7 +70,7 @@ describe('Unified Tags Editing - Pure Logic Tests', () => {
})
describe('Fallback Logic (from layout-main.tsx)', () => {
type Tag = { id: string; name: string }
type Tag = { id: string, name: string }
type AppDetail = { tags: Tag[] }
type FallbackResult = { tags?: Tag[] } | null
// no-op
@ -316,7 +316,7 @@ describe('Unified Tags Editing - Pure Logic Tests', () => {
]
// Filter out invalid entries
const validTags = mixedData.filter((tag): tag is { id: string; name: string; type: string; binding_count: number } =>
const validTags = mixedData.filter((tag): tag is { id: string, name: string, type: string, binding_count: number } =>
tag != null
&& typeof tag === 'object'
&& 'id' in tag

View File

@ -1,6 +1,6 @@
import type { Mock } from 'vitest'
import { BlockEnum } from '@/app/components/workflow/types'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { BlockEnum } from '@/app/components/workflow/types'
// Type for mocked store
type MockWorkflowStore = {
@ -103,9 +103,9 @@ describe('Workflow Onboarding Integration Logic', () => {
// Simulate the validation logic from use-nodes-sync-draft.ts
const isValidStartNode = mockNode.data.type === BlockEnum.Start
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
expect(isValidStartNode).toBe(true)
})
@ -117,9 +117,9 @@ describe('Workflow Onboarding Integration Logic', () => {
}
const isValidStartNode = mockNode.data.type === BlockEnum.Start
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
expect(isValidStartNode).toBe(true)
})
@ -131,9 +131,9 @@ describe('Workflow Onboarding Integration Logic', () => {
}
const isValidStartNode = mockNode.data.type === BlockEnum.Start
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
expect(isValidStartNode).toBe(true)
})
@ -145,9 +145,9 @@ describe('Workflow Onboarding Integration Logic', () => {
}
const isValidStartNode = mockNode.data.type === BlockEnum.Start
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
expect(isValidStartNode).toBe(true)
})
@ -159,9 +159,9 @@ describe('Workflow Onboarding Integration Logic', () => {
}
const isValidStartNode = mockNode.data.type === BlockEnum.Start
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
|| mockNode.data.type === BlockEnum.TriggerSchedule
|| mockNode.data.type === BlockEnum.TriggerWebhook
|| mockNode.data.type === BlockEnum.TriggerPlugin
expect(isValidStartNode).toBe(false)
})

View File

@ -6,7 +6,7 @@
*/
import { render, screen } from '@testing-library/react'
import React from 'react'
import * as React from 'react'
// Mock environment variables before importing constants
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
@ -35,11 +35,16 @@ function restoreEnvironment() {
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => {
if (key.includes('MaxParallelismTitle')) return 'Max Parallelism'
if (key.includes('MaxParallelismDesc')) return 'Maximum number of parallel executions'
if (key.includes('parallelMode')) return 'Parallel Mode'
if (key.includes('parallelPanelDesc')) return 'Enable parallel execution'
if (key.includes('errorResponseMethod')) return 'Error Response Method'
if (key.includes('MaxParallelismTitle'))
return 'Max Parallelism'
if (key.includes('MaxParallelismDesc'))
return 'Maximum number of parallel executions'
if (key.includes('parallelMode'))
return 'Parallel Mode'
if (key.includes('parallelPanelDesc'))
return 'Enable parallel execution'
if (key.includes('errorResponseMethod'))
return 'Error Response Method'
return key
},
}),

View File

@ -5,8 +5,8 @@
* components have been properly fixed by replacing dangerouslySetInnerHTML with safe React rendering.
*/
import React from 'react'
import { cleanup, render } from '@testing-library/react'
import * as React from 'react'
import BlockInput from '../app/components/base/block-input'
import SupportVarInput from '../app/components/workflow/nodes/_base/components/support-var-input'

View File

@ -1,4 +1,4 @@
import React from 'react'
import * as React from 'react'
import Main from '@/app/components/app/log-annotation'
import { PageType } from '@/app/components/base/features/new-feature-panel/annotation-reply/type'

View File

@ -1,4 +1,4 @@
import React from 'react'
import * as React from 'react'
import Configuration from '@/app/components/app/configuration'
const IConfiguration = async () => {

View File

@ -1,9 +1,9 @@
import React from 'react'
import type { Locale } from '@/i18n-config'
import * as React from 'react'
import DevelopMain from '@/app/components/develop'
export type IDevelopProps = {
params: Promise<{ locale: Locale; appId: string }>
params: Promise<{ locale: Locale, appId: string }>
}
const Develop = async (props: IDevelopProps) => {

View File

@ -1,8 +1,7 @@
'use client'
import type { FC } from 'react'
import { useUnmount } from 'ahooks'
import React, { useCallback, useEffect, useState } from 'react'
import { usePathname, useRouter } from 'next/navigation'
import type { NavIcon } from '@/app/components/app-sidebar/navLink'
import type { App } from '@/types/app'
import {
RiDashboard2Fill,
RiDashboard2Line,
@ -13,21 +12,24 @@ import {
RiTerminalWindowFill,
RiTerminalWindowLine,
} from '@remixicon/react'
import { useUnmount } from 'ahooks'
import dynamic from 'next/dynamic'
import { usePathname, useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useShallow } from 'zustand/react/shallow'
import s from './style.module.css'
import { cn } from '@/utils/classnames'
import { useStore } from '@/app/components/app/store'
import AppSideBar from '@/app/components/app-sidebar'
import type { NavIcon } from '@/app/components/app-sidebar/navLink'
import { fetchAppDetailDirect } from '@/service/apps'
import { useAppContext } from '@/context/app-context'
import { useStore } from '@/app/components/app/store'
import Loading from '@/app/components/base/loading'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import { type App, AppModeEnum } from '@/types/app'
import useDocumentTitle from '@/hooks/use-document-title'
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
import dynamic from 'next/dynamic'
import { useAppContext } from '@/context/app-context'
import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import useDocumentTitle from '@/hooks/use-document-title'
import { fetchAppDetailDirect } from '@/service/apps'
import { AppModeEnum } from '@/types/app'
import { cn } from '@/utils/classnames'
import s from './style.module.css'
const TagManagementModal = dynamic(() => import('@/app/components/base/tag-management'), {
ssr: false,
@@ -68,11 +70,11 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
const navConfig = [
...(isCurrentWorkspaceEditor
? [{
name: t('common.appMenus.promptEng'),
href: `/app/${appId}/${(mode === AppModeEnum.WORKFLOW || mode === AppModeEnum.ADVANCED_CHAT) ? 'workflow' : 'configuration'}`,
icon: RiTerminalWindowLine,
selectedIcon: RiTerminalWindowFill,
}]
name: t('common.appMenus.promptEng'),
href: `/app/${appId}/${(mode === AppModeEnum.WORKFLOW || mode === AppModeEnum.ADVANCED_CHAT) ? 'workflow' : 'configuration'}`,
icon: RiTerminalWindowLine,
selectedIcon: RiTerminalWindowFill,
}]
: []
),
{
@@ -83,13 +85,13 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
},
...(isCurrentWorkspaceEditor
? [{
name: mode !== AppModeEnum.WORKFLOW
? t('common.appMenus.logAndAnn')
: t('common.appMenus.logs'),
href: `/app/${appId}/logs`,
icon: RiFileList3Line,
selectedIcon: RiFileList3Fill,
}]
name: mode !== AppModeEnum.WORKFLOW
? t('common.appMenus.logAndAnn')
: t('common.appMenus.logs'),
href: `/app/${appId}/logs`,
icon: RiFileList3Line,
selectedIcon: RiFileList3Fill,
}]
: []
),
{
@@ -156,7 +158,7 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
if (!appDetail) {
return (
<div className='flex h-full items-center justify-center bg-background-body'>
<div className="flex h-full items-center justify-center bg-background-body">
<Loading />
</div>
)
@@ -173,7 +175,7 @@ const AppDetailLayout: FC<IAppDetailLayoutProps> = (props) => {
{children}
</div>
{showTagManagementModal && (
<TagManagementModal type='app' show={showTagManagementModal} />
<TagManagementModal type="app" show={showTagManagementModal} />
)}
</div>
)

View File

@@ -1,4 +1,4 @@
import React from 'react'
import * as React from 'react'
import Main from '@/app/components/app/log-annotation'
import { PageType } from '@/app/components/base/features/new-feature-panel/annotation-reply/type'

View File

@@ -1,30 +1,31 @@
'use client'
import type { FC } from 'react'
import React, { useCallback, useMemo } from 'react'
import type { IAppCardProps } from '@/app/components/app/overview/app-card'
import type { BlockEnum } from '@/app/components/workflow/types'
import type { UpdateAppSiteCodeResponse } from '@/models/app'
import type { App } from '@/types/app'
import * as React from 'react'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AppCard from '@/app/components/app/overview/app-card'
import Loading from '@/app/components/base/loading'
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
import TriggerCard from '@/app/components/app/overview/trigger-card'
import { useStore as useAppStore } from '@/app/components/app/store'
import Loading from '@/app/components/base/loading'
import { ToastContext } from '@/app/components/base/toast'
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
import { isTriggerNode } from '@/app/components/workflow/types'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useDocLink } from '@/context/i18n'
import {
fetchAppDetail,
updateAppSiteAccessToken,
updateAppSiteConfig,
updateAppSiteStatus,
} from '@/service/apps'
import type { App } from '@/types/app'
import { AppModeEnum } from '@/types/app'
import type { UpdateAppSiteCodeResponse } from '@/models/app'
import { asyncRunSafe } from '@/utils'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import type { IAppCardProps } from '@/app/components/app/overview/app-card'
import { useStore as useAppStore } from '@/app/components/app/store'
import { useAppWorkflow } from '@/service/use-workflow'
import type { BlockEnum } from '@/app/components/workflow/types'
import { isTriggerNode } from '@/app/components/workflow/types'
import { useDocLink } from '@/context/i18n'
import { AppModeEnum } from '@/types/app'
import { asyncRunSafe } from '@/utils'
export type ICardViewProps = {
appId: string
@@ -59,12 +60,12 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
const triggerDocUrl = docLink('/guides/workflow/node/start')
const buildTriggerModeMessage = useCallback((featureName: string) => (
<div className='flex flex-col gap-1'>
<div className='text-xs text-text-secondary'>
<div className="flex flex-col gap-1">
<div className="text-xs text-text-secondary">
{t('appOverview.overview.disableTooltip.triggerMode', { feature: featureName })}
</div>
<div
className='cursor-pointer text-xs font-medium text-text-accent hover:underline'
className="cursor-pointer text-xs font-medium text-text-accent hover:underline"
onClick={(event) => {
event.stopPropagation()
window.open(triggerDocUrl, '_blank')
@@ -185,12 +186,14 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
</>
)
const triggerCardNode = showTriggerCard ? (
<TriggerCard
appInfo={appDetail}
onToggleResult={handleCallbackResult}
/>
) : null
const triggerCardNode = showTriggerCard
? (
<TriggerCard
appInfo={appDetail}
onToggleResult={handleCallbackResult}
/>
)
: null
return (
<div className={className || 'mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'}>

View File

@@ -1,15 +1,16 @@
'use client'
import React, { useState } from 'react'
import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import dayjs from 'dayjs'
import quarterOfYear from 'dayjs/plugin/quarterOfYear'
import * as React from 'react'
import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import { TIME_PERIOD_MAPPING as LONG_TIME_PERIOD_MAPPING } from '@/app/components/app/log/filter'
import { AvgResponseTime, AvgSessionInteractions, AvgUserInteractions, ConversationsChart, CostChart, EndUsersChart, MessagesChart, TokenPerSecond, UserSatisfactionRate, WorkflowCostChart, WorkflowDailyTerminalsChart, WorkflowMessagesChart } from '@/app/components/app/overview/app-chart'
import { useStore as useAppStore } from '@/app/components/app/store'
import TimeRangePicker from './time-range-picker'
import { TIME_PERIOD_MAPPING as LONG_TIME_PERIOD_MAPPING } from '@/app/components/app/log/filter'
import { IS_CLOUD_EDITION } from '@/config'
import LongTimeRangePicker from './long-time-range-picker'
import TimeRangePicker from './time-range-picker'
dayjs.extend(quarterOfYear)
@@ -43,63 +44,65 @@ export default function ChartView({ appId, headerRight }: IChartViewProps) {
return (
<div>
<div className='mb-4'>
<div className='system-xl-semibold mb-2 text-text-primary'>{t('common.appMenus.overview')}</div>
<div className='flex items-center justify-between'>
{IS_CLOUD_EDITION ? (
<TimeRangePicker
ranges={TIME_PERIOD_MAPPING}
onSelect={setPeriod}
queryDateFormat={queryDateFormat}
/>
) : (
<LongTimeRangePicker
periodMapping={LONG_TIME_PERIOD_MAPPING}
onSelect={setPeriod}
queryDateFormat={queryDateFormat}
/>
)}
<div className="mb-4">
<div className="system-xl-semibold mb-2 text-text-primary">{t('common.appMenus.overview')}</div>
<div className="flex items-center justify-between">
{IS_CLOUD_EDITION
? (
<TimeRangePicker
ranges={TIME_PERIOD_MAPPING}
onSelect={setPeriod}
queryDateFormat={queryDateFormat}
/>
)
: (
<LongTimeRangePicker
periodMapping={LONG_TIME_PERIOD_MAPPING}
onSelect={setPeriod}
queryDateFormat={queryDateFormat}
/>
)}
{headerRight}
</div>
</div>
{!isWorkflow && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
<ConversationsChart period={period} id={appId} />
<EndUsersChart period={period} id={appId} />
</div>
)}
{!isWorkflow && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
{isChatApp
? (
<AvgSessionInteractions period={period} id={appId} />
)
<AvgSessionInteractions period={period} id={appId} />
)
: (
<AvgResponseTime period={period} id={appId} />
)}
<AvgResponseTime period={period} id={appId} />
)}
<TokenPerSecond period={period} id={appId} />
</div>
)}
{!isWorkflow && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
<UserSatisfactionRate period={period} id={appId} />
<CostChart period={period} id={appId} />
</div>
)}
{!isWorkflow && isChatApp && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
<MessagesChart period={period} id={appId} />
</div>
)}
{isWorkflow && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
<WorkflowMessagesChart period={period} id={appId} />
<WorkflowDailyTerminalsChart period={period} id={appId} />
</div>
)}
{isWorkflow && (
<div className='mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2'>
<div className="mb-6 grid w-full grid-cols-1 gap-6 xl:grid-cols-2">
<WorkflowCostChart period={period} id={appId} />
<AvgUserInteractions period={period} id={appId} />
</div>

View File

@@ -1,13 +1,14 @@
'use client'
import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import type { FC } from 'react'
import React from 'react'
import type { PeriodParams } from '@/app/components/app/overview/app-chart'
import type { Item } from '@/app/components/base/select'
import { SimpleSelect } from '@/app/components/base/select'
import { useTranslation } from 'react-i18next'
import dayjs from 'dayjs'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { SimpleSelect } from '@/app/components/base/select'
type Props = {
periodMapping: { [key: string]: { value: number; name: string } }
periodMapping: { [key: string]: { value: number, name: string } }
onSelect: (payload: PeriodParams) => void
queryDateFormat: string
}
@@ -53,10 +54,10 @@ const LongTimeRangePicker: FC<Props> = ({
return (
<SimpleSelect
items={Object.entries(periodMapping).map(([k, v]) => ({ value: k, name: t(`appLog.filter.period.${v.name}`) }))}
className='mt-0 !w-40'
className="mt-0 !w-40"
notClearable={true}
onSelect={handleSelect}
defaultValue={'2'}
defaultValue="2"
/>
)
}

View File

@@ -1,7 +1,7 @@
import React from 'react'
import * as React from 'react'
import ApikeyInfoPanel from '@/app/components/app/overview/apikey-info-panel'
import ChartView from './chart-view'
import TracingPanel from './tracing/panel'
import ApikeyInfoPanel from '@/app/components/app/overview/apikey-info-panel'
export type IDevelopProps = {
params: Promise<{ appId: string }>

View File

@@ -1,15 +1,16 @@
'use client'
import { RiCalendarLine } from '@remixicon/react'
import type { Dayjs } from 'dayjs'
import type { FC } from 'react'
import React, { useCallback } from 'react'
import type { TriggerProps } from '@/app/components/base/date-and-time-picker/types'
import { RiCalendarLine } from '@remixicon/react'
import dayjs from 'dayjs'
import { noop } from 'lodash-es'
import * as React from 'react'
import { useCallback } from 'react'
import Picker from '@/app/components/base/date-and-time-picker/date-picker'
import { useI18N } from '@/context/i18n'
import { cn } from '@/utils/classnames'
import { formatToLocalTime } from '@/utils/format'
import { useI18N } from '@/context/i18n'
import Picker from '@/app/components/base/date-and-time-picker/date-picker'
import type { TriggerProps } from '@/app/components/base/date-and-time-picker/types'
import { noop } from 'lodash-es'
import dayjs from 'dayjs'
type Props = {
start: Dayjs
@@ -50,9 +51,9 @@ const DatePicker: FC<Props> = ({
}, [availableEndDate, start])
return (
<div className='flex h-8 items-center space-x-0.5 rounded-lg bg-components-input-bg-normal px-2'>
<div className='p-px'>
<RiCalendarLine className='size-3.5 text-text-tertiary' />
<div className="flex h-8 items-center space-x-0.5 rounded-lg bg-components-input-bg-normal px-2">
<div className="p-px">
<RiCalendarLine className="size-3.5 text-text-tertiary" />
</div>
<Picker
value={start}
@@ -63,7 +64,7 @@
noConfirm
getIsDateDisabled={startDateDisabled}
/>
<span className='system-sm-regular text-text-tertiary'>-</span>
<span className="system-sm-regular text-text-tertiary">-</span>
<Picker
value={end}
onChange={onEndChange}

Some files were not shown because too many files have changed in this diff.