= ({
return (
{showTypeSwitch && (
-
+
)}
{isString && (
= ({
placeholder={placeholder?.[language] || placeholder?.en_US}
/>
)}
- {isBoolean && (
+ {isBoolean && isConstant && (
Date: Wed, 3 Sep 2025 16:22:13 +0800
Subject: [PATCH 07/15] Fix advanced chat workflow event handler signature
mismatch (#25078)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
---
Makefile | 2 +-
api/core/app/apps/advanced_chat/generate_task_pipeline.py | 2 +-
api/extensions/storage/clickzetta_volume/file_lifecycle.py | 5 +++--
.../storage/clickzetta_volume/volume_permissions.py | 3 ++-
4 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/Makefile b/Makefile
index 388c367fdf..d82f6f24ad 100644
--- a/Makefile
+++ b/Makefile
@@ -30,7 +30,7 @@ prepare-web:
prepare-api:
@echo "🔧 Setting up API environment..."
@cp -n api/.env.example api/.env 2>/dev/null || echo "API .env already exists"
- @cd api && uv sync --dev --extra all
+ @cd api && uv sync --dev
@cd api && uv run flask db upgrade
@echo "✅ API environment prepared (not started)"
diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index 2f0e4ef319..750e13c502 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -310,7 +310,7 @@ class AdvancedChatAppGenerateTaskPipeline:
err = self._base_task_pipeline._handle_error(event=event, session=session, message_id=self._message_id)
yield self._base_task_pipeline._error_to_stream_response(err)
- def _handle_workflow_started_event(self, **kwargs) -> Generator[StreamResponse, None, None]:
+ def _handle_workflow_started_event(self, *args, **kwargs) -> Generator[StreamResponse, None, None]:
"""Handle workflow started events."""
with self._database_session() as session:
workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start()
diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py
index 29210dd0f0..f5d6fd6f22 100644
--- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py
+++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py
@@ -1,7 +1,8 @@
"""ClickZetta Volume file lifecycle management
-This module provides file lifecycle management features including version control, automatic cleanup, backup and restore.
-Supports complete lifecycle management for knowledge base files.
+This module provides file lifecycle management features including version control,
+automatic cleanup, backup and restore. Supports complete lifecycle management for
+knowledge base files.
"""
import json
diff --git a/api/extensions/storage/clickzetta_volume/volume_permissions.py b/api/extensions/storage/clickzetta_volume/volume_permissions.py
index e9503595af..d216790f17 100644
--- a/api/extensions/storage/clickzetta_volume/volume_permissions.py
+++ b/api/extensions/storage/clickzetta_volume/volume_permissions.py
@@ -121,7 +121,8 @@ class VolumePermissionManager:
except Exception:
logger.exception("User Volume permission check failed")
- # For User Volume, if permission check fails, it might be a configuration issue, provide friendlier error message
+ # For User Volume, if permission check fails, it might be a configuration issue,
+ # provide friendlier error message
logger.info("User Volume permission check failed, but permission checking is disabled in this version")
return False
From 67cc70ad6146ae84f78fb96bfc1e8c3f3d1d981e Mon Sep 17 00:00:00 2001
From: zxhlyh
Date: Wed, 3 Sep 2025 18:23:57 +0800
Subject: [PATCH 08/15] fix: model credential name (#25081)
Co-authored-by: hjlarry
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
---
api/core/entities/provider_configuration.py | 16 ++++++++--------
.../model-provider-page/model-modal/index.tsx | 4 ++--
2 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/api/core/entities/provider_configuration.py b/api/core/entities/provider_configuration.py
index b74e081dd4..9119462aca 100644
--- a/api/core/entities/provider_configuration.py
+++ b/api/core/entities/provider_configuration.py
@@ -410,10 +410,9 @@ class ProviderConfiguration(BaseModel):
:return:
"""
with Session(db.engine) as session:
- if credential_name and self._check_provider_credential_name_exists(
- credential_name=credential_name, session=session
- ):
- raise ValueError(f"Credential with name '{credential_name}' already exists.")
+ if credential_name:
+ if self._check_provider_credential_name_exists(credential_name=credential_name, session=session):
+ raise ValueError(f"Credential with name '{credential_name}' already exists.")
else:
credential_name = self._generate_provider_credential_name(session)
@@ -890,10 +889,11 @@ class ProviderConfiguration(BaseModel):
:return:
"""
with Session(db.engine) as session:
- if credential_name and self._check_custom_model_credential_name_exists(
- model=model, model_type=model_type, credential_name=credential_name, session=session
- ):
- raise ValueError(f"Model credential with name '{credential_name}' already exists for {model}.")
+ if credential_name:
+ if self._check_custom_model_credential_name_exists(
+ model=model, model_type=model_type, credential_name=credential_name, session=session
+ ):
+ raise ValueError(f"Model credential with name '{credential_name}' already exists for {model}.")
else:
credential_name = self._generate_custom_model_credential_name(
model=model, model_type=model_type, session=session
diff --git a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
index adf633831b..4ffbc8f191 100644
--- a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx
@@ -115,7 +115,7 @@ const ModelModal: FC = ({
const [selectedCredential, setSelectedCredential] = useState()
const formRef2 = useRef(null)
const isEditMode = !!Object.keys(formValues).filter((key) => {
- return key !== '__model_name' && key !== '__model_type'
+ return key !== '__model_name' && key !== '__model_type' && !!formValues[key]
}).length && isCurrentWorkspaceManager
const handleSave = useCallback(async () => {
@@ -167,7 +167,7 @@ const ModelModal: FC = ({
__authorization_name__,
...rest
} = values
- if (__model_name && __model_type && __authorization_name__) {
+ if (__model_name && __model_type) {
await handleSaveCredential({
credential_id: credential?.credential_id,
credentials: rest,
From d011ddfc643a49369e5f1021d7a45e56b97eeb33 Mon Sep 17 00:00:00 2001
From: Stream
Date: Wed, 3 Sep 2025 18:54:07 +0800
Subject: [PATCH 09/15] chore(version): bump version to 1.8.1 (#25060)
---
api/pyproject.toml | 2 +-
api/uv.lock | 2 +-
docker/docker-compose-template.yaml | 2 +-
docker/docker-compose.yaml | 2 +-
web/package.json | 2 +-
5 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/api/pyproject.toml b/api/pyproject.toml
index d6f74fc510..a0c108cd2c 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
-version = "1.8.0"
+version = "1.8.1"
requires-python = ">=3.11,<3.13"
dependencies = [
diff --git a/api/uv.lock b/api/uv.lock
index 32254faa8e..7e67a84ce2 100644
--- a/api/uv.lock
+++ b/api/uv.lock
@@ -1260,7 +1260,7 @@ wheels = [
[[package]]
name = "dify-api"
-version = "1.8.0"
+version = "1.8.1"
source = { virtual = "." }
dependencies = [
{ name = "arize-phoenix-otel" },
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index a779999983..e6f76e1fb4 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index bd668be17f..a451a114bc 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -582,7 +582,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
diff --git a/web/package.json b/web/package.json
index e82b41636c..e64d548824 100644
--- a/web/package.json
+++ b/web/package.json
@@ -1,6 +1,6 @@
{
"name": "dify-web",
- "version": "1.8.0",
+ "version": "1.8.1",
"private": true,
"packageManager": "pnpm@10.15.0",
"engines": {
From c7700ac1762a4feccf60211d3dca3e39ec65a83c Mon Sep 17 00:00:00 2001
From: -LAN-
Date: Wed, 3 Sep 2025 20:25:44 +0800
Subject: [PATCH 10/15] chore(docker): bump version (#25092)
Signed-off-by: -LAN-
---
docker/docker-compose-template.yaml | 107 ++++++++++++++++++----------
docker/docker-compose.yaml | 107 ++++++++++++++++++----------
2 files changed, 140 insertions(+), 74 deletions(-)
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index e6f76e1fb4..b479795c93 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -31,7 +31,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
@@ -58,7 +58,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
@@ -76,7 +76,7 @@ services:
# Frontend web application.
web:
- image: langgenius/dify-web:1.8.0
+ image: langgenius/dify-web:1.8.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -118,7 +118,17 @@ services:
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
healthcheck:
- test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ]
+ test:
+ [
+ "CMD",
+ "pg_isready",
+ "-h",
+ "db",
+ "-U",
+ "${PGUSER:-postgres}",
+ "-d",
+ "${POSTGRES_DB:-dify}",
+ ]
interval: 1s
timeout: 3s
retries: 60
@@ -135,7 +145,11 @@ services:
# Set the redis password when startup redis server.
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
healthcheck:
- test: [ 'CMD-SHELL', 'redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG' ]
+ test:
+ [
+ "CMD-SHELL",
+ "redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG",
+ ]
# The DifySandbox
sandbox:
@@ -157,7 +171,7 @@ services:
- ./volumes/sandbox/dependencies:/dependencies
- ./volumes/sandbox/conf:/conf
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:8194/health' ]
+ test: ["CMD", "curl", "-f", "http://localhost:8194/health"]
networks:
- ssrf_proxy_network
@@ -231,7 +245,12 @@ services:
volumes:
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
- entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
+ entrypoint:
+ [
+ "sh",
+ "-c",
+ "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
+ ]
environment:
# pls clearly modify the squid env vars to fit your network environment.
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
@@ -260,8 +279,8 @@ services:
- CERTBOT_EMAIL=${CERTBOT_EMAIL}
- CERTBOT_DOMAIN=${CERTBOT_DOMAIN}
- CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-}
- entrypoint: [ '/docker-entrypoint.sh' ]
- command: [ 'tail', '-f', '/dev/null' ]
+ entrypoint: ["/docker-entrypoint.sh"]
+ command: ["tail", "-f", "/dev/null"]
# The nginx reverse proxy.
# used for reverse proxying the API service and Web service.
@@ -278,7 +297,12 @@ services:
- ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container)
- ./volumes/certbot/conf:/etc/letsencrypt
- ./volumes/certbot/www:/var/www/html
- entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
+ entrypoint:
+ [
+ "sh",
+ "-c",
+ "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
+ ]
environment:
NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
@@ -300,14 +324,14 @@ services:
- api
- web
ports:
- - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}'
- - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}'
+ - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}"
+ - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}"
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0
profiles:
- - ''
+ - ""
- weaviate
restart: always
volumes:
@@ -360,13 +384,17 @@ services:
working_dir: /opt/couchbase
stdin_open: true
tty: true
- entrypoint: [ "" ]
+ entrypoint: [""]
command: sh -c "/opt/couchbase/init/init-cbserver.sh"
volumes:
- ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data
healthcheck:
# ensure bucket was created before proceeding
- test: [ "CMD-SHELL", "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1" ]
+ test:
+ [
+ "CMD-SHELL",
+ "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1",
+ ]
interval: 10s
retries: 10
start_period: 30s
@@ -392,9 +420,9 @@ services:
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
- ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
- entrypoint: [ '/docker-entrypoint.sh' ]
+ entrypoint: ["/docker-entrypoint.sh"]
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -411,14 +439,14 @@ services:
- VB_USERNAME=dify
- VB_PASSWORD=Difyai123456
ports:
- - '5434:5432'
+ - "5434:5432"
volumes:
- ./vastbase/lic:/home/vastbase/vastbase/lic
- ./vastbase/data:/home/vastbase/data
- ./vastbase/backup:/home/vastbase/backup
- ./vastbase/backup_log:/home/vastbase/backup_log
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -440,7 +468,7 @@ services:
volumes:
- ./volumes/pgvecto_rs/data:/var/lib/postgresql/data
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -479,7 +507,11 @@ services:
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
- test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
+ test:
+ [
+ "CMD-SHELL",
+ 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"',
+ ]
interval: 10s
retries: 30
start_period: 30s
@@ -515,7 +547,7 @@ services:
- ./volumes/milvus/etcd:/etcd
command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
healthcheck:
- test: [ 'CMD', 'etcdctl', 'endpoint', 'health' ]
+ test: ["CMD", "etcdctl", "endpoint", "health"]
interval: 30s
timeout: 20s
retries: 3
@@ -534,7 +566,7 @@ services:
- ./volumes/milvus/minio:/minio_data
command: minio server /minio_data --console-address ":9001"
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live' ]
+ test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
@@ -546,7 +578,7 @@ services:
image: milvusdb/milvus:v2.5.15
profiles:
- milvus
- command: [ 'milvus', 'run', 'standalone' ]
+ command: ["milvus", "run", "standalone"]
environment:
ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
@@ -554,7 +586,7 @@ services:
volumes:
- ./volumes/milvus/milvus:/var/lib/milvus
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:9091/healthz' ]
+ test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"]
interval: 30s
start_period: 90s
timeout: 20s
@@ -620,7 +652,7 @@ services:
volumes:
- ./volumes/opengauss/data:/var/lib/opengauss/data
healthcheck:
- test: [ "CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1" ]
+ test: ["CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1"]
interval: 10s
timeout: 10s
retries: 10
@@ -673,18 +705,19 @@ services:
node.name: dify-es0
discovery.type: single-node
xpack.license.self_generated.type: basic
- xpack.security.enabled: 'true'
- xpack.security.enrollment.enabled: 'false'
- xpack.security.http.ssl.enabled: 'false'
+ xpack.security.enabled: "true"
+ xpack.security.enrollment.enabled: "false"
+ xpack.security.http.ssl.enabled: "false"
ports:
- ${ELASTICSEARCH_PORT:-9200}:9200
deploy:
resources:
limits:
memory: 2g
- entrypoint: [ 'sh', '-c', "sh /docker-entrypoint-mount.sh" ]
+ entrypoint: ["sh", "-c", "sh /docker-entrypoint-mount.sh"]
healthcheck:
- test: [ 'CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty' ]
+ test:
+ ["CMD", "curl", "-s", "http://localhost:9200/_cluster/health?pretty"]
interval: 30s
timeout: 10s
retries: 50
@@ -702,17 +735,17 @@ services:
environment:
XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa
NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana
- XPACK_SECURITY_ENABLED: 'true'
- XPACK_SECURITY_ENROLLMENT_ENABLED: 'false'
- XPACK_SECURITY_HTTP_SSL_ENABLED: 'false'
- XPACK_FLEET_ISAIRGAPPED: 'true'
+ XPACK_SECURITY_ENABLED: "true"
+ XPACK_SECURITY_ENROLLMENT_ENABLED: "false"
+ XPACK_SECURITY_HTTP_SSL_ENABLED: "false"
+ XPACK_FLEET_ISAIRGAPPED: "true"
I18N_LOCALE: zh-CN
- SERVER_PORT: '5601'
+ SERVER_PORT: "5601"
ELASTICSEARCH_HOSTS: http://elasticsearch:9200
ports:
- ${KIBANA_PORT:-5601}:5601
healthcheck:
- test: [ 'CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1' ]
+ test: ["CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1"]
interval: 30s
timeout: 10s
retries: 3
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index a451a114bc..9774df3df5 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -611,7 +611,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
@@ -638,7 +638,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
- image: langgenius/dify-api:1.8.0
+ image: langgenius/dify-api:1.8.1
restart: always
environment:
# Use the shared environment variables.
@@ -656,7 +656,7 @@ services:
# Frontend web application.
web:
- image: langgenius/dify-web:1.8.0
+ image: langgenius/dify-web:1.8.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -698,7 +698,17 @@ services:
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
healthcheck:
- test: [ 'CMD', 'pg_isready', '-h', 'db', '-U', '${PGUSER:-postgres}', '-d', '${POSTGRES_DB:-dify}' ]
+ test:
+ [
+ "CMD",
+ "pg_isready",
+ "-h",
+ "db",
+ "-U",
+ "${PGUSER:-postgres}",
+ "-d",
+ "${POSTGRES_DB:-dify}",
+ ]
interval: 1s
timeout: 3s
retries: 60
@@ -715,7 +725,11 @@ services:
# Set the redis password when startup redis server.
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
healthcheck:
- test: [ 'CMD-SHELL', 'redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG' ]
+ test:
+ [
+ "CMD-SHELL",
+ "redis-cli -a ${REDIS_PASSWORD:-difyai123456} ping | grep -q PONG",
+ ]
# The DifySandbox
sandbox:
@@ -737,7 +751,7 @@ services:
- ./volumes/sandbox/dependencies:/dependencies
- ./volumes/sandbox/conf:/conf
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:8194/health' ]
+ test: ["CMD", "curl", "-f", "http://localhost:8194/health"]
networks:
- ssrf_proxy_network
@@ -811,7 +825,12 @@ services:
volumes:
- ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template
- ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh
- entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
+ entrypoint:
+ [
+ "sh",
+ "-c",
+ "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
+ ]
environment:
# pls clearly modify the squid env vars to fit your network environment.
HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
@@ -840,8 +859,8 @@ services:
- CERTBOT_EMAIL=${CERTBOT_EMAIL}
- CERTBOT_DOMAIN=${CERTBOT_DOMAIN}
- CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-}
- entrypoint: [ '/docker-entrypoint.sh' ]
- command: [ 'tail', '-f', '/dev/null' ]
+ entrypoint: ["/docker-entrypoint.sh"]
+ command: ["tail", "-f", "/dev/null"]
# The nginx reverse proxy.
# used for reverse proxying the API service and Web service.
@@ -858,7 +877,12 @@ services:
- ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container)
- ./volumes/certbot/conf:/etc/letsencrypt
- ./volumes/certbot/www:/var/www/html
- entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ]
+ entrypoint:
+ [
+ "sh",
+ "-c",
+ "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh",
+ ]
environment:
NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
@@ -880,14 +904,14 @@ services:
- api
- web
ports:
- - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}'
- - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}'
+ - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}"
+ - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}"
# The Weaviate vector store.
weaviate:
image: semitechnologies/weaviate:1.19.0
profiles:
- - ''
+ - ""
- weaviate
restart: always
volumes:
@@ -940,13 +964,17 @@ services:
working_dir: /opt/couchbase
stdin_open: true
tty: true
- entrypoint: [ "" ]
+ entrypoint: [""]
command: sh -c "/opt/couchbase/init/init-cbserver.sh"
volumes:
- ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data
healthcheck:
# ensure bucket was created before proceeding
- test: [ "CMD-SHELL", "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1" ]
+ test:
+ [
+ "CMD-SHELL",
+ "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1",
+ ]
interval: 10s
retries: 10
start_period: 30s
@@ -972,9 +1000,9 @@ services:
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
- ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
- entrypoint: [ '/docker-entrypoint.sh' ]
+ entrypoint: ["/docker-entrypoint.sh"]
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -991,14 +1019,14 @@ services:
- VB_USERNAME=dify
- VB_PASSWORD=Difyai123456
ports:
- - '5434:5432'
+ - "5434:5432"
volumes:
- ./vastbase/lic:/home/vastbase/vastbase/lic
- ./vastbase/data:/home/vastbase/data
- ./vastbase/backup:/home/vastbase/backup
- ./vastbase/backup_log:/home/vastbase/backup_log
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -1020,7 +1048,7 @@ services:
volumes:
- ./volumes/pgvecto_rs/data:/var/lib/postgresql/data
healthcheck:
- test: [ 'CMD', 'pg_isready' ]
+ test: ["CMD", "pg_isready"]
interval: 1s
timeout: 3s
retries: 30
@@ -1059,7 +1087,11 @@ services:
ports:
- "${OCEANBASE_VECTOR_PORT:-2881}:2881"
healthcheck:
- test: [ 'CMD-SHELL', 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"' ]
+ test:
+ [
+ "CMD-SHELL",
+ 'obclient -h127.0.0.1 -P2881 -uroot@test -p$${OB_TENANT_PASSWORD} -e "SELECT 1;"',
+ ]
interval: 10s
retries: 30
start_period: 30s
@@ -1095,7 +1127,7 @@ services:
- ./volumes/milvus/etcd:/etcd
command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd
healthcheck:
- test: [ 'CMD', 'etcdctl', 'endpoint', 'health' ]
+ test: ["CMD", "etcdctl", "endpoint", "health"]
interval: 30s
timeout: 20s
retries: 3
@@ -1114,7 +1146,7 @@ services:
- ./volumes/milvus/minio:/minio_data
command: minio server /minio_data --console-address ":9001"
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live' ]
+ test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
@@ -1126,7 +1158,7 @@ services:
image: milvusdb/milvus:v2.5.15
profiles:
- milvus
- command: [ 'milvus', 'run', 'standalone' ]
+ command: ["milvus", "run", "standalone"]
environment:
ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
@@ -1134,7 +1166,7 @@ services:
volumes:
- ./volumes/milvus/milvus:/var/lib/milvus
healthcheck:
- test: [ 'CMD', 'curl', '-f', 'http://localhost:9091/healthz' ]
+ test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"]
interval: 30s
start_period: 90s
timeout: 20s
@@ -1200,7 +1232,7 @@ services:
volumes:
- ./volumes/opengauss/data:/var/lib/opengauss/data
healthcheck:
- test: [ "CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1" ]
+ test: ["CMD-SHELL", "netstat -lntp | grep tcp6 > /dev/null 2>&1"]
interval: 10s
timeout: 10s
retries: 10
@@ -1253,18 +1285,19 @@ services:
node.name: dify-es0
discovery.type: single-node
xpack.license.self_generated.type: basic
- xpack.security.enabled: 'true'
- xpack.security.enrollment.enabled: 'false'
- xpack.security.http.ssl.enabled: 'false'
+ xpack.security.enabled: "true"
+ xpack.security.enrollment.enabled: "false"
+ xpack.security.http.ssl.enabled: "false"
ports:
- ${ELASTICSEARCH_PORT:-9200}:9200
deploy:
resources:
limits:
memory: 2g
- entrypoint: [ 'sh', '-c', "sh /docker-entrypoint-mount.sh" ]
+ entrypoint: ["sh", "-c", "sh /docker-entrypoint-mount.sh"]
healthcheck:
- test: [ 'CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty' ]
+ test:
+ ["CMD", "curl", "-s", "http://localhost:9200/_cluster/health?pretty"]
interval: 30s
timeout: 10s
retries: 50
@@ -1282,17 +1315,17 @@ services:
environment:
XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa
NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana
- XPACK_SECURITY_ENABLED: 'true'
- XPACK_SECURITY_ENROLLMENT_ENABLED: 'false'
- XPACK_SECURITY_HTTP_SSL_ENABLED: 'false'
- XPACK_FLEET_ISAIRGAPPED: 'true'
+ XPACK_SECURITY_ENABLED: "true"
+ XPACK_SECURITY_ENROLLMENT_ENABLED: "false"
+ XPACK_SECURITY_HTTP_SSL_ENABLED: "false"
+ XPACK_FLEET_ISAIRGAPPED: "true"
I18N_LOCALE: zh-CN
- SERVER_PORT: '5601'
+ SERVER_PORT: "5601"
ELASTICSEARCH_HOSTS: http://elasticsearch:9200
ports:
- ${KIBANA_PORT:-5601}:5601
healthcheck:
- test: [ 'CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1' ]
+ test: ["CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1"]
interval: 30s
timeout: 10s
retries: 3
From ff7a0e3170492222954843f6ffa31731d538eb7b Mon Sep 17 00:00:00 2001
From: GuanMu
Date: Wed, 3 Sep 2025 22:24:45 +0800
Subject: [PATCH 11/15] fix: improve error logging for vector search operation
in MyScale (#25087)
---
api/core/rag/datasource/vdb/myscale/myscale_vector.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
index 99f766a88a..d048f3b34e 100644
--- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py
+++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py
@@ -152,8 +152,8 @@ class MyScaleVector(BaseVector):
)
for r in self._client.query(sql).named_results()
]
- except Exception as e:
- logger.exception("\033[91m\033[1m%s\033[0m \033[95m%s\033[0m", type(e), str(e)) # noqa:TRY401
+ except Exception:
+ logger.exception("Vector search operation failed")
return []
def delete(self) -> None:
From db53656a45fdb13447a410b9b4609d991013d89d Mon Sep 17 00:00:00 2001
From: Yongtao Huang
Date: Wed, 3 Sep 2025 22:27:41 +0800
Subject: [PATCH 12/15] Fix jsonschema compliance: use number instead of float
(#25049)
Signed-off-by: Yongtao Huang
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
---
api/core/mcp/server/streamable_http.py | 2 +-
.../core/mcp/server/test_streamable_http.py | 65 ++++++++++++++++++-
2 files changed, 65 insertions(+), 2 deletions(-)
diff --git a/api/core/mcp/server/streamable_http.py b/api/core/mcp/server/streamable_http.py
index 5851c6d406..3d51ac2333 100644
--- a/api/core/mcp/server/streamable_http.py
+++ b/api/core/mcp/server/streamable_http.py
@@ -258,5 +258,5 @@ def convert_input_form_to_parameters(
parameters[item.variable]["type"] = "string"
parameters[item.variable]["enum"] = item.options
elif item.type == VariableEntityType.NUMBER:
- parameters[item.variable]["type"] = "float"
+ parameters[item.variable]["type"] = "number"
return parameters, required
diff --git a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
index ccc5d42bcf..f1d741602a 100644
--- a/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
+++ b/api/tests/unit_tests/core/mcp/server/test_streamable_http.py
@@ -1,6 +1,7 @@
import json
from unittest.mock import Mock, patch
+import jsonschema
import pytest
from core.app.app_config.entities import VariableEntity, VariableEntityType
@@ -434,7 +435,7 @@ class TestUtilityFunctions:
assert parameters["category"]["enum"] == ["A", "B", "C"]
assert "count" in parameters
- assert parameters["count"]["type"] == "float"
+ assert parameters["count"]["type"] == "number"
# FILE type should be skipped - it creates empty dict but gets filtered later
# Check that it doesn't have any meaningful content
@@ -447,3 +448,65 @@ class TestUtilityFunctions:
assert "category" not in required
# Note: _get_request_id function has been removed as request_id is now passed as parameter
+
+ def test_convert_input_form_to_parameters_jsonschema_validation_ok(self):
+ """Current schema uses 'number' for numeric fields; it should be a valid JSON Schema."""
+ user_input_form = [
+ VariableEntity(
+ type=VariableEntityType.NUMBER,
+ variable="count",
+ description="Count",
+ label="Count",
+ required=True,
+ ),
+ VariableEntity(
+ type=VariableEntityType.TEXT_INPUT,
+ variable="name",
+ description="User name",
+ label="Name",
+ required=False,
+ ),
+ ]
+
+ parameters_dict = {
+ "count": "Enter count",
+ "name": "Enter your name",
+ }
+
+ parameters, required = convert_input_form_to_parameters(user_input_form, parameters_dict)
+
+ # Build a complete JSON Schema
+ schema = {
+ "type": "object",
+ "properties": parameters,
+ "required": required,
+ }
+
+ # 1) The schema itself must be valid
+ jsonschema.Draft202012Validator.check_schema(schema)
+
+ # 2) Both float and integer instances should pass validation
+ jsonschema.validate(instance={"count": 3.14, "name": "alice"}, schema=schema)
+ jsonschema.validate(instance={"count": 2, "name": "bob"}, schema=schema)
+
+ def test_legacy_float_type_schema_is_invalid(self):
+ """Legacy/buggy behavior: using 'float' should produce an invalid JSON Schema."""
+ # Manually construct a legacy/incorrect schema (simulating old behavior)
+ bad_schema = {
+ "type": "object",
+ "properties": {
+ "count": {
+ "type": "float", # Invalid type: JSON Schema does not support 'float'
+ "description": "Enter count",
+ }
+ },
+ "required": ["count"],
+ }
+
+ # The schema itself should raise a SchemaError
+ with pytest.raises(jsonschema.exceptions.SchemaError):
+ jsonschema.Draft202012Validator.check_schema(bad_schema)
+
+ # Or validation should also raise SchemaError
+ with pytest.raises(jsonschema.exceptions.SchemaError):
+ jsonschema.validate(instance={"count": 1.23}, schema=bad_schema)
From aae792a9dd043f1d50a390816302efb61fb4cd3f Mon Sep 17 00:00:00 2001
From: 17hz <0x149527@gmail.com>
Date: Wed, 3 Sep 2025 22:28:03 +0800
Subject: [PATCH 13/15] chore: Updated pnpm version to 10.15.1 (#25065)
---
web/package.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/web/package.json b/web/package.json
index e64d548824..c736a37281 100644
--- a/web/package.json
+++ b/web/package.json
@@ -2,7 +2,7 @@
"name": "dify-web",
"version": "1.8.1",
"private": true,
- "packageManager": "pnpm@10.15.0",
+ "packageManager": "pnpm@10.15.1",
"engines": {
"node": ">=v22.11.0"
},
From a9c7669c16b02df2617e0477e3e1f4e5552c61ab Mon Sep 17 00:00:00 2001
From: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
Date: Wed, 3 Sep 2025 22:29:08 +0800
Subject: [PATCH 14/15] chore: comply to RFC 6750 and improve bearer token
split (#24955)
---
api/controllers/console/auth/oauth_server.py | 29 +++++++++++++++-----
1 file changed, 22 insertions(+), 7 deletions(-)
diff --git a/api/controllers/console/auth/oauth_server.py b/api/controllers/console/auth/oauth_server.py
index f730cfa3fe..a8ba417847 100644
--- a/api/controllers/console/auth/oauth_server.py
+++ b/api/controllers/console/auth/oauth_server.py
@@ -2,7 +2,7 @@ from functools import wraps
from typing import cast
import flask_login
-from flask import request
+from flask import jsonify, request
from flask_restx import Resource, reqparse
from werkzeug.exceptions import BadRequest, NotFound
@@ -46,23 +46,38 @@ def oauth_server_access_token_required(view):
authorization_header = request.headers.get("Authorization")
if not authorization_header:
- raise BadRequest("Authorization header is required")
+ response = jsonify({"error": "Authorization header is required"})
+ response.status_code = 401
+ response.headers["WWW-Authenticate"] = "Bearer"
+ return response
- parts = authorization_header.strip().split(" ")
+ parts = authorization_header.strip().split(None, 1)
if len(parts) != 2:
- raise BadRequest("Invalid Authorization header format")
+ response = jsonify({"error": "Invalid Authorization header format"})
+ response.status_code = 401
+ response.headers["WWW-Authenticate"] = "Bearer"
+ return response
token_type = parts[0].strip()
if token_type.lower() != "bearer":
- raise BadRequest("token_type is invalid")
+ response = jsonify({"error": "token_type is invalid"})
+ response.status_code = 401
+ response.headers["WWW-Authenticate"] = "Bearer"
+ return response
access_token = parts[1].strip()
if not access_token:
- raise BadRequest("access_token is required")
+ response = jsonify({"error": "access_token is required"})
+ response.status_code = 401
+ response.headers["WWW-Authenticate"] = "Bearer"
+ return response
account = OAuthServerService.validate_oauth_access_token(oauth_provider_app.client_id, access_token)
if not account:
- raise BadRequest("access_token or client_id is invalid")
+ response = jsonify({"error": "access_token or client_id is invalid"})
+ response.status_code = 401
+ response.headers["WWW-Authenticate"] = "Bearer"
+ return response
kwargs["account"] = account
From 56afb3fd64401a2ff30832f37104b89e52d4aeaf Mon Sep 17 00:00:00 2001
From: zz_xu <591933870@qq.com>
Date: Wed, 3 Sep 2025 22:44:22 +0800
Subject: [PATCH 15/15] fix: enable SQLAlchemy pool pre-ping to avoid db
 internal server errors from stale connections (#24947)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
---
api/.env.example | 1 +
1 file changed, 1 insertion(+)
diff --git a/api/.env.example b/api/.env.example
index e947c5584b..eb88c114e6 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -75,6 +75,7 @@ DB_PASSWORD=difyai123456
DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
+SQLALCHEMY_POOL_PRE_PING=true
# Storage configuration
# use for store upload files, private keys...