diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml
index bd47abc710..a08e7aacae 100644
--- a/.github/workflows/api-tests.yml
+++ b/.github/workflows/api-tests.yml
@@ -99,7 +99,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env
 
       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
diff --git a/.github/workflows/db-migration-test.yml b/.github/workflows/db-migration-test.yml
index 65f0149a74..9d3ccb34b2 100644
--- a/.github/workflows/db-migration-test.yml
+++ b/.github/workflows/db-migration-test.yml
@@ -37,7 +37,7 @@ jobs:
       - name: Prepare middleware env
         run: |
           cd docker
-          cp middleware.env.example middleware.env
+          cp envs/middleware.env.example middleware.env
 
       - name: Set up Middlewares
         uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
@@ -87,7 +87,7 @@ jobs:
       - name: Prepare middleware env for MySQL
         run: |
           cd docker
-          cp middleware.env.example middleware.env
+          cp envs/middleware.env.example middleware.env
           sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
           sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
           sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
diff --git a/.github/workflows/main-ci.yml b/.github/workflows/main-ci.yml
index 8071d6204d..f624e8f872 100644
--- a/.github/workflows/main-ci.yml
+++ b/.github/workflows/main-ci.yml
@@ -57,7 +57,7 @@ jobs:
             - '.github/workflows/api-tests.yml'
             - '.github/workflows/expose_service_ports.sh'
             - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
             - 'docker/docker-compose.middleware.yaml'
             - 'docker/docker-compose-template.yaml'
             - 'docker/generate_docker_compose'
@@ -84,7 +84,7 @@ jobs:
             - 'pnpm-workspace.yaml'
             - '.nvmrc'
            - 'docker/docker-compose.middleware.yaml'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
             - '.github/workflows/web-e2e.yml'
             - '.github/actions/setup-web/**'
           vdb:
@@ -94,7 +94,7 @@ jobs:
            - '.github/workflows/vdb-tests.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - 'docker/docker-compose.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
@@ -116,7 +116,7 @@ jobs:
            - '.github/workflows/db-migration-test.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
diff --git a/.github/workflows/vdb-tests-full.yml b/.github/workflows/vdb-tests-full.yml
index 5c241af5c5..1405eb4eeb 100644
--- a/.github/workflows/vdb-tests-full.yml
+++ b/.github/workflows/vdb-tests-full.yml
@@ -51,7 +51,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env
 
       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
diff --git a/.github/workflows/vdb-tests.yml b/.github/workflows/vdb-tests.yml
index 38ec96f00f..cdcdcb27d7 100644
--- a/.github/workflows/vdb-tests.yml
+++ b/.github/workflows/vdb-tests.yml
@@ -48,7 +48,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env
 
       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
diff --git a/README.md b/README.md
index e6f8d84931..b6cbb0e126 100644
--- a/README.md
+++ b/README.md
@@ -76,11 +76,10 @@ The easiest way to start the Dify server is through [Docker Compose](docker/dock
 ```bash
 cd dify
 cd docker
-./dify-compose up -d
+cp .env.example .env
+docker compose up -d
 ```
 
-On Windows PowerShell, run `.\dify-compose.ps1 up -d` from the `docker` directory.
-
 After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
 
 #### Seeking help
@@ -138,7 +137,7 @@ Star Dify on GitHub and be instantly notified of new releases.
 
 ### Custom configurations
 
-If you need to customize the configuration, add only the values you want to override to `docker/.env`. The default values live in [`docker/.env.default`](docker/.env.default), and the full reference remains in [`docker/.env.example`](docker/.env.example). After making any changes, re-run `./dify-compose up -d` or `.\dify-compose.ps1 up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
+If you need to customize the configuration, edit `docker/.env`. The essential startup defaults live in [`docker/.env.example`](docker/.env.example), and optional advanced variables are split under `docker/envs/` by theme. After making any changes, re-run `docker compose up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
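+
+For example, to raise the API log verbosity (a minimal sketch; in dotenv files, later assignments take precedence over earlier ones):
+
+```bash
+cd docker
+echo 'LOG_LEVEL=DEBUG' >> .env
+docker compose up -d
+```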
 
 ### Metrics Monitoring with Grafana
diff --git a/dev/pytest/pytest_config_tests.py b/dev/pytest/pytest_config_tests.py
index d56cceff5e..b136f09c61 100644
--- a/dev/pytest/pytest_config_tests.py
+++ b/dev/pytest/pytest_config_tests.py
@@ -93,10 +93,16 @@ BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(
 API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
 DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())
-DOCKER_COMPOSE_CONFIG_SET = set()
+DOCKER_COMPOSE_CONFIG_SET = set(DOCKER_CONFIG_SET)
 
-with open(Path("docker") / Path("docker-compose.yaml")) as f:
-    DOCKER_COMPOSE_CONFIG_SET = set(yaml.safe_load(f.read())["x-shared-env"].keys())
+# Read environment variables from the split env files used by docker-compose
+# Walk through all .env.example files in subdirectories (per-module structure)
+envs_dir = Path("docker") / Path("envs")
+if envs_dir.exists():
+    for env_file_path in envs_dir.rglob("*.env.example"):
+        env_keys = set(dotenv_values(env_file_path).keys())
+        DOCKER_CONFIG_SET.update(env_keys)
+        DOCKER_COMPOSE_CONFIG_SET.update(env_keys)
 
 
 def test_yaml_config():
diff --git a/docker/.env.default b/docker/.env.default
deleted file mode 100644
index 6f6683b9f5..0000000000
--- a/docker/.env.default
+++ /dev/null
@@ -1,51 +0,0 @@
-# ------------------------------------------------------------------
-# Minimal defaults for Docker Compose deployments.
-#
-# Keep local changes in .env. Use .env.example as the full reference
-# for advanced and service-specific settings.
-# ------------------------------------------------------------------
-
-# Public URLs used when Dify generates links. Change these together when
-# exposing Dify under another hostname, IP address, or port.
-CONSOLE_WEB_URL=http://localhost
-SERVICE_API_URL=http://localhost
-APP_WEB_URL=http://localhost
-FILES_URL=http://localhost
-INTERNAL_FILES_URL=http://api:5001
-TRIGGER_URL=http://localhost
-ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
-NEXT_PUBLIC_SOCKET_URL=ws://localhost
-EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
-EXPOSE_PLUGIN_DEBUGGING_PORT=5003
-
-# Built-in metadata database defaults.
-DB_TYPE=postgresql
-DB_USERNAME=postgres
-DB_PASSWORD=difyai123456
-DB_HOST=db_postgres
-DB_PORT=5432
-DB_DATABASE=dify
-
-# Built-in Redis defaults.
-REDIS_HOST=redis
-REDIS_PORT=6379
-REDIS_PASSWORD=difyai123456
-
-# Default file storage.
-STORAGE_TYPE=opendal
-OPENDAL_SCHEME=fs
-OPENDAL_FS_ROOT=storage
-
-# Default vector database.
-VECTOR_STORE=weaviate
-
-# Internal service authentication. Paired values must match.
-PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
-PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
-
-# Host ports.
-EXPOSE_NGINX_PORT=80
-EXPOSE_NGINX_SSL_PORT=443
-
-# Docker Compose profiles for bundled services.
-COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
diff --git a/docker/.env.example b/docker/.env.example
index ad2c96718a..82bd837ffb 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -1,1239 +1,157 @@
-# ------------------------------
-# Environment Variables for API service & worker
-# ------------------------------
+# ------------------------------------------------------------------
+# Essential defaults for Docker Compose deployments.
+#
+# For a default deployment, copy this file to .env and run:
+#   docker compose up -d
+#
+# Optional and provider-specific variables live under docker/envs/.
+# To enable one of those files, copy it next to the original, dropping
+# the .example suffix, when you need those advanced settings.
+# Values in docker/.env take precedence over docker/envs/*.env files.
+# ------------------------------------------------------------------
 
-# ------------------------------
-# Common Variables
-# ------------------------------
-
-# The backend URL of the console API,
-# used to concatenate the authorization callback.
-# If empty, it is the same domain.
-# Example: https://api.console.dify.ai
+# Core service URLs
 CONSOLE_API_URL=
-
-# The front-end URL of the console web,
-# used to concatenate some front-end addresses and for CORS configuration use.
-# If empty, it is the same domain.
-# Example: https://console.dify.ai
 CONSOLE_WEB_URL=
-
-# Service API Url,
-# used to display Service API Base Url to the front-end.
-# If empty, it is the same domain.
-# Example: https://api.dify.ai
 SERVICE_API_URL=
-
-# Trigger external URL
-# used to display trigger endpoint API Base URL to the front-end.
-# Example: https://api.dify.ai
 TRIGGER_URL=http://localhost
-
-# WebApp API backend Url,
-# used to declare the back-end URL for the front-end API.
-# If empty, it is the same domain.
-# Example: https://api.app.dify.ai
 APP_API_URL=
-
-# WebApp Url,
-# used to display WebAPP API Base Url to the front-end.
-# If empty, it is the same domain.
-# Example: https://app.dify.ai
 APP_WEB_URL=
-
-# File preview or download Url prefix.
-# used to display File preview or download Url to the front-end or as Multi-model inputs;
-# Url is signed and has expiration time.
-# Setting FILES_URL is required for file processing plugins.
-# - For https://example.com, use FILES_URL=https://example.com
-# - For http://example.com, use FILES_URL=http://example.com
-# Recommendation: use a dedicated domain (e.g., https://upload.example.com).
-# Alternatively, use http://:5001 or http://api:5001,
-# ensuring port 5001 is externally accessible (see docker-compose.yaml).
 FILES_URL=
-
-# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
-# Set this to the internal Docker service URL for proper plugin file access.
-# Example: INTERNAL_FILES_URL=http://api:5001
 INTERNAL_FILES_URL=
+ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
+NEXT_PUBLIC_SOCKET_URL=ws://localhost
-
-# Ensure UTF-8 encoding
+
+# Runtime and security
 LANG=C.UTF-8
 LC_ALL=C.UTF-8
 PYTHONIOENCODING=utf-8
-
-# Set UV cache directory to avoid permission issues with non-existent home directory
 UV_CACHE_DIR=/tmp/.uv-cache
-
-# ------------------------------
-# Server Configuration
-# ------------------------------
-
-# The log level for the application.
-# Supported values are `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
-LOG_LEVEL=INFO
-# Log output format: text or json
-LOG_OUTPUT_FORMAT=text
-# Log file path
-LOG_FILE=/app/logs/server.log
-# Log file max size, the unit is MB
-LOG_FILE_MAX_SIZE=20
-# Log file max backup count
-LOG_FILE_BACKUP_COUNT=5
-# Log dateformat
-LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
-# Log Timezone
-LOG_TZ=UTC
-
-# Debug mode, default is false.
-# It is recommended to turn on this configuration for local development
-# to prevent some problems caused by monkey patch.
-DEBUG=false
-
-# Flask debug mode, it can output trace information at the interface when turned on,
-# which is convenient for debugging.
-FLASK_DEBUG=false
-
-# Enable request logging, which will log the request and response information.
-# And the log level is DEBUG
-ENABLE_REQUEST_LOGGING=False
-
-# A secret key that is used for securely signing the session cookie
-# and encrypting sensitive information on the database.
-# You can generate a strong key using `openssl rand -base64 42`.
 SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
-
-# Password for admin user initialization.
-# If left unset, admin user will not be prompted for a password
-# when creating the initial admin account.
-# The length of the password cannot exceed 30 characters.
 INIT_PASSWORD=
-
-# Deployment environment.
-# Supported values are `PRODUCTION`, `TESTING`. Default is `PRODUCTION`.
-# Testing environment. There will be a distinct color label on the front-end page,
-# indicating that this environment is a testing environment.
 DEPLOY_ENV=PRODUCTION
-
-# Whether to enable the version check policy.
-# If set to empty, https://updates.dify.ai will be called for version check.
 CHECK_UPDATE_URL=https://updates.dify.ai
-
-# Used to change the OpenAI base address, default is https://api.openai.com/v1.
-# When OpenAI cannot be accessed in China, replace it with a domestic mirror address,
-# or when a local model provides OpenAI compatible API, it can be replaced.
 OPENAI_API_BASE=https://api.openai.com/v1
-
-# When enabled, migrations will be executed prior to application startup
-# and the application will start after the migrations have completed.
 MIGRATION_ENABLED=true
-
-# File Access Time specifies a time interval in seconds for the file to be accessed.
-# The default value is 300 seconds.
 FILES_ACCESS_TIMEOUT=300
-
-# Collaboration mode toggle
-# To open collaboration features, you also need to set SERVER_WORKER_CLASS=geventwebsocket.gunicorn.workers.GeventWebSocketWorker
 ENABLE_COLLABORATION_MODE=false
-# Access token expiration time in minutes
-ACCESS_TOKEN_EXPIRE_MINUTES=60
-
-# Refresh token expiration time in days
-REFRESH_TOKEN_EXPIRE_DAYS=30
-
-# The default number of active requests for the application, where 0 means unlimited, should be a non-negative integer.
-APP_DEFAULT_ACTIVE_REQUESTS=0
-# The maximum number of active requests for the application, where 0 means unlimited, should be a non-negative integer.
-APP_MAX_ACTIVE_REQUESTS=0
-APP_MAX_EXECUTION_TIME=1200
-
-# ------------------------------
-# Container Startup Related Configuration
-# Only effective when starting with docker image or docker-compose.
-# ------------------------------
-
-# API service binding address, default: 0.0.0.0, i.e., all addresses can be accessed.
+
+# Logging and server workers
+LOG_LEVEL=INFO
+LOG_OUTPUT_FORMAT=text
+LOG_FILE=/app/logs/server.log
+LOG_FILE_MAX_SIZE=20
+LOG_FILE_BACKUP_COUNT=5
+LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
+LOG_TZ=UTC
+DEBUG=false
+FLASK_DEBUG=false
+ENABLE_REQUEST_LOGGING=False
 DIFY_BIND_ADDRESS=0.0.0.0
-
-# API service binding port number, default 5001.
 DIFY_PORT=5001
-
-# The number of API server workers, i.e., the number of workers.
-# Formula: number of cpu cores x 2 + 1 for sync, 1 for Gevent
-# Reference: https://docs.gunicorn.org/en/stable/design.html#how-many-workers
 SERVER_WORKER_AMOUNT=1
-
-# Defaults to gevent. If using windows, it can be switched to sync or solo.
-#
-# Warning: Changing this parameter requires disabling patching for
-# psycopg2 and gRPC (see `gunicorn.conf.py` and `celery_entrypoint.py`).
-# Modifying it may also decrease throughput.
-#
-# It is strongly discouraged to change this parameter.
-# If enable collaboration mode, it must be set to geventwebsocket.gunicorn.workers.GeventWebSocketWorker
 SERVER_WORKER_CLASS=gevent
-
-# Default number of worker connections, the default is 10.
 SERVER_WORKER_CONNECTIONS=10
-
-# Similar to SERVER_WORKER_CLASS.
-# If using windows, it can be switched to sync or solo.
-#
-# Warning: Changing this parameter requires disabling patching for
-# psycopg2 and gRPC (see `gunicorn_conf.py` and `celery_entrypoint.py`).
-# Modifying it may also decrease throughput.
-#
-# It is strongly discouraged to change this parameter.
-CELERY_WORKER_CLASS=
-
-# Request handling timeout. The default is 200,
-# it is recommended to set it to 360 to support a longer sse connection time.
 GUNICORN_TIMEOUT=360
-
-# The number of Celery workers. The default is 4 for development environments
-# to allow parallel processing of workflows, document indexing, and other async tasks.
-# Adjust based on your system resources and workload requirements.
+CELERY_WORKER_CLASS=
 CELERY_WORKER_AMOUNT=4
-
-# Flag indicating whether to enable autoscaling of Celery workers.
-#
-# Autoscaling is useful when tasks are CPU intensive and can be dynamically
-# allocated and deallocated based on the workload.
-#
-# When autoscaling is enabled, the maximum and minimum number of workers can
-# be specified. The autoscaling algorithm will dynamically adjust the number
-# of workers within the specified range.
-#
-# Default is false (i.e., autoscaling is disabled).
-#
-# Example:
-# CELERY_AUTO_SCALE=true
 CELERY_AUTO_SCALE=false
-
-# The maximum number of Celery workers that can be autoscaled.
-# This is optional and only used when autoscaling is enabled.
-# Default is not set.
 CELERY_MAX_WORKERS=
-
-# The minimum number of Celery workers that can be autoscaled.
-# This is optional and only used when autoscaling is enabled.
-# Default is not set.
 CELERY_MIN_WORKERS=
+COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
+COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
+COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
 
-# API Tool configuration
-API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
-API_TOOL_DEFAULT_READ_TIMEOUT=60
-
-# -------------------------------
-# Datasource Configuration
-# --------------------------------
-ENABLE_WEBSITE_JINAREADER=true
-ENABLE_WEBSITE_FIRECRAWL=true
-ENABLE_WEBSITE_WATERCRAWL=true
-
-# Enable inline LaTeX rendering with single dollar signs ($...$) in the web frontend
-# Default is false for security reasons to prevent conflicts with regular text
-NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
-
-# ------------------------------
-# Database Configuration
-# The database uses PostgreSQL or MySQL. OceanBase and seekdb are also supported. Please use the public schema.
-# It is consistent with the configuration in the database service below.
-# You can adjust the database configuration according to your needs.
-# ------------------------------
-
-# Database type, supported values are `postgresql`, `mysql`, `oceanbase`, `seekdb`
+# Database
 DB_TYPE=postgresql
-# For MySQL, only `root` user is supported for now
 DB_USERNAME=postgres
 DB_PASSWORD=difyai123456
 DB_HOST=db_postgres
 DB_PORT=5432
 DB_DATABASE=dify
-
-# The size of the database connection pool.
-# The default is 30 connections, which can be appropriately increased.
 SQLALCHEMY_POOL_SIZE=30
-# The default is 10 connections, which allows temporary overflow beyond the pool size.
 SQLALCHEMY_MAX_OVERFLOW=10
-# Database connection pool recycling time, the default is 3600 seconds.
 SQLALCHEMY_POOL_RECYCLE=3600
-# Whether to print SQL, default is false.
 SQLALCHEMY_ECHO=false
-# If True, will test connections for liveness upon each checkout
 SQLALCHEMY_POOL_PRE_PING=false
-# Whether to enable the Last in first out option or use default FIFO queue if is false
 SQLALCHEMY_POOL_USE_LIFO=false
-# Number of seconds to wait for a connection from the pool before raising a timeout error.
-# Default is 30
 SQLALCHEMY_POOL_TIMEOUT=30
-# Connection pool reset behavior on return
 SQLALCHEMY_POOL_RESET_ON_RETURN=rollback
-
-# Maximum number of connections to the database
-# Default is 100
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-connection.html#GUC-MAX-CONNECTIONS
+PGDATA=/var/lib/postgresql/data/pgdata
 POSTGRES_MAX_CONNECTIONS=200
-
-# Sets the amount of shared memory used for postgres's shared buffers.
-# Default is 128MB
-# Recommended value: 25% of available memory
-# Reference: https://www.postgresql.org/docs/current/runtime-config-resource.html#GUC-SHARED-BUFFERS
 POSTGRES_SHARED_BUFFERS=128MB
-
-# Sets the amount of memory used by each database worker for working space.
-# Default is 4MB
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-resource.html#GUC-WORK-MEM
 POSTGRES_WORK_MEM=4MB
-
-# Sets the amount of memory reserved for maintenance activities.
-# Default is 64MB
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-resource.html#GUC-MAINTENANCE-WORK-MEM
 POSTGRES_MAINTENANCE_WORK_MEM=64MB
-
-# Sets the planner's assumption about the effective cache size.
-# Default is 4096MB
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-query.html#GUC-EFFECTIVE-CACHE-SIZE
 POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
-
-# Sets the maximum allowed duration of any statement before termination.
-# Default is 0 (no timeout).
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-STATEMENT-TIMEOUT
-# A value of 0 prevents the server from timing out statements.
 POSTGRES_STATEMENT_TIMEOUT=0
-
-# Sets the maximum allowed duration of any idle in-transaction session before termination.
-# Default is 0 (no timeout).
-#
-# Reference: https://www.postgresql.org/docs/current/runtime-config-client.html#GUC-IDLE-IN-TRANSACTION-SESSION-TIMEOUT
-# A value of 0 prevents the server from terminating idle sessions.
 POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
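+# Example (a sketch mirroring the overrides used by the db-migration CI job):
+# to run on the bundled MySQL instead of PostgreSQL, change the defaults
+# above to
+#   DB_TYPE=mysql
+#   DB_HOST=db_mysql
+#   DB_PORT=3306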
 
-# MySQL Performance Configuration
-# Maximum number of connections to MySQL
-#
-# Default is 1000
-MYSQL_MAX_CONNECTIONS=1000
-
-# InnoDB buffer pool size
-# Default is 512M
-# Recommended value: 70-80% of available memory for dedicated MySQL server
-# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_buffer_pool_size
-MYSQL_INNODB_BUFFER_POOL_SIZE=512M
-
-# InnoDB log file size
-# Default is 128M
-# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_log_file_size
-MYSQL_INNODB_LOG_FILE_SIZE=128M
-
-# InnoDB flush log at transaction commit
-# Default is 2 (flush to OS cache, sync every second)
-# Options: 0 (no flush), 1 (flush and sync), 2 (flush to OS cache)
-# Reference: https://dev.mysql.com/doc/refman/8.0/en/innodb-parameters.html#sysvar_innodb_flush_log_at_trx_commit
-MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2
-
-# ------------------------------
-# Redis Configuration
-# This Redis configuration is used for caching and for pub/sub during conversation.
-# ------------------------------
-
+# Redis and Celery
 REDIS_HOST=redis
 REDIS_PORT=6379
 REDIS_USERNAME=
 REDIS_PASSWORD=difyai123456
 REDIS_USE_SSL=false
-# SSL configuration for Redis (when REDIS_USE_SSL=true)
 REDIS_SSL_CERT_REQS=CERT_NONE
-# Options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
 REDIS_SSL_CA_CERTS=
-# Path to CA certificate file for SSL verification
 REDIS_SSL_CERTFILE=
-# Path to client certificate file for SSL authentication
 REDIS_SSL_KEYFILE=
-# Path to client private key file for SSL authentication
 REDIS_DB=0
-# Optional global prefix for Redis keys, topics, streams, and Celery Redis transport artifacts.
-# Leave empty to preserve current unprefixed behavior.
 REDIS_KEY_PREFIX=
-# Optional: limit total Redis connections used by API/Worker (unset for default)
-# Align with API's REDIS_MAX_CONNECTIONS in configs
 REDIS_MAX_CONNECTIONS=
-
-# Whether to use Redis Sentinel mode.
-# If set to true, the application will automatically discover and connect to the master node through Sentinel.
-REDIS_USE_SENTINEL=false
-
-# List of Redis Sentinel nodes. If Sentinel mode is enabled, provide at least one Sentinel IP and port.
-# Format: `:,:,:`
-REDIS_SENTINELS=
-REDIS_SENTINEL_SERVICE_NAME=
-REDIS_SENTINEL_USERNAME=
-REDIS_SENTINEL_PASSWORD=
-REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
-
-# List of Redis Cluster nodes. If Cluster mode is enabled, provide at least one Cluster IP and port.
-# Format: `:,:,:`
-REDIS_USE_CLUSTERS=false
-REDIS_CLUSTERS=
-REDIS_CLUSTERS_PASSWORD=
-
-# Redis connection and retry configuration
-# max redis retry
 REDIS_RETRY_RETRIES=3
-# Base delay (in seconds) for exponential backoff on retries
 REDIS_RETRY_BACKOFF_BASE=1.0
-# Cap (in seconds) for exponential backoff on retries
 REDIS_RETRY_BACKOFF_CAP=10.0
-# Timeout (in seconds) for Redis socket operations
 REDIS_SOCKET_TIMEOUT=5.0
-# Timeout (in seconds) for establishing a Redis connection
 REDIS_SOCKET_CONNECT_TIMEOUT=5.0
-# Interval (in seconds) for Redis health checks
 REDIS_HEALTH_CHECK_INTERVAL=30
-
-# ------------------------------
-# Celery Configuration
-# ------------------------------
-
-# Use standalone redis as the broker, and redis db 1 for celery broker. (redis_username is usually set by default as empty)
-# Format as follows: `redis://:@:/`.
-# Example: redis://:difyai123456@redis:6379/1
-# If use Redis Sentinel, format as follows: `sentinel://:@:/`
-# For high availability, you can configure multiple Sentinel nodes (if provided) separated by semicolons like below example:
-# Example: sentinel://:difyai123456@localhost:26379/1;sentinel://:difyai12345@localhost:26379/1;sentinel://:difyai12345@localhost:26379/1
 CELERY_BROKER_URL=redis://:difyai123456@redis:6379/1
 CELERY_BACKEND=redis
 BROKER_USE_SSL=false
-
-# If you are using Redis Sentinel for high availability, configure the following settings.
-CELERY_USE_SENTINEL=false
-CELERY_SENTINEL_MASTER_NAME=
-CELERY_SENTINEL_PASSWORD=
-CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
-# e.g. {"tasks.add": {"rate_limit": "10/s"}}
 CELERY_TASK_ANNOTATIONS=null
+EVENT_BUS_REDIS_URL=
+EVENT_BUS_REDIS_CHANNEL_TYPE=pubsub
+EVENT_BUS_REDIS_USE_CLUSTERS=false
 
-# ------------------------------
-# CORS Configuration
-# Used to set the front-end cross-domain access policy.
-# ------------------------------
-
-# Specifies the allowed origins for cross-origin requests to the Web API,
-# e.g. https://dify.app or * for all origins.
+# Web and app limits
 WEB_API_CORS_ALLOW_ORIGINS=*
-
-# Specifies the allowed origins for cross-origin requests to the console API,
-# e.g. https://cloud.dify.ai or * for all origins.
 CONSOLE_CORS_ALLOW_ORIGINS=*
-# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
 COOKIE_DOMAIN=
-# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
 NEXT_PUBLIC_COOKIE_DOMAIN=
-# WebSocket server URL.
-NEXT_PUBLIC_SOCKET_URL=ws://localhost
 NEXT_PUBLIC_BATCH_CONCURRENCY=5
+API_SENTRY_DSN=
+API_SENTRY_TRACES_SAMPLE_RATE=1.0
+API_SENTRY_PROFILES_SAMPLE_RATE=1.0
+WEB_SENTRY_DSN=
+AMPLITUDE_API_KEY=
+TEXT_GENERATION_TIMEOUT_MS=60000
+CSP_WHITELIST=
+ALLOW_EMBED=false
+ALLOW_INLINE_STYLES=false
+ALLOW_UNSAFE_DATA_SCHEME=false
+TOP_K_MAX_VALUE=10
+INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
+LOOP_NODE_MAX_COUNT=100
+MAX_TOOLS_NUM=10
+MAX_PARALLEL_LIMIT=10
+MAX_ITERATIONS_NUM=99
+MAX_TREE_DEPTH=50
+ENABLE_WEBSITE_JINAREADER=true
+ENABLE_WEBSITE_FIRECRAWL=true
+ENABLE_WEBSITE_WATERCRAWL=true
+NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
+EXPERIMENTAL_ENABLE_VINEXT=false
 
-# ------------------------------
-# File Storage Configuration
-# ------------------------------
-
-# The type of storage to use for storing user files.
+# Storage and default vector store
 STORAGE_TYPE=opendal
-
-# Apache OpenDAL Configuration
-# The configuration for OpenDAL consists of the following format: OPENDAL__.
-# You can find all the service configurations (CONFIG_NAME) in the repository at: https://github.com/apache/opendal/tree/main/core/src/services.
-# Dify will scan configurations starting with OPENDAL_ and automatically apply them.
-# The scheme name for the OpenDAL storage.
 OPENDAL_SCHEME=fs
-# Configurations for OpenDAL Local File System.
 OPENDAL_FS_ROOT=storage
-
-# ClickZetta Volume Configuration (for storage backend)
-# To use ClickZetta Volume as storage backend, set STORAGE_TYPE=clickzetta-volume
-# Note: ClickZetta Volume will reuse the existing CLICKZETTA_* connection parameters
-
-# Volume type selection (three types available):
-# - user: Personal/small team use, simple config, user-level permissions
-# - table: Enterprise multi-tenant, smart routing, table-level + user-level permissions
-# - external: Data lake integration, external storage connection, volume-level + storage-level permissions
-CLICKZETTA_VOLUME_TYPE=user
-
-# External Volume name (required only when TYPE=external)
-CLICKZETTA_VOLUME_NAME=
-
-# Table Volume table prefix (used only when TYPE=table)
-CLICKZETTA_VOLUME_TABLE_PREFIX=dataset_
-
-# Dify file directory prefix (isolates from other apps, recommended to keep default)
-CLICKZETTA_VOLUME_DIFY_PREFIX=dify_km
-
-# S3 Configuration
-#
-S3_ENDPOINT=
-S3_REGION=us-east-1
-S3_BUCKET_NAME=difyai
-S3_ACCESS_KEY=
-S3_SECRET_KEY=
-S3_ADDRESS_STYLE=auto
-# Whether to use AWS managed IAM roles for authenticating with the S3 service.
-# If set to false, the access key and secret key must be provided.
-S3_USE_AWS_MANAGED_IAM=false
-
-# Workflow run and Conversation archive storage (S3-compatible)
-ARCHIVE_STORAGE_ENABLED=false
-ARCHIVE_STORAGE_ENDPOINT=
-ARCHIVE_STORAGE_ARCHIVE_BUCKET=
-ARCHIVE_STORAGE_EXPORT_BUCKET=
-ARCHIVE_STORAGE_ACCESS_KEY=
-ARCHIVE_STORAGE_SECRET_KEY=
-ARCHIVE_STORAGE_REGION=auto
-
-# Azure Blob Configuration
-#
-AZURE_BLOB_ACCOUNT_NAME=difyai
-AZURE_BLOB_ACCOUNT_KEY=difyai
-AZURE_BLOB_CONTAINER_NAME=difyai-container
-AZURE_BLOB_ACCOUNT_URL=https://.blob.core.windows.net
-
-# Google Storage Configuration
-#
-GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
-GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=
-
-# The Alibaba Cloud OSS configurations,
-#
-ALIYUN_OSS_BUCKET_NAME=your-bucket-name
-ALIYUN_OSS_ACCESS_KEY=your-access-key
-ALIYUN_OSS_SECRET_KEY=your-secret-key
-ALIYUN_OSS_ENDPOINT=https://oss-ap-southeast-1-internal.aliyuncs.com
-ALIYUN_OSS_REGION=ap-southeast-1
-ALIYUN_OSS_AUTH_VERSION=v4
-# Don't start with '/'. OSS doesn't support leading slash in object names.
-ALIYUN_OSS_PATH=your-path
-# Optional CloudBox ID for Aliyun OSS, DO NOT enable it if you are not using CloudBox.
-#ALIYUN_CLOUDBOX_ID=your-cloudbox-id
-
-# Tencent COS Configuration
-#
-TENCENT_COS_BUCKET_NAME=your-bucket-name
-TENCENT_COS_SECRET_KEY=your-secret-key
-TENCENT_COS_SECRET_ID=your-secret-id
-TENCENT_COS_REGION=your-region
-TENCENT_COS_SCHEME=your-scheme
-TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain
-
-# Oracle Storage Configuration
-#
-OCI_ENDPOINT=https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com
-OCI_BUCKET_NAME=your-bucket-name
-OCI_ACCESS_KEY=your-access-key
-OCI_SECRET_KEY=your-secret-key
-OCI_REGION=us-ashburn-1
-
-# Huawei OBS Configuration
-#
-HUAWEI_OBS_BUCKET_NAME=your-bucket-name
-HUAWEI_OBS_SECRET_KEY=your-secret-key
-HUAWEI_OBS_ACCESS_KEY=your-access-key
-HUAWEI_OBS_SERVER=your-server-url
-HUAWEI_OBS_PATH_STYLE=false
-
-# Volcengine TOS Configuration
-#
-VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
-VOLCENGINE_TOS_SECRET_KEY=your-secret-key
-VOLCENGINE_TOS_ACCESS_KEY=your-access-key
-VOLCENGINE_TOS_ENDPOINT=your-server-url
-VOLCENGINE_TOS_REGION=your-region
-
-# Baidu OBS Storage Configuration
-#
-BAIDU_OBS_BUCKET_NAME=your-bucket-name
-BAIDU_OBS_SECRET_KEY=your-secret-key
-BAIDU_OBS_ACCESS_KEY=your-access-key
-BAIDU_OBS_ENDPOINT=your-server-url
-
-# Supabase Storage Configuration
-#
-SUPABASE_BUCKET_NAME=your-bucket-name
-SUPABASE_API_KEY=your-access-key
-SUPABASE_URL=your-server-url
-
-# ------------------------------
-# Vector Database Configuration
-# ------------------------------
-
-# The type of vector store to use.
-# Supported values are `weaviate`, `oceanbase`, `seekdb`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`, `vastbase`, `tidb`, `tidb_on_qdrant`, `baidu`, `lindorm`, `huawei_cloud`, `upstash`, `matrixone`, `clickzetta`, `alibabacloud_mysql`, `iris`, `hologres`.
 VECTOR_STORE=weaviate
-# Prefix used to create collection name in vector database
 VECTOR_INDEX_NAME_PREFIX=Vector_index
-
-# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
 WEAVIATE_ENDPOINT=http://weaviate:8080
 WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
 WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051
 WEAVIATE_TOKENIZATION=word
-
-# For OceanBase metadata database configuration, available when `DB_TYPE` is `oceanbase`.
-# For OceanBase vector database configuration, available when `VECTOR_STORE` is `oceanbase`
-# If you want to use OceanBase as both vector database and metadata database, you need to set both `DB_TYPE` and `VECTOR_STORE` to `oceanbase`, and set Database Configuration is the same as the vector database.
-# seekdb is the lite version of OceanBase and shares the connection configuration with OceanBase.
-OCEANBASE_VECTOR_HOST=oceanbase
-OCEANBASE_VECTOR_PORT=2881
-OCEANBASE_VECTOR_USER=root@test
-OCEANBASE_VECTOR_PASSWORD=difyai123456
-OCEANBASE_VECTOR_DATABASE=test
-OCEANBASE_CLUSTER_NAME=difyai
-OCEANBASE_MEMORY_LIMIT=6G
-OCEANBASE_ENABLE_HYBRID_SEARCH=false
-# For OceanBase vector database, built-in fulltext parsers are `ngram`, `beng`, `space`, `ngram2`, `ik`
-# For OceanBase vector database, external fulltext parsers (require plugin installation) are `japanese_ftparser`, `thai_ftparser`
-OCEANBASE_FULLTEXT_PARSER=ik
-SEEKDB_MEMORY_LIMIT=2G
-
-# The Qdrant endpoint URL. Only available when VECTOR_STORE is `qdrant`.
-QDRANT_URL=http://qdrant:6333
-QDRANT_API_KEY=difyai123456
-QDRANT_CLIENT_TIMEOUT=20
-QDRANT_GRPC_ENABLED=false
-QDRANT_GRPC_PORT=6334
-QDRANT_REPLICATION_FACTOR=1
-
-# Milvus configuration. Only available when VECTOR_STORE is `milvus`.
-# The milvus uri.
-MILVUS_URI=http://host.docker.internal:19530
-MILVUS_DATABASE=
-MILVUS_TOKEN=
-MILVUS_USER=
-MILVUS_PASSWORD=
-MILVUS_ENABLE_HYBRID_SEARCH=False
-MILVUS_ANALYZER_PARAMS=
-
-# MyScale configuration, only available when VECTOR_STORE is `myscale`
-# For multi-language support, please set MYSCALE_FTS_PARAMS with referring to:
-# https://myscale.com/docs/en/text-search/#understanding-fts-index-parameters
-MYSCALE_HOST=myscale
-MYSCALE_PORT=8123
-MYSCALE_USER=default
-MYSCALE_PASSWORD=
-MYSCALE_DATABASE=dify
-MYSCALE_FTS_PARAMS=
-
-# Couchbase configurations, only available when VECTOR_STORE is `couchbase`
-# The connection string must include hostname defined in the docker-compose file (couchbase-server in this case)
-COUCHBASE_CONNECTION_STRING=couchbase://couchbase-server
-COUCHBASE_USER=Administrator
-COUCHBASE_PASSWORD=password
-COUCHBASE_BUCKET_NAME=Embeddings
-COUCHBASE_SCOPE_NAME=_default
-
-# Hologres configurations, only available when VECTOR_STORE is `hologres`
-# access_key_id is used as the PG username, access_key_secret is used as the PG password
-HOLOGRES_HOST=
-HOLOGRES_PORT=80
-HOLOGRES_DATABASE=
-HOLOGRES_ACCESS_KEY_ID=
-HOLOGRES_ACCESS_KEY_SECRET=
-HOLOGRES_SCHEMA=public
-HOLOGRES_TOKENIZER=jieba
-HOLOGRES_DISTANCE_METHOD=Cosine
-HOLOGRES_BASE_QUANTIZATION_TYPE=rabitq
-HOLOGRES_MAX_DEGREE=64
-HOLOGRES_EF_CONSTRUCTION=400
-
-# pgvector configurations, only available when VECTOR_STORE is `pgvector`
-PGVECTOR_HOST=pgvector
-PGVECTOR_PORT=5432
-PGVECTOR_USER=postgres
-PGVECTOR_PASSWORD=difyai123456
-PGVECTOR_DATABASE=dify
-PGVECTOR_MIN_CONNECTION=1
-PGVECTOR_MAX_CONNECTION=5
-PGVECTOR_PG_BIGM=false
-PGVECTOR_PG_BIGM_VERSION=1.2-20240606
-
-# vastbase configurations, only available when VECTOR_STORE is `vastbase`
-VASTBASE_HOST=vastbase
-VASTBASE_PORT=5432
-VASTBASE_USER=dify
-VASTBASE_PASSWORD=Difyai123456
-VASTBASE_DATABASE=dify
-VASTBASE_MIN_CONNECTION=1
-VASTBASE_MAX_CONNECTION=5
-
-# pgvecto-rs configurations, only available when VECTOR_STORE is `pgvecto-rs`
-PGVECTO_RS_HOST=pgvecto-rs
-PGVECTO_RS_PORT=5432
-PGVECTO_RS_USER=postgres
-PGVECTO_RS_PASSWORD=difyai123456
-PGVECTO_RS_DATABASE=dify
-
-# analyticdb configurations, only available when VECTOR_STORE is `analyticdb`
-ANALYTICDB_KEY_ID=your-ak
-ANALYTICDB_KEY_SECRET=your-sk
-ANALYTICDB_REGION_ID=cn-hangzhou
-ANALYTICDB_INSTANCE_ID=gp-ab123456
-ANALYTICDB_ACCOUNT=testaccount
-ANALYTICDB_PASSWORD=testpassword
-ANALYTICDB_NAMESPACE=dify
-ANALYTICDB_NAMESPACE_PASSWORD=difypassword
-ANALYTICDB_HOST=gp-test.aliyuncs.com
-ANALYTICDB_PORT=5432
-ANALYTICDB_MIN_CONNECTION=1
-ANALYTICDB_MAX_CONNECTION=5
-
-# TiDB vector configurations, only available when VECTOR_STORE is `tidb_vector`
-TIDB_VECTOR_HOST=tidb
-TIDB_VECTOR_PORT=4000
-TIDB_VECTOR_USER=
-TIDB_VECTOR_PASSWORD=
-TIDB_VECTOR_DATABASE=dify
-
-# Matrixone vector configurations.
-MATRIXONE_HOST=matrixone
-MATRIXONE_PORT=6001
-MATRIXONE_USER=dump
-MATRIXONE_PASSWORD=111
-MATRIXONE_DATABASE=dify
-
-# Tidb on qdrant configuration, only available when VECTOR_STORE is `tidb_on_qdrant`
-TIDB_ON_QDRANT_URL=http://127.0.0.1
-TIDB_ON_QDRANT_API_KEY=dify
-TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
-TIDB_ON_QDRANT_GRPC_ENABLED=false
-TIDB_ON_QDRANT_GRPC_PORT=6334
-TIDB_PUBLIC_KEY=dify
-TIDB_PRIVATE_KEY=dify
-TIDB_API_URL=http://127.0.0.1
-TIDB_IAM_API_URL=http://127.0.0.1
-TIDB_REGION=regions/aws-us-east-1
-TIDB_PROJECT_ID=dify
-TIDB_SPEND_LIMIT=100
-
-# Chroma configuration, only available when VECTOR_STORE is `chroma`
-CHROMA_HOST=127.0.0.1
-CHROMA_PORT=8000
-CHROMA_TENANT=default_tenant
-CHROMA_DATABASE=default_database
-CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthClientProvider
-CHROMA_AUTH_CREDENTIALS=
-
-# Oracle configuration, only available when VECTOR_STORE is `oracle`
-ORACLE_USER=dify
-ORACLE_PASSWORD=dify
-ORACLE_DSN=oracle:1521/FREEPDB1
-ORACLE_CONFIG_DIR=/app/api/storage/wallet
-ORACLE_WALLET_LOCATION=/app/api/storage/wallet
-ORACLE_WALLET_PASSWORD=dify
-ORACLE_IS_AUTONOMOUS=false
-
-# AlibabaCloud MySQL configuration, only available when VECTOR_STORE is `alibabcloud_mysql`
-ALIBABACLOUD_MYSQL_HOST=127.0.0.1
-ALIBABACLOUD_MYSQL_PORT=3306
-ALIBABACLOUD_MYSQL_USER=root
-ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
-ALIBABACLOUD_MYSQL_DATABASE=dify
-ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
-ALIBABACLOUD_MYSQL_HNSW_M=6
-
-# relyt configurations, only available when VECTOR_STORE is `relyt`
-RELYT_HOST=db
-RELYT_PORT=5432
-RELYT_USER=postgres
-RELYT_PASSWORD=difyai123456
-RELYT_DATABASE=postgres
-
-# open search configuration, only available when VECTOR_STORE is `opensearch`
-OPENSEARCH_HOST=opensearch
-OPENSEARCH_PORT=9200
-OPENSEARCH_SECURE=true
-OPENSEARCH_VERIFY_CERTS=true
-OPENSEARCH_AUTH_METHOD=basic
-OPENSEARCH_USER=admin
-OPENSEARCH_PASSWORD=admin
-# If using AWS managed IAM, e.g. Managed Cluster or OpenSearch Serverless
-OPENSEARCH_AWS_REGION=ap-southeast-1
-OPENSEARCH_AWS_SERVICE=aoss
-
-# tencent vector configurations, only available when VECTOR_STORE is `tencent`
-TENCENT_VECTOR_DB_URL=http://127.0.0.1
-TENCENT_VECTOR_DB_API_KEY=dify
-TENCENT_VECTOR_DB_TIMEOUT=30
-TENCENT_VECTOR_DB_USERNAME=dify
-TENCENT_VECTOR_DB_DATABASE=dify
-TENCENT_VECTOR_DB_SHARD=1
-TENCENT_VECTOR_DB_REPLICAS=2
-TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
-
-# ElasticSearch configuration, only available when VECTOR_STORE is `elasticsearch`
-ELASTICSEARCH_HOST=0.0.0.0
-ELASTICSEARCH_PORT=9200
-ELASTICSEARCH_USERNAME=elastic
-ELASTICSEARCH_PASSWORD=elastic
-KIBANA_PORT=5601
-
-# Using ElasticSearch Cloud Serverless, or not.
-ELASTICSEARCH_USE_CLOUD=false
-ELASTICSEARCH_CLOUD_URL=YOUR-ELASTICSEARCH_CLOUD_URL
-ELASTICSEARCH_API_KEY=YOUR-ELASTICSEARCH_API_KEY
-
-ELASTICSEARCH_VERIFY_CERTS=False
-ELASTICSEARCH_CA_CERTS=
-ELASTICSEARCH_REQUEST_TIMEOUT=100000
-ELASTICSEARCH_RETRY_ON_TIMEOUT=True
-ELASTICSEARCH_MAX_RETRIES=10
-
-# baidu vector configurations, only available when VECTOR_STORE is `baidu`
-BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
-BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
-BAIDU_VECTOR_DB_ACCOUNT=root
-BAIDU_VECTOR_DB_API_KEY=dify
-BAIDU_VECTOR_DB_DATABASE=dify
-BAIDU_VECTOR_DB_SHARD=1
-BAIDU_VECTOR_DB_REPLICAS=3
-BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
-BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
-BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500
-BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05
-BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300
-
-# VikingDB configurations, only available when VECTOR_STORE is `vikingdb`
-VIKINGDB_ACCESS_KEY=your-ak
-VIKINGDB_SECRET_KEY=your-sk
-VIKINGDB_REGION=cn-shanghai
-VIKINGDB_HOST=api-vikingdb.xxx.volces.com
-VIKINGDB_SCHEME=http
-VIKINGDB_CONNECTION_TIMEOUT=30
-VIKINGDB_SOCKET_TIMEOUT=30
-
-# Lindorm configuration, only available when VECTOR_STORE is `lindorm`
-LINDORM_URL=http://localhost:30070
-LINDORM_USERNAME=admin
-LINDORM_PASSWORD=admin
-LINDORM_USING_UGC=True
-LINDORM_QUERY_TIMEOUT=1
-
-# opengauss configurations, only available when VECTOR_STORE is `opengauss`
-OPENGAUSS_HOST=opengauss
-OPENGAUSS_PORT=6600
-OPENGAUSS_USER=postgres
-OPENGAUSS_PASSWORD=Dify@123
-OPENGAUSS_DATABASE=dify
-OPENGAUSS_MIN_CONNECTION=1
-OPENGAUSS_MAX_CONNECTION=5
-OPENGAUSS_ENABLE_PQ=false
-
-# huawei cloud search service vector configurations, only available when VECTOR_STORE is `huawei_cloud`
-HUAWEI_CLOUD_HOSTS=https://127.0.0.1:9200
-HUAWEI_CLOUD_USER=admin
-HUAWEI_CLOUD_PASSWORD=admin
-
-# Upstash Vector configuration, only available when VECTOR_STORE is `upstash`
-UPSTASH_VECTOR_URL=https://xxx-vector.upstash.io
-UPSTASH_VECTOR_TOKEN=dify
-
-# TableStore Vector configuration
-# (only used when VECTOR_STORE is tablestore)
-TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
-TABLESTORE_INSTANCE_NAME=instance-name
-TABLESTORE_ACCESS_KEY_ID=xxx
-TABLESTORE_ACCESS_KEY_SECRET=xxx
-TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
-
-# Clickzetta configuration, only available when VECTOR_STORE is `clickzetta`
-CLICKZETTA_USERNAME=
-CLICKZETTA_PASSWORD=
-CLICKZETTA_INSTANCE=
-CLICKZETTA_SERVICE=api.clickzetta.com
-CLICKZETTA_WORKSPACE=quick_start
-CLICKZETTA_VCLUSTER=default_ap
-CLICKZETTA_SCHEMA=dify
-CLICKZETTA_BATCH_SIZE=100
-CLICKZETTA_ENABLE_INVERTED_INDEX=true
-CLICKZETTA_ANALYZER_TYPE=chinese
-CLICKZETTA_ANALYZER_MODE=smart
-CLICKZETTA_VECTOR_DISTANCE_FUNCTION=cosine_distance
-
-# InterSystems IRIS configuration, only available when VECTOR_STORE is `iris`
-IRIS_HOST=iris
-IRIS_SUPER_SERVER_PORT=1972
-IRIS_WEB_SERVER_PORT=52773
-IRIS_USER=_SYSTEM
-IRIS_PASSWORD=Dify@1234
-IRIS_DATABASE=USER
-IRIS_SCHEMA=dify
-IRIS_CONNECTION_URL=
-IRIS_MIN_CONNECTION=1
-IRIS_MAX_CONNECTION=3
-IRIS_TEXT_INDEX=true
-IRIS_TEXT_INDEX_LANGUAGE=en
-IRIS_TIMEZONE=UTC
-
-# ------------------------------
-# Knowledge Configuration
-# ------------------------------
-
-# Upload file size limit, default 15M.
-UPLOAD_FILE_SIZE_LIMIT=15
-
-# The maximum number of files that can be uploaded at a time, default 5.
-UPLOAD_FILE_BATCH_LIMIT=5
-
-# Comma-separated list of file extensions blocked from upload for security reasons.
-# Extensions should be lowercase without dots (e.g., exe,bat,sh,dll).
-# Empty by default to allow all file types.
-# Recommended: exe,bat,cmd,com,scr,vbs,ps1,msi,dll
-UPLOAD_FILE_EXTENSION_BLACKLIST=
-
-# Maximum number of files allowed in a single chunk attachment, default 10.
-SINGLE_CHUNK_ATTACHMENT_LIMIT=10
-
-# Maximum number of files allowed in a image batch upload operation
-IMAGE_FILE_BATCH_LIMIT=10
-
-# Maximum allowed image file size for attachments in megabytes, default 2.
-ATTACHMENT_IMAGE_FILE_SIZE_LIMIT=2
-
-# Timeout for downloading image attachments in seconds, default 60.
-ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT=60
-
-
-# ETL type, support: `dify`, `Unstructured`
-# `dify` Dify's proprietary file extraction scheme
-# `Unstructured` Unstructured.io file extraction scheme
-ETL_TYPE=dify
-
-# Unstructured API path and API key, needs to be configured when ETL_TYPE is Unstructured
-# Or using Unstructured for document extractor node for pptx.
-# For example: http://unstructured:8000/general/v0/general
-UNSTRUCTURED_API_URL=
-UNSTRUCTURED_API_KEY=
-SCARF_NO_ANALYTICS=true
-
-# ------------------------------
-# Model Configuration
-# ------------------------------
-
-# Enable or disable plugin based token counting. If disabled, token counting will return 0.
-# This can improve performance by skipping token counting operations.
-# Default: false (disabled).
-PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
-
-# ------------------------------
-# Multi-modal Configuration
-# ------------------------------
-
-# The format of the image/video/audio/document sent when the multi-modal model is input,
-# the default is base64, optional url.
-# The delay of the call in url mode will be lower than that in base64 mode.
-# It is generally recommended to use the more compatible base64 mode.
-# If configured as url, you need to configure FILES_URL as an externally accessible address so that the multi-modal model can access the image/video/audio/document.
-MULTIMODAL_SEND_FORMAT=base64
-# Upload image file size limit, default 10M.
-UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
-# Upload video file size limit, default 100M.
-UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
-# Upload audio file size limit, default 50M.
-UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
-
-# ------------------------------
-# Sentry Configuration
-# Used for application monitoring and error log tracking.
-# ------------------------------
-SENTRY_DSN=
-
-# API Service Sentry DSN address, default is empty, when empty,
-# all monitoring information is not reported to Sentry.
-# If not set, Sentry error reporting will be disabled.
-API_SENTRY_DSN=
-# API Service The reporting ratio of Sentry events, if it is 0.01, it is 1%.
-API_SENTRY_TRACES_SAMPLE_RATE=1.0
-# API Service The reporting ratio of Sentry profiles, if it is 0.01, it is 1%.
-API_SENTRY_PROFILES_SAMPLE_RATE=1.0
-
-# Web Service Sentry DSN address, default is empty, when empty,
-# all monitoring information is not reported to Sentry.
-# If not set, Sentry error reporting will be disabled.
-WEB_SENTRY_DSN=
-
-# Plugin_daemon Service Sentry DSN address, default is empty, when empty,
-# all monitoring information is not reported to Sentry.
-# If not set, Sentry error reporting will be disabled.
-PLUGIN_SENTRY_ENABLED=false
-PLUGIN_SENTRY_DSN=
-
-# ------------------------------
-# Notion Integration Configuration
-# Variables can be obtained by applying for Notion integration: https://www.notion.so/my-integrations
-# ------------------------------
-
-# Configure as "public" or "internal".
-# Since Notion's OAuth redirect URL only supports HTTPS,
-# if deploying locally, please use Notion's internal integration.
-NOTION_INTEGRATION_TYPE=public
-# Notion OAuth client secret (used for public integration type)
-NOTION_CLIENT_SECRET=
-# Notion OAuth client id (used for public integration type)
-NOTION_CLIENT_ID=
-# Notion internal integration secret.
-# If the value of NOTION_INTEGRATION_TYPE is "internal",
-# you need to configure this variable.
-NOTION_INTERNAL_SECRET=
-
-# ------------------------------
-# Mail related configuration
-# ------------------------------
-
-# Mail type, support: resend, smtp, sendgrid
-MAIL_TYPE=
-
-# Default send from email address, if not specified
-# If using SendGrid, use the 'from' field for authentication if necessary.
-MAIL_DEFAULT_SEND_FROM=
-
-# API-Key for the Resend email provider, used when MAIL_TYPE is `resend`.
-RESEND_API_URL=https://api.resend.com
-RESEND_API_KEY=
-
-
-# SMTP server configuration, used when MAIL_TYPE is `smtp`
-SMTP_SERVER=
-SMTP_PORT=465
-SMTP_USERNAME=
-SMTP_PASSWORD=
-SMTP_USE_TLS=true
-SMTP_OPPORTUNISTIC_TLS=false
-# Optional: override the local hostname used for SMTP HELO/EHLO
-SMTP_LOCAL_HOSTNAME=
-
-# Sendgid configuration
-SENDGRID_API_KEY=
-
-# ------------------------------
-# Others Configuration
-# ------------------------------
-
-# Maximum length of segmentation tokens for indexing
-INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
-
-# Member invitation link valid time (hours),
-# Default: 72.
-INVITE_EXPIRY_HOURS=72
-
-# Reset password token valid time (minutes),
-RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
-EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
-CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
-OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
-
-# The sandbox service endpoint.
-CODE_EXECUTION_ENDPOINT=http://sandbox:8194
-CODE_EXECUTION_API_KEY=dify-sandbox
-CODE_EXECUTION_SSL_VERIFY=True
-CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
-CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
-CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
-CODE_MAX_NUMBER=9223372036854775807
-CODE_MIN_NUMBER=-9223372036854775808
-CODE_MAX_DEPTH=5
-CODE_MAX_PRECISION=20
-CODE_MAX_STRING_LENGTH=400000
-CODE_MAX_STRING_ARRAY_LENGTH=30
-CODE_MAX_OBJECT_ARRAY_LENGTH=30
-CODE_MAX_NUMBER_ARRAY_LENGTH=1000
-CODE_EXECUTION_CONNECT_TIMEOUT=10
-CODE_EXECUTION_READ_TIMEOUT=60
-CODE_EXECUTION_WRITE_TIMEOUT=10
-TEMPLATE_TRANSFORM_MAX_LENGTH=400000
-
-# Workflow runtime configuration
-WORKFLOW_MAX_EXECUTION_STEPS=500
-WORKFLOW_MAX_EXECUTION_TIME=1200
-WORKFLOW_CALL_MAX_DEPTH=5
-MAX_VARIABLE_SIZE=204800
-WORKFLOW_FILE_UPLOAD_LIMIT=10
-
-# GraphEngine Worker Pool Configuration
-# Minimum number of workers per GraphEngine instance (default: 1)
-GRAPH_ENGINE_MIN_WORKERS=1
-# Maximum number of workers per GraphEngine instance (default: 10)
-GRAPH_ENGINE_MAX_WORKERS=10
-# Queue depth threshold that triggers worker scale up (default: 3)
-GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
-# Seconds of idle time before scaling down workers (default: 5.0)
-GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
-
-# Workflow storage configuration
-# Options: rdbms, hybrid
-# rdbms: Use only the relational database (default)
-# hybrid: Save new data to object storage, read from both object storage and RDBMS
-WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
-
-# Repository configuration
-# Core workflow execution repository implementation
-# Options:
-# - core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository (default)
-# - core.repositories.celery_workflow_execution_repository.CeleryWorkflowExecutionRepository
-# - extensions.logstore.repositories.logstore_workflow_execution_repository.LogstoreWorkflowExecutionRepository
-CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
-
-# Core workflow node execution repository implementation
-# Options:
-# - core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository (default)
-# - core.repositories.celery_workflow_node_execution_repository.CeleryWorkflowNodeExecutionRepository
-# - extensions.logstore.repositories.logstore_workflow_node_execution_repository.LogstoreWorkflowNodeExecutionRepository
-CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
-
-# API workflow run repository implementation
-# Options:
-# - repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository (default)
-# - extensions.logstore.repositories.logstore_api_workflow_run_repository.LogstoreAPIWorkflowRunRepository
-API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
-
-# API workflow node execution repository implementation
-# Options:
-# - repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository (default)
-# - extensions.logstore.repositories.logstore_api_workflow_node_execution_repository.LogstoreAPIWorkflowNodeExecutionRepository
-API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
-
-# Workflow log cleanup configuration
-# Enable automatic cleanup of workflow run logs to manage database size
-WORKFLOW_LOG_CLEANUP_ENABLED=false
-# Number of days to retain workflow run logs (default: 30 days)
-WORKFLOW_LOG_RETENTION_DAYS=30
-# Batch size for workflow log cleanup operations (default: 100)
-WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
-# Comma-separated list of workflow IDs to clean logs for
-WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=
-
-# Aliyun SLS Logstore Configuration
-# Aliyun Access Key ID
-ALIYUN_SLS_ACCESS_KEY_ID=
-# Aliyun Access Key Secret
-ALIYUN_SLS_ACCESS_KEY_SECRET=
-# Aliyun SLS Endpoint (e.g., cn-hangzhou.log.aliyuncs.com)
-ALIYUN_SLS_ENDPOINT=
-# Aliyun SLS Region (e.g., cn-hangzhou)
-ALIYUN_SLS_REGION=
-# Aliyun SLS Project Name
-ALIYUN_SLS_PROJECT_NAME=
-# Number of days to retain workflow run logs (default: 365 days, 3650 for permanent storage)
-ALIYUN_SLS_LOGSTORE_TTL=365
-# Enable dual-write to both SLS LogStore and SQL database (default: false)
-LOGSTORE_DUAL_WRITE_ENABLED=false
-# Enable dual-read fallback to SQL database when LogStore returns no results (default: true)
-# Useful for migration scenarios where historical data exists only in SQL database
-LOGSTORE_DUAL_READ_ENABLED=true
-# Control flag for whether to write the `graph` field to LogStore.
-# If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field;
-# otherwise write an empty {} instead. Defaults to writing the `graph` field.
-LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true
-
-# HTTP request node in workflow configuration
-HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
-HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
-HTTP_REQUEST_NODE_SSL_VERIFY=True
-
-# HTTP request node timeout configuration
-# Maximum timeout values (in seconds) that users can set in HTTP request nodes
-# - Connect timeout: Time to wait for establishing connection (default: 10s)
-# - Read timeout: Time to wait for receiving response data (default: 600s, 10 minutes)
-# - Write timeout: Time to wait for sending request data (default: 600s, 10 minutes)
-HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10
-HTTP_REQUEST_MAX_READ_TIMEOUT=600
-HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
-
-# Base64 encoded CA certificate data for custom certificate verification (PEM format, optional)
-# HTTP_REQUEST_NODE_SSL_CERT_DATA=LS0tLS1CRUdJTi...
-# Base64 encoded client certificate data for mutual TLS authentication (PEM format, optional)
-# HTTP_REQUEST_NODE_SSL_CLIENT_CERT_DATA=LS0tLS1CRUdJTi...
-# Base64 encoded client private key data for mutual TLS authentication (PEM format, optional)
-# HTTP_REQUEST_NODE_SSL_CLIENT_KEY_DATA=LS0tLS1CRUdJTi...
-
-# Webhook request configuration
-WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
-
-# Respect X-* headers to redirect clients
-RESPECT_XFORWARD_HEADERS_ENABLED=false
-
-# SSRF Proxy server HTTP URL
-SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
-# SSRF Proxy server HTTPS URL
-SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
-
-# Maximum loop count in the workflow
-LOOP_NODE_MAX_COUNT=100
-
-# The maximum number of tools that can be used in the agent.
-MAX_TOOLS_NUM=10
-
-# Maximum number of Parallelism branches in the workflow
-MAX_PARALLEL_LIMIT=10
-
-# The maximum number of iterations for agent setting
-MAX_ITERATIONS_NUM=99
-
-# ------------------------------
-# Environment Variables for web Service
-# ------------------------------
-
-# The timeout for the text generation in millisecond
-TEXT_GENERATION_TIMEOUT_MS=60000
-
-# Enable the experimental vinext runtime shipped in the image.
-EXPERIMENTAL_ENABLE_VINEXT=false
-
-# Allow inline style attributes in Markdown rendering.
-# Enable this if your workflows use Jinja2 templates with styled HTML.
-# Only recommended for self-hosted deployments with trusted content.
-ALLOW_INLINE_STYLES=false
-
-# Allow rendering unsafe URLs which have "data:" scheme.
-ALLOW_UNSAFE_DATA_SCHEME=false
-
-# Maximum number of tree depth in the workflow
-MAX_TREE_DEPTH=50
-
-# ------------------------------
-# Environment Variables for database Service
-# ------------------------------
-# Postgres data directory
-PGDATA=/var/lib/postgresql/data/pgdata
-
-# MySQL Default Configuration
-MYSQL_HOST_VOLUME=./volumes/mysql/data
-
-# ------------------------------
-# Environment Variables for sandbox Service
-# ------------------------------
-
-# The API key for the sandbox service
-SANDBOX_API_KEY=dify-sandbox
-# The mode in which the Gin framework runs
-SANDBOX_GIN_MODE=release
-# The timeout for the worker in seconds
-SANDBOX_WORKER_TIMEOUT=15
-# Enable network for the sandbox service
-SANDBOX_ENABLE_NETWORK=true
-# HTTP proxy URL for SSRF protection
-SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
-# HTTPS proxy URL for SSRF protection
-SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
-# The port on which the sandbox service runs
-SANDBOX_PORT=8194
-
-# ------------------------------
-# Environment Variables for weaviate Service
-# (only used when VECTOR_STORE is weaviate)
-# ------------------------------
 WEAVIATE_PERSISTENCE_DATA_PATH=/var/lib/weaviate
 WEAVIATE_QUERY_DEFAULTS_LIMIT=25
 WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
@@ -1249,118 +167,26 @@ WEAVIATE_ENABLE_TOKENIZER_GSE=false
 WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA=false
 WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR=false
 
-# ------------------------------
-# Environment Variables for Chroma
-# (only used when VECTOR_STORE is chroma)
-# ------------------------------
-
-# Authentication credentials for Chroma server
-CHROMA_SERVER_AUTHN_CREDENTIALS=difyai123456
-# Authentication provider for Chroma server
-CHROMA_SERVER_AUTHN_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
-# Persistence setting for Chroma server
-CHROMA_IS_PERSISTENT=TRUE
-
-# ------------------------------
-# Environment Variables for Oracle Service
-# (only used when VECTOR_STORE is oracle)
-# ------------------------------
-ORACLE_PWD=Dify123456
-ORACLE_CHARACTERSET=AL32UTF8
-
-# ------------------------------
-# Environment Variables for milvus Service
-# (only used when VECTOR_STORE is milvus)
-# ------------------------------
-# ETCD configuration for auto compaction mode
-ETCD_AUTO_COMPACTION_MODE=revision
-# ETCD configuration for auto compaction retention in terms of number of revisions
-ETCD_AUTO_COMPACTION_RETENTION=1000
-# ETCD configuration for backend quota in bytes
-ETCD_QUOTA_BACKEND_BYTES=4294967296
-# ETCD configuration for the number of changes before triggering a snapshot
-ETCD_SNAPSHOT_COUNT=50000
-# MinIO access key for authentication
-MINIO_ACCESS_KEY=minioadmin
-# MinIO secret key for authentication
-MINIO_SECRET_KEY=minioadmin
-# ETCD service endpoints
-ETCD_ENDPOINTS=etcd:2379
-# MinIO service address
-MINIO_ADDRESS=minio:9000
-# Enable or disable security authorization
-MILVUS_AUTHORIZATION_ENABLED=true
-
-# ------------------------------
-# Environment Variables for pgvector / pgvector-rs Service
-# (only used when VECTOR_STORE is pgvector / pgvector-rs)
-# ------------------------------
-PGVECTOR_PGUSER=postgres
-# The password for the default postgres user.
-PGVECTOR_POSTGRES_PASSWORD=difyai123456
-# The name of the default postgres database.
-PGVECTOR_POSTGRES_DB=dify
-# postgres data directory
-PGVECTOR_PGDATA=/var/lib/postgresql/data/pgdata
-
-# ------------------------------
-# Environment Variables for opensearch
-# (only used when VECTOR_STORE is opensearch)
-# ------------------------------
-OPENSEARCH_DISCOVERY_TYPE=single-node
-OPENSEARCH_BOOTSTRAP_MEMORY_LOCK=true
-OPENSEARCH_JAVA_OPTS_MIN=512m
-OPENSEARCH_JAVA_OPTS_MAX=1024m
-OPENSEARCH_INITIAL_ADMIN_PASSWORD=Qazwsxedc!@#123
-OPENSEARCH_MEMLOCK_SOFT=-1
-OPENSEARCH_MEMLOCK_HARD=-1
-OPENSEARCH_NOFILE_SOFT=65536
-OPENSEARCH_NOFILE_HARD=65536
-
-# ------------------------------
-# Environment Variables for Nginx reverse proxy
-# ------------------------------
-NGINX_SERVER_NAME=_
-NGINX_HTTPS_ENABLED=false
-# HTTP port
-NGINX_PORT=80
-# SSL settings are only applied when HTTPS_ENABLED is true
-NGINX_SSL_PORT=443
-# if HTTPS_ENABLED is true, you're required to add your own SSL certificates/keys to the `./nginx/ssl` directory
-# and modify the env vars below accordingly.
-NGINX_SSL_CERT_FILENAME=dify.crt
-NGINX_SSL_CERT_KEY_FILENAME=dify.key
-NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3
-
-# Nginx performance tuning
-NGINX_WORKER_PROCESSES=auto
-NGINX_CLIENT_MAX_BODY_SIZE=100M
-NGINX_KEEPALIVE_TIMEOUT=65
-
-# Proxy settings
-NGINX_PROXY_READ_TIMEOUT=3600s
-NGINX_PROXY_SEND_TIMEOUT=3600s
-
-# Set true to accept requests for /.well-known/acme-challenge/
-NGINX_ENABLE_CERTBOT_CHALLENGE=false
-
-# ------------------------------
-# Certbot Configuration
-# ------------------------------
-
-# Email address (required to get certificates from Let's Encrypt)
-CERTBOT_EMAIL=
-
-# Domain name
-CERTBOT_DOMAIN=
-
-# certbot command options
-# i.e: --force-renewal --dry-run --test-cert --debug
-CERTBOT_OPTIONS=
-
-# ------------------------------
-# Environment Variables for SSRF Proxy
-# ------------------------------
+# Sandbox and SSRF proxy
+CODE_EXECUTION_ENDPOINT=http://sandbox:8194
+CODE_EXECUTION_API_KEY=dify-sandbox
+CODE_EXECUTION_SSL_VERIFY=True
+CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
+CODE_EXECUTION_CONNECT_TIMEOUT=10
+CODE_EXECUTION_READ_TIMEOUT=60
+CODE_EXECUTION_WRITE_TIMEOUT=10
+SANDBOX_API_KEY=dify-sandbox
+SANDBOX_GIN_MODE=release
+SANDBOX_WORKER_TIMEOUT=15
+SANDBOX_ENABLE_NETWORK=true
+SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
+SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
+SANDBOX_PORT=8194
+PIP_MIRROR_URL=
+SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
+SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
 SSRF_HTTP_PORT=3128
 SSRF_COREDUMP_DIR=/var/spool/squid
 SSRF_REVERSE_PROXY_PORT=8194
@@ -1373,67 +199,7 @@ SSRF_POOL_MAX_CONNECTIONS=100
 SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
 SSRF_POOL_KEEPALIVE_EXPIRY=5.0
 
-# ------------------------------
-# docker env var for specifying vector db and metadata db type at startup
-# (based on the vector db and metadata db type, the corresponding docker
-# compose profile will be used)
-# if you want to use unstructured, add ',unstructured' to the end
-# ------------------------------
-COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
-
-# ------------------------------
-# Worker health check configuration for worker and worker_beat services.
-# Set to false to enable the health check.
-# Note: enabling the health check may cause periodic CPU spikes and increased load,
-# as it establishes a broker connection and sends a Celery ping on every check interval.
-# ------------------------------
-COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
-# Interval between health checks (e.g. 30s, 1m)
-COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
-# Timeout for each health check (e.g. 30s, 1m)
-COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
-
-# ------------------------------
-# Docker Compose Service Expose Host Port Configurations
-# ------------------------------
-EXPOSE_NGINX_PORT=80
-EXPOSE_NGINX_SSL_PORT=443
-
-# ----------------------------------------------------------------------------
-# ModelProvider & Tool Position Configuration
-# Used to specify the model providers and tools that can be used in the app.
-# ----------------------------------------------------------------------------
-
-# Pin, include, and exclude tools
-# Use comma-separated values with no spaces between items.
-# Example: POSITION_TOOL_PINS=bing,google
-POSITION_TOOL_PINS=
-POSITION_TOOL_INCLUDES=
-POSITION_TOOL_EXCLUDES=
-
-# Pin, include, and exclude model providers
-# Use comma-separated values with no spaces between items.
-# Example: POSITION_PROVIDER_PINS=openai,openllm
-POSITION_PROVIDER_PINS=
-POSITION_PROVIDER_INCLUDES=
-POSITION_PROVIDER_EXCLUDES=
-
-# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP
-CSP_WHITELIST=
-
-# Enable or disable create tidb service job
-CREATE_TIDB_SERVICE_JOB_ENABLED=false
-
-# Maximum number of submitted thread count in a ThreadPool for parallel node execution
-MAX_SUBMIT_COUNT=100
-
-# The maximum number of top-k value for RAG.
-TOP_K_MAX_VALUE=10
-
-# ------------------------------
-# Plugin Daemon Configuration
-# ------------------------------
-
+# Plugin daemon
 DB_PLUGIN_DATABASE=dify_plugin
 EXPOSE_PLUGIN_DAEMON_PORT=5002
 PLUGIN_DAEMON_PORT=5002
@@ -1442,179 +208,44 @@ PLUGIN_DAEMON_URL=http://plugin_daemon:5002
 PLUGIN_MAX_PACKAGE_SIZE=52428800
 PLUGIN_MODEL_SCHEMA_CACHE_TTL=3600
 PLUGIN_PPROF_ENABLED=false
-
 PLUGIN_DEBUGGING_HOST=0.0.0.0
 PLUGIN_DEBUGGING_PORT=5003
 EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
 EXPOSE_PLUGIN_DEBUGGING_PORT=5003
-
-# If this key is changed, DIFY_INNER_API_KEY in plugin_daemon service must also be updated or agent node will fail.
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1 PLUGIN_DIFY_INNER_API_URL=http://api:5001 - -ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id} - -MARKETPLACE_ENABLED=true -MARKETPLACE_API_URL=https://marketplace.dify.ai - -# Creators Platform configuration -CREATORS_PLATFORM_FEATURES_ENABLED=true -CREATORS_PLATFORM_API_URL=https://creators.dify.ai -CREATORS_PLATFORM_OAUTH_CLIENT_ID= - FORCE_VERIFYING_SIGNATURE=true - PLUGIN_STDIO_BUFFER_SIZE=1024 PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880 - PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 -# Plugin Daemon side timeout (configure to match the API side below) PLUGIN_MAX_EXECUTION_TIMEOUT=600 -# API side timeout (configure to match the Plugin Daemon side above) -PLUGIN_DAEMON_TIMEOUT=600.0 -# PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple -PIP_MIRROR_URL= - -# https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example -# Plugin storage type, local aws_s3 tencent_cos azure_blob aliyun_oss volcengine_tos PLUGIN_STORAGE_TYPE=local PLUGIN_STORAGE_LOCAL_ROOT=/app/storage PLUGIN_WORKING_PATH=/app/storage/cwd PLUGIN_INSTALLED_PATH=plugin PLUGIN_PACKAGE_CACHE_PATH=plugin_packages PLUGIN_MEDIA_CACHE_PATH=assets -# Plugin oss bucket PLUGIN_STORAGE_OSS_BUCKET= -# Plugin oss s3 credentials -PLUGIN_S3_USE_AWS=false -PLUGIN_S3_USE_AWS_MANAGED_IAM=false -PLUGIN_S3_ENDPOINT= -PLUGIN_S3_USE_PATH_STYLE=false -PLUGIN_AWS_ACCESS_KEY= -PLUGIN_AWS_SECRET_KEY= -PLUGIN_AWS_REGION= -# Plugin oss azure blob -PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME= -PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= -# Plugin oss tencent cos -PLUGIN_TENCENT_COS_SECRET_KEY= -PLUGIN_TENCENT_COS_SECRET_ID= -PLUGIN_TENCENT_COS_REGION= -# Plugin oss aliyun oss -PLUGIN_ALIYUN_OSS_REGION= -PLUGIN_ALIYUN_OSS_ENDPOINT= -PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID= -PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET= -PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4 -PLUGIN_ALIYUN_OSS_PATH= -# Plugin oss volcengine tos -PLUGIN_VOLCENGINE_TOS_ENDPOINT= -PLUGIN_VOLCENGINE_TOS_ACCESS_KEY= -PLUGIN_VOLCENGINE_TOS_SECRET_KEY= -PLUGIN_VOLCENGINE_TOS_REGION= +PLUGIN_SENTRY_ENABLED=false +PLUGIN_SENTRY_DSN= +MARKETPLACE_ENABLED=true +MARKETPLACE_API_URL=https://marketplace.dify.ai +MARKETPLACE_URL= -# ------------------------------ -# OTLP Collector Configuration -# ------------------------------ -ENABLE_OTEL=false -OTLP_TRACE_ENDPOINT= -OTLP_METRIC_ENDPOINT= -OTLP_BASE_ENDPOINT=http://localhost:4318 -OTLP_API_KEY= -OTEL_EXPORTER_OTLP_PROTOCOL= -OTEL_EXPORTER_TYPE=otlp -OTEL_SAMPLING_RATE=0.1 -OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000 -OTEL_MAX_QUEUE_SIZE=2048 -OTEL_MAX_EXPORT_BATCH_SIZE=512 -OTEL_METRIC_EXPORT_INTERVAL=60000 -OTEL_BATCH_EXPORT_TIMEOUT=10000 -OTEL_METRIC_EXPORT_TIMEOUT=30000 - -# Prevent Clickjacking -ALLOW_EMBED=false - -# Dataset queue monitor configuration -QUEUE_MONITOR_THRESHOLD=200 -# You can configure multiple ones, separated by commas. 
eg: test1@dify.ai,test2@dify.ai -QUEUE_MONITOR_ALERT_EMAILS= -# Monitor interval in minutes, default is 30 minutes -QUEUE_MONITOR_INTERVAL=30 - -# Swagger UI configuration -SWAGGER_UI_ENABLED=false -SWAGGER_UI_PATH=/swagger-ui.html - -# Whether to encrypt dataset IDs when exporting DSL files (default: true) -# Set to false to export dataset IDs as plain text for easier cross-environment import -DSL_EXPORT_ENCRYPT_DATASET_ID=true - -# Maximum number of segments for dataset segments API (0 for unlimited) -DATASET_MAX_SEGMENTS_PER_REQUEST=0 - -# Celery schedule tasks configuration -ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false -ENABLE_CLEAN_UNUSED_DATASETS_TASK=false -ENABLE_CREATE_TIDB_SERVERLESS_TASK=false -ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false -ENABLE_CLEAN_MESSAGES=false -ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false -ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false -ENABLE_DATASETS_QUEUE_MONITOR=false -ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true -ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true -WORKFLOW_SCHEDULE_POLLER_INTERVAL=1 -WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100 -WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0 - -# Tenant isolated task queue configuration -TENANT_ISOLATED_TASK_CONCURRENCY=1 - -# Maximum allowed CSV file size for annotation import in megabytes -ANNOTATION_IMPORT_FILE_SIZE_LIMIT=2 -#Maximum number of annotation records allowed in a single import -ANNOTATION_IMPORT_MAX_RECORDS=10000 -# Minimum number of annotation records required in a single import -ANNOTATION_IMPORT_MIN_RECORDS=1 -ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5 -ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20 -# Maximum number of concurrent annotation import tasks per tenant -ANNOTATION_IMPORT_MAX_CONCURRENT=5 - -# The API key of amplitude -AMPLITUDE_API_KEY= - -# Sandbox expired records clean configuration -SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21 -SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000 -SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200 -SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30 - - -# Redis URL used for event bus between API and -# celery worker -# defaults to url constructed from `REDIS_*` -# configurations -EVENT_BUS_REDIS_URL= -# Event transport type. Options are: -# -# - pubsub: normal Pub/Sub (at-most-once) -# - sharded: sharded Pub/Sub (at-most-once) -# - streams: Redis Streams (at-least-once, recommended to avoid subscriber races) -# -# Note: Before enabling 'streams' in production, estimate your expected event volume and retention needs. -# Configure Redis memory limits and stream trimming appropriately (e.g., MAXLEN and key expiry) to reduce -# the risk of data loss from Redis auto-eviction under memory pressure. -# Also accepts ENV: EVENT_BUS_REDIS_CHANNEL_TYPE. -EVENT_BUS_REDIS_CHANNEL_TYPE=pubsub -# Whether to use Redis cluster mode while use redis as event bus. -# It's highly recommended to enable this for large deployments. 
-EVENT_BUS_REDIS_USE_CLUSTERS=false - -# Whether to Enable human input timeout check task -ENABLE_HUMAN_INPUT_TIMEOUT_TASK=true -# Human input timeout check interval in minutes -HUMAN_INPUT_TIMEOUT_TASK_INTERVAL=1 - - -SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000 +# Nginx and Docker Compose +NGINX_SERVER_NAME=_ +NGINX_HTTPS_ENABLED=false +NGINX_PORT=80 +NGINX_SSL_PORT=443 +NGINX_SSL_CERT_FILENAME=dify.crt +NGINX_SSL_CERT_KEY_FILENAME=dify.key +NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3 +NGINX_WORKER_PROCESSES=auto +NGINX_CLIENT_MAX_BODY_SIZE=100M +NGINX_KEEPALIVE_TIMEOUT=65 +NGINX_PROXY_READ_TIMEOUT=3600s +NGINX_PROXY_SEND_TIMEOUT=3600s +NGINX_ENABLE_CERTBOT_CHALLENGE=false +EXPOSE_NGINX_PORT=80 +EXPOSE_NGINX_SSL_PORT=443 +COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql} diff --git a/docker/.gitignore b/docker/.gitignore new file mode 100644 index 0000000000..c3a47ad592 --- /dev/null +++ b/docker/.gitignore @@ -0,0 +1,3 @@ +# Ignore actual .env files (keep only .env.example files in git) +*.env +!*.env.example diff --git a/docker/README.md b/docker/README.md index 3a7f4c2ad5..a2d9b2eeba 100644 --- a/docker/README.md +++ b/docker/README.md @@ -7,29 +7,31 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T - **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections.\ For more information, refer `docker/certbot/README.md`. -- **Persistent Environment Variables**: Default environment variables are managed through `.env.default`, while local overrides are stored in `.env`, ensuring that your configurations persist across deployments. +- **Persistent Environment Variables**: Essential startup defaults are provided in `.env.example`, while local values are stored in `.env`, ensuring that your configurations persist across deployments. > What is `.env`?

- > The `.env` file is a local override file. Keep it small by adding only the values that differ from `.env.default`. Use `.env.example` as the full reference when you need advanced configuration.
+ > The `.env` file is the local startup file. Copy it from `.env.example` for a default deployment. Optional advanced settings live in `envs/*.env.example` files.

- **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file.

-- **Local .env Overrides**: The `dify-compose` and `dify-compose.ps1` wrappers create `.env` if it is missing and generate a persistent `SECRET_KEY` for this deployment.
-
 ### How to Deploy Dify with `docker-compose.yaml`

 1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system.
 1. **Environment Setup**:
    - Navigate to the `docker` directory.
-   - No copy step is required. The `dify-compose` wrappers create `.env` if it is missing and write a generated `SECRET_KEY` to it.
-   - When prompted on first run, press Enter to use the default deployment, or answer `y` to stop and edit `.env` first.
-   - Customize `.env` only when you need to override defaults from `.env.default`. Refer to `.env.example` for the full list of available variables.
+   - Copy `.env.example` to `.env`.
+   - Customize `.env` when you need to change essential startup defaults. For advanced settings, copy the relevant `envs/*.env.example` file to the same path without the `.example` suffix.
    - **Optional (for advanced deployments)**: If you maintain a full `.env` file copied from `.env.example`, you may use the environment synchronization tool to keep it aligned with the latest `.env.example` updates while preserving your custom settings. See the [Environment Variables Synchronization](#environment-variables-synchronization) section below.
 1. **Running the Services**:
-   - Execute `./dify-compose up -d` from the `docker` directory to start the services. On Windows PowerShell, run `.\dify-compose.ps1 up -d`.
+   - Execute `docker compose up -d` from the `docker` directory to start the services.
    - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
+
+   ```bash
+   cp .env.example .env
+   docker compose up -d
+   ```
+
 1. **SSL Certificate Setup**:
    - Refer to `docker/certbot/README.md` to set up SSL certificates using Certbot.
 1. **OpenTelemetry Collector Setup**:
@@ -41,7 +43,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
 1. **Middleware Setup**:
    - Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches.
    - Navigate to the `docker` directory.
-   - Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file).
+   - Ensure the `middleware.env` file is created by running `cp envs/middleware.env.example middleware.env` (refer to the `envs/middleware.env.example` file).
 1. **Running Middleware Services**:
    - Navigate to the `docker` directory.
    - Execute `docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d` to start PostgreSQL/MySQL (per `DB_TYPE`) plus the bundled Weaviate instance.
@@ -58,13 +60,13 @@ For users migrating from the `docker-legacy` setup:
 1. **Data Migration**:
    - Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary.

-### Overview of `.env.default`, `.env`, and `.env.example`
+### Overview of `.env`, `.env.example`, and `envs/`

-- `.env.default` contains the minimal default configuration for Docker Compose deployments.
-- `.env` contains the generated `SECRET_KEY` plus any local overrides.
-- `.env.example` is the full reference for advanced configuration.
+- `.env.example` contains the essential default configuration for Docker Compose deployments.
+- `.env` contains local startup values copied from `.env.example` and any local changes.
+- `envs/*.env.example` files contain optional advanced configuration grouped by theme.

-The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary environment file, append paired internal service keys when needed, and remove the temporary file after Docker Compose starts.
+Docker Compose reads `envs/*.env` files when present, then reads `.env` last, so values in `.env` take precedence.

 #### Key Modules and Customization

@@ -74,7 +76,7 @@ The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary env

 #### Other notable variables

-The `.env.example` file provided in the Docker setup is extensive and covers a wide range of configuration options. It is structured into several sections, each pertaining to different aspects of the application and its services. Here are some of the key sections and variables:
+The root `.env.example` file contains the essential startup settings. Optional and provider-specific settings are grouped in `envs/*.env.example` files. Here are some of the key sections and variables:

 1. **Common Variables**:

@@ -102,7 +104,7 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w

 1. **Storage Configuration**:

-   - `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc.
+   - `STORAGE_TYPE`, `OPENDAL_SCHEME`, `OPENDAL_FS_ROOT`: Default local file storage settings. Optional storage backends are configured via the files under `envs/`.

 1. **Vector Database Configuration**:

@@ -124,11 +126,11 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w

 ### Environment Variables Synchronization

-When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example` or the optional files under `envs/`.
+When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example` or the optional files under `envs/`.

-If you use the default override-only workflow, review `.env.default` and add only the values you need to override to `.env`.
+If you use the default workflow, review `.env.example` and keep your `.env` aligned with essential startup values.

-If you maintain a full `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.
+If you maintain a customized `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.

 > This tool performs a **one-way synchronization** from `.env.example` to `.env`.
 > Existing values in `.env` are never overwritten automatically.
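To make the layering concrete, here is a minimal sketch of the intended workflow, assuming the optional example files mirror the `env_file` paths declared in `docker-compose-template.yaml` below (`envs/databases/redis.env` is one such entry; substitute whichever theme file you need):

```bash
cd docker

# Essential startup values; .env is read last, so anything set here wins.
cp .env.example .env

# Optional advanced settings: activate a theme file by dropping the
# .example suffix. Compose simply skips files that are absent, since every
# envs/ entry is declared with `required: false`.
cp envs/databases/redis.env.example envs/databases/redis.env

docker compose up -d
```

Because `./.env` is the last entry in each `env_file` list, a variable set in `.env` overrides the same variable supplied by any `envs/*.env` file.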
diff --git a/docker/dify-compose b/docker/dify-compose deleted file mode 100755 index 16bbd6b538..0000000000 --- a/docker/dify-compose +++ /dev/null @@ -1,334 +0,0 @@ -#!/usr/bin/env bash - -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -cd "$SCRIPT_DIR" - -DEFAULT_ENV_FILE=".env.default" -USER_ENV_FILE=".env" - -log() { - printf '%s\n' "$*" >&2 -} - -die() { - printf 'Error: %s\n' "$*" >&2 - exit 1 -} - -detect_compose() { - if docker compose version >/dev/null 2>&1; then - COMPOSE_CMD=(docker compose) - return - fi - - if command -v docker-compose >/dev/null 2>&1; then - COMPOSE_CMD=(docker-compose) - return - fi - - die "Docker Compose is not available. Install Docker Compose, then run this command again." -} - -generate_secret_key() { - if command -v openssl >/dev/null 2>&1; then - openssl rand -base64 42 - return - fi - - if command -v dd >/dev/null 2>&1 && command -v base64 >/dev/null 2>&1; then - dd if=/dev/urandom bs=42 count=1 2>/dev/null | base64 | tr -d '\n' - printf '\n' - return - fi - - return 1 -} - -ensure_env_files() { - [[ -f "$DEFAULT_ENV_FILE" ]] || die "$DEFAULT_ENV_FILE is missing." - - if [[ -f "$USER_ENV_FILE" ]]; then - return - fi - - : >"$USER_ENV_FILE" - - if [[ ! -t 0 ]]; then - log "Created $USER_ENV_FILE for local overrides." - return - fi - - printf 'Created %s for local overrides.\n' "$USER_ENV_FILE" - printf 'Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N] ' - read -r answer - - case "${answer:-}" in - y | Y | yes | YES | Yes) - cat <<'EOF' -Edit .env with the settings you want to override, using .env.example as the full reference. -Run ./dify-compose up -d again when you are ready. -EOF - exit 0 - ;; - esac -} - -user_env_value() { - local key="$1" - awk -F= -v target="$key" ' - /^[[:space:]]*#/ || !/=/{ next } - { - key = $1 - gsub(/^[[:space:]]+|[[:space:]]+$/, "", key) - if (key == target) { - value = substr($0, index($0, "=") + 1) - gsub(/^[[:space:]]+|[[:space:]]+$/, "", value) - if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) { - value = substr(value, 2, length(value) - 2) - } - result = value - } - } - END { print result } - ' "$USER_ENV_FILE" -} - -set_user_env_value() { - local key="$1" - local value="$2" - local temp_file - - temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-env.XXXXXX")" - awk -F= -v target="$key" -v replacement="$key=$value" ' - BEGIN { replaced = 0 } - /^[[:space:]]*#/ || !/=/{ print; next } - { - key = $1 - gsub(/^[[:space:]]+|[[:space:]]+$/, "", key) - if (key == target) { - if (!replaced) { - print replacement - replaced = 1 - } - next - } - print - } - END { - if (!replaced) { - print replacement - } - } - ' "$USER_ENV_FILE" >"$temp_file" - mv "$temp_file" "$USER_ENV_FILE" -} - -ensure_secret_key() { - local current_secret_key - local secret_key - - current_secret_key="$(user_env_value SECRET_KEY)" - if [[ -n "$current_secret_key" ]]; then - return - fi - - secret_key="$(generate_secret_key)" || die "Unable to generate SECRET_KEY. Install openssl or configure SECRET_KEY in .env." - set_user_env_value SECRET_KEY "$secret_key" - log "Generated SECRET_KEY in $USER_ENV_FILE." 
-} - -env_value() { - local key="$1" - awk -F= -v target="$key" ' - /^[[:space:]]*#/ || !/=/{ next } - { - key = $1 - gsub(/^[[:space:]]+|[[:space:]]+$/, "", key) - if (key == target) { - value = substr($0, index($0, "=") + 1) - gsub(/^[[:space:]]+|[[:space:]]+$/, "", value) - if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) { - value = substr(value, 2, length(value) - 2) - } - result = value - } - } - END { print result } - ' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE" -} - -user_overrides() { - local key="$1" - grep -Eq "^[[:space:]]*${key}[[:space:]]*=" "$USER_ENV_FILE" -} - -write_merged_env() { - awk ' - function trim(s) { - sub(/^[[:space:]]+/, "", s) - sub(/[[:space:]]+$/, "", s) - return s - } - - /^[[:space:]]*#/ || !/=/{ next } - - { - key = $0 - sub(/=.*/, "", key) - key = trim(key) - if (key == "") { - next - } - - value = substr($0, index($0, "=") + 1) - value = trim(value) - - if (!(key in seen)) { - order[++count] = key - seen[key] = 1 - } - - values[key] = value - } - - END { - for (i = 1; i <= count; i++) { - key = order[i] - print key "=" values[key] - } - } - ' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE" >"$MERGED_ENV_FILE" -} - -set_merged_env_value() { - local key="$1" - local value="$2" - local temp_file - - temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-compose-env.XXXXXX")" - awk -F= -v target="$key" -v replacement="$key=$value" ' - BEGIN { replaced = 0 } - /^[[:space:]]*#/ || !/=/{ print; next } - { - key = $1 - gsub(/^[[:space:]]+|[[:space:]]+$/, "", key) - if (key == target) { - if (!replaced) { - print replacement - replaced = 1 - } - next - } - print - } - END { - if (!replaced) { - print replacement - } - } - ' "$MERGED_ENV_FILE" >"$temp_file" - mv "$temp_file" "$MERGED_ENV_FILE" -} - -set_if_not_overridden() { - local key="$1" - local value="$2" - - if user_overrides "$key"; then - return - fi - - set_merged_env_value "$key" "$value" -} - -metadata_db_host() { - case "$1" in - mysql) printf 'db_mysql' ;; - postgresql | '') printf 'db_postgres' ;; - *) printf '%s' "$(env_value DB_HOST)" ;; - esac -} - -metadata_db_port() { - case "$1" in - mysql) printf '3306' ;; - postgresql | '') printf '5432' ;; - *) printf '%s' "$(env_value DB_PORT)" ;; - esac -} - -metadata_db_user() { - case "$1" in - mysql) printf 'root' ;; - postgresql | '') printf 'postgres' ;; - *) printf '%s' "$(env_value DB_USERNAME)" ;; - esac -} - -build_merged_env() { - MERGED_ENV_FILE="$(mktemp "${TMPDIR:-/tmp}/dify-compose.XXXXXX")" - trap 'rm -f "$MERGED_ENV_FILE"' EXIT - - write_merged_env - - local db_type - local redis_host - local redis_port - local redis_username - local redis_password - local redis_auth - local code_execution_api_key - local weaviate_api_key - - db_type="$(env_value DB_TYPE)" - - set_if_not_overridden DB_HOST "$(metadata_db_host "$db_type")" - set_if_not_overridden DB_PORT "$(metadata_db_port "$db_type")" - set_if_not_overridden DB_USERNAME "$(metadata_db_user "$db_type")" - - if ! 
user_overrides CELERY_BROKER_URL; then - redis_host="$(env_value REDIS_HOST)" - redis_port="$(env_value REDIS_PORT)" - redis_username="$(env_value REDIS_USERNAME)" - redis_password="$(env_value REDIS_PASSWORD)" - redis_auth="" - - if [[ -n "$redis_username" && -n "$redis_password" ]]; then - redis_auth="${redis_username}:${redis_password}@" - elif [[ -n "$redis_password" ]]; then - redis_auth=":${redis_password}@" - elif [[ -n "$redis_username" ]]; then - redis_auth="${redis_username}@" - fi - - set_merged_env_value CELERY_BROKER_URL "redis://${redis_auth}${redis_host:-redis}:${redis_port:-6379}/1" - fi - - if ! user_overrides SANDBOX_API_KEY; then - code_execution_api_key="$(env_value CODE_EXECUTION_API_KEY)" - set_if_not_overridden SANDBOX_API_KEY "${code_execution_api_key:-dify-sandbox}" - fi - - if ! user_overrides WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS; then - weaviate_api_key="$(env_value WEAVIATE_API_KEY)" - set_if_not_overridden WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS \ - "${weaviate_api_key:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}" - fi -} - -main() { - detect_compose - ensure_env_files - ensure_secret_key - build_merged_env - - if [[ "$#" -eq 0 ]]; then - set -- up -d - fi - - "${COMPOSE_CMD[@]}" --env-file "$MERGED_ENV_FILE" "$@" -} - -main "$@" diff --git a/docker/dify-compose.ps1 b/docker/dify-compose.ps1 deleted file mode 100644 index 851f8b76fe..0000000000 --- a/docker/dify-compose.ps1 +++ /dev/null @@ -1,317 +0,0 @@ -$ErrorActionPreference = "Stop" -Set-StrictMode -Version Latest - -$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path -Set-Location $ScriptDir - -$DefaultEnvFile = ".env.default" -$UserEnvFile = ".env" -$MergedEnvFile = $null -$Utf8NoBom = New-Object System.Text.UTF8Encoding -ArgumentList $false - -function Write-Info { - param([string]$Message) - [Console]::Error.WriteLine($Message) -} - -function Fail { - param([string]$Message) - [Console]::Error.WriteLine("Error: $Message") - exit 1 -} - -function Test-CommandSuccess { - param([string[]]$Command) - - try { - $Executable = $Command[0] - $CommandArgs = @() - if ($Command.Length -gt 1) { - $CommandArgs = @($Command[1..($Command.Length - 1)]) - } - - & $Executable @CommandArgs *> $null - return $LASTEXITCODE -eq 0 - } - catch { - return $false - } -} - -function Get-ComposeCommand { - if (Test-CommandSuccess @("docker", "compose", "version")) { - return @("docker", "compose") - } - - if ((Get-Command "docker-compose" -ErrorAction SilentlyContinue) -and (Test-CommandSuccess @("docker-compose", "version"))) { - return @("docker-compose") - } - - Fail "Docker Compose is not available. Install Docker Compose, then run this command again." -} - -function New-SecretKey { - $Bytes = New-Object byte[] 42 - $Generator = [System.Security.Cryptography.RandomNumberGenerator]::Create() - - try { - $Generator.GetBytes($Bytes) - } - finally { - $Generator.Dispose() - } - - return [Convert]::ToBase64String($Bytes) -} - -function Ensure-EnvFiles { - if (-not (Test-Path $DefaultEnvFile -PathType Leaf)) { - Fail "$DefaultEnvFile is missing." - } - - if (Test-Path $UserEnvFile -PathType Leaf) { - return - } - - New-Item -ItemType File -Path $UserEnvFile | Out-Null - - if ([Console]::IsInputRedirected) { - Write-Info "Created $UserEnvFile for local overrides." - return - } - - Write-Info "Created $UserEnvFile for local overrides." - $Answer = Read-Host "Do you need a custom deployment now? (Most users can press Enter to skip.) 
[y/N]" - - if ($Answer -match "^(y|yes)$") { - Write-Output "Edit .env with the settings you want to override, using .env.example as the full reference." - Write-Output "Run .\dify-compose.ps1 up -d again when you are ready." - exit 0 - } -} - -function Read-EnvFile { - param([string]$Path) - - $Values = [ordered]@{} - - if (-not (Test-Path $Path -PathType Leaf)) { - return $Values - } - - foreach ($Line in Get-Content -Path $Path) { - if ($Line -match "^\s*#" -or $Line -notmatch "=") { - continue - } - - $SeparatorIndex = $Line.IndexOf("=") - $Key = $Line.Substring(0, $SeparatorIndex).Trim() - $Value = $Line.Substring($SeparatorIndex + 1).Trim() - - if (($Value.StartsWith('"') -and $Value.EndsWith('"')) -or ($Value.StartsWith("'") -and $Value.EndsWith("'"))) { - $Value = $Value.Substring(1, $Value.Length - 2) - } - - if ($Key.Length -gt 0) { - $Values[$Key] = $Value - } - } - - return $Values -} - -function Set-UserEnvValue { - param( - [string]$Key, - [string]$Value - ) - - $Path = [string](Resolve-Path $UserEnvFile) - $Lines = [System.IO.File]::ReadAllLines($Path, [System.Text.Encoding]::UTF8) - $Output = New-Object System.Collections.Generic.List[string] - $Replaced = $false - - foreach ($Line in $Lines) { - if ($Line -match "^\s*#" -or $Line -notmatch "=") { - $Output.Add($Line) - continue - } - - $SeparatorIndex = $Line.IndexOf("=") - $CurrentKey = $Line.Substring(0, $SeparatorIndex).Trim() - - if ($CurrentKey -eq $Key) { - if (-not $Replaced) { - $Output.Add("$Key=$Value") - $Replaced = $true - } - continue - } - - $Output.Add($Line) - } - - if (-not $Replaced) { - $Output.Add("$Key=$Value") - } - - [System.IO.File]::WriteAllLines($Path, $Output, $Utf8NoBom) -} - -function Ensure-SecretKey { - $Values = Read-EnvFile $UserEnvFile - - if ($Values.Contains("SECRET_KEY") -and $Values["SECRET_KEY"]) { - return - } - - Set-UserEnvValue "SECRET_KEY" (New-SecretKey) - Write-Info "Generated SECRET_KEY in $UserEnvFile." 
-} - -function Merge-EnvValues { - $Values = [ordered]@{} - - foreach ($Entry in (Read-EnvFile $DefaultEnvFile).GetEnumerator()) { - $Values[$Entry.Key] = $Entry.Value - } - - foreach ($Entry in (Read-EnvFile $UserEnvFile).GetEnumerator()) { - $Values[$Entry.Key] = $Entry.Value - } - - return $Values -} - -function User-Overrides { - param([string]$Key) - - if (-not (Test-Path $UserEnvFile -PathType Leaf)) { - return $false - } - - return [bool](Select-String -Path $UserEnvFile -Pattern "^\s*$([regex]::Escape($Key))\s*=" -Quiet) -} - -function Metadata-DbHost { - param([string]$DbType, $Values) - - switch ($DbType) { - "mysql" { return "db_mysql" } - "postgresql" { return "db_postgres" } - "" { return "db_postgres" } - default { return $Values["DB_HOST"] } - } -} - -function Metadata-DbPort { - param([string]$DbType, $Values) - - switch ($DbType) { - "mysql" { return "3306" } - "postgresql" { return "5432" } - "" { return "5432" } - default { return $Values["DB_PORT"] } - } -} - -function Metadata-DbUser { - param([string]$DbType, $Values) - - switch ($DbType) { - "mysql" { return "root" } - "postgresql" { return "postgres" } - "" { return "postgres" } - default { return $Values["DB_USERNAME"] } - } -} - -function Write-MergedEnv { - param($Values) - - $Output = New-Object System.Collections.Generic.List[string] - - foreach ($Entry in $Values.GetEnumerator()) { - $Output.Add("$($Entry.Key)=$($Entry.Value)") - } - - [System.IO.File]::WriteAllLines($MergedEnvFile, $Output, $Utf8NoBom) -} - -function Build-MergedEnv { - $Values = Merge-EnvValues - $script:MergedEnvFile = [System.IO.Path]::GetTempFileName() - - $DbType = if ($Values.Contains("DB_TYPE")) { $Values["DB_TYPE"] } else { "postgresql" } - - if (-not (User-Overrides "DB_HOST")) { - $Values["DB_HOST"] = Metadata-DbHost $DbType $Values - } - - if (-not (User-Overrides "DB_PORT")) { - $Values["DB_PORT"] = Metadata-DbPort $DbType $Values - } - - if (-not (User-Overrides "DB_USERNAME")) { - $Values["DB_USERNAME"] = Metadata-DbUser $DbType $Values - } - - if (-not (User-Overrides "CELERY_BROKER_URL")) { - $RedisHost = if ($Values.Contains("REDIS_HOST") -and $Values["REDIS_HOST"]) { $Values["REDIS_HOST"] } else { "redis" } - $RedisPort = if ($Values.Contains("REDIS_PORT") -and $Values["REDIS_PORT"]) { $Values["REDIS_PORT"] } else { "6379" } - $RedisUsername = if ($Values.Contains("REDIS_USERNAME")) { $Values["REDIS_USERNAME"] } else { "" } - $RedisPassword = if ($Values.Contains("REDIS_PASSWORD")) { $Values["REDIS_PASSWORD"] } else { "" } - $RedisAuth = "" - - if ($RedisUsername -and $RedisPassword) { - $RedisAuth = "${RedisUsername}:${RedisPassword}@" - } - elseif ($RedisPassword) { - $RedisAuth = ":${RedisPassword}@" - } - elseif ($RedisUsername) { - $RedisAuth = "${RedisUsername}@" - } - - $Values["CELERY_BROKER_URL"] = "redis://$RedisAuth${RedisHost}:${RedisPort}/1" - } - - if (-not (User-Overrides "SANDBOX_API_KEY")) { - $CodeExecutionApiKey = if ($Values.Contains("CODE_EXECUTION_API_KEY") -and $Values["CODE_EXECUTION_API_KEY"]) { $Values["CODE_EXECUTION_API_KEY"] } else { "dify-sandbox" } - $Values["SANDBOX_API_KEY"] = $CodeExecutionApiKey - } - - if (-not (User-Overrides "WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS")) { - $WeaviateApiKey = if ($Values.Contains("WEAVIATE_API_KEY") -and $Values["WEAVIATE_API_KEY"]) { $Values["WEAVIATE_API_KEY"] } else { "WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih" } - $Values["WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS"] = $WeaviateApiKey - } - - Write-MergedEnv $Values -} - -$ComposeCommand = 
Get-ComposeCommand - -try { - Ensure-EnvFiles - Ensure-SecretKey - Build-MergedEnv - - $ComposeArgs = @($args) - if ($ComposeArgs.Count -eq 0) { - $ComposeArgs = @("up", "-d") - } - - $ComposeCommandArgs = @() - if ($ComposeCommand.Length -gt 1) { - $ComposeCommandArgs = @($ComposeCommand[1..($ComposeCommand.Length - 1)]) - } - - $ComposeExecutable = $ComposeCommand[0] - & $ComposeExecutable @ComposeCommandArgs --env-file $MergedEnvFile @ComposeArgs - exit $LASTEXITCODE -} -finally { - if ($MergedEnvFile -and (Test-Path $MergedEnvFile -PathType Leaf)) { - Remove-Item -Force $MergedEnvFile - } -} diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index d011a8855a..0f65c38098 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -1,4 +1,202 @@ -x-shared-env: &shared-api-worker-env +# Shared configuration using YAML anchors and env_file +x-shared-api-worker-config: &shared-api-worker-config + env_file: + - path: ./envs/core-services/shared.env + required: false + - path: ./envs/core-services/api.env + required: false + - path: ./envs/security.env + required: false + - path: ./envs/databases/db-postgres.env + required: false + - path: ./envs/databases/db-mysql.env + required: false + - path: ./envs/databases/redis.env + required: false + - path: ./envs/vectorstores/weaviate.env + required: false + - path: ./envs/vectorstores/qdrant.env + required: false + - path: ./envs/vectorstores/oceanbase.env + required: false + - path: ./envs/vectorstores/seekdb.env + required: false + - path: ./envs/vectorstores/couchbase.env + required: false + - path: ./envs/vectorstores/pgvector.env + required: false + - path: ./envs/vectorstores/vastbase.env + required: false + - path: ./envs/vectorstores/pgvecto-rs.env + required: false + - path: ./envs/vectorstores/chroma.env + required: false + - path: ./envs/vectorstores/iris.env + required: false + - path: ./envs/vectorstores/oracle.env + required: false + - path: ./envs/vectorstores/opengauss.env + required: false + - path: ./envs/vectorstores/myscale.env + required: false + - path: ./envs/vectorstores/matrixone.env + required: false + - path: ./envs/vectorstores/elasticsearch.env + required: false + - path: ./envs/vectorstores/opensearch.env + required: false + - path: ./envs/vectorstores/milvus.env + required: false + - path: ./envs/infrastructure/nginx.env + required: false + - path: ./envs/infrastructure/certbot.env + required: false + - path: ./envs/infrastructure/ssrf-proxy.env + required: false + - path: ./envs/infrastructure/etcd.env + required: false + - path: ./envs/infrastructure/minio.env + required: false + - path: ./envs/infrastructure/milvus-standalone.env + required: false + - ./.env + networks: + - ssrf_proxy_network + - default + restart: always + +x-shared-worker-config: &shared-worker-config + env_file: + - path: ./envs/core-services/shared.env + required: false + - path: ./envs/core-services/worker.env + required: false + - path: ./envs/security.env + required: false + - path: ./envs/databases/db-postgres.env + required: false + - path: ./envs/databases/db-mysql.env + required: false + - path: ./envs/databases/redis.env + required: false + - path: ./envs/vectorstores/weaviate.env + required: false + - path: ./envs/vectorstores/qdrant.env + required: false + - path: ./envs/vectorstores/oceanbase.env + required: false + - path: ./envs/vectorstores/seekdb.env + required: false + - path: ./envs/vectorstores/couchbase.env + required: false + - path: 
./envs/vectorstores/pgvector.env + required: false + - path: ./envs/vectorstores/vastbase.env + required: false + - path: ./envs/vectorstores/pgvecto-rs.env + required: false + - path: ./envs/vectorstores/chroma.env + required: false + - path: ./envs/vectorstores/iris.env + required: false + - path: ./envs/vectorstores/oracle.env + required: false + - path: ./envs/vectorstores/opengauss.env + required: false + - path: ./envs/vectorstores/myscale.env + required: false + - path: ./envs/vectorstores/matrixone.env + required: false + - path: ./envs/vectorstores/elasticsearch.env + required: false + - path: ./envs/vectorstores/opensearch.env + required: false + - path: ./envs/vectorstores/milvus.env + required: false + - path: ./envs/infrastructure/nginx.env + required: false + - path: ./envs/infrastructure/certbot.env + required: false + - path: ./envs/infrastructure/ssrf-proxy.env + required: false + - path: ./envs/infrastructure/etcd.env + required: false + - path: ./envs/infrastructure/minio.env + required: false + - path: ./envs/infrastructure/milvus-standalone.env + required: false + - ./.env + networks: + - ssrf_proxy_network + - default + restart: always + +x-shared-worker-beat-config: &shared-worker-beat-config + env_file: + - path: ./envs/core-services/shared.env + required: false + - path: ./envs/core-services/worker-beat.env + required: false + - path: ./envs/security.env + required: false + - path: ./envs/databases/db-postgres.env + required: false + - path: ./envs/databases/db-mysql.env + required: false + - path: ./envs/databases/redis.env + required: false + - path: ./envs/vectorstores/weaviate.env + required: false + - path: ./envs/vectorstores/qdrant.env + required: false + - path: ./envs/vectorstores/oceanbase.env + required: false + - path: ./envs/vectorstores/seekdb.env + required: false + - path: ./envs/vectorstores/couchbase.env + required: false + - path: ./envs/vectorstores/pgvector.env + required: false + - path: ./envs/vectorstores/vastbase.env + required: false + - path: ./envs/vectorstores/pgvecto-rs.env + required: false + - path: ./envs/vectorstores/chroma.env + required: false + - path: ./envs/vectorstores/iris.env + required: false + - path: ./envs/vectorstores/oracle.env + required: false + - path: ./envs/vectorstores/opengauss.env + required: false + - path: ./envs/vectorstores/myscale.env + required: false + - path: ./envs/vectorstores/matrixone.env + required: false + - path: ./envs/vectorstores/elasticsearch.env + required: false + - path: ./envs/vectorstores/opensearch.env + required: false + - path: ./envs/vectorstores/milvus.env + required: false + - path: ./envs/infrastructure/nginx.env + required: false + - path: ./envs/infrastructure/certbot.env + required: false + - path: ./envs/infrastructure/ssrf-proxy.env + required: false + - path: ./envs/infrastructure/etcd.env + required: false + - path: ./envs/infrastructure/minio.env + required: false + - path: ./envs/infrastructure/milvus-standalone.env + required: false + - ./.env + networks: + - ssrf_proxy_network + - default + restart: always + services: # Init container to fix permissions init_permissions: @@ -21,12 +219,9 @@ services: # API service api: + <<: *shared-api-worker-config image: langgenius/dify-api:1.14.0 - restart: always environment: - # Use the shared environment variables. - <<: *shared-api-worker-env - # Startup mode, 'api' starts the API server. 
MODE: api SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} @@ -69,12 +264,9 @@ services: # worker service # The Celery worker for processing all queues (dataset, workflow, mail, etc.) worker: + <<: *shared-worker-config image: langgenius/dify-api:1.14.0 - restart: always environment: - # Use the shared environment variables. - <<: *shared-api-worker-env - # Startup mode, 'worker' starts the Celery worker for processing all queues. MODE: worker SENTRY_DSN: ${API_SENTRY_DSN:-} SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0} @@ -115,12 +307,9 @@ services: # worker_beat service # Celery beat for scheduling periodic tasks. worker_beat: + <<: *shared-worker-beat-config image: langgenius/dify-api:1.14.0 - restart: always environment: - # Use the shared environment variables. - <<: *shared-api-worker-env - # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks. MODE: beat depends_on: init_permissions: @@ -154,6 +343,12 @@ services: web: image: langgenius/dify-web:1.14.0 restart: always + env_file: + - path: ./envs/core-services/web.env + required: false + - path: ./envs/security.env + required: false + - ./.env environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} APP_API_URL: ${APP_API_URL:-} @@ -270,6 +465,12 @@ services: sandbox: image: langgenius/dify-sandbox:0.2.15 restart: always + env_file: + - path: ./envs/core-services/sandbox.env + required: false + - path: ./envs/security.env + required: false + - ./.env environment: # The DifySandbox configurations # Make sure you are changing this key for your deployment with a strong key. @@ -294,9 +495,24 @@ services: plugin_daemon: image: langgenius/dify-plugin-daemon:0.6.0-local restart: always + env_file: + - path: ./envs/core-services/shared.env + required: false + - path: ./envs/core-services/plugin-daemon.env + required: false + - path: ./envs/security.env + required: false + - path: ./envs/databases/db-postgres.env + required: false + - path: ./envs/databases/db-mysql.env + required: false + - path: ./envs/databases/redis.env + required: false + - ./.env + networks: + - ssrf_proxy_network + - default environment: - # Use the shared environment variables. - <<: *shared-api-worker-env DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin} DB_SSL_MODE: ${DB_SSL_MODE:-disable} SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index b5afa164dc..0f8458a58f 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -4,722 +4,204 @@ # or docker-compose-template.yaml and regenerate this file. 
# ================================================================== -x-shared-env: &shared-api-worker-env - CONSOLE_API_URL: ${CONSOLE_API_URL:-} - CONSOLE_WEB_URL: ${CONSOLE_WEB_URL:-} - SERVICE_API_URL: ${SERVICE_API_URL:-} - TRIGGER_URL: ${TRIGGER_URL:-http://localhost} - APP_API_URL: ${APP_API_URL:-} - APP_WEB_URL: ${APP_WEB_URL:-} - FILES_URL: ${FILES_URL:-} - INTERNAL_FILES_URL: ${INTERNAL_FILES_URL:-} - LANG: ${LANG:-C.UTF-8} - LC_ALL: ${LC_ALL:-C.UTF-8} - PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8} - UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache} - LOG_LEVEL: ${LOG_LEVEL:-INFO} - LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text} - LOG_FILE: ${LOG_FILE:-/app/logs/server.log} - LOG_FILE_MAX_SIZE: ${LOG_FILE_MAX_SIZE:-20} - LOG_FILE_BACKUP_COUNT: ${LOG_FILE_BACKUP_COUNT:-5} - LOG_DATEFORMAT: ${LOG_DATEFORMAT:-%Y-%m-%d %H:%M:%S} - LOG_TZ: ${LOG_TZ:-UTC} - DEBUG: ${DEBUG:-false} - FLASK_DEBUG: ${FLASK_DEBUG:-false} - ENABLE_REQUEST_LOGGING: ${ENABLE_REQUEST_LOGGING:-False} - SECRET_KEY: ${SECRET_KEY:-sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U} - INIT_PASSWORD: ${INIT_PASSWORD:-} - DEPLOY_ENV: ${DEPLOY_ENV:-PRODUCTION} - CHECK_UPDATE_URL: ${CHECK_UPDATE_URL:-https://updates.dify.ai} - OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1} - MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true} - FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300} - ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false} - ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60} - REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30} - APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0} - APP_MAX_ACTIVE_REQUESTS: ${APP_MAX_ACTIVE_REQUESTS:-0} - APP_MAX_EXECUTION_TIME: ${APP_MAX_EXECUTION_TIME:-1200} - DIFY_BIND_ADDRESS: ${DIFY_BIND_ADDRESS:-0.0.0.0} - DIFY_PORT: ${DIFY_PORT:-5001} - SERVER_WORKER_AMOUNT: ${SERVER_WORKER_AMOUNT:-1} - SERVER_WORKER_CLASS: ${SERVER_WORKER_CLASS:-gevent} - SERVER_WORKER_CONNECTIONS: ${SERVER_WORKER_CONNECTIONS:-10} - CELERY_WORKER_CLASS: ${CELERY_WORKER_CLASS:-} - GUNICORN_TIMEOUT: ${GUNICORN_TIMEOUT:-360} - CELERY_WORKER_AMOUNT: ${CELERY_WORKER_AMOUNT:-4} - CELERY_AUTO_SCALE: ${CELERY_AUTO_SCALE:-false} - CELERY_MAX_WORKERS: ${CELERY_MAX_WORKERS:-} - CELERY_MIN_WORKERS: ${CELERY_MIN_WORKERS:-} - API_TOOL_DEFAULT_CONNECT_TIMEOUT: ${API_TOOL_DEFAULT_CONNECT_TIMEOUT:-10} - API_TOOL_DEFAULT_READ_TIMEOUT: ${API_TOOL_DEFAULT_READ_TIMEOUT:-60} - ENABLE_WEBSITE_JINAREADER: ${ENABLE_WEBSITE_JINAREADER:-true} - ENABLE_WEBSITE_FIRECRAWL: ${ENABLE_WEBSITE_FIRECRAWL:-true} - ENABLE_WEBSITE_WATERCRAWL: ${ENABLE_WEBSITE_WATERCRAWL:-true} - NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: ${NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX:-false} - DB_TYPE: ${DB_TYPE:-postgresql} - DB_USERNAME: ${DB_USERNAME:-postgres} - DB_PASSWORD: ${DB_PASSWORD:-difyai123456} - DB_HOST: ${DB_HOST:-db_postgres} - DB_PORT: ${DB_PORT:-5432} - DB_DATABASE: ${DB_DATABASE:-dify} - SQLALCHEMY_POOL_SIZE: ${SQLALCHEMY_POOL_SIZE:-30} - SQLALCHEMY_MAX_OVERFLOW: ${SQLALCHEMY_MAX_OVERFLOW:-10} - SQLALCHEMY_POOL_RECYCLE: ${SQLALCHEMY_POOL_RECYCLE:-3600} - SQLALCHEMY_ECHO: ${SQLALCHEMY_ECHO:-false} - SQLALCHEMY_POOL_PRE_PING: ${SQLALCHEMY_POOL_PRE_PING:-false} - SQLALCHEMY_POOL_USE_LIFO: ${SQLALCHEMY_POOL_USE_LIFO:-false} - SQLALCHEMY_POOL_TIMEOUT: ${SQLALCHEMY_POOL_TIMEOUT:-30} - SQLALCHEMY_POOL_RESET_ON_RETURN: ${SQLALCHEMY_POOL_RESET_ON_RETURN:-rollback} - POSTGRES_MAX_CONNECTIONS: ${POSTGRES_MAX_CONNECTIONS:-200} - POSTGRES_SHARED_BUFFERS: ${POSTGRES_SHARED_BUFFERS:-128MB} - POSTGRES_WORK_MEM: 
${POSTGRES_WORK_MEM:-4MB} - POSTGRES_MAINTENANCE_WORK_MEM: ${POSTGRES_MAINTENANCE_WORK_MEM:-64MB} - POSTGRES_EFFECTIVE_CACHE_SIZE: ${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB} - POSTGRES_STATEMENT_TIMEOUT: ${POSTGRES_STATEMENT_TIMEOUT:-0} - POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: ${POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT:-0} - MYSQL_MAX_CONNECTIONS: ${MYSQL_MAX_CONNECTIONS:-1000} - MYSQL_INNODB_BUFFER_POOL_SIZE: ${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M} - MYSQL_INNODB_LOG_FILE_SIZE: ${MYSQL_INNODB_LOG_FILE_SIZE:-128M} - MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT: ${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2} - REDIS_HOST: ${REDIS_HOST:-redis} - REDIS_PORT: ${REDIS_PORT:-6379} - REDIS_USERNAME: ${REDIS_USERNAME:-} - REDIS_PASSWORD: ${REDIS_PASSWORD:-difyai123456} - REDIS_USE_SSL: ${REDIS_USE_SSL:-false} - REDIS_SSL_CERT_REQS: ${REDIS_SSL_CERT_REQS:-CERT_NONE} - REDIS_SSL_CA_CERTS: ${REDIS_SSL_CA_CERTS:-} - REDIS_SSL_CERTFILE: ${REDIS_SSL_CERTFILE:-} - REDIS_SSL_KEYFILE: ${REDIS_SSL_KEYFILE:-} - REDIS_DB: ${REDIS_DB:-0} - REDIS_KEY_PREFIX: ${REDIS_KEY_PREFIX:-} - REDIS_MAX_CONNECTIONS: ${REDIS_MAX_CONNECTIONS:-} - REDIS_USE_SENTINEL: ${REDIS_USE_SENTINEL:-false} - REDIS_SENTINELS: ${REDIS_SENTINELS:-} - REDIS_SENTINEL_SERVICE_NAME: ${REDIS_SENTINEL_SERVICE_NAME:-} - REDIS_SENTINEL_USERNAME: ${REDIS_SENTINEL_USERNAME:-} - REDIS_SENTINEL_PASSWORD: ${REDIS_SENTINEL_PASSWORD:-} - REDIS_SENTINEL_SOCKET_TIMEOUT: ${REDIS_SENTINEL_SOCKET_TIMEOUT:-0.1} - REDIS_USE_CLUSTERS: ${REDIS_USE_CLUSTERS:-false} - REDIS_CLUSTERS: ${REDIS_CLUSTERS:-} - REDIS_CLUSTERS_PASSWORD: ${REDIS_CLUSTERS_PASSWORD:-} - REDIS_RETRY_RETRIES: ${REDIS_RETRY_RETRIES:-3} - REDIS_RETRY_BACKOFF_BASE: ${REDIS_RETRY_BACKOFF_BASE:-1.0} - REDIS_RETRY_BACKOFF_CAP: ${REDIS_RETRY_BACKOFF_CAP:-10.0} - REDIS_SOCKET_TIMEOUT: ${REDIS_SOCKET_TIMEOUT:-5.0} - REDIS_SOCKET_CONNECT_TIMEOUT: ${REDIS_SOCKET_CONNECT_TIMEOUT:-5.0} - REDIS_HEALTH_CHECK_INTERVAL: ${REDIS_HEALTH_CHECK_INTERVAL:-30} - CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://:difyai123456@redis:6379/1} - CELERY_BACKEND: ${CELERY_BACKEND:-redis} - BROKER_USE_SSL: ${BROKER_USE_SSL:-false} - CELERY_USE_SENTINEL: ${CELERY_USE_SENTINEL:-false} - CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-} - CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-} - CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1} - CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null} - WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*} - CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*} - COOKIE_DOMAIN: ${COOKIE_DOMAIN:-} - NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-} - NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost} - NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5} - STORAGE_TYPE: ${STORAGE_TYPE:-opendal} - OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs} - OPENDAL_FS_ROOT: ${OPENDAL_FS_ROOT:-storage} - CLICKZETTA_VOLUME_TYPE: ${CLICKZETTA_VOLUME_TYPE:-user} - CLICKZETTA_VOLUME_NAME: ${CLICKZETTA_VOLUME_NAME:-} - CLICKZETTA_VOLUME_TABLE_PREFIX: ${CLICKZETTA_VOLUME_TABLE_PREFIX:-dataset_} - CLICKZETTA_VOLUME_DIFY_PREFIX: ${CLICKZETTA_VOLUME_DIFY_PREFIX:-dify_km} - S3_ENDPOINT: ${S3_ENDPOINT:-} - S3_REGION: ${S3_REGION:-us-east-1} - S3_BUCKET_NAME: ${S3_BUCKET_NAME:-difyai} - S3_ACCESS_KEY: ${S3_ACCESS_KEY:-} - S3_SECRET_KEY: ${S3_SECRET_KEY:-} - S3_ADDRESS_STYLE: ${S3_ADDRESS_STYLE:-auto} - S3_USE_AWS_MANAGED_IAM: ${S3_USE_AWS_MANAGED_IAM:-false} - ARCHIVE_STORAGE_ENABLED: ${ARCHIVE_STORAGE_ENABLED:-false} - 
-  ARCHIVE_STORAGE_ENDPOINT: ${ARCHIVE_STORAGE_ENDPOINT:-}
-  ARCHIVE_STORAGE_ARCHIVE_BUCKET: ${ARCHIVE_STORAGE_ARCHIVE_BUCKET:-}
-  ARCHIVE_STORAGE_EXPORT_BUCKET: ${ARCHIVE_STORAGE_EXPORT_BUCKET:-}
-  ARCHIVE_STORAGE_ACCESS_KEY: ${ARCHIVE_STORAGE_ACCESS_KEY:-}
-  ARCHIVE_STORAGE_SECRET_KEY: ${ARCHIVE_STORAGE_SECRET_KEY:-}
-  ARCHIVE_STORAGE_REGION: ${ARCHIVE_STORAGE_REGION:-auto}
-  AZURE_BLOB_ACCOUNT_NAME: ${AZURE_BLOB_ACCOUNT_NAME:-difyai}
-  AZURE_BLOB_ACCOUNT_KEY: ${AZURE_BLOB_ACCOUNT_KEY:-difyai}
-  AZURE_BLOB_CONTAINER_NAME: ${AZURE_BLOB_CONTAINER_NAME:-difyai-container}
-  AZURE_BLOB_ACCOUNT_URL: ${AZURE_BLOB_ACCOUNT_URL:-https://.blob.core.windows.net}
-  GOOGLE_STORAGE_BUCKET_NAME: ${GOOGLE_STORAGE_BUCKET_NAME:-your-bucket-name}
-  GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: ${GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64:-}
-  ALIYUN_OSS_BUCKET_NAME: ${ALIYUN_OSS_BUCKET_NAME:-your-bucket-name}
-  ALIYUN_OSS_ACCESS_KEY: ${ALIYUN_OSS_ACCESS_KEY:-your-access-key}
-  ALIYUN_OSS_SECRET_KEY: ${ALIYUN_OSS_SECRET_KEY:-your-secret-key}
-  ALIYUN_OSS_ENDPOINT: ${ALIYUN_OSS_ENDPOINT:-https://oss-ap-southeast-1-internal.aliyuncs.com}
-  ALIYUN_OSS_REGION: ${ALIYUN_OSS_REGION:-ap-southeast-1}
-  ALIYUN_OSS_AUTH_VERSION: ${ALIYUN_OSS_AUTH_VERSION:-v4}
-  ALIYUN_OSS_PATH: ${ALIYUN_OSS_PATH:-your-path}
-  TENCENT_COS_BUCKET_NAME: ${TENCENT_COS_BUCKET_NAME:-your-bucket-name}
-  TENCENT_COS_SECRET_KEY: ${TENCENT_COS_SECRET_KEY:-your-secret-key}
-  TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-your-secret-id}
-  TENCENT_COS_REGION: ${TENCENT_COS_REGION:-your-region}
-  TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME:-your-scheme}
-  TENCENT_COS_CUSTOM_DOMAIN: ${TENCENT_COS_CUSTOM_DOMAIN:-your-custom-domain}
-  OCI_ENDPOINT: ${OCI_ENDPOINT:-https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com}
-  OCI_BUCKET_NAME: ${OCI_BUCKET_NAME:-your-bucket-name}
-  OCI_ACCESS_KEY: ${OCI_ACCESS_KEY:-your-access-key}
-  OCI_SECRET_KEY: ${OCI_SECRET_KEY:-your-secret-key}
-  OCI_REGION: ${OCI_REGION:-us-ashburn-1}
-  HUAWEI_OBS_BUCKET_NAME: ${HUAWEI_OBS_BUCKET_NAME:-your-bucket-name}
-  HUAWEI_OBS_SECRET_KEY: ${HUAWEI_OBS_SECRET_KEY:-your-secret-key}
-  HUAWEI_OBS_ACCESS_KEY: ${HUAWEI_OBS_ACCESS_KEY:-your-access-key}
-  HUAWEI_OBS_SERVER: ${HUAWEI_OBS_SERVER:-your-server-url}
-  HUAWEI_OBS_PATH_STYLE: ${HUAWEI_OBS_PATH_STYLE:-false}
-  VOLCENGINE_TOS_BUCKET_NAME: ${VOLCENGINE_TOS_BUCKET_NAME:-your-bucket-name}
-  VOLCENGINE_TOS_SECRET_KEY: ${VOLCENGINE_TOS_SECRET_KEY:-your-secret-key}
-  VOLCENGINE_TOS_ACCESS_KEY: ${VOLCENGINE_TOS_ACCESS_KEY:-your-access-key}
-  VOLCENGINE_TOS_ENDPOINT: ${VOLCENGINE_TOS_ENDPOINT:-your-server-url}
-  VOLCENGINE_TOS_REGION: ${VOLCENGINE_TOS_REGION:-your-region}
-  BAIDU_OBS_BUCKET_NAME: ${BAIDU_OBS_BUCKET_NAME:-your-bucket-name}
-  BAIDU_OBS_SECRET_KEY: ${BAIDU_OBS_SECRET_KEY:-your-secret-key}
-  BAIDU_OBS_ACCESS_KEY: ${BAIDU_OBS_ACCESS_KEY:-your-access-key}
-  BAIDU_OBS_ENDPOINT: ${BAIDU_OBS_ENDPOINT:-your-server-url}
-  SUPABASE_BUCKET_NAME: ${SUPABASE_BUCKET_NAME:-your-bucket-name}
-  SUPABASE_API_KEY: ${SUPABASE_API_KEY:-your-access-key}
-  SUPABASE_URL: ${SUPABASE_URL:-your-server-url}
-  VECTOR_STORE: ${VECTOR_STORE:-weaviate}
-  VECTOR_INDEX_NAME_PREFIX: ${VECTOR_INDEX_NAME_PREFIX:-Vector_index}
-  WEAVIATE_ENDPOINT: ${WEAVIATE_ENDPOINT:-http://weaviate:8080}
-  WEAVIATE_API_KEY: ${WEAVIATE_API_KEY:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
-  WEAVIATE_GRPC_ENDPOINT: ${WEAVIATE_GRPC_ENDPOINT:-grpc://weaviate:50051}
-  WEAVIATE_TOKENIZATION: ${WEAVIATE_TOKENIZATION:-word}
-  OCEANBASE_VECTOR_HOST: ${OCEANBASE_VECTOR_HOST:-oceanbase}
-  OCEANBASE_VECTOR_PORT: ${OCEANBASE_VECTOR_PORT:-2881}
-  OCEANBASE_VECTOR_USER: ${OCEANBASE_VECTOR_USER:-root@test}
-  OCEANBASE_VECTOR_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456}
-  OCEANBASE_VECTOR_DATABASE: ${OCEANBASE_VECTOR_DATABASE:-test}
-  OCEANBASE_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai}
-  OCEANBASE_MEMORY_LIMIT: ${OCEANBASE_MEMORY_LIMIT:-6G}
-  OCEANBASE_ENABLE_HYBRID_SEARCH: ${OCEANBASE_ENABLE_HYBRID_SEARCH:-false}
-  OCEANBASE_FULLTEXT_PARSER: ${OCEANBASE_FULLTEXT_PARSER:-ik}
-  SEEKDB_MEMORY_LIMIT: ${SEEKDB_MEMORY_LIMIT:-2G}
-  QDRANT_URL: ${QDRANT_URL:-http://qdrant:6333}
-  QDRANT_API_KEY: ${QDRANT_API_KEY:-difyai123456}
-  QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
-  QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
-  QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
-  QDRANT_REPLICATION_FACTOR: ${QDRANT_REPLICATION_FACTOR:-1}
-  MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
-  MILVUS_DATABASE: ${MILVUS_DATABASE:-}
-  MILVUS_TOKEN: ${MILVUS_TOKEN:-}
-  MILVUS_USER: ${MILVUS_USER:-}
-  MILVUS_PASSWORD: ${MILVUS_PASSWORD:-}
-  MILVUS_ENABLE_HYBRID_SEARCH: ${MILVUS_ENABLE_HYBRID_SEARCH:-False}
-  MILVUS_ANALYZER_PARAMS: ${MILVUS_ANALYZER_PARAMS:-}
-  MYSCALE_HOST: ${MYSCALE_HOST:-myscale}
-  MYSCALE_PORT: ${MYSCALE_PORT:-8123}
-  MYSCALE_USER: ${MYSCALE_USER:-default}
-  MYSCALE_PASSWORD: ${MYSCALE_PASSWORD:-}
-  MYSCALE_DATABASE: ${MYSCALE_DATABASE:-dify}
-  MYSCALE_FTS_PARAMS: ${MYSCALE_FTS_PARAMS:-}
-  COUCHBASE_CONNECTION_STRING: ${COUCHBASE_CONNECTION_STRING:-couchbase://couchbase-server}
-  COUCHBASE_USER: ${COUCHBASE_USER:-Administrator}
-  COUCHBASE_PASSWORD: ${COUCHBASE_PASSWORD:-password}
-  COUCHBASE_BUCKET_NAME: ${COUCHBASE_BUCKET_NAME:-Embeddings}
-  COUCHBASE_SCOPE_NAME: ${COUCHBASE_SCOPE_NAME:-_default}
-  HOLOGRES_HOST: ${HOLOGRES_HOST:-}
-  HOLOGRES_PORT: ${HOLOGRES_PORT:-80}
-  HOLOGRES_DATABASE: ${HOLOGRES_DATABASE:-}
-  HOLOGRES_ACCESS_KEY_ID: ${HOLOGRES_ACCESS_KEY_ID:-}
-  HOLOGRES_ACCESS_KEY_SECRET: ${HOLOGRES_ACCESS_KEY_SECRET:-}
-  HOLOGRES_SCHEMA: ${HOLOGRES_SCHEMA:-public}
-  HOLOGRES_TOKENIZER: ${HOLOGRES_TOKENIZER:-jieba}
-  HOLOGRES_DISTANCE_METHOD: ${HOLOGRES_DISTANCE_METHOD:-Cosine}
-  HOLOGRES_BASE_QUANTIZATION_TYPE: ${HOLOGRES_BASE_QUANTIZATION_TYPE:-rabitq}
-  HOLOGRES_MAX_DEGREE: ${HOLOGRES_MAX_DEGREE:-64}
-  HOLOGRES_EF_CONSTRUCTION: ${HOLOGRES_EF_CONSTRUCTION:-400}
-  PGVECTOR_HOST: ${PGVECTOR_HOST:-pgvector}
-  PGVECTOR_PORT: ${PGVECTOR_PORT:-5432}
-  PGVECTOR_USER: ${PGVECTOR_USER:-postgres}
-  PGVECTOR_PASSWORD: ${PGVECTOR_PASSWORD:-difyai123456}
-  PGVECTOR_DATABASE: ${PGVECTOR_DATABASE:-dify}
-  PGVECTOR_MIN_CONNECTION: ${PGVECTOR_MIN_CONNECTION:-1}
-  PGVECTOR_MAX_CONNECTION: ${PGVECTOR_MAX_CONNECTION:-5}
-  PGVECTOR_PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
-  PGVECTOR_PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
-  VASTBASE_HOST: ${VASTBASE_HOST:-vastbase}
-  VASTBASE_PORT: ${VASTBASE_PORT:-5432}
-  VASTBASE_USER: ${VASTBASE_USER:-dify}
-  VASTBASE_PASSWORD: ${VASTBASE_PASSWORD:-Difyai123456}
-  VASTBASE_DATABASE: ${VASTBASE_DATABASE:-dify}
-  VASTBASE_MIN_CONNECTION: ${VASTBASE_MIN_CONNECTION:-1}
-  VASTBASE_MAX_CONNECTION: ${VASTBASE_MAX_CONNECTION:-5}
-  PGVECTO_RS_HOST: ${PGVECTO_RS_HOST:-pgvecto-rs}
-  PGVECTO_RS_PORT: ${PGVECTO_RS_PORT:-5432}
-  PGVECTO_RS_USER: ${PGVECTO_RS_USER:-postgres}
-  PGVECTO_RS_PASSWORD: ${PGVECTO_RS_PASSWORD:-difyai123456}
-  PGVECTO_RS_DATABASE: ${PGVECTO_RS_DATABASE:-dify}
-  ANALYTICDB_KEY_ID: ${ANALYTICDB_KEY_ID:-your-ak}
-  ANALYTICDB_KEY_SECRET: ${ANALYTICDB_KEY_SECRET:-your-sk}
-  ANALYTICDB_REGION_ID: ${ANALYTICDB_REGION_ID:-cn-hangzhou}
-  ANALYTICDB_INSTANCE_ID: ${ANALYTICDB_INSTANCE_ID:-gp-ab123456}
-  ANALYTICDB_ACCOUNT: ${ANALYTICDB_ACCOUNT:-testaccount}
-  ANALYTICDB_PASSWORD: ${ANALYTICDB_PASSWORD:-testpassword}
-  ANALYTICDB_NAMESPACE: ${ANALYTICDB_NAMESPACE:-dify}
-  ANALYTICDB_NAMESPACE_PASSWORD: ${ANALYTICDB_NAMESPACE_PASSWORD:-difypassword}
-  ANALYTICDB_HOST: ${ANALYTICDB_HOST:-gp-test.aliyuncs.com}
-  ANALYTICDB_PORT: ${ANALYTICDB_PORT:-5432}
-  ANALYTICDB_MIN_CONNECTION: ${ANALYTICDB_MIN_CONNECTION:-1}
-  ANALYTICDB_MAX_CONNECTION: ${ANALYTICDB_MAX_CONNECTION:-5}
-  TIDB_VECTOR_HOST: ${TIDB_VECTOR_HOST:-tidb}
-  TIDB_VECTOR_PORT: ${TIDB_VECTOR_PORT:-4000}
-  TIDB_VECTOR_USER: ${TIDB_VECTOR_USER:-}
-  TIDB_VECTOR_PASSWORD: ${TIDB_VECTOR_PASSWORD:-}
-  TIDB_VECTOR_DATABASE: ${TIDB_VECTOR_DATABASE:-dify}
-  MATRIXONE_HOST: ${MATRIXONE_HOST:-matrixone}
-  MATRIXONE_PORT: ${MATRIXONE_PORT:-6001}
-  MATRIXONE_USER: ${MATRIXONE_USER:-dump}
-  MATRIXONE_PASSWORD: ${MATRIXONE_PASSWORD:-111}
-  MATRIXONE_DATABASE: ${MATRIXONE_DATABASE:-dify}
-  TIDB_ON_QDRANT_URL: ${TIDB_ON_QDRANT_URL:-http://127.0.0.1}
-  TIDB_ON_QDRANT_API_KEY: ${TIDB_ON_QDRANT_API_KEY:-dify}
-  TIDB_ON_QDRANT_CLIENT_TIMEOUT: ${TIDB_ON_QDRANT_CLIENT_TIMEOUT:-20}
-  TIDB_ON_QDRANT_GRPC_ENABLED: ${TIDB_ON_QDRANT_GRPC_ENABLED:-false}
-  TIDB_ON_QDRANT_GRPC_PORT: ${TIDB_ON_QDRANT_GRPC_PORT:-6334}
-  TIDB_PUBLIC_KEY: ${TIDB_PUBLIC_KEY:-dify}
-  TIDB_PRIVATE_KEY: ${TIDB_PRIVATE_KEY:-dify}
-  TIDB_API_URL: ${TIDB_API_URL:-http://127.0.0.1}
-  TIDB_IAM_API_URL: ${TIDB_IAM_API_URL:-http://127.0.0.1}
-  TIDB_REGION: ${TIDB_REGION:-regions/aws-us-east-1}
-  TIDB_PROJECT_ID: ${TIDB_PROJECT_ID:-dify}
-  TIDB_SPEND_LIMIT: ${TIDB_SPEND_LIMIT:-100}
-  CHROMA_HOST: ${CHROMA_HOST:-127.0.0.1}
-  CHROMA_PORT: ${CHROMA_PORT:-8000}
-  CHROMA_TENANT: ${CHROMA_TENANT:-default_tenant}
-  CHROMA_DATABASE: ${CHROMA_DATABASE:-default_database}
-  CHROMA_AUTH_PROVIDER: ${CHROMA_AUTH_PROVIDER:-chromadb.auth.token_authn.TokenAuthClientProvider}
-  CHROMA_AUTH_CREDENTIALS: ${CHROMA_AUTH_CREDENTIALS:-}
-  ORACLE_USER: ${ORACLE_USER:-dify}
-  ORACLE_PASSWORD: ${ORACLE_PASSWORD:-dify}
-  ORACLE_DSN: ${ORACLE_DSN:-oracle:1521/FREEPDB1}
-  ORACLE_CONFIG_DIR: ${ORACLE_CONFIG_DIR:-/app/api/storage/wallet}
-  ORACLE_WALLET_LOCATION: ${ORACLE_WALLET_LOCATION:-/app/api/storage/wallet}
-  ORACLE_WALLET_PASSWORD: ${ORACLE_WALLET_PASSWORD:-dify}
-  ORACLE_IS_AUTONOMOUS: ${ORACLE_IS_AUTONOMOUS:-false}
-  ALIBABACLOUD_MYSQL_HOST: ${ALIBABACLOUD_MYSQL_HOST:-127.0.0.1}
-  ALIBABACLOUD_MYSQL_PORT: ${ALIBABACLOUD_MYSQL_PORT:-3306}
-  ALIBABACLOUD_MYSQL_USER: ${ALIBABACLOUD_MYSQL_USER:-root}
-  ALIBABACLOUD_MYSQL_PASSWORD: ${ALIBABACLOUD_MYSQL_PASSWORD:-difyai123456}
-  ALIBABACLOUD_MYSQL_DATABASE: ${ALIBABACLOUD_MYSQL_DATABASE:-dify}
-  ALIBABACLOUD_MYSQL_MAX_CONNECTION: ${ALIBABACLOUD_MYSQL_MAX_CONNECTION:-5}
-  ALIBABACLOUD_MYSQL_HNSW_M: ${ALIBABACLOUD_MYSQL_HNSW_M:-6}
-  RELYT_HOST: ${RELYT_HOST:-db}
-  RELYT_PORT: ${RELYT_PORT:-5432}
-  RELYT_USER: ${RELYT_USER:-postgres}
-  RELYT_PASSWORD: ${RELYT_PASSWORD:-difyai123456}
-  RELYT_DATABASE: ${RELYT_DATABASE:-postgres}
-  OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
-  OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
-  OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true}
-  OPENSEARCH_VERIFY_CERTS: ${OPENSEARCH_VERIFY_CERTS:-true}
-  OPENSEARCH_AUTH_METHOD: ${OPENSEARCH_AUTH_METHOD:-basic}
-  OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
-  OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin}
-  OPENSEARCH_AWS_REGION: ${OPENSEARCH_AWS_REGION:-ap-southeast-1}
-  OPENSEARCH_AWS_SERVICE: ${OPENSEARCH_AWS_SERVICE:-aoss}
-  TENCENT_VECTOR_DB_URL: ${TENCENT_VECTOR_DB_URL:-http://127.0.0.1}
-  TENCENT_VECTOR_DB_API_KEY: ${TENCENT_VECTOR_DB_API_KEY:-dify}
-  TENCENT_VECTOR_DB_TIMEOUT: ${TENCENT_VECTOR_DB_TIMEOUT:-30}
-  TENCENT_VECTOR_DB_USERNAME: ${TENCENT_VECTOR_DB_USERNAME:-dify}
-  TENCENT_VECTOR_DB_DATABASE: ${TENCENT_VECTOR_DB_DATABASE:-dify}
-  TENCENT_VECTOR_DB_SHARD: ${TENCENT_VECTOR_DB_SHARD:-1}
-  TENCENT_VECTOR_DB_REPLICAS: ${TENCENT_VECTOR_DB_REPLICAS:-2}
-  TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH: ${TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH:-false}
-  ELASTICSEARCH_HOST: ${ELASTICSEARCH_HOST:-0.0.0.0}
-  ELASTICSEARCH_PORT: ${ELASTICSEARCH_PORT:-9200}
-  ELASTICSEARCH_USERNAME: ${ELASTICSEARCH_USERNAME:-elastic}
-  ELASTICSEARCH_PASSWORD: ${ELASTICSEARCH_PASSWORD:-elastic}
-  KIBANA_PORT: ${KIBANA_PORT:-5601}
-  ELASTICSEARCH_USE_CLOUD: ${ELASTICSEARCH_USE_CLOUD:-false}
-  ELASTICSEARCH_CLOUD_URL: ${ELASTICSEARCH_CLOUD_URL:-YOUR-ELASTICSEARCH_CLOUD_URL}
-  ELASTICSEARCH_API_KEY: ${ELASTICSEARCH_API_KEY:-YOUR-ELASTICSEARCH_API_KEY}
-  ELASTICSEARCH_VERIFY_CERTS: ${ELASTICSEARCH_VERIFY_CERTS:-False}
-  ELASTICSEARCH_CA_CERTS: ${ELASTICSEARCH_CA_CERTS:-}
-  ELASTICSEARCH_REQUEST_TIMEOUT: ${ELASTICSEARCH_REQUEST_TIMEOUT:-100000}
-  ELASTICSEARCH_RETRY_ON_TIMEOUT: ${ELASTICSEARCH_RETRY_ON_TIMEOUT:-True}
-  ELASTICSEARCH_MAX_RETRIES: ${ELASTICSEARCH_MAX_RETRIES:-10}
-  BAIDU_VECTOR_DB_ENDPOINT: ${BAIDU_VECTOR_DB_ENDPOINT:-http://127.0.0.1:5287}
-  BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: ${BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS:-30000}
-  BAIDU_VECTOR_DB_ACCOUNT: ${BAIDU_VECTOR_DB_ACCOUNT:-root}
-  BAIDU_VECTOR_DB_API_KEY: ${BAIDU_VECTOR_DB_API_KEY:-dify}
-  BAIDU_VECTOR_DB_DATABASE: ${BAIDU_VECTOR_DB_DATABASE:-dify}
-  BAIDU_VECTOR_DB_SHARD: ${BAIDU_VECTOR_DB_SHARD:-1}
-  BAIDU_VECTOR_DB_REPLICAS: ${BAIDU_VECTOR_DB_REPLICAS:-3}
-  BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: ${BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER:-DEFAULT_ANALYZER}
-  BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: ${BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE:-COARSE_MODE}
-  BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT:-500}
-  BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO:-0.05}
-  BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS: ${BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS:-300}
-  VIKINGDB_ACCESS_KEY: ${VIKINGDB_ACCESS_KEY:-your-ak}
-  VIKINGDB_SECRET_KEY: ${VIKINGDB_SECRET_KEY:-your-sk}
-  VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai}
-  VIKINGDB_HOST: ${VIKINGDB_HOST:-api-vikingdb.xxx.volces.com}
-  VIKINGDB_SCHEME: ${VIKINGDB_SCHEME:-http}
-  VIKINGDB_CONNECTION_TIMEOUT: ${VIKINGDB_CONNECTION_TIMEOUT:-30}
-  VIKINGDB_SOCKET_TIMEOUT: ${VIKINGDB_SOCKET_TIMEOUT:-30}
-  LINDORM_URL: ${LINDORM_URL:-http://localhost:30070}
-  LINDORM_USERNAME: ${LINDORM_USERNAME:-admin}
-  LINDORM_PASSWORD: ${LINDORM_PASSWORD:-admin}
-  LINDORM_USING_UGC: ${LINDORM_USING_UGC:-True}
-  LINDORM_QUERY_TIMEOUT: ${LINDORM_QUERY_TIMEOUT:-1}
-  OPENGAUSS_HOST: ${OPENGAUSS_HOST:-opengauss}
-  OPENGAUSS_PORT: ${OPENGAUSS_PORT:-6600}
-  OPENGAUSS_USER: ${OPENGAUSS_USER:-postgres}
-  OPENGAUSS_PASSWORD: ${OPENGAUSS_PASSWORD:-Dify@123}
-  OPENGAUSS_DATABASE: ${OPENGAUSS_DATABASE:-dify}
-  OPENGAUSS_MIN_CONNECTION: ${OPENGAUSS_MIN_CONNECTION:-1}
-  OPENGAUSS_MAX_CONNECTION: ${OPENGAUSS_MAX_CONNECTION:-5}
-  OPENGAUSS_ENABLE_PQ: ${OPENGAUSS_ENABLE_PQ:-false}
-  HUAWEI_CLOUD_HOSTS: ${HUAWEI_CLOUD_HOSTS:-https://127.0.0.1:9200}
-  HUAWEI_CLOUD_USER: ${HUAWEI_CLOUD_USER:-admin}
-  HUAWEI_CLOUD_PASSWORD: ${HUAWEI_CLOUD_PASSWORD:-admin}
-  UPSTASH_VECTOR_URL: ${UPSTASH_VECTOR_URL:-https://xxx-vector.upstash.io}
-  UPSTASH_VECTOR_TOKEN: ${UPSTASH_VECTOR_TOKEN:-dify}
-  TABLESTORE_ENDPOINT: ${TABLESTORE_ENDPOINT:-https://instance-name.cn-hangzhou.ots.aliyuncs.com}
-  TABLESTORE_INSTANCE_NAME: ${TABLESTORE_INSTANCE_NAME:-instance-name}
-  TABLESTORE_ACCESS_KEY_ID: ${TABLESTORE_ACCESS_KEY_ID:-xxx}
-  TABLESTORE_ACCESS_KEY_SECRET: ${TABLESTORE_ACCESS_KEY_SECRET:-xxx}
-  TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE: ${TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE:-false}
-  CLICKZETTA_USERNAME: ${CLICKZETTA_USERNAME:-}
-  CLICKZETTA_PASSWORD: ${CLICKZETTA_PASSWORD:-}
-  CLICKZETTA_INSTANCE: ${CLICKZETTA_INSTANCE:-}
-  CLICKZETTA_SERVICE: ${CLICKZETTA_SERVICE:-api.clickzetta.com}
-  CLICKZETTA_WORKSPACE: ${CLICKZETTA_WORKSPACE:-quick_start}
-  CLICKZETTA_VCLUSTER: ${CLICKZETTA_VCLUSTER:-default_ap}
-  CLICKZETTA_SCHEMA: ${CLICKZETTA_SCHEMA:-dify}
-  CLICKZETTA_BATCH_SIZE: ${CLICKZETTA_BATCH_SIZE:-100}
-  CLICKZETTA_ENABLE_INVERTED_INDEX: ${CLICKZETTA_ENABLE_INVERTED_INDEX:-true}
-  CLICKZETTA_ANALYZER_TYPE: ${CLICKZETTA_ANALYZER_TYPE:-chinese}
-  CLICKZETTA_ANALYZER_MODE: ${CLICKZETTA_ANALYZER_MODE:-smart}
-  CLICKZETTA_VECTOR_DISTANCE_FUNCTION: ${CLICKZETTA_VECTOR_DISTANCE_FUNCTION:-cosine_distance}
-  IRIS_HOST: ${IRIS_HOST:-iris}
-  IRIS_SUPER_SERVER_PORT: ${IRIS_SUPER_SERVER_PORT:-1972}
-  IRIS_WEB_SERVER_PORT: ${IRIS_WEB_SERVER_PORT:-52773}
-  IRIS_USER: ${IRIS_USER:-_SYSTEM}
-  IRIS_PASSWORD: ${IRIS_PASSWORD:-Dify@1234}
-  IRIS_DATABASE: ${IRIS_DATABASE:-USER}
-  IRIS_SCHEMA: ${IRIS_SCHEMA:-dify}
-  IRIS_CONNECTION_URL: ${IRIS_CONNECTION_URL:-}
-  IRIS_MIN_CONNECTION: ${IRIS_MIN_CONNECTION:-1}
-  IRIS_MAX_CONNECTION: ${IRIS_MAX_CONNECTION:-3}
-  IRIS_TEXT_INDEX: ${IRIS_TEXT_INDEX:-true}
-  IRIS_TEXT_INDEX_LANGUAGE: ${IRIS_TEXT_INDEX_LANGUAGE:-en}
-  IRIS_TIMEZONE: ${IRIS_TIMEZONE:-UTC}
-  UPLOAD_FILE_SIZE_LIMIT: ${UPLOAD_FILE_SIZE_LIMIT:-15}
-  UPLOAD_FILE_BATCH_LIMIT: ${UPLOAD_FILE_BATCH_LIMIT:-5}
-  UPLOAD_FILE_EXTENSION_BLACKLIST: ${UPLOAD_FILE_EXTENSION_BLACKLIST:-}
-  SINGLE_CHUNK_ATTACHMENT_LIMIT: ${SINGLE_CHUNK_ATTACHMENT_LIMIT:-10}
-  IMAGE_FILE_BATCH_LIMIT: ${IMAGE_FILE_BATCH_LIMIT:-10}
-  ATTACHMENT_IMAGE_FILE_SIZE_LIMIT: ${ATTACHMENT_IMAGE_FILE_SIZE_LIMIT:-2}
-  ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT: ${ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT:-60}
-  ETL_TYPE: ${ETL_TYPE:-dify}
-  UNSTRUCTURED_API_URL: ${UNSTRUCTURED_API_URL:-}
-  UNSTRUCTURED_API_KEY: ${UNSTRUCTURED_API_KEY:-}
-  SCARF_NO_ANALYTICS: ${SCARF_NO_ANALYTICS:-true}
-  PLUGIN_BASED_TOKEN_COUNTING_ENABLED: ${PLUGIN_BASED_TOKEN_COUNTING_ENABLED:-false}
-  MULTIMODAL_SEND_FORMAT: ${MULTIMODAL_SEND_FORMAT:-base64}
-  UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT:-10}
-  UPLOAD_VIDEO_FILE_SIZE_LIMIT: ${UPLOAD_VIDEO_FILE_SIZE_LIMIT:-100}
-  UPLOAD_AUDIO_FILE_SIZE_LIMIT: ${UPLOAD_AUDIO_FILE_SIZE_LIMIT:-50}
-  SENTRY_DSN: ${SENTRY_DSN:-}
-  API_SENTRY_DSN: ${API_SENTRY_DSN:-}
-  API_SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
-  API_SENTRY_PROFILES_SAMPLE_RATE: ${API_SENTRY_PROFILES_SAMPLE_RATE:-1.0}
-  WEB_SENTRY_DSN: ${WEB_SENTRY_DSN:-}
-  PLUGIN_SENTRY_ENABLED: ${PLUGIN_SENTRY_ENABLED:-false}
-  PLUGIN_SENTRY_DSN: ${PLUGIN_SENTRY_DSN:-}
-  NOTION_INTEGRATION_TYPE: ${NOTION_INTEGRATION_TYPE:-public}
-  NOTION_CLIENT_SECRET: ${NOTION_CLIENT_SECRET:-}
-  NOTION_CLIENT_ID: ${NOTION_CLIENT_ID:-}
-  NOTION_INTERNAL_SECRET: ${NOTION_INTERNAL_SECRET:-}
-  MAIL_TYPE: ${MAIL_TYPE:-}
-  MAIL_DEFAULT_SEND_FROM: ${MAIL_DEFAULT_SEND_FROM:-}
-  RESEND_API_URL: ${RESEND_API_URL:-https://api.resend.com}
-  RESEND_API_KEY: ${RESEND_API_KEY:-}
-  SMTP_SERVER: ${SMTP_SERVER:-}
-  SMTP_PORT: ${SMTP_PORT:-465}
-  SMTP_USERNAME: ${SMTP_USERNAME:-}
-  SMTP_PASSWORD: ${SMTP_PASSWORD:-}
-  SMTP_USE_TLS: ${SMTP_USE_TLS:-true}
-  SMTP_OPPORTUNISTIC_TLS: ${SMTP_OPPORTUNISTIC_TLS:-false}
-  SMTP_LOCAL_HOSTNAME: ${SMTP_LOCAL_HOSTNAME:-}
-  SENDGRID_API_KEY: ${SENDGRID_API_KEY:-}
-  INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: ${INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:-4000}
-  INVITE_EXPIRY_HOURS: ${INVITE_EXPIRY_HOURS:-72}
-  RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: ${RESET_PASSWORD_TOKEN_EXPIRY_MINUTES:-5}
-  EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES: ${EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES:-5}
-  CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: ${CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES:-5}
-  OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: ${OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES:-5}
-  CODE_EXECUTION_ENDPOINT: ${CODE_EXECUTION_ENDPOINT:-http://sandbox:8194}
-  CODE_EXECUTION_API_KEY: ${CODE_EXECUTION_API_KEY:-dify-sandbox}
-  CODE_EXECUTION_SSL_VERIFY: ${CODE_EXECUTION_SSL_VERIFY:-True}
-  CODE_EXECUTION_POOL_MAX_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_CONNECTIONS:-100}
-  CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS: ${CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
-  CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY: ${CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY:-5.0}
-  CODE_MAX_NUMBER: ${CODE_MAX_NUMBER:-9223372036854775807}
-  CODE_MIN_NUMBER: ${CODE_MIN_NUMBER:--9223372036854775808}
-  CODE_MAX_DEPTH: ${CODE_MAX_DEPTH:-5}
-  CODE_MAX_PRECISION: ${CODE_MAX_PRECISION:-20}
-  CODE_MAX_STRING_LENGTH: ${CODE_MAX_STRING_LENGTH:-400000}
-  CODE_MAX_STRING_ARRAY_LENGTH: ${CODE_MAX_STRING_ARRAY_LENGTH:-30}
-  CODE_MAX_OBJECT_ARRAY_LENGTH: ${CODE_MAX_OBJECT_ARRAY_LENGTH:-30}
-  CODE_MAX_NUMBER_ARRAY_LENGTH: ${CODE_MAX_NUMBER_ARRAY_LENGTH:-1000}
-  CODE_EXECUTION_CONNECT_TIMEOUT: ${CODE_EXECUTION_CONNECT_TIMEOUT:-10}
-  CODE_EXECUTION_READ_TIMEOUT: ${CODE_EXECUTION_READ_TIMEOUT:-60}
-  CODE_EXECUTION_WRITE_TIMEOUT: ${CODE_EXECUTION_WRITE_TIMEOUT:-10}
-  TEMPLATE_TRANSFORM_MAX_LENGTH: ${TEMPLATE_TRANSFORM_MAX_LENGTH:-400000}
-  WORKFLOW_MAX_EXECUTION_STEPS: ${WORKFLOW_MAX_EXECUTION_STEPS:-500}
-  WORKFLOW_MAX_EXECUTION_TIME: ${WORKFLOW_MAX_EXECUTION_TIME:-1200}
-  WORKFLOW_CALL_MAX_DEPTH: ${WORKFLOW_CALL_MAX_DEPTH:-5}
-  MAX_VARIABLE_SIZE: ${MAX_VARIABLE_SIZE:-204800}
-  WORKFLOW_FILE_UPLOAD_LIMIT: ${WORKFLOW_FILE_UPLOAD_LIMIT:-10}
-  GRAPH_ENGINE_MIN_WORKERS: ${GRAPH_ENGINE_MIN_WORKERS:-1}
-  GRAPH_ENGINE_MAX_WORKERS: ${GRAPH_ENGINE_MAX_WORKERS:-10}
-  GRAPH_ENGINE_SCALE_UP_THRESHOLD: ${GRAPH_ENGINE_SCALE_UP_THRESHOLD:-3}
-  GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME: ${GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME:-5.0}
-  WORKFLOW_NODE_EXECUTION_STORAGE: ${WORKFLOW_NODE_EXECUTION_STORAGE:-rdbms}
-  CORE_WORKFLOW_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository}
-  CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY:-core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository}
-  API_WORKFLOW_RUN_REPOSITORY: ${API_WORKFLOW_RUN_REPOSITORY:-repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository}
-  API_WORKFLOW_NODE_EXECUTION_REPOSITORY: ${API_WORKFLOW_NODE_EXECUTION_REPOSITORY:-repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository}
-  WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
-  WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
-  WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
-  WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: ${WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS:-}
-  ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-}
-  ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-}
-  ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-}
-  ALIYUN_SLS_REGION: ${ALIYUN_SLS_REGION:-}
-  ALIYUN_SLS_PROJECT_NAME: ${ALIYUN_SLS_PROJECT_NAME:-}
-  ALIYUN_SLS_LOGSTORE_TTL: ${ALIYUN_SLS_LOGSTORE_TTL:-365}
-  LOGSTORE_DUAL_WRITE_ENABLED: ${LOGSTORE_DUAL_WRITE_ENABLED:-false}
-  LOGSTORE_DUAL_READ_ENABLED: ${LOGSTORE_DUAL_READ_ENABLED:-true}
-  LOGSTORE_ENABLE_PUT_GRAPH_FIELD: ${LOGSTORE_ENABLE_PUT_GRAPH_FIELD:-true}
-  HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760}
-  HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576}
-  HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True}
-  HTTP_REQUEST_MAX_CONNECT_TIMEOUT: ${HTTP_REQUEST_MAX_CONNECT_TIMEOUT:-10}
-  HTTP_REQUEST_MAX_READ_TIMEOUT: ${HTTP_REQUEST_MAX_READ_TIMEOUT:-600}
-  HTTP_REQUEST_MAX_WRITE_TIMEOUT: ${HTTP_REQUEST_MAX_WRITE_TIMEOUT:-600}
-  WEBHOOK_REQUEST_BODY_MAX_SIZE: ${WEBHOOK_REQUEST_BODY_MAX_SIZE:-10485760}
-  RESPECT_XFORWARD_HEADERS_ENABLED: ${RESPECT_XFORWARD_HEADERS_ENABLED:-false}
-  SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128}
-  SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128}
-  LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
-  MAX_TOOLS_NUM: ${MAX_TOOLS_NUM:-10}
-  MAX_PARALLEL_LIMIT: ${MAX_PARALLEL_LIMIT:-10}
-  MAX_ITERATIONS_NUM: ${MAX_ITERATIONS_NUM:-99}
-  TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}
-  EXPERIMENTAL_ENABLE_VINEXT: ${EXPERIMENTAL_ENABLE_VINEXT:-false}
-  ALLOW_INLINE_STYLES: ${ALLOW_INLINE_STYLES:-false}
-  ALLOW_UNSAFE_DATA_SCHEME: ${ALLOW_UNSAFE_DATA_SCHEME:-false}
-  MAX_TREE_DEPTH: ${MAX_TREE_DEPTH:-50}
-  PGDATA: ${PGDATA:-/var/lib/postgresql/data/pgdata}
-  MYSQL_HOST_VOLUME: ${MYSQL_HOST_VOLUME:-./volumes/mysql/data}
-  SANDBOX_API_KEY: ${SANDBOX_API_KEY:-dify-sandbox}
-  SANDBOX_GIN_MODE: ${SANDBOX_GIN_MODE:-release}
-  SANDBOX_WORKER_TIMEOUT: ${SANDBOX_WORKER_TIMEOUT:-15}
-  SANDBOX_ENABLE_NETWORK: ${SANDBOX_ENABLE_NETWORK:-true}
-  SANDBOX_HTTP_PROXY: ${SANDBOX_HTTP_PROXY:-http://ssrf_proxy:3128}
-  SANDBOX_HTTPS_PROXY: ${SANDBOX_HTTPS_PROXY:-http://ssrf_proxy:3128}
-  SANDBOX_PORT: ${SANDBOX_PORT:-8194}
-  WEAVIATE_PERSISTENCE_DATA_PATH: ${WEAVIATE_PERSISTENCE_DATA_PATH:-/var/lib/weaviate}
-  WEAVIATE_QUERY_DEFAULTS_LIMIT: ${WEAVIATE_QUERY_DEFAULTS_LIMIT:-25}
-  WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED:-true}
-  WEAVIATE_DEFAULT_VECTORIZER_MODULE: ${WEAVIATE_DEFAULT_VECTORIZER_MODULE:-none}
-  WEAVIATE_CLUSTER_HOSTNAME: ${WEAVIATE_CLUSTER_HOSTNAME:-node1}
-  WEAVIATE_AUTHENTICATION_APIKEY_ENABLED: ${WEAVIATE_AUTHENTICATION_APIKEY_ENABLED:-true}
-  WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}
-  WEAVIATE_AUTHENTICATION_APIKEY_USERS: ${WEAVIATE_AUTHENTICATION_APIKEY_USERS:-hello@dify.ai}
-  WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED: ${WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED:-true}
-  WEAVIATE_AUTHORIZATION_ADMINLIST_USERS: ${WEAVIATE_AUTHORIZATION_ADMINLIST_USERS:-hello@dify.ai}
-  WEAVIATE_DISABLE_TELEMETRY: ${WEAVIATE_DISABLE_TELEMETRY:-false}
-  WEAVIATE_ENABLE_TOKENIZER_GSE: ${WEAVIATE_ENABLE_TOKENIZER_GSE:-false}
-  WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA:-false}
-  WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR: ${WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR:-false}
-  CHROMA_SERVER_AUTHN_CREDENTIALS: ${CHROMA_SERVER_AUTHN_CREDENTIALS:-difyai123456}
-  CHROMA_SERVER_AUTHN_PROVIDER: ${CHROMA_SERVER_AUTHN_PROVIDER:-chromadb.auth.token_authn.TokenAuthenticationServerProvider}
-  CHROMA_IS_PERSISTENT: ${CHROMA_IS_PERSISTENT:-TRUE}
-  ORACLE_PWD: ${ORACLE_PWD:-Dify123456}
-  ORACLE_CHARACTERSET: ${ORACLE_CHARACTERSET:-AL32UTF8}
-  ETCD_AUTO_COMPACTION_MODE: ${ETCD_AUTO_COMPACTION_MODE:-revision}
-  ETCD_AUTO_COMPACTION_RETENTION: ${ETCD_AUTO_COMPACTION_RETENTION:-1000}
-  ETCD_QUOTA_BACKEND_BYTES: ${ETCD_QUOTA_BACKEND_BYTES:-4294967296}
-  ETCD_SNAPSHOT_COUNT: ${ETCD_SNAPSHOT_COUNT:-50000}
-  MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY:-minioadmin}
-  MINIO_SECRET_KEY: ${MINIO_SECRET_KEY:-minioadmin}
-  ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379}
-  MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000}
-  MILVUS_AUTHORIZATION_ENABLED: ${MILVUS_AUTHORIZATION_ENABLED:-true}
-  PGVECTOR_PGUSER: ${PGVECTOR_PGUSER:-postgres}
-  PGVECTOR_POSTGRES_PASSWORD: ${PGVECTOR_POSTGRES_PASSWORD:-difyai123456}
-  PGVECTOR_POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
-  PGVECTOR_PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
-  OPENSEARCH_DISCOVERY_TYPE: ${OPENSEARCH_DISCOVERY_TYPE:-single-node}
-  OPENSEARCH_BOOTSTRAP_MEMORY_LOCK: ${OPENSEARCH_BOOTSTRAP_MEMORY_LOCK:-true}
-  OPENSEARCH_JAVA_OPTS_MIN: ${OPENSEARCH_JAVA_OPTS_MIN:-512m}
-  OPENSEARCH_JAVA_OPTS_MAX: ${OPENSEARCH_JAVA_OPTS_MAX:-1024m}
-  OPENSEARCH_INITIAL_ADMIN_PASSWORD: ${OPENSEARCH_INITIAL_ADMIN_PASSWORD:-Qazwsxedc!@#123}
-  OPENSEARCH_MEMLOCK_SOFT: ${OPENSEARCH_MEMLOCK_SOFT:--1}
-  OPENSEARCH_MEMLOCK_HARD: ${OPENSEARCH_MEMLOCK_HARD:--1}
-  OPENSEARCH_NOFILE_SOFT: ${OPENSEARCH_NOFILE_SOFT:-65536}
-  OPENSEARCH_NOFILE_HARD: ${OPENSEARCH_NOFILE_HARD:-65536}
-  NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_}
-  NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false}
-  NGINX_PORT: ${NGINX_PORT:-80}
-  NGINX_SSL_PORT: ${NGINX_SSL_PORT:-443}
-  NGINX_SSL_CERT_FILENAME: ${NGINX_SSL_CERT_FILENAME:-dify.crt}
-  NGINX_SSL_CERT_KEY_FILENAME: ${NGINX_SSL_CERT_KEY_FILENAME:-dify.key}
-  NGINX_SSL_PROTOCOLS: ${NGINX_SSL_PROTOCOLS:-TLSv1.2 TLSv1.3}
-  NGINX_WORKER_PROCESSES: ${NGINX_WORKER_PROCESSES:-auto}
-  NGINX_CLIENT_MAX_BODY_SIZE: ${NGINX_CLIENT_MAX_BODY_SIZE:-100M}
-  NGINX_KEEPALIVE_TIMEOUT: ${NGINX_KEEPALIVE_TIMEOUT:-65}
-  NGINX_PROXY_READ_TIMEOUT: ${NGINX_PROXY_READ_TIMEOUT:-3600s}
-  NGINX_PROXY_SEND_TIMEOUT: ${NGINX_PROXY_SEND_TIMEOUT:-3600s}
-  NGINX_ENABLE_CERTBOT_CHALLENGE: ${NGINX_ENABLE_CERTBOT_CHALLENGE:-false}
-  CERTBOT_EMAIL: ${CERTBOT_EMAIL:-}
-  CERTBOT_DOMAIN: ${CERTBOT_DOMAIN:-}
-  CERTBOT_OPTIONS: ${CERTBOT_OPTIONS:-}
-  SSRF_HTTP_PORT: ${SSRF_HTTP_PORT:-3128}
-  SSRF_COREDUMP_DIR: ${SSRF_COREDUMP_DIR:-/var/spool/squid}
-  SSRF_REVERSE_PROXY_PORT: ${SSRF_REVERSE_PROXY_PORT:-8194}
-  SSRF_SANDBOX_HOST: ${SSRF_SANDBOX_HOST:-sandbox}
-  SSRF_DEFAULT_TIME_OUT: ${SSRF_DEFAULT_TIME_OUT:-5}
-  SSRF_DEFAULT_CONNECT_TIME_OUT: ${SSRF_DEFAULT_CONNECT_TIME_OUT:-5}
-  SSRF_DEFAULT_READ_TIME_OUT: ${SSRF_DEFAULT_READ_TIME_OUT:-5}
-  SSRF_DEFAULT_WRITE_TIME_OUT: ${SSRF_DEFAULT_WRITE_TIME_OUT:-5}
-  SSRF_POOL_MAX_CONNECTIONS: ${SSRF_POOL_MAX_CONNECTIONS:-100}
-  SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS: ${SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS:-20}
-  SSRF_POOL_KEEPALIVE_EXPIRY: ${SSRF_POOL_KEEPALIVE_EXPIRY:-5.0}
-  EXPOSE_NGINX_PORT: ${EXPOSE_NGINX_PORT:-80}
-  EXPOSE_NGINX_SSL_PORT: ${EXPOSE_NGINX_SSL_PORT:-443}
-  POSITION_TOOL_PINS: ${POSITION_TOOL_PINS:-}
-  POSITION_TOOL_INCLUDES: ${POSITION_TOOL_INCLUDES:-}
-  POSITION_TOOL_EXCLUDES: ${POSITION_TOOL_EXCLUDES:-}
-  POSITION_PROVIDER_PINS: ${POSITION_PROVIDER_PINS:-}
-  POSITION_PROVIDER_INCLUDES: ${POSITION_PROVIDER_INCLUDES:-}
-  POSITION_PROVIDER_EXCLUDES: ${POSITION_PROVIDER_EXCLUDES:-}
-  CSP_WHITELIST: ${CSP_WHITELIST:-}
-  CREATE_TIDB_SERVICE_JOB_ENABLED: ${CREATE_TIDB_SERVICE_JOB_ENABLED:-false}
-  MAX_SUBMIT_COUNT: ${MAX_SUBMIT_COUNT:-100}
-  TOP_K_MAX_VALUE: ${TOP_K_MAX_VALUE:-10}
-  DB_PLUGIN_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
-  EXPOSE_PLUGIN_DAEMON_PORT: ${EXPOSE_PLUGIN_DAEMON_PORT:-5002}
-  PLUGIN_DAEMON_PORT: ${PLUGIN_DAEMON_PORT:-5002}
-  PLUGIN_DAEMON_KEY: ${PLUGIN_DAEMON_KEY:-lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi}
-  PLUGIN_DAEMON_URL: ${PLUGIN_DAEMON_URL:-http://plugin_daemon:5002}
-  PLUGIN_MAX_PACKAGE_SIZE: ${PLUGIN_MAX_PACKAGE_SIZE:-52428800}
-  PLUGIN_MODEL_SCHEMA_CACHE_TTL: ${PLUGIN_MODEL_SCHEMA_CACHE_TTL:-3600}
-  PLUGIN_PPROF_ENABLED: ${PLUGIN_PPROF_ENABLED:-false}
-  PLUGIN_DEBUGGING_HOST: ${PLUGIN_DEBUGGING_HOST:-0.0.0.0}
-  PLUGIN_DEBUGGING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
-  EXPOSE_PLUGIN_DEBUGGING_HOST: ${EXPOSE_PLUGIN_DEBUGGING_HOST:-localhost}
-  EXPOSE_PLUGIN_DEBUGGING_PORT: ${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}
-  PLUGIN_DIFY_INNER_API_KEY: ${PLUGIN_DIFY_INNER_API_KEY:-QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1}
-  PLUGIN_DIFY_INNER_API_URL: ${PLUGIN_DIFY_INNER_API_URL:-http://api:5001}
-  ENDPOINT_URL_TEMPLATE: ${ENDPOINT_URL_TEMPLATE:-http://localhost/e/{hook_id}}
-  MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true}
-  MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
-  CREATORS_PLATFORM_FEATURES_ENABLED: ${CREATORS_PLATFORM_FEATURES_ENABLED:-true}
-  CREATORS_PLATFORM_API_URL: ${CREATORS_PLATFORM_API_URL:-https://creators.dify.ai}
-  CREATORS_PLATFORM_OAUTH_CLIENT_ID: ${CREATORS_PLATFORM_OAUTH_CLIENT_ID:-}
-  FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
-  PLUGIN_STDIO_BUFFER_SIZE: ${PLUGIN_STDIO_BUFFER_SIZE:-1024}
-  PLUGIN_STDIO_MAX_BUFFER_SIZE: ${PLUGIN_STDIO_MAX_BUFFER_SIZE:-5242880}
-  PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
-  PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
-  PLUGIN_DAEMON_TIMEOUT: ${PLUGIN_DAEMON_TIMEOUT:-600.0}
-  PIP_MIRROR_URL: ${PIP_MIRROR_URL:-}
-  PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local}
-  PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage}
-  PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
-  PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin}
-  PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages}
-  PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets}
-  PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-}
-  PLUGIN_S3_USE_AWS: ${PLUGIN_S3_USE_AWS:-false}
-  PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false}
-  PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-}
-  PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false}
-  PLUGIN_AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-}
-  PLUGIN_AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-}
-  PLUGIN_AWS_REGION: ${PLUGIN_AWS_REGION:-}
-  PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-}
-  PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-}
-  PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
-  PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
-  PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
-  PLUGIN_ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
-  PLUGIN_ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
-  PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
-  PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
-  PLUGIN_ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
-  PLUGIN_ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
-  PLUGIN_VOLCENGINE_TOS_ENDPOINT: ${PLUGIN_VOLCENGINE_TOS_ENDPOINT:-}
-  PLUGIN_VOLCENGINE_TOS_ACCESS_KEY: ${PLUGIN_VOLCENGINE_TOS_ACCESS_KEY:-}
-  PLUGIN_VOLCENGINE_TOS_SECRET_KEY: ${PLUGIN_VOLCENGINE_TOS_SECRET_KEY:-}
-  PLUGIN_VOLCENGINE_TOS_REGION: ${PLUGIN_VOLCENGINE_TOS_REGION:-}
-  ENABLE_OTEL: ${ENABLE_OTEL:-false}
-  OTLP_TRACE_ENDPOINT: ${OTLP_TRACE_ENDPOINT:-}
-  OTLP_METRIC_ENDPOINT: ${OTLP_METRIC_ENDPOINT:-}
-  OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318}
-  OTLP_API_KEY: ${OTLP_API_KEY:-}
-  OTEL_EXPORTER_OTLP_PROTOCOL: ${OTEL_EXPORTER_OTLP_PROTOCOL:-}
-  OTEL_EXPORTER_TYPE: ${OTEL_EXPORTER_TYPE:-otlp}
-  OTEL_SAMPLING_RATE: ${OTEL_SAMPLING_RATE:-0.1}
-  OTEL_BATCH_EXPORT_SCHEDULE_DELAY: ${OTEL_BATCH_EXPORT_SCHEDULE_DELAY:-5000}
-  OTEL_MAX_QUEUE_SIZE: ${OTEL_MAX_QUEUE_SIZE:-2048}
-  OTEL_MAX_EXPORT_BATCH_SIZE: ${OTEL_MAX_EXPORT_BATCH_SIZE:-512}
-  OTEL_METRIC_EXPORT_INTERVAL: ${OTEL_METRIC_EXPORT_INTERVAL:-60000}
-  OTEL_BATCH_EXPORT_TIMEOUT: ${OTEL_BATCH_EXPORT_TIMEOUT:-10000}
-  OTEL_METRIC_EXPORT_TIMEOUT: ${OTEL_METRIC_EXPORT_TIMEOUT:-30000}
-  ALLOW_EMBED: ${ALLOW_EMBED:-false}
-  QUEUE_MONITOR_THRESHOLD: ${QUEUE_MONITOR_THRESHOLD:-200}
-  QUEUE_MONITOR_ALERT_EMAILS: ${QUEUE_MONITOR_ALERT_EMAILS:-}
-  QUEUE_MONITOR_INTERVAL: ${QUEUE_MONITOR_INTERVAL:-30}
-  SWAGGER_UI_ENABLED: ${SWAGGER_UI_ENABLED:-false}
-  SWAGGER_UI_PATH: ${SWAGGER_UI_PATH:-/swagger-ui.html}
-  DSL_EXPORT_ENCRYPT_DATASET_ID: ${DSL_EXPORT_ENCRYPT_DATASET_ID:-true}
-  DATASET_MAX_SEGMENTS_PER_REQUEST: ${DATASET_MAX_SEGMENTS_PER_REQUEST:-0}
-  ENABLE_CLEAN_EMBEDDING_CACHE_TASK: ${ENABLE_CLEAN_EMBEDDING_CACHE_TASK:-false}
-  ENABLE_CLEAN_UNUSED_DATASETS_TASK: ${ENABLE_CLEAN_UNUSED_DATASETS_TASK:-false}
-  ENABLE_CREATE_TIDB_SERVERLESS_TASK: ${ENABLE_CREATE_TIDB_SERVERLESS_TASK:-false}
-  ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: ${ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK:-false}
-  ENABLE_CLEAN_MESSAGES: ${ENABLE_CLEAN_MESSAGES:-false}
-  ENABLE_WORKFLOW_RUN_CLEANUP_TASK: ${ENABLE_WORKFLOW_RUN_CLEANUP_TASK:-false}
-  ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: ${ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK:-false}
-  ENABLE_DATASETS_QUEUE_MONITOR: ${ENABLE_DATASETS_QUEUE_MONITOR:-false}
-  ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: ${ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK:-true}
-  ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: ${ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK:-true}
-  WORKFLOW_SCHEDULE_POLLER_INTERVAL: ${WORKFLOW_SCHEDULE_POLLER_INTERVAL:-1}
-  WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: ${WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE:-100}
-  WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: ${WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK:-0}
-  TENANT_ISOLATED_TASK_CONCURRENCY: ${TENANT_ISOLATED_TASK_CONCURRENCY:-1}
-  ANNOTATION_IMPORT_FILE_SIZE_LIMIT: ${ANNOTATION_IMPORT_FILE_SIZE_LIMIT:-2}
-  ANNOTATION_IMPORT_MAX_RECORDS: ${ANNOTATION_IMPORT_MAX_RECORDS:-10000}
-  ANNOTATION_IMPORT_MIN_RECORDS: ${ANNOTATION_IMPORT_MIN_RECORDS:-1}
-  ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE: ${ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE:-5}
-  ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR: ${ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR:-20}
-  ANNOTATION_IMPORT_MAX_CONCURRENT: ${ANNOTATION_IMPORT_MAX_CONCURRENT:-5}
-  AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
-  SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
-  SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
-  SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
-  SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
-  EVENT_BUS_REDIS_URL: ${EVENT_BUS_REDIS_URL:-}
-  EVENT_BUS_REDIS_CHANNEL_TYPE: ${EVENT_BUS_REDIS_CHANNEL_TYPE:-pubsub}
-  EVENT_BUS_REDIS_USE_CLUSTERS: ${EVENT_BUS_REDIS_USE_CLUSTERS:-false}
-  ENABLE_HUMAN_INPUT_TIMEOUT_TASK: ${ENABLE_HUMAN_INPUT_TIMEOUT_TASK:-true}
-  HUMAN_INPUT_TIMEOUT_TASK_INTERVAL: ${HUMAN_INPUT_TIMEOUT_TASK_INTERVAL:-1}
-  SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL:-90000}
+# Shared configuration using YAML anchors and env_file
+x-shared-api-worker-config: &shared-api-worker-config
+  env_file:
+    - path: ./envs/core-services/shared.env
+      required: false
+    - path: ./envs/core-services/api.env
+      required: false
+    - path: ./envs/security.env
+      required: false
+    - path: ./envs/databases/db-postgres.env
+      required: false
+    - path: ./envs/databases/db-mysql.env
+      required: false
+    - path: ./envs/databases/redis.env
+      required: false
+    - path: ./envs/vectorstores/weaviate.env
+      required: false
+    - path: ./envs/vectorstores/qdrant.env
+      required: false
+    - path: ./envs/vectorstores/oceanbase.env
+      required: false
+    - path: ./envs/vectorstores/seekdb.env
+      required: false
+    - path: ./envs/vectorstores/couchbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvector.env
+      required: false
+    - path: ./envs/vectorstores/vastbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvecto-rs.env
+      required: false
+    - path: ./envs/vectorstores/chroma.env
+      required: false
+    - path: ./envs/vectorstores/iris.env
+      required: false
+    - path: ./envs/vectorstores/oracle.env
+      required: false
+    - path: ./envs/vectorstores/opengauss.env
+      required: false
+    - path: ./envs/vectorstores/myscale.env
+      required: false
+    - path: ./envs/vectorstores/matrixone.env
+      required: false
+    - path: ./envs/vectorstores/elasticsearch.env
+      required: false
+    - path: ./envs/vectorstores/opensearch.env
+      required: false
+    - path: ./envs/vectorstores/milvus.env
+      required: false
+    - path: ./envs/infrastructure/nginx.env
+      required: false
+    - path: ./envs/infrastructure/certbot.env
+      required: false
+    - path: ./envs/infrastructure/ssrf-proxy.env
+      required: false
+    - path: ./envs/infrastructure/etcd.env
+      required: false
+    - path: ./envs/infrastructure/minio.env
+      required: false
+    - path: ./envs/infrastructure/milvus-standalone.env
+      required: false
+    - ./.env
+  networks:
+    - ssrf_proxy_network
+    - default
+  restart: always
+
+x-shared-worker-config: &shared-worker-config
+  env_file:
+    - path: ./envs/core-services/shared.env
+      required: false
+    - path: ./envs/core-services/worker.env
+      required: false
+    - path: ./envs/security.env
+      required: false
+    - path: ./envs/databases/db-postgres.env
+      required: false
+    - path: ./envs/databases/db-mysql.env
+      required: false
+    - path: ./envs/databases/redis.env
+      required: false
+    - path: ./envs/vectorstores/weaviate.env
+      required: false
+    - path: ./envs/vectorstores/qdrant.env
+      required: false
+    - path: ./envs/vectorstores/oceanbase.env
+      required: false
+    - path: ./envs/vectorstores/seekdb.env
+      required: false
+    - path: ./envs/vectorstores/couchbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvector.env
+      required: false
+    - path: ./envs/vectorstores/vastbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvecto-rs.env
+      required: false
+    - path: ./envs/vectorstores/chroma.env
+      required: false
+    - path: ./envs/vectorstores/iris.env
+      required: false
+    - path: ./envs/vectorstores/oracle.env
+      required: false
+    - path: ./envs/vectorstores/opengauss.env
+      required: false
+    - path: ./envs/vectorstores/myscale.env
+      required: false
+    - path: ./envs/vectorstores/matrixone.env
+      required: false
+    - path: ./envs/vectorstores/elasticsearch.env
+      required: false
+    - path: ./envs/vectorstores/opensearch.env
+      required: false
+    - path: ./envs/vectorstores/milvus.env
+      required: false
+    - path: ./envs/infrastructure/nginx.env
+      required: false
+    - path: ./envs/infrastructure/certbot.env
+      required: false
+    - path: ./envs/infrastructure/ssrf-proxy.env
+      required: false
+    - path: ./envs/infrastructure/etcd.env
+      required: false
+    - path: ./envs/infrastructure/minio.env
+      required: false
+    - path: ./envs/infrastructure/milvus-standalone.env
+      required: false
+    - ./.env
+  networks:
+    - ssrf_proxy_network
+    - default
+  restart: always
+
+x-shared-worker-beat-config: &shared-worker-beat-config
+  env_file:
+    - path: ./envs/core-services/shared.env
+      required: false
+    - path: ./envs/core-services/worker-beat.env
+      required: false
+    - path: ./envs/security.env
+      required: false
+    - path: ./envs/databases/db-postgres.env
+      required: false
+    - path: ./envs/databases/db-mysql.env
+      required: false
+    - path: ./envs/databases/redis.env
+      required: false
+    - path: ./envs/vectorstores/weaviate.env
+      required: false
+    - path: ./envs/vectorstores/qdrant.env
+      required: false
+    - path: ./envs/vectorstores/oceanbase.env
+      required: false
+    - path: ./envs/vectorstores/seekdb.env
+      required: false
+    - path: ./envs/vectorstores/couchbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvector.env
+      required: false
+    - path: ./envs/vectorstores/vastbase.env
+      required: false
+    - path: ./envs/vectorstores/pgvecto-rs.env
+      required: false
+    - path: ./envs/vectorstores/chroma.env
+      required: false
+    - path: ./envs/vectorstores/iris.env
+      required: false
+    - path: ./envs/vectorstores/oracle.env
+      required: false
+    - path: ./envs/vectorstores/opengauss.env
+      required: false
+    - path: ./envs/vectorstores/myscale.env
+      required: false
+    - path: ./envs/vectorstores/matrixone.env
+      required: false
+    - path: ./envs/vectorstores/elasticsearch.env
+      required: false
+    - path: ./envs/vectorstores/opensearch.env
+      required: false
+    - path: ./envs/vectorstores/milvus.env
+      required: false
+    - path: ./envs/infrastructure/nginx.env
+      required: false
+    - path: ./envs/infrastructure/certbot.env
+      required: false
+    - path: ./envs/infrastructure/ssrf-proxy.env
+      required: false
+    - path: ./envs/infrastructure/etcd.env
+      required: false
+    - path: ./envs/infrastructure/minio.env
+      required: false
+    - path: ./envs/infrastructure/milvus-standalone.env
+      required: false
+    - ./.env
+  networks:
+    - ssrf_proxy_network
+    - default
+  restart: always
 
 services:
   # Init container to fix permissions
@@ -743,12 +225,9 @@ services:
   # API service
   api:
+    <<: *shared-api-worker-config
     image: langgenius/dify-api:1.14.0
-    restart: always
     environment:
-      # Use the shared environment variables.
-      <<: *shared-api-worker-env
-
       # Startup mode, 'api' starts the API server.
       MODE: api
       SENTRY_DSN: ${API_SENTRY_DSN:-}
       SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
@@ -791,12 +270,9 @@ services:
   # worker service
   # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
   worker:
+    <<: *shared-worker-config
    image: langgenius/dify-api:1.14.0
-    restart: always
     environment:
-      # Use the shared environment variables.
-      <<: *shared-api-worker-env
-      # Startup mode, 'worker' starts the Celery worker for processing all queues.
       MODE: worker
       SENTRY_DSN: ${API_SENTRY_DSN:-}
       SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
@@ -837,12 +313,9 @@ services:
   # worker_beat service
   # Celery beat for scheduling periodic tasks.
   worker_beat:
+    <<: *shared-worker-beat-config
     image: langgenius/dify-api:1.14.0
-    restart: always
     environment:
-      # Use the shared environment variables.
-      <<: *shared-api-worker-env
-      # Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks.
       MODE: beat
     depends_on:
       init_permissions:
@@ -876,6 +349,12 @@ services:
   web:
     image: langgenius/dify-web:1.14.0
     restart: always
+    env_file:
+      - path: ./envs/core-services/web.env
+        required: false
+      - path: ./envs/security.env
+        required: false
+      - ./.env
     environment:
       CONSOLE_API_URL: ${CONSOLE_API_URL:-}
      APP_API_URL: ${APP_API_URL:-}
@@ -992,6 +471,12 @@ services:
   sandbox:
     image: langgenius/dify-sandbox:0.2.15
     restart: always
+    env_file:
+      - path: ./envs/core-services/sandbox.env
+        required: false
+      - path: ./envs/security.env
+        required: false
+      - ./.env
     environment:
       # The DifySandbox configurations
       # Make sure you are changing this key for your deployment with a strong key.
@@ -1016,9 +501,24 @@ services:
   plugin_daemon:
     image: langgenius/dify-plugin-daemon:0.6.0-local
     restart: always
+    env_file:
+      - path: ./envs/core-services/shared.env
+        required: false
+      - path: ./envs/core-services/plugin-daemon.env
+        required: false
+      - path: ./envs/security.env
+        required: false
+      - path: ./envs/databases/db-postgres.env
+        required: false
+      - path: ./envs/databases/db-mysql.env
+        required: false
+      - path: ./envs/databases/redis.env
+        required: false
+      - ./.env
+    networks:
+      - ssrf_proxy_network
+      - default
     environment:
-      # Use the shared environment variables.
-      <<: *shared-api-worker-env
       DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
       DB_SSL_MODE: ${DB_SSL_MODE:-disable}
       SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
diff --git a/docker/envs/core-services/api.env.example b/docker/envs/core-services/api.env.example
new file mode 100644
index 0000000000..1a3fc7a4ab
--- /dev/null
+++ b/docker/envs/core-services/api.env.example
@@ -0,0 +1,13 @@
+# ------------------------------
+# Api Configuration
+# ------------------------------
+
+MODE=api
+SENTRY_DSN=
+SENTRY_TRACES_SAMPLE_RATE=1.0
+SENTRY_PROFILES_SAMPLE_RATE=1.0
+PLUGIN_REMOTE_INSTALL_HOST=localhost
+PLUGIN_REMOTE_INSTALL_PORT=5003
+PLUGIN_MAX_PACKAGE_SIZE=52428800
+PLUGIN_DAEMON_TIMEOUT=600.0
+INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
diff --git a/docker/envs/core-services/plugin-daemon.env.example b/docker/envs/core-services/plugin-daemon.env.example
new file mode 100644
index 0000000000..c3b1bef974
--- /dev/null
+++ b/docker/envs/core-services/plugin-daemon.env.example
@@ -0,0 +1,23 @@
+# ------------------------------
+# Plugin Daemon Configuration
+# ------------------------------
+
+DB_PLUGIN_DATABASE=dify_plugin
+PLUGIN_DAEMON_URL=http://plugin_daemon:5002
+PLUGIN_PPROF_ENABLED=false
+PLUGIN_DIFY_INNER_API_URL=http://api:5001
+FORCE_VERIFYING_SIGNATURE=true
+PLUGIN_STDIO_BUFFER_SIZE=1024
+PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880
+PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120
+PLUGIN_MAX_EXECUTION_TIMEOUT=600
+PLUGIN_DEBUGGING_HOST=0.0.0.0
+PLUGIN_DEBUGGING_PORT=5003
+PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
+PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
+PLUGIN_DAEMON_PORT=5002
+CELERY_WORKER_CLASS=
+PLUGIN_STORAGE_TYPE=local
+PLUGIN_STORAGE_LOCAL_ROOT=/app/storage
+PLUGIN_WORKING_PATH=/app/storage/cwd
+PLUGIN_STORAGE_OSS_BUCKET=
diff --git a/docker/envs/core-services/sandbox.env.example b/docker/envs/core-services/sandbox.env.example
new file mode 100644
index 0000000000..5d4ee6614b
--- /dev/null
+++ b/docker/envs/core-services/sandbox.env.example
@@ -0,0 +1,17 @@
+# ------------------------------
+# Sandbox Configuration
+# ------------------------------
+
+SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
+SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
+SANDBOX_PORT=8194
+PIP_MIRROR_URL=
+SANDBOX_API_KEY=dify-sandbox
+SANDBOX_GIN_MODE=release
+SANDBOX_WORKER_TIMEOUT=15
+SANDBOX_ENABLE_NETWORK=true
+SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
+SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
+SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
+SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
+SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
diff --git a/docker/envs/core-services/shared.env.example b/docker/envs/core-services/shared.env.example
new file mode 100644
index 0000000000..2a57f6954a
--- /dev/null
+++ b/docker/envs/core-services/shared.env.example
@@ -0,0 +1,469 @@
+# ------------------------------
+# Shared API/Worker Configuration
+# ------------------------------
+
+CONSOLE_WEB_URL=
+SERVICE_API_URL=
+TRIGGER_URL=http://localhost
+APP_WEB_URL=
+FILES_URL=
+INTERNAL_FILES_URL=
+LANG=C.UTF-8
+LC_ALL=C.UTF-8
+PYTHONIOENCODING=utf-8
+UV_CACHE_DIR=/tmp/.uv-cache
+CHECK_UPDATE_URL=https://updates.dify.ai
+OPENAI_API_BASE=https://api.openai.com/v1
+MIGRATION_ENABLED=true
+FILES_ACCESS_TIMEOUT=300
+ENABLE_COLLABORATION_MODE=false
+CELERY_BROKER_URL=redis://:difyai123456@redis:6379/1
+CELERY_TASK_ANNOTATIONS=null
+AZURE_BLOB_ACCOUNT_URL=https://.blob.core.windows.net
+SUPABASE_URL=your-server-url
+TIDB_ON_QDRANT_URL=http://127.0.0.1
+TIDB_ON_QDRANT_API_KEY=dify
+TIDB_API_URL=http://127.0.0.1
+TIDB_IAM_API_URL=http://127.0.0.1
+TIDB_REGION=regions/aws-us-east-1
+TIDB_PROJECT_ID=dify
+TIDB_SPEND_LIMIT=100
+TENCENT_VECTOR_DB_URL=http://127.0.0.1
+TENCENT_VECTOR_DB_API_KEY=dify
+LINDORM_URL=http://localhost:30070
+LINDORM_USERNAME=admin
+UPSTASH_VECTOR_URL=https://xxx-vector.upstash.io
+UPLOAD_FILE_SIZE_LIMIT=15
+UPLOAD_FILE_BATCH_LIMIT=5
+UPLOAD_FILE_EXTENSION_BLACKLIST=
+SINGLE_CHUNK_ATTACHMENT_LIMIT=10
+IMAGE_FILE_BATCH_LIMIT=10
+ATTACHMENT_IMAGE_FILE_SIZE_LIMIT=2
+ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT=60
+ETL_TYPE=dify
+UNSTRUCTURED_API_URL=
+MULTIMODAL_SEND_FORMAT=base64
+UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
+UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
+UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
+API_SENTRY_DSN=
+API_SENTRY_TRACES_SAMPLE_RATE=1.0
+API_SENTRY_PROFILES_SAMPLE_RATE=1.0
+WEB_SENTRY_DSN=
+PLUGIN_SENTRY_ENABLED=false
+PLUGIN_SENTRY_DSN=
+NOTION_INTEGRATION_TYPE=public
+RESEND_API_URL=https://api.resend.com
+SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
+SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
+PGDATA=/var/lib/postgresql/data/pgdata
+PLUGIN_MAX_PACKAGE_SIZE=52428800
+PLUGIN_MODEL_SCHEMA_CACHE_TTL=3600
+ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
+LOG_LEVEL=INFO
+LOG_OUTPUT_FORMAT=text
+LOG_FILE=/app/logs/server.log
+LOG_FILE_MAX_SIZE=20
+LOG_FILE_BACKUP_COUNT=5
+LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
+LOG_TZ=UTC
+DEBUG=false
+FLASK_DEBUG=false
+ENABLE_REQUEST_LOGGING=False
+WORKFLOW_LOG_CLEANUP_ENABLED=false
+WORKFLOW_LOG_RETENTION_DAYS=30
+WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
+WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=
+EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
+EXPOSE_PLUGIN_DEBUGGING_PORT=5003
+DEPLOY_ENV=PRODUCTION
+ACCESS_TOKEN_EXPIRE_MINUTES=60
+REFRESH_TOKEN_EXPIRE_DAYS=30
+APP_DEFAULT_ACTIVE_REQUESTS=0
+APP_MAX_ACTIVE_REQUESTS=0
+APP_MAX_EXECUTION_TIME=1200
+DIFY_BIND_ADDRESS=0.0.0.0
+DIFY_PORT=5001
+SERVER_WORKER_AMOUNT=1
+SERVER_WORKER_CLASS=gevent
+SERVER_WORKER_CONNECTIONS=10
+CELERY_SENTINEL_PASSWORD=
+S3_ACCESS_KEY=
+S3_SECRET_KEY=
+ARCHIVE_STORAGE_ACCESS_KEY=
+ARCHIVE_STORAGE_SECRET_KEY=
+AZURE_BLOB_ACCOUNT_KEY=difyai
+ALIYUN_OSS_ACCESS_KEY=your-access-key
+ALIYUN_OSS_SECRET_KEY=your-secret-key
+TENCENT_COS_SECRET_KEY=your-secret-key
+TENCENT_COS_SECRET_ID=your-secret-id
+OCI_ACCESS_KEY=your-access-key
+OCI_SECRET_KEY=your-secret-key
+HUAWEI_OBS_SECRET_KEY=your-secret-key
+HUAWEI_OBS_ACCESS_KEY=your-access-key
+VOLCENGINE_TOS_SECRET_KEY=your-secret-key
+VOLCENGINE_TOS_ACCESS_KEY=your-access-key
+BAIDU_OBS_SECRET_KEY=your-secret-key
+BAIDU_OBS_ACCESS_KEY=your-access-key
+SUPABASE_API_KEY=your-access-key
+ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
+RELYT_PASSWORD=difyai123456
+LINDORM_PASSWORD=admin
+LINDORM_USING_UGC=True
+LINDORM_QUERY_TIMEOUT=1
+HUAWEI_CLOUD_PASSWORD=admin
+UPSTASH_VECTOR_TOKEN=dify
+TABLESTORE_ACCESS_KEY_ID=xxx
+TABLESTORE_ACCESS_KEY_SECRET=xxx
+TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
+CLICKZETTA_PASSWORD=
+CLICKZETTA_INSTANCE=
+CLICKZETTA_SERVICE=api.clickzetta.com
+CLICKZETTA_WORKSPACE=quick_start
+CLICKZETTA_VCLUSTER=default_ap
+CLICKZETTA_SCHEMA=dify
+CLICKZETTA_BATCH_SIZE=100
+CLICKZETTA_ENABLE_INVERTED_INDEX=true
+CLICKZETTA_ANALYZER_TYPE=chinese
+CLICKZETTA_ANALYZER_MODE=smart
+UNSTRUCTURED_API_KEY=
+SCARF_NO_ANALYTICS=true
+PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
+NOTION_CLIENT_SECRET=
+NOTION_CLIENT_ID=
+NOTION_INTERNAL_SECRET=
+MAIL_TYPE=resend
+MAIL_DEFAULT_SEND_FROM=
+RESEND_API_KEY=your-resend-api-key
+SMTP_SERVER=
+SMTP_PORT=465
+SMTP_USERNAME=
+SMTP_PASSWORD=
+SMTP_USE_TLS=true
+SMTP_OPPORTUNISTIC_TLS=false
+SMTP_LOCAL_HOSTNAME=
+SENDGRID_API_KEY=
+INVITE_EXPIRY_HOURS=72
+RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
+EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
+CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
+OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
+CODE_EXECUTION_ENDPOINT=http://sandbox:8194
+CODE_EXECUTION_API_KEY=dify-sandbox
+CODE_EXECUTION_SSL_VERIFY=True
+CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
+CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
+CODE_MAX_NUMBER=9223372036854775807
+CODE_MIN_NUMBER=-9223372036854775808
+CODE_MAX_DEPTH=5
+CODE_MAX_PRECISION=20
+CODE_MAX_STRING_LENGTH=400000
+CODE_MAX_STRING_ARRAY_LENGTH=30
+CODE_MAX_OBJECT_ARRAY_LENGTH=30
+CODE_MAX_NUMBER_ARRAY_LENGTH=1000
+CODE_EXECUTION_CONNECT_TIMEOUT=10
+CODE_EXECUTION_READ_TIMEOUT=60
+CODE_EXECUTION_WRITE_TIMEOUT=10
+TEMPLATE_TRANSFORM_MAX_LENGTH=400000
+WORKFLOW_MAX_EXECUTION_STEPS=500
+WORKFLOW_MAX_EXECUTION_TIME=1200
+WORKFLOW_CALL_MAX_DEPTH=5
+MAX_VARIABLE_SIZE=204800
+WORKFLOW_FILE_UPLOAD_LIMIT=10
+GRAPH_ENGINE_MIN_WORKERS=1
+GRAPH_ENGINE_MAX_WORKERS=10
+GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
+GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
+ALIYUN_SLS_ACCESS_KEY_ID=
+ALIYUN_SLS_ACCESS_KEY_SECRET=
+WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
+RESPECT_XFORWARD_HEADERS_ENABLED=false
+SSRF_HTTP_PORT=3128
+SSRF_COREDUMP_DIR=/var/spool/squid
+SSRF_REVERSE_PROXY_PORT=8194
+SSRF_SANDBOX_HOST=sandbox
+SSRF_DEFAULT_TIME_OUT=5
+SSRF_DEFAULT_CONNECT_TIME_OUT=5
+SSRF_DEFAULT_READ_TIME_OUT=5
+SSRF_DEFAULT_WRITE_TIME_OUT=5
+SSRF_POOL_MAX_CONNECTIONS=100
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+SSRF_POOL_KEEPALIVE_EXPIRY=5.0
+PLUGIN_AWS_ACCESS_KEY=
+PLUGIN_AWS_SECRET_KEY=
+PLUGIN_AWS_REGION=
+PLUGIN_TENCENT_COS_SECRET_KEY=
+PLUGIN_TENCENT_COS_SECRET_ID=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
+PLUGIN_VOLCENGINE_TOS_ACCESS_KEY=
+PLUGIN_VOLCENGINE_TOS_SECRET_KEY=
+OTLP_API_KEY=
+OTEL_EXPORTER_OTLP_PROTOCOL=
+OTEL_EXPORTER_TYPE=otlp
+OTEL_SAMPLING_RATE=0.1
+OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
+OTEL_MAX_QUEUE_SIZE=2048
+OTEL_MAX_EXPORT_BATCH_SIZE=512
+OTEL_METRIC_EXPORT_INTERVAL=60000
+OTEL_BATCH_EXPORT_TIMEOUT=10000
+OTEL_METRIC_EXPORT_TIMEOUT=30000
+QUEUE_MONITOR_THRESHOLD=200
+QUEUE_MONITOR_ALERT_EMAILS=
+QUEUE_MONITOR_INTERVAL=30
+SWAGGER_UI_ENABLED=false
+SWAGGER_UI_PATH=/swagger-ui.html
+DSL_EXPORT_ENCRYPT_DATASET_ID=true
+DATASET_MAX_SEGMENTS_PER_REQUEST=0
+ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
+ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
+ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
+ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
+ENABLE_CLEAN_MESSAGES=false
+ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false
+ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
+ENABLE_DATASETS_QUEUE_MONITOR=false
+ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
+ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
+WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
+WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
+WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
+TENANT_ISOLATED_TASK_CONCURRENCY=1
+ANNOTATION_IMPORT_FILE_SIZE_LIMIT=2
+ANNOTATION_IMPORT_MAX_RECORDS=10000
+ANNOTATION_IMPORT_MIN_RECORDS=1
+ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5
+ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
+ANNOTATION_IMPORT_MAX_CONCURRENT=5
+CREATORS_PLATFORM_FEATURES_ENABLED=true
+CREATORS_PLATFORM_API_URL=https://creators.dify.ai
+CREATORS_PLATFORM_OAUTH_CLIENT_ID=
+TIDB_VECTOR_DATABASE=dify
+ALIBABACLOUD_MYSQL_HOST=127.0.0.1
+ALIBABACLOUD_MYSQL_PORT=3306
+ALIBABACLOUD_MYSQL_USER=root
+ALIBABACLOUD_MYSQL_DATABASE=dify
+ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
+ALIBABACLOUD_MYSQL_HNSW_M=6
+RELYT_DATABASE=postgres
+TENCENT_VECTOR_DB_DATABASE=dify
+BAIDU_VECTOR_DB_DATABASE=dify
+EXPOSE_PLUGIN_DAEMON_PORT=5002
+GUNICORN_TIMEOUT=360
+CELERY_WORKER_AMOUNT=
+CELERY_AUTO_SCALE=false
+CELERY_MAX_WORKERS=
+CELERY_MIN_WORKERS=
+API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
+API_TOOL_DEFAULT_READ_TIMEOUT=60
+CELERY_BACKEND=redis
+CELERY_USE_SENTINEL=false
+CELERY_SENTINEL_MASTER_NAME=
+CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
+WEB_API_CORS_ALLOW_ORIGINS=*
+CONSOLE_CORS_ALLOW_ORIGINS=*
+COOKIE_DOMAIN=
+OPENDAL_SCHEME=fs
+OPENDAL_FS_ROOT=storage
+CLICKZETTA_VOLUME_TYPE=user
+CLICKZETTA_VOLUME_NAME=
+CLICKZETTA_VOLUME_TABLE_PREFIX=dataset_
+CLICKZETTA_VOLUME_DIFY_PREFIX=dify_km
+S3_ENDPOINT=
+S3_REGION=us-east-1
+S3_BUCKET_NAME=difyai
+S3_ADDRESS_STYLE=auto
+S3_USE_AWS_MANAGED_IAM=false
+ARCHIVE_STORAGE_ENABLED=false
+ARCHIVE_STORAGE_ENDPOINT=
+ARCHIVE_STORAGE_ARCHIVE_BUCKET=
+ARCHIVE_STORAGE_EXPORT_BUCKET=
+ARCHIVE_STORAGE_REGION=auto
+AZURE_BLOB_ACCOUNT_NAME=difyai
+AZURE_BLOB_CONTAINER_NAME=difyai-container
+GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
+GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=
+ALIYUN_OSS_BUCKET_NAME=your-bucket-name
+ALIYUN_OSS_ENDPOINT=https://oss-ap-southeast-1-internal.aliyuncs.com
+ALIYUN_OSS_REGION=ap-southeast-1
+ALIYUN_OSS_AUTH_VERSION=v4
+ALIYUN_OSS_PATH=your-path
+ALIYUN_CLOUDBOX_ID=your-cloudbox-id
+TENCENT_COS_BUCKET_NAME=your-bucket-name
+TENCENT_COS_REGION=your-region
+TENCENT_COS_SCHEME=your-scheme
+TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain
+OCI_ENDPOINT=https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com
+OCI_BUCKET_NAME=your-bucket-name
+OCI_REGION=us-ashburn-1
+HUAWEI_OBS_BUCKET_NAME=your-bucket-name
+HUAWEI_OBS_SERVER=your-server-url
+HUAWEI_OBS_PATH_STYLE=false
+VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
+VOLCENGINE_TOS_ENDPOINT=your-server-url
+VOLCENGINE_TOS_REGION=your-region
+BAIDU_OBS_BUCKET_NAME=your-bucket-name
+BAIDU_OBS_ENDPOINT=your-server-url
+SUPABASE_BUCKET_NAME=your-bucket-name
+TENCENT_VECTOR_DB_TIMEOUT=30
+TENCENT_VECTOR_DB_USERNAME=dify
+TENCENT_VECTOR_DB_SHARD=1
+TENCENT_VECTOR_DB_REPLICAS=2
+TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
+BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
+BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
+BAIDU_VECTOR_DB_ACCOUNT=root
+BAIDU_VECTOR_DB_API_KEY=dify
+BAIDU_VECTOR_DB_SHARD=1
+BAIDU_VECTOR_DB_REPLICAS=3
+BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
+BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
+BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500
+BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05
+BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300
+HUAWEI_CLOUD_HOSTS=https://127.0.0.1:9200
+HUAWEI_CLOUD_USER=admin
+WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
+CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
+CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
+API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
+API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
+ALIYUN_SLS_ENDPOINT=
+ALIYUN_SLS_REGION=
+ALIYUN_SLS_PROJECT_NAME=
+ALIYUN_SLS_LOGSTORE_TTL=365
+LOGSTORE_DUAL_WRITE_ENABLED=false
+LOGSTORE_DUAL_READ_ENABLED=true
+LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true
+HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
+HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
+HTTP_REQUEST_NODE_SSL_VERIFY=True
+HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10
+HTTP_REQUEST_MAX_READ_TIMEOUT=600
+HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
+PLUGIN_INSTALLED_PATH=plugin
+PLUGIN_PACKAGE_CACHE_PATH=plugin_packages
+PLUGIN_MEDIA_CACHE_PATH=assets
+PLUGIN_S3_USE_AWS=false
+PLUGIN_S3_USE_AWS_MANAGED_IAM=false
+PLUGIN_S3_ENDPOINT=
+PLUGIN_S3_USE_PATH_STYLE=false
+PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME=
+PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
+PLUGIN_TENCENT_COS_REGION=
+PLUGIN_ALIYUN_OSS_REGION=
+PLUGIN_ALIYUN_OSS_ENDPOINT=
+PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
+PLUGIN_ALIYUN_OSS_PATH=
+PLUGIN_VOLCENGINE_TOS_ENDPOINT=
+PLUGIN_VOLCENGINE_TOS_REGION=
+ENABLE_OTEL=false
+OTLP_TRACE_ENDPOINT=
+OTLP_METRIC_ENDPOINT=
+# Prefix used to create collection name in vector database
+OTLP_BASE_ENDPOINT=http://localhost:4318
+WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051
+ANALYTICDB_KEY_ID=your-ak
+ANALYTICDB_KEY_SECRET=your-sk
+ANALYTICDB_REGION_ID=cn-hangzhou
+ANALYTICDB_INSTANCE_ID=gp-ab123456
+ANALYTICDB_ACCOUNT=testaccount
+ANALYTICDB_PASSWORD=testpassword
+ANALYTICDB_NAMESPACE=dify
+ANALYTICDB_NAMESPACE_PASSWORD=difypassword
+ANALYTICDB_HOST=gp-test.aliyuncs.com
+ANALYTICDB_PORT=5432
+ANALYTICDB_MIN_CONNECTION=1
+ANALYTICDB_MAX_CONNECTION=5
+TIDB_VECTOR_HOST=tidb
+TIDB_VECTOR_PORT=4000
+TIDB_VECTOR_USER=
+TIDB_VECTOR_PASSWORD=
+TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
+TIDB_ON_QDRANT_GRPC_ENABLED=false
+TIDB_ON_QDRANT_GRPC_PORT=6334
+TIDB_PUBLIC_KEY=dify
+TIDB_PRIVATE_KEY=dify
+RELYT_HOST=db
+RELYT_PORT=5432
+RELYT_USER=postgres
+VIKINGDB_ACCESS_KEY=your-ak
+VIKINGDB_SECRET_KEY=your-sk
+VIKINGDB_REGION=cn-shanghai
+VIKINGDB_HOST=api-vikingdb.xxx.volces.com
+VIKINGDB_SCHEME=http
+VIKINGDB_CONNECTION_TIMEOUT=30
+VIKINGDB_SOCKET_TIMEOUT=30
+TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
+TABLESTORE_INSTANCE_NAME=instance-name
+CLICKZETTA_USERNAME=
+CLICKZETTA_VECTOR_DISTANCE_FUNCTION=cosine_distance
+COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
+EXPOSE_NGINX_PORT=80
+EXPOSE_NGINX_SSL_PORT=443
+POSITION_TOOL_PINS=
+POSITION_TOOL_INCLUDES=
+POSITION_TOOL_EXCLUDES=
+POSITION_PROVIDER_PINS=
+POSITION_PROVIDER_INCLUDES=
+POSITION_PROVIDER_EXCLUDES=
+CREATE_TIDB_SERVICE_JOB_ENABLED=false
+MAX_SUBMIT_COUNT=100
+
+# Vector Store Configuration
+STORAGE_TYPE=opendal
+VECTOR_STORE=weaviate
+VECTOR_INDEX_NAME_PREFIX=Vector_index
+WEAVIATE_ENDPOINT=http://weaviate:8080
+WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
+WEAVIATE_TOKENIZATION=word
+OCEANBASE_VECTOR_HOST=oceanbase
+OCEANBASE_VECTOR_PORT=2881
+OCEANBASE_VECTOR_USER=root@test
+OCEANBASE_VECTOR_PASSWORD=difyai123456
+OCEANBASE_VECTOR_DATABASE=test
+OCEANBASE_ENABLE_HYBRID_SEARCH=false
+OCEANBASE_FULLTEXT_PARSER=ik
+SEEKDB_MEMORY_LIMIT=2G
+QDRANT_URL=http://qdrant:6333
+QDRANT_API_KEY=difyai123456
+QDRANT_CLIENT_TIMEOUT=20
+QDRANT_GRPC_ENABLED=false
+QDRANT_GRPC_PORT=6334
+QDRANT_REPLICATION_FACTOR=1
+MILVUS_URI=http://host.docker.internal:19530
+MILVUS_TOKEN=
+MILVUS_USER=
+MILVUS_PASSWORD=
+MILVUS_ANALYZER_PARAMS=
+PGVECTOR_HOST=pgvector
+PGVECTOR_PORT=5432
+PGVECTOR_USER=postgres
+PGVECTOR_PASSWORD=difyai123456
+PGVECTOR_DATABASE=dify
+PGVECTOR_MIN_CONNECTION=1
+PGVECTOR_MAX_CONNECTION=5
+PGVECTOR_PG_BIGM=false
+PGVECTOR_PG_BIGM_VERSION=1.2-20240606
+
+# Hologres Configuration
+HOLOGRES_HOST=
+HOLOGRES_PORT=80
+HOLOGRES_DATABASE=
+HOLOGRES_ACCESS_KEY_ID=
+HOLOGRES_ACCESS_KEY_SECRET=
+HOLOGRES_SCHEMA=public
+HOLOGRES_TOKENIZER=jieba
+HOLOGRES_DISTANCE_METHOD=Cosine
+HOLOGRES_BASE_QUANTIZATION_TYPE=rabitq
+HOLOGRES_MAX_DEGREE=64
+HOLOGRES_EF_CONSTRUCTION=400
+
+# Milvus API Configuration
+MILVUS_DATABASE=
+MILVUS_ENABLE_HYBRID_SEARCH=False
+
+# Human Input Task Configuration
+ENABLE_HUMAN_INPUT_TIMEOUT_TASK=true
+HUMAN_INPUT_TIMEOUT_TASK_INTERVAL=1
diff --git a/docker/envs/core-services/web.env.example b/docker/envs/core-services/web.env.example
new file mode 100644
index 0000000000..d366cd87ba
--- /dev/null
+++ b/docker/envs/core-services/web.env.example
@@ -0,0 +1,30 @@
+# ------------------------------
+# Web Configuration
+# ------------------------------
+
+CONSOLE_API_URL=
+APP_API_URL=
+SENTRY_DSN=
+NEXT_PUBLIC_SOCKET_URL=ws://localhost
+EXPERIMENTAL_ENABLE_VINEXT=false
+LOOP_NODE_MAX_COUNT=100
+MAX_TOOLS_NUM=10
+MAX_PARALLEL_LIMIT=10
+MAX_ITERATIONS_NUM=99
+TEXT_GENERATION_TIMEOUT_MS=60000
+ALLOW_INLINE_STYLES=false
+ALLOW_UNSAFE_DATA_SCHEME=false
+MAX_TREE_DEPTH=50
+MARKETPLACE_ENABLED=true
+MARKETPLACE_API_URL=https://marketplace.dify.ai
+INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
+ALLOW_EMBED=false
+AMPLITUDE_API_KEY=
+ENABLE_WEBSITE_JINAREADER=true
+ENABLE_WEBSITE_FIRECRAWL=true
+ENABLE_WEBSITE_WATERCRAWL=true
+NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
+NEXT_PUBLIC_COOKIE_DOMAIN=
+NEXT_PUBLIC_BATCH_CONCURRENCY=5
+CSP_WHITELIST=
+TOP_K_MAX_VALUE=10
diff --git a/docker/envs/core-services/worker-beat.env.example b/docker/envs/core-services/worker-beat.env.example
new file mode 100644
index 0000000000..380fe02b68
--- /dev/null
+++ b/docker/envs/core-services/worker-beat.env.example
@@ -0,0 +1,8 @@
+# ------------------------------
+# Worker Beat Configuration
+# ------------------------------
+
+MODE=beat
+COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
+COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
+COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
diff --git a/docker/envs/core-services/worker.env.example b/docker/envs/core-services/worker.env.example
new file mode 100644
index 0000000000..58cf4ea901
--- /dev/null
+++ b/docker/envs/core-services/worker.env.example
@@ -0,0 +1,13 @@
+# ------------------------------
+# Worker Configuration
+# ------------------------------
+
+MODE=worker
+SENTRY_DSN=
+SENTRY_TRACES_SAMPLE_RATE=1.0
+SENTRY_PROFILES_SAMPLE_RATE=1.0
+PLUGIN_MAX_PACKAGE_SIZE=52428800
+INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
+COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
+COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
+COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
diff --git a/docker/envs/databases/db-mysql.env.example b/docker/envs/databases/db-mysql.env.example
new file mode 100644
index 0000000000..b3ea6801fe
--- /dev/null
+++ b/docker/envs/databases/db-mysql.env.example
@@ -0,0 +1,9 @@
+# ------------------------------
+# Db Mysql Configuration
+# ------------------------------
+
+MYSQL_INNODB_LOG_FILE_SIZE=128M
+MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2
+MYSQL_MAX_CONNECTIONS=1000
+MYSQL_INNODB_BUFFER_POOL_SIZE=512M
+MYSQL_HOST_VOLUME=./volumes/mysql/data
diff --git a/docker/envs/databases/db-postgres.env.example b/docker/envs/databases/db-postgres.env.example
new file mode 100644
index 0000000000..14cefb6bee
--- /dev/null
+++ b/docker/envs/databases/db-postgres.env.example
@@ -0,0 +1,26 @@
+# ------------------------------
+# Db Postgres Configuration
+# ------------------------------
+
+PGDATA=/var/lib/postgresql/data/pgdata
+DB_TYPE=postgresql
+DB_USERNAME=postgres
+DB_PASSWORD=difyai123456
+DB_HOST=db_postgres
+DB_PORT=5432
+DB_DATABASE=dify
diff --git a/docker/envs/databases/redis.env.example b/docker/envs/databases/redis.env.example
new file mode 100644
index 0000000000..74bcb6525e
--- /dev/null
+++ b/docker/envs/databases/redis.env.example
@@ -0,0 +1,35 @@
+# ------------------------------
+# Redis Configuration
+# ------------------------------
+
+REDIS_HOST=redis
+REDIS_PORT=6379
+REDIS_USERNAME=
+REDIS_PASSWORD=difyai123456
+REDIS_USE_SSL=false
+REDIS_SSL_CERT_REQS=CERT_NONE
+REDIS_SSL_CA_CERTS=
+REDIS_SSL_CERTFILE=
+REDIS_SSL_KEYFILE=
+REDIS_DB=0
+REDIS_KEY_PREFIX=
+REDIS_MAX_CONNECTIONS=
+REDIS_USE_SENTINEL=false
+REDIS_SENTINELS=
+REDIS_SENTINEL_SERVICE_NAME=
+REDIS_SENTINEL_USERNAME=
+REDIS_SENTINEL_PASSWORD=
+REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
+REDIS_USE_CLUSTERS=false
+REDIS_CLUSTERS=
+REDIS_CLUSTERS_PASSWORD=
+REDIS_RETRY_RETRIES=3
+REDIS_RETRY_BACKOFF_BASE=1.0
+REDIS_RETRY_BACKOFF_CAP=10.0
+REDIS_SOCKET_TIMEOUT=5.0
+REDIS_SOCKET_CONNECT_TIMEOUT=5.0
+REDIS_HEALTH_CHECK_INTERVAL=30
+EVENT_BUS_REDIS_URL=
+EVENT_BUS_REDIS_CHANNEL_TYPE=pubsub
+EVENT_BUS_REDIS_USE_CLUSTERS=false
+BROKER_USE_SSL=false
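For orientation, the basic REDIS_* values map naturally onto redis-py's client parameters. A minimal, hedged sketch of that mapping (all kwargs below are real redis.Redis parameters; the sentinel and cluster branches toggled by REDIS_USE_SENTINEL and REDIS_USE_CLUSTERS are deliberately omitted, and Dify's real client setup may differ):

```python
# Illustrative only: construct a redis-py client from the REDIS_* env values.
import os

import redis

client = redis.Redis(
    host=os.environ.get("REDIS_HOST", "redis"),
    port=int(os.environ.get("REDIS_PORT", "6379")),
    username=os.environ.get("REDIS_USERNAME") or None,
    password=os.environ.get("REDIS_PASSWORD") or None,
    db=int(os.environ.get("REDIS_DB", "0")),
    ssl=os.environ.get("REDIS_USE_SSL", "false") == "true",
    socket_timeout=float(os.environ.get("REDIS_SOCKET_TIMEOUT", "5.0")),
    socket_connect_timeout=float(os.environ.get("REDIS_SOCKET_CONNECT_TIMEOUT", "5.0")),
    health_check_interval=int(os.environ.get("REDIS_HEALTH_CHECK_INTERVAL", "30")),
)
client.ping()  # raises if the connection settings are wrong
```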
diff --git a/docker/envs/infrastructure/certbot.env.example b/docker/envs/infrastructure/certbot.env.example
new file mode 100644
index 0000000000..c654fbe02f
--- /dev/null
+++ b/docker/envs/infrastructure/certbot.env.example
@@ -0,0 +1,7 @@
+# ------------------------------
+# Certbot Configuration
+# ------------------------------
+
+CERTBOT_EMAIL=your_email@example.com
+CERTBOT_DOMAIN=your_domain.com
+CERTBOT_OPTIONS=
diff --git a/docker/envs/infrastructure/etcd.env.example b/docker/envs/infrastructure/etcd.env.example
new file mode 100644
index 0000000000..4dca26671a
--- /dev/null
+++ b/docker/envs/infrastructure/etcd.env.example
@@ -0,0 +1,4 @@
+# ------------------------------
+# etcd Configuration
+# ------------------------------
+
diff --git a/docker/envs/infrastructure/milvus-standalone.env.example b/docker/envs/infrastructure/milvus-standalone.env.example
new file mode 100644
index 0000000000..7e87ed2648
--- /dev/null
+++ b/docker/envs/infrastructure/milvus-standalone.env.example
@@ -0,0 +1,4 @@
+# ------------------------------
+# Milvus Standalone Configuration
+# ------------------------------
+
diff --git a/docker/envs/infrastructure/minio.env.example b/docker/envs/infrastructure/minio.env.example
new file mode 100644
index 0000000000..7c8e1fa35a
--- /dev/null
+++ b/docker/envs/infrastructure/minio.env.example
@@ -0,0 +1,4 @@
+# ------------------------------
+# MinIO Configuration
+# ------------------------------
+
diff --git a/docker/envs/infrastructure/nginx.env.example b/docker/envs/infrastructure/nginx.env.example
new file mode 100644
index 0000000000..fbe86680ba
--- /dev/null
+++ b/docker/envs/infrastructure/nginx.env.example
@@ -0,0 +1,17 @@
+# ------------------------------
+# Nginx Configuration
+# ------------------------------
+
+NGINX_SERVER_NAME=_
+NGINX_HTTPS_ENABLED=false
+NGINX_PORT=80
+NGINX_SSL_PORT=443
+NGINX_SSL_CERT_FILENAME=dify.crt
+NGINX_SSL_CERT_KEY_FILENAME=dify.key
+NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3
+NGINX_WORKER_PROCESSES=auto
+NGINX_CLIENT_MAX_BODY_SIZE=100M
+NGINX_KEEPALIVE_TIMEOUT=65
+NGINX_PROXY_READ_TIMEOUT=3600s
+NGINX_PROXY_SEND_TIMEOUT=3600s
+NGINX_ENABLE_CERTBOT_CHALLENGE=false
diff --git a/docker/envs/infrastructure/ssrf-proxy.env.example b/docker/envs/infrastructure/ssrf-proxy.env.example
new file mode 100644
index 0000000000..210a782494
--- /dev/null
+++ b/docker/envs/infrastructure/ssrf-proxy.env.example
@@ -0,0 +1,17 @@
+# ------------------------------
+# SSRF Proxy Configuration
+# ------------------------------
+
+SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
+SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
+SSRF_HTTP_PORT=3128
+SSRF_COREDUMP_DIR=/var/spool/squid
+SSRF_REVERSE_PROXY_PORT=8194
+SSRF_SANDBOX_HOST=sandbox
+SSRF_DEFAULT_TIME_OUT=5
+SSRF_DEFAULT_CONNECT_TIME_OUT=5
+SSRF_DEFAULT_READ_TIME_OUT=5
+SSRF_DEFAULT_WRITE_TIME_OUT=5
+SSRF_POOL_MAX_CONNECTIONS=100
+SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
+SSRF_POOL_KEEPALIVE_EXPIRY=5.0
diff --git a/docker/middleware.env.example b/docker/envs/middleware.env.example
similarity index 100%
rename from docker/middleware.env.example
rename to docker/envs/middleware.env.example
diff --git a/docker/envs/security.env.example b/docker/envs/security.env.example
new file mode 100644
index 0000000000..787aef2706
--- /dev/null
+++ b/docker/envs/security.env.example
@@ -0,0 +1,40 @@
+# ------------------------------
+# Security Configuration
+# ------------------------------
+
+TIDB_ON_QDRANT_API_KEY=dify
+TENCENT_VECTOR_DB_API_KEY=dify
+ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
+RELYT_PASSWORD=difyai123456
+LINDORM_PASSWORD=admin
+HUAWEI_CLOUD_PASSWORD=admin
+UPSTASH_VECTOR_TOKEN=dify
+TABLESTORE_ACCESS_KEY_ID=xxx
+TABLESTORE_ACCESS_KEY_SECRET=xxx
+UNSTRUCTURED_API_KEY=
+PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
+NOTION_CLIENT_SECRET=
+NOTION_INTERNAL_SECRET=
+RESEND_API_KEY=your-resend-api-key
+SMTP_PASSWORD=
+SENDGRID_API_KEY=
+RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
+EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
+CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
+OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
+CODE_EXECUTION_API_KEY=dify-sandbox
+ALIYUN_SLS_ACCESS_KEY_ID=
+ALIYUN_SLS_ACCESS_KEY_SECRET=
+OTLP_API_KEY=
+BAIDU_VECTOR_DB_API_KEY=dify
+ANALYTICDB_KEY_ID=your-ak
+ANALYTICDB_KEY_SECRET=your-sk
+ANALYTICDB_PASSWORD=testpassword
+ANALYTICDB_NAMESPACE_PASSWORD=difypassword
+TIDB_VECTOR_PASSWORD=
+TIDB_PUBLIC_KEY=dify
+TIDB_PRIVATE_KEY=dify
+VIKINGDB_ACCESS_KEY=your-ak
+VIKINGDB_SECRET_KEY=your-sk
+SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
+INIT_PASSWORD=
diff --git a/docker/envs/vectorstores/chroma.env.example b/docker/envs/vectorstores/chroma.env.example
new file mode 100644
index 0000000000..2a15375a3d
--- /dev/null
+++ b/docker/envs/vectorstores/chroma.env.example
@@ -0,0 +1,13 @@
+# ------------------------------
+# Chroma Configuration
+# ------------------------------
+
+CHROMA_DATABASE=default_database
+CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthClientProvider
+CHROMA_AUTH_CREDENTIALS=
+CHROMA_HOST=127.0.0.1
+CHROMA_PORT=8000
+CHROMA_TENANT=default_tenant
+CHROMA_SERVER_AUTHN_CREDENTIALS=difyai123456
+CHROMA_SERVER_AUTHN_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
+CHROMA_IS_PERSISTENT=TRUE
diff --git a/docker/envs/vectorstores/couchbase.env.example b/docker/envs/vectorstores/couchbase.env.example
new file mode 100644
index 0000000000..4329d9c723
--- /dev/null
+++ b/docker/envs/vectorstores/couchbase.env.example
@@ -0,0 +1,9 @@
+# ------------------------------
+# Couchbase Configuration
+# ------------------------------
+
+COUCHBASE_PASSWORD=password
+COUCHBASE_BUCKET_NAME=Embeddings
+COUCHBASE_SCOPE_NAME=_default
+COUCHBASE_CONNECTION_STRING=couchbase://couchbase-server
+COUCHBASE_USER=Administrator
diff --git a/docker/envs/vectorstores/elasticsearch.env.example b/docker/envs/vectorstores/elasticsearch.env.example
new file mode 100644
index 0000000000..2aaa965cd7
--- /dev/null
+++ b/docker/envs/vectorstores/elasticsearch.env.example
@@ -0,0 +1,17 @@
+# ------------------------------
+# Elasticsearch Configuration
+# ------------------------------
+
+ELASTICSEARCH_CLOUD_URL=YOUR-ELASTICSEARCH_CLOUD_URL
+ELASTICSEARCH_PASSWORD=elastic
+KIBANA_PORT=5601
+ELASTICSEARCH_USE_CLOUD=false
+ELASTICSEARCH_API_KEY=YOUR-ELASTICSEARCH_API_KEY
+ELASTICSEARCH_VERIFY_CERTS=False
+ELASTICSEARCH_CA_CERTS=
+ELASTICSEARCH_REQUEST_TIMEOUT=100000
+ELASTICSEARCH_RETRY_ON_TIMEOUT=True
+ELASTICSEARCH_MAX_RETRIES=10
+ELASTICSEARCH_HOST=0.0.0.0
+ELASTICSEARCH_PORT=9200
+ELASTICSEARCH_USERNAME=elastic
diff --git a/docker/envs/vectorstores/iris.env.example b/docker/envs/vectorstores/iris.env.example
new file mode 100644
index 0000000000..b1eb39bff8
--- /dev/null
+++ b/docker/envs/vectorstores/iris.env.example
@@ -0,0 +1,17 @@
+# ------------------------------
+# IRIS Configuration
+# ------------------------------
+
+IRIS_CONNECTION_URL=
+IRIS_MIN_CONNECTION=1
+IRIS_MAX_CONNECTION=3
+IRIS_TEXT_INDEX=true
+IRIS_TEXT_INDEX_LANGUAGE=en
+IRIS_TIMEZONE=UTC
+IRIS_PASSWORD=Dify@1234
+IRIS_DATABASE=USER
+IRIS_SCHEMA=dify
+IRIS_HOST=iris
+IRIS_SUPER_SERVER_PORT=1972
+IRIS_WEB_SERVER_PORT=52773
+IRIS_USER=_SYSTEM
diff --git a/docker/envs/vectorstores/matrixone.env.example b/docker/envs/vectorstores/matrixone.env.example
new file mode 100644
index 0000000000..931375f8b4
--- /dev/null
+++ b/docker/envs/vectorstores/matrixone.env.example
@@ -0,0 +1,9 @@
+# ------------------------------
+# MatrixOne Configuration
+# ------------------------------
+
+MATRIXONE_PASSWORD=111
+MATRIXONE_HOST=matrixone
+MATRIXONE_PORT=6001
+MATRIXONE_USER=dump
+MATRIXONE_DATABASE=dify
diff --git a/docker/envs/vectorstores/milvus.env.example b/docker/envs/vectorstores/milvus.env.example
new file mode 100644
index 0000000000..d16879ca7b
--- /dev/null
+++ b/docker/envs/vectorstores/milvus.env.example
@@ -0,0 +1,13 @@
+# ------------------------------
+# Milvus Configuration
+# ------------------------------
+
+MINIO_ACCESS_KEY=minioadmin
+MINIO_SECRET_KEY=minioadmin
+ETCD_ENDPOINTS=etcd:2379
+MINIO_ADDRESS=minio:9000
+ETCD_AUTO_COMPACTION_MODE=revision
+ETCD_AUTO_COMPACTION_RETENTION=1000
+ETCD_QUOTA_BACKEND_BYTES=4294967296
+ETCD_SNAPSHOT_COUNT=50000
+MILVUS_AUTHORIZATION_ENABLED=true
diff --git a/docker/envs/vectorstores/myscale.env.example b/docker/envs/vectorstores/myscale.env.example
new file mode 100644
index 0000000000..eaa9e88cc0
--- /dev/null
+++ b/docker/envs/vectorstores/myscale.env.example
@@ -0,0 +1,10 @@
+# ------------------------------
+# MyScale Configuration
+# ------------------------------
+
+MYSCALE_PASSWORD=
+MYSCALE_DATABASE=dify
+MYSCALE_FTS_PARAMS=
+MYSCALE_HOST=myscale
+MYSCALE_PORT=8123
+MYSCALE_USER=default
diff --git a/docker/envs/vectorstores/oceanbase.env.example b/docker/envs/vectorstores/oceanbase.env.example
new file mode 100644
index 0000000000..42bed8df6a
--- /dev/null
+++ b/docker/envs/vectorstores/oceanbase.env.example
@@ -0,0 +1,6 @@
+# ------------------------------
+# OceanBase Configuration
+# ------------------------------
+
+OCEANBASE_CLUSTER_NAME=difyai
+OCEANBASE_MEMORY_LIMIT=6G
diff --git a/docker/envs/vectorstores/opengauss.env.example b/docker/envs/vectorstores/opengauss.env.example
new file mode 100644
index 0000000000..9f58499b64
--- /dev/null
+++ b/docker/envs/vectorstores/opengauss.env.example
@@ -0,0 +1,12 @@
+# ------------------------------
+# openGauss Configuration
+# ------------------------------
+
+OPENGAUSS_PASSWORD=Dify@123
+OPENGAUSS_DATABASE=dify
+OPENGAUSS_MIN_CONNECTION=1
+OPENGAUSS_MAX_CONNECTION=5
+OPENGAUSS_ENABLE_PQ=false
+OPENGAUSS_HOST=opengauss
+OPENGAUSS_PORT=6600
+OPENGAUSS_USER=postgres
diff --git a/docker/envs/vectorstores/opensearch.env.example b/docker/envs/vectorstores/opensearch.env.example
new file mode 100644
index 0000000000..a6a9283378
--- /dev/null
+++ b/docker/envs/vectorstores/opensearch.env.example
@@ -0,0 +1,22 @@
+# ------------------------------
+# OpenSearch Configuration
+# ------------------------------
+
+OPENSEARCH_PASSWORD=admin
+OPENSEARCH_AWS_REGION=ap-southeast-1
+OPENSEARCH_AWS_SERVICE=aoss
+OPENSEARCH_INITIAL_ADMIN_PASSWORD=Qazwsxedc!@#123
+OPENSEARCH_MEMLOCK_SOFT=-1
+OPENSEARCH_MEMLOCK_HARD=-1
+OPENSEARCH_NOFILE_SOFT=65536
+OPENSEARCH_NOFILE_HARD=65536
+OPENSEARCH_HOST=opensearch
+OPENSEARCH_PORT=9200
+OPENSEARCH_SECURE=true
+OPENSEARCH_VERIFY_CERTS=true
+OPENSEARCH_AUTH_METHOD=basic
+OPENSEARCH_USER=admin
+OPENSEARCH_DISCOVERY_TYPE=single-node
+OPENSEARCH_BOOTSTRAP_MEMORY_LOCK=true
+OPENSEARCH_JAVA_OPTS_MIN=512m
+OPENSEARCH_JAVA_OPTS_MAX=1024m
diff --git a/docker/envs/vectorstores/oracle.env.example b/docker/envs/vectorstores/oracle.env.example
new file mode 100644
index 0000000000..c8f24db41a
--- /dev/null
+++ b/docker/envs/vectorstores/oracle.env.example
@@ -0,0 +1,13 @@
+# ------------------------------
+# Oracle Configuration
+# ------------------------------
+
+ORACLE_PASSWORD=dify
+ORACLE_DSN=oracle:1521/FREEPDB1
+ORACLE_CONFIG_DIR=/app/api/storage/wallet
+ORACLE_WALLET_LOCATION=/app/api/storage/wallet
+ORACLE_WALLET_PASSWORD=dify
+ORACLE_IS_AUTONOMOUS=false
+ORACLE_USER=dify
+ORACLE_PWD=Dify123456
+ORACLE_CHARACTERSET=AL32UTF8
diff --git a/docker/envs/vectorstores/pgvecto-rs.env.example b/docker/envs/vectorstores/pgvecto-rs.env.example
new file mode 100644
index 0000000000..6428e5dd67
--- /dev/null
+++ b/docker/envs/vectorstores/pgvecto-rs.env.example
@@ -0,0 +1,9 @@
+# ------------------------------
+# pgvecto.rs Configuration
+# ------------------------------
+
+PGVECTO_RS_HOST=pgvecto-rs
+PGVECTO_RS_PORT=5432
+PGVECTO_RS_USER=postgres
+PGVECTO_RS_PASSWORD=difyai123456
+PGVECTO_RS_DATABASE=dify
diff --git a/docker/envs/vectorstores/pgvector.env.example b/docker/envs/vectorstores/pgvector.env.example
new file mode 100644
index 0000000000..9fd1dbf962
--- /dev/null
+++ b/docker/envs/vectorstores/pgvector.env.example
@@ -0,0 +1,8 @@
+# ------------------------------
+# pgvector Configuration
+# ------------------------------
+
+PGVECTOR_PGUSER=postgres
+PGVECTOR_POSTGRES_PASSWORD=difyai123456
+PGVECTOR_POSTGRES_DB=dify
+PGVECTOR_PGDATA=/var/lib/postgresql/data/pgdata
diff --git a/docker/envs/vectorstores/qdrant.env.example b/docker/envs/vectorstores/qdrant.env.example
new file mode 100644
index 0000000000..a3555fe547
--- /dev/null
+++ b/docker/envs/vectorstores/qdrant.env.example
@@ -0,0 +1,4 @@
+# ------------------------------
+# Qdrant Configuration
+# ------------------------------
+
diff --git a/docker/envs/vectorstores/seekdb.env.example b/docker/envs/vectorstores/seekdb.env.example
new file mode 100644
index 0000000000..4307fbede2
--- /dev/null
+++ b/docker/envs/vectorstores/seekdb.env.example
@@ -0,0 +1,4 @@
+# ------------------------------
+# Seekdb Configuration
+# ------------------------------
+
diff --git a/docker/envs/vectorstores/vastbase.env.example b/docker/envs/vectorstores/vastbase.env.example
new file mode 100644
index 0000000000..2c9db50fbe
--- /dev/null
+++ b/docker/envs/vectorstores/vastbase.env.example
@@ -0,0 +1,11 @@
+# ------------------------------
+# Vastbase Configuration
+# ------------------------------
+
+VASTBASE_PASSWORD=Difyai123456
+VASTBASE_DATABASE=dify
+VASTBASE_MIN_CONNECTION=1
+VASTBASE_MAX_CONNECTION=5
+VASTBASE_HOST=vastbase
+VASTBASE_PORT=5432
+VASTBASE_USER=dify
diff --git a/docker/envs/vectorstores/weaviate.env.example b/docker/envs/vectorstores/weaviate.env.example
new file mode 100644
index 0000000000..82a3ccb172
--- /dev/null
+++ b/docker/envs/vectorstores/weaviate.env.example
@@ -0,0 +1,18 @@
+# ------------------------------
+# Weaviate Configuration
+# ------------------------------
+
+WEAVIATE_PERSISTENCE_DATA_PATH=/var/lib/weaviate
+WEAVIATE_QUERY_DEFAULTS_LIMIT=25
+WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
+WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
+WEAVIATE_CLUSTER_HOSTNAME=node1
+WEAVIATE_AUTHENTICATION_APIKEY_ENABLED=true
+WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
+WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
+WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
+WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
+WEAVIATE_DISABLE_TELEMETRY=false
+WEAVIATE_ENABLE_TOKENIZER_GSE=false
+WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA=false
+WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR=false
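With the per-module layout above, it becomes easy to audit which variable is defined in which split file before looking at the generator changes below. A small sketch, assuming the repository root as working directory and python-dotenv installed (it provides dotenv_values):

```python
# Sketch: list every variable defined across the split env files and the
# first file that defines it. The rglob pattern mirrors the generator's walk.
from pathlib import Path

from dotenv import dotenv_values

sources: dict[str, str] = {}
for env_file in sorted(Path("docker/envs").rglob("*.env.example")):
    for key in dotenv_values(env_file):
        sources.setdefault(key, str(env_file))

print(f"{len(sources)} variables defined under docker/envs/")
```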
+ """ + base_dir = os.path.dirname(os.path.abspath(env_example_path)) + root_env_file = os.path.join(base_dir, ".env") + if not os.path.exists(root_env_file): + with open(env_example_path, "r", encoding="utf-8") as src, open( + root_env_file, "w", encoding="utf-8", newline="\n" + ) as dst: + dst.write(src.read()) + print(f"Created {root_env_file}") + else: + print(f"{root_env_file} already exists, skipping") + + envs_dir = os.path.join(base_dir, "envs") + if not os.path.isdir(envs_dir): + print(f"No envs directory found at {envs_dir}, skipping split env files") + return [] + + created_files = [] + # Walk through all .env.example files in subdirectories + for root, dirs, files in os.walk(envs_dir): + for file in files: + if file.endswith('.env.example'): + example_file = os.path.join(root, file) + env_file = example_file.replace('.env.example', '.env') + + if os.path.exists(env_file): + print(f"{env_file} already exists, skipping") + continue + + # Copy .example to actual file + with open(example_file, "r", encoding="utf-8") as src, open( + env_file, "w", encoding="utf-8", newline="\n" + ) as dst: + dst.write(src.read()) + created_files.append(env_file) + print(f"Created {env_file}") + + return created_files + + +def insert_shared_env(template_path, output_path, header_comments): + """ + Copies the template file to output path with header comments. + The template now uses env_file references instead of a huge YAML anchor. """ with open(template_path, "r", encoding="utf-8") as f: template_content = f.read() - # Remove existing x-shared-env: &shared-api-worker-env lines - template_content = re.sub( - r"^x-shared-env: &shared-api-worker-env\s*\n?", - "", - template_content, - flags=re.MULTILINE, - ) - - # Prepare the final content with header comments and shared env block - final_content = f"{header_comments}\n{shared_env_block}\n\n{template_content}" + # Prepare the final content with header comments + final_content = f"{header_comments}\n{template_content}" with open(output_path, "w", encoding="utf-8", newline="\n") as f: f.write(final_content) @@ -90,10 +126,10 @@ def insert_shared_env(template_path, output_path, shared_env_block, header_comme def main(): - env_example_path = ".env.example" - template_path = "docker-compose-template.yaml" - output_path = "docker-compose.yaml" - anchor_name = "shared-api-worker-env" # Can be modified as needed + base_dir = os.path.dirname(os.path.abspath(__file__)) + env_example_path = os.path.join(base_dir, ".env.example") + template_path = os.path.join(base_dir, "docker-compose-template.yaml") + output_path = os.path.join(base_dir, "docker-compose.yaml") # Define header comments to be added at the top of docker-compose.yaml header_comments = ( @@ -110,17 +146,14 @@ def main(): print(f"Error: File {path} does not exist.") sys.exit(1) - # Parse .env.example file - env_vars = parse_env_example(env_example_path) + # Create env files from categorized .env.example files + # These files are used by docker-compose's env_file directive + # This ensures .env files exist even in CI/CD environments + create_env_files_from_example(env_example_path) - if not env_vars: - print("Warning: No environment variables found in .env.example.") - - # Generate shared environment variables block - shared_env_block = generate_shared_env_block(env_vars, anchor_name) - - # Insert shared environment variables block and header comments into the template - insert_shared_env(template_path, output_path, shared_env_block, header_comments) + # Copy template to output with header comments + # 
+    # The template now uses env_file references instead of a huge YAML anchor
+    insert_shared_env(template_path, output_path, header_comments)


 if __name__ == "__main__":
diff --git a/e2e/scripts/common.ts b/e2e/scripts/common.ts
index ea6c897b2d..2964892dd0 100644
--- a/e2e/scripts/common.ts
+++ b/e2e/scripts/common.ts
@@ -36,7 +36,7 @@ export const webDir = path.join(rootDir, 'web')
 export const middlewareComposeFile = path.join(dockerDir, 'docker-compose.middleware.yaml')
 export const middlewareEnvFile = path.join(dockerDir, 'middleware.env')
-export const middlewareEnvExampleFile = path.join(dockerDir, 'middleware.env.example')
+export const middlewareEnvExampleFile = path.join(dockerDir, 'envs', 'middleware.env.example')
 export const webEnvLocalFile = path.join(webDir, '.env.local')
 export const webEnvExampleFile = path.join(webDir, '.env.example')
 export const apiEnvExampleFile = path.join(apiDir, 'tests', 'integration_tests', '.env.example')
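Since the generated docker-compose.yaml now depends on env_file references rather than an inlined x-shared-env anchor, a quick sanity check after running generate_docker_compose can confirm that every referenced file actually exists. A hedged sketch, assuming it runs from the docker/ directory and that PyYAML is available (the traversal covers compose's env_file forms: a bare string, a list, or a mapping with a "path" key):

```python
# Sketch: verify that every env_file referenced by the generated compose
# file resolves to a file on disk.
from pathlib import Path

import yaml

compose = yaml.safe_load(Path("docker-compose.yaml").read_text(encoding="utf-8"))
for name, service in (compose.get("services") or {}).items():
    env_files = service.get("env_file", [])
    if isinstance(env_files, (str, dict)):
        env_files = [env_files]
    for entry in env_files:
        path = entry["path"] if isinstance(entry, dict) else entry
        assert Path(path).exists(), f"service {name!r}: missing env file {path}"
print("all env_file references resolve")
```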