mirror of https://github.com/langgenius/dify.git
synced 2026-05-09 12:59:18 +08:00

refactor: split docker-compose env config into separate files (#31586)

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>

parent d06b5529b3
commit 48d27e250b

.github/workflows/api-tests.yml (vendored, 2 changed lines)
@@ -99,7 +99,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env

       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
.github/workflows/db-migration-test.yml (vendored, 4 changed lines)

@@ -37,7 +37,7 @@ jobs:
       - name: Prepare middleware env
         run: |
           cd docker
-          cp middleware.env.example middleware.env
+          cp envs/middleware.env.example middleware.env

       - name: Set up Middlewares
         uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0

@@ -87,7 +87,7 @@ jobs:
       - name: Prepare middleware env for MySQL
         run: |
           cd docker
-          cp middleware.env.example middleware.env
+          cp envs/middleware.env.example middleware.env
           sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
           sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
           sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
.github/workflows/main-ci.yml (vendored, 8 changed lines)

@@ -57,7 +57,7 @@ jobs:
            - '.github/workflows/api-tests.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'

@@ -84,7 +84,7 @@ jobs:
            - 'pnpm-workspace.yaml'
            - '.nvmrc'
            - 'docker/docker-compose.middleware.yaml'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - '.github/workflows/web-e2e.yml'
            - '.github/actions/setup-web/**'
      vdb:

@@ -94,7 +94,7 @@ jobs:
            - '.github/workflows/vdb-tests.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - 'docker/docker-compose.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'

@@ -116,7 +116,7 @@ jobs:
            - '.github/workflows/db-migration-test.yml'
            - '.github/workflows/expose_service_ports.sh'
            - 'docker/.env.example'
-            - 'docker/middleware.env.example'
+            - 'docker/envs/middleware.env.example'
            - 'docker/docker-compose.middleware.yaml'
            - 'docker/docker-compose-template.yaml'
            - 'docker/generate_docker_compose'
.github/workflows/vdb-tests-full.yml (vendored, 2 changed lines)

@@ -51,7 +51,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env

       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh

.github/workflows/vdb-tests.yml (vendored, 2 changed lines)

@@ -48,7 +48,7 @@ jobs:
       - name: Set up dotenvs
         run: |
           cp docker/.env.example docker/.env
-          cp docker/middleware.env.example docker/middleware.env
+          cp docker/envs/middleware.env.example docker/middleware.env

       - name: Expose Service Ports
         run: sh .github/workflows/expose_service_ports.sh
README.md

@@ -76,11 +76,10 @@ The easiest way to start the Dify server is through [Docker Compose](docker/dock
 ```bash
 cd dify
 cd docker
-./dify-compose up -d
+cp .env.example .env
+docker compose up -d
 ```

-On Windows PowerShell, run `.\dify-compose.ps1 up -d` from the `docker` directory.
-
 After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.

 #### Seeking help
@@ -138,7 +137,7 @@ Star Dify on GitHub and be instantly notified of new releases.

 ### Custom configurations

-If you need to customize the configuration, add only the values you want to override to `docker/.env`. The default values live in [`docker/.env.default`](docker/.env.default), and the full reference remains in [`docker/.env.example`](docker/.env.example). After making any changes, re-run `./dify-compose up -d` or `.\dify-compose.ps1 up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
+If you need to customize the configuration, edit `docker/.env`. The essential startup defaults live in [`docker/.env.example`](docker/.env.example), and optional advanced variables are split under `docker/envs/` by theme. After making any changes, re-run `docker compose up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).

 ### Metrics Monitoring with Grafana
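To make the customization flow in the hunk above concrete, here is a minimal sketch of the new workflow, run from the `docker` directory; the Milvus file name is just one of the themed examples under `envs/` and is used purely for illustration:

```bash
# Enable an optional themed config and override the vector store, then re-create the containers.
cp envs/vectorstores/milvus.env.example envs/vectorstores/milvus.env  # optional advanced settings
echo 'VECTOR_STORE=milvus' >> .env                                    # .env is read last, so it wins
docker compose up -d
```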
@@ -93,10 +93,16 @@ BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(

 API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
 DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())
-DOCKER_COMPOSE_CONFIG_SET = set()
+DOCKER_COMPOSE_CONFIG_SET = set(DOCKER_CONFIG_SET)

-with open(Path("docker") / Path("docker-compose.yaml")) as f:
-    DOCKER_COMPOSE_CONFIG_SET = set(yaml.safe_load(f.read())["x-shared-env"].keys())
+# Read environment variables from the split env files used by docker-compose
+# Walk through all .env.example files in subdirectories (per-module structure)
+envs_dir = Path("docker") / Path("envs")
+if envs_dir.exists():
+    for env_file_path in envs_dir.rglob("*.env.example"):
+        env_keys = set(dotenv_values(env_file_path).keys())
+        DOCKER_CONFIG_SET.update(env_keys)
+        DOCKER_COMPOSE_CONFIG_SET.update(env_keys)


 def test_yaml_config():
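For a quick manual check of the same per-module layout that the updated test walks with `rglob("*.env.example")`, a small shell sketch run from the repository root (output is just the sorted variable names):

```bash
# Collect every variable name defined across the split env example files,
# roughly what the test above folds into DOCKER_COMPOSE_CONFIG_SET.
find docker/envs -name '*.env.example' \
  -exec grep -hE '^[A-Za-z_][A-Za-z0-9_]*=' {} + | cut -d= -f1 | sort -u
```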
docker/.env.default (deleted file)

@@ -1,51 +0,0 @@
# ------------------------------------------------------------------
# Minimal defaults for Docker Compose deployments.
#
# Keep local changes in .env. Use .env.example as the full reference
# for advanced and service-specific settings.
# ------------------------------------------------------------------

# Public URLs used when Dify generates links. Change these together when
# exposing Dify under another hostname, IP address, or port.
CONSOLE_WEB_URL=http://localhost
SERVICE_API_URL=http://localhost
APP_WEB_URL=http://localhost
FILES_URL=http://localhost
INTERNAL_FILES_URL=http://api:5001
TRIGGER_URL=http://localhost
ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
NEXT_PUBLIC_SOCKET_URL=ws://localhost
EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
EXPOSE_PLUGIN_DEBUGGING_PORT=5003

# Built-in metadata database defaults.
DB_TYPE=postgresql
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
DB_PORT=5432
DB_DATABASE=dify

# Built-in Redis defaults.
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=difyai123456

# Default file storage.
STORAGE_TYPE=opendal
OPENDAL_SCHEME=fs
OPENDAL_FS_ROOT=storage

# Default vector database.
VECTOR_STORE=weaviate

# Internal service authentication. Paired values must match.
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1

# Host ports.
EXPOSE_NGINX_PORT=80
EXPOSE_NGINX_SSL_PORT=443

# Docker Compose profiles for bundled services.
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
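A note on the last line of the removed file: `COMPOSE_PROFILES` is a standard Docker Compose variable, so the bundled vector store and database containers follow whichever profiles it names. A hypothetical one-off invocation with explicit profiles might look like this:

```bash
# Start the stack with the Milvus and PostgreSQL bundles active for this run only;
# normally the same effect comes from VECTOR_STORE and DB_TYPE in .env.
COMPOSE_PROFILES=milvus,postgresql docker compose up -d
```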
docker/.env.example (1575 changed lines; file diff suppressed because it is too large)
docker/.gitignore (vendored, new file, 3 lines)

@@ -0,0 +1,3 @@
# Ignore actual .env files (keep only .env.example files in git)
*.env
!*.env.example
docker/README.md

@@ -7,29 +7,31 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
 - **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections.\
   For more information, refer `docker/certbot/README.md`.

-- **Persistent Environment Variables**: Default environment variables are managed through `.env.default`, while local overrides are stored in `.env`, ensuring that your configurations persist across deployments.
+- **Persistent Environment Variables**: Essential startup defaults are provided in `.env.example`, while local values are stored in `.env`, ensuring that your configurations persist across deployments.

   > What is `.env`? </br> </br>
-  > The `.env` file is a local override file. Keep it small by adding only the values that differ from `.env.default`. Use `.env.example` as the full reference when you need advanced configuration.
+  > The `.env` file is the local startup file. Copy it from `.env.example` for a default deployment. Optional advanced settings live in `envs/*.env.example` files.

 - **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file.

-- **Local .env Overrides**: The `dify-compose` and `dify-compose.ps1` wrappers create `.env` if it is missing and generate a persistent `SECRET_KEY` for this deployment.
-
 ### How to Deploy Dify with `docker-compose.yaml`

 1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system.
 1. **Environment Setup**:
    - Navigate to the `docker` directory.
-   - No copy step is required. The `dify-compose` wrappers create `.env` if it is missing and write a generated `SECRET_KEY` to it.
-   - When prompted on first run, press Enter to use the default deployment, or answer `y` to stop and edit `.env` first.
-   - Customize `.env` only when you need to override defaults from `.env.default`. Refer to `.env.example` for the full list of available variables.
+   - Copy `.env.example` to `.env`.
+   - Customize `.env` when you need to change essential startup defaults. Copy optional files from `envs/` without the `.example` suffix when you need advanced settings.
    - **Optional (for advanced deployments)**:
      If you maintain a full `.env` file copied from `.env.example`, you may use the environment synchronization tool to keep it aligned with the latest `.env.example` updates while preserving your custom settings.
      See the [Environment Variables Synchronization](#environment-variables-synchronization) section below.
 1. **Running the Services**:
-   - Execute `./dify-compose up -d` from the `docker` directory to start the services. On Windows PowerShell, run `.\dify-compose.ps1 up -d`.
+   - Execute `docker compose up -d` from the `docker` directory to start the services.
    - To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
+     ```bash
+     cp .env.example .env
+     docker compose up -d
+     ```

 1. **SSL Certificate Setup**:
    - Refer `docker/certbot/README.md` to set up SSL certificates using Certbot.
 1. **OpenTelemetry Collector Setup**:
@@ -41,7 +43,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
 1. **Middleware Setup**:
    - Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches.
    - Navigate to the `docker` directory.
-   - Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file).
+   - Ensure the `middleware.env` file is created by running `cp envs/middleware.env.example middleware.env` (refer to the `envs/middleware.env.example` file).
 1. **Running Middleware Services**:
    - Navigate to the `docker` directory.
    - Execute `docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d` to start PostgreSQL/MySQL (per `DB_TYPE`) plus the bundled Weaviate instance.
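For reference, the MySQL variant of the middleware-only setup can be assembled the same way the CI workflow above does it; a short sketch, run from the `docker` directory:

```bash
# Start only the middleware stack with MySQL instead of PostgreSQL.
cp envs/middleware.env.example middleware.env
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d
```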
@@ -58,13 +60,13 @@ For users migrating from the `docker-legacy` setup:
 1. **Data Migration**:
    - Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary.

-### Overview of `.env.default`, `.env`, and `.env.example`
+### Overview of `.env`, `.env.example`, and `envs/`

-- `.env.default` contains the minimal default configuration for Docker Compose deployments.
-- `.env` contains the generated `SECRET_KEY` plus any local overrides.
-- `.env.example` is the full reference for advanced configuration.
+- `.env.example` contains the essential default configuration for Docker Compose deployments.
+- `.env` contains local startup values copied from `.env.example` and any local changes.
+- `envs/*.env.example` files contain optional advanced configuration grouped by theme.

-The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary environment file, append paired internal service keys when needed, and remove the temporary file after Docker Compose starts.
+Docker Compose reads `envs/*.env` files when present, then reads `.env` last so values in `.env` take precedence.

 #### Key Modules and Customization
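One quick way to confirm that layering is to look at the environment a running container actually received; a minimal sketch, assuming the default `api` service name from `docker-compose.yaml` and using `VECTOR_STORE` purely as an example variable:

```bash
# After `docker compose up -d`, print the value the api container ended up with.
docker compose exec api env | grep '^VECTOR_STORE='
```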
@@ -74,7 +76,7 @@ The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary env

 #### Other notable variables

-The `.env.example` file provided in the Docker setup is extensive and covers a wide range of configuration options. It is structured into several sections, each pertaining to different aspects of the application and its services. Here are some of the key sections and variables:
+The root `.env.example` file contains the essential startup settings. Optional and provider-specific settings are grouped in `envs/*.env.example` files. Here are some of the key sections and variables:

 1. **Common Variables**:

@@ -102,7 +104,7 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w

 1. **Storage Configuration**:

-   - `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc.
+   - `STORAGE_TYPE`, `OPENDAL_SCHEME`, `OPENDAL_FS_ROOT`: Default local file storage settings. Optional storage backends are configured from the files under `envs/`.

 1. **Vector Database Configuration**:

@@ -124,11 +126,11 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w

 ### Environment Variables Synchronization

-When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.default` or `.env.example`.
+When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example` or the optional files under `envs/`.

-If you use the default override-only workflow, review `.env.default` and add only the values you need to override to `.env`.
+If you use the default workflow, review `.env.example` and keep your `.env` aligned with essential startup values.

-If you maintain a full `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.
+If you maintain a customized `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.

 > This tool performs a **one-way synchronization** from `.env.example` to `.env`.
 > Existing values in `.env` are never overwritten automatically.
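To make the one-way behavior concrete, a rough shell sketch of the same idea (this is not the repository's sync tool, only an illustration that assumes plain `KEY=VALUE` lines): keys present in `.env.example` but missing from `.env` are appended, and existing values are left untouched.

```bash
# One-way sync sketch: append missing keys from .env.example to .env, never overwrite existing ones.
while IFS= read -r line; do
  key="${line%%=*}"
  grep -q "^${key}=" .env || printf '%s\n' "$line" >> .env
done < <(grep -E '^[A-Za-z_][A-Za-z0-9_]*=' .env.example)
```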
docker/dify-compose (deleted file)

@@ -1,334 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
DEFAULT_ENV_FILE=".env.default"
|
||||
USER_ENV_FILE=".env"
|
||||
|
||||
log() {
|
||||
printf '%s\n' "$*" >&2
|
||||
}
|
||||
|
||||
die() {
|
||||
printf 'Error: %s\n' "$*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
detect_compose() {
|
||||
if docker compose version >/dev/null 2>&1; then
|
||||
COMPOSE_CMD=(docker compose)
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v docker-compose >/dev/null 2>&1; then
|
||||
COMPOSE_CMD=(docker-compose)
|
||||
return
|
||||
fi
|
||||
|
||||
die "Docker Compose is not available. Install Docker Compose, then run this command again."
|
||||
}
|
||||
|
||||
generate_secret_key() {
|
||||
if command -v openssl >/dev/null 2>&1; then
|
||||
openssl rand -base64 42
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v dd >/dev/null 2>&1 && command -v base64 >/dev/null 2>&1; then
|
||||
dd if=/dev/urandom bs=42 count=1 2>/dev/null | base64 | tr -d '\n'
|
||||
printf '\n'
|
||||
return
|
||||
fi
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
ensure_env_files() {
|
||||
[[ -f "$DEFAULT_ENV_FILE" ]] || die "$DEFAULT_ENV_FILE is missing."
|
||||
|
||||
if [[ -f "$USER_ENV_FILE" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
: >"$USER_ENV_FILE"
|
||||
|
||||
if [[ ! -t 0 ]]; then
|
||||
log "Created $USER_ENV_FILE for local overrides."
|
||||
return
|
||||
fi
|
||||
|
||||
printf 'Created %s for local overrides.\n' "$USER_ENV_FILE"
|
||||
printf 'Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N] '
|
||||
read -r answer
|
||||
|
||||
case "${answer:-}" in
|
||||
y | Y | yes | YES | Yes)
|
||||
cat <<'EOF'
|
||||
Edit .env with the settings you want to override, using .env.example as the full reference.
|
||||
Run ./dify-compose up -d again when you are ready.
|
||||
EOF
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
user_env_value() {
|
||||
local key="$1"
|
||||
awk -F= -v target="$key" '
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
|
||||
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
|
||||
value = substr(value, 2, length(value) - 2)
|
||||
}
|
||||
result = value
|
||||
}
|
||||
}
|
||||
END { print result }
|
||||
' "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
set_user_env_value() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
local temp_file
|
||||
|
||||
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-env.XXXXXX")"
|
||||
awk -F= -v target="$key" -v replacement="$key=$value" '
|
||||
BEGIN { replaced = 0 }
|
||||
/^[[:space:]]*#/ || !/=/{ print; next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
replaced = 1
|
||||
}
|
||||
next
|
||||
}
|
||||
print
|
||||
}
|
||||
END {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
}
|
||||
}
|
||||
' "$USER_ENV_FILE" >"$temp_file"
|
||||
mv "$temp_file" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
ensure_secret_key() {
|
||||
local current_secret_key
|
||||
local secret_key
|
||||
|
||||
current_secret_key="$(user_env_value SECRET_KEY)"
|
||||
if [[ -n "$current_secret_key" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
secret_key="$(generate_secret_key)" || die "Unable to generate SECRET_KEY. Install openssl or configure SECRET_KEY in .env."
|
||||
set_user_env_value SECRET_KEY "$secret_key"
|
||||
log "Generated SECRET_KEY in $USER_ENV_FILE."
|
||||
}
|
||||
|
||||
env_value() {
|
||||
local key="$1"
|
||||
awk -F= -v target="$key" '
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
|
||||
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
|
||||
value = substr(value, 2, length(value) - 2)
|
||||
}
|
||||
result = value
|
||||
}
|
||||
}
|
||||
END { print result }
|
||||
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
user_overrides() {
|
||||
local key="$1"
|
||||
grep -Eq "^[[:space:]]*${key}[[:space:]]*=" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
write_merged_env() {
|
||||
awk '
|
||||
function trim(s) {
|
||||
sub(/^[[:space:]]+/, "", s)
|
||||
sub(/[[:space:]]+$/, "", s)
|
||||
return s
|
||||
}
|
||||
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
|
||||
{
|
||||
key = $0
|
||||
sub(/=.*/, "", key)
|
||||
key = trim(key)
|
||||
if (key == "") {
|
||||
next
|
||||
}
|
||||
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
value = trim(value)
|
||||
|
||||
if (!(key in seen)) {
|
||||
order[++count] = key
|
||||
seen[key] = 1
|
||||
}
|
||||
|
||||
values[key] = value
|
||||
}
|
||||
|
||||
END {
|
||||
for (i = 1; i <= count; i++) {
|
||||
key = order[i]
|
||||
print key "=" values[key]
|
||||
}
|
||||
}
|
||||
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE" >"$MERGED_ENV_FILE"
|
||||
}
|
||||
|
||||
set_merged_env_value() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
local temp_file
|
||||
|
||||
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-compose-env.XXXXXX")"
|
||||
awk -F= -v target="$key" -v replacement="$key=$value" '
|
||||
BEGIN { replaced = 0 }
|
||||
/^[[:space:]]*#/ || !/=/{ print; next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
replaced = 1
|
||||
}
|
||||
next
|
||||
}
|
||||
print
|
||||
}
|
||||
END {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
}
|
||||
}
|
||||
' "$MERGED_ENV_FILE" >"$temp_file"
|
||||
mv "$temp_file" "$MERGED_ENV_FILE"
|
||||
}
|
||||
|
||||
set_if_not_overridden() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
|
||||
if user_overrides "$key"; then
|
||||
return
|
||||
fi
|
||||
|
||||
set_merged_env_value "$key" "$value"
|
||||
}
|
||||
|
||||
metadata_db_host() {
|
||||
case "$1" in
|
||||
mysql) printf 'db_mysql' ;;
|
||||
postgresql | '') printf 'db_postgres' ;;
|
||||
*) printf '%s' "$(env_value DB_HOST)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
metadata_db_port() {
|
||||
case "$1" in
|
||||
mysql) printf '3306' ;;
|
||||
postgresql | '') printf '5432' ;;
|
||||
*) printf '%s' "$(env_value DB_PORT)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
metadata_db_user() {
|
||||
case "$1" in
|
||||
mysql) printf 'root' ;;
|
||||
postgresql | '') printf 'postgres' ;;
|
||||
*) printf '%s' "$(env_value DB_USERNAME)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
build_merged_env() {
|
||||
MERGED_ENV_FILE="$(mktemp "${TMPDIR:-/tmp}/dify-compose.XXXXXX")"
|
||||
trap 'rm -f "$MERGED_ENV_FILE"' EXIT
|
||||
|
||||
write_merged_env
|
||||
|
||||
local db_type
|
||||
local redis_host
|
||||
local redis_port
|
||||
local redis_username
|
||||
local redis_password
|
||||
local redis_auth
|
||||
local code_execution_api_key
|
||||
local weaviate_api_key
|
||||
|
||||
db_type="$(env_value DB_TYPE)"
|
||||
|
||||
set_if_not_overridden DB_HOST "$(metadata_db_host "$db_type")"
|
||||
set_if_not_overridden DB_PORT "$(metadata_db_port "$db_type")"
|
||||
set_if_not_overridden DB_USERNAME "$(metadata_db_user "$db_type")"
|
||||
|
||||
if ! user_overrides CELERY_BROKER_URL; then
|
||||
redis_host="$(env_value REDIS_HOST)"
|
||||
redis_port="$(env_value REDIS_PORT)"
|
||||
redis_username="$(env_value REDIS_USERNAME)"
|
||||
redis_password="$(env_value REDIS_PASSWORD)"
|
||||
redis_auth=""
|
||||
|
||||
if [[ -n "$redis_username" && -n "$redis_password" ]]; then
|
||||
redis_auth="${redis_username}:${redis_password}@"
|
||||
elif [[ -n "$redis_password" ]]; then
|
||||
redis_auth=":${redis_password}@"
|
||||
elif [[ -n "$redis_username" ]]; then
|
||||
redis_auth="${redis_username}@"
|
||||
fi
|
||||
|
||||
set_merged_env_value CELERY_BROKER_URL "redis://${redis_auth}${redis_host:-redis}:${redis_port:-6379}/1"
|
||||
fi
|
||||
|
||||
if ! user_overrides SANDBOX_API_KEY; then
|
||||
code_execution_api_key="$(env_value CODE_EXECUTION_API_KEY)"
|
||||
set_if_not_overridden SANDBOX_API_KEY "${code_execution_api_key:-dify-sandbox}"
|
||||
fi
|
||||
|
||||
if ! user_overrides WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS; then
|
||||
weaviate_api_key="$(env_value WEAVIATE_API_KEY)"
|
||||
set_if_not_overridden WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS \
|
||||
"${weaviate_api_key:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}"
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
detect_compose
|
||||
ensure_env_files
|
||||
ensure_secret_key
|
||||
build_merged_env
|
||||
|
||||
if [[ "$#" -eq 0 ]]; then
|
||||
set -- up -d
|
||||
fi
|
||||
|
||||
"${COMPOSE_CMD[@]}" --env-file "$MERGED_ENV_FILE" "$@"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
docker/dify-compose.ps1 (deleted file)

@@ -1,317 +0,0 @@
|
||||
$ErrorActionPreference = "Stop"
|
||||
Set-StrictMode -Version Latest
|
||||
|
||||
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
|
||||
Set-Location $ScriptDir
|
||||
|
||||
$DefaultEnvFile = ".env.default"
|
||||
$UserEnvFile = ".env"
|
||||
$MergedEnvFile = $null
|
||||
$Utf8NoBom = New-Object System.Text.UTF8Encoding -ArgumentList $false
|
||||
|
||||
function Write-Info {
|
||||
param([string]$Message)
|
||||
[Console]::Error.WriteLine($Message)
|
||||
}
|
||||
|
||||
function Fail {
|
||||
param([string]$Message)
|
||||
[Console]::Error.WriteLine("Error: $Message")
|
||||
exit 1
|
||||
}
|
||||
|
||||
function Test-CommandSuccess {
|
||||
param([string[]]$Command)
|
||||
|
||||
try {
|
||||
$Executable = $Command[0]
|
||||
$CommandArgs = @()
|
||||
if ($Command.Length -gt 1) {
|
||||
$CommandArgs = @($Command[1..($Command.Length - 1)])
|
||||
}
|
||||
|
||||
& $Executable @CommandArgs *> $null
|
||||
return $LASTEXITCODE -eq 0
|
||||
}
|
||||
catch {
|
||||
return $false
|
||||
}
|
||||
}
|
||||
|
||||
function Get-ComposeCommand {
|
||||
if (Test-CommandSuccess @("docker", "compose", "version")) {
|
||||
return @("docker", "compose")
|
||||
}
|
||||
|
||||
if ((Get-Command "docker-compose" -ErrorAction SilentlyContinue) -and (Test-CommandSuccess @("docker-compose", "version"))) {
|
||||
return @("docker-compose")
|
||||
}
|
||||
|
||||
Fail "Docker Compose is not available. Install Docker Compose, then run this command again."
|
||||
}
|
||||
|
||||
function New-SecretKey {
|
||||
$Bytes = New-Object byte[] 42
|
||||
$Generator = [System.Security.Cryptography.RandomNumberGenerator]::Create()
|
||||
|
||||
try {
|
||||
$Generator.GetBytes($Bytes)
|
||||
}
|
||||
finally {
|
||||
$Generator.Dispose()
|
||||
}
|
||||
|
||||
return [Convert]::ToBase64String($Bytes)
|
||||
}
|
||||
|
||||
function Ensure-EnvFiles {
|
||||
if (-not (Test-Path $DefaultEnvFile -PathType Leaf)) {
|
||||
Fail "$DefaultEnvFile is missing."
|
||||
}
|
||||
|
||||
if (Test-Path $UserEnvFile -PathType Leaf) {
|
||||
return
|
||||
}
|
||||
|
||||
New-Item -ItemType File -Path $UserEnvFile | Out-Null
|
||||
|
||||
if ([Console]::IsInputRedirected) {
|
||||
Write-Info "Created $UserEnvFile for local overrides."
|
||||
return
|
||||
}
|
||||
|
||||
Write-Info "Created $UserEnvFile for local overrides."
|
||||
$Answer = Read-Host "Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N]"
|
||||
|
||||
if ($Answer -match "^(y|yes)$") {
|
||||
Write-Output "Edit .env with the settings you want to override, using .env.example as the full reference."
|
||||
Write-Output "Run .\dify-compose.ps1 up -d again when you are ready."
|
||||
exit 0
|
||||
}
|
||||
}
|
||||
|
||||
function Read-EnvFile {
|
||||
param([string]$Path)
|
||||
|
||||
$Values = [ordered]@{}
|
||||
|
||||
if (-not (Test-Path $Path -PathType Leaf)) {
|
||||
return $Values
|
||||
}
|
||||
|
||||
foreach ($Line in Get-Content -Path $Path) {
|
||||
if ($Line -match "^\s*#" -or $Line -notmatch "=") {
|
||||
continue
|
||||
}
|
||||
|
||||
$SeparatorIndex = $Line.IndexOf("=")
|
||||
$Key = $Line.Substring(0, $SeparatorIndex).Trim()
|
||||
$Value = $Line.Substring($SeparatorIndex + 1).Trim()
|
||||
|
||||
if (($Value.StartsWith('"') -and $Value.EndsWith('"')) -or ($Value.StartsWith("'") -and $Value.EndsWith("'"))) {
|
||||
$Value = $Value.Substring(1, $Value.Length - 2)
|
||||
}
|
||||
|
||||
if ($Key.Length -gt 0) {
|
||||
$Values[$Key] = $Value
|
||||
}
|
||||
}
|
||||
|
||||
return $Values
|
||||
}
|
||||
|
||||
function Set-UserEnvValue {
|
||||
param(
|
||||
[string]$Key,
|
||||
[string]$Value
|
||||
)
|
||||
|
||||
$Path = [string](Resolve-Path $UserEnvFile)
|
||||
$Lines = [System.IO.File]::ReadAllLines($Path, [System.Text.Encoding]::UTF8)
|
||||
$Output = New-Object System.Collections.Generic.List[string]
|
||||
$Replaced = $false
|
||||
|
||||
foreach ($Line in $Lines) {
|
||||
if ($Line -match "^\s*#" -or $Line -notmatch "=") {
|
||||
$Output.Add($Line)
|
||||
continue
|
||||
}
|
||||
|
||||
$SeparatorIndex = $Line.IndexOf("=")
|
||||
$CurrentKey = $Line.Substring(0, $SeparatorIndex).Trim()
|
||||
|
||||
if ($CurrentKey -eq $Key) {
|
||||
if (-not $Replaced) {
|
||||
$Output.Add("$Key=$Value")
|
||||
$Replaced = $true
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
$Output.Add($Line)
|
||||
}
|
||||
|
||||
if (-not $Replaced) {
|
||||
$Output.Add("$Key=$Value")
|
||||
}
|
||||
|
||||
[System.IO.File]::WriteAllLines($Path, $Output, $Utf8NoBom)
|
||||
}
|
||||
|
||||
function Ensure-SecretKey {
|
||||
$Values = Read-EnvFile $UserEnvFile
|
||||
|
||||
if ($Values.Contains("SECRET_KEY") -and $Values["SECRET_KEY"]) {
|
||||
return
|
||||
}
|
||||
|
||||
Set-UserEnvValue "SECRET_KEY" (New-SecretKey)
|
||||
Write-Info "Generated SECRET_KEY in $UserEnvFile."
|
||||
}
|
||||
|
||||
function Merge-EnvValues {
|
||||
$Values = [ordered]@{}
|
||||
|
||||
foreach ($Entry in (Read-EnvFile $DefaultEnvFile).GetEnumerator()) {
|
||||
$Values[$Entry.Key] = $Entry.Value
|
||||
}
|
||||
|
||||
foreach ($Entry in (Read-EnvFile $UserEnvFile).GetEnumerator()) {
|
||||
$Values[$Entry.Key] = $Entry.Value
|
||||
}
|
||||
|
||||
return $Values
|
||||
}
|
||||
|
||||
function User-Overrides {
|
||||
param([string]$Key)
|
||||
|
||||
if (-not (Test-Path $UserEnvFile -PathType Leaf)) {
|
||||
return $false
|
||||
}
|
||||
|
||||
return [bool](Select-String -Path $UserEnvFile -Pattern "^\s*$([regex]::Escape($Key))\s*=" -Quiet)
|
||||
}
|
||||
|
||||
function Metadata-DbHost {
|
||||
param([string]$DbType, $Values)
|
||||
|
||||
switch ($DbType) {
|
||||
"mysql" { return "db_mysql" }
|
||||
"postgresql" { return "db_postgres" }
|
||||
"" { return "db_postgres" }
|
||||
default { return $Values["DB_HOST"] }
|
||||
}
|
||||
}
|
||||
|
||||
function Metadata-DbPort {
|
||||
param([string]$DbType, $Values)
|
||||
|
||||
switch ($DbType) {
|
||||
"mysql" { return "3306" }
|
||||
"postgresql" { return "5432" }
|
||||
"" { return "5432" }
|
||||
default { return $Values["DB_PORT"] }
|
||||
}
|
||||
}
|
||||
|
||||
function Metadata-DbUser {
|
||||
param([string]$DbType, $Values)
|
||||
|
||||
switch ($DbType) {
|
||||
"mysql" { return "root" }
|
||||
"postgresql" { return "postgres" }
|
||||
"" { return "postgres" }
|
||||
default { return $Values["DB_USERNAME"] }
|
||||
}
|
||||
}
|
||||
|
||||
function Write-MergedEnv {
|
||||
param($Values)
|
||||
|
||||
$Output = New-Object System.Collections.Generic.List[string]
|
||||
|
||||
foreach ($Entry in $Values.GetEnumerator()) {
|
||||
$Output.Add("$($Entry.Key)=$($Entry.Value)")
|
||||
}
|
||||
|
||||
[System.IO.File]::WriteAllLines($MergedEnvFile, $Output, $Utf8NoBom)
|
||||
}
|
||||
|
||||
function Build-MergedEnv {
|
||||
$Values = Merge-EnvValues
|
||||
$script:MergedEnvFile = [System.IO.Path]::GetTempFileName()
|
||||
|
||||
$DbType = if ($Values.Contains("DB_TYPE")) { $Values["DB_TYPE"] } else { "postgresql" }
|
||||
|
||||
if (-not (User-Overrides "DB_HOST")) {
|
||||
$Values["DB_HOST"] = Metadata-DbHost $DbType $Values
|
||||
}
|
||||
|
||||
if (-not (User-Overrides "DB_PORT")) {
|
||||
$Values["DB_PORT"] = Metadata-DbPort $DbType $Values
|
||||
}
|
||||
|
||||
if (-not (User-Overrides "DB_USERNAME")) {
|
||||
$Values["DB_USERNAME"] = Metadata-DbUser $DbType $Values
|
||||
}
|
||||
|
||||
if (-not (User-Overrides "CELERY_BROKER_URL")) {
|
||||
$RedisHost = if ($Values.Contains("REDIS_HOST") -and $Values["REDIS_HOST"]) { $Values["REDIS_HOST"] } else { "redis" }
|
||||
$RedisPort = if ($Values.Contains("REDIS_PORT") -and $Values["REDIS_PORT"]) { $Values["REDIS_PORT"] } else { "6379" }
|
||||
$RedisUsername = if ($Values.Contains("REDIS_USERNAME")) { $Values["REDIS_USERNAME"] } else { "" }
|
||||
$RedisPassword = if ($Values.Contains("REDIS_PASSWORD")) { $Values["REDIS_PASSWORD"] } else { "" }
|
||||
$RedisAuth = ""
|
||||
|
||||
if ($RedisUsername -and $RedisPassword) {
|
||||
$RedisAuth = "${RedisUsername}:${RedisPassword}@"
|
||||
}
|
||||
elseif ($RedisPassword) {
|
||||
$RedisAuth = ":${RedisPassword}@"
|
||||
}
|
||||
elseif ($RedisUsername) {
|
||||
$RedisAuth = "${RedisUsername}@"
|
||||
}
|
||||
|
||||
$Values["CELERY_BROKER_URL"] = "redis://$RedisAuth${RedisHost}:${RedisPort}/1"
|
||||
}
|
||||
|
||||
if (-not (User-Overrides "SANDBOX_API_KEY")) {
|
||||
$CodeExecutionApiKey = if ($Values.Contains("CODE_EXECUTION_API_KEY") -and $Values["CODE_EXECUTION_API_KEY"]) { $Values["CODE_EXECUTION_API_KEY"] } else { "dify-sandbox" }
|
||||
$Values["SANDBOX_API_KEY"] = $CodeExecutionApiKey
|
||||
}
|
||||
|
||||
if (-not (User-Overrides "WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS")) {
|
||||
$WeaviateApiKey = if ($Values.Contains("WEAVIATE_API_KEY") -and $Values["WEAVIATE_API_KEY"]) { $Values["WEAVIATE_API_KEY"] } else { "WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih" }
|
||||
$Values["WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS"] = $WeaviateApiKey
|
||||
}
|
||||
|
||||
Write-MergedEnv $Values
|
||||
}
|
||||
|
||||
$ComposeCommand = Get-ComposeCommand
|
||||
|
||||
try {
|
||||
Ensure-EnvFiles
|
||||
Ensure-SecretKey
|
||||
Build-MergedEnv
|
||||
|
||||
$ComposeArgs = @($args)
|
||||
if ($ComposeArgs.Count -eq 0) {
|
||||
$ComposeArgs = @("up", "-d")
|
||||
}
|
||||
|
||||
$ComposeCommandArgs = @()
|
||||
if ($ComposeCommand.Length -gt 1) {
|
||||
$ComposeCommandArgs = @($ComposeCommand[1..($ComposeCommand.Length - 1)])
|
||||
}
|
||||
|
||||
$ComposeExecutable = $ComposeCommand[0]
|
||||
& $ComposeExecutable @ComposeCommandArgs --env-file $MergedEnvFile @ComposeArgs
|
||||
exit $LASTEXITCODE
|
||||
}
|
||||
finally {
|
||||
if ($MergedEnvFile -and (Test-Path $MergedEnvFile -PathType Leaf)) {
|
||||
Remove-Item -Force $MergedEnvFile
|
||||
}
|
||||
}
|
||||
@ -1,4 +1,202 @@
|
||||
x-shared-env: &shared-api-worker-env
|
||||
# Shared configuration using YAML anchors and env_file
|
||||
x-shared-api-worker-config: &shared-api-worker-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/api.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
x-shared-worker-config: &shared-worker-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/worker.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
x-shared-worker-beat-config: &shared-worker-beat-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/worker-beat.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
services:
|
||||
# Init container to fix permissions
|
||||
init_permissions:
|
||||
@ -21,12 +219,9 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
<<: *shared-api-worker-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'api' starts the API server.
|
||||
MODE: api
|
||||
SENTRY_DSN: ${API_SENTRY_DSN:-}
|
||||
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
|
||||
@ -69,12 +264,9 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
<<: *shared-worker-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'worker' starts the Celery worker for processing all queues.
|
||||
MODE: worker
|
||||
SENTRY_DSN: ${API_SENTRY_DSN:-}
|
||||
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
|
||||
@ -115,12 +307,9 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
<<: *shared-worker-beat-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks.
|
||||
MODE: beat
|
||||
depends_on:
|
||||
init_permissions:
|
||||
@ -154,6 +343,12 @@ services:
|
||||
web:
|
||||
image: langgenius/dify-web:1.14.0
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/web.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- ./.env
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
APP_API_URL: ${APP_API_URL:-}
|
||||
@ -270,6 +465,12 @@ services:
|
||||
sandbox:
|
||||
image: langgenius/dify-sandbox:0.2.15
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/sandbox.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- ./.env
|
||||
environment:
|
||||
# The DifySandbox configurations
|
||||
# Make sure you are changing this key for your deployment with a strong key.
|
||||
@ -294,9 +495,24 @@ services:
|
||||
plugin_daemon:
|
||||
image: langgenius/dify-plugin-daemon:0.6.0-local
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/plugin-daemon.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
|
||||
DB_SSL_MODE: ${DB_SSL_MODE:-disable}
|
||||
SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
|
||||
|
||||
(File diff suppressed because it is too large.)

docker/envs/core-services/api.env.example (new file, 13 lines)

@@ -0,0 +1,13 @@
# ------------------------------
# Api Configuration
# ------------------------------

MODE=api
SENTRY_DSN=
SENTRY_TRACES_SAMPLE_RATE=1.0
SENTRY_PROFILES_SAMPLE_RATE=1.0
PLUGIN_REMOTE_INSTALL_HOST=localhost
PLUGIN_REMOTE_INSTALL_PORT=5003
PLUGIN_MAX_PACKAGE_SIZE=52428800
PLUGIN_DAEMON_TIMEOUT=600.0
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
docker/envs/core-services/plugin-daemon.env.example (new file, 23 lines)

@@ -0,0 +1,23 @@
# ------------------------------
# Plugin Daemon Configuration
# ------------------------------

DB_PLUGIN_DATABASE=dify_plugin
PLUGIN_DAEMON_URL=http://plugin_daemon:5002
PLUGIN_PPROF_ENABLED=false
PLUGIN_DIFY_INNER_API_URL=http://api:5001
FORCE_VERIFYING_SIGNATURE=true
PLUGIN_STDIO_BUFFER_SIZE=1024
PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880
PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120
PLUGIN_MAX_EXECUTION_TIMEOUT=600
PLUGIN_DEBUGGING_HOST=0.0.0.0
PLUGIN_DEBUGGING_PORT=5003
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
PLUGIN_DAEMON_PORT=5002
CELERY_WORKER_CLASS=
PLUGIN_STORAGE_TYPE=local
PLUGIN_STORAGE_LOCAL_ROOT=/app/storage
PLUGIN_WORKING_PATH=/app/storage/cwd
PLUGIN_STORAGE_OSS_BUCKET=
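The `PLUGIN_DAEMON_KEY` and `PLUGIN_DIFY_INNER_API_KEY` values above ship with the same defaults as their counterparts in `api.env.example` and the removed `.env.default`, which described them as paired values that must match. If they are rotated, both sides appear to need the new value; a hedged sketch, appending overrides to `.env` so they take precedence over the split files:

```bash
# Illustrative only: rotate the shared inner API key and keep both sides in sync via .env overrides.
key="$(openssl rand -base64 42)"
printf 'PLUGIN_DIFY_INNER_API_KEY=%s\nINNER_API_KEY_FOR_PLUGIN=%s\n' "$key" "$key" >> .env
```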
docker/envs/core-services/sandbox.env.example (new file, 17 lines)

@@ -0,0 +1,17 @@
# ------------------------------
# Sandbox Configuration
# ------------------------------

SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
SANDBOX_PORT=8194
PIP_MIRROR_URL=
SANDBOX_API_KEY=dify-sandbox
SANDBOX_GIN_MODE=release
SANDBOX_WORKER_TIMEOUT=15
SANDBOX_ENABLE_NETWORK=true
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
docker/envs/core-services/shared.env.example (new file, 469 lines)

@@ -0,0 +1,469 @@
|
||||
# ------------------------------
|
||||
# Shared API/Worker Configuration
|
||||
# ------------------------------
|
||||
|
||||
CONSOLE_WEB_URL=
|
||||
SERVICE_API_URL=
|
||||
TRIGGER_URL=http://localhost
|
||||
APP_WEB_URL=
|
||||
FILES_URL=
|
||||
INTERNAL_FILES_URL=
|
||||
LANG=C.UTF-8
|
||||
LC_ALL=C.UTF-8
|
||||
PYTHONIOENCODING=utf-8
|
||||
UV_CACHE_DIR=/tmp/.uv-cache
|
||||
CHECK_UPDATE_URL=https://updates.dify.ai
|
||||
OPENAI_API_BASE=https://api.openai.com/v1
|
||||
MIGRATION_ENABLED=true
|
||||
FILES_ACCESS_TIMEOUT=300
|
||||
ENABLE_COLLABORATION_MODE=false
|
||||
CELERY_BROKER_URL=redis://:difyai123456@redis:6379/1
|
||||
CELERY_TASK_ANNOTATIONS=null
|
||||
AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
|
||||
SUPABASE_URL=your-server-url
|
||||
TIDB_ON_QDRANT_URL=http://127.0.0.1
|
||||
TIDB_ON_QDRANT_API_KEY=dify
|
||||
TIDB_API_URL=http://127.0.0.1
|
||||
TIDB_IAM_API_URL=http://127.0.0.1
|
||||
TIDB_REGION=regions/aws-us-east-1
|
||||
TIDB_PROJECT_ID=dify
|
||||
TIDB_SPEND_LIMIT=100
|
||||
TENCENT_VECTOR_DB_URL=http://127.0.0.1
|
||||
TENCENT_VECTOR_DB_API_KEY=dify
|
||||
LINDORM_URL=http://localhost:30070
|
||||
LINDORM_USERNAME=admin
|
||||
UPSTASH_VECTOR_URL=https://xxx-vector.upstash.io
|
||||
UPLOAD_FILE_SIZE_LIMIT=15
|
||||
UPLOAD_FILE_BATCH_LIMIT=5
|
||||
UPLOAD_FILE_EXTENSION_BLACKLIST=
|
||||
SINGLE_CHUNK_ATTACHMENT_LIMIT=10
|
||||
IMAGE_FILE_BATCH_LIMIT=10
|
||||
ATTACHMENT_IMAGE_FILE_SIZE_LIMIT=2
|
||||
ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT=60
|
||||
ETL_TYPE=dify
|
||||
UNSTRUCTURED_API_URL=
|
||||
MULTIMODAL_SEND_FORMAT=base64
|
||||
UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
|
||||
UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
|
||||
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
|
||||
API_SENTRY_DSN=
|
||||
API_SENTRY_TRACES_SAMPLE_RATE=1.0
|
||||
API_SENTRY_PROFILES_SAMPLE_RATE=1.0
|
||||
WEB_SENTRY_DSN=
|
||||
PLUGIN_SENTRY_ENABLED=false
|
||||
PLUGIN_SENTRY_DSN=
|
||||
NOTION_INTEGRATION_TYPE=public
|
||||
RESEND_API_URL=https://api.resend.com
|
||||
SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
|
||||
SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
|
||||
PGDATA=/var/lib/postgresql/data/pgdata
|
||||
PLUGIN_MAX_PACKAGE_SIZE=52428800
|
||||
PLUGIN_MODEL_SCHEMA_CACHE_TTL=3600
|
||||
ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
|
||||
LOG_LEVEL=INFO
|
||||
LOG_OUTPUT_FORMAT=text
|
||||
LOG_FILE=/app/logs/server.log
|
||||
LOG_FILE_MAX_SIZE=20
|
||||
LOG_FILE_BACKUP_COUNT=5
|
||||
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
|
||||
LOG_TZ=UTC
|
||||
DEBUG=false
|
||||
FLASK_DEBUG=false
|
||||
ENABLE_REQUEST_LOGGING=False
|
||||
WORKFLOW_LOG_CLEANUP_ENABLED=false
|
||||
WORKFLOW_LOG_RETENTION_DAYS=30
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
|
||||
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=
|
||||
EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
|
||||
EXPOSE_PLUGIN_DEBUGGING_PORT=5003
|
||||
DEPLOY_ENV=PRODUCTION
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||
REFRESH_TOKEN_EXPIRE_DAYS=30
|
||||
APP_DEFAULT_ACTIVE_REQUESTS=0
|
||||
APP_MAX_ACTIVE_REQUESTS=0
|
||||
APP_MAX_EXECUTION_TIME=1200
|
||||
DIFY_BIND_ADDRESS=0.0.0.0
|
||||
DIFY_PORT=5001
|
||||
SERVER_WORKER_AMOUNT=1
|
||||
SERVER_WORKER_CLASS=gevent
|
||||
SERVER_WORKER_CONNECTIONS=10
|
||||
CELERY_SENTINEL_PASSWORD=
|
||||
S3_ACCESS_KEY=
|
||||
S3_SECRET_KEY=
|
||||
ARCHIVE_STORAGE_ACCESS_KEY=
|
||||
ARCHIVE_STORAGE_SECRET_KEY=
|
||||
AZURE_BLOB_ACCOUNT_KEY=difyai
|
||||
ALIYUN_OSS_ACCESS_KEY=your-access-key
|
||||
ALIYUN_OSS_SECRET_KEY=your-secret-key
|
||||
TENCENT_COS_SECRET_KEY=your-secret-key
|
||||
TENCENT_COS_SECRET_ID=your-secret-id
|
||||
OCI_ACCESS_KEY=your-access-key
|
||||
OCI_SECRET_KEY=your-secret-key
|
||||
HUAWEI_OBS_SECRET_KEY=your-secret-key
|
||||
HUAWEI_OBS_ACCESS_KEY=your-access-key
|
||||
VOLCENGINE_TOS_SECRET_KEY=your-secret-key
|
||||
VOLCENGINE_TOS_ACCESS_KEY=your-access-key
|
||||
BAIDU_OBS_SECRET_KEY=your-secret-key
|
||||
BAIDU_OBS_ACCESS_KEY=your-access-key
|
||||
SUPABASE_API_KEY=your-access-key
|
||||
ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
|
||||
RELYT_PASSWORD=difyai123456
|
||||
LINDORM_PASSWORD=admin
|
||||
LINDORM_USING_UGC=True
|
||||
LINDORM_QUERY_TIMEOUT=1
|
||||
HUAWEI_CLOUD_PASSWORD=admin
|
||||
UPSTASH_VECTOR_TOKEN=dify
|
||||
TABLESTORE_ACCESS_KEY_ID=xxx
|
||||
TABLESTORE_ACCESS_KEY_SECRET=xxx
|
||||
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
|
||||
CLICKZETTA_PASSWORD=
|
||||
CLICKZETTA_INSTANCE=
|
||||
CLICKZETTA_SERVICE=api.clickzetta.com
CLICKZETTA_WORKSPACE=quick_start
CLICKZETTA_VCLUSTER=default_ap
CLICKZETTA_SCHEMA=dify
CLICKZETTA_BATCH_SIZE=100
CLICKZETTA_ENABLE_INVERTED_INDEX=true
CLICKZETTA_ANALYZER_TYPE=chinese
CLICKZETTA_ANALYZER_MODE=smart
UNSTRUCTURED_API_KEY=
SCARF_NO_ANALYTICS=true
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
NOTION_CLIENT_SECRET=
NOTION_CLIENT_ID=
NOTION_INTERNAL_SECRET=
MAIL_TYPE=resend
MAIL_DEFAULT_SEND_FROM=
RESEND_API_KEY=your-resend-api-key
SMTP_SERVER=
SMTP_PORT=465
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false
SMTP_LOCAL_HOSTNAME=
SENDGRID_API_KEY=
INVITE_EXPIRY_HOURS=72
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
CODE_EXECUTION_ENDPOINT=http://sandbox:8194
CODE_EXECUTION_API_KEY=dify-sandbox
CODE_EXECUTION_SSL_VERIFY=True
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
CODE_MAX_NUMBER=9223372036854775807
CODE_MIN_NUMBER=-9223372036854775808
CODE_MAX_DEPTH=5
CODE_MAX_PRECISION=20
CODE_MAX_STRING_LENGTH=400000
CODE_MAX_STRING_ARRAY_LENGTH=30
CODE_MAX_OBJECT_ARRAY_LENGTH=30
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
CODE_EXECUTION_CONNECT_TIMEOUT=10
CODE_EXECUTION_READ_TIMEOUT=60
CODE_EXECUTION_WRITE_TIMEOUT=10
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
WORKFLOW_MAX_EXECUTION_STEPS=500
WORKFLOW_MAX_EXECUTION_TIME=1200
WORKFLOW_CALL_MAX_DEPTH=5
MAX_VARIABLE_SIZE=204800
WORKFLOW_FILE_UPLOAD_LIMIT=10
GRAPH_ENGINE_MIN_WORKERS=1
GRAPH_ENGINE_MAX_WORKERS=10
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
ALIYUN_SLS_ACCESS_KEY_ID=
ALIYUN_SLS_ACCESS_KEY_SECRET=
WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
RESPECT_XFORWARD_HEADERS_ENABLED=false
SSRF_HTTP_PORT=3128
SSRF_COREDUMP_DIR=/var/spool/squid
SSRF_REVERSE_PROXY_PORT=8194
SSRF_SANDBOX_HOST=sandbox
SSRF_DEFAULT_TIME_OUT=5
SSRF_DEFAULT_CONNECT_TIME_OUT=5
SSRF_DEFAULT_READ_TIME_OUT=5
SSRF_DEFAULT_WRITE_TIME_OUT=5
SSRF_POOL_MAX_CONNECTIONS=100
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
PLUGIN_AWS_ACCESS_KEY=
PLUGIN_AWS_SECRET_KEY=
PLUGIN_AWS_REGION=
PLUGIN_TENCENT_COS_SECRET_KEY=
PLUGIN_TENCENT_COS_SECRET_ID=
PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
PLUGIN_VOLCENGINE_TOS_ACCESS_KEY=
PLUGIN_VOLCENGINE_TOS_SECRET_KEY=
OTLP_API_KEY=
OTEL_EXPORTER_OTLP_PROTOCOL=
OTEL_EXPORTER_TYPE=otlp
OTEL_SAMPLING_RATE=0.1
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
OTEL_MAX_QUEUE_SIZE=2048
OTEL_MAX_EXPORT_BATCH_SIZE=512
OTEL_METRIC_EXPORT_INTERVAL=60000
OTEL_BATCH_EXPORT_TIMEOUT=10000
OTEL_METRIC_EXPORT_TIMEOUT=30000
QUEUE_MONITOR_THRESHOLD=200
QUEUE_MONITOR_ALERT_EMAILS=
QUEUE_MONITOR_INTERVAL=30
SWAGGER_UI_ENABLED=false
SWAGGER_UI_PATH=/swagger-ui.html
DSL_EXPORT_ENCRYPT_DATASET_ID=true
DATASET_MAX_SEGMENTS_PER_REQUEST=0
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
TENANT_ISOLATED_TASK_CONCURRENCY=1
ANNOTATION_IMPORT_FILE_SIZE_LIMIT=2
ANNOTATION_IMPORT_MAX_RECORDS=10000
ANNOTATION_IMPORT_MIN_RECORDS=1
ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5
ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
ANNOTATION_IMPORT_MAX_CONCURRENT=5
CREATORS_PLATFORM_FEATURES_ENABLED=true
CREATORS_PLATFORM_API_URL=https://creators.dify.ai
CREATORS_PLATFORM_OAUTH_CLIENT_ID=
TIDB_VECTOR_DATABASE=dify
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
ALIBABACLOUD_MYSQL_PORT=3306
ALIBABACLOUD_MYSQL_USER=root
ALIBABACLOUD_MYSQL_DATABASE=dify
ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
ALIBABACLOUD_MYSQL_HNSW_M=6
RELYT_DATABASE=postgres
TENCENT_VECTOR_DB_DATABASE=dify
BAIDU_VECTOR_DB_DATABASE=dify
EXPOSE_PLUGIN_DAEMON_PORT=5002
GUNICORN_TIMEOUT=360
CELERY_WORKER_AMOUNT=
CELERY_AUTO_SCALE=false
CELERY_MAX_WORKERS=
CELERY_MIN_WORKERS=
API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
API_TOOL_DEFAULT_READ_TIMEOUT=60
CELERY_BACKEND=redis
CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
WEB_API_CORS_ALLOW_ORIGINS=*
CONSOLE_CORS_ALLOW_ORIGINS=*
COOKIE_DOMAIN=
OPENDAL_SCHEME=fs
OPENDAL_FS_ROOT=storage
CLICKZETTA_VOLUME_TYPE=user
CLICKZETTA_VOLUME_NAME=
CLICKZETTA_VOLUME_TABLE_PREFIX=dataset_
CLICKZETTA_VOLUME_DIFY_PREFIX=dify_km
S3_ENDPOINT=
S3_REGION=us-east-1
S3_BUCKET_NAME=difyai
S3_ADDRESS_STYLE=auto
S3_USE_AWS_MANAGED_IAM=false
ARCHIVE_STORAGE_ENABLED=false
ARCHIVE_STORAGE_ENDPOINT=
ARCHIVE_STORAGE_ARCHIVE_BUCKET=
ARCHIVE_STORAGE_EXPORT_BUCKET=
ARCHIVE_STORAGE_REGION=auto
AZURE_BLOB_ACCOUNT_NAME=difyai
AZURE_BLOB_CONTAINER_NAME=difyai-container
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=
ALIYUN_OSS_BUCKET_NAME=your-bucket-name
ALIYUN_OSS_ENDPOINT=https://oss-ap-southeast-1-internal.aliyuncs.com
ALIYUN_OSS_REGION=ap-southeast-1
ALIYUN_OSS_AUTH_VERSION=v4
ALIYUN_OSS_PATH=your-path
ALIYUN_CLOUDBOX_ID=your-cloudbox-id
TENCENT_COS_BUCKET_NAME=your-bucket-name
TENCENT_COS_REGION=your-region
TENCENT_COS_SCHEME=your-scheme
TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain
OCI_ENDPOINT=https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com
OCI_BUCKET_NAME=your-bucket-name
OCI_REGION=us-ashburn-1
HUAWEI_OBS_BUCKET_NAME=your-bucket-name
HUAWEI_OBS_SERVER=your-server-url
HUAWEI_OBS_PATH_STYLE=false
VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
VOLCENGINE_TOS_ENDPOINT=your-server-url
VOLCENGINE_TOS_REGION=your-region
BAIDU_OBS_BUCKET_NAME=your-bucket-name
BAIDU_OBS_ENDPOINT=your-server-url
SUPABASE_BUCKET_NAME=your-bucket-name
TENCENT_VECTOR_DB_TIMEOUT=30
TENCENT_VECTOR_DB_USERNAME=dify
TENCENT_VECTOR_DB_SHARD=1
TENCENT_VECTOR_DB_REPLICAS=2
TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
BAIDU_VECTOR_DB_ACCOUNT=root
BAIDU_VECTOR_DB_API_KEY=dify
BAIDU_VECTOR_DB_SHARD=1
BAIDU_VECTOR_DB_REPLICAS=3
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05
BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300
HUAWEI_CLOUD_HOSTS=https://127.0.0.1:9200
HUAWEI_CLOUD_USER=admin
WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
ALIYUN_SLS_ENDPOINT=
ALIYUN_SLS_REGION=
ALIYUN_SLS_PROJECT_NAME=
ALIYUN_SLS_LOGSTORE_TTL=365
LOGSTORE_DUAL_WRITE_ENABLED=false
LOGSTORE_DUAL_READ_ENABLED=true
LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
HTTP_REQUEST_NODE_SSL_VERIFY=True
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10
HTTP_REQUEST_MAX_READ_TIMEOUT=600
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
PLUGIN_INSTALLED_PATH=plugin
PLUGIN_PACKAGE_CACHE_PATH=plugin_packages
PLUGIN_MEDIA_CACHE_PATH=assets
PLUGIN_S3_USE_AWS=false
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
PLUGIN_S3_ENDPOINT=
PLUGIN_S3_USE_PATH_STYLE=false
PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME=
PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
PLUGIN_TENCENT_COS_REGION=
PLUGIN_ALIYUN_OSS_REGION=
PLUGIN_ALIYUN_OSS_ENDPOINT=
PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
PLUGIN_ALIYUN_OSS_PATH=
PLUGIN_VOLCENGINE_TOS_ENDPOINT=
PLUGIN_VOLCENGINE_TOS_REGION=
ENABLE_OTEL=false
OTLP_TRACE_ENDPOINT=
OTLP_METRIC_ENDPOINT=
# Prefix used to create collection name in vector database
OTLP_BASE_ENDPOINT=http://localhost:4318
WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051
ANALYTICDB_KEY_ID=your-ak
ANALYTICDB_KEY_SECRET=your-sk
ANALYTICDB_REGION_ID=cn-hangzhou
ANALYTICDB_INSTANCE_ID=gp-ab123456
ANALYTICDB_ACCOUNT=testaccount
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE=dify
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
ANALYTICDB_HOST=gp-test.aliyuncs.com
ANALYTICDB_PORT=5432
ANALYTICDB_MIN_CONNECTION=1
ANALYTICDB_MAX_CONNECTION=5
TIDB_VECTOR_HOST=tidb
TIDB_VECTOR_PORT=4000
TIDB_VECTOR_USER=
TIDB_VECTOR_PASSWORD=
TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
TIDB_ON_QDRANT_GRPC_ENABLED=false
TIDB_ON_QDRANT_GRPC_PORT=6334
TIDB_PUBLIC_KEY=dify
TIDB_PRIVATE_KEY=dify
RELYT_HOST=db
RELYT_PORT=5432
RELYT_USER=postgres
VIKINGDB_ACCESS_KEY=your-ak
VIKINGDB_SECRET_KEY=your-sk
VIKINGDB_REGION=cn-shanghai
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
VIKINGDB_SCHEME=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30
TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
TABLESTORE_INSTANCE_NAME=instance-name
CLICKZETTA_USERNAME=
CLICKZETTA_VECTOR_DISTANCE_FUNCTION=cosine_distance
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
EXPOSE_NGINX_PORT=80
EXPOSE_NGINX_SSL_PORT=443
POSITION_TOOL_PINS=
POSITION_TOOL_INCLUDES=
POSITION_TOOL_EXCLUDES=
POSITION_PROVIDER_PINS=
POSITION_PROVIDER_INCLUDES=
POSITION_PROVIDER_EXCLUDES=
CREATE_TIDB_SERVICE_JOB_ENABLED=false
MAX_SUBMIT_COUNT=100

# Vector Store Configuration
STORAGE_TYPE=opendal
VECTOR_STORE=weaviate
VECTOR_INDEX_NAME_PREFIX=Vector_index
WEAVIATE_ENDPOINT=http://weaviate:8080
WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_TOKENIZATION=word
OCEANBASE_VECTOR_HOST=oceanbase
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_ENABLE_HYBRID_SEARCH=false
OCEANBASE_FULLTEXT_PARSER=ik
SEEKDB_MEMORY_LIMIT=2G
QDRANT_URL=http://qdrant:6333
QDRANT_API_KEY=difyai123456
QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334
QDRANT_REPLICATION_FACTOR=1
MILVUS_URI=http://host.docker.internal:19530
MILVUS_TOKEN=
MILVUS_USER=
MILVUS_PASSWORD=
MILVUS_ANALYZER_PARAMS=
PGVECTOR_HOST=pgvector
PGVECTOR_PORT=5432
PGVECTOR_USER=postgres
PGVECTOR_PASSWORD=difyai123456
PGVECTOR_DATABASE=dify
PGVECTOR_MIN_CONNECTION=1
PGVECTOR_MAX_CONNECTION=5
PGVECTOR_PG_BIGM=false
PGVECTOR_PG_BIGM_VERSION=1.2-20240606

# Hologres Configuration
HOLOGRES_HOST=
HOLOGRES_PORT=80
HOLOGRES_DATABASE=
HOLOGRES_ACCESS_KEY_ID=
HOLOGRES_ACCESS_KEY_SECRET=
HOLOGRES_SCHEMA=public
HOLOGRES_TOKENIZER=jieba
HOLOGRES_DISTANCE_METHOD=Cosine
HOLOGRES_BASE_QUANTIZATION_TYPE=rabitq
HOLOGRES_MAX_DEGREE=64
HOLOGRES_EF_CONSTRUCTION=400

# Milvus API Configuration
MILVUS_DATABASE=
MILVUS_ENABLE_HYBRID_SEARCH=False

# Human Input Task Configuration
ENABLE_HUMAN_INPUT_TIMEOUT_TASK=true
HUMAN_INPUT_TIMEOUT_TASK_INTERVAL=1

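A note on the COMPOSE_PROFILES default above: docker compose only starts services whose profile appears in that list, so the stock setup activates the weaviate and postgresql profiles. Below is a minimal sketch of how a profile-gated service can consume one of the split env files; the service name, image tag, and exact file list are illustrative assumptions, not taken from the actual template.

services:
  weaviate:
    image: semitechnologies/weaviate:1.27.0   # illustrative tag, not from the template
    profiles:
      - weaviate                              # started only when COMPOSE_PROFILES includes "weaviate"
    env_file:
      - envs/vectorstores/weaviate.env        # generated from the weaviate.env.example shown further below
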
30
docker/envs/core-services/web.env.example
Normal file
@ -0,0 +1,30 @@
# ------------------------------
# Web Configuration
# ------------------------------

CONSOLE_API_URL=
APP_API_URL=
SENTRY_DSN=
NEXT_PUBLIC_SOCKET_URL=ws://localhost
EXPERIMENTAL_ENABLE_VINEXT=false
LOOP_NODE_MAX_COUNT=100
MAX_TOOLS_NUM=10
MAX_PARALLEL_LIMIT=10
MAX_ITERATIONS_NUM=99
TEXT_GENERATION_TIMEOUT_MS=60000
ALLOW_INLINE_STYLES=false
ALLOW_UNSAFE_DATA_SCHEME=false
MAX_TREE_DEPTH=50
MARKETPLACE_ENABLED=true
MARKETPLACE_API_URL=https://marketplace.dify.ai
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
ALLOW_EMBED=false
AMPLITUDE_API_KEY=
ENABLE_WEBSITE_JINAREADER=true
ENABLE_WEBSITE_FIRECRAWL=true
ENABLE_WEBSITE_WATERCRAWL=true
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
NEXT_PUBLIC_COOKIE_DOMAIN=
NEXT_PUBLIC_BATCH_CONCURRENCY=5
CSP_WHITELIST=
TOP_K_MAX_VALUE=10

8
docker/envs/core-services/worker-beat.env.example
Normal file
@ -0,0 +1,8 @@
# ------------------------------
# Worker Beat Configuration
# ------------------------------

MODE=beat
COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s

13
docker/envs/core-services/worker.env.example
Normal file
@ -0,0 +1,13 @@
# ------------------------------
# Worker Configuration
# ------------------------------

MODE=worker
SENTRY_DSN=
SENTRY_TRACES_SAMPLE_RATE=1.0
SENTRY_PROFILES_SAMPLE_RATE=1.0
PLUGIN_MAX_PACKAGE_SIZE=52428800
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s

9
docker/envs/databases/db-mysql.env.example
Normal file
@ -0,0 +1,9 @@
# ------------------------------
# Db Mysql Configuration
# ------------------------------

MYSQL_INNODB_LOG_FILE_SIZE=128M
MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2
MYSQL_MAX_CONNECTIONS=1000
MYSQL_INNODB_BUFFER_POOL_SIZE=512M
MYSQL_HOST_VOLUME=./volumes/mysql/data

26
docker/envs/databases/db-postgres.env.example
Normal file
@ -0,0 +1,26 @@
# ------------------------------
# Db Postgres Configuration
# ------------------------------

PGDATA=/var/lib/postgresql/data/pgdata
DB_TYPE=postgresql
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=db_postgres
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_SIZE=30
SQLALCHEMY_MAX_OVERFLOW=10
SQLALCHEMY_POOL_RECYCLE=3600
SQLALCHEMY_ECHO=false
SQLALCHEMY_POOL_PRE_PING=false
SQLALCHEMY_POOL_USE_LIFO=false
SQLALCHEMY_POOL_TIMEOUT=30
SQLALCHEMY_POOL_RESET_ON_RETURN=rollback
POSTGRES_MAX_CONNECTIONS=100
POSTGRES_SHARED_BUFFERS=128MB
POSTGRES_WORK_MEM=4MB
POSTGRES_MAINTENANCE_WORK_MEM=64MB
POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
POSTGRES_STATEMENT_TIMEOUT=0
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0

35
docker/envs/databases/redis.env.example
Normal file
@ -0,0 +1,35 @@
# ------------------------------
# Redis Configuration
# ------------------------------

REDIS_HOST=redis
REDIS_PORT=6379
REDIS_USERNAME=
REDIS_PASSWORD=difyai123456
REDIS_USE_SSL=false
REDIS_SSL_CERT_REQS=CERT_NONE
REDIS_SSL_CA_CERTS=
REDIS_SSL_CERTFILE=
REDIS_SSL_KEYFILE=
REDIS_DB=0
REDIS_KEY_PREFIX=
REDIS_MAX_CONNECTIONS=
REDIS_USE_SENTINEL=false
REDIS_SENTINELS=
REDIS_SENTINEL_SERVICE_NAME=
REDIS_SENTINEL_USERNAME=
REDIS_SENTINEL_PASSWORD=
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
REDIS_USE_CLUSTERS=false
REDIS_CLUSTERS=
REDIS_CLUSTERS_PASSWORD=
REDIS_RETRY_RETRIES=3
REDIS_RETRY_BACKOFF_BASE=1.0
REDIS_RETRY_BACKOFF_CAP=10.0
REDIS_SOCKET_TIMEOUT=5.0
REDIS_SOCKET_CONNECT_TIMEOUT=5.0
REDIS_HEALTH_CHECK_INTERVAL=30
EVENT_BUS_REDIS_URL=
EVENT_BUS_REDIS_CHANNEL_TYPE=pubsub
EVENT_BUS_REDIS_USE_CLUSTERS=false
BROKER_USE_SSL=false

7
docker/envs/infrastructure/certbot.env.example
Normal file
@ -0,0 +1,7 @@
# ------------------------------
# Certbot Configuration
# ------------------------------

CERTBOT_EMAIL=your_email@example.com
CERTBOT_DOMAIN=your_domain.com
CERTBOT_OPTIONS=

4
docker/envs/infrastructure/etcd.env.example
Normal file
@ -0,0 +1,4 @@
# ------------------------------
# Etcd Configuration
# ------------------------------

4
docker/envs/infrastructure/milvus-standalone.env.example
Normal file
@ -0,0 +1,4 @@
# ------------------------------
# Milvus Standalone Configuration
# ------------------------------

4
docker/envs/infrastructure/minio.env.example
Normal file
@ -0,0 +1,4 @@
# ------------------------------
# Minio Configuration
# ------------------------------

17
docker/envs/infrastructure/nginx.env.example
Normal file
@ -0,0 +1,17 @@
# ------------------------------
# Nginx Configuration
# ------------------------------

NGINX_SERVER_NAME=_
NGINX_HTTPS_ENABLED=false
NGINX_PORT=80
NGINX_SSL_PORT=443
NGINX_SSL_CERT_FILENAME=dify.crt
NGINX_SSL_CERT_KEY_FILENAME=dify.key
NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3
NGINX_WORKER_PROCESSES=auto
NGINX_CLIENT_MAX_BODY_SIZE=100M
NGINX_KEEPALIVE_TIMEOUT=65
NGINX_PROXY_READ_TIMEOUT=3600s
NGINX_PROXY_SEND_TIMEOUT=3600s
NGINX_ENABLE_CERTBOT_CHALLENGE=false

17
docker/envs/infrastructure/ssrf-proxy.env.example
Normal file
@ -0,0 +1,17 @@
# ------------------------------
# Ssrf Proxy Configuration
# ------------------------------

SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
SSRF_HTTP_PORT=3128
SSRF_COREDUMP_DIR=/var/spool/squid
SSRF_REVERSE_PROXY_PORT=8194
SSRF_SANDBOX_HOST=sandbox
SSRF_DEFAULT_TIME_OUT=5
SSRF_DEFAULT_CONNECT_TIME_OUT=5
SSRF_DEFAULT_READ_TIME_OUT=5
SSRF_DEFAULT_WRITE_TIME_OUT=5
SSRF_POOL_MAX_CONNECTIONS=100
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
SSRF_POOL_KEEPALIVE_EXPIRY=5.0

40
docker/envs/security.env.example
Normal file
@ -0,0 +1,40 @@
# ------------------------------
# Security Configuration
# ------------------------------

TIDB_ON_QDRANT_API_KEY=dify
TENCENT_VECTOR_DB_API_KEY=dify
ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
RELYT_PASSWORD=difyai123456
LINDORM_PASSWORD=admin
HUAWEI_CLOUD_PASSWORD=admin
UPSTASH_VECTOR_TOKEN=dify
TABLESTORE_ACCESS_KEY_ID=xxx
TABLESTORE_ACCESS_KEY_SECRET=xxx
UNSTRUCTURED_API_KEY=
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
NOTION_CLIENT_SECRET=
NOTION_INTERNAL_SECRET=
RESEND_API_KEY=your-resend-api-key
SMTP_PASSWORD=
SENDGRID_API_KEY=
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
CODE_EXECUTION_API_KEY=dify-sandbox
ALIYUN_SLS_ACCESS_KEY_ID=
ALIYUN_SLS_ACCESS_KEY_SECRET=
OTLP_API_KEY=
BAIDU_VECTOR_DB_API_KEY=dify
ANALYTICDB_KEY_ID=your-ak
ANALYTICDB_KEY_SECRET=your-sk
ANALYTICDB_PASSWORD=testpassword
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
TIDB_VECTOR_PASSWORD=
TIDB_PUBLIC_KEY=dify
TIDB_PRIVATE_KEY=dify
VIKINGDB_ACCESS_KEY=your-ak
VIKINGDB_SECRET_KEY=your-sk
SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
INIT_PASSWORD=

13
docker/envs/vectorstores/chroma.env.example
Normal file
@ -0,0 +1,13 @@
# ------------------------------
# Chroma Configuration
# ------------------------------

CHROMA_DATABASE=default_database
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthClientProvider
CHROMA_AUTH_CREDENTIALS=
CHROMA_HOST=127.0.0.1
CHROMA_PORT=8000
CHROMA_TENANT=default_tenant
CHROMA_SERVER_AUTHN_CREDENTIALS=difyai123456
CHROMA_SERVER_AUTHN_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
CHROMA_IS_PERSISTENT=TRUE

9
docker/envs/vectorstores/couchbase.env.example
Normal file
@ -0,0 +1,9 @@
# ------------------------------
# Couchbase Configuration
# ------------------------------

COUCHBASE_PASSWORD=password
COUCHBASE_BUCKET_NAME=Embeddings
COUCHBASE_SCOPE_NAME=_default
COUCHBASE_CONNECTION_STRING=couchbase://couchbase-server
COUCHBASE_USER=Administrator

17
docker/envs/vectorstores/elasticsearch.env.example
Normal file
@ -0,0 +1,17 @@
# ------------------------------
# Elasticsearch Configuration
# ------------------------------

ELASTICSEARCH_CLOUD_URL=YOUR-ELASTICSEARCH_CLOUD_URL
ELASTICSEARCH_PASSWORD=elastic
KIBANA_PORT=5601
ELASTICSEARCH_USE_CLOUD=false
ELASTICSEARCH_API_KEY=YOUR-ELASTICSEARCH_API_KEY
ELASTICSEARCH_VERIFY_CERTS=False
ELASTICSEARCH_CA_CERTS=
ELASTICSEARCH_REQUEST_TIMEOUT=100000
ELASTICSEARCH_RETRY_ON_TIMEOUT=True
ELASTICSEARCH_MAX_RETRIES=10
ELASTICSEARCH_HOST=0.0.0.0
ELASTICSEARCH_PORT=9200
ELASTICSEARCH_USERNAME=elastic

17
docker/envs/vectorstores/iris.env.example
Normal file
@ -0,0 +1,17 @@
# ------------------------------
# Iris Configuration
# ------------------------------

IRIS_CONNECTION_URL=
IRIS_MIN_CONNECTION=1
IRIS_MAX_CONNECTION=3
IRIS_TEXT_INDEX=true
IRIS_TEXT_INDEX_LANGUAGE=en
IRIS_TIMEZONE=UTC
IRIS_PASSWORD=Dify@1234
IRIS_DATABASE=USER
IRIS_SCHEMA=dify
IRIS_HOST=iris
IRIS_SUPER_SERVER_PORT=1972
IRIS_WEB_SERVER_PORT=52773
IRIS_USER=_SYSTEM

9
docker/envs/vectorstores/matrixone.env.example
Normal file
@ -0,0 +1,9 @@
# ------------------------------
# Matrixone Configuration
# ------------------------------

MATRIXONE_PASSWORD=111
MATRIXONE_HOST=matrixone
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_DATABASE=dify

13
docker/envs/vectorstores/milvus.env.example
Normal file
@ -0,0 +1,13 @@
# ------------------------------
# Milvus Configuration
# ------------------------------

MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
ETCD_ENDPOINTS=etcd:2379
MINIO_ADDRESS=minio:9000
ETCD_AUTO_COMPACTION_MODE=revision
ETCD_AUTO_COMPACTION_RETENTION=1000
ETCD_QUOTA_BACKEND_BYTES=4294967296
ETCD_SNAPSHOT_COUNT=50000
MILVUS_AUTHORIZATION_ENABLED=true

10
docker/envs/vectorstores/myscale.env.example
Normal file
@ -0,0 +1,10 @@
# ------------------------------
# Myscale Configuration
# ------------------------------

MYSCALE_PASSWORD=
MYSCALE_DATABASE=dify
MYSCALE_FTS_PARAMS=
MYSCALE_HOST=myscale
MYSCALE_PORT=8123
MYSCALE_USER=default

6
docker/envs/vectorstores/oceanbase.env.example
Normal file
@ -0,0 +1,6 @@
# ------------------------------
# Oceanbase Configuration
# ------------------------------

OCEANBASE_CLUSTER_NAME=difyai
OCEANBASE_MEMORY_LIMIT=6G

12
docker/envs/vectorstores/opengauss.env.example
Normal file
@ -0,0 +1,12 @@
# ------------------------------
# Opengauss Configuration
# ------------------------------

OPENGAUSS_PASSWORD=Dify@123
OPENGAUSS_DATABASE=dify
OPENGAUSS_MIN_CONNECTION=1
OPENGAUSS_MAX_CONNECTION=5
OPENGAUSS_ENABLE_PQ=false
OPENGAUSS_HOST=opengauss
OPENGAUSS_PORT=6600
OPENGAUSS_USER=postgres

22
docker/envs/vectorstores/opensearch.env.example
Normal file
@ -0,0 +1,22 @@
# ------------------------------
# Opensearch Configuration
# ------------------------------

OPENSEARCH_PASSWORD=admin
OPENSEARCH_AWS_REGION=ap-southeast-1
OPENSEARCH_AWS_SERVICE=aoss
OPENSEARCH_INITIAL_ADMIN_PASSWORD=Qazwsxedc!@#123
OPENSEARCH_MEMLOCK_SOFT=-1
OPENSEARCH_MEMLOCK_HARD=-1
OPENSEARCH_NOFILE_SOFT=65536
OPENSEARCH_NOFILE_HARD=65536
OPENSEARCH_HOST=opensearch
OPENSEARCH_PORT=9200
OPENSEARCH_SECURE=true
OPENSEARCH_VERIFY_CERTS=true
OPENSEARCH_AUTH_METHOD=basic
OPENSEARCH_USER=admin
OPENSEARCH_DISCOVERY_TYPE=single-node
OPENSEARCH_BOOTSTRAP_MEMORY_LOCK=true
OPENSEARCH_JAVA_OPTS_MIN=512m
OPENSEARCH_JAVA_OPTS_MAX=1024m

13
docker/envs/vectorstores/oracle.env.example
Normal file
@ -0,0 +1,13 @@
# ------------------------------
# Oracle Configuration
# ------------------------------

ORACLE_PASSWORD=dify
ORACLE_DSN=oracle:1521/FREEPDB1
ORACLE_CONFIG_DIR=/app/api/storage/wallet
ORACLE_WALLET_LOCATION=/app/api/storage/wallet
ORACLE_WALLET_PASSWORD=dify
ORACLE_IS_AUTONOMOUS=false
ORACLE_USER=dify
ORACLE_PWD=Dify123456
ORACLE_CHARACTERSET=AL32UTF8

9
docker/envs/vectorstores/pgvecto-rs.env.example
Normal file
@ -0,0 +1,9 @@
# ------------------------------
# Pgvecto Rs Configuration
# ------------------------------

PGVECTO_RS_HOST=pgvecto-rs
PGVECTO_RS_PORT=5432
PGVECTO_RS_USER=postgres
PGVECTO_RS_PASSWORD=difyai123456
PGVECTO_RS_DATABASE=dify

8
docker/envs/vectorstores/pgvector.env.example
Normal file
@ -0,0 +1,8 @@
# ------------------------------
# Pgvector Configuration
# ------------------------------

PGVECTOR_PGUSER=postgres
PGVECTOR_POSTGRES_PASSWORD=difyai123456
PGVECTOR_POSTGRES_DB=dify
PGVECTOR_PGDATA=/var/lib/postgresql/data/pgdata

4
docker/envs/vectorstores/qdrant.env.example
Normal file
@ -0,0 +1,4 @@
# ------------------------------
# Qdrant Configuration
# ------------------------------

4
docker/envs/vectorstores/seekdb.env.example
Normal file
@ -0,0 +1,4 @@
# ------------------------------
# Seekdb Configuration
# ------------------------------

11
docker/envs/vectorstores/vastbase.env.example
Normal file
@ -0,0 +1,11 @@
# ------------------------------
# Vastbase Configuration
# ------------------------------

VASTBASE_PASSWORD=Difyai123456
VASTBASE_DATABASE=dify
VASTBASE_MIN_CONNECTION=1
VASTBASE_MAX_CONNECTION=5
VASTBASE_HOST=vastbase
VASTBASE_PORT=5432
VASTBASE_USER=dify

18
docker/envs/vectorstores/weaviate.env.example
Normal file
@ -0,0 +1,18 @@
# ------------------------------
# Weaviate Configuration
# ------------------------------

WEAVIATE_PERSISTENCE_DATA_PATH=/var/lib/weaviate
WEAVIATE_QUERY_DEFAULTS_LIMIT=25
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
WEAVIATE_CLUSTER_HOSTNAME=node1
WEAVIATE_AUTHENTICATION_APIKEY_ENABLED=true
WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
WEAVIATE_DISABLE_TELEMETRY=false
WEAVIATE_ENABLE_TOKENIZER_GSE=false
WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA=false
WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR=false

@ -64,25 +64,61 @@ def generate_shared_env_block(env_vars, anchor_name="shared-api-worker-env"):
    return "\n".join(lines)


def insert_shared_env(template_path, output_path, shared_env_block, header_comments):
def create_env_files_from_example(env_example_path):
    """
    Inserts the shared environment variables block and header comments into the template file,
    removing any existing x-shared-env anchors, and generates the final docker-compose.yaml file.
    Always writes with LF line endings.
    Creates actual env files from .env.example by copying the categorized .env.example files.
    This allows docker-compose to use env_file references.
    Supports per-module structure with subdirectories.
    """
    base_dir = os.path.dirname(os.path.abspath(env_example_path))
    root_env_file = os.path.join(base_dir, ".env")
    if not os.path.exists(root_env_file):
        with open(env_example_path, "r", encoding="utf-8") as src, open(
            root_env_file, "w", encoding="utf-8", newline="\n"
        ) as dst:
            dst.write(src.read())
        print(f"Created {root_env_file}")
    else:
        print(f"{root_env_file} already exists, skipping")

    envs_dir = os.path.join(base_dir, "envs")
    if not os.path.isdir(envs_dir):
        print(f"No envs directory found at {envs_dir}, skipping split env files")
        return []

    created_files = []
    # Walk through all .env.example files in subdirectories
    for root, dirs, files in os.walk(envs_dir):
        for file in files:
            if file.endswith('.env.example'):
                example_file = os.path.join(root, file)
                env_file = example_file.replace('.env.example', '.env')

                if os.path.exists(env_file):
                    print(f"{env_file} already exists, skipping")
                    continue

                # Copy .example to actual file
                with open(example_file, "r", encoding="utf-8") as src, open(
                    env_file, "w", encoding="utf-8", newline="\n"
                ) as dst:
                    dst.write(src.read())
                created_files.append(env_file)
                print(f"Created {env_file}")

    return created_files


def insert_shared_env(template_path, output_path, header_comments):
    """
    Copies the template file to output path with header comments.
    The template now uses env_file references instead of a huge YAML anchor.
    """
    with open(template_path, "r", encoding="utf-8") as f:
        template_content = f.read()

    # Remove existing x-shared-env: &shared-api-worker-env lines
    template_content = re.sub(
        r"^x-shared-env: &shared-api-worker-env\s*\n?",
        "",
        template_content,
        flags=re.MULTILINE,
    )

    # Prepare the final content with header comments and shared env block
    final_content = f"{header_comments}\n{shared_env_block}\n\n{template_content}"
    # Prepare the final content with header comments
    final_content = f"{header_comments}\n{template_content}"

    with open(output_path, "w", encoding="utf-8", newline="\n") as f:
        f.write(final_content)
@ -90,10 +126,10 @@ def insert_shared_env(template_path, output_path, shared_env_block, header_comme


def main():
    env_example_path = ".env.example"
    template_path = "docker-compose-template.yaml"
    output_path = "docker-compose.yaml"
    anchor_name = "shared-api-worker-env"  # Can be modified as needed
    base_dir = os.path.dirname(os.path.abspath(__file__))
    env_example_path = os.path.join(base_dir, ".env.example")
    template_path = os.path.join(base_dir, "docker-compose-template.yaml")
    output_path = os.path.join(base_dir, "docker-compose.yaml")

    # Define header comments to be added at the top of docker-compose.yaml
    header_comments = (
@ -110,17 +146,14 @@ def main():
            print(f"Error: File {path} does not exist.")
            sys.exit(1)

    # Parse .env.example file
    env_vars = parse_env_example(env_example_path)
    # Create env files from categorized .env.example files
    # These files are used by docker-compose's env_file directive
    # This ensures .env files exist even in CI/CD environments
    create_env_files_from_example(env_example_path)

    if not env_vars:
        print("Warning: No environment variables found in .env.example.")

    # Generate shared environment variables block
    shared_env_block = generate_shared_env_block(env_vars, anchor_name)

    # Insert shared environment variables block and header comments into the template
    insert_shared_env(template_path, output_path, shared_env_block, header_comments)
    # Copy template to output with header comments
    # The template now uses env_file references instead of a huge YAML anchor
    insert_shared_env(template_path, output_path, header_comments)


if __name__ == "__main__":

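For reference, here is a minimal sketch (not the actual template) of what an env_file-based service entry in docker-compose-template.yaml can look like after this change. Which env files a given service loads is an assumption for illustration; only the file paths themselves come from the envs/ tree added above, and the paths are relative to the docker/ directory where the compose file lives.

services:
  worker:
    image: langgenius/dify-api:latest        # illustrative tag, not from the template
    env_file:
      - .env                                 # root file the script copies from .env.example
      - envs/core-services/worker.env        # MODE=worker plus worker-specific settings
      - envs/databases/db-postgres.env
      - envs/databases/redis.env
      - envs/security.env
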
@ -36,7 +36,7 @@ export const webDir = path.join(rootDir, 'web')

export const middlewareComposeFile = path.join(dockerDir, 'docker-compose.middleware.yaml')
export const middlewareEnvFile = path.join(dockerDir, 'middleware.env')
export const middlewareEnvExampleFile = path.join(dockerDir, 'middleware.env.example')
export const middlewareEnvExampleFile = path.join(dockerDir, 'envs', 'middleware.env.example')
export const webEnvLocalFile = path.join(webDir, '.env.local')
export const webEnvExampleFile = path.join(webDir, '.env.example')
export const apiEnvExampleFile = path.join(apiDir, 'tests', 'integration_tests', '.env.example')