mirror of
https://github.com/langgenius/dify.git
synced 2026-05-09 04:36:31 +08:00
Merge branch 'main' into upgrade-graphon-to-0-3-0
This commit is contained in:
commit
cd39c31014
2
.github/workflows/api-tests.yml
vendored
2
.github/workflows/api-tests.yml
vendored
@ -99,7 +99,7 @@ jobs:
|
||||
- name: Set up dotenvs
|
||||
run: |
|
||||
cp docker/.env.example docker/.env
|
||||
cp docker/middleware.env.example docker/middleware.env
|
||||
cp docker/envs/middleware.env.example docker/middleware.env
|
||||
|
||||
- name: Expose Service Ports
|
||||
run: sh .github/workflows/expose_service_ports.sh
|
||||
|
||||
4
.github/workflows/db-migration-test.yml
vendored
4
.github/workflows/db-migration-test.yml
vendored
@ -37,7 +37,7 @@ jobs:
|
||||
- name: Prepare middleware env
|
||||
run: |
|
||||
cd docker
|
||||
cp middleware.env.example middleware.env
|
||||
cp envs/middleware.env.example middleware.env
|
||||
|
||||
- name: Set up Middlewares
|
||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||
@ -87,7 +87,7 @@ jobs:
|
||||
- name: Prepare middleware env for MySQL
|
||||
run: |
|
||||
cd docker
|
||||
cp middleware.env.example middleware.env
|
||||
cp envs/middleware.env.example middleware.env
|
||||
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
|
||||
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
|
||||
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
|
||||
|
||||
8
.github/workflows/main-ci.yml
vendored
8
.github/workflows/main-ci.yml
vendored
@ -57,7 +57,7 @@ jobs:
|
||||
- '.github/workflows/api-tests.yml'
|
||||
- '.github/workflows/expose_service_ports.sh'
|
||||
- 'docker/.env.example'
|
||||
- 'docker/middleware.env.example'
|
||||
- 'docker/envs/middleware.env.example'
|
||||
- 'docker/docker-compose.middleware.yaml'
|
||||
- 'docker/docker-compose-template.yaml'
|
||||
- 'docker/generate_docker_compose'
|
||||
@ -84,7 +84,7 @@ jobs:
|
||||
- 'pnpm-workspace.yaml'
|
||||
- '.nvmrc'
|
||||
- 'docker/docker-compose.middleware.yaml'
|
||||
- 'docker/middleware.env.example'
|
||||
- 'docker/envs/middleware.env.example'
|
||||
- '.github/workflows/web-e2e.yml'
|
||||
- '.github/actions/setup-web/**'
|
||||
vdb:
|
||||
@ -94,7 +94,7 @@ jobs:
|
||||
- '.github/workflows/vdb-tests.yml'
|
||||
- '.github/workflows/expose_service_ports.sh'
|
||||
- 'docker/.env.example'
|
||||
- 'docker/middleware.env.example'
|
||||
- 'docker/envs/middleware.env.example'
|
||||
- 'docker/docker-compose.yaml'
|
||||
- 'docker/docker-compose-template.yaml'
|
||||
- 'docker/generate_docker_compose'
|
||||
@ -116,7 +116,7 @@ jobs:
|
||||
- '.github/workflows/db-migration-test.yml'
|
||||
- '.github/workflows/expose_service_ports.sh'
|
||||
- 'docker/.env.example'
|
||||
- 'docker/middleware.env.example'
|
||||
- 'docker/envs/middleware.env.example'
|
||||
- 'docker/docker-compose.middleware.yaml'
|
||||
- 'docker/docker-compose-template.yaml'
|
||||
- 'docker/generate_docker_compose'
|
||||
|
||||
2
.github/workflows/vdb-tests-full.yml
vendored
2
.github/workflows/vdb-tests-full.yml
vendored
@ -51,7 +51,7 @@ jobs:
|
||||
- name: Set up dotenvs
|
||||
run: |
|
||||
cp docker/.env.example docker/.env
|
||||
cp docker/middleware.env.example docker/middleware.env
|
||||
cp docker/envs/middleware.env.example docker/middleware.env
|
||||
|
||||
- name: Expose Service Ports
|
||||
run: sh .github/workflows/expose_service_ports.sh
|
||||
|
||||
2
.github/workflows/vdb-tests.yml
vendored
2
.github/workflows/vdb-tests.yml
vendored
@ -48,7 +48,7 @@ jobs:
|
||||
- name: Set up dotenvs
|
||||
run: |
|
||||
cp docker/.env.example docker/.env
|
||||
cp docker/middleware.env.example docker/middleware.env
|
||||
cp docker/envs/middleware.env.example docker/middleware.env
|
||||
|
||||
- name: Expose Service Ports
|
||||
run: sh .github/workflows/expose_service_ports.sh
|
||||
|
||||
@ -76,11 +76,10 @@ The easiest way to start the Dify server is through [Docker Compose](docker/dock
|
||||
```bash
|
||||
cd dify
|
||||
cd docker
|
||||
./dify-compose up -d
|
||||
cp .env.example .env
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
On Windows PowerShell, run `.\dify-compose.ps1 up -d` from the `docker` directory.
|
||||
|
||||
After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
|
||||
|
||||
#### Seeking help
|
||||
@ -138,7 +137,7 @@ Star Dify on GitHub and be instantly notified of new releases.
|
||||
|
||||
### Custom configurations
|
||||
|
||||
If you need to customize the configuration, add only the values you want to override to `docker/.env`. The default values live in [`docker/.env.default`](docker/.env.default), and the full reference remains in [`docker/.env.example`](docker/.env.example). After making any changes, re-run `./dify-compose up -d` or `.\dify-compose.ps1 up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
|
||||
If you need to customize the configuration, edit `docker/.env`. The essential startup defaults live in [`docker/.env.example`](docker/.env.example), and optional advanced variables are split under `docker/envs/` by theme. After making any changes, re-run `docker compose up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
|
||||
|
||||
### Metrics Monitoring with Grafana
|
||||
|
||||
|
||||
@ -98,6 +98,8 @@ DB_DATABASE=dify
|
||||
|
||||
SQLALCHEMY_POOL_PRE_PING=true
|
||||
SQLALCHEMY_POOL_TIMEOUT=30
|
||||
# Connection pool reset behavior on return
|
||||
SQLALCHEMY_POOL_RESET_ON_RETURN=rollback
|
||||
|
||||
# Storage configuration
|
||||
# use for store upload files, private keys...
|
||||
@ -381,7 +383,7 @@ VIKINGDB_ACCESS_KEY=your-ak
|
||||
VIKINGDB_SECRET_KEY=your-sk
|
||||
VIKINGDB_REGION=cn-shanghai
|
||||
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
|
||||
VIKINGDB_SCHEMA=http
|
||||
VIKINGDB_SCHEME=http
|
||||
VIKINGDB_CONNECTION_TIMEOUT=30
|
||||
VIKINGDB_SOCKET_TIMEOUT=30
|
||||
|
||||
@ -432,8 +434,6 @@ UPLOAD_FILE_EXTENSION_BLACKLIST=
|
||||
|
||||
# Model configuration
|
||||
MULTIMODAL_SEND_FORMAT=base64
|
||||
PROMPT_GENERATION_MAX_TOKENS=512
|
||||
CODE_GENERATION_MAX_TOKENS=1024
|
||||
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
||||
|
||||
# Mail configuration, support: resend, smtp, sendgrid
|
||||
|
||||
@ -114,7 +114,7 @@ class SQLAlchemyEngineOptionsDict(TypedDict):
|
||||
pool_pre_ping: bool
|
||||
connect_args: dict[str, str]
|
||||
pool_use_lifo: bool
|
||||
pool_reset_on_return: None
|
||||
pool_reset_on_return: Literal["commit", "rollback", None]
|
||||
pool_timeout: int
|
||||
|
||||
|
||||
@ -223,6 +223,11 @@ class DatabaseConfig(BaseSettings):
|
||||
default=30,
|
||||
)
|
||||
|
||||
SQLALCHEMY_POOL_RESET_ON_RETURN: Literal["commit", "rollback", None] = Field(
|
||||
description="Connection pool reset behavior on return. Options: 'commit', 'rollback', or None",
|
||||
default="rollback",
|
||||
)
|
||||
|
||||
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
|
||||
description="Number of processes for the retrieval service, default to CPU cores.",
|
||||
default=os.cpu_count() or 1,
|
||||
@ -252,7 +257,7 @@ class DatabaseConfig(BaseSettings):
|
||||
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
|
||||
"connect_args": connect_args,
|
||||
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
|
||||
"pool_reset_on_return": None,
|
||||
"pool_reset_on_return": self.SQLALCHEMY_POOL_RESET_ON_RETURN,
|
||||
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
|
||||
}
|
||||
return result
|
||||
|
||||
@ -19,7 +19,7 @@
|
||||
"name": "Website Generator"
|
||||
},
|
||||
"app_id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
|
||||
"category": "Programming",
|
||||
"categories": ["Programming"],
|
||||
"copyright": null,
|
||||
"description": null,
|
||||
"is_listed": true,
|
||||
@ -35,7 +35,7 @@
|
||||
"name": "Investment Analysis Report Copilot"
|
||||
},
|
||||
"app_id": "a23b57fa-85da-49c0-a571-3aff375976c1",
|
||||
"category": "Agent",
|
||||
"categories": ["Agent"],
|
||||
"copyright": "Dify.AI",
|
||||
"description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
|
||||
"is_listed": true,
|
||||
@ -51,7 +51,7 @@
|
||||
"name": "Workflow Planning Assistant "
|
||||
},
|
||||
"app_id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "An assistant that helps you plan and select the right node for a workflow (V0.6.0). ",
|
||||
"is_listed": true,
|
||||
@ -67,7 +67,7 @@
|
||||
"name": "Automated Email Reply "
|
||||
},
|
||||
"app_id": "e9d92058-7d20-4904-892f-75d90bef7587",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Reply emails using Gmail API. It will automatically retrieve email in your inbox and create a response in Gmail. \nConfigure your Gmail API in Google Cloud Console. ",
|
||||
"is_listed": true,
|
||||
@ -83,7 +83,7 @@
|
||||
"name": "Book Translation "
|
||||
},
|
||||
"app_id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "A workflow designed to translate a full book up to 15000 tokens per run. Uses Code node to separate text into chunks and Iteration to translate each chunk. ",
|
||||
"is_listed": true,
|
||||
@ -99,7 +99,7 @@
|
||||
"name": "Python bug fixer"
|
||||
},
|
||||
"app_id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
|
||||
"category": "Programming",
|
||||
"categories": ["Programming"],
|
||||
"copyright": null,
|
||||
"description": null,
|
||||
"is_listed": true,
|
||||
@ -115,7 +115,7 @@
|
||||
"name": "Code Interpreter"
|
||||
},
|
||||
"app_id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
|
||||
"category": "Programming",
|
||||
"categories": ["Programming"],
|
||||
"copyright": "Copyright 2023 Dify",
|
||||
"description": "Code interpreter, clarifying the syntax and semantics of the code.",
|
||||
"is_listed": true,
|
||||
@ -131,7 +131,7 @@
|
||||
"name": "SVG Logo Design "
|
||||
},
|
||||
"app_id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
|
||||
"category": "Agent",
|
||||
"categories": ["Agent"],
|
||||
"copyright": "Dify.AI",
|
||||
"description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL·E 3. ",
|
||||
"is_listed": true,
|
||||
@ -147,7 +147,7 @@
|
||||
"name": "Long Story Generator (Iteration) "
|
||||
},
|
||||
"app_id": "5efb98d7-176b-419c-b6ef-50767391ab62",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "A workflow demonstrating how to use Iteration node to generate long article that is longer than the context length of LLMs. ",
|
||||
"is_listed": true,
|
||||
@ -163,7 +163,7 @@
|
||||
"name": "Text Summarization Workflow"
|
||||
},
|
||||
"app_id": "f00c4531-6551-45ee-808f-1d7903099515",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Based on users' choice, retrieve external knowledge to more accurately summarize articles.",
|
||||
"is_listed": true,
|
||||
@ -179,7 +179,7 @@
|
||||
"name": "YouTube Channel Data Analysis"
|
||||
},
|
||||
"app_id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
|
||||
"category": "Agent",
|
||||
"categories": ["Agent"],
|
||||
"copyright": "Dify.AI",
|
||||
"description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
|
||||
"is_listed": true,
|
||||
@ -195,7 +195,7 @@
|
||||
"name": "Article Grading Bot"
|
||||
},
|
||||
"app_id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
|
||||
"category": "Writing",
|
||||
"categories": ["Writing"],
|
||||
"copyright": null,
|
||||
"description": "Assess the quality of articles and text based on user defined criteria. ",
|
||||
"is_listed": true,
|
||||
@ -211,7 +211,7 @@
|
||||
"name": "SEO Blog Generator"
|
||||
},
|
||||
"app_id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Workflow for retrieving information from the internet, followed by segmented generation of SEO blogs.",
|
||||
"is_listed": true,
|
||||
@ -227,7 +227,7 @@
|
||||
"name": "SQL Creator"
|
||||
},
|
||||
"app_id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
|
||||
"category": "Programming",
|
||||
"categories": ["Programming"],
|
||||
"copyright": "Copyright 2023 Dify",
|
||||
"description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
|
||||
"is_listed": true,
|
||||
@ -243,7 +243,7 @@
|
||||
"name": "Sentiment Analysis "
|
||||
},
|
||||
"app_id": "f06bf86b-d50c-4895-a942-35112dbe4189",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Batch sentiment analysis of text, followed by JSON output of sentiment classification along with scores.",
|
||||
"is_listed": true,
|
||||
@ -259,7 +259,7 @@
|
||||
"name": "Strategic Consulting Expert"
|
||||
},
|
||||
"app_id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
|
||||
"category": "Assistant",
|
||||
"categories": ["Assistant"],
|
||||
"copyright": "Copyright 2023 Dify",
|
||||
"description": "I can answer your questions related to strategic marketing.",
|
||||
"is_listed": true,
|
||||
@ -275,7 +275,7 @@
|
||||
"name": "Code Converter"
|
||||
},
|
||||
"app_id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
|
||||
"category": "Programming",
|
||||
"categories": ["Programming"],
|
||||
"copyright": "Copyright 2023 Dify",
|
||||
"description": "This is an application that provides the ability to convert code snippets in multiple programming languages. You can input the code you wish to convert, select the target programming language, and get the desired output.",
|
||||
"is_listed": true,
|
||||
@ -291,7 +291,7 @@
|
||||
"name": "Question Classifier + Knowledge + Chatbot "
|
||||
},
|
||||
"app_id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Basic Workflow Template, a chatbot capable of identifying intents alongside with a knowledge base.",
|
||||
"is_listed": true,
|
||||
@ -307,7 +307,7 @@
|
||||
"name": "AI Front-end interviewer"
|
||||
},
|
||||
"app_id": "127efead-8944-4e20-ba9d-12402eb345e0",
|
||||
"category": "HR",
|
||||
"categories": ["HR"],
|
||||
"copyright": "Copyright 2023 Dify",
|
||||
"description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
|
||||
"is_listed": true,
|
||||
@ -323,7 +323,7 @@
|
||||
"name": "Knowledge Retrieval + Chatbot "
|
||||
},
|
||||
"app_id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Basic Workflow Template, A chatbot with a knowledge base. ",
|
||||
"is_listed": true,
|
||||
@ -339,7 +339,7 @@
|
||||
"name": "Email Assistant Workflow "
|
||||
},
|
||||
"app_id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "A multifunctional email assistant capable of summarizing, replying, composing, proofreading, and checking grammar.",
|
||||
"is_listed": true,
|
||||
@ -355,7 +355,7 @@
|
||||
"name": "Customer Review Analysis Workflow "
|
||||
},
|
||||
"app_id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
|
||||
"category": "Workflow",
|
||||
"categories": ["Workflow"],
|
||||
"copyright": null,
|
||||
"description": "Utilize LLM (Large Language Models) to classify customer reviews and forward them to the internal system.",
|
||||
"is_listed": true,
|
||||
|
||||
@ -25,6 +25,7 @@ from controllers.console.wraps import (
|
||||
is_admin_or_owner_required,
|
||||
setup_required,
|
||||
)
|
||||
from core.db.session_factory import session_factory
|
||||
from core.ops.ops_trace_manager import OpsTraceManager
|
||||
from core.rag.entities import PreProcessingRule, Rule, Segmentation
|
||||
from core.rag.retrieval.retrieval_methods import RetrievalMethod
|
||||
@ -841,7 +842,8 @@ class AppTraceApi(Resource):
|
||||
@account_initialization_required
|
||||
def get(self, app_id):
|
||||
"""Get app trace"""
|
||||
app_trace_config = OpsTraceManager.get_app_tracing_config(app_id=app_id)
|
||||
with session_factory.create_session() as session:
|
||||
app_trace_config = OpsTraceManager.get_app_tracing_config(app_id, session)
|
||||
|
||||
return app_trace_config
|
||||
|
||||
|
||||
@ -52,7 +52,7 @@ class RecommendedAppResponse(ResponseModel):
|
||||
copyright: str | None = None
|
||||
privacy_policy: str | None = None
|
||||
custom_disclaimer: str | None = None
|
||||
category: str | None = None
|
||||
categories: list[str] = Field(default_factory=list)
|
||||
position: int | None = None
|
||||
is_listed: bool | None = None
|
||||
can_trial: bool | None = None
|
||||
|
||||
@ -842,24 +842,24 @@ class WorkflowResponseConverter:
|
||||
return []
|
||||
|
||||
files: list[Mapping[str, Any]] = []
|
||||
if isinstance(value, FileSegment):
|
||||
files.append(value.value.to_dict())
|
||||
elif isinstance(value, ArrayFileSegment):
|
||||
files.extend([i.to_dict() for i in value.value])
|
||||
elif isinstance(value, File):
|
||||
files.append(value.to_dict())
|
||||
elif isinstance(value, list):
|
||||
for item in value:
|
||||
file = cls._get_file_var_from_value(item)
|
||||
match value:
|
||||
case FileSegment():
|
||||
files.append(value.value.to_dict())
|
||||
case ArrayFileSegment():
|
||||
files.extend([i.to_dict() for i in value.value])
|
||||
case File():
|
||||
files.append(value.to_dict())
|
||||
case list():
|
||||
for item in value:
|
||||
file = cls._get_file_var_from_value(item)
|
||||
if file:
|
||||
files.append(file)
|
||||
case dict():
|
||||
file = cls._get_file_var_from_value(value)
|
||||
if file:
|
||||
files.append(file)
|
||||
elif isinstance(
|
||||
value,
|
||||
dict,
|
||||
):
|
||||
file = cls._get_file_var_from_value(value)
|
||||
if file:
|
||||
files.append(file)
|
||||
case _:
|
||||
pass
|
||||
|
||||
return files
|
||||
|
||||
|
||||
@ -569,13 +569,13 @@ class OpsTraceManager:
|
||||
db.session.commit()
|
||||
|
||||
@classmethod
|
||||
def get_app_tracing_config(cls, app_id: str):
|
||||
def get_app_tracing_config(cls, app_id: str, session: Session):
|
||||
"""
|
||||
Get app tracing config
|
||||
:param app_id: app id
|
||||
:return:
|
||||
"""
|
||||
app: App | None = db.session.get(App, app_id)
|
||||
app: App | None = session.get(App, app_id)
|
||||
if not app:
|
||||
raise ValueError("App not found")
|
||||
if not app.tracing:
|
||||
|
||||
@ -53,24 +53,27 @@ class PromptMessageUtil:
|
||||
files = []
|
||||
if isinstance(prompt_message.content, list):
|
||||
for content in prompt_message.content:
|
||||
if isinstance(content, TextPromptMessageContent):
|
||||
text += content.data
|
||||
elif isinstance(content, ImagePromptMessageContent):
|
||||
files.append(
|
||||
{
|
||||
"type": "image",
|
||||
"data": content.data[:10] + "...[TRUNCATED]..." + content.data[-10:],
|
||||
"detail": content.detail.value,
|
||||
}
|
||||
)
|
||||
elif isinstance(content, AudioPromptMessageContent):
|
||||
files.append(
|
||||
{
|
||||
"type": "audio",
|
||||
"data": content.data[:10] + "...[TRUNCATED]..." + content.data[-10:],
|
||||
"format": content.format,
|
||||
}
|
||||
)
|
||||
match content:
|
||||
case TextPromptMessageContent():
|
||||
text += content.data
|
||||
case ImagePromptMessageContent():
|
||||
files.append(
|
||||
{
|
||||
"type": "image",
|
||||
"data": content.data[:10] + "...[TRUNCATED]..." + content.data[-10:],
|
||||
"detail": content.detail.value,
|
||||
}
|
||||
)
|
||||
case AudioPromptMessageContent():
|
||||
files.append(
|
||||
{
|
||||
"type": "audio",
|
||||
"data": content.data[:10] + "...[TRUNCATED]..." + content.data[-10:],
|
||||
"format": content.format,
|
||||
}
|
||||
)
|
||||
case _:
|
||||
continue
|
||||
else:
|
||||
text = cast(str, prompt_message.content)
|
||||
|
||||
|
||||
@ -23,36 +23,37 @@ _TOOL_FILE_URL_PATTERN = re.compile(r"(?:^|/+)files/tools/(?P<tool_file_id>[^/?#
|
||||
|
||||
|
||||
def safe_json_value(v):
|
||||
if isinstance(v, datetime):
|
||||
tz_name = "UTC"
|
||||
if isinstance(current_user, Account) and current_user.timezone is not None:
|
||||
tz_name = current_user.timezone
|
||||
return v.astimezone(pytz.timezone(tz_name)).isoformat()
|
||||
elif isinstance(v, date):
|
||||
return v.isoformat()
|
||||
elif isinstance(v, UUID):
|
||||
return str(v)
|
||||
elif isinstance(v, Decimal):
|
||||
return float(v)
|
||||
elif isinstance(v, bytes):
|
||||
try:
|
||||
return v.decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
return v.hex()
|
||||
elif isinstance(v, memoryview):
|
||||
return v.tobytes().hex()
|
||||
elif isinstance(v, np.integer):
|
||||
return int(v)
|
||||
elif isinstance(v, np.floating):
|
||||
return float(v)
|
||||
elif isinstance(v, np.ndarray):
|
||||
return v.tolist()
|
||||
elif isinstance(v, dict):
|
||||
return safe_json_dict(v)
|
||||
elif isinstance(v, list | tuple | set):
|
||||
return [safe_json_value(i) for i in v]
|
||||
else:
|
||||
return v
|
||||
match v:
|
||||
case datetime():
|
||||
tz_name = "UTC"
|
||||
if isinstance(current_user, Account) and current_user.timezone is not None:
|
||||
tz_name = current_user.timezone
|
||||
return v.astimezone(pytz.timezone(tz_name)).isoformat()
|
||||
case date():
|
||||
return v.isoformat()
|
||||
case UUID():
|
||||
return str(v)
|
||||
case Decimal():
|
||||
return float(v)
|
||||
case bytes():
|
||||
try:
|
||||
return v.decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
return v.hex()
|
||||
case memoryview():
|
||||
return v.tobytes().hex()
|
||||
case np.integer():
|
||||
return int(v)
|
||||
case np.floating():
|
||||
return float(v)
|
||||
case np.ndarray():
|
||||
return v.tolist()
|
||||
case dict():
|
||||
return safe_json_dict(v)
|
||||
case list() | tuple() | set():
|
||||
return [safe_json_value(i) for i in v]
|
||||
case _:
|
||||
return v
|
||||
|
||||
|
||||
def safe_json_dict(d: dict[str, Any]):
|
||||
|
||||
@ -0,0 +1,26 @@
|
||||
"""add recommended app categories
|
||||
|
||||
Revision ID: a4f2d8c9b731
|
||||
Revises: 227822d22895
|
||||
Create Date: 2026-04-29 12:00:00.000000
|
||||
|
||||
"""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "a4f2d8c9b731"
|
||||
down_revision = "227822d22895"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
with op.batch_alter_table("recommended_apps", schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column("categories", sa.JSON(), nullable=True))
|
||||
|
||||
|
||||
def downgrade():
|
||||
with op.batch_alter_table("recommended_apps", schema=None) as batch_op:
|
||||
batch_op.drop_column("categories")
|
||||
@ -878,6 +878,7 @@ class RecommendedApp(TypeBase):
|
||||
copyright: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
privacy_policy: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
category: Mapped[str] = mapped_column(String(255), nullable=False)
|
||||
categories: Mapped[list[str] | None] = mapped_column(sa.JSON, nullable=True, default=None)
|
||||
custom_disclaimer: Mapped[str] = mapped_column(LongText, default="")
|
||||
position: Mapped[int] = mapped_column(sa.Integer, nullable=False, default=0)
|
||||
is_listed: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, default=True)
|
||||
|
||||
49
api/services/recommend_app/category_order.py
Normal file
49
api/services/recommend_app/category_order.py
Normal file
@ -0,0 +1,49 @@
|
||||
"""Apply Redis-backed category ordering for DB-backed Explore apps."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from collections.abc import Collection
|
||||
from typing import Any
|
||||
|
||||
from extensions.ext_redis import redis_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
EXPLORE_APP_CATEGORY_ORDER_KEY_PREFIX = "explore:apps:category_order"
|
||||
|
||||
|
||||
def _category_order_key(language: str) -> str:
|
||||
return f"{EXPLORE_APP_CATEGORY_ORDER_KEY_PREFIX}:{language}"
|
||||
|
||||
|
||||
def get_explore_app_category_order(language: str) -> list[str]:
|
||||
try:
|
||||
raw_categories = redis_client.get(_category_order_key(language))
|
||||
except Exception:
|
||||
logger.exception("Failed to read explore app category order from Redis.")
|
||||
return []
|
||||
|
||||
if not raw_categories:
|
||||
return []
|
||||
|
||||
if isinstance(raw_categories, bytes):
|
||||
raw_categories = raw_categories.decode("utf-8")
|
||||
|
||||
try:
|
||||
categories: Any = json.loads(raw_categories)
|
||||
except (TypeError, json.JSONDecodeError):
|
||||
logger.warning("Invalid explore app category order payload for language %s.", language)
|
||||
return []
|
||||
|
||||
if not isinstance(categories, list):
|
||||
return []
|
||||
|
||||
return [category for category in categories if isinstance(category, str)]
|
||||
|
||||
|
||||
def order_categories(categories: Collection[str], language: str) -> list[str]:
|
||||
configured_order = get_explore_app_category_order(language)
|
||||
if configured_order:
|
||||
return configured_order
|
||||
|
||||
return sorted(categories)
|
||||
@ -6,6 +6,7 @@ from constants.languages import languages
|
||||
from extensions.ext_database import db
|
||||
from models.model import App, RecommendedApp
|
||||
from services.app_dsl_service import AppDslService
|
||||
from services.recommend_app.category_order import order_categories
|
||||
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
|
||||
from services.recommend_app.recommend_app_type import RecommendAppType
|
||||
|
||||
@ -18,7 +19,7 @@ class RecommendedAppItemDict(TypedDict):
|
||||
copyright: Any
|
||||
privacy_policy: Any
|
||||
custom_disclaimer: str
|
||||
category: str
|
||||
categories: list[str]
|
||||
position: int
|
||||
is_listed: bool
|
||||
|
||||
@ -80,6 +81,7 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
|
||||
if not site:
|
||||
continue
|
||||
|
||||
app_categories = recommended_app.categories or []
|
||||
recommended_app_result: RecommendedAppItemDict = {
|
||||
"id": recommended_app.id,
|
||||
"app": recommended_app.app,
|
||||
@ -88,15 +90,18 @@ class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
|
||||
"copyright": site.copyright,
|
||||
"privacy_policy": site.privacy_policy,
|
||||
"custom_disclaimer": site.custom_disclaimer,
|
||||
"category": recommended_app.category,
|
||||
"categories": app_categories,
|
||||
"position": recommended_app.position,
|
||||
"is_listed": recommended_app.is_listed,
|
||||
}
|
||||
recommended_apps_result.append(recommended_app_result)
|
||||
|
||||
categories.add(recommended_app.category)
|
||||
categories.update(app_categories)
|
||||
|
||||
return RecommendedAppsResultDict(recommended_apps=recommended_apps_result, categories=sorted(categories))
|
||||
return RecommendedAppsResultDict(
|
||||
recommended_apps=recommended_apps_result,
|
||||
categories=order_categories(categories, language),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def fetch_recommended_app_detail_from_db(cls, app_id: str) -> RecommendedAppDetailDict | None:
|
||||
|
||||
@ -194,14 +194,15 @@ class VariableTruncator(BaseTruncator):
|
||||
|
||||
result: _PartResult[Any]
|
||||
# Apply type-specific truncation with target size
|
||||
if isinstance(segment, ArraySegment):
|
||||
result = self._truncate_array(segment.value, target_size)
|
||||
elif isinstance(segment, StringSegment):
|
||||
result = self._truncate_string(segment.value, target_size)
|
||||
elif isinstance(segment, ObjectSegment):
|
||||
result = self._truncate_object(segment.value, target_size)
|
||||
else:
|
||||
raise AssertionError("this should be unreachable.")
|
||||
match segment:
|
||||
case ArraySegment():
|
||||
result = self._truncate_array(segment.value, target_size)
|
||||
case StringSegment():
|
||||
result = self._truncate_string(segment.value, target_size)
|
||||
case ObjectSegment():
|
||||
result = self._truncate_object(segment.value, target_size)
|
||||
case _:
|
||||
raise AssertionError("this should be unreachable.")
|
||||
|
||||
return _PartResult(
|
||||
value=segment.model_copy(update={"value": result.value}),
|
||||
@ -219,40 +220,41 @@ class VariableTruncator(BaseTruncator):
|
||||
return VariableTruncator.calculate_json_size(value.model_dump(), depth=depth + 1)
|
||||
if depth > _MAX_DEPTH:
|
||||
raise MaxDepthExceededError()
|
||||
if isinstance(value, str):
|
||||
# Ideally, the size of strings should be calculated based on their utf-8 encoded length.
|
||||
# However, this adds complexity as we would need to compute encoded sizes consistently
|
||||
# throughout the code. Therefore, we approximate the size using the string's length.
|
||||
# Rough estimate: number of characters, plus 2 for quotes
|
||||
return len(value) + 2
|
||||
elif isinstance(value, (int, float)):
|
||||
return len(str(value))
|
||||
elif isinstance(value, bool):
|
||||
return 4 if value else 5 # "true" or "false"
|
||||
elif value is None:
|
||||
return 4 # "null"
|
||||
elif isinstance(value, list):
|
||||
# Size = sum of elements + separators + brackets
|
||||
total = 2 # "[]"
|
||||
for i, item in enumerate(value):
|
||||
if i > 0:
|
||||
total += 1 # ","
|
||||
total += VariableTruncator.calculate_json_size(item, depth=depth + 1)
|
||||
return total
|
||||
elif isinstance(value, dict):
|
||||
# Size = sum of keys + values + separators + brackets
|
||||
total = 2 # "{}"
|
||||
for index, key in enumerate(value.keys()):
|
||||
if index > 0:
|
||||
total += 1 # ","
|
||||
total += VariableTruncator.calculate_json_size(str(key), depth=depth + 1) # Key as string
|
||||
total += 1 # ":"
|
||||
total += VariableTruncator.calculate_json_size(value[key], depth=depth + 1)
|
||||
return total
|
||||
elif isinstance(value, File):
|
||||
return VariableTruncator.calculate_json_size(value.model_dump(), depth=depth + 1)
|
||||
else:
|
||||
raise UnknownTypeError(f"got unknown type {type(value)}")
|
||||
match value:
|
||||
case str():
|
||||
# Ideally, the size of strings should be calculated based on their utf-8 encoded length.
|
||||
# However, this adds complexity as we would need to compute encoded sizes consistently
|
||||
# throughout the code. Therefore, we approximate the size using the string's length.
|
||||
# Rough estimate: number of characters, plus 2 for quotes
|
||||
return len(value) + 2
|
||||
case bool():
|
||||
return 4 if value else 5 # "true" or "false"
|
||||
case int() | float():
|
||||
return len(str(value))
|
||||
case None:
|
||||
return 4 # "null"
|
||||
case list():
|
||||
# Size = sum of elements + separators + brackets
|
||||
total = 2 # "[]"
|
||||
for i, item in enumerate(value):
|
||||
if i > 0:
|
||||
total += 1 # ","
|
||||
total += VariableTruncator.calculate_json_size(item, depth=depth + 1)
|
||||
return total
|
||||
case dict():
|
||||
# Size = sum of keys + values + separators + brackets
|
||||
total = 2 # "{}"
|
||||
for index, key in enumerate(value.keys()):
|
||||
if index > 0:
|
||||
total += 1 # ","
|
||||
total += VariableTruncator.calculate_json_size(str(key), depth=depth + 1) # Key as string
|
||||
total += 1 # ":"
|
||||
total += VariableTruncator.calculate_json_size(value[key], depth=depth + 1)
|
||||
return total
|
||||
case File():
|
||||
return VariableTruncator.calculate_json_size(value.model_dump(), depth=depth + 1)
|
||||
case _:
|
||||
raise UnknownTypeError(f"got unknown type {type(value)}")
|
||||
|
||||
def _truncate_string(self, value: str, target_size: int) -> _PartResult[str]:
|
||||
if (size := self.calculate_json_size(value)) < target_size:
|
||||
@ -419,22 +421,23 @@ class VariableTruncator(BaseTruncator):
|
||||
target_size: int,
|
||||
) -> _PartResult[Any]:
|
||||
"""Truncate a value within an object to fit within budget."""
|
||||
if isinstance(val, UpdatedVariable):
|
||||
# TODO(Workflow): push UpdatedVariable normalization closer to its producer.
|
||||
return self._truncate_object(val.model_dump(), target_size)
|
||||
elif isinstance(val, str):
|
||||
return self._truncate_string(val, target_size)
|
||||
elif isinstance(val, list):
|
||||
return self._truncate_array(val, target_size)
|
||||
elif isinstance(val, dict):
|
||||
return self._truncate_object(val, target_size)
|
||||
elif isinstance(val, File):
|
||||
# File objects should not be truncated, return as-is
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
elif val is None or isinstance(val, (bool, int, float)):
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
else:
|
||||
raise AssertionError("this statement should be unreachable.")
|
||||
match val:
|
||||
case UpdatedVariable():
|
||||
# TODO(Workflow): push UpdatedVariable normalization closer to its producer.
|
||||
return self._truncate_object(val.model_dump(), target_size)
|
||||
case str():
|
||||
return self._truncate_string(val, target_size)
|
||||
case list():
|
||||
return self._truncate_array(val, target_size)
|
||||
case dict():
|
||||
return self._truncate_object(val, target_size)
|
||||
case File():
|
||||
# File objects should not be truncated, return as-is
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
case None | bool() | int() | float():
|
||||
return _PartResult(val, self.calculate_json_size(val), False)
|
||||
case _:
|
||||
raise AssertionError("this statement should be unreachable.")
|
||||
|
||||
|
||||
class DummyVariableTruncator(BaseTruncator):
|
||||
|
||||
@ -47,6 +47,7 @@ def _create_recommended_app(
|
||||
*,
|
||||
app_id: str,
|
||||
category: str = "chat",
|
||||
categories: list[str] | None = None,
|
||||
language: str = "en-US",
|
||||
is_listed: bool = True,
|
||||
position: int = 1,
|
||||
@ -57,6 +58,7 @@ def _create_recommended_app(
|
||||
copyright="copy",
|
||||
privacy_policy="pp",
|
||||
category=category,
|
||||
categories=[category] if categories is None else categories,
|
||||
language=language,
|
||||
is_listed=is_listed,
|
||||
position=position,
|
||||
@ -113,6 +115,53 @@ class TestFetchRecommendedAppsFromDb:
|
||||
assert "assistant" in result["categories"]
|
||||
assert "writing" in result["categories"]
|
||||
|
||||
def test_returns_multiple_categories_for_one_app(
|
||||
self, flask_app_with_containers, db_session_with_containers: Session
|
||||
):
|
||||
tenant_id = str(uuid4())
|
||||
created_app = _create_app(db_session_with_containers, tenant_id=tenant_id)
|
||||
_create_site(db_session_with_containers, app_id=created_app.id)
|
||||
_create_recommended_app(
|
||||
db_session_with_containers,
|
||||
app_id=created_app.id,
|
||||
category="writing",
|
||||
categories=["writing", "assistant"],
|
||||
)
|
||||
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
result = DatabaseRecommendAppRetrieval.fetch_recommended_apps_from_db("en-US")
|
||||
|
||||
recommended_app = next(item for item in result["recommended_apps"] if item["app_id"] == created_app.id)
|
||||
assert recommended_app["categories"] == ["writing", "assistant"]
|
||||
assert "writing" in result["categories"]
|
||||
assert "assistant" in result["categories"]
|
||||
|
||||
def test_ignores_legacy_category_when_categories_are_empty(
|
||||
self,
|
||||
flask_app_with_containers,
|
||||
db_session_with_containers: Session,
|
||||
):
|
||||
legacy_category = f"legacy-empty-{uuid4()}"
|
||||
tenant_id = str(uuid4())
|
||||
created_app = _create_app(db_session_with_containers, tenant_id=tenant_id)
|
||||
_create_site(db_session_with_containers, app_id=created_app.id)
|
||||
_create_recommended_app(
|
||||
db_session_with_containers,
|
||||
app_id=created_app.id,
|
||||
category=legacy_category,
|
||||
categories=[],
|
||||
)
|
||||
|
||||
db_session_with_containers.expire_all()
|
||||
|
||||
result = DatabaseRecommendAppRetrieval.fetch_recommended_apps_from_db("en-US")
|
||||
|
||||
recommended_app = next(item for item in result["recommended_apps"] if item["app_id"] == created_app.id)
|
||||
assert "category" not in recommended_app
|
||||
assert recommended_app["categories"] == []
|
||||
assert legacy_category not in result["categories"]
|
||||
|
||||
def test_falls_back_to_default_language_when_empty(
|
||||
self, flask_app_with_containers, db_session_with_containers: Session
|
||||
):
|
||||
|
||||
@ -114,8 +114,8 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch):
|
||||
"pool_recycle": 3600,
|
||||
"pool_size": 30,
|
||||
"pool_use_lifo": False,
|
||||
"pool_reset_on_return": None,
|
||||
"pool_timeout": 30,
|
||||
"pool_reset_on_return": "rollback",
|
||||
}
|
||||
|
||||
assert config["CONSOLE_WEB_URL"] == "https://example.com"
|
||||
|
||||
@ -126,7 +126,7 @@ class TestRecommendedAppResponseModels:
|
||||
},
|
||||
"app_id": "app-1",
|
||||
"description": "desc",
|
||||
"category": "cat",
|
||||
"categories": ["cat", "other"],
|
||||
"position": 1,
|
||||
"is_listed": True,
|
||||
"can_trial": False,
|
||||
@ -137,4 +137,5 @@ class TestRecommendedAppResponseModels:
|
||||
).model_dump(mode="json")
|
||||
|
||||
assert response["recommended_apps"][0]["app_id"] == "app-1"
|
||||
assert response["recommended_apps"][0]["categories"] == ["cat", "other"]
|
||||
assert response["categories"] == ["cat"]
|
||||
|
||||
@ -407,18 +407,18 @@ def test_update_app_tracing_config_success(mock_db):
|
||||
def test_get_app_tracing_config_errors_when_missing(mock_db):
|
||||
mock_db.get.return_value = None
|
||||
with pytest.raises(ValueError, match="App not found"):
|
||||
OpsTraceManager.get_app_tracing_config("app")
|
||||
OpsTraceManager.get_app_tracing_config("app", mock_db)
|
||||
|
||||
|
||||
def test_get_app_tracing_config_returns_defaults(mock_db):
|
||||
mock_db.get.return_value = SimpleNamespace(tracing=None)
|
||||
assert OpsTraceManager.get_app_tracing_config("app-id") == {"enabled": False, "tracing_provider": None}
|
||||
assert OpsTraceManager.get_app_tracing_config("app-id", mock_db) == {"enabled": False, "tracing_provider": None}
|
||||
|
||||
|
||||
def test_get_app_tracing_config_returns_payload(mock_db):
|
||||
payload = {"enabled": True, "tracing_provider": "dummy"}
|
||||
mock_db.get.return_value = SimpleNamespace(tracing=json.dumps(payload))
|
||||
assert OpsTraceManager.get_app_tracing_config("app-id") == payload
|
||||
assert OpsTraceManager.get_app_tracing_config("app-id", mock_db) == payload
|
||||
|
||||
|
||||
def test_check_and_project_helpers(monkeypatch):
|
||||
|
||||
@ -0,0 +1,26 @@
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
from services.recommend_app.category_order import get_explore_app_category_order, order_categories
|
||||
|
||||
|
||||
@patch("services.recommend_app.category_order.redis_client.get")
|
||||
def test_get_explore_app_category_order_returns_redis_list(mock_get):
|
||||
mock_get.return_value = json.dumps(["C", "A", "B"]).encode()
|
||||
|
||||
assert get_explore_app_category_order("en-US") == ["C", "A", "B"]
|
||||
mock_get.assert_called_once_with("explore:apps:category_order:en-US")
|
||||
|
||||
|
||||
@patch("services.recommend_app.category_order.redis_client.get")
|
||||
def test_order_categories_uses_redis_order_as_source_of_truth(mock_get):
|
||||
mock_get.return_value = json.dumps(["C", "A", "B"]).encode()
|
||||
|
||||
assert order_categories({"A", "B", "C", "D"}, "en-US") == ["C", "A", "B"]
|
||||
|
||||
|
||||
@patch("services.recommend_app.category_order.redis_client.get")
|
||||
def test_order_categories_falls_back_to_sorted_categories_without_redis_order(mock_get):
|
||||
mock_get.return_value = None
|
||||
|
||||
assert order_categories({"B", "A", "C"}, "en-US") == ["A", "B", "C"]
|
||||
@ -93,10 +93,16 @@ BASE_API_AND_DOCKER_COMPOSE_CONFIG_SET_DIFF: frozenset[str] = frozenset(
|
||||
|
||||
API_CONFIG_SET = set(dotenv_values(Path("api") / Path(".env.example")).keys())
|
||||
DOCKER_CONFIG_SET = set(dotenv_values(Path("docker") / Path(".env.example")).keys())
|
||||
DOCKER_COMPOSE_CONFIG_SET = set()
|
||||
DOCKER_COMPOSE_CONFIG_SET = set(DOCKER_CONFIG_SET)
|
||||
|
||||
with open(Path("docker") / Path("docker-compose.yaml")) as f:
|
||||
DOCKER_COMPOSE_CONFIG_SET = set(yaml.safe_load(f.read())["x-shared-env"].keys())
|
||||
# Read environment variables from the split env files used by docker-compose
|
||||
# Walk through all .env.example files in subdirectories (per-module structure)
|
||||
envs_dir = Path("docker") / Path("envs")
|
||||
if envs_dir.exists():
|
||||
for env_file_path in envs_dir.rglob("*.env.example"):
|
||||
env_keys = set(dotenv_values(env_file_path).keys())
|
||||
DOCKER_CONFIG_SET.update(env_keys)
|
||||
DOCKER_COMPOSE_CONFIG_SET.update(env_keys)
|
||||
|
||||
|
||||
def test_yaml_config():
|
||||
|
||||
@ -1,51 +0,0 @@
|
||||
# ------------------------------------------------------------------
|
||||
# Minimal defaults for Docker Compose deployments.
|
||||
#
|
||||
# Keep local changes in .env. Use .env.example as the full reference
|
||||
# for advanced and service-specific settings.
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
# Public URLs used when Dify generates links. Change these together when
|
||||
# exposing Dify under another hostname, IP address, or port.
|
||||
CONSOLE_WEB_URL=http://localhost
|
||||
SERVICE_API_URL=http://localhost
|
||||
APP_WEB_URL=http://localhost
|
||||
FILES_URL=http://localhost
|
||||
INTERNAL_FILES_URL=http://api:5001
|
||||
TRIGGER_URL=http://localhost
|
||||
ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
|
||||
NEXT_PUBLIC_SOCKET_URL=ws://localhost
|
||||
EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
|
||||
EXPOSE_PLUGIN_DEBUGGING_PORT=5003
|
||||
|
||||
# Built-in metadata database defaults.
|
||||
DB_TYPE=postgresql
|
||||
DB_USERNAME=postgres
|
||||
DB_PASSWORD=difyai123456
|
||||
DB_HOST=db_postgres
|
||||
DB_PORT=5432
|
||||
DB_DATABASE=dify
|
||||
|
||||
# Built-in Redis defaults.
|
||||
REDIS_HOST=redis
|
||||
REDIS_PORT=6379
|
||||
REDIS_PASSWORD=difyai123456
|
||||
|
||||
# Default file storage.
|
||||
STORAGE_TYPE=opendal
|
||||
OPENDAL_SCHEME=fs
|
||||
OPENDAL_FS_ROOT=storage
|
||||
|
||||
# Default vector database.
|
||||
VECTOR_STORE=weaviate
|
||||
|
||||
# Internal service authentication. Paired values must match.
|
||||
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
|
||||
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
|
||||
|
||||
# Host ports.
|
||||
EXPOSE_NGINX_PORT=80
|
||||
EXPOSE_NGINX_SSL_PORT=443
|
||||
|
||||
# Docker Compose profiles for bundled services.
|
||||
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
|
||||
1588
docker/.env.example
1588
docker/.env.example
File diff suppressed because it is too large
Load Diff
3
docker/.gitignore
vendored
Normal file
3
docker/.gitignore
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
# Ignore actual .env files (keep only .env.example files in git)
|
||||
*.env
|
||||
!*.env.example
|
||||
@ -7,29 +7,31 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
|
||||
- **Certbot Container**: `docker-compose.yaml` now contains `certbot` for managing SSL certificates. This container automatically renews certificates and ensures secure HTTPS connections.\
|
||||
For more information, refer `docker/certbot/README.md`.
|
||||
|
||||
- **Persistent Environment Variables**: Default environment variables are managed through `.env.default`, while local overrides are stored in `.env`, ensuring that your configurations persist across deployments.
|
||||
- **Persistent Environment Variables**: Essential startup defaults are provided in `.env.example`, while local values are stored in `.env`, ensuring that your configurations persist across deployments.
|
||||
|
||||
> What is `.env`? </br> </br>
|
||||
> The `.env` file is a local override file. Keep it small by adding only the values that differ from `.env.default`. Use `.env.example` as the full reference when you need advanced configuration.
|
||||
> The `.env` file is the local startup file. Copy it from `.env.example` for a default deployment. Optional advanced settings live in `envs/*.env.example` files.
|
||||
|
||||
- **Unified Vector Database Services**: All vector database services are now managed from a single Docker Compose file `docker-compose.yaml`. You can switch between different vector databases by setting the `VECTOR_STORE` environment variable in your `.env` file.
|
||||
|
||||
- **Local .env Overrides**: The `dify-compose` and `dify-compose.ps1` wrappers create `.env` if it is missing and generate a persistent `SECRET_KEY` for this deployment.
|
||||
|
||||
### How to Deploy Dify with `docker-compose.yaml`
|
||||
|
||||
1. **Prerequisites**: Ensure Docker and Docker Compose are installed on your system.
|
||||
1. **Environment Setup**:
|
||||
- Navigate to the `docker` directory.
|
||||
- No copy step is required. The `dify-compose` wrappers create `.env` if it is missing and write a generated `SECRET_KEY` to it.
|
||||
- When prompted on first run, press Enter to use the default deployment, or answer `y` to stop and edit `.env` first.
|
||||
- Customize `.env` only when you need to override defaults from `.env.default`. Refer to `.env.example` for the full list of available variables.
|
||||
- Copy `.env.example` to `.env`.
|
||||
- Customize `.env` when you need to change essential startup defaults. Copy optional files from `envs/` without the `.example` suffix when you need advanced settings.
|
||||
- **Optional (for advanced deployments)**:
|
||||
If you maintain a full `.env` file copied from `.env.example`, you may use the environment synchronization tool to keep it aligned with the latest `.env.example` updates while preserving your custom settings.
|
||||
See the [Environment Variables Synchronization](#environment-variables-synchronization) section below.
|
||||
1. **Running the Services**:
|
||||
- Execute `./dify-compose up -d` from the `docker` directory to start the services. On Windows PowerShell, run `.\dify-compose.ps1 up -d`.
|
||||
- Execute `docker compose up -d` from the `docker` directory to start the services.
|
||||
- To specify a vector database, set the `VECTOR_STORE` variable in your `.env` file to your desired vector database service, such as `milvus`, `weaviate`, or `opensearch`.
|
||||
```bash
|
||||
cp .env.example .env
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
1. **SSL Certificate Setup**:
|
||||
- Refer `docker/certbot/README.md` to set up SSL certificates using Certbot.
|
||||
1. **OpenTelemetry Collector Setup**:
|
||||
@ -41,7 +43,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
|
||||
1. **Middleware Setup**:
|
||||
- Use the `docker-compose.middleware.yaml` for setting up essential middleware services like databases and caches.
|
||||
- Navigate to the `docker` directory.
|
||||
- Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file).
|
||||
- Ensure the `middleware.env` file is created by running `cp envs/middleware.env.example middleware.env` (refer to the `envs/middleware.env.example` file).
|
||||
1. **Running Middleware Services**:
|
||||
- Navigate to the `docker` directory.
|
||||
- Execute `docker compose --env-file middleware.env -f docker-compose.middleware.yaml -p dify up -d` to start PostgreSQL/MySQL (per `DB_TYPE`) plus the bundled Weaviate instance.
|
||||
@ -58,13 +60,13 @@ For users migrating from the `docker-legacy` setup:
|
||||
1. **Data Migration**:
|
||||
- Ensure that data from services like databases and caches is backed up and migrated appropriately to the new structure if necessary.
|
||||
|
||||
### Overview of `.env.default`, `.env`, and `.env.example`
|
||||
### Overview of `.env`, `.env.example`, and `envs/`
|
||||
|
||||
- `.env.default` contains the minimal default configuration for Docker Compose deployments.
|
||||
- `.env` contains the generated `SECRET_KEY` plus any local overrides.
|
||||
- `.env.example` is the full reference for advanced configuration.
|
||||
- `.env.example` contains the essential default configuration for Docker Compose deployments.
|
||||
- `.env` contains local startup values copied from `.env.example` and any local changes.
|
||||
- `envs/*.env.example` files contain optional advanced configuration grouped by theme.
|
||||
|
||||
The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary environment file, append paired internal service keys when needed, and remove the temporary file after Docker Compose starts.
|
||||
Docker Compose reads `envs/*.env` files when present, then reads `.env` last so values in `.env` take precedence.
|
||||
|
||||
#### Key Modules and Customization
|
||||
|
||||
@ -74,7 +76,7 @@ The `dify-compose` wrappers merge `.env.default` and `.env` into a temporary env
|
||||
|
||||
#### Other notable variables
|
||||
|
||||
The `.env.example` file provided in the Docker setup is extensive and covers a wide range of configuration options. It is structured into several sections, each pertaining to different aspects of the application and its services. Here are some of the key sections and variables:
|
||||
The root `.env.example` file contains the essential startup settings. Optional and provider-specific settings are grouped in `envs/*.env.example` files. Here are some of the key sections and variables:
|
||||
|
||||
1. **Common Variables**:
|
||||
|
||||
@ -102,7 +104,7 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w
|
||||
|
||||
1. **Storage Configuration**:
|
||||
|
||||
- `STORAGE_TYPE`, `S3_BUCKET_NAME`, `AZURE_BLOB_ACCOUNT_NAME`: Settings for file storage options like local, S3, Azure Blob, etc.
|
||||
- `STORAGE_TYPE`, `OPENDAL_SCHEME`, `OPENDAL_FS_ROOT`: Default local file storage settings. Optional storage backends are configured from the files under `envs/`.
|
||||
|
||||
1. **Vector Database Configuration**:
|
||||
|
||||
@ -124,11 +126,11 @@ The `.env.example` file provided in the Docker setup is extensive and covers a w
|
||||
|
||||
### Environment Variables Synchronization
|
||||
|
||||
When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.default` or `.env.example`.
|
||||
When upgrading Dify or pulling the latest changes, new environment variables may be introduced in `.env.example` or the optional files under `envs/`.
|
||||
|
||||
If you use the default override-only workflow, review `.env.default` and add only the values you need to override to `.env`.
|
||||
If you use the default workflow, review `.env.example` and keep your `.env` aligned with essential startup values.
|
||||
|
||||
If you maintain a full `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.
|
||||
If you maintain a customized `.env` file copied from `.env.example`, an optional environment variables synchronization tool is provided.
|
||||
|
||||
> This tool performs a **one-way synchronization** from `.env.example` to `.env`.
|
||||
> Existing values in `.env` are never overwritten automatically.
|
||||
|
||||
@ -1,334 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
DEFAULT_ENV_FILE=".env.default"
|
||||
USER_ENV_FILE=".env"
|
||||
|
||||
log() {
|
||||
printf '%s\n' "$*" >&2
|
||||
}
|
||||
|
||||
die() {
|
||||
printf 'Error: %s\n' "$*" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
detect_compose() {
|
||||
if docker compose version >/dev/null 2>&1; then
|
||||
COMPOSE_CMD=(docker compose)
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v docker-compose >/dev/null 2>&1; then
|
||||
COMPOSE_CMD=(docker-compose)
|
||||
return
|
||||
fi
|
||||
|
||||
die "Docker Compose is not available. Install Docker Compose, then run this command again."
|
||||
}
|
||||
|
||||
generate_secret_key() {
|
||||
if command -v openssl >/dev/null 2>&1; then
|
||||
openssl rand -base64 42
|
||||
return
|
||||
fi
|
||||
|
||||
if command -v dd >/dev/null 2>&1 && command -v base64 >/dev/null 2>&1; then
|
||||
dd if=/dev/urandom bs=42 count=1 2>/dev/null | base64 | tr -d '\n'
|
||||
printf '\n'
|
||||
return
|
||||
fi
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
ensure_env_files() {
|
||||
[[ -f "$DEFAULT_ENV_FILE" ]] || die "$DEFAULT_ENV_FILE is missing."
|
||||
|
||||
if [[ -f "$USER_ENV_FILE" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
: >"$USER_ENV_FILE"
|
||||
|
||||
if [[ ! -t 0 ]]; then
|
||||
log "Created $USER_ENV_FILE for local overrides."
|
||||
return
|
||||
fi
|
||||
|
||||
printf 'Created %s for local overrides.\n' "$USER_ENV_FILE"
|
||||
printf 'Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N] '
|
||||
read -r answer
|
||||
|
||||
case "${answer:-}" in
|
||||
y | Y | yes | YES | Yes)
|
||||
cat <<'EOF'
|
||||
Edit .env with the settings you want to override, using .env.example as the full reference.
|
||||
Run ./dify-compose up -d again when you are ready.
|
||||
EOF
|
||||
exit 0
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
user_env_value() {
|
||||
local key="$1"
|
||||
awk -F= -v target="$key" '
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
|
||||
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
|
||||
value = substr(value, 2, length(value) - 2)
|
||||
}
|
||||
result = value
|
||||
}
|
||||
}
|
||||
END { print result }
|
||||
' "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
set_user_env_value() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
local temp_file
|
||||
|
||||
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-env.XXXXXX")"
|
||||
awk -F= -v target="$key" -v replacement="$key=$value" '
|
||||
BEGIN { replaced = 0 }
|
||||
/^[[:space:]]*#/ || !/=/{ print; next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
replaced = 1
|
||||
}
|
||||
next
|
||||
}
|
||||
print
|
||||
}
|
||||
END {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
}
|
||||
}
|
||||
' "$USER_ENV_FILE" >"$temp_file"
|
||||
mv "$temp_file" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
ensure_secret_key() {
|
||||
local current_secret_key
|
||||
local secret_key
|
||||
|
||||
current_secret_key="$(user_env_value SECRET_KEY)"
|
||||
if [[ -n "$current_secret_key" ]]; then
|
||||
return
|
||||
fi
|
||||
|
||||
secret_key="$(generate_secret_key)" || die "Unable to generate SECRET_KEY. Install openssl or configure SECRET_KEY in .env."
|
||||
set_user_env_value SECRET_KEY "$secret_key"
|
||||
log "Generated SECRET_KEY in $USER_ENV_FILE."
|
||||
}
|
||||
|
||||
env_value() {
|
||||
local key="$1"
|
||||
awk -F= -v target="$key" '
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", value)
|
||||
if ((value ~ /^".*"$/) || (value ~ /^'\''.*'\''$/)) {
|
||||
value = substr(value, 2, length(value) - 2)
|
||||
}
|
||||
result = value
|
||||
}
|
||||
}
|
||||
END { print result }
|
||||
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
user_overrides() {
|
||||
local key="$1"
|
||||
grep -Eq "^[[:space:]]*${key}[[:space:]]*=" "$USER_ENV_FILE"
|
||||
}
|
||||
|
||||
write_merged_env() {
|
||||
awk '
|
||||
function trim(s) {
|
||||
sub(/^[[:space:]]+/, "", s)
|
||||
sub(/[[:space:]]+$/, "", s)
|
||||
return s
|
||||
}
|
||||
|
||||
/^[[:space:]]*#/ || !/=/{ next }
|
||||
|
||||
{
|
||||
key = $0
|
||||
sub(/=.*/, "", key)
|
||||
key = trim(key)
|
||||
if (key == "") {
|
||||
next
|
||||
}
|
||||
|
||||
value = substr($0, index($0, "=") + 1)
|
||||
value = trim(value)
|
||||
|
||||
if (!(key in seen)) {
|
||||
order[++count] = key
|
||||
seen[key] = 1
|
||||
}
|
||||
|
||||
values[key] = value
|
||||
}
|
||||
|
||||
END {
|
||||
for (i = 1; i <= count; i++) {
|
||||
key = order[i]
|
||||
print key "=" values[key]
|
||||
}
|
||||
}
|
||||
' "$DEFAULT_ENV_FILE" "$USER_ENV_FILE" >"$MERGED_ENV_FILE"
|
||||
}
|
||||
|
||||
set_merged_env_value() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
local temp_file
|
||||
|
||||
temp_file="$(mktemp "${TMPDIR:-/tmp}/dify-compose-env.XXXXXX")"
|
||||
awk -F= -v target="$key" -v replacement="$key=$value" '
|
||||
BEGIN { replaced = 0 }
|
||||
/^[[:space:]]*#/ || !/=/{ print; next }
|
||||
{
|
||||
key = $1
|
||||
gsub(/^[[:space:]]+|[[:space:]]+$/, "", key)
|
||||
if (key == target) {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
replaced = 1
|
||||
}
|
||||
next
|
||||
}
|
||||
print
|
||||
}
|
||||
END {
|
||||
if (!replaced) {
|
||||
print replacement
|
||||
}
|
||||
}
|
||||
' "$MERGED_ENV_FILE" >"$temp_file"
|
||||
mv "$temp_file" "$MERGED_ENV_FILE"
|
||||
}
|
||||
|
||||
set_if_not_overridden() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
|
||||
if user_overrides "$key"; then
|
||||
return
|
||||
fi
|
||||
|
||||
set_merged_env_value "$key" "$value"
|
||||
}
|
||||
|
||||
metadata_db_host() {
|
||||
case "$1" in
|
||||
mysql) printf 'db_mysql' ;;
|
||||
postgresql | '') printf 'db_postgres' ;;
|
||||
*) printf '%s' "$(env_value DB_HOST)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
metadata_db_port() {
|
||||
case "$1" in
|
||||
mysql) printf '3306' ;;
|
||||
postgresql | '') printf '5432' ;;
|
||||
*) printf '%s' "$(env_value DB_PORT)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
metadata_db_user() {
|
||||
case "$1" in
|
||||
mysql) printf 'root' ;;
|
||||
postgresql | '') printf 'postgres' ;;
|
||||
*) printf '%s' "$(env_value DB_USERNAME)" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
build_merged_env() {
|
||||
MERGED_ENV_FILE="$(mktemp "${TMPDIR:-/tmp}/dify-compose.XXXXXX")"
|
||||
trap 'rm -f "$MERGED_ENV_FILE"' EXIT
|
||||
|
||||
write_merged_env
|
||||
|
||||
local db_type
|
||||
local redis_host
|
||||
local redis_port
|
||||
local redis_username
|
||||
local redis_password
|
||||
local redis_auth
|
||||
local code_execution_api_key
|
||||
local weaviate_api_key
|
||||
|
||||
db_type="$(env_value DB_TYPE)"
|
||||
|
||||
set_if_not_overridden DB_HOST "$(metadata_db_host "$db_type")"
|
||||
set_if_not_overridden DB_PORT "$(metadata_db_port "$db_type")"
|
||||
set_if_not_overridden DB_USERNAME "$(metadata_db_user "$db_type")"
|
||||
|
||||
if ! user_overrides CELERY_BROKER_URL; then
|
||||
redis_host="$(env_value REDIS_HOST)"
|
||||
redis_port="$(env_value REDIS_PORT)"
|
||||
redis_username="$(env_value REDIS_USERNAME)"
|
||||
redis_password="$(env_value REDIS_PASSWORD)"
|
||||
redis_auth=""
|
||||
|
||||
if [[ -n "$redis_username" && -n "$redis_password" ]]; then
|
||||
redis_auth="${redis_username}:${redis_password}@"
|
||||
elif [[ -n "$redis_password" ]]; then
|
||||
redis_auth=":${redis_password}@"
|
||||
elif [[ -n "$redis_username" ]]; then
|
||||
redis_auth="${redis_username}@"
|
||||
fi
|
||||
|
||||
set_merged_env_value CELERY_BROKER_URL "redis://${redis_auth}${redis_host:-redis}:${redis_port:-6379}/1"
|
||||
fi
|
||||
|
||||
if ! user_overrides SANDBOX_API_KEY; then
|
||||
code_execution_api_key="$(env_value CODE_EXECUTION_API_KEY)"
|
||||
set_if_not_overridden SANDBOX_API_KEY "${code_execution_api_key:-dify-sandbox}"
|
||||
fi
|
||||
|
||||
if ! user_overrides WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS; then
|
||||
weaviate_api_key="$(env_value WEAVIATE_API_KEY)"
|
||||
set_if_not_overridden WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS \
|
||||
"${weaviate_api_key:-WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih}"
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
detect_compose
|
||||
ensure_env_files
|
||||
ensure_secret_key
|
||||
build_merged_env
|
||||
|
||||
if [[ "$#" -eq 0 ]]; then
|
||||
set -- up -d
|
||||
fi
|
||||
|
||||
"${COMPOSE_CMD[@]}" --env-file "$MERGED_ENV_FILE" "$@"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@ -1,317 +0,0 @@
|
||||
$ErrorActionPreference = "Stop"
|
||||
Set-StrictMode -Version Latest
|
||||
|
||||
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
|
||||
Set-Location $ScriptDir
|
||||
|
||||
$DefaultEnvFile = ".env.default"
|
||||
$UserEnvFile = ".env"
|
||||
$MergedEnvFile = $null
|
||||
$Utf8NoBom = New-Object System.Text.UTF8Encoding -ArgumentList $false
|
||||
|
||||
function Write-Info {
|
||||
param([string]$Message)
|
||||
[Console]::Error.WriteLine($Message)
|
||||
}
|
||||
|
||||
function Fail {
|
||||
param([string]$Message)
|
||||
[Console]::Error.WriteLine("Error: $Message")
|
||||
exit 1
|
||||
}
|
||||
|
||||
function Test-CommandSuccess {
|
||||
param([string[]]$Command)
|
||||
|
||||
try {
|
||||
$Executable = $Command[0]
|
||||
$CommandArgs = @()
|
||||
if ($Command.Length -gt 1) {
|
||||
$CommandArgs = @($Command[1..($Command.Length - 1)])
|
||||
}
|
||||
|
||||
& $Executable @CommandArgs *> $null
|
||||
return $LASTEXITCODE -eq 0
|
||||
}
|
||||
catch {
|
||||
return $false
|
||||
}
|
||||
}
|
||||
|
||||
function Get-ComposeCommand {
|
||||
if (Test-CommandSuccess @("docker", "compose", "version")) {
|
||||
return @("docker", "compose")
|
||||
}
|
||||
|
||||
if ((Get-Command "docker-compose" -ErrorAction SilentlyContinue) -and (Test-CommandSuccess @("docker-compose", "version"))) {
|
||||
return @("docker-compose")
|
||||
}
|
||||
|
||||
Fail "Docker Compose is not available. Install Docker Compose, then run this command again."
|
||||
}
|
||||
|
||||
function New-SecretKey {
|
||||
$Bytes = New-Object byte[] 42
|
||||
$Generator = [System.Security.Cryptography.RandomNumberGenerator]::Create()
|
||||
|
||||
try {
|
||||
$Generator.GetBytes($Bytes)
|
||||
}
|
||||
finally {
|
||||
$Generator.Dispose()
|
||||
}
|
||||
|
||||
return [Convert]::ToBase64String($Bytes)
|
||||
}
|
||||
|
||||
function Ensure-EnvFiles {
|
||||
if (-not (Test-Path $DefaultEnvFile -PathType Leaf)) {
|
||||
Fail "$DefaultEnvFile is missing."
|
||||
}
|
||||
|
||||
if (Test-Path $UserEnvFile -PathType Leaf) {
|
||||
return
|
||||
}
|
||||
|
||||
New-Item -ItemType File -Path $UserEnvFile | Out-Null
|
||||
|
||||
if ([Console]::IsInputRedirected) {
|
||||
Write-Info "Created $UserEnvFile for local overrides."
|
||||
return
|
||||
}
|
||||
|
||||
Write-Info "Created $UserEnvFile for local overrides."
|
||||
$Answer = Read-Host "Do you need a custom deployment now? (Most users can press Enter to skip.) [y/N]"
|
||||
|
||||
if ($Answer -match "^(y|yes)$") {
|
||||
Write-Output "Edit .env with the settings you want to override, using .env.example as the full reference."
|
||||
Write-Output "Run .\dify-compose.ps1 up -d again when you are ready."
|
||||
exit 0
|
||||
}
|
||||
}
|
||||
|
||||
function Read-EnvFile {
|
||||
param([string]$Path)
|
||||
|
||||
$Values = [ordered]@{}
|
||||
|
||||
if (-not (Test-Path $Path -PathType Leaf)) {
|
||||
return $Values
|
||||
}
|
||||
|
||||
foreach ($Line in Get-Content -Path $Path) {
|
||||
if ($Line -match "^\s*#" -or $Line -notmatch "=") {
|
||||
continue
|
||||
}
|
||||
|
||||
$SeparatorIndex = $Line.IndexOf("=")
|
||||
$Key = $Line.Substring(0, $SeparatorIndex).Trim()
|
||||
$Value = $Line.Substring($SeparatorIndex + 1).Trim()
|
||||
|
||||
if (($Value.StartsWith('"') -and $Value.EndsWith('"')) -or ($Value.StartsWith("'") -and $Value.EndsWith("'"))) {
|
||||
$Value = $Value.Substring(1, $Value.Length - 2)
|
||||
}
|
||||
|
||||
if ($Key.Length -gt 0) {
|
||||
$Values[$Key] = $Value
|
||||
}
|
||||
}
|
||||
|
||||
return $Values
|
||||
}
|
||||
|
||||
function Set-UserEnvValue {
    # Insert or update KEY=VALUE in the user env file. The first existing
    # assignment of the key is rewritten in place; duplicate assignments are
    # dropped; a missing key is appended at the end. Output is UTF-8 (no BOM).
    param(
        [string]$Key,
        [string]$Value
    )

    $Path = [string](Resolve-Path $UserEnvFile)
    $Existing = [System.IO.File]::ReadAllLines($Path, [System.Text.Encoding]::UTF8)
    $Result = New-Object System.Collections.Generic.List[string]
    $Updated = $false

    foreach ($Current in $Existing) {
        # Comments and non-assignment lines pass straight through untouched.
        if ($Current -match "^\s*#" -or $Current -notmatch "=") {
            $Result.Add($Current)
            continue
        }

        $EqualsAt = $Current.IndexOf("=")
        $CurrentKey = $Current.Substring(0, $EqualsAt).Trim()

        if ($CurrentKey -ne $Key) {
            $Result.Add($Current)
            continue
        }

        # First occurrence wins; later duplicates of the key are discarded.
        if (-not $Updated) {
            $Result.Add("$Key=$Value")
            $Updated = $true
        }
    }

    if (-not $Updated) {
        $Result.Add("$Key=$Value")
    }

    [System.IO.File]::WriteAllLines($Path, $Result, $Utf8NoBom)
}
|
||||
|
||||
function Ensure-SecretKey {
    # Make sure the user env file carries a non-empty SECRET_KEY, generating
    # a fresh random one when absent or blank.
    $Current = Read-EnvFile $UserEnvFile

    $HasSecret = $Current.Contains("SECRET_KEY") -and $Current["SECRET_KEY"]
    if ($HasSecret) {
        return
    }

    Set-UserEnvValue "SECRET_KEY" (New-SecretKey)
    Write-Info "Generated SECRET_KEY in $UserEnvFile."
}
|
||||
|
||||
function Merge-EnvValues {
    # Merge the default env file with the user env file, preserving first-seen
    # key order. User values win on conflicting keys because that file is
    # applied second.
    $Merged = [ordered]@{}

    foreach ($File in @($DefaultEnvFile, $UserEnvFile)) {
        foreach ($Pair in (Read-EnvFile $File).GetEnumerator()) {
            $Merged[$Pair.Key] = $Pair.Value
        }
    }

    return $Merged
}
|
||||
|
||||
function User-Overrides {
    # Report whether the user env file explicitly assigns the given key
    # (i.e. contains a "KEY=" line, ignoring leading whitespace).
    param([string]$Key)

    if (-not (Test-Path $UserEnvFile -PathType Leaf)) {
        return $false
    }

    $Pattern = "^\s*$([regex]::Escape($Key))\s*="
    return [bool](Select-String -Path $UserEnvFile -Pattern $Pattern -Quiet)
}
|
||||
|
||||
function Metadata-DbHost {
    # Resolve the compose service hostname for the metadata database from
    # DB_TYPE. An empty type means the default (postgres); unknown types fall
    # back to whatever DB_HOST the merged env already holds.
    param([string]$DbType, $Values)

    if ($DbType -eq "mysql") {
        return "db_mysql"
    }
    if ($DbType -eq "postgresql" -or $DbType -eq "") {
        return "db_postgres"
    }
    return $Values["DB_HOST"]
}
|
||||
|
||||
function Metadata-DbPort {
    # Resolve the metadata database port from DB_TYPE. Empty type defaults to
    # the postgres port; unknown types fall back to the merged env's DB_PORT.
    param([string]$DbType, $Values)

    if ($DbType -eq "mysql") {
        return "3306"
    }
    if ($DbType -eq "postgresql" -or $DbType -eq "") {
        return "5432"
    }
    return $Values["DB_PORT"]
}
|
||||
|
||||
function Metadata-DbUser {
    # Resolve the metadata database username from DB_TYPE. Empty type defaults
    # to the postgres superuser; unknown types fall back to DB_USERNAME from
    # the merged env.
    param([string]$DbType, $Values)

    if ($DbType -eq "mysql") {
        return "root"
    }
    if ($DbType -eq "postgresql" -or $DbType -eq "") {
        return "postgres"
    }
    return $Values["DB_USERNAME"]
}
|
||||
|
||||
function Write-MergedEnv {
    # Serialize the merged key/value table into the temporary env file as
    # KEY=VALUE lines, written as UTF-8 without a BOM.
    param($Values)

    $Lines = New-Object System.Collections.Generic.List[string]

    foreach ($Pair in $Values.GetEnumerator()) {
        $Lines.Add("$($Pair.Key)=$($Pair.Value)")
    }

    [System.IO.File]::WriteAllLines($MergedEnvFile, $Lines, $Utf8NoBom)
}
|
||||
|
||||
function Build-MergedEnv {
    # Build the temporary --env-file handed to docker compose: merge the
    # default and user env files, then derive service-specific settings for
    # any key the user has not explicitly overridden in the user env file.
    $Values = Merge-EnvValues
    # GetTempFileName creates the file on disk; the path is stored at script
    # scope so the caller's finally block can delete it afterwards.
    $script:MergedEnvFile = [System.IO.Path]::GetTempFileName()

    # DB_TYPE drives the derived metadata-DB settings; default is postgresql.
    $DbType = if ($Values.Contains("DB_TYPE")) { $Values["DB_TYPE"] } else { "postgresql" }

    if (-not (User-Overrides "DB_HOST")) {
        $Values["DB_HOST"] = Metadata-DbHost $DbType $Values
    }

    if (-not (User-Overrides "DB_PORT")) {
        $Values["DB_PORT"] = Metadata-DbPort $DbType $Values
    }

    if (-not (User-Overrides "DB_USERNAME")) {
        $Values["DB_USERNAME"] = Metadata-DbUser $DbType $Values
    }

    # Compose the Celery broker URL from the Redis settings (redis database 1),
    # including an optional user:password@ authority section.
    if (-not (User-Overrides "CELERY_BROKER_URL")) {
        $RedisHost = if ($Values.Contains("REDIS_HOST") -and $Values["REDIS_HOST"]) { $Values["REDIS_HOST"] } else { "redis" }
        $RedisPort = if ($Values.Contains("REDIS_PORT") -and $Values["REDIS_PORT"]) { $Values["REDIS_PORT"] } else { "6379" }
        $RedisUsername = if ($Values.Contains("REDIS_USERNAME")) { $Values["REDIS_USERNAME"] } else { "" }
        $RedisPassword = if ($Values.Contains("REDIS_PASSWORD")) { $Values["REDIS_PASSWORD"] } else { "" }
        $RedisAuth = ""

        if ($RedisUsername -and $RedisPassword) {
            $RedisAuth = "${RedisUsername}:${RedisPassword}@"
        }
        elseif ($RedisPassword) {
            # Password only: redis accepts an empty username before the colon.
            $RedisAuth = ":${RedisPassword}@"
        }
        elseif ($RedisUsername) {
            $RedisAuth = "${RedisUsername}@"
        }

        $Values["CELERY_BROKER_URL"] = "redis://$RedisAuth${RedisHost}:${RedisPort}/1"
    }

    # The sandbox service must accept the same key the API uses for code execution.
    if (-not (User-Overrides "SANDBOX_API_KEY")) {
        $CodeExecutionApiKey = if ($Values.Contains("CODE_EXECUTION_API_KEY") -and $Values["CODE_EXECUTION_API_KEY"]) { $Values["CODE_EXECUTION_API_KEY"] } else { "dify-sandbox" }
        $Values["SANDBOX_API_KEY"] = $CodeExecutionApiKey
    }

    # Keep Weaviate's allowed-keys list in sync with the API key the app sends.
    if (-not (User-Overrides "WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS")) {
        $WeaviateApiKey = if ($Values.Contains("WEAVIATE_API_KEY") -and $Values["WEAVIATE_API_KEY"]) { $Values["WEAVIATE_API_KEY"] } else { "WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih" }
        $Values["WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS"] = $WeaviateApiKey
    }

    Write-MergedEnv $Values
}
|
||||
|
||||
# Entry point: prepare env files, build the merged env, then delegate to
# docker compose with the user's arguments (defaulting to "up -d").
$ComposeCommand = Get-ComposeCommand

try {
    Ensure-EnvFiles
    Ensure-SecretKey
    Build-MergedEnv

    # No arguments means a plain "up -d" deployment.
    $ComposeArgs = @($args)
    if ($ComposeArgs.Count -eq 0) {
        $ComposeArgs = @("up", "-d")
    }

    # Get-ComposeCommand may return a multi-part command (e.g. "docker",
    # "compose"); everything after the executable becomes leading arguments.
    $ComposeCommandArgs = @()
    if ($ComposeCommand.Length -gt 1) {
        $ComposeCommandArgs = @($ComposeCommand[1..($ComposeCommand.Length - 1)])
    }

    $ComposeExecutable = $ComposeCommand[0]
    # Splat both argument arrays around the merged env file option.
    & $ComposeExecutable @ComposeCommandArgs --env-file $MergedEnvFile @ComposeArgs
    # Propagate compose's exit status; finally still runs before the process exits.
    exit $LASTEXITCODE
}
finally {
    # Always remove the temporary merged env file, even on failure or exit.
    if ($MergedEnvFile -and (Test-Path $MergedEnvFile -PathType Leaf)) {
        Remove-Item -Force $MergedEnvFile
    }
}
|
||||
@ -1,4 +1,202 @@
|
||||
x-shared-env: &shared-api-worker-env
|
||||
# Shared configuration using YAML anchors and env_file
|
||||
x-shared-api-worker-config: &shared-api-worker-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/api.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
x-shared-worker-config: &shared-worker-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/worker.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
x-shared-worker-beat-config: &shared-worker-beat-config
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/worker-beat.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/weaviate.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/qdrant.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oceanbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/seekdb.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/couchbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvector.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/vastbase.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/pgvecto-rs.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/chroma.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/iris.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/oracle.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opengauss.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/myscale.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/matrixone.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/elasticsearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/opensearch.env
|
||||
required: false
|
||||
- path: ./envs/vectorstores/milvus.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/nginx.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/certbot.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/ssrf-proxy.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/etcd.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/minio.env
|
||||
required: false
|
||||
- path: ./envs/infrastructure/milvus-standalone.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
restart: always
|
||||
|
||||
services:
|
||||
# Init container to fix permissions
|
||||
init_permissions:
|
||||
@ -21,12 +219,9 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
<<: *shared-api-worker-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'api' starts the API server.
|
||||
MODE: api
|
||||
SENTRY_DSN: ${API_SENTRY_DSN:-}
|
||||
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
|
||||
@ -69,12 +264,9 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
<<: *shared-worker-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'worker' starts the Celery worker for processing all queues.
|
||||
MODE: worker
|
||||
SENTRY_DSN: ${API_SENTRY_DSN:-}
|
||||
SENTRY_TRACES_SAMPLE_RATE: ${API_SENTRY_TRACES_SAMPLE_RATE:-1.0}
|
||||
@ -115,12 +307,9 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
<<: *shared-worker-beat-config
|
||||
image: langgenius/dify-api:1.14.0
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
# Startup mode, 'worker_beat' starts the Celery beat for scheduling periodic tasks.
|
||||
MODE: beat
|
||||
depends_on:
|
||||
init_permissions:
|
||||
@ -154,6 +343,12 @@ services:
|
||||
web:
|
||||
image: langgenius/dify-web:1.14.0
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/web.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- ./.env
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
APP_API_URL: ${APP_API_URL:-}
|
||||
@ -228,7 +423,7 @@ services:
|
||||
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
|
||||
MYSQL_DATABASE: ${DB_DATABASE:-dify}
|
||||
command: >
|
||||
--max_connections=1000
|
||||
--max_connections=${MYSQL_MAX_CONNECTIONS:-1000}
|
||||
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
|
||||
--innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M}
|
||||
--innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2}
|
||||
@ -270,6 +465,12 @@ services:
|
||||
sandbox:
|
||||
image: langgenius/dify-sandbox:0.2.15
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/sandbox.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- ./.env
|
||||
environment:
|
||||
# The DifySandbox configurations
|
||||
# Make sure you are changing this key for your deployment with a strong key.
|
||||
@ -294,9 +495,24 @@ services:
|
||||
plugin_daemon:
|
||||
image: langgenius/dify-plugin-daemon:0.6.0-local
|
||||
restart: always
|
||||
env_file:
|
||||
- path: ./envs/core-services/shared.env
|
||||
required: false
|
||||
- path: ./envs/core-services/plugin-daemon.env
|
||||
required: false
|
||||
- path: ./envs/security.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-postgres.env
|
||||
required: false
|
||||
- path: ./envs/databases/db-mysql.env
|
||||
required: false
|
||||
- path: ./envs/databases/redis.env
|
||||
required: false
|
||||
- ./.env
|
||||
networks:
|
||||
- ssrf_proxy_network
|
||||
- default
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
<<: *shared-api-worker-env
|
||||
DB_DATABASE: ${DB_PLUGIN_DATABASE:-dify_plugin}
|
||||
DB_SSL_MODE: ${DB_SSL_MODE:-disable}
|
||||
SERVER_PORT: ${PLUGIN_DAEMON_PORT:-5002}
|
||||
|
||||
@ -51,7 +51,7 @@ services:
|
||||
MYSQL_ROOT_PASSWORD: ${DB_PASSWORD:-difyai123456}
|
||||
MYSQL_DATABASE: ${DB_DATABASE:-dify}
|
||||
command: >
|
||||
--max_connections=1000
|
||||
--max_connections=${MYSQL_MAX_CONNECTIONS:-1000}
|
||||
--innodb_buffer_pool_size=${MYSQL_INNODB_BUFFER_POOL_SIZE:-512M}
|
||||
--innodb_log_file_size=${MYSQL_INNODB_LOG_FILE_SIZE:-128M}
|
||||
--innodb_flush_log_at_trx_commit=${MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT:-2}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
13
docker/envs/core-services/api.env.example
Normal file
13
docker/envs/core-services/api.env.example
Normal file
@ -0,0 +1,13 @@
|
||||
# ------------------------------
|
||||
# Api Configuration
|
||||
# ------------------------------
|
||||
|
||||
MODE=api
|
||||
SENTRY_DSN=
|
||||
SENTRY_TRACES_SAMPLE_RATE=1.0
|
||||
SENTRY_PROFILES_SAMPLE_RATE=1.0
|
||||
PLUGIN_REMOTE_INSTALL_HOST=localhost
|
||||
PLUGIN_REMOTE_INSTALL_PORT=5003
|
||||
PLUGIN_MAX_PACKAGE_SIZE=52428800
|
||||
PLUGIN_DAEMON_TIMEOUT=600.0
|
||||
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
|
||||
23
docker/envs/core-services/plugin-daemon.env.example
Normal file
23
docker/envs/core-services/plugin-daemon.env.example
Normal file
@ -0,0 +1,23 @@
|
||||
# ------------------------------
|
||||
# Plugin Daemon Configuration
|
||||
# ------------------------------
|
||||
|
||||
DB_PLUGIN_DATABASE=dify_plugin
|
||||
PLUGIN_DAEMON_URL=http://plugin_daemon:5002
|
||||
PLUGIN_PPROF_ENABLED=false
|
||||
PLUGIN_DIFY_INNER_API_URL=http://api:5001
|
||||
FORCE_VERIFYING_SIGNATURE=true
|
||||
PLUGIN_STDIO_BUFFER_SIZE=1024
|
||||
PLUGIN_STDIO_MAX_BUFFER_SIZE=5242880
|
||||
PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120
|
||||
PLUGIN_MAX_EXECUTION_TIMEOUT=600
|
||||
PLUGIN_DEBUGGING_HOST=0.0.0.0
|
||||
PLUGIN_DEBUGGING_PORT=5003
|
||||
PLUGIN_DAEMON_KEY=lYkiYYT6owG+71oLerGzA7GXCgOT++6ovaezWAjpCjf+Sjc3ZtU+qUEi
|
||||
PLUGIN_DIFY_INNER_API_KEY=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
|
||||
PLUGIN_DAEMON_PORT=5002
|
||||
CELERY_WORKER_CLASS=
|
||||
PLUGIN_STORAGE_TYPE=local
|
||||
PLUGIN_STORAGE_LOCAL_ROOT=/app/storage
|
||||
PLUGIN_WORKING_PATH=/app/storage/cwd
|
||||
PLUGIN_STORAGE_OSS_BUCKET=
|
||||
17
docker/envs/core-services/sandbox.env.example
Normal file
17
docker/envs/core-services/sandbox.env.example
Normal file
@ -0,0 +1,17 @@
|
||||
# ------------------------------
|
||||
# Sandbox Configuration
|
||||
# ------------------------------
|
||||
|
||||
SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
|
||||
SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
|
||||
SANDBOX_PORT=8194
|
||||
PIP_MIRROR_URL=
|
||||
SANDBOX_API_KEY=dify-sandbox
|
||||
SANDBOX_GIN_MODE=release
|
||||
SANDBOX_WORKER_TIMEOUT=15
|
||||
SANDBOX_ENABLE_NETWORK=true
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
|
||||
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
|
||||
469
docker/envs/core-services/shared.env.example
Normal file
469
docker/envs/core-services/shared.env.example
Normal file
@ -0,0 +1,469 @@
|
||||
# ------------------------------
|
||||
# Shared API/Worker Configuration
|
||||
# ------------------------------
|
||||
|
||||
CONSOLE_WEB_URL=
|
||||
SERVICE_API_URL=
|
||||
TRIGGER_URL=http://localhost
|
||||
APP_WEB_URL=
|
||||
FILES_URL=
|
||||
INTERNAL_FILES_URL=
|
||||
LANG=C.UTF-8
|
||||
LC_ALL=C.UTF-8
|
||||
PYTHONIOENCODING=utf-8
|
||||
UV_CACHE_DIR=/tmp/.uv-cache
|
||||
CHECK_UPDATE_URL=https://updates.dify.ai
|
||||
OPENAI_API_BASE=https://api.openai.com/v1
|
||||
MIGRATION_ENABLED=true
|
||||
FILES_ACCESS_TIMEOUT=300
|
||||
ENABLE_COLLABORATION_MODE=false
|
||||
CELERY_BROKER_URL=redis://:difyai123456@redis:6379/1
|
||||
CELERY_TASK_ANNOTATIONS=null
|
||||
AZURE_BLOB_ACCOUNT_URL=https://<your_account_name>.blob.core.windows.net
|
||||
SUPABASE_URL=your-server-url
|
||||
TIDB_ON_QDRANT_URL=http://127.0.0.1
|
||||
TIDB_ON_QDRANT_API_KEY=dify
|
||||
TIDB_API_URL=http://127.0.0.1
|
||||
TIDB_IAM_API_URL=http://127.0.0.1
|
||||
TIDB_REGION=regions/aws-us-east-1
|
||||
TIDB_PROJECT_ID=dify
|
||||
TIDB_SPEND_LIMIT=100
|
||||
TENCENT_VECTOR_DB_URL=http://127.0.0.1
|
||||
TENCENT_VECTOR_DB_API_KEY=dify
|
||||
LINDORM_URL=http://localhost:30070
|
||||
LINDORM_USERNAME=admin
|
||||
UPSTASH_VECTOR_URL=https://xxx-vector.upstash.io
|
||||
UPLOAD_FILE_SIZE_LIMIT=15
|
||||
UPLOAD_FILE_BATCH_LIMIT=5
|
||||
UPLOAD_FILE_EXTENSION_BLACKLIST=
|
||||
SINGLE_CHUNK_ATTACHMENT_LIMIT=10
|
||||
IMAGE_FILE_BATCH_LIMIT=10
|
||||
ATTACHMENT_IMAGE_FILE_SIZE_LIMIT=2
|
||||
ATTACHMENT_IMAGE_DOWNLOAD_TIMEOUT=60
|
||||
ETL_TYPE=dify
|
||||
UNSTRUCTURED_API_URL=
|
||||
MULTIMODAL_SEND_FORMAT=base64
|
||||
UPLOAD_IMAGE_FILE_SIZE_LIMIT=10
|
||||
UPLOAD_VIDEO_FILE_SIZE_LIMIT=100
|
||||
UPLOAD_AUDIO_FILE_SIZE_LIMIT=50
|
||||
API_SENTRY_DSN=
|
||||
API_SENTRY_TRACES_SAMPLE_RATE=1.0
|
||||
API_SENTRY_PROFILES_SAMPLE_RATE=1.0
|
||||
WEB_SENTRY_DSN=
|
||||
PLUGIN_SENTRY_ENABLED=false
|
||||
PLUGIN_SENTRY_DSN=
|
||||
NOTION_INTEGRATION_TYPE=public
|
||||
RESEND_API_URL=https://api.resend.com
|
||||
SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
|
||||
SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
|
||||
PGDATA=/var/lib/postgresql/data/pgdata
|
||||
PLUGIN_MAX_PACKAGE_SIZE=52428800
|
||||
PLUGIN_MODEL_SCHEMA_CACHE_TTL=3600
|
||||
ENDPOINT_URL_TEMPLATE=http://localhost/e/{hook_id}
|
||||
LOG_LEVEL=INFO
|
||||
LOG_OUTPUT_FORMAT=text
|
||||
LOG_FILE=/app/logs/server.log
|
||||
LOG_FILE_MAX_SIZE=20
|
||||
LOG_FILE_BACKUP_COUNT=5
|
||||
LOG_DATEFORMAT=%Y-%m-%d %H:%M:%S
|
||||
LOG_TZ=UTC
|
||||
DEBUG=false
|
||||
FLASK_DEBUG=false
|
||||
ENABLE_REQUEST_LOGGING=False
|
||||
WORKFLOW_LOG_CLEANUP_ENABLED=false
|
||||
WORKFLOW_LOG_RETENTION_DAYS=30
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
|
||||
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=
|
||||
EXPOSE_PLUGIN_DEBUGGING_HOST=localhost
|
||||
EXPOSE_PLUGIN_DEBUGGING_PORT=5003
|
||||
DEPLOY_ENV=PRODUCTION
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||
REFRESH_TOKEN_EXPIRE_DAYS=30
|
||||
APP_DEFAULT_ACTIVE_REQUESTS=0
|
||||
APP_MAX_ACTIVE_REQUESTS=0
|
||||
APP_MAX_EXECUTION_TIME=1200
|
||||
DIFY_BIND_ADDRESS=0.0.0.0
|
||||
DIFY_PORT=5001
|
||||
SERVER_WORKER_AMOUNT=1
|
||||
SERVER_WORKER_CLASS=gevent
|
||||
SERVER_WORKER_CONNECTIONS=10
|
||||
CELERY_SENTINEL_PASSWORD=
|
||||
S3_ACCESS_KEY=
|
||||
S3_SECRET_KEY=
|
||||
ARCHIVE_STORAGE_ACCESS_KEY=
|
||||
ARCHIVE_STORAGE_SECRET_KEY=
|
||||
AZURE_BLOB_ACCOUNT_KEY=difyai
|
||||
ALIYUN_OSS_ACCESS_KEY=your-access-key
|
||||
ALIYUN_OSS_SECRET_KEY=your-secret-key
|
||||
TENCENT_COS_SECRET_KEY=your-secret-key
|
||||
TENCENT_COS_SECRET_ID=your-secret-id
|
||||
OCI_ACCESS_KEY=your-access-key
|
||||
OCI_SECRET_KEY=your-secret-key
|
||||
HUAWEI_OBS_SECRET_KEY=your-secret-key
|
||||
HUAWEI_OBS_ACCESS_KEY=your-access-key
|
||||
VOLCENGINE_TOS_SECRET_KEY=your-secret-key
|
||||
VOLCENGINE_TOS_ACCESS_KEY=your-access-key
|
||||
BAIDU_OBS_SECRET_KEY=your-secret-key
|
||||
BAIDU_OBS_ACCESS_KEY=your-access-key
|
||||
SUPABASE_API_KEY=your-access-key
|
||||
ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
|
||||
RELYT_PASSWORD=difyai123456
|
||||
LINDORM_PASSWORD=admin
|
||||
LINDORM_USING_UGC=True
|
||||
LINDORM_QUERY_TIMEOUT=1
|
||||
HUAWEI_CLOUD_PASSWORD=admin
|
||||
UPSTASH_VECTOR_TOKEN=dify
|
||||
TABLESTORE_ACCESS_KEY_ID=xxx
|
||||
TABLESTORE_ACCESS_KEY_SECRET=xxx
|
||||
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
|
||||
CLICKZETTA_PASSWORD=
|
||||
CLICKZETTA_INSTANCE=
|
||||
CLICKZETTA_SERVICE=api.clickzetta.com
|
||||
CLICKZETTA_WORKSPACE=quick_start
|
||||
CLICKZETTA_VCLUSTER=default_ap
|
||||
CLICKZETTA_SCHEMA=dify
|
||||
CLICKZETTA_BATCH_SIZE=100
|
||||
CLICKZETTA_ENABLE_INVERTED_INDEX=true
|
||||
CLICKZETTA_ANALYZER_TYPE=chinese
|
||||
CLICKZETTA_ANALYZER_MODE=smart
|
||||
UNSTRUCTURED_API_KEY=
|
||||
SCARF_NO_ANALYTICS=true
|
||||
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
||||
NOTION_CLIENT_SECRET=
|
||||
NOTION_CLIENT_ID=
|
||||
NOTION_INTERNAL_SECRET=
|
||||
MAIL_TYPE=resend
|
||||
MAIL_DEFAULT_SEND_FROM=
|
||||
RESEND_API_KEY=your-resend-api-key
|
||||
SMTP_SERVER=
|
||||
SMTP_PORT=465
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_USE_TLS=true
|
||||
SMTP_OPPORTUNISTIC_TLS=false
|
||||
SMTP_LOCAL_HOSTNAME=
|
||||
SENDGRID_API_KEY=
|
||||
INVITE_EXPIRY_HOURS=72
|
||||
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
|
||||
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
|
||||
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
|
||||
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
|
||||
CODE_EXECUTION_ENDPOINT=http://sandbox:8194
|
||||
CODE_EXECUTION_API_KEY=dify-sandbox
|
||||
CODE_EXECUTION_SSL_VERIFY=True
|
||||
CODE_EXECUTION_POOL_MAX_CONNECTIONS=100
|
||||
CODE_EXECUTION_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
CODE_EXECUTION_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
CODE_MAX_NUMBER=9223372036854775807
|
||||
CODE_MIN_NUMBER=-9223372036854775808
|
||||
CODE_MAX_DEPTH=5
|
||||
CODE_MAX_PRECISION=20
|
||||
CODE_MAX_STRING_LENGTH=400000
|
||||
CODE_MAX_STRING_ARRAY_LENGTH=30
|
||||
CODE_MAX_OBJECT_ARRAY_LENGTH=30
|
||||
CODE_MAX_NUMBER_ARRAY_LENGTH=1000
|
||||
CODE_EXECUTION_CONNECT_TIMEOUT=10
|
||||
CODE_EXECUTION_READ_TIMEOUT=60
|
||||
CODE_EXECUTION_WRITE_TIMEOUT=10
|
||||
TEMPLATE_TRANSFORM_MAX_LENGTH=400000
|
||||
WORKFLOW_MAX_EXECUTION_STEPS=500
|
||||
WORKFLOW_MAX_EXECUTION_TIME=1200
|
||||
WORKFLOW_CALL_MAX_DEPTH=5
|
||||
MAX_VARIABLE_SIZE=204800
|
||||
WORKFLOW_FILE_UPLOAD_LIMIT=10
|
||||
GRAPH_ENGINE_MIN_WORKERS=1
|
||||
GRAPH_ENGINE_MAX_WORKERS=10
|
||||
GRAPH_ENGINE_SCALE_UP_THRESHOLD=3
|
||||
GRAPH_ENGINE_SCALE_DOWN_IDLE_TIME=5.0
|
||||
ALIYUN_SLS_ACCESS_KEY_ID=
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET=
|
||||
WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
|
||||
RESPECT_XFORWARD_HEADERS_ENABLED=false
|
||||
SSRF_HTTP_PORT=3128
|
||||
SSRF_COREDUMP_DIR=/var/spool/squid
|
||||
SSRF_REVERSE_PROXY_PORT=8194
|
||||
SSRF_SANDBOX_HOST=sandbox
|
||||
SSRF_DEFAULT_TIME_OUT=5
|
||||
SSRF_DEFAULT_CONNECT_TIME_OUT=5
|
||||
SSRF_DEFAULT_READ_TIME_OUT=5
|
||||
SSRF_DEFAULT_WRITE_TIME_OUT=5
|
||||
SSRF_POOL_MAX_CONNECTIONS=100
|
||||
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
PLUGIN_AWS_ACCESS_KEY=
|
||||
PLUGIN_AWS_SECRET_KEY=
|
||||
PLUGIN_AWS_REGION=
|
||||
PLUGIN_TENCENT_COS_SECRET_KEY=
|
||||
PLUGIN_TENCENT_COS_SECRET_ID=
|
||||
PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
|
||||
PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
|
||||
PLUGIN_VOLCENGINE_TOS_ACCESS_KEY=
|
||||
PLUGIN_VOLCENGINE_TOS_SECRET_KEY=
|
||||
OTLP_API_KEY=
|
||||
OTEL_EXPORTER_OTLP_PROTOCOL=
|
||||
OTEL_EXPORTER_TYPE=otlp
|
||||
OTEL_SAMPLING_RATE=0.1
|
||||
OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
|
||||
OTEL_MAX_QUEUE_SIZE=2048
|
||||
OTEL_MAX_EXPORT_BATCH_SIZE=512
|
||||
OTEL_METRIC_EXPORT_INTERVAL=60000
|
||||
OTEL_BATCH_EXPORT_TIMEOUT=10000
|
||||
OTEL_METRIC_EXPORT_TIMEOUT=30000
|
||||
QUEUE_MONITOR_THRESHOLD=200
|
||||
QUEUE_MONITOR_ALERT_EMAILS=
|
||||
QUEUE_MONITOR_INTERVAL=30
|
||||
SWAGGER_UI_ENABLED=false
|
||||
SWAGGER_UI_PATH=/swagger-ui.html
|
||||
DSL_EXPORT_ENCRYPT_DATASET_ID=true
|
||||
DATASET_MAX_SEGMENTS_PER_REQUEST=0
|
||||
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
|
||||
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
|
||||
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
|
||||
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
|
||||
ENABLE_CLEAN_MESSAGES=false
|
||||
ENABLE_WORKFLOW_RUN_CLEANUP_TASK=false
|
||||
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
|
||||
ENABLE_DATASETS_QUEUE_MONITOR=false
|
||||
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
|
||||
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
|
||||
WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
|
||||
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
|
||||
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
|
||||
TENANT_ISOLATED_TASK_CONCURRENCY=1
|
||||
ANNOTATION_IMPORT_FILE_SIZE_LIMIT=2
|
||||
ANNOTATION_IMPORT_MAX_RECORDS=10000
|
||||
ANNOTATION_IMPORT_MIN_RECORDS=1
|
||||
ANNOTATION_IMPORT_RATE_LIMIT_PER_MINUTE=5
|
||||
ANNOTATION_IMPORT_RATE_LIMIT_PER_HOUR=20
|
||||
ANNOTATION_IMPORT_MAX_CONCURRENT=5
|
||||
CREATORS_PLATFORM_FEATURES_ENABLED=true
|
||||
CREATORS_PLATFORM_API_URL=https://creators.dify.ai
|
||||
CREATORS_PLATFORM_OAUTH_CLIENT_ID=
|
||||
TIDB_VECTOR_DATABASE=dify
|
||||
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
|
||||
ALIBABACLOUD_MYSQL_PORT=3306
|
||||
ALIBABACLOUD_MYSQL_USER=root
|
||||
ALIBABACLOUD_MYSQL_DATABASE=dify
|
||||
ALIBABACLOUD_MYSQL_MAX_CONNECTION=5
|
||||
ALIBABACLOUD_MYSQL_HNSW_M=6
|
||||
RELYT_DATABASE=postgres
|
||||
TENCENT_VECTOR_DB_DATABASE=dify
|
||||
BAIDU_VECTOR_DB_DATABASE=dify
|
||||
EXPOSE_PLUGIN_DAEMON_PORT=5002
|
||||
GUNICORN_TIMEOUT=360
|
||||
CELERY_WORKER_AMOUNT=
|
||||
CELERY_AUTO_SCALE=false
|
||||
CELERY_MAX_WORKERS=
|
||||
CELERY_MIN_WORKERS=
|
||||
API_TOOL_DEFAULT_CONNECT_TIMEOUT=10
|
||||
API_TOOL_DEFAULT_READ_TIMEOUT=60
|
||||
CELERY_BACKEND=redis
|
||||
CELERY_USE_SENTINEL=false
|
||||
CELERY_SENTINEL_MASTER_NAME=
|
||||
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
|
||||
WEB_API_CORS_ALLOW_ORIGINS=*
|
||||
CONSOLE_CORS_ALLOW_ORIGINS=*
|
||||
COOKIE_DOMAIN=
|
||||
OPENDAL_SCHEME=fs
|
||||
OPENDAL_FS_ROOT=storage
|
||||
CLICKZETTA_VOLUME_TYPE=user
|
||||
CLICKZETTA_VOLUME_NAME=
|
||||
CLICKZETTA_VOLUME_TABLE_PREFIX=dataset_
|
||||
CLICKZETTA_VOLUME_DIFY_PREFIX=dify_km
|
||||
S3_ENDPOINT=
|
||||
S3_REGION=us-east-1
|
||||
S3_BUCKET_NAME=difyai
|
||||
S3_ADDRESS_STYLE=auto
|
||||
S3_USE_AWS_MANAGED_IAM=false
|
||||
ARCHIVE_STORAGE_ENABLED=false
|
||||
ARCHIVE_STORAGE_ENDPOINT=
|
||||
ARCHIVE_STORAGE_ARCHIVE_BUCKET=
|
||||
ARCHIVE_STORAGE_EXPORT_BUCKET=
|
||||
ARCHIVE_STORAGE_REGION=auto
|
||||
AZURE_BLOB_ACCOUNT_NAME=difyai
|
||||
AZURE_BLOB_CONTAINER_NAME=difyai-container
|
||||
GOOGLE_STORAGE_BUCKET_NAME=your-bucket-name
|
||||
GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64=
|
||||
ALIYUN_OSS_BUCKET_NAME=your-bucket-name
|
||||
ALIYUN_OSS_ENDPOINT=https://oss-ap-southeast-1-internal.aliyuncs.com
|
||||
ALIYUN_OSS_REGION=ap-southeast-1
|
||||
ALIYUN_OSS_AUTH_VERSION=v4
|
||||
ALIYUN_OSS_PATH=your-path
|
||||
ALIYUN_CLOUDBOX_ID=your-cloudbox-id
|
||||
TENCENT_COS_BUCKET_NAME=your-bucket-name
|
||||
TENCENT_COS_REGION=your-region
|
||||
TENCENT_COS_SCHEME=your-scheme
|
||||
TENCENT_COS_CUSTOM_DOMAIN=your-custom-domain
|
||||
OCI_ENDPOINT=https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com
|
||||
OCI_BUCKET_NAME=your-bucket-name
|
||||
OCI_REGION=us-ashburn-1
|
||||
HUAWEI_OBS_BUCKET_NAME=your-bucket-name
|
||||
HUAWEI_OBS_SERVER=your-server-url
|
||||
HUAWEI_OBS_PATH_STYLE=false
|
||||
VOLCENGINE_TOS_BUCKET_NAME=your-bucket-name
|
||||
VOLCENGINE_TOS_ENDPOINT=your-server-url
|
||||
VOLCENGINE_TOS_REGION=your-region
|
||||
BAIDU_OBS_BUCKET_NAME=your-bucket-name
|
||||
BAIDU_OBS_ENDPOINT=your-server-url
|
||||
SUPABASE_BUCKET_NAME=your-bucket-name
|
||||
TENCENT_VECTOR_DB_TIMEOUT=30
|
||||
TENCENT_VECTOR_DB_USERNAME=dify
|
||||
TENCENT_VECTOR_DB_SHARD=1
|
||||
TENCENT_VECTOR_DB_REPLICAS=2
|
||||
TENCENT_VECTOR_DB_ENABLE_HYBRID_SEARCH=false
|
||||
BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
|
||||
BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS=30000
|
||||
BAIDU_VECTOR_DB_ACCOUNT=root
|
||||
BAIDU_VECTOR_DB_API_KEY=dify
|
||||
BAIDU_VECTOR_DB_SHARD=1
|
||||
BAIDU_VECTOR_DB_REPLICAS=3
|
||||
BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER
|
||||
BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE
|
||||
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500
|
||||
BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05
|
||||
BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300
|
||||
HUAWEI_CLOUD_HOSTS=https://127.0.0.1:9200
|
||||
HUAWEI_CLOUD_USER=admin
|
||||
WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
|
||||
CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
|
||||
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
|
||||
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
|
||||
API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
|
||||
ALIYUN_SLS_ENDPOINT=
|
||||
ALIYUN_SLS_REGION=
|
||||
ALIYUN_SLS_PROJECT_NAME=
|
||||
ALIYUN_SLS_LOGSTORE_TTL=365
|
||||
LOGSTORE_DUAL_WRITE_ENABLED=false
|
||||
LOGSTORE_DUAL_READ_ENABLED=true
|
||||
LOGSTORE_ENABLE_PUT_GRAPH_FIELD=true
|
||||
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
|
||||
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
|
||||
HTTP_REQUEST_NODE_SSL_VERIFY=True
|
||||
HTTP_REQUEST_MAX_CONNECT_TIMEOUT=10
|
||||
HTTP_REQUEST_MAX_READ_TIMEOUT=600
|
||||
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
|
||||
PLUGIN_INSTALLED_PATH=plugin
|
||||
PLUGIN_PACKAGE_CACHE_PATH=plugin_packages
|
||||
PLUGIN_MEDIA_CACHE_PATH=assets
|
||||
PLUGIN_S3_USE_AWS=false
|
||||
PLUGIN_S3_USE_AWS_MANAGED_IAM=false
|
||||
PLUGIN_S3_ENDPOINT=
|
||||
PLUGIN_S3_USE_PATH_STYLE=false
|
||||
PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME=
|
||||
PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
|
||||
PLUGIN_TENCENT_COS_REGION=
|
||||
PLUGIN_ALIYUN_OSS_REGION=
|
||||
PLUGIN_ALIYUN_OSS_ENDPOINT=
|
||||
PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
|
||||
PLUGIN_ALIYUN_OSS_PATH=
|
||||
PLUGIN_VOLCENGINE_TOS_ENDPOINT=
|
||||
PLUGIN_VOLCENGINE_TOS_REGION=
|
||||
ENABLE_OTEL=false
|
||||
OTLP_TRACE_ENDPOINT=
|
||||
OTLP_METRIC_ENDPOINT=
|
||||
# Prefix used to create collection name in vector database
|
||||
OTLP_BASE_ENDPOINT=http://localhost:4318
|
||||
WEAVIATE_GRPC_ENDPOINT=grpc://weaviate:50051
|
||||
ANALYTICDB_KEY_ID=your-ak
|
||||
ANALYTICDB_KEY_SECRET=your-sk
|
||||
ANALYTICDB_REGION_ID=cn-hangzhou
|
||||
ANALYTICDB_INSTANCE_ID=gp-ab123456
|
||||
ANALYTICDB_ACCOUNT=testaccount
|
||||
ANALYTICDB_PASSWORD=testpassword
|
||||
ANALYTICDB_NAMESPACE=dify
|
||||
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
|
||||
ANALYTICDB_HOST=gp-test.aliyuncs.com
|
||||
ANALYTICDB_PORT=5432
|
||||
ANALYTICDB_MIN_CONNECTION=1
|
||||
ANALYTICDB_MAX_CONNECTION=5
|
||||
TIDB_VECTOR_HOST=tidb
|
||||
TIDB_VECTOR_PORT=4000
|
||||
TIDB_VECTOR_USER=
|
||||
TIDB_VECTOR_PASSWORD=
|
||||
TIDB_ON_QDRANT_CLIENT_TIMEOUT=20
|
||||
TIDB_ON_QDRANT_GRPC_ENABLED=false
|
||||
TIDB_ON_QDRANT_GRPC_PORT=6334
|
||||
TIDB_PUBLIC_KEY=dify
|
||||
TIDB_PRIVATE_KEY=dify
|
||||
RELYT_HOST=db
|
||||
RELYT_PORT=5432
|
||||
RELYT_USER=postgres
|
||||
VIKINGDB_ACCESS_KEY=your-ak
|
||||
VIKINGDB_SECRET_KEY=your-sk
|
||||
VIKINGDB_REGION=cn-shanghai
|
||||
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
|
||||
VIKINGDB_SCHEME=http
|
||||
VIKINGDB_CONNECTION_TIMEOUT=30
|
||||
VIKINGDB_SOCKET_TIMEOUT=30
|
||||
TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
|
||||
TABLESTORE_INSTANCE_NAME=instance-name
|
||||
CLICKZETTA_USERNAME=
|
||||
CLICKZETTA_VECTOR_DISTANCE_FUNCTION=cosine_distance
|
||||
COMPOSE_PROFILES=${VECTOR_STORE:-weaviate},${DB_TYPE:-postgresql}
|
||||
EXPOSE_NGINX_PORT=80
|
||||
EXPOSE_NGINX_SSL_PORT=443
|
||||
POSITION_TOOL_PINS=
|
||||
POSITION_TOOL_INCLUDES=
|
||||
POSITION_TOOL_EXCLUDES=
|
||||
POSITION_PROVIDER_PINS=
|
||||
POSITION_PROVIDER_INCLUDES=
|
||||
POSITION_PROVIDER_EXCLUDES=
|
||||
CREATE_TIDB_SERVICE_JOB_ENABLED=false
|
||||
MAX_SUBMIT_COUNT=100
|
||||
|
||||
# Vector Store Configuration
|
||||
STORAGE_TYPE=opendal
|
||||
VECTOR_STORE=weaviate
|
||||
VECTOR_INDEX_NAME_PREFIX=Vector_index
|
||||
WEAVIATE_ENDPOINT=http://weaviate:8080
|
||||
WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
|
||||
WEAVIATE_TOKENIZATION=word
|
||||
OCEANBASE_VECTOR_HOST=oceanbase
|
||||
OCEANBASE_VECTOR_PORT=2881
|
||||
OCEANBASE_VECTOR_USER=root@test
|
||||
OCEANBASE_VECTOR_PASSWORD=difyai123456
|
||||
OCEANBASE_VECTOR_DATABASE=test
|
||||
OCEANBASE_ENABLE_HYBRID_SEARCH=false
|
||||
OCEANBASE_FULLTEXT_PARSER=ik
|
||||
SEEKDB_MEMORY_LIMIT=2G
|
||||
QDRANT_URL=http://qdrant:6333
|
||||
QDRANT_API_KEY=difyai123456
|
||||
QDRANT_CLIENT_TIMEOUT=20
|
||||
QDRANT_GRPC_ENABLED=false
|
||||
QDRANT_GRPC_PORT=6334
|
||||
QDRANT_REPLICATION_FACTOR=1
|
||||
MILVUS_URI=http://host.docker.internal:19530
|
||||
MILVUS_TOKEN=
|
||||
MILVUS_USER=
|
||||
MILVUS_PASSWORD=
|
||||
MILVUS_ANALYZER_PARAMS=
|
||||
PGVECTOR_HOST=pgvector
|
||||
PGVECTOR_PORT=5432
|
||||
PGVECTOR_USER=postgres
|
||||
PGVECTOR_PASSWORD=difyai123456
|
||||
PGVECTOR_DATABASE=dify
|
||||
PGVECTOR_MIN_CONNECTION=1
|
||||
PGVECTOR_MAX_CONNECTION=5
|
||||
PGVECTOR_PG_BIGM=false
|
||||
PGVECTOR_PG_BIGM_VERSION=1.2-20240606
|
||||
|
||||
# Hologres Configuration
|
||||
HOLOGRES_HOST=
|
||||
HOLOGRES_PORT=80
|
||||
HOLOGRES_DATABASE=
|
||||
HOLOGRES_ACCESS_KEY_ID=
|
||||
HOLOGRES_ACCESS_KEY_SECRET=
|
||||
HOLOGRES_SCHEMA=public
|
||||
HOLOGRES_TOKENIZER=jieba
|
||||
HOLOGRES_DISTANCE_METHOD=Cosine
|
||||
HOLOGRES_BASE_QUANTIZATION_TYPE=rabitq
|
||||
HOLOGRES_MAX_DEGREE=64
|
||||
HOLOGRES_EF_CONSTRUCTION=400
|
||||
|
||||
# Milvus API Configuration
|
||||
MILVUS_DATABASE=
|
||||
MILVUS_ENABLE_HYBRID_SEARCH=False
|
||||
|
||||
# Human Input Task Configuration
|
||||
ENABLE_HUMAN_INPUT_TIMEOUT_TASK=true
|
||||
HUMAN_INPUT_TIMEOUT_TASK_INTERVAL=1
|
||||
30
docker/envs/core-services/web.env.example
Normal file
30
docker/envs/core-services/web.env.example
Normal file
@ -0,0 +1,30 @@
|
||||
# ------------------------------
|
||||
# Web Configuration
|
||||
# ------------------------------
|
||||
|
||||
CONSOLE_API_URL=
|
||||
APP_API_URL=
|
||||
SENTRY_DSN=
|
||||
NEXT_PUBLIC_SOCKET_URL=ws://localhost
|
||||
EXPERIMENTAL_ENABLE_VINEXT=false
|
||||
LOOP_NODE_MAX_COUNT=100
|
||||
MAX_TOOLS_NUM=10
|
||||
MAX_PARALLEL_LIMIT=10
|
||||
MAX_ITERATIONS_NUM=99
|
||||
TEXT_GENERATION_TIMEOUT_MS=60000
|
||||
ALLOW_INLINE_STYLES=false
|
||||
ALLOW_UNSAFE_DATA_SCHEME=false
|
||||
MAX_TREE_DEPTH=50
|
||||
MARKETPLACE_ENABLED=true
|
||||
MARKETPLACE_API_URL=https://marketplace.dify.ai
|
||||
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH=4000
|
||||
ALLOW_EMBED=false
|
||||
AMPLITUDE_API_KEY=
|
||||
ENABLE_WEBSITE_JINAREADER=true
|
||||
ENABLE_WEBSITE_FIRECRAWL=true
|
||||
ENABLE_WEBSITE_WATERCRAWL=true
|
||||
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX=false
|
||||
NEXT_PUBLIC_COOKIE_DOMAIN=
|
||||
NEXT_PUBLIC_BATCH_CONCURRENCY=5
|
||||
CSP_WHITELIST=
|
||||
TOP_K_MAX_VALUE=10
|
||||
8
docker/envs/core-services/worker-beat.env.example
Normal file
8
docker/envs/core-services/worker-beat.env.example
Normal file
@ -0,0 +1,8 @@
|
||||
# ------------------------------
|
||||
# Worker Beat Configuration
|
||||
# ------------------------------
|
||||
|
||||
MODE=beat
|
||||
COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
|
||||
COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
|
||||
COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
|
||||
13
docker/envs/core-services/worker.env.example
Normal file
13
docker/envs/core-services/worker.env.example
Normal file
@ -0,0 +1,13 @@
|
||||
# ------------------------------
|
||||
# Worker Configuration
|
||||
# ------------------------------
|
||||
|
||||
MODE=worker
|
||||
SENTRY_DSN=
|
||||
SENTRY_TRACES_SAMPLE_RATE=1.0
|
||||
SENTRY_PROFILES_SAMPLE_RATE=1.0
|
||||
PLUGIN_MAX_PACKAGE_SIZE=52428800
|
||||
INNER_API_KEY_FOR_PLUGIN=QaHbTe77CtuXmsfyhR7+vRjI/+XbV1AaFy691iy+kGDv2Jvy0/eAh8Y1
|
||||
COMPOSE_WORKER_HEALTHCHECK_DISABLED=true
|
||||
COMPOSE_WORKER_HEALTHCHECK_INTERVAL=30s
|
||||
COMPOSE_WORKER_HEALTHCHECK_TIMEOUT=30s
|
||||
9
docker/envs/databases/db-mysql.env.example
Normal file
9
docker/envs/databases/db-mysql.env.example
Normal file
@ -0,0 +1,9 @@
|
||||
# ------------------------------
|
||||
# Db Mysql Configuration
|
||||
# ------------------------------
|
||||
|
||||
MYSQL_INNODB_LOG_FILE_SIZE=128M
|
||||
MYSQL_INNODB_FLUSH_LOG_AT_TRX_COMMIT=2
|
||||
MYSQL_MAX_CONNECTIONS=1000
|
||||
MYSQL_INNODB_BUFFER_POOL_SIZE=512M
|
||||
MYSQL_HOST_VOLUME=./volumes/mysql/data
|
||||
26
docker/envs/databases/db-postgres.env.example
Normal file
26
docker/envs/databases/db-postgres.env.example
Normal file
@ -0,0 +1,26 @@
|
||||
# ------------------------------
|
||||
# Db Postgres Configuration
|
||||
# ------------------------------
|
||||
|
||||
PGDATA=/var/lib/postgresql/data/pgdata
|
||||
DB_TYPE=postgresql
|
||||
DB_USERNAME=postgres
|
||||
DB_PASSWORD=difyai123456
|
||||
DB_HOST=db_postgres
|
||||
DB_PORT=5432
|
||||
DB_DATABASE=dify
|
||||
SQLALCHEMY_POOL_SIZE=30
|
||||
SQLALCHEMY_MAX_OVERFLOW=10
|
||||
SQLALCHEMY_POOL_RECYCLE=3600
|
||||
SQLALCHEMY_ECHO=false
|
||||
SQLALCHEMY_POOL_PRE_PING=false
|
||||
SQLALCHEMY_POOL_USE_LIFO=false
|
||||
SQLALCHEMY_POOL_TIMEOUT=30
|
||||
SQLALCHEMY_POOL_RESET_ON_RETURN=rollback
|
||||
POSTGRES_MAX_CONNECTIONS=100
|
||||
POSTGRES_SHARED_BUFFERS=128MB
|
||||
POSTGRES_WORK_MEM=4MB
|
||||
POSTGRES_MAINTENANCE_WORK_MEM=64MB
|
||||
POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
|
||||
POSTGRES_STATEMENT_TIMEOUT=0
|
||||
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT=0
|
||||
35
docker/envs/databases/redis.env.example
Normal file
35
docker/envs/databases/redis.env.example
Normal file
@ -0,0 +1,35 @@
|
||||
# ------------------------------
|
||||
# Redis Configuration
|
||||
# ------------------------------
|
||||
|
||||
REDIS_HOST=redis
|
||||
REDIS_PORT=6379
|
||||
REDIS_USERNAME=
|
||||
REDIS_PASSWORD=difyai123456
|
||||
REDIS_USE_SSL=false
|
||||
REDIS_SSL_CERT_REQS=CERT_NONE
|
||||
REDIS_SSL_CA_CERTS=
|
||||
REDIS_SSL_CERTFILE=
|
||||
REDIS_SSL_KEYFILE=
|
||||
REDIS_DB=0
|
||||
REDIS_KEY_PREFIX=
|
||||
REDIS_MAX_CONNECTIONS=
|
||||
REDIS_USE_SENTINEL=false
|
||||
REDIS_SENTINELS=
|
||||
REDIS_SENTINEL_SERVICE_NAME=
|
||||
REDIS_SENTINEL_USERNAME=
|
||||
REDIS_SENTINEL_PASSWORD=
|
||||
REDIS_SENTINEL_SOCKET_TIMEOUT=0.1
|
||||
REDIS_USE_CLUSTERS=false
|
||||
REDIS_CLUSTERS=
|
||||
REDIS_CLUSTERS_PASSWORD=
|
||||
REDIS_RETRY_RETRIES=3
|
||||
REDIS_RETRY_BACKOFF_BASE=1.0
|
||||
REDIS_RETRY_BACKOFF_CAP=10.0
|
||||
REDIS_SOCKET_TIMEOUT=5.0
|
||||
REDIS_SOCKET_CONNECT_TIMEOUT=5.0
|
||||
REDIS_HEALTH_CHECK_INTERVAL=30
|
||||
EVENT_BUS_REDIS_URL=
|
||||
EVENT_BUS_REDIS_CHANNEL_TYPE=pubsub
|
||||
EVENT_BUS_REDIS_USE_CLUSTERS=false
|
||||
BROKER_USE_SSL=false
|
||||
7
docker/envs/infrastructure/certbot.env.example
Normal file
7
docker/envs/infrastructure/certbot.env.example
Normal file
@ -0,0 +1,7 @@
|
||||
# ------------------------------
|
||||
# Certbot Configuration
|
||||
# ------------------------------
|
||||
|
||||
CERTBOT_EMAIL=your_email@example.com
|
||||
CERTBOT_DOMAIN=your_domain.com
|
||||
CERTBOT_OPTIONS=
|
||||
4
docker/envs/infrastructure/etcd.env.example
Normal file
4
docker/envs/infrastructure/etcd.env.example
Normal file
@ -0,0 +1,4 @@
|
||||
# ------------------------------
|
||||
# Etcd Configuration
|
||||
# ------------------------------
|
||||
|
||||
4
docker/envs/infrastructure/milvus-standalone.env.example
Normal file
4
docker/envs/infrastructure/milvus-standalone.env.example
Normal file
@ -0,0 +1,4 @@
|
||||
# ------------------------------
|
||||
# Milvus Standalone Configuration
|
||||
# ------------------------------
|
||||
|
||||
4
docker/envs/infrastructure/minio.env.example
Normal file
4
docker/envs/infrastructure/minio.env.example
Normal file
@ -0,0 +1,4 @@
|
||||
# ------------------------------
|
||||
# Minio Configuration
|
||||
# ------------------------------
|
||||
|
||||
17
docker/envs/infrastructure/nginx.env.example
Normal file
17
docker/envs/infrastructure/nginx.env.example
Normal file
@ -0,0 +1,17 @@
|
||||
# ------------------------------
|
||||
# Nginx Configuration
|
||||
# ------------------------------
|
||||
|
||||
NGINX_SERVER_NAME=_
|
||||
NGINX_HTTPS_ENABLED=false
|
||||
NGINX_PORT=80
|
||||
NGINX_SSL_PORT=443
|
||||
NGINX_SSL_CERT_FILENAME=dify.crt
|
||||
NGINX_SSL_CERT_KEY_FILENAME=dify.key
|
||||
NGINX_SSL_PROTOCOLS=TLSv1.2 TLSv1.3
|
||||
NGINX_WORKER_PROCESSES=auto
|
||||
NGINX_CLIENT_MAX_BODY_SIZE=100M
|
||||
NGINX_KEEPALIVE_TIMEOUT=65
|
||||
NGINX_PROXY_READ_TIMEOUT=3600s
|
||||
NGINX_PROXY_SEND_TIMEOUT=3600s
|
||||
NGINX_ENABLE_CERTBOT_CHALLENGE=false
|
||||
17
docker/envs/infrastructure/ssrf-proxy.env.example
Normal file
17
docker/envs/infrastructure/ssrf-proxy.env.example
Normal file
@ -0,0 +1,17 @@
|
||||
# ------------------------------
|
||||
# Ssrf Proxy Configuration
|
||||
# ------------------------------
|
||||
|
||||
SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
|
||||
SSRF_PROXY_HTTPS_URL=http://ssrf_proxy:3128
|
||||
SSRF_HTTP_PORT=3128
|
||||
SSRF_COREDUMP_DIR=/var/spool/squid
|
||||
SSRF_REVERSE_PROXY_PORT=8194
|
||||
SSRF_SANDBOX_HOST=sandbox
|
||||
SSRF_DEFAULT_TIME_OUT=5
|
||||
SSRF_DEFAULT_CONNECT_TIME_OUT=5
|
||||
SSRF_DEFAULT_READ_TIME_OUT=5
|
||||
SSRF_DEFAULT_WRITE_TIME_OUT=5
|
||||
SSRF_POOL_MAX_CONNECTIONS=100
|
||||
SSRF_POOL_MAX_KEEPALIVE_CONNECTIONS=20
|
||||
SSRF_POOL_KEEPALIVE_EXPIRY=5.0
|
||||
40
docker/envs/security.env.example
Normal file
40
docker/envs/security.env.example
Normal file
@ -0,0 +1,40 @@
|
||||
# ------------------------------
|
||||
# Security Configuration
|
||||
# ------------------------------
|
||||
|
||||
TIDB_ON_QDRANT_API_KEY=dify
|
||||
TENCENT_VECTOR_DB_API_KEY=dify
|
||||
ALIBABACLOUD_MYSQL_PASSWORD=difyai123456
|
||||
RELYT_PASSWORD=difyai123456
|
||||
LINDORM_PASSWORD=admin
|
||||
HUAWEI_CLOUD_PASSWORD=admin
|
||||
UPSTASH_VECTOR_TOKEN=dify
|
||||
TABLESTORE_ACCESS_KEY_ID=xxx
|
||||
TABLESTORE_ACCESS_KEY_SECRET=xxx
|
||||
UNSTRUCTURED_API_KEY=
|
||||
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
||||
NOTION_CLIENT_SECRET=
|
||||
NOTION_INTERNAL_SECRET=
|
||||
RESEND_API_KEY=your-resend-api-key
|
||||
SMTP_PASSWORD=
|
||||
SENDGRID_API_KEY=
|
||||
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
|
||||
EMAIL_REGISTER_TOKEN_EXPIRY_MINUTES=5
|
||||
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
|
||||
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
|
||||
CODE_EXECUTION_API_KEY=dify-sandbox
|
||||
ALIYUN_SLS_ACCESS_KEY_ID=
|
||||
ALIYUN_SLS_ACCESS_KEY_SECRET=
|
||||
OTLP_API_KEY=
|
||||
BAIDU_VECTOR_DB_API_KEY=dify
|
||||
ANALYTICDB_KEY_ID=your-ak
|
||||
ANALYTICDB_KEY_SECRET=your-sk
|
||||
ANALYTICDB_PASSWORD=testpassword
|
||||
ANALYTICDB_NAMESPACE_PASSWORD=difypassword
|
||||
TIDB_VECTOR_PASSWORD=
|
||||
TIDB_PUBLIC_KEY=dify
|
||||
TIDB_PRIVATE_KEY=dify
|
||||
VIKINGDB_ACCESS_KEY=your-ak
|
||||
VIKINGDB_SECRET_KEY=your-sk
|
||||
SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
|
||||
INIT_PASSWORD=
|
||||
13
docker/envs/vectorstores/chroma.env.example
Normal file
13
docker/envs/vectorstores/chroma.env.example
Normal file
@ -0,0 +1,13 @@
|
||||
# ------------------------------
|
||||
# Chroma Configuration
|
||||
# ------------------------------
|
||||
|
||||
CHROMA_DATABASE=default_database
|
||||
CHROMA_AUTH_PROVIDER=chromadb.auth.token_authn.TokenAuthClientProvider
|
||||
CHROMA_AUTH_CREDENTIALS=
|
||||
CHROMA_HOST=127.0.0.1
|
||||
CHROMA_PORT=8000
|
||||
CHROMA_TENANT=default_tenant
|
||||
CHROMA_SERVER_AUTHN_CREDENTIALS=difyai123456
|
||||
CHROMA_SERVER_AUTHN_PROVIDER=chromadb.auth.token_authn.TokenAuthenticationServerProvider
|
||||
CHROMA_IS_PERSISTENT=TRUE
|
||||
9
docker/envs/vectorstores/couchbase.env.example
Normal file
9
docker/envs/vectorstores/couchbase.env.example
Normal file
@ -0,0 +1,9 @@
|
||||
# ------------------------------
|
||||
# Couchbase Configuration
|
||||
# ------------------------------
|
||||
|
||||
COUCHBASE_PASSWORD=password
|
||||
COUCHBASE_BUCKET_NAME=Embeddings
|
||||
COUCHBASE_SCOPE_NAME=_default
|
||||
COUCHBASE_CONNECTION_STRING=couchbase://couchbase-server
|
||||
COUCHBASE_USER=Administrator
|
||||
17
docker/envs/vectorstores/elasticsearch.env.example
Normal file
17
docker/envs/vectorstores/elasticsearch.env.example
Normal file
@ -0,0 +1,17 @@
|
||||
# ------------------------------
|
||||
# Elasticsearch Configuration
|
||||
# ------------------------------
|
||||
|
||||
ELASTICSEARCH_CLOUD_URL=YOUR-ELASTICSEARCH_CLOUD_URL
|
||||
ELASTICSEARCH_PASSWORD=elastic
|
||||
KIBANA_PORT=5601
|
||||
ELASTICSEARCH_USE_CLOUD=false
|
||||
ELASTICSEARCH_API_KEY=YOUR-ELASTICSEARCH_API_KEY
|
||||
ELASTICSEARCH_VERIFY_CERTS=False
|
||||
ELASTICSEARCH_CA_CERTS=
|
||||
ELASTICSEARCH_REQUEST_TIMEOUT=100000
|
||||
ELASTICSEARCH_RETRY_ON_TIMEOUT=True
|
||||
ELASTICSEARCH_MAX_RETRIES=10
|
||||
ELASTICSEARCH_HOST=0.0.0.0
|
||||
ELASTICSEARCH_PORT=9200
|
||||
ELASTICSEARCH_USERNAME=elastic
|
||||
17
docker/envs/vectorstores/iris.env.example
Normal file
17
docker/envs/vectorstores/iris.env.example
Normal file
@ -0,0 +1,17 @@
|
||||
# ------------------------------
|
||||
# Iris Configuration
|
||||
# ------------------------------
|
||||
|
||||
IRIS_CONNECTION_URL=
|
||||
IRIS_MIN_CONNECTION=1
|
||||
IRIS_MAX_CONNECTION=3
|
||||
IRIS_TEXT_INDEX=true
|
||||
IRIS_TEXT_INDEX_LANGUAGE=en
|
||||
IRIS_TIMEZONE=UTC
|
||||
IRIS_PASSWORD=Dify@1234
|
||||
IRIS_DATABASE=USER
|
||||
IRIS_SCHEMA=dify
|
||||
IRIS_HOST=iris
|
||||
IRIS_SUPER_SERVER_PORT=1972
|
||||
IRIS_WEB_SERVER_PORT=52773
|
||||
IRIS_USER=_SYSTEM
|
||||
9
docker/envs/vectorstores/matrixone.env.example
Normal file
9
docker/envs/vectorstores/matrixone.env.example
Normal file
@ -0,0 +1,9 @@
|
||||
# ------------------------------
|
||||
# Matrixone Configuration
|
||||
# ------------------------------
|
||||
|
||||
MATRIXONE_PASSWORD=111
|
||||
MATRIXONE_HOST=matrixone
|
||||
MATRIXONE_PORT=6001
|
||||
MATRIXONE_USER=dump
|
||||
MATRIXONE_DATABASE=dify
|
||||
13
docker/envs/vectorstores/milvus.env.example
Normal file
13
docker/envs/vectorstores/milvus.env.example
Normal file
@ -0,0 +1,13 @@
|
||||
# ------------------------------
|
||||
# Milvus Configuration
|
||||
# ------------------------------
|
||||
|
||||
MINIO_ACCESS_KEY=minioadmin
|
||||
MINIO_SECRET_KEY=minioadmin
|
||||
ETCD_ENDPOINTS=etcd:2379
|
||||
MINIO_ADDRESS=minio:9000
|
||||
ETCD_AUTO_COMPACTION_MODE=revision
|
||||
ETCD_AUTO_COMPACTION_RETENTION=1000
|
||||
ETCD_QUOTA_BACKEND_BYTES=4294967296
|
||||
ETCD_SNAPSHOT_COUNT=50000
|
||||
MILVUS_AUTHORIZATION_ENABLED=true
|
||||
10
docker/envs/vectorstores/myscale.env.example
Normal file
10
docker/envs/vectorstores/myscale.env.example
Normal file
@ -0,0 +1,10 @@
|
||||
# ------------------------------
|
||||
# Myscale Configuration
|
||||
# ------------------------------
|
||||
|
||||
MYSCALE_PASSWORD=
|
||||
MYSCALE_DATABASE=dify
|
||||
MYSCALE_FTS_PARAMS=
|
||||
MYSCALE_HOST=myscale
|
||||
MYSCALE_PORT=8123
|
||||
MYSCALE_USER=default
|
||||
6
docker/envs/vectorstores/oceanbase.env.example
Normal file
6
docker/envs/vectorstores/oceanbase.env.example
Normal file
@ -0,0 +1,6 @@
|
||||
# ------------------------------
|
||||
# Oceanbase Configuration
|
||||
# ------------------------------
|
||||
|
||||
OCEANBASE_CLUSTER_NAME=difyai
|
||||
OCEANBASE_MEMORY_LIMIT=6G
|
||||
12
docker/envs/vectorstores/opengauss.env.example
Normal file
12
docker/envs/vectorstores/opengauss.env.example
Normal file
@ -0,0 +1,12 @@
|
||||
# ------------------------------
|
||||
# Opengauss Configuration
|
||||
# ------------------------------
|
||||
|
||||
OPENGAUSS_PASSWORD=Dify@123
|
||||
OPENGAUSS_DATABASE=dify
|
||||
OPENGAUSS_MIN_CONNECTION=1
|
||||
OPENGAUSS_MAX_CONNECTION=5
|
||||
OPENGAUSS_ENABLE_PQ=false
|
||||
OPENGAUSS_HOST=opengauss
|
||||
OPENGAUSS_PORT=6600
|
||||
OPENGAUSS_USER=postgres
|
||||
22
docker/envs/vectorstores/opensearch.env.example
Normal file
22
docker/envs/vectorstores/opensearch.env.example
Normal file
@ -0,0 +1,22 @@
|
||||
# ------------------------------
|
||||
# Opensearch Configuration
|
||||
# ------------------------------
|
||||
|
||||
OPENSEARCH_PASSWORD=admin
|
||||
OPENSEARCH_AWS_REGION=ap-southeast-1
|
||||
OPENSEARCH_AWS_SERVICE=aoss
|
||||
OPENSEARCH_INITIAL_ADMIN_PASSWORD=Qazwsxedc!@#123
|
||||
OPENSEARCH_MEMLOCK_SOFT=-1
|
||||
OPENSEARCH_MEMLOCK_HARD=-1
|
||||
OPENSEARCH_NOFILE_SOFT=65536
|
||||
OPENSEARCH_NOFILE_HARD=65536
|
||||
OPENSEARCH_HOST=opensearch
|
||||
OPENSEARCH_PORT=9200
|
||||
OPENSEARCH_SECURE=true
|
||||
OPENSEARCH_VERIFY_CERTS=true
|
||||
OPENSEARCH_AUTH_METHOD=basic
|
||||
OPENSEARCH_USER=admin
|
||||
OPENSEARCH_DISCOVERY_TYPE=single-node
|
||||
OPENSEARCH_BOOTSTRAP_MEMORY_LOCK=true
|
||||
OPENSEARCH_JAVA_OPTS_MIN=512m
|
||||
OPENSEARCH_JAVA_OPTS_MAX=1024m
|
||||
13
docker/envs/vectorstores/oracle.env.example
Normal file
13
docker/envs/vectorstores/oracle.env.example
Normal file
@ -0,0 +1,13 @@
|
||||
# ------------------------------
|
||||
# Oracle Configuration
|
||||
# ------------------------------
|
||||
|
||||
ORACLE_PASSWORD=dify
|
||||
ORACLE_DSN=oracle:1521/FREEPDB1
|
||||
ORACLE_CONFIG_DIR=/app/api/storage/wallet
|
||||
ORACLE_WALLET_LOCATION=/app/api/storage/wallet
|
||||
ORACLE_WALLET_PASSWORD=dify
|
||||
ORACLE_IS_AUTONOMOUS=false
|
||||
ORACLE_USER=dify
|
||||
ORACLE_PWD=Dify123456
|
||||
ORACLE_CHARACTERSET=AL32UTF8
|
||||
9
docker/envs/vectorstores/pgvecto-rs.env.example
Normal file
9
docker/envs/vectorstores/pgvecto-rs.env.example
Normal file
@ -0,0 +1,9 @@
|
||||
# ------------------------------
|
||||
# Pgvecto Rs Configuration
|
||||
# ------------------------------
|
||||
|
||||
PGVECTO_RS_HOST=pgvecto-rs
|
||||
PGVECTO_RS_PORT=5432
|
||||
PGVECTO_RS_USER=postgres
|
||||
PGVECTO_RS_PASSWORD=difyai123456
|
||||
PGVECTO_RS_DATABASE=dify
|
||||
8
docker/envs/vectorstores/pgvector.env.example
Normal file
8
docker/envs/vectorstores/pgvector.env.example
Normal file
@ -0,0 +1,8 @@
|
||||
# ------------------------------
|
||||
# Pgvector Configuration
|
||||
# ------------------------------
|
||||
|
||||
PGVECTOR_PGUSER=postgres
|
||||
PGVECTOR_POSTGRES_PASSWORD=difyai123456
|
||||
PGVECTOR_POSTGRES_DB=dify
|
||||
PGVECTOR_PGDATA=/var/lib/postgresql/data/pgdata
|
||||
4
docker/envs/vectorstores/qdrant.env.example
Normal file
4
docker/envs/vectorstores/qdrant.env.example
Normal file
@ -0,0 +1,4 @@
|
||||
# ------------------------------
|
||||
# Qdrant Configuration
|
||||
# ------------------------------
|
||||
|
||||
4
docker/envs/vectorstores/seekdb.env.example
Normal file
4
docker/envs/vectorstores/seekdb.env.example
Normal file
@ -0,0 +1,4 @@
|
||||
# ------------------------------
|
||||
# Seekdb Configuration
|
||||
# ------------------------------
|
||||
|
||||
11
docker/envs/vectorstores/vastbase.env.example
Normal file
11
docker/envs/vectorstores/vastbase.env.example
Normal file
@ -0,0 +1,11 @@
|
||||
# ------------------------------
|
||||
# Vastbase Configuration
|
||||
# ------------------------------
|
||||
|
||||
VASTBASE_PASSWORD=Difyai123456
|
||||
VASTBASE_DATABASE=dify
|
||||
VASTBASE_MIN_CONNECTION=1
|
||||
VASTBASE_MAX_CONNECTION=5
|
||||
VASTBASE_HOST=vastbase
|
||||
VASTBASE_PORT=5432
|
||||
VASTBASE_USER=dify
|
||||
18
docker/envs/vectorstores/weaviate.env.example
Normal file
18
docker/envs/vectorstores/weaviate.env.example
Normal file
@ -0,0 +1,18 @@
|
||||
# ------------------------------
|
||||
# Weaviate Configuration
|
||||
# ------------------------------
|
||||
|
||||
WEAVIATE_PERSISTENCE_DATA_PATH=/var/lib/weaviate
|
||||
WEAVIATE_QUERY_DEFAULTS_LIMIT=25
|
||||
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
|
||||
WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
|
||||
WEAVIATE_CLUSTER_HOSTNAME=node1
|
||||
WEAVIATE_AUTHENTICATION_APIKEY_ENABLED=true
|
||||
WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
|
||||
WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
|
||||
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
|
||||
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
|
||||
WEAVIATE_DISABLE_TELEMETRY=false
|
||||
WEAVIATE_ENABLE_TOKENIZER_GSE=false
|
||||
WEAVIATE_ENABLE_TOKENIZER_KAGOME_JA=false
|
||||
WEAVIATE_ENABLE_TOKENIZER_KAGOME_KR=false
|
||||
@ -64,25 +64,61 @@ def generate_shared_env_block(env_vars, anchor_name="shared-api-worker-env"):
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def insert_shared_env(template_path, output_path, shared_env_block, header_comments):
|
||||
def create_env_files_from_example(env_example_path):
|
||||
"""
|
||||
Inserts the shared environment variables block and header comments into the template file,
|
||||
removing any existing x-shared-env anchors, and generates the final docker-compose.yaml file.
|
||||
Always writes with LF line endings.
|
||||
Creates actual env files from .env.example by copying the categorized .env.example files.
|
||||
This allows docker-compose to use env_file references.
|
||||
Supports per-module structure with subdirectories.
|
||||
"""
|
||||
base_dir = os.path.dirname(os.path.abspath(env_example_path))
|
||||
root_env_file = os.path.join(base_dir, ".env")
|
||||
if not os.path.exists(root_env_file):
|
||||
with open(env_example_path, "r", encoding="utf-8") as src, open(
|
||||
root_env_file, "w", encoding="utf-8", newline="\n"
|
||||
) as dst:
|
||||
dst.write(src.read())
|
||||
print(f"Created {root_env_file}")
|
||||
else:
|
||||
print(f"{root_env_file} already exists, skipping")
|
||||
|
||||
envs_dir = os.path.join(base_dir, "envs")
|
||||
if not os.path.isdir(envs_dir):
|
||||
print(f"No envs directory found at {envs_dir}, skipping split env files")
|
||||
return []
|
||||
|
||||
created_files = []
|
||||
# Walk through all .env.example files in subdirectories
|
||||
for root, dirs, files in os.walk(envs_dir):
|
||||
for file in files:
|
||||
if file.endswith('.env.example'):
|
||||
example_file = os.path.join(root, file)
|
||||
env_file = example_file.replace('.env.example', '.env')
|
||||
|
||||
if os.path.exists(env_file):
|
||||
print(f"{env_file} already exists, skipping")
|
||||
continue
|
||||
|
||||
# Copy .example to actual file
|
||||
with open(example_file, "r", encoding="utf-8") as src, open(
|
||||
env_file, "w", encoding="utf-8", newline="\n"
|
||||
) as dst:
|
||||
dst.write(src.read())
|
||||
created_files.append(env_file)
|
||||
print(f"Created {env_file}")
|
||||
|
||||
return created_files
|
||||
|
||||
|
||||
def insert_shared_env(template_path, output_path, header_comments):
|
||||
"""
|
||||
Copies the template file to output path with header comments.
|
||||
The template now uses env_file references instead of a huge YAML anchor.
|
||||
"""
|
||||
with open(template_path, "r", encoding="utf-8") as f:
|
||||
template_content = f.read()
|
||||
|
||||
# Remove existing x-shared-env: &shared-api-worker-env lines
|
||||
template_content = re.sub(
|
||||
r"^x-shared-env: &shared-api-worker-env\s*\n?",
|
||||
"",
|
||||
template_content,
|
||||
flags=re.MULTILINE,
|
||||
)
|
||||
|
||||
# Prepare the final content with header comments and shared env block
|
||||
final_content = f"{header_comments}\n{shared_env_block}\n\n{template_content}"
|
||||
# Prepare the final content with header comments
|
||||
final_content = f"{header_comments}\n{template_content}"
|
||||
|
||||
with open(output_path, "w", encoding="utf-8", newline="\n") as f:
|
||||
f.write(final_content)
|
||||
@ -90,10 +126,10 @@ def insert_shared_env(template_path, output_path, shared_env_block, header_comme
|
||||
|
||||
|
||||
def main():
|
||||
env_example_path = ".env.example"
|
||||
template_path = "docker-compose-template.yaml"
|
||||
output_path = "docker-compose.yaml"
|
||||
anchor_name = "shared-api-worker-env" # Can be modified as needed
|
||||
base_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env_example_path = os.path.join(base_dir, ".env.example")
|
||||
template_path = os.path.join(base_dir, "docker-compose-template.yaml")
|
||||
output_path = os.path.join(base_dir, "docker-compose.yaml")
|
||||
|
||||
# Define header comments to be added at the top of docker-compose.yaml
|
||||
header_comments = (
|
||||
@ -110,17 +146,14 @@ def main():
|
||||
print(f"Error: File {path} does not exist.")
|
||||
sys.exit(1)
|
||||
|
||||
# Parse .env.example file
|
||||
env_vars = parse_env_example(env_example_path)
|
||||
# Create env files from categorized .env.example files
|
||||
# These files are used by docker-compose's env_file directive
|
||||
# This ensures .env files exist even in CI/CD environments
|
||||
create_env_files_from_example(env_example_path)
|
||||
|
||||
if not env_vars:
|
||||
print("Warning: No environment variables found in .env.example.")
|
||||
|
||||
# Generate shared environment variables block
|
||||
shared_env_block = generate_shared_env_block(env_vars, anchor_name)
|
||||
|
||||
# Insert shared environment variables block and header comments into the template
|
||||
insert_shared_env(template_path, output_path, shared_env_block, header_comments)
|
||||
# Copy template to output with header comments
|
||||
# The template now uses env_file references instead of a huge YAML anchor
|
||||
insert_shared_env(template_path, output_path, header_comments)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@ -36,7 +36,7 @@ export const webDir = path.join(rootDir, 'web')
|
||||
|
||||
export const middlewareComposeFile = path.join(dockerDir, 'docker-compose.middleware.yaml')
|
||||
export const middlewareEnvFile = path.join(dockerDir, 'middleware.env')
|
||||
export const middlewareEnvExampleFile = path.join(dockerDir, 'middleware.env.example')
|
||||
export const middlewareEnvExampleFile = path.join(dockerDir, 'envs', 'middleware.env.example')
|
||||
export const webEnvLocalFile = path.join(webDir, '.env.local')
|
||||
export const webEnvExampleFile = path.join(webDir, '.env.example')
|
||||
export const apiEnvExampleFile = path.join(apiDir, 'tests', 'integration_tests', '.env.example')
|
||||
|
||||
@ -202,6 +202,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/annotation/add-annotation-modal/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/annotation/batch-add-annotation-modal/index.tsx": {
|
||||
"erasable-syntax-only/enums": {
|
||||
"count": 1
|
||||
@ -230,6 +235,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/annotation/edit-annotation-modal/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/annotation/header-opts/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@ -252,6 +262,9 @@
|
||||
"erasable-syntax-only/enums": {
|
||||
"count": 1
|
||||
},
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/set-state-in-effect": {
|
||||
"count": 5
|
||||
},
|
||||
@ -269,11 +282,6 @@
|
||||
"count": 4
|
||||
}
|
||||
},
|
||||
"web/app/components/app/app-publisher/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
}
|
||||
},
|
||||
"web/app/components/app/app-publisher/version-info-modal.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
@ -344,6 +352,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/app/configuration/config/agent/agent-tools/setting-built-in-tool.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react-hooks/exhaustive-deps": {
|
||||
"count": 1
|
||||
},
|
||||
@ -401,6 +412,16 @@
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"web/app/components/app/configuration/configuration-view.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/configuration/dataset-config/card-item/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/app/configuration/dataset-config/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@ -531,6 +552,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/app/log/list.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/set-state-in-effect": {
|
||||
"count": 6
|
||||
},
|
||||
@ -580,6 +604,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/app/workflow-log/list.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/set-state-in-effect": {
|
||||
"count": 2
|
||||
}
|
||||
@ -904,6 +931,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/base/drawer-plus/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/base/emoji-picker/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
@ -1029,6 +1061,11 @@
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"web/app/components/base/float-right-container/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"web/app/components/base/form/components/base/base-form.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
@ -1233,7 +1270,7 @@
|
||||
},
|
||||
"web/app/components/base/icons/src/vender/line/development/index.ts": {
|
||||
"no-barrel-files/no-barrel-files": {
|
||||
"count": 2
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/base/icons/src/vender/line/editor/index.ts": {
|
||||
@ -2144,14 +2181,6 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/batch-modal/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/set-state-in-effect": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/completed/common/chunk-content.tsx": {
|
||||
"react/set-state-in-effect": {
|
||||
"count": 1
|
||||
@ -2162,11 +2191,6 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/completed/components/index.ts": {
|
||||
"no-barrel-files/no-barrel-files": {
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/completed/components/segment-list-content.tsx": {
|
||||
"ts/no-non-null-asserted-optional-chain": {
|
||||
"count": 1
|
||||
@ -2231,14 +2255,6 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/segment-add/index.tsx": {
|
||||
"erasable-syntax-only/enums": {
|
||||
"count": 1
|
||||
},
|
||||
"react-refresh/only-export-components": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/documents/detail/settings/pipeline-settings/index.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 6
|
||||
@ -2280,6 +2296,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/hit-testing/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/unsupported-syntax": {
|
||||
"count": 1
|
||||
}
|
||||
@ -2319,7 +2338,7 @@
|
||||
},
|
||||
"web/app/components/datasets/metadata/metadata-dataset/dataset-metadata-drawer.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 2
|
||||
"count": 3
|
||||
}
|
||||
},
|
||||
"web/app/components/datasets/metadata/metadata-dataset/select-metadata-modal.tsx": {
|
||||
@ -2813,10 +2832,18 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/plugin-detail-panel/endpoint-modal.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 7
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/plugin-detail-panel/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/plugin-detail-panel/model-list.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
@ -2838,6 +2865,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/plugin-detail-panel/strategy-detail.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 2
|
||||
}
|
||||
@ -2896,6 +2926,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/plugin-detail-panel/trigger/event-detail-drawer.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 5
|
||||
}
|
||||
@ -2933,16 +2966,6 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/readme-panel/index.tsx": {
|
||||
"react/unsupported-syntax": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/readme-panel/store.ts": {
|
||||
"erasable-syntax-only/enums": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/plugins/reference-setting-modal/auto-update-setting/types.ts": {
|
||||
"erasable-syntax-only/enums": {
|
||||
"count": 2
|
||||
@ -3170,7 +3193,7 @@
|
||||
},
|
||||
"web/app/components/tools/edit-custom-collection-modal/config-credentials.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/edit-custom-collection-modal/get-schema.tsx": {
|
||||
@ -3179,6 +3202,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/edit-custom-collection-modal/index.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"react/set-state-in-effect": {
|
||||
"count": 4
|
||||
},
|
||||
@ -3187,6 +3213,9 @@
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/edit-custom-collection-modal/test-api.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
@ -3196,6 +3225,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/mcp/detail/provider-detail.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/mcp/mcp-server-modal.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
@ -3224,12 +3258,20 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/provider/detail.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/provider/empty.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/tools/setting/build-in/config-credentials.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
},
|
||||
"ts/no-explicit-any": {
|
||||
"count": 3
|
||||
}
|
||||
@ -4061,6 +4103,11 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/knowledge-retrieval/components/dataset-item.tsx": {
|
||||
"no-restricted-imports": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"web/app/components/workflow/nodes/knowledge-retrieval/components/metadata/condition-list/condition-item.tsx": {
|
||||
"ts/no-explicit-any": {
|
||||
"count": 1
|
||||
|
||||
@ -28,6 +28,7 @@ Always import from a **subpath export** — there is no barrel:
|
||||
import { Button } from '@langgenius/dify-ui/button'
|
||||
import { cn } from '@langgenius/dify-ui/cn'
|
||||
import { Dialog, DialogContent, DialogTrigger } from '@langgenius/dify-ui/dialog'
|
||||
import { Drawer, DrawerPopup, DrawerTrigger } from '@langgenius/dify-ui/drawer'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@langgenius/dify-ui/popover'
|
||||
import '@langgenius/dify-ui/styles.css' // once, in the app root
|
||||
```
|
||||
@ -36,12 +37,12 @@ Importing from `@langgenius/dify-ui` (no subpath) is intentionally not supported
|
||||
|
||||
## Primitives
|
||||
|
||||
| Category | Subpath | Notes |
|
||||
| -------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------- |
|
||||
| Overlay | `./alert-dialog`, `./autocomplete`, `./combobox`, `./context-menu`, `./dialog`, `./dropdown-menu`, `./popover`, `./select`, `./toast`, `./tooltip` | Portalled. See [Overlay & portal contract] below. |
|
||||
| Form | `./autocomplete`, `./combobox`, `./number-field`, `./slider`, `./switch` | Controlled / uncontrolled per Base UI defaults. |
|
||||
| Layout | `./scroll-area` | Custom-styled scrollbar over the host viewport. |
|
||||
| Media | `./avatar`, `./button` | Button exposes `cva` variants. |
|
||||
| Category | Subpath | Notes |
|
||||
| -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------- |
|
||||
| Overlay | `./alert-dialog`, `./autocomplete`, `./combobox`, `./context-menu`, `./dialog`, `./drawer`, `./dropdown-menu`, `./popover`, `./select`, `./toast`, `./tooltip` | Portalled. See [Overlay & portal contract] below. |
|
||||
| Form | `./autocomplete`, `./combobox`, `./number-field`, `./slider`, `./switch` | Controlled / uncontrolled per Base UI defaults. |
|
||||
| Layout | `./scroll-area` | Custom-styled scrollbar over the host viewport. |
|
||||
| Media | `./avatar`, `./button` | Button exposes `cva` variants. |
|
||||
|
||||
Utilities:
|
||||
|
||||
@ -65,7 +66,7 @@ If a consumer uses Dify UI source files through the workspace, add an explicit s
|
||||
|
||||
## Overlay & portal contract
|
||||
|
||||
All overlay primitives (`dialog`, `alert-dialog`, `autocomplete`, `combobox`, `popover`, `dropdown-menu`, `context-menu`, `select`, `tooltip`, `toast`) render their content inside a [Base UI Portal] attached to `document.body`. This is the Base UI default — see the upstream [Portals][Base UI Portal] docs for the underlying behavior. Consumers **do not** need to wrap anything in a portal manually.
|
||||
Overlay primitives render their floating surfaces inside a [Base UI Portal] attached to `document.body`. This is the Base UI default — see the upstream [Portals][Base UI Portal] docs for the underlying behavior. Convenience content components such as `DialogContent`, `PopoverContent`, and `SelectContent` own their portal internally; primitives with explicit portal anatomy such as `Drawer` expose the matching `DrawerPortal` part so consumers can compose the full Base UI structure.
|
||||
|
||||
### Root isolation requirement
|
||||
|
||||
@ -83,19 +84,19 @@ Equivalent: any root element with `isolation: isolate` in CSS. Without it, overl
|
||||
|
||||
Every overlay primitive uses a single, shared z-index. Do **not** override it at call sites.
|
||||
|
||||
| Layer | z-index | Where |
|
||||
| ----------------------------------------------------------------------------------------------------------- | -------- | -------------------------------------------------------------------------- |
|
||||
| Overlays (Dialog, AlertDialog, Autocomplete, Combobox, Popover, DropdownMenu, ContextMenu, Select, Tooltip) | `z-1002` | Positioner / Backdrop |
|
||||
| Toast viewport | `z-1003` | One layer above overlays so notifications are never hidden under a dialog. |
|
||||
| Layer | z-index | Where |
|
||||
| ------------------------------------------------------------------------------------------------------------------- | -------- | -------------------------------------------------------------------------- |
|
||||
| Overlays (Dialog, AlertDialog, Autocomplete, Combobox, Drawer, Popover, DropdownMenu, ContextMenu, Select, Tooltip) | `z-1002` | Positioner / Backdrop |
|
||||
| Toast viewport | `z-1003` | One layer above overlays so notifications are never hidden under a dialog. |
|
||||
|
||||
Rationale: during Dify's migration from legacy `base/modal` / `base/dialog` overlays to this package, new and old overlays coexist in the DOM. `z-1002` sits above any common legacy layer, eliminating per-call-site z-index hacks. Among themselves, new primitives share the same z-index and **rely on DOM order** for stacking — the portal mounted later wins.
|
||||
Rationale: during Dify's migration from legacy `base/modal` / `base/dialog` / `base/drawer` / `base/drawer-plus` overlays to this package, new and old overlays coexist in the DOM. `z-1002` sits above any common legacy layer, eliminating per-call-site z-index hacks. Among themselves, new primitives share the same z-index and **rely on DOM order** for stacking — the portal mounted later wins.
|
||||
|
||||
See `[web/docs/overlay-migration.md](../../web/docs/overlay-migration.md)` for the Dify-web migration history. Once the legacy overlays are gone, the values in this table can drop back to `z-50` / `z-51`.
|
||||
|
||||
### Rules
|
||||
|
||||
- Never add `z-1003` / `z-9999` / etc. overrides on primitives from this package. If something is getting clipped, the **parent** overlay (typically a legacy one) is the problem and should be migrated.
|
||||
- Never portal an overlay manually on top of our primitives — use `DialogTrigger`, `PopoverTrigger`, etc. Base UI handles focus management, scroll-locking, and dismissal.
|
||||
- Never create an extra manual portal on top of our primitives — use the exported content / portal parts such as `DialogContent`, `PopoverContent`, and `DrawerPortal`. Base UI handles focus management, scroll-locking, and dismissal.
|
||||
- When a primitive needs additional presentation chrome (e.g. a custom backdrop), add it **inside** the exported component, not at call sites.
|
||||
|
||||
## Development
|
||||
|
||||
@ -37,6 +37,10 @@
|
||||
"types": "./src/dialog/index.tsx",
|
||||
"import": "./src/dialog/index.tsx"
|
||||
},
|
||||
"./drawer": {
|
||||
"types": "./src/drawer/index.tsx",
|
||||
"import": "./src/drawer/index.tsx"
|
||||
},
|
||||
"./dropdown-menu": {
|
||||
"types": "./src/dropdown-menu/index.tsx",
|
||||
"import": "./src/dropdown-menu/index.tsx"
|
||||
|
||||
61
packages/dify-ui/src/drawer/__tests__/index.spec.tsx
Normal file
61
packages/dify-ui/src/drawer/__tests__/index.spec.tsx
Normal file
@ -0,0 +1,61 @@
|
||||
import { render } from 'vitest-browser-react'
|
||||
import {
|
||||
Drawer,
|
||||
DrawerBackdrop,
|
||||
DrawerCloseButton,
|
||||
DrawerContent,
|
||||
DrawerDescription,
|
||||
DrawerPopup,
|
||||
DrawerPortal,
|
||||
DrawerTitle,
|
||||
DrawerTrigger,
|
||||
DrawerViewport,
|
||||
} from '../index'
|
||||
|
||||
const asHTMLElement = (element: HTMLElement | SVGElement) => element as HTMLElement
|
||||
|
||||
describe('Drawer wrapper', () => {
|
||||
describe('User Interactions', () => {
|
||||
it('should open a portalled drawer and close it with the default close button', async () => {
|
||||
const screen = await render(
|
||||
<Drawer>
|
||||
<DrawerTrigger>Open settings</DrawerTrigger>
|
||||
<DrawerPortal>
|
||||
<DrawerBackdrop data-testid="drawer-backdrop" />
|
||||
<DrawerViewport>
|
||||
<DrawerPopup>
|
||||
<DrawerTitle>Settings</DrawerTitle>
|
||||
<DrawerDescription>Configure the current workspace.</DrawerDescription>
|
||||
<DrawerContent>
|
||||
<p>Workspace controls</p>
|
||||
<DrawerCloseButton />
|
||||
</DrawerContent>
|
||||
</DrawerPopup>
|
||||
</DrawerViewport>
|
||||
</DrawerPortal>
|
||||
</Drawer>,
|
||||
)
|
||||
|
||||
expect(document.body.querySelector('[role="dialog"]')).not.toBeInTheDocument()
|
||||
|
||||
asHTMLElement(screen.getByRole('button', { name: 'Open settings' }).element()).click()
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(document.body.querySelector('[role="dialog"]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
const dialog = asHTMLElement(document.body.querySelector('[role="dialog"]')!)
|
||||
expect(document.body).toContainElement(dialog)
|
||||
expect(screen.container).not.toContainElement(dialog)
|
||||
await expect.element(dialog).toHaveTextContent('Workspace controls')
|
||||
await expect.element(screen.getByText('Configure the current workspace.')).toBeInTheDocument()
|
||||
await expect.element(screen.getByTestId('drawer-backdrop')).toHaveClass('z-1002')
|
||||
|
||||
asHTMLElement(screen.getByRole('button', { name: 'Close drawer' }).element()).click()
|
||||
|
||||
await vi.waitFor(() => {
|
||||
expect(document.body.querySelector('[role="dialog"]')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
116
packages/dify-ui/src/drawer/index.tsx
Normal file
116
packages/dify-ui/src/drawer/index.tsx
Normal file
@ -0,0 +1,116 @@
|
||||
'use client'
|
||||
|
||||
import type { ReactNode } from 'react'
|
||||
import { Drawer as BaseDrawer } from '@base-ui/react/drawer'
|
||||
import { cn } from '../cn'
|
||||
|
||||
export const Drawer = BaseDrawer.Root
|
||||
export const DrawerProvider = BaseDrawer.Provider
|
||||
export const DrawerIndent = BaseDrawer.Indent
|
||||
export const DrawerIndentBackground = BaseDrawer.IndentBackground
|
||||
export const DrawerTrigger = BaseDrawer.Trigger
|
||||
export const DrawerSwipeArea = BaseDrawer.SwipeArea
|
||||
export const DrawerPortal = BaseDrawer.Portal
|
||||
export const DrawerTitle = BaseDrawer.Title
|
||||
export const DrawerDescription = BaseDrawer.Description
|
||||
export const DrawerClose = BaseDrawer.Close
|
||||
export const createDrawerHandle = BaseDrawer.createHandle
|
||||
|
||||
export type DrawerRootProps<Payload = unknown> = BaseDrawer.Root.Props<Payload>
|
||||
export type DrawerRootActions = BaseDrawer.Root.Actions
|
||||
export type DrawerRootChangeEventDetails = BaseDrawer.Root.ChangeEventDetails
|
||||
export type DrawerRootChangeEventReason = BaseDrawer.Root.ChangeEventReason
|
||||
export type DrawerRootSnapPoint = BaseDrawer.Root.SnapPoint
|
||||
export type DrawerRootSnapPointChangeEventDetails = BaseDrawer.Root.SnapPointChangeEventDetails
|
||||
export type DrawerRootSnapPointChangeEventReason = BaseDrawer.Root.SnapPointChangeEventReason
|
||||
export type DrawerTriggerProps<Payload = unknown> = BaseDrawer.Trigger.Props<Payload>
|
||||
|
||||
export function DrawerBackdrop({
|
||||
className,
|
||||
...props
|
||||
}: BaseDrawer.Backdrop.Props) {
|
||||
return (
|
||||
<BaseDrawer.Backdrop
|
||||
className={cn(
|
||||
'fixed inset-0 z-1002 bg-background-overlay opacity-[calc(1-var(--drawer-swipe-progress,0))]',
|
||||
'transition-opacity duration-200 data-ending-style:opacity-0 data-starting-style:opacity-0 data-swiping:duration-0 motion-reduce:transition-none',
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export function DrawerViewport({
|
||||
className,
|
||||
...props
|
||||
}: BaseDrawer.Viewport.Props) {
|
||||
return (
|
||||
<BaseDrawer.Viewport
|
||||
className={cn('fixed inset-0 z-1002 touch-none overflow-hidden overscroll-contain outline-hidden', className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export function DrawerPopup({
|
||||
className,
|
||||
...props
|
||||
}: BaseDrawer.Popup.Props) {
|
||||
return (
|
||||
<BaseDrawer.Popup
|
||||
className={cn(
|
||||
'fixed z-1002 flex min-h-0 flex-col overflow-hidden border-[0.5px] border-components-panel-border bg-components-panel-bg text-text-primary shadow-xl outline-hidden touch-none',
|
||||
'transition-[transform,opacity,box-shadow] duration-200 data-swiping:select-none data-swiping:duration-0 motion-reduce:transition-none',
|
||||
'data-[swipe-direction=right]:inset-y-0 data-[swipe-direction=right]:right-0 data-[swipe-direction=right]:h-dvh data-[swipe-direction=right]:w-120 data-[swipe-direction=right]:max-w-[calc(100vw-2rem)] data-[swipe-direction=right]:rounded-l-2xl data-[swipe-direction=right]:border-r-0 data-[swipe-direction=right]:transform-[translateX(var(--drawer-swipe-movement-x,0px))]',
|
||||
'data-starting-style:data-[swipe-direction=right]:transform-[translateX(calc(100%+2px))] data-ending-style:data-[swipe-direction=right]:transform-[translateX(calc(100%+2px))]',
|
||||
'data-[swipe-direction=left]:inset-y-0 data-[swipe-direction=left]:left-0 data-[swipe-direction=left]:h-dvh data-[swipe-direction=left]:w-120 data-[swipe-direction=left]:max-w-[calc(100vw-2rem)] data-[swipe-direction=left]:rounded-r-2xl data-[swipe-direction=left]:border-l-0 data-[swipe-direction=left]:transform-[translateX(var(--drawer-swipe-movement-x,0px))]',
|
||||
'data-starting-style:data-[swipe-direction=left]:transform-[translateX(calc(-100%-2px))] data-ending-style:data-[swipe-direction=left]:transform-[translateX(calc(-100%-2px))]',
|
||||
'data-[swipe-direction=down]:inset-x-0 data-[swipe-direction=down]:bottom-0 data-[swipe-direction=down]:max-h-[calc(100dvh-2rem)] data-[swipe-direction=down]:w-full data-[swipe-direction=down]:rounded-t-2xl data-[swipe-direction=down]:border-b-0 data-[swipe-direction=down]:transform-[translateY(calc(var(--drawer-snap-point-offset,0px)+var(--drawer-swipe-movement-y,0px)))]',
|
||||
'data-starting-style:data-[swipe-direction=down]:transform-[translateY(calc(100%+2px))] data-ending-style:data-[swipe-direction=down]:transform-[translateY(calc(100%+2px))]',
|
||||
'data-[swipe-direction=up]:inset-x-0 data-[swipe-direction=up]:top-0 data-[swipe-direction=up]:max-h-[calc(100dvh-2rem)] data-[swipe-direction=up]:w-full data-[swipe-direction=up]:rounded-b-2xl data-[swipe-direction=up]:border-t-0 data-[swipe-direction=up]:transform-[translateY(var(--drawer-swipe-movement-y,0px))]',
|
||||
'data-starting-style:data-[swipe-direction=up]:transform-[translateY(calc(-100%-2px))] data-ending-style:data-[swipe-direction=up]:transform-[translateY(calc(-100%-2px))]',
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
export function DrawerContent({
|
||||
className,
|
||||
...props
|
||||
}: BaseDrawer.Content.Props) {
|
||||
return (
|
||||
<BaseDrawer.Content
|
||||
className={cn('min-h-0 flex-1 overflow-y-auto overscroll-contain p-6 pb-[calc(1.5rem+env(safe-area-inset-bottom,0))]', className)}
|
||||
{...props}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
type DrawerCloseButtonProps = Omit<BaseDrawer.Close.Props, 'children'> & {
|
||||
children?: ReactNode
|
||||
}
|
||||
|
||||
export function DrawerCloseButton({
|
||||
className,
|
||||
children,
|
||||
type = 'button',
|
||||
'aria-label': ariaLabel = 'Close drawer',
|
||||
...props
|
||||
}: DrawerCloseButtonProps) {
|
||||
return (
|
||||
<BaseDrawer.Close
|
||||
type={type}
|
||||
aria-label={ariaLabel}
|
||||
className={cn(
|
||||
'flex h-8 w-8 cursor-pointer items-center justify-center rounded-lg text-text-tertiary outline-hidden hover:bg-state-base-hover hover:text-text-secondary focus-visible:bg-state-base-hover focus-visible:ring-1 focus-visible:ring-components-input-border-hover disabled:cursor-not-allowed disabled:opacity-50',
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children ?? <span aria-hidden="true" className="i-ri-close-line h-4 w-4" />}
|
||||
</BaseDrawer.Close>
|
||||
)
|
||||
}
|
||||
@ -127,7 +127,7 @@ const createApp = (overrides: Partial<App> = {}): App => ({
|
||||
copyright: overrides.copyright ?? '',
|
||||
privacy_policy: overrides.privacy_policy ?? null,
|
||||
custom_disclaimer: overrides.custom_disclaimer ?? null,
|
||||
category: overrides.category ?? 'Writing',
|
||||
categories: overrides.categories ?? ['Writing'],
|
||||
position: overrides.position ?? 1,
|
||||
is_listed: overrides.is_listed ?? true,
|
||||
install_count: overrides.install_count ?? 0,
|
||||
@ -165,9 +165,9 @@ describe('Explore App List Flow', () => {
|
||||
mockExploreData = {
|
||||
categories: ['Writing', 'Translate', 'Programming'],
|
||||
allList: [
|
||||
createApp({ app_id: 'app-1', app: { ...createApp().app, name: 'Writer Bot' }, category: 'Writing' }),
|
||||
createApp({ app_id: 'app-2', app: { ...createApp().app, id: 'app-id-2', name: 'Translator' }, category: 'Translate' }),
|
||||
createApp({ app_id: 'app-3', app: { ...createApp().app, id: 'app-id-3', name: 'Code Helper' }, category: 'Programming' }),
|
||||
createApp({ app_id: 'app-1', app: { ...createApp().app, name: 'Writer Bot' }, categories: ['Writing'] }),
|
||||
createApp({ app_id: 'app-2', app: { ...createApp().app, id: 'app-id-2', name: 'Translator' }, categories: ['Translate'] }),
|
||||
createApp({ app_id: 'app-3', app: { ...createApp().app, id: 'app-id-3', name: 'Code Helper' }, categories: ['Programming'] }),
|
||||
],
|
||||
}
|
||||
})
|
||||
@ -190,6 +190,30 @@ describe('Explore App List Flow', () => {
|
||||
expect(screen.queryByText('Code Helper')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should only use categories when filtering by selected category', () => {
|
||||
mockTabValue = 'Writing'
|
||||
mockExploreData = {
|
||||
categories: ['Writing', 'Translate'],
|
||||
allList: [
|
||||
createApp({
|
||||
app_id: 'app-1',
|
||||
app: { ...createApp().app, name: 'Active Writer' },
|
||||
categories: ['Writing'],
|
||||
}),
|
||||
createApp({
|
||||
app_id: 'app-2',
|
||||
app: { ...createApp().app, id: 'app-id-2', name: 'Legacy Writer' },
|
||||
categories: [],
|
||||
}),
|
||||
],
|
||||
}
|
||||
|
||||
renderAppList()
|
||||
|
||||
expect(screen.getByText('Active Writer')).toBeInTheDocument()
|
||||
expect(screen.queryByText('Legacy Writer')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should filter apps by search keyword', async () => {
|
||||
renderAppList()
|
||||
|
||||
|
||||
@ -205,7 +205,7 @@ vi.mock('@/app/components/tools/setting/build-in/config-credentials', () => ({
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/workflow-tool', () => ({
|
||||
default: ({ onHide, onSave, onRemove }: { payload: unknown, onHide: () => void, onSave: (d: unknown) => void, onRemove: () => void }) => (
|
||||
WorkflowToolDrawer: ({ onHide, onSave, onRemove }: { payload: unknown, onHide: () => void, onSave: (d: unknown) => void, onRemove: () => void }) => (
|
||||
<div data-testid="workflow-tool-modal">
|
||||
<button data-testid="wf-modal-hide" onClick={onHide}>Hide</button>
|
||||
<button data-testid="wf-modal-save" onClick={() => onSave({ name: 'updated-wf' })}>Save</button>
|
||||
|
||||
@ -39,7 +39,9 @@ const AuthenticatedLayout = ({ children }: { children: React.ReactNode }) => {
|
||||
const getSigninUrl = useCallback(() => {
|
||||
const params = new URLSearchParams(searchParams)
|
||||
params.delete('message')
|
||||
params.set('redirect_url', pathname)
|
||||
const query = params.toString()
|
||||
const fullPath = query ? `${pathname}?${query}` : pathname
|
||||
params.set('redirect_url', fullPath)
|
||||
return `/webapp-signin?${params.toString()}`
|
||||
}, [searchParams, pathname])
|
||||
|
||||
|
||||
@ -97,7 +97,7 @@ const AppInfoDetailPanel = ({
|
||||
<ContentDialog
|
||||
show={show}
|
||||
onClose={onClose}
|
||||
className="absolute top-2 bottom-2 left-2 flex w-[420px] flex-col rounded-2xl p-0!"
|
||||
className="absolute top-2 bottom-2 left-2 flex w-[452px] max-w-[calc(100vw-1rem)] flex-col rounded-2xl p-0!"
|
||||
>
|
||||
<div className="flex shrink-0 flex-col items-start justify-center gap-3 self-stretch p-4">
|
||||
<div className="flex items-center gap-3 self-stretch">
|
||||
|
||||
@ -20,6 +20,7 @@ const mockOpenAsyncWindow = vi.fn()
|
||||
const mockFetchInstalledAppList = vi.fn()
|
||||
const mockFetchAppDetailDirect = vi.fn()
|
||||
const mockToastError = vi.fn()
|
||||
const mockWindowOpen = vi.fn()
|
||||
const mockInvalidateAppWorkflow = vi.fn()
|
||||
|
||||
const sectionProps = vi.hoisted(() => ({
|
||||
@ -37,6 +38,7 @@ vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
Trans: ({ i18nKey }: { i18nKey?: string }) => i18nKey ?? null,
|
||||
}))
|
||||
|
||||
vi.mock('ahooks', async () => {
|
||||
@ -91,6 +93,21 @@ vi.mock('@/service/use-workflow', () => ({
|
||||
useInvalidateAppWorkflow: () => mockInvalidateAppWorkflow,
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-tools', () => ({
|
||||
useWorkflowToolDetailByAppID: () => ({
|
||||
data: undefined,
|
||||
isLoading: false,
|
||||
}),
|
||||
useInvalidateAllWorkflowTools: () => vi.fn(),
|
||||
useInvalidateWorkflowToolDetailByAppID: () => vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useAppContext: () => ({
|
||||
isCurrentWorkspaceManager: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@langgenius/dify-ui/toast', () => ({
|
||||
toast: {
|
||||
error: (...args: unknown[]) => mockToastError(...args),
|
||||
@ -121,6 +138,15 @@ vi.mock('../../app-access-control', () => ({
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/tools/workflow-tool', () => ({
|
||||
WorkflowToolDrawer: ({ onHide }: { onHide: () => void }) => (
|
||||
<div data-testid="workflow-tool-drawer">
|
||||
workflow tool drawer
|
||||
<button onClick={onHide}>close-workflow-tool-drawer</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@langgenius/dify-ui/popover', () => import('@/__mocks__/base-ui-popover'))
|
||||
|
||||
vi.mock('../sections', () => ({
|
||||
@ -143,6 +169,13 @@ vi.mock('../sections', () => ({
|
||||
<div>
|
||||
<button onClick={props.handleEmbed}>publisher-embed</button>
|
||||
<button onClick={() => void props.handleOpenInExplore()}>publisher-open-in-explore</button>
|
||||
{props.handleOpenRunConfig && (
|
||||
<>
|
||||
<button onClick={() => props.handleOpenRunConfig(props.appURL)}>publisher-run-config</button>
|
||||
<button onClick={() => props.handleOpenRunConfig(`${props.appURL}?mode=batch`)}>publisher-batch-run-config</button>
|
||||
</>
|
||||
)}
|
||||
<button onClick={props.onConfigureWorkflowTool}>publisher-workflow-tool</button>
|
||||
</div>
|
||||
)
|
||||
},
|
||||
@ -175,6 +208,10 @@ describe('AppPublisher', () => {
|
||||
mockOpenAsyncWindow.mockImplementation(async (resolver: () => Promise<string>) => {
|
||||
await resolver()
|
||||
})
|
||||
Object.defineProperty(window, 'open', {
|
||||
writable: true,
|
||||
value: mockWindowOpen,
|
||||
})
|
||||
})
|
||||
|
||||
it('should open the publish popover and refetch access permission data', async () => {
|
||||
@ -231,6 +268,94 @@ describe('AppPublisher', () => {
|
||||
expect(screen.getByTestId('embedded-modal'))!.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should collect hidden inputs before opening published run links from config actions', async () => {
|
||||
render(
|
||||
<AppPublisher
|
||||
publishedAt={Date.now()}
|
||||
inputs={[{
|
||||
variable: 'secret',
|
||||
label: 'Secret',
|
||||
type: 'text-input',
|
||||
required: true,
|
||||
hide: true,
|
||||
default: '',
|
||||
} as any]}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('common.publish'))
|
||||
fireEvent.click(screen.getByText('publisher-run-config'))
|
||||
|
||||
expect(screen.getByText('overview.appInfo.workflowLaunchHiddenInputs.title')).toBeInTheDocument()
|
||||
|
||||
fireEvent.change(screen.getByLabelText('Secret'), {
|
||||
target: { value: 'top-secret' },
|
||||
})
|
||||
fireEvent.click(screen.getByRole('button', { name: 'overview.appInfo.launch' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/chat/token-1?secret=${encodeURIComponent('top-secret')}`,
|
||||
'_blank',
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should open batch run config links with the configured hidden inputs', async () => {
|
||||
mockAppDetail = {
|
||||
...mockAppDetail,
|
||||
mode: AppModeEnum.WORKFLOW,
|
||||
}
|
||||
|
||||
render(
|
||||
<AppPublisher
|
||||
publishedAt={Date.now()}
|
||||
inputs={[{
|
||||
variable: 'batch_secret',
|
||||
label: 'Batch Secret',
|
||||
type: 'text-input',
|
||||
required: true,
|
||||
hide: true,
|
||||
default: '',
|
||||
} as any]}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('common.publish'))
|
||||
fireEvent.click(screen.getByText('publisher-batch-run-config'))
|
||||
|
||||
fireEvent.change(screen.getByLabelText('Batch Secret'), {
|
||||
target: { value: 'batch-value' },
|
||||
})
|
||||
fireEvent.click(screen.getByRole('button', { name: 'overview.appInfo.launch' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/workflow/token-1?mode=batch&batch_secret=${encodeURIComponent('batch-value')}`,
|
||||
'_blank',
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should keep workflow tool drawer mounted after closing the publish popover', () => {
|
||||
mockAppDetail = {
|
||||
...mockAppDetail,
|
||||
mode: AppModeEnum.WORKFLOW,
|
||||
}
|
||||
|
||||
render(
|
||||
<AppPublisher
|
||||
publishedAt={Date.now()}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('common.publish'))
|
||||
fireEvent.click(screen.getByText('publisher-workflow-tool'))
|
||||
|
||||
expect(screen.queryByTestId('popover-content')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('workflow-tool-drawer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should close embedded and access control panels through child callbacks', async () => {
|
||||
render(
|
||||
<AppPublisher
|
||||
|
||||
@ -18,8 +18,32 @@ vi.mock('../publish-with-multiple-model', () => ({
|
||||
}))
|
||||
|
||||
vi.mock('../suggested-action', () => ({
|
||||
default: ({ children, onClick, link, disabled }: { children: ReactNode, onClick?: () => void, link?: string, disabled?: boolean }) => (
|
||||
<button type="button" data-link={link} disabled={disabled} onClick={onClick}>{children}</button>
|
||||
default: ({
|
||||
children,
|
||||
onClick,
|
||||
link,
|
||||
disabled,
|
||||
actionButton,
|
||||
}: {
|
||||
children: ReactNode
|
||||
onClick?: () => void
|
||||
link?: string
|
||||
disabled?: boolean
|
||||
actionButton?: { ariaLabel: string, onClick: () => void }
|
||||
}) => (
|
||||
<div>
|
||||
<button type="button" data-link={link} disabled={disabled} onClick={onClick}>{children}</button>
|
||||
{actionButton && (
|
||||
<button
|
||||
type="button"
|
||||
aria-label={actionButton.ariaLabel}
|
||||
disabled={disabled}
|
||||
onClick={actionButton.onClick}
|
||||
>
|
||||
{actionButton.ariaLabel}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
@ -170,9 +194,25 @@ describe('app-publisher sections', () => {
|
||||
expect(render(<AccessModeDisplay />).container).toBeEmptyDOMElement()
|
||||
})
|
||||
|
||||
it('should hide access control content when enabled is false', () => {
|
||||
render(
|
||||
<PublisherAccessSection
|
||||
enabled={false}
|
||||
isAppAccessSet
|
||||
isLoading={false}
|
||||
accessMode={AccessMode.PUBLIC}
|
||||
onClick={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.queryByText('publishApp.title')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('accessControlDialog.accessItems.anyone')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render workflow actions, batch run links, and workflow tool configuration', () => {
|
||||
const handleOpenInExplore = vi.fn()
|
||||
const handleEmbed = vi.fn()
|
||||
const handleOpenRunConfig = vi.fn()
|
||||
|
||||
const { rerender } = render(
|
||||
<PublisherActionsSection
|
||||
@ -190,22 +230,30 @@ describe('app-publisher sections', () => {
|
||||
disabledFunctionTooltip="disabled"
|
||||
handleEmbed={handleEmbed}
|
||||
handleOpenInExplore={handleOpenInExplore}
|
||||
handleOpenRunConfig={handleOpenRunConfig}
|
||||
handlePublish={vi.fn()}
|
||||
hasHumanInputNode={false}
|
||||
hasTriggerNode={false}
|
||||
inputs={[]}
|
||||
missingStartNode={false}
|
||||
onRefreshData={vi.fn()}
|
||||
outputs={[]}
|
||||
published={true}
|
||||
published={false}
|
||||
publishedAt={Date.now()}
|
||||
showBatchRunConfig
|
||||
showRunConfig
|
||||
toolPublished
|
||||
workflowToolAvailable={false}
|
||||
workflowToolIsLoading={false}
|
||||
workflowToolOutdated={false}
|
||||
workflowToolIsCurrentWorkspaceManager
|
||||
workflowToolMessage="workflow-disabled"
|
||||
onConfigureWorkflowTool={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('common.batchRunApp')).toHaveAttribute('data-link', 'https://example.com/app?mode=batch')
|
||||
fireEvent.click(screen.getAllByRole('button', { name: 'operation.config' })[0]!)
|
||||
expect(handleOpenRunConfig).toHaveBeenCalledWith('https://example.com/app')
|
||||
fireEvent.click(screen.getAllByRole('button', { name: 'operation.config' })[1]!)
|
||||
expect(handleOpenRunConfig).toHaveBeenCalledWith('https://example.com/app?mode=batch')
|
||||
fireEvent.click(screen.getByText('common.openInExplore'))
|
||||
expect(handleOpenInExplore).toHaveBeenCalled()
|
||||
expect(screen.getByText('workflow-tool-configure')).toBeInTheDocument()
|
||||
@ -223,17 +271,19 @@ describe('app-publisher sections', () => {
|
||||
disabledFunctionTooltip="disabled"
|
||||
handleEmbed={handleEmbed}
|
||||
handleOpenInExplore={handleOpenInExplore}
|
||||
handleOpenRunConfig={handleOpenRunConfig}
|
||||
handlePublish={vi.fn()}
|
||||
hasHumanInputNode={false}
|
||||
hasTriggerNode={false}
|
||||
inputs={[]}
|
||||
missingStartNode
|
||||
onRefreshData={vi.fn()}
|
||||
outputs={[]}
|
||||
published={false}
|
||||
publishedAt={Date.now()}
|
||||
toolPublished={false}
|
||||
workflowToolAvailable
|
||||
workflowToolIsLoading={false}
|
||||
workflowToolOutdated={false}
|
||||
workflowToolIsCurrentWorkspaceManager
|
||||
onConfigureWorkflowTool={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
@ -248,16 +298,19 @@ describe('app-publisher sections', () => {
|
||||
disabledFunctionButton={false}
|
||||
handleEmbed={handleEmbed}
|
||||
handleOpenInExplore={handleOpenInExplore}
|
||||
handleOpenRunConfig={handleOpenRunConfig}
|
||||
handlePublish={vi.fn()}
|
||||
hasHumanInputNode={false}
|
||||
hasTriggerNode
|
||||
inputs={[]}
|
||||
missingStartNode={false}
|
||||
outputs={[]}
|
||||
published={false}
|
||||
publishedAt={undefined}
|
||||
toolPublished={false}
|
||||
workflowToolAvailable
|
||||
workflowToolIsLoading={false}
|
||||
workflowToolOutdated={false}
|
||||
workflowToolIsCurrentWorkspaceManager
|
||||
onConfigureWorkflowTool={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
|
||||
@ -46,4 +46,47 @@ describe('SuggestedAction', () => {
|
||||
|
||||
expect(handleClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should render and trigger the trailing action button when configured', () => {
|
||||
const handleActionClick = vi.fn()
|
||||
|
||||
render(
|
||||
<SuggestedAction
|
||||
link="https://example.com/docs"
|
||||
actionButton={{
|
||||
ariaLabel: 'Configure action',
|
||||
icon: <span>config</span>,
|
||||
onClick: handleActionClick,
|
||||
}}
|
||||
>
|
||||
Configurable action
|
||||
</SuggestedAction>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Configure action' }))
|
||||
|
||||
expect(screen.getByRole('link', { name: 'Configurable action' })).toHaveAttribute('href', 'https://example.com/docs')
|
||||
expect(handleActionClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should block action button clicks when disabled', () => {
|
||||
const handleActionClick = vi.fn()
|
||||
|
||||
render(
|
||||
<SuggestedAction
|
||||
link="https://example.com/docs"
|
||||
disabled
|
||||
actionButton={{
|
||||
ariaLabel: 'Configure action',
|
||||
icon: <span>config</span>,
|
||||
onClick: handleActionClick,
|
||||
}}
|
||||
>
|
||||
Disabled with action
|
||||
</SuggestedAction>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'Configure action' }))
|
||||
expect(handleActionClick).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,28 +1,40 @@
|
||||
import type { FormEvent } from 'react'
|
||||
import type { ModelAndParameter } from '../configuration/debug/types'
|
||||
import type { WorkflowHiddenStartVariable, WorkflowLaunchInputValue } from '@/app/components/app/overview/app-card-utils'
|
||||
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
|
||||
import type { InputVar, Variable } from '@/app/components/workflow/types'
|
||||
import type { PublishWorkflowParams } from '@/types/workflow'
|
||||
import { Button } from '@langgenius/dify-ui/button'
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@langgenius/dify-ui/popover'
|
||||
import { toast } from '@langgenius/dify-ui/toast'
|
||||
import { RiStoreLine } from '@remixicon/react'
|
||||
import { useSuspenseQuery } from '@tanstack/react-query'
|
||||
import { useKeyPress } from 'ahooks'
|
||||
import {
|
||||
|
||||
memo,
|
||||
use,
|
||||
useCallback,
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { WorkflowLaunchDialog } from '@/app/components/app/overview/app-card-sections'
|
||||
import {
|
||||
buildWorkflowLaunchUrl,
|
||||
createWorkflowLaunchInitialValues,
|
||||
isWorkflowLaunchInputSupported,
|
||||
|
||||
} from '@/app/components/app/overview/app-card-utils'
|
||||
import EmbeddedModal from '@/app/components/app/overview/embedded'
|
||||
import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { trackEvent } from '@/app/components/base/amplitude'
|
||||
import { WorkflowToolDrawer } from '@/app/components/tools/workflow-tool'
|
||||
import { useConfigureButton } from '@/app/components/tools/workflow-tool/hooks/use-configure-button'
|
||||
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
|
||||
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
|
||||
import { WorkflowContext } from '@/app/components/workflow/context'
|
||||
import { appDefaultIconBackground } from '@/config'
|
||||
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
|
||||
import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now'
|
||||
import { AccessMode } from '@/models/access-control'
|
||||
@ -57,8 +69,8 @@ export type AppPublisherProps = {
|
||||
debugWithMultipleModel?: boolean
|
||||
multipleModelConfigs?: ModelAndParameter[]
|
||||
/** modelAndParameter is passed when debugWithMultipleModel is true */
|
||||
onPublish?: (params?: any) => Promise<any> | any
|
||||
onRestore?: () => Promise<any> | any
|
||||
onPublish?: AppPublisherPublishHandler
|
||||
onRestore?: AppPublisherRestoreHandler
|
||||
onToggle?: (state: boolean) => void
|
||||
crossAxisOffset?: number
|
||||
toolPublished?: boolean
|
||||
@ -74,6 +86,12 @@ export type AppPublisherProps = {
|
||||
|
||||
const PUBLISH_SHORTCUT = ['ctrl', '⇧', 'P']
|
||||
|
||||
type AppPublisherPublishHandler
|
||||
= | ((params?: ModelAndParameter | PublishWorkflowParams) => Promise<unknown> | unknown)
|
||||
| ((params?: unknown) => Promise<unknown> | unknown)
|
||||
|
||||
type AppPublisherRestoreHandler = () => Promise<unknown> | unknown
|
||||
|
||||
const AppPublisher = ({
|
||||
disabled = false,
|
||||
publishDisabled = false,
|
||||
@ -100,11 +118,15 @@ const AppPublisher = ({
|
||||
const [published, setPublished] = useState(false)
|
||||
const [open, setOpen] = useState(false)
|
||||
const [showAppAccessControl, setShowAppAccessControl] = useState(false)
|
||||
const [workflowToolDrawerOpen, setWorkflowToolDrawerOpen] = useState(false)
|
||||
|
||||
const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
|
||||
const [workflowLaunchDialogOpen, setWorkflowLaunchDialogOpen] = useState(false)
|
||||
const [workflowLaunchTargetUrl, setWorkflowLaunchTargetUrl] = useState('')
|
||||
const [workflowLaunchValues, setWorkflowLaunchValues] = useState<Record<string, WorkflowLaunchInputValue>>({})
|
||||
const [publishingToMarketplace, setPublishingToMarketplace] = useState(false)
|
||||
|
||||
const workflowStore = useContext(WorkflowContext)
|
||||
const workflowStore = use(WorkflowContext)
|
||||
const appDetail = useAppStore(state => state.appDetail)
|
||||
const setAppDetail = useAppStore(s => s.setAppDetail)
|
||||
const { data: systemFeatures } = useSuspenseQuery(systemFeaturesQueryOptions())
|
||||
@ -113,6 +135,22 @@ const AppPublisher = ({
|
||||
|
||||
const appURL = getPublisherAppUrl({ appBaseUrl: appBaseURL, accessToken, mode: appDetail?.mode })
|
||||
const isChatApp = [AppModeEnum.CHAT, AppModeEnum.AGENT_CHAT, AppModeEnum.COMPLETION].includes(appDetail?.mode || AppModeEnum.CHAT)
|
||||
const hiddenLaunchVariables = useMemo<WorkflowHiddenStartVariable[]>(
|
||||
() => (inputs ?? []).filter(input => input.hide === true),
|
||||
[inputs],
|
||||
)
|
||||
const supportedWorkflowLaunchVariables = useMemo(
|
||||
() => hiddenLaunchVariables.filter(isWorkflowLaunchInputSupported),
|
||||
[hiddenLaunchVariables],
|
||||
)
|
||||
const unsupportedWorkflowLaunchVariables = useMemo(
|
||||
() => hiddenLaunchVariables.filter(variable => !isWorkflowLaunchInputSupported(variable)),
|
||||
[hiddenLaunchVariables],
|
||||
)
|
||||
const initialWorkflowLaunchValues = useMemo(
|
||||
() => createWorkflowLaunchInitialValues(supportedWorkflowLaunchVariables),
|
||||
[supportedWorkflowLaunchVariables],
|
||||
)
|
||||
|
||||
const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
|
||||
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
|
||||
@ -222,6 +260,31 @@ const AppPublisher = ({
|
||||
}
|
||||
}, [appDetail, setAppDetail])
|
||||
|
||||
const handleOpenWorkflowLaunchDialog = useCallback((targetUrl: string) => {
|
||||
setWorkflowLaunchValues(initialWorkflowLaunchValues)
|
||||
setWorkflowLaunchTargetUrl(targetUrl)
|
||||
setWorkflowLaunchDialogOpen(true)
|
||||
}, [initialWorkflowLaunchValues])
|
||||
|
||||
const handleWorkflowLaunchValueChange = useCallback((variable: string, value: WorkflowLaunchInputValue) => {
|
||||
setWorkflowLaunchValues(prev => ({
|
||||
...prev,
|
||||
[variable]: value,
|
||||
}))
|
||||
}, [])
|
||||
|
||||
const handleWorkflowLaunchConfirm = useCallback(async (event: FormEvent<HTMLFormElement>) => {
|
||||
event.preventDefault()
|
||||
|
||||
const targetUrl = await buildWorkflowLaunchUrl({
|
||||
accessibleUrl: workflowLaunchTargetUrl,
|
||||
variables: supportedWorkflowLaunchVariables,
|
||||
values: workflowLaunchValues,
|
||||
})
|
||||
|
||||
window.open(targetUrl, '_blank')
|
||||
setWorkflowLaunchDialogOpen(false)
|
||||
}, [supportedWorkflowLaunchVariables, workflowLaunchTargetUrl, workflowLaunchValues])
|
||||
const handlePublishToMarketplace = useCallback(async () => {
|
||||
if (!appDetail?.id || publishingToMarketplace)
|
||||
return
|
||||
@ -273,6 +336,31 @@ const AppPublisher = ({
|
||||
const workflowToolMessage = !hasPublishedVersion || !workflowToolAvailable
|
||||
? t('common.workflowAsToolDisabledHint', { ns: 'workflow' })
|
||||
: undefined
|
||||
const workflowToolVisible = appDetail?.mode === AppModeEnum.WORKFLOW && !hasHumanInputNode && !hasTriggerNode
|
||||
const workflowToolPublished = !!toolPublished
|
||||
const closeWorkflowToolDrawer = useCallback(() => setWorkflowToolDrawerOpen(false), [])
|
||||
const workflowToolIcon = useMemo(() => ({
|
||||
content: (appDetail?.icon_type === 'image' ? '🤖' : appDetail?.icon) || '🤖',
|
||||
background: (appDetail?.icon_type === 'image' ? appDefaultIconBackground : appDetail?.icon_background) || appDefaultIconBackground,
|
||||
}), [appDetail?.icon, appDetail?.icon_background, appDetail?.icon_type])
|
||||
const workflowTool = useConfigureButton({
|
||||
enabled: workflowToolVisible,
|
||||
published: workflowToolPublished,
|
||||
detailNeedUpdate: workflowToolPublished && published,
|
||||
workflowAppId: appDetail?.id ?? '',
|
||||
icon: workflowToolIcon,
|
||||
name: appDetail?.name ?? '',
|
||||
description: appDetail?.description ?? '',
|
||||
inputs,
|
||||
outputs,
|
||||
handlePublish,
|
||||
onRefreshData,
|
||||
onConfigured: closeWorkflowToolDrawer,
|
||||
})
|
||||
const openWorkflowToolDrawer = useCallback(() => {
|
||||
handleOpenChange(false)
|
||||
setWorkflowToolDrawerOpen(true)
|
||||
}, [handleOpenChange])
|
||||
const upgradeHighlightStyle = useMemo(() => ({
|
||||
background: 'linear-gradient(97deg, var(--components-input-border-active-prompt-1, rgba(11, 165, 236, 0.95)) -3.64%, var(--components-input-border-active-prompt-2, rgba(21, 90, 239, 0.95)) 45.14%)',
|
||||
WebkitBackgroundClip: 'text',
|
||||
@ -343,23 +431,27 @@ const AppPublisher = ({
|
||||
handleOpenChange(false)
|
||||
handleOpenInExplore()
|
||||
}}
|
||||
handleOpenRunConfig={handleOpenWorkflowLaunchDialog}
|
||||
handlePublish={handlePublish}
|
||||
hasHumanInputNode={hasHumanInputNode}
|
||||
hasTriggerNode={hasTriggerNode}
|
||||
inputs={inputs}
|
||||
missingStartNode={missingStartNode}
|
||||
onRefreshData={onRefreshData}
|
||||
outputs={outputs}
|
||||
published={published}
|
||||
publishedAt={publishedAt}
|
||||
showBatchRunConfig={hiddenLaunchVariables.length > 0 && (appDetail?.mode === AppModeEnum.WORKFLOW || appDetail?.mode === AppModeEnum.COMPLETION)}
|
||||
showRunConfig={hiddenLaunchVariables.length > 0}
|
||||
toolPublished={toolPublished}
|
||||
workflowToolAvailable={workflowToolAvailable}
|
||||
workflowToolIsLoading={workflowTool.isLoading}
|
||||
workflowToolOutdated={workflowTool.outdated}
|
||||
workflowToolIsCurrentWorkspaceManager={workflowTool.isCurrentWorkspaceManager}
|
||||
workflowToolMessage={workflowToolMessage}
|
||||
onConfigureWorkflowTool={openWorkflowToolDrawer}
|
||||
/>
|
||||
{systemFeatures.enable_creators_platform && (
|
||||
<div className="border-t border-divider-subtle p-4">
|
||||
<SuggestedAction
|
||||
icon={<RiStoreLine className="h-4 w-4" />}
|
||||
icon={<span className="i-ri-store-line h-4 w-4" />}
|
||||
disabled={!publishedAt || publishingToMarketplace}
|
||||
onClick={handlePublishToMarketplace}
|
||||
>
|
||||
@ -377,9 +469,29 @@ const AppPublisher = ({
|
||||
onClose={() => setEmbeddingModalOpen(false)}
|
||||
appBaseUrl={appBaseURL}
|
||||
accessToken={accessToken}
|
||||
hiddenInputs={hiddenLaunchVariables}
|
||||
/>
|
||||
{showAppAccessControl && <AccessControl app={appDetail!} onConfirm={handleAccessControlUpdate} onClose={() => { setShowAppAccessControl(false) }} />}
|
||||
<WorkflowLaunchDialog
|
||||
t={t}
|
||||
open={workflowLaunchDialogOpen}
|
||||
hiddenVariables={supportedWorkflowLaunchVariables}
|
||||
unsupportedVariables={unsupportedWorkflowLaunchVariables}
|
||||
values={workflowLaunchValues}
|
||||
onOpenChange={setWorkflowLaunchDialogOpen}
|
||||
onValueChange={handleWorkflowLaunchValueChange}
|
||||
onSubmit={handleWorkflowLaunchConfirm}
|
||||
/>
|
||||
</Popover>
|
||||
{workflowToolDrawerOpen && (
|
||||
<WorkflowToolDrawer
|
||||
isAdd={!workflowToolPublished}
|
||||
payload={workflowTool.payload}
|
||||
onHide={closeWorkflowToolDrawer}
|
||||
onCreate={workflowTool.handleCreate}
|
||||
onSave={workflowTool.handleUpdate}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
@ -8,13 +8,12 @@ import {
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from '@langgenius/dify-ui/tooltip'
|
||||
import { RiSettings2Line } from '@remixicon/react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import { CodeBrowser } from '@/app/components/base/icons/src/vender/line/development'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
|
||||
import WorkflowToolConfigureButton from '@/app/components/tools/workflow-tool/configure-button'
|
||||
import { appDefaultIconBackground } from '@/config'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import ShortcutsName from '../../workflow/shortcuts-name'
|
||||
import PublishWithMultipleModel from './publish-with-multiple-model'
|
||||
@ -46,11 +45,8 @@ type AccessSectionProps = {
|
||||
|
||||
type ActionsSectionProps = Pick<AppPublisherProps, | 'hasHumanInputNode'
|
||||
| 'hasTriggerNode'
|
||||
| 'inputs'
|
||||
| 'missingStartNode'
|
||||
| 'onRefreshData'
|
||||
| 'toolPublished'
|
||||
| 'outputs'
|
||||
| 'publishedAt'
|
||||
| 'workflowToolAvailable'> & {
|
||||
appDetail: {
|
||||
@ -67,9 +63,16 @@ type ActionsSectionProps = Pick<AppPublisherProps, | 'hasHumanInputNode'
|
||||
disabledFunctionTooltip?: string
|
||||
handleEmbed: () => void
|
||||
handleOpenInExplore: () => void
|
||||
handleOpenRunConfig?: (url: string) => void
|
||||
handlePublish: (params?: ModelAndParameter | PublishWorkflowParams) => Promise<void>
|
||||
published: boolean
|
||||
showBatchRunConfig?: boolean
|
||||
showRunConfig?: boolean
|
||||
workflowToolIsLoading: boolean
|
||||
workflowToolOutdated: boolean
|
||||
workflowToolIsCurrentWorkspaceManager: boolean
|
||||
workflowToolMessage?: string
|
||||
onConfigureWorkflowTool: () => void
|
||||
}
|
||||
|
||||
export const AccessModeDisplay = ({ mode }: { mode?: keyof typeof ACCESS_MODE_MAP }) => {
|
||||
@ -256,18 +259,20 @@ export const PublisherActionsSection = ({
|
||||
disabledFunctionTooltip,
|
||||
handleEmbed,
|
||||
handleOpenInExplore,
|
||||
handlePublish,
|
||||
handleOpenRunConfig,
|
||||
hasHumanInputNode = false,
|
||||
hasTriggerNode = false,
|
||||
inputs,
|
||||
missingStartNode = false,
|
||||
onRefreshData,
|
||||
outputs,
|
||||
published,
|
||||
publishedAt,
|
||||
showBatchRunConfig = false,
|
||||
showRunConfig = false,
|
||||
toolPublished,
|
||||
workflowToolAvailable = true,
|
||||
workflowToolIsLoading,
|
||||
workflowToolOutdated,
|
||||
workflowToolIsCurrentWorkspaceManager,
|
||||
workflowToolMessage,
|
||||
onConfigureWorkflowTool,
|
||||
}: ActionsSectionProps) => {
|
||||
const { t } = useTranslation()
|
||||
|
||||
@ -284,6 +289,13 @@ export const PublisherActionsSection = ({
|
||||
disabled={disabledFunctionButton}
|
||||
link={appURL}
|
||||
icon={<span className="i-ri-play-circle-line h-4 w-4" />}
|
||||
actionButton={showRunConfig
|
||||
? {
|
||||
ariaLabel: t('operation.config', { ns: 'common' }),
|
||||
icon: <RiSettings2Line className="h-4 w-4" />,
|
||||
onClick: () => handleOpenRunConfig?.(appURL),
|
||||
}
|
||||
: undefined}
|
||||
>
|
||||
{t('common.runApp', { ns: 'workflow' })}
|
||||
</SuggestedAction>
|
||||
@ -296,6 +308,13 @@ export const PublisherActionsSection = ({
|
||||
disabled={disabledFunctionButton}
|
||||
link={`${appURL}${appURL.includes('?') ? '&' : '?'}mode=batch`}
|
||||
icon={<span className="i-ri-play-list-2-line h-4 w-4" />}
|
||||
actionButton={showBatchRunConfig
|
||||
? {
|
||||
ariaLabel: t('operation.config', { ns: 'common' }),
|
||||
icon: <RiSettings2Line className="h-4 w-4" />,
|
||||
onClick: () => handleOpenRunConfig?.(`${appURL}${appURL.includes('?') ? '&' : '?'}mode=batch`),
|
||||
}
|
||||
: undefined}
|
||||
>
|
||||
{t('common.batchRunApp', { ns: 'workflow' })}
|
||||
</SuggestedAction>
|
||||
@ -305,7 +324,7 @@ export const PublisherActionsSection = ({
|
||||
<SuggestedAction
|
||||
onClick={handleEmbed}
|
||||
disabled={!publishedAt}
|
||||
icon={<CodeBrowser className="h-4 w-4" />}
|
||||
icon={<span className="i-custom-vender-line-development-code-browser h-4 w-4" />}
|
||||
>
|
||||
{t('common.embedIntoSite', { ns: 'workflow' })}
|
||||
</SuggestedAction>
|
||||
@ -340,18 +359,10 @@ export const PublisherActionsSection = ({
|
||||
<WorkflowToolConfigureButton
|
||||
disabled={workflowToolDisabled}
|
||||
published={!!toolPublished}
|
||||
detailNeedUpdate={!!toolPublished && published}
|
||||
workflowAppId={appDetail?.id ?? ''}
|
||||
icon={{
|
||||
content: (appDetail.icon_type === 'image' ? '🤖' : appDetail?.icon) || '🤖',
|
||||
background: (appDetail.icon_type === 'image' ? appDefaultIconBackground : appDetail?.icon_background) || appDefaultIconBackground,
|
||||
}}
|
||||
name={appDetail?.name ?? ''}
|
||||
description={appDetail?.description ?? ''}
|
||||
inputs={inputs}
|
||||
outputs={outputs}
|
||||
handlePublish={handlePublish}
|
||||
onRefreshData={onRefreshData}
|
||||
isLoading={workflowToolIsLoading}
|
||||
outdated={workflowToolOutdated}
|
||||
isCurrentWorkspaceManager={workflowToolIsCurrentWorkspaceManager}
|
||||
onConfigure={onConfigureWorkflowTool}
|
||||
disabledReason={workflowToolMessage}
|
||||
/>
|
||||
)}
|
||||
|
||||
@ -1,33 +1,93 @@
|
||||
import type { HTMLProps, PropsWithChildren } from 'react'
|
||||
import type { HTMLProps, PropsWithChildren, MouseEvent as ReactMouseEvent } from 'react'
|
||||
import { cn } from '@langgenius/dify-ui/cn'
|
||||
import { RiArrowRightUpLine } from '@remixicon/react'
|
||||
|
||||
type SuggestedActionButton = {
|
||||
ariaLabel: string
|
||||
icon: React.ReactNode
|
||||
onClick: (event: ReactMouseEvent<HTMLButtonElement>) => void
|
||||
}
|
||||
|
||||
type SuggestedActionProps = PropsWithChildren<HTMLProps<HTMLAnchorElement> & {
|
||||
icon?: React.ReactNode
|
||||
link?: string
|
||||
disabled?: boolean
|
||||
actionButton?: SuggestedActionButton
|
||||
}>
|
||||
|
||||
const SuggestedAction = ({ icon, link, disabled, children, className, onClick, ...props }: SuggestedActionProps) => {
|
||||
const handleClick = (e: React.MouseEvent<HTMLAnchorElement>) => {
|
||||
if (disabled)
|
||||
const SuggestedAction = ({
|
||||
icon,
|
||||
link,
|
||||
disabled,
|
||||
children,
|
||||
className,
|
||||
onClick,
|
||||
actionButton,
|
||||
...props
|
||||
}: SuggestedActionProps) => {
|
||||
const handleClick = (event: ReactMouseEvent<HTMLAnchorElement>) => {
|
||||
if (disabled) {
|
||||
event.preventDefault()
|
||||
return
|
||||
onClick?.(e)
|
||||
}
|
||||
|
||||
onClick?.(event)
|
||||
}
|
||||
return (
|
||||
|
||||
const handleActionClick = (event: ReactMouseEvent<HTMLButtonElement>) => {
|
||||
if (disabled) {
|
||||
event.preventDefault()
|
||||
return
|
||||
}
|
||||
|
||||
actionButton?.onClick(event)
|
||||
}
|
||||
|
||||
const mainAction = (
|
||||
<a
|
||||
href={disabled ? undefined : link}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className={cn('flex items-center justify-start gap-2 rounded-lg bg-background-section-burn px-2.5 py-2 text-text-secondary transition-colors not-first:mt-1', disabled ? 'cursor-not-allowed opacity-30 shadow-xs' : 'cursor-pointer text-text-secondary hover:bg-state-accent-hover hover:text-text-accent', className)}
|
||||
className={cn(
|
||||
'flex min-w-0 items-center justify-start gap-2 px-2.5 py-2 text-text-secondary transition-colors',
|
||||
actionButton ? 'flex-1 rounded-l-lg' : 'rounded-lg bg-background-section-burn not-first:mt-1',
|
||||
disabled ? 'cursor-not-allowed opacity-30 shadow-xs' : 'cursor-pointer hover:bg-state-accent-hover hover:text-text-accent',
|
||||
)}
|
||||
onClick={handleClick}
|
||||
{...props}
|
||||
>
|
||||
<div className="relative h-4 w-4">{icon}</div>
|
||||
<div className="relative h-4 w-4 shrink-0">{icon}</div>
|
||||
<div className="shrink grow basis-0 system-sm-medium">{children}</div>
|
||||
<RiArrowRightUpLine className="h-3.5 w-3.5" />
|
||||
<RiArrowRightUpLine className="h-3.5 w-3.5 shrink-0" />
|
||||
</a>
|
||||
)
|
||||
|
||||
if (!actionButton)
|
||||
return mainAction
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
'flex items-stretch rounded-lg bg-background-section-burn not-first:mt-1',
|
||||
disabled ? 'opacity-30 shadow-xs' : '',
|
||||
className,
|
||||
)}
|
||||
>
|
||||
{mainAction}
|
||||
<button
|
||||
type="button"
|
||||
aria-label={actionButton.ariaLabel}
|
||||
disabled={disabled}
|
||||
className={cn(
|
||||
'flex w-9 shrink-0 items-center justify-center rounded-r-lg border-l-[0.5px] border-divider-subtle text-text-tertiary transition-colors',
|
||||
disabled ? 'cursor-not-allowed' : 'cursor-pointer hover:bg-state-accent-hover hover:text-text-accent',
|
||||
)}
|
||||
onClick={handleActionClick}
|
||||
>
|
||||
{actionButton.icon}
|
||||
</button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default SuggestedAction
|
||||
|
||||
@ -4,6 +4,29 @@ import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
import ConfigModalFormFields from '../form-fields'
|
||||
|
||||
vi.mock('react-i18next', async () => {
|
||||
const React = await import('react')
|
||||
return {
|
||||
useTranslation: () => ({
|
||||
t: (key: string, options?: Record<string, unknown>) => {
|
||||
const ns = options?.ns as string | undefined
|
||||
return ns ? `${ns}.${key}` : key
|
||||
},
|
||||
i18n: { language: 'en', changeLanguage: vi.fn() },
|
||||
}),
|
||||
Trans: ({ i18nKey, components }: { i18nKey: string, components?: Record<string, ReactNode> }) => (
|
||||
<span data-i18n-key={i18nKey}>
|
||||
{i18nKey}
|
||||
{components?.docLink}
|
||||
</span>
|
||||
),
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useDocLink: () => (path?: string) => `https://docs.example.com${path || ''}`,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/file-uploader', () => ({
|
||||
FileUploaderInAttachmentWrapper: ({
|
||||
onChange,
|
||||
@ -74,6 +97,12 @@ vi.mock('@langgenius/dify-ui/select', async (importOriginal) => {
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('@langgenius/dify-ui/tooltip', () => ({
|
||||
Tooltip: ({ children }: { children: ReactNode }) => <div>{children}</div>,
|
||||
TooltipTrigger: ({ children }: { children: ReactNode }) => <div>{children}</div>,
|
||||
TooltipContent: ({ children }: { children: ReactNode }) => <div>{children}</div>,
|
||||
}))
|
||||
|
||||
vi.mock('../field', () => ({
|
||||
default: ({ children, title }: { children: ReactNode, title: string }) => (
|
||||
<div>
|
||||
@ -176,7 +205,18 @@ describe('ConfigModalFormFields', () => {
|
||||
expect(selectProps.payloadChangeHandlers.default).toHaveBeenCalledWith('beta')
|
||||
})
|
||||
|
||||
it('should wire file, json schema, and visibility controls', () => {
|
||||
it('should wire file, json schema, and visibility controls', async () => {
|
||||
const textInputProps = createBaseProps()
|
||||
const textInputView = render(<ConfigModalFormFields {...textInputProps} />)
|
||||
expect(screen.getByText('variableConfig.hidden')).toBeInTheDocument()
|
||||
fireEvent.click(screen.getByRole('button', { name: 'variableConfig.hiddenDescription' }))
|
||||
expect(await screen.findByText('variableConfig.hiddenDescription')).toBeInTheDocument()
|
||||
const docLink = await screen.findByRole('link')
|
||||
expect(docLink).toHaveAttribute('href', 'https://docs.example.com/use-dify/nodes/user-input#hide-and-pre-fill-input-fields')
|
||||
expect(docLink).toHaveAttribute('target', '_blank')
|
||||
expect(docLink).toHaveAttribute('rel', 'noopener noreferrer')
|
||||
textInputView.unmount()
|
||||
|
||||
const singleFileProps = createBaseProps()
|
||||
singleFileProps.tempPayload = {
|
||||
...singleFileProps.tempPayload,
|
||||
@ -185,18 +225,20 @@ describe('ConfigModalFormFields', () => {
|
||||
allowed_file_extensions: [],
|
||||
allowed_file_upload_methods: ['remote_url'],
|
||||
}
|
||||
render(<ConfigModalFormFields {...singleFileProps} />)
|
||||
const singleFileView = render(<ConfigModalFormFields {...singleFileProps} />)
|
||||
expect(screen.queryByText('variableConfig.hidden')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('variableConfig.hiddenDescription')).not.toBeInTheDocument()
|
||||
fireEvent.click(screen.getByText('single-file-setting'))
|
||||
fireEvent.click(screen.getByText('upload-file'))
|
||||
fireEvent.click(screen.getAllByText('unchecked')[0]!)
|
||||
fireEvent.click(screen.getAllByText('unchecked')[1]!)
|
||||
|
||||
expect(singleFileProps.onFilePayloadChange).toHaveBeenCalledWith({ number_limits: 1 })
|
||||
expect(singleFileProps.payloadChangeHandlers.default).toHaveBeenCalledWith(expect.objectContaining({
|
||||
fileId: 'file-1',
|
||||
}))
|
||||
expect(singleFileProps.payloadChangeHandlers.required).toHaveBeenCalledWith(true)
|
||||
expect(singleFileProps.payloadChangeHandlers.hide).toHaveBeenCalledWith(true)
|
||||
expect(singleFileProps.payloadChangeHandlers.hide).not.toHaveBeenCalled()
|
||||
singleFileView.unmount()
|
||||
|
||||
const multiFileProps = createBaseProps()
|
||||
multiFileProps.tempPayload = {
|
||||
@ -207,8 +249,9 @@ describe('ConfigModalFormFields', () => {
|
||||
allowed_file_upload_methods: ['remote_url'],
|
||||
}
|
||||
render(<ConfigModalFormFields {...multiFileProps} />)
|
||||
expect(screen.queryByText('variableConfig.hidden')).not.toBeInTheDocument()
|
||||
fireEvent.click(screen.getByText('multi-file-setting'))
|
||||
fireEvent.click(screen.getAllByText('upload-file')[1]!)
|
||||
fireEvent.click(screen.getAllByText('upload-file')[0]!)
|
||||
expect(multiFileProps.onFilePayloadChange).toHaveBeenCalledWith({ number_limits: 3 })
|
||||
expect(multiFileProps.payloadChangeHandlers.default).toHaveBeenCalledWith([
|
||||
expect.objectContaining({ fileId: 'file-1' }),
|
||||
@ -367,4 +410,23 @@ describe('ConfigModalFormFields', () => {
|
||||
|
||||
expect(screen.getByRole('spinbutton')).toHaveValue(null)
|
||||
})
|
||||
|
||||
it('should disable hide checkbox when required is true and disable required when hide is true', () => {
|
||||
const requiredProps = createBaseProps()
|
||||
requiredProps.tempPayload = { ...requiredProps.tempPayload, type: InputVarType.textInput, required: true, hide: false }
|
||||
const { unmount } = render(<ConfigModalFormFields {...requiredProps} />)
|
||||
|
||||
const buttons = screen.getAllByRole('button')
|
||||
const hideButton = buttons.find(btn => btn.textContent === 'unchecked' && btn !== buttons[0])
|
||||
expect(hideButton).toBeDefined()
|
||||
unmount()
|
||||
|
||||
const hideProps = createBaseProps()
|
||||
hideProps.tempPayload = { ...hideProps.tempPayload, type: InputVarType.textInput, required: false, hide: true }
|
||||
render(<ConfigModalFormFields {...hideProps} />)
|
||||
|
||||
const allButtons = screen.getAllByRole('button')
|
||||
const checkedHideButton = allButtons.find(btn => btn.textContent === 'checked')
|
||||
expect(checkedHideButton).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
@ -25,6 +25,7 @@ vi.mock('../form-fields', () => ({
|
||||
return (
|
||||
<div data-testid="config-form-fields">
|
||||
<div data-testid="payload-type">{String(props.tempPayload.type)}</div>
|
||||
<div data-testid="payload-hide">{String(props.tempPayload.hide)}</div>
|
||||
<div data-testid="payload-label">{String(props.tempPayload.label ?? '')}</div>
|
||||
<div data-testid="payload-schema">{String(props.tempPayload.json_schema ?? '')}</div>
|
||||
<div data-testid="payload-default">{String(props.tempPayload.default ?? '')}</div>
|
||||
@ -115,7 +116,7 @@ describe('ConfigModal logic', () => {
|
||||
})
|
||||
|
||||
it('should derive payload fields from mocked form-field callbacks', async () => {
|
||||
renderConfigModal()
|
||||
renderConfigModal(createPayload({ hide: true }))
|
||||
|
||||
fireEvent.click(screen.getByTestId('valid-key-blur'))
|
||||
await waitFor(() => {
|
||||
@ -138,6 +139,7 @@ describe('ConfigModal logic', () => {
|
||||
fireEvent.click(screen.getByTestId('type-change'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('payload-type')).toHaveTextContent(InputVarType.singleFile)
|
||||
expect(screen.getByTestId('payload-hide')).toHaveTextContent('false')
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('file-payload-change'))
|
||||
|
||||
@ -49,11 +49,13 @@ describe('config-modal utils', () => {
|
||||
const payload = createInputVar({
|
||||
type: InputVarType.textInput,
|
||||
default: 'hello',
|
||||
hide: true,
|
||||
})
|
||||
|
||||
const nextPayload = createPayloadForType(payload, InputVarType.multiFiles)
|
||||
|
||||
expect(nextPayload.type).toBe(InputVarType.multiFiles)
|
||||
expect(nextPayload.hide).toBe(false)
|
||||
expect(nextPayload.max_length).toBe(DEFAULT_FILE_UPLOAD_SETTING.max_length)
|
||||
expect(nextPayload.allowed_file_types).toEqual(DEFAULT_FILE_UPLOAD_SETTING.allowed_file_types)
|
||||
expect(nextPayload.default).toBe('hello')
|
||||
@ -249,6 +251,24 @@ describe('config-modal utils', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('should force file inputs to stay visible when saving', () => {
|
||||
const result = validateConfigModalPayload({
|
||||
tempPayload: createInputVar({
|
||||
type: InputVarType.singleFile,
|
||||
hide: true,
|
||||
allowed_file_types: [SupportUploadFileTypes.document],
|
||||
allowed_file_extensions: [],
|
||||
}),
|
||||
payload: createInputVar(),
|
||||
checkVariableName: () => true,
|
||||
t,
|
||||
})
|
||||
|
||||
expect(result.payloadToSave).toEqual(expect.objectContaining({
|
||||
hide: false,
|
||||
}))
|
||||
})
|
||||
|
||||
it('should stop validation when the variable name checker rejects the payload', () => {
|
||||
const result = validateConfigModalPayload({
|
||||
tempPayload: createInputVar({
|
||||
|
||||
@ -13,14 +13,17 @@ import {
|
||||
SelectValue,
|
||||
} from '@langgenius/dify-ui/select'
|
||||
import * as React from 'react'
|
||||
import { Trans } from 'react-i18next'
|
||||
import Checkbox from '@/app/components/base/checkbox'
|
||||
import { FileUploaderInAttachmentWrapper } from '@/app/components/base/file-uploader'
|
||||
import { Infotip } from '@/app/components/base/infotip'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Textarea from '@/app/components/base/textarea'
|
||||
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
|
||||
import FileUploadSetting from '@/app/components/workflow/nodes/_base/components/file-upload-setting'
|
||||
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
|
||||
import { InputVarType, SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { useDocLink } from '@/context/i18n'
|
||||
import { TransferMethod } from '@/types/app'
|
||||
import ConfigSelect from '../config-select'
|
||||
import ConfigString from '../config-string'
|
||||
@ -68,6 +71,9 @@ const ConfigModalFormFields: FC<ConfigModalFormFieldsProps> = ({
|
||||
t,
|
||||
}) => {
|
||||
const { type, label, variable } = tempPayload
|
||||
const isFileInput = [InputVarType.singleFile, InputVarType.multiFiles].includes(type)
|
||||
const docLink = useDocLink()
|
||||
const hiddenDescriptionAriaLabel = t('variableConfig.hiddenDescription', { ns: 'appDebug' }).replace(/<[^>]+>/g, '')
|
||||
|
||||
return (
|
||||
<div className="space-y-2">
|
||||
@ -105,7 +111,7 @@ const ConfigModalFormFields: FC<ConfigModalFormFieldsProps> = ({
|
||||
{type === InputVarType.textInput && (
|
||||
<Field title={t('variableConfig.defaultValue', { ns: 'appDebug' })}>
|
||||
<Input
|
||||
value={tempPayload.default || ''}
|
||||
value={typeof tempPayload.default === 'string' ? tempPayload.default : ''}
|
||||
onChange={e => onPayloadChange('default')(e.target.value || undefined)}
|
||||
placeholder={t('variableConfig.inputPlaceholder', { ns: 'appDebug' })}
|
||||
/>
|
||||
@ -126,7 +132,7 @@ const ConfigModalFormFields: FC<ConfigModalFormFieldsProps> = ({
|
||||
<Field title={t('variableConfig.defaultValue', { ns: 'appDebug' })}>
|
||||
<Input
|
||||
type="number"
|
||||
value={tempPayload.default || ''}
|
||||
value={typeof tempPayload.default === 'number' || typeof tempPayload.default === 'string' ? tempPayload.default : ''}
|
||||
onChange={e => onPayloadChange('default')(e.target.value || undefined)}
|
||||
placeholder={t('variableConfig.inputPlaceholder', { ns: 'appDebug' })}
|
||||
/>
|
||||
@ -186,7 +192,7 @@ const ConfigModalFormFields: FC<ConfigModalFormFieldsProps> = ({
|
||||
</>
|
||||
)}
|
||||
|
||||
{[InputVarType.singleFile, InputVarType.multiFiles].includes(type) && (
|
||||
{isFileInput && (
|
||||
<>
|
||||
<FileUploadSetting
|
||||
payload={tempPayload as UploadFileSetting}
|
||||
@ -227,14 +233,37 @@ const ConfigModalFormFields: FC<ConfigModalFormFieldsProps> = ({
|
||||
)}
|
||||
|
||||
<div className="mt-5! flex h-6 items-center space-x-2">
|
||||
<Checkbox checked={tempPayload.required} disabled={tempPayload.hide} onCheck={() => onPayloadChange('required')(!tempPayload.required)} />
|
||||
<Checkbox checked={tempPayload.required} disabled={!isFileInput && tempPayload.hide} onCheck={() => onPayloadChange('required')(!tempPayload.required)} />
|
||||
<span className="system-sm-semibold text-text-secondary">{t('variableConfig.required', { ns: 'appDebug' })}</span>
|
||||
</div>
|
||||
|
||||
<div className="mt-5! flex h-6 items-center space-x-2">
|
||||
<Checkbox checked={tempPayload.hide} disabled={tempPayload.required} onCheck={() => onPayloadChange('hide')(!tempPayload.hide)} />
|
||||
<span className="system-sm-semibold text-text-secondary">{t('variableConfig.hide', { ns: 'appDebug' })}</span>
|
||||
</div>
|
||||
{!isFileInput && (
|
||||
<div className="mt-5! flex h-6 items-center space-x-2">
|
||||
<Checkbox checked={tempPayload.hide} disabled={tempPayload.required} onCheck={() => onPayloadChange('hide')(!tempPayload.hide)} />
|
||||
<div className="flex items-center gap-1">
|
||||
<span className="system-sm-semibold text-text-secondary">{t('variableConfig.hidden', { ns: 'appDebug' })}</span>
|
||||
<Infotip
|
||||
aria-label={hiddenDescriptionAriaLabel}
|
||||
popupClassName="max-w-[300px]"
|
||||
>
|
||||
<Trans
|
||||
i18nKey="variableConfig.hiddenDescription"
|
||||
ns="appDebug"
|
||||
components={{
|
||||
docLink: (
|
||||
<a
|
||||
href={docLink('/use-dify/nodes/user-input#hide-and-pre-fill-input-fields')}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-text-accent hover:underline"
|
||||
/>
|
||||
),
|
||||
}}
|
||||
/>
|
||||
</Infotip>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@ -88,7 +88,9 @@ export const createPayloadForType = (payload: InputVar, type: InputVarType) => {
|
||||
draft.default = undefined
|
||||
|
||||
if ([InputVarType.singleFile, InputVarType.multiFiles].includes(type)) {
|
||||
(Object.keys(DEFAULT_FILE_UPLOAD_SETTING) as Array<keyof typeof DEFAULT_FILE_UPLOAD_SETTING>).forEach((key) => {
|
||||
draft.hide = false
|
||||
const fileUploadSettingKeys = Object.keys(DEFAULT_FILE_UPLOAD_SETTING) as Array<keyof typeof DEFAULT_FILE_UPLOAD_SETTING>
|
||||
fileUploadSettingKeys.forEach((key) => {
|
||||
if (key !== 'max_length')
|
||||
draft[key] = DEFAULT_FILE_UPLOAD_SETTING[key] as never
|
||||
})
|
||||
@ -158,38 +160,41 @@ export const validateConfigModalPayload = ({
|
||||
checkVariableName,
|
||||
t,
|
||||
}: ValidateConfigModalPayloadOptions): ValidateConfigModalPayloadResult => {
|
||||
const normalizedTempPayload = [InputVarType.singleFile, InputVarType.multiFiles].includes(tempPayload.type)
|
||||
? { ...tempPayload, hide: false }
|
||||
: tempPayload
|
||||
const jsonSchemaValue = tempPayload.json_schema
|
||||
const schemaEmpty = isJsonSchemaEmpty(jsonSchemaValue)
|
||||
const normalizedJsonSchema = schemaEmpty ? undefined : jsonSchemaValue
|
||||
const payloadToSave = tempPayload.type === InputVarType.jsonObject && schemaEmpty
|
||||
? { ...tempPayload, json_schema: undefined }
|
||||
: tempPayload
|
||||
const payloadToSave = normalizedTempPayload.type === InputVarType.jsonObject && schemaEmpty
|
||||
? { ...normalizedTempPayload, json_schema: undefined }
|
||||
: normalizedTempPayload
|
||||
|
||||
const moreInfo = tempPayload.variable === payload?.variable
|
||||
const moreInfo = normalizedTempPayload.variable === payload?.variable
|
||||
? undefined
|
||||
: {
|
||||
type: ChangeType.changeVarName,
|
||||
payload: { beforeKey: payload?.variable || '', afterKey: tempPayload.variable },
|
||||
payload: { beforeKey: payload?.variable || '', afterKey: normalizedTempPayload.variable },
|
||||
}
|
||||
|
||||
if (!checkVariableName(tempPayload.variable))
|
||||
if (!checkVariableName(normalizedTempPayload.variable))
|
||||
return {}
|
||||
|
||||
if (!tempPayload.label) {
|
||||
if (!normalizedTempPayload.label) {
|
||||
return {
|
||||
errorMessage: t('variableConfig.errorMsg.labelNameRequired', { ns: 'appDebug' }),
|
||||
}
|
||||
}
|
||||
|
||||
if (tempPayload.type === InputVarType.select) {
|
||||
if (!tempPayload.options?.length) {
|
||||
if (normalizedTempPayload.type === InputVarType.select) {
|
||||
if (!normalizedTempPayload.options?.length) {
|
||||
return {
|
||||
errorMessage: t('variableConfig.errorMsg.atLeastOneOption', { ns: 'appDebug' }),
|
||||
}
|
||||
}
|
||||
|
||||
const duplicated = new Set<string>()
|
||||
const hasRepeatedItem = tempPayload.options.some((option) => {
|
||||
const hasRepeatedItem = normalizedTempPayload.options.some((option) => {
|
||||
if (duplicated.has(option))
|
||||
return true
|
||||
|
||||
@ -204,8 +209,8 @@ export const validateConfigModalPayload = ({
|
||||
}
|
||||
}
|
||||
|
||||
if ([InputVarType.singleFile, InputVarType.multiFiles].includes(tempPayload.type)) {
|
||||
if (!tempPayload.allowed_file_types?.length) {
|
||||
if ([InputVarType.singleFile, InputVarType.multiFiles].includes(normalizedTempPayload.type)) {
|
||||
if (!normalizedTempPayload.allowed_file_types?.length) {
|
||||
return {
|
||||
errorMessage: t('errorMsg.fieldRequired', {
|
||||
ns: 'workflow',
|
||||
@ -214,7 +219,7 @@ export const validateConfigModalPayload = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (tempPayload.allowed_file_types.includes(SupportUploadFileTypes.custom) && !tempPayload.allowed_file_extensions?.length) {
|
||||
if (normalizedTempPayload.allowed_file_types.includes(SupportUploadFileTypes.custom) && !normalizedTempPayload.allowed_file_extensions?.length) {
|
||||
return {
|
||||
errorMessage: t('errorMsg.fieldRequired', {
|
||||
ns: 'workflow',
|
||||
@ -224,7 +229,7 @@ export const validateConfigModalPayload = ({
|
||||
}
|
||||
}
|
||||
|
||||
if (tempPayload.type === InputVarType.jsonObject && !schemaEmpty && typeof normalizedJsonSchema === 'string') {
|
||||
if (normalizedTempPayload.type === InputVarType.jsonObject && !schemaEmpty && typeof normalizedJsonSchema === 'string') {
|
||||
try {
|
||||
const schema = JSON.parse(normalizedJsonSchema)
|
||||
if (schema?.type !== 'object') {
|
||||
|
||||
@ -35,7 +35,7 @@ const mockApp: App = {
|
||||
copyright: 'Test Corp',
|
||||
privacy_policy: null,
|
||||
custom_disclaimer: null,
|
||||
category: 'Assistant',
|
||||
categories: ['Assistant'],
|
||||
position: 1,
|
||||
is_listed: true,
|
||||
install_count: 100,
|
||||
@ -253,7 +253,7 @@ describe('AppCard', () => {
|
||||
template_id: mockApp.app_id,
|
||||
template_name: mockApp.app.name,
|
||||
template_mode: mockApp.app.mode,
|
||||
template_category: mockApp.category,
|
||||
template_categories: mockApp.categories,
|
||||
page: 'studio',
|
||||
})
|
||||
expect(mockSetShowTryAppPanel).toHaveBeenCalledWith(true, {
|
||||
|
||||
@ -35,7 +35,7 @@ const AppCard = ({
|
||||
template_id: app.app_id,
|
||||
template_name: appBasicInfo.name,
|
||||
template_mode: appBasicInfo.mode,
|
||||
template_category: app.category,
|
||||
template_categories: app.categories,
|
||||
page: 'studio',
|
||||
})
|
||||
setShowTryAppPanel?.(true, { appId: app.app_id, app })
|
||||
|
||||
@ -115,7 +115,7 @@ vi.mock('@/next/navigation', () => ({
|
||||
|
||||
const createAppEntry = (name: string, category: string) => ({
|
||||
app_id: name,
|
||||
category,
|
||||
categories: [category],
|
||||
app: {
|
||||
id: name,
|
||||
name,
|
||||
|
||||
@ -74,7 +74,7 @@ const Apps = ({
|
||||
const filteredByCategory = allList.filter((item) => {
|
||||
if (currCategory === allCategoriesEn)
|
||||
return true
|
||||
return item.category === currCategory
|
||||
return item.categories?.includes(currCategory) ?? false
|
||||
})
|
||||
if (currentType.length === 0)
|
||||
return filteredByCategory
|
||||
|
||||
@ -1,8 +1,38 @@
|
||||
import type { FormEvent } from 'react'
|
||||
import type { AppDetailResponse } from '@/models/app'
|
||||
import { fireEvent, render, screen, within } from '@testing-library/react'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
import { AccessMode } from '@/models/access-control'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { AppCardAccessControlSection, AppCardOperations, AppCardUrlSection, createAppCardOperations } from '../app-card-sections'
|
||||
import { AppCardAccessControlSection, AppCardDialogs, AppCardOperations, AppCardUrlSection, createAppCardOperations, WorkflowLaunchDialog } from '../app-card-sections'
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
Trans: ({ i18nKey }: { i18nKey: string }) => <span>{i18nKey}</span>,
|
||||
}))
|
||||
|
||||
vi.mock('../settings', () => ({
|
||||
default: () => <div data-testid="settings-modal" />,
|
||||
}))
|
||||
|
||||
vi.mock('../embedded', () => ({
|
||||
default: () => <div data-testid="embedded-modal" />,
|
||||
}))
|
||||
|
||||
vi.mock('../customize', () => ({
|
||||
default: () => <div data-testid="customize-modal" />,
|
||||
}))
|
||||
|
||||
vi.mock('../../app-access-control', () => ({
|
||||
default: ({ onClose, onConfirm }: { onClose: () => void, onConfirm: () => void }) => (
|
||||
<div data-testid="access-control">
|
||||
<button type="button" onClick={onClose}>close-access</button>
|
||||
<button type="button" onClick={onConfirm}>confirm-access</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
describe('app-card-sections', () => {
|
||||
const t = (key: string) => key
|
||||
@ -52,6 +82,7 @@ describe('app-card-sections', () => {
|
||||
|
||||
it('should render operation buttons and execute enabled actions', () => {
|
||||
const onLaunch = vi.fn()
|
||||
const onLaunchConfig = vi.fn()
|
||||
const operations = createAppCardOperations({
|
||||
operationKeys: ['launch', 'embedded'],
|
||||
t: t as never,
|
||||
@ -68,12 +99,19 @@ describe('app-card-sections', () => {
|
||||
<AppCardOperations
|
||||
t={t as never}
|
||||
operations={operations}
|
||||
launchConfigAction={{
|
||||
label: 'operation.config',
|
||||
disabled: false,
|
||||
onClick: onLaunchConfig,
|
||||
}}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /overview\.appInfo\.launch/i }))
|
||||
fireEvent.click(screen.getByRole('button', { name: /operation\.config/i }))
|
||||
|
||||
expect(onLaunch).toHaveBeenCalledTimes(1)
|
||||
expect(onLaunchConfig).toHaveBeenCalledTimes(1)
|
||||
expect(screen.getByRole('button', { name: /overview\.appInfo\.embedded\.entry/i })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
@ -127,4 +165,127 @@ describe('app-card-sections', () => {
|
||||
fireEvent.click(within(dialog).getByRole('button', { name: /operation\.confirm/i }))
|
||||
expect(onRegenerate).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should disable all operations when triggerModeDisabled is true', () => {
|
||||
const operations = createAppCardOperations({
|
||||
operationKeys: ['launch', 'settings'],
|
||||
t: t as never,
|
||||
runningStatus: true,
|
||||
triggerModeDisabled: true,
|
||||
onLaunch: vi.fn(),
|
||||
onEmbedded: vi.fn(),
|
||||
onCustomize: vi.fn(),
|
||||
onSettings: vi.fn(),
|
||||
onDevelop: vi.fn(),
|
||||
})
|
||||
|
||||
expect(operations[0]!.disabled).toBe(true)
|
||||
expect(operations[1]!.disabled).toBe(true)
|
||||
})
|
||||
|
||||
it('should render WorkflowLaunchDialog and submit values', () => {
|
||||
const onOpenChange = vi.fn()
|
||||
const onValueChange = vi.fn()
|
||||
const onSubmit = vi.fn((event: FormEvent<HTMLFormElement>) => {
|
||||
event.preventDefault()
|
||||
})
|
||||
|
||||
render(
|
||||
<WorkflowLaunchDialog
|
||||
t={t as never}
|
||||
open
|
||||
hiddenVariables={[{
|
||||
variable: 'secret',
|
||||
label: 'Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
}]}
|
||||
unsupportedVariables={[]}
|
||||
values={{ secret: 'hello' }}
|
||||
onOpenChange={onOpenChange}
|
||||
onValueChange={onValueChange}
|
||||
onSubmit={onSubmit}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('overview.appInfo.workflowLaunchHiddenInputs.title')).toBeInTheDocument()
|
||||
fireEvent.submit(screen.getByRole('button', { name: /overview\.appInfo\.launch/i }).closest('form')!)
|
||||
expect(onSubmit).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return null for WorkflowLaunchDialog when no variables are provided', () => {
|
||||
const { container } = render(
|
||||
<WorkflowLaunchDialog
|
||||
t={t as never}
|
||||
open
|
||||
hiddenVariables={[]}
|
||||
unsupportedVariables={[]}
|
||||
values={{}}
|
||||
onOpenChange={vi.fn()}
|
||||
onValueChange={vi.fn()}
|
||||
onSubmit={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(container).toBeEmptyDOMElement()
|
||||
})
|
||||
|
||||
it('should render AppCardDialogs with all modals for web apps', () => {
|
||||
const appInfo = {
|
||||
id: 'app-1',
|
||||
mode: AppModeEnum.CHAT,
|
||||
enable_site: true,
|
||||
enable_api: false,
|
||||
site: { app_base_url: 'https://example.com', access_token: 'token-1' },
|
||||
api_base_url: 'https://api.example.com',
|
||||
} as never
|
||||
|
||||
render(
|
||||
<AppCardDialogs
|
||||
isApp
|
||||
appInfo={appInfo}
|
||||
appMode={AppModeEnum.CHAT}
|
||||
showSettingsModal
|
||||
showEmbedded
|
||||
showCustomizeModal
|
||||
showAccessControl
|
||||
appDetail={{ id: 'app-1', access_mode: AccessMode.PUBLIC } as AppDetailResponse}
|
||||
onCloseSettings={vi.fn()}
|
||||
onCloseEmbedded={vi.fn()}
|
||||
onCloseCustomize={vi.fn()}
|
||||
onCloseAccessControl={vi.fn()}
|
||||
onSaveSiteConfig={vi.fn()}
|
||||
onConfirmAccessControl={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('settings-modal')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('embedded-modal')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('customize-modal')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('access-control')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should return null for AppCardDialogs when not an app', () => {
|
||||
const { container } = render(
|
||||
<AppCardDialogs
|
||||
isApp={false}
|
||||
appInfo={{} as never}
|
||||
appMode={AppModeEnum.CHAT}
|
||||
showSettingsModal={false}
|
||||
showEmbedded={false}
|
||||
showCustomizeModal={false}
|
||||
showAccessControl={false}
|
||||
appDetail={null}
|
||||
onCloseSettings={vi.fn()}
|
||||
onCloseEmbedded={vi.fn()}
|
||||
onCloseCustomize={vi.fn()}
|
||||
onCloseAccessControl={vi.fn()}
|
||||
onSaveSiteConfig={vi.fn()}
|
||||
onConfirmAccessControl={vi.fn()}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(container).toBeEmptyDOMElement()
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,9 +1,22 @@
|
||||
import type { AppDetailResponse } from '@/models/app'
|
||||
import { BlockEnum } from '@/app/components/workflow/types'
|
||||
import { BlockEnum, InputVarType } from '@/app/components/workflow/types'
|
||||
import { AccessMode } from '@/models/access-control'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { basePath } from '@/utils/var'
|
||||
import { getAppCardDisplayState, getAppCardOperationKeys, hasWorkflowStartNode, isAppAccessConfigured } from '../app-card-utils'
|
||||
import {
|
||||
buildWorkflowLaunchUrl,
|
||||
compressAndEncodeBase64,
|
||||
createWorkflowLaunchInitialValues,
|
||||
getAppCardDisplayState,
|
||||
getAppCardOperationKeys,
|
||||
getAppHiddenLaunchVariables,
|
||||
getEmbeddedIframeSnippet,
|
||||
getEmbeddedScriptSnippet,
|
||||
getWorkflowHiddenStartVariables,
|
||||
hasWorkflowStartNode,
|
||||
isAppAccessConfigured,
|
||||
isWorkflowLaunchInputSupported,
|
||||
} from '../app-card-utils'
|
||||
|
||||
describe('app-card-utils', () => {
|
||||
const baseAppInfo = {
|
||||
@ -33,6 +46,108 @@ describe('app-card-utils', () => {
|
||||
})).toBe(false)
|
||||
})
|
||||
|
||||
it('should return hidden workflow start variables and their initial launch values', () => {
|
||||
const hiddenVariables = getWorkflowHiddenStartVariables({
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: BlockEnum.Start,
|
||||
variables: [
|
||||
{
|
||||
variable: 'visible',
|
||||
label: 'Visible',
|
||||
type: InputVarType.textInput,
|
||||
hide: false,
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
variable: 'secret',
|
||||
label: 'Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
default: 'prefilled',
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
variable: 'enabled',
|
||||
label: 'Enabled',
|
||||
type: InputVarType.checkbox,
|
||||
hide: true,
|
||||
default: true,
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
})
|
||||
|
||||
expect(hiddenVariables.map(variable => variable.variable)).toEqual(['secret', 'enabled'])
|
||||
expect(createWorkflowLaunchInitialValues(hiddenVariables)).toEqual({
|
||||
secret: 'prefilled',
|
||||
enabled: true,
|
||||
})
|
||||
})
|
||||
|
||||
it('should return hidden advanced-chat launch variables from the workflow start node first', () => {
|
||||
const hiddenVariables = getAppHiddenLaunchVariables({
|
||||
appInfo: {
|
||||
...baseAppInfo,
|
||||
mode: AppModeEnum.ADVANCED_CHAT,
|
||||
model_config: {
|
||||
user_input_form: [
|
||||
{
|
||||
'text-input': {
|
||||
label: 'Visible',
|
||||
variable: 'visible',
|
||||
required: true,
|
||||
max_length: 48,
|
||||
default: '',
|
||||
hide: false,
|
||||
},
|
||||
},
|
||||
{
|
||||
checkbox: {
|
||||
label: 'Hidden Toggle',
|
||||
variable: 'hidden_toggle',
|
||||
required: false,
|
||||
default: true,
|
||||
hide: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
} as AppDetailResponse,
|
||||
currentWorkflow: {
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: BlockEnum.Start,
|
||||
variables: [
|
||||
{
|
||||
variable: 'start_secret',
|
||||
label: 'Start Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
default: 'from-start',
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(hiddenVariables).toEqual([
|
||||
expect.objectContaining({
|
||||
variable: 'start_secret',
|
||||
type: InputVarType.textInput,
|
||||
default: 'from-start',
|
||||
}),
|
||||
])
|
||||
})
|
||||
|
||||
it('should build the display state for a published web app', () => {
|
||||
const state = getAppCardDisplayState({
|
||||
appInfo: baseAppInfo,
|
||||
@ -104,4 +219,108 @@ describe('app-card-utils', () => {
|
||||
isCurrentWorkspaceEditor: false,
|
||||
})).toEqual(['launch', 'embedded', 'customize'])
|
||||
})
|
||||
|
||||
it('should build a workflow launch URL with serialized parameters', async () => {
|
||||
const url = await buildWorkflowLaunchUrl({
|
||||
accessibleUrl: 'https://example.com/app/workflow/token-1',
|
||||
variables: [
|
||||
{ variable: 'name', label: 'Name', type: InputVarType.textInput, hide: true, required: false },
|
||||
{ variable: 'enabled', label: 'Enabled', type: InputVarType.checkbox, hide: true, required: false },
|
||||
],
|
||||
values: { name: 'Alice', enabled: true },
|
||||
})
|
||||
|
||||
const parsed = new URL(url)
|
||||
expect(parsed.searchParams.get('name')).toBe('Alice')
|
||||
expect(parsed.searchParams.get('enabled')).toBe('true')
|
||||
})
|
||||
|
||||
it('should serialize checkbox false and empty string values in launch URL', async () => {
|
||||
const url = await buildWorkflowLaunchUrl({
|
||||
accessibleUrl: 'https://example.com/app/workflow/token-1',
|
||||
variables: [
|
||||
{ variable: 'flag', label: 'Flag', type: InputVarType.checkbox, hide: true, required: false },
|
||||
{ variable: 'empty', label: 'Empty', type: InputVarType.textInput, hide: true, required: false },
|
||||
],
|
||||
values: { flag: false, empty: '' },
|
||||
})
|
||||
|
||||
const parsed = new URL(url)
|
||||
expect(parsed.searchParams.get('flag')).toBe('false')
|
||||
expect(parsed.searchParams.get('empty')).toBe('')
|
||||
})
|
||||
|
||||
it('should generate an iframe snippet with the provided URL', () => {
|
||||
const snippet = getEmbeddedIframeSnippet('https://example.com/chatbot/token-1')
|
||||
expect(snippet).toContain('src="https://example.com/chatbot/token-1"')
|
||||
expect(snippet).toContain('frameborder="0"')
|
||||
expect(snippet).toContain('allow="microphone"')
|
||||
})
|
||||
|
||||
it('should generate an embedded script snippet with inputs', () => {
|
||||
const snippet = getEmbeddedScriptSnippet({
|
||||
url: 'https://example.com',
|
||||
token: 'abc123',
|
||||
primaryColor: '#FF0000',
|
||||
isTestEnv: true,
|
||||
inputValues: { name: 'Alice', count: '5' },
|
||||
})
|
||||
|
||||
expect(snippet).toContain('token: \'abc123\'')
|
||||
expect(snippet).toContain('isDev: true')
|
||||
expect(snippet).toContain('name: "Alice"')
|
||||
expect(snippet).toContain('count: "5"')
|
||||
expect(snippet).toContain('background-color: #FF0000')
|
||||
})
|
||||
|
||||
it('should generate an embedded script snippet with empty inputs comment', () => {
|
||||
const snippet = getEmbeddedScriptSnippet({
|
||||
url: 'https://example.com',
|
||||
token: 'abc123',
|
||||
primaryColor: '#1C64F2',
|
||||
inputValues: {},
|
||||
})
|
||||
|
||||
expect(snippet).toContain('// You can define the inputs from the Start node here')
|
||||
expect(snippet).not.toContain('isDev: true')
|
||||
})
|
||||
|
||||
it('should compress and encode base64 using CompressionStream when available', async () => {
|
||||
const result = await compressAndEncodeBase64('hello')
|
||||
expect(typeof result).toBe('string')
|
||||
expect(result.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should fallback to plain base64 when CompressionStream is unavailable', async () => {
|
||||
const original = globalThis.CompressionStream
|
||||
// @ts-expect-error remove for test
|
||||
delete globalThis.CompressionStream
|
||||
|
||||
const result = await compressAndEncodeBase64('hello')
|
||||
expect(result).toBe(btoa('hello'))
|
||||
|
||||
globalThis.CompressionStream = original
|
||||
})
|
||||
|
||||
it('should identify supported workflow launch input types', () => {
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.textInput, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.paragraph, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.select, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.number, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.checkbox, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.json, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.jsonObject, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.url, hide: true, required: false })).toBe(true)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.files, hide: true, required: false })).toBe(false)
|
||||
expect(isWorkflowLaunchInputSupported({ variable: 'v', label: 'V', type: InputVarType.singleFile, hide: true, required: false })).toBe(false)
|
||||
})
|
||||
|
||||
it('should coerce numeric defaults to string in createWorkflowLaunchInitialValues', () => {
|
||||
const result = createWorkflowLaunchInitialValues([
|
||||
{ variable: 'count', label: 'Count', type: InputVarType.number, hide: true, required: false, default: 42 },
|
||||
{ variable: 'empty', label: 'Empty', type: InputVarType.textInput, hide: true, required: false },
|
||||
])
|
||||
|
||||
expect(result).toEqual({ count: '42', empty: '' })
|
||||
})
|
||||
})
|
||||
|
||||
@ -2,6 +2,7 @@ import type { ReactElement, ReactNode } from 'react'
|
||||
import type { AppDetailResponse } from '@/models/app'
|
||||
import { fireEvent, screen, waitFor } from '@testing-library/react'
|
||||
import { renderWithSystemFeatures } from '@/__tests__/utils/mock-system-features'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
import { AccessMode } from '@/models/access-control'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { basePath } from '@/utils/var'
|
||||
@ -17,7 +18,7 @@ const mockSetAppDetail = vi.fn()
|
||||
const mockOnChangeStatus = vi.fn()
|
||||
const mockOnGenerateCode = vi.fn()
|
||||
|
||||
let mockWorkflow: { graph?: { nodes?: Array<{ data?: { type?: string } }> } } | null = null
|
||||
let mockWorkflow: { graph?: { nodes?: Array<{ data?: { type?: string, variables?: Array<Record<string, unknown>> } }> } } | null = null
|
||||
let mockAccessSubjects: { groups?: unknown[], members?: unknown[] } = { groups: [], members: [] }
|
||||
let mockAppDetail: AppDetailResponse | undefined
|
||||
|
||||
@ -25,6 +26,7 @@ vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
Trans: ({ i18nKey }: { i18nKey?: string }) => i18nKey ?? null,
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
@ -164,6 +166,182 @@ describe('AppCard', () => {
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(`https://example.com${basePath}/chat/access-token`, '_blank')
|
||||
})
|
||||
|
||||
it('should open the workflow web app directly when launch is clicked even with hidden inputs', () => {
|
||||
mockWorkflow = {
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: 'start',
|
||||
variables: [
|
||||
{
|
||||
variable: 'secret',
|
||||
label: 'Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
}
|
||||
|
||||
render(
|
||||
<AppCard
|
||||
appInfo={{
|
||||
...appInfo,
|
||||
mode: AppModeEnum.WORKFLOW,
|
||||
}}
|
||||
onChangeStatus={mockOnChangeStatus}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('overview.appInfo.launch'))
|
||||
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/workflow/access-token`,
|
||||
'_blank',
|
||||
)
|
||||
expect(screen.queryByText('overview.appInfo.workflowLaunchHiddenInputs.title')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should collect hidden workflow inputs from the config action before launching the workflow web app', async () => {
|
||||
mockWorkflow = {
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: 'start',
|
||||
variables: [
|
||||
{
|
||||
variable: 'secret',
|
||||
label: 'Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
}
|
||||
|
||||
render(
|
||||
<AppCard
|
||||
appInfo={{
|
||||
...appInfo,
|
||||
mode: AppModeEnum.WORKFLOW,
|
||||
}}
|
||||
onChangeStatus={mockOnChangeStatus}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'operation.config' }))
|
||||
|
||||
expect(screen.getByText('overview.appInfo.workflowLaunchHiddenInputs.title')).toBeInTheDocument()
|
||||
|
||||
fireEvent.change(screen.getByLabelText('Secret'), {
|
||||
target: { value: 'top-secret' },
|
||||
})
|
||||
fireEvent.click(screen.getByRole('button', { name: 'overview.appInfo.launch' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/workflow/access-token?secret=${encodeURIComponent('top-secret')}`,
|
||||
'_blank',
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should open the chat web app directly when launch is clicked even with hidden inputs', () => {
|
||||
mockWorkflow = {
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: 'start',
|
||||
variables: [
|
||||
{
|
||||
variable: 'chat_secret',
|
||||
label: 'Chat Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
}
|
||||
|
||||
render(
|
||||
<AppCard
|
||||
appInfo={{
|
||||
...appInfo,
|
||||
mode: AppModeEnum.ADVANCED_CHAT,
|
||||
} as AppDetailResponse}
|
||||
onChangeStatus={mockOnChangeStatus}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByText('overview.appInfo.launch'))
|
||||
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/chat/access-token`,
|
||||
'_blank',
|
||||
)
|
||||
expect(screen.queryByText('overview.appInfo.workflowLaunchHiddenInputs.title')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should collect hidden chatflow inputs from the config action before launching the chat web app', async () => {
|
||||
mockWorkflow = {
|
||||
graph: {
|
||||
nodes: [{
|
||||
data: {
|
||||
type: 'start',
|
||||
variables: [
|
||||
{
|
||||
variable: 'chat_secret',
|
||||
label: 'Chat Secret',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
}],
|
||||
},
|
||||
}
|
||||
|
||||
render(
|
||||
<AppCard
|
||||
appInfo={{
|
||||
...appInfo,
|
||||
mode: AppModeEnum.ADVANCED_CHAT,
|
||||
} as AppDetailResponse}
|
||||
onChangeStatus={mockOnChangeStatus}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: 'operation.config' }))
|
||||
|
||||
expect(screen.getByText('overview.appInfo.workflowLaunchHiddenInputs.title')).toBeInTheDocument()
|
||||
|
||||
fireEvent.change(screen.getByLabelText('Chat Secret'), {
|
||||
target: { value: 'chat-secret' },
|
||||
})
|
||||
fireEvent.click(screen.getByRole('button', { name: 'overview.appInfo.launch' }))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||
`https://example.com${basePath}/chat/access-token?chat_secret=${encodeURIComponent('chat-secret')}`,
|
||||
'_blank',
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should show the access-control not-set badge when specific access has no subjects', () => {
|
||||
render(
|
||||
<AppCard
|
||||
@ -302,7 +480,7 @@ describe('AppCard', () => {
|
||||
})
|
||||
|
||||
it('should report refresh failures from access control updates', async () => {
|
||||
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => { })
|
||||
mockFetchAppDetailDirect.mockRejectedValueOnce(new Error('refresh failed'))
|
||||
|
||||
render(
|
||||
|
||||
@ -0,0 +1,214 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import { InputVarType } from '@/app/components/workflow/types'
|
||||
import WorkflowHiddenInputFields from '../workflow-hidden-input-fields'
|
||||
|
||||
describe('WorkflowHiddenInputFields', () => {
|
||||
const onValueChange = vi.fn()
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should render a text input with label and placeholder', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'name',
|
||||
label: 'Full Name',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: true,
|
||||
}]}
|
||||
values={{ name: 'Alice' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const input = screen.getByLabelText('Full Name')
|
||||
expect(input).toHaveValue('Alice')
|
||||
|
||||
fireEvent.change(input, { target: { value: 'Bob' } })
|
||||
expect(onValueChange).toHaveBeenCalledWith('name', 'Bob')
|
||||
})
|
||||
|
||||
it('should render a number input for number-typed variables', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'count',
|
||||
label: 'Count',
|
||||
type: InputVarType.number,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ count: '5' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const input = screen.getByLabelText('Count')
|
||||
expect(input).toHaveAttribute('type', 'number')
|
||||
|
||||
fireEvent.change(input, { target: { value: '10' } })
|
||||
expect(onValueChange).toHaveBeenCalledWith('count', '10')
|
||||
})
|
||||
|
||||
it('should render a checkbox input without a separate label element above', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'enabled',
|
||||
label: 'Enable Feature',
|
||||
type: InputVarType.checkbox,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ enabled: true }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const checkbox = screen.getByRole('checkbox')
|
||||
expect(checkbox).toBeChecked()
|
||||
expect(screen.getByText('Enable Feature')).toBeInTheDocument()
|
||||
|
||||
fireEvent.click(checkbox)
|
||||
expect(onValueChange).toHaveBeenCalledWith('enabled', false)
|
||||
})
|
||||
|
||||
it('should render a select dropdown for select-typed variables', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'color',
|
||||
label: 'Color',
|
||||
type: InputVarType.select,
|
||||
hide: true,
|
||||
required: false,
|
||||
options: ['red', 'green', 'blue'],
|
||||
}]}
|
||||
values={{ color: 'red' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('combobox', { name: 'Color' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render a textarea for paragraph-typed variables', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'description',
|
||||
label: 'Description',
|
||||
type: InputVarType.paragraph,
|
||||
hide: true,
|
||||
required: false,
|
||||
max_length: 500,
|
||||
}]}
|
||||
values={{ description: 'Hello world' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const textarea = screen.getByPlaceholderText('Description')
|
||||
expect(textarea).toHaveValue('Hello world')
|
||||
|
||||
fireEvent.change(textarea, { target: { value: 'Updated' } })
|
||||
expect(onValueChange).toHaveBeenCalledWith('description', 'Updated')
|
||||
})
|
||||
|
||||
it('should render a textarea for json-typed variables', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'config',
|
||||
label: 'Config JSON',
|
||||
type: InputVarType.json,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ config: '{"key": "value"}' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const textarea = screen.getByPlaceholderText('Config JSON')
|
||||
expect(textarea).toHaveValue('{"key": "value"}')
|
||||
})
|
||||
|
||||
it('should render a textarea for jsonObject-typed variables', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'schema',
|
||||
label: 'Schema',
|
||||
type: InputVarType.jsonObject,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ schema: '{}' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const textarea = screen.getByPlaceholderText('Schema')
|
||||
expect(textarea).toHaveValue('{}')
|
||||
})
|
||||
|
||||
it('should use the variable key as label when label is not a string', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'my_var',
|
||||
label: { nodeType: 'start' as never, nodeName: 'Start', variable: 'my_var' },
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ my_var: '' }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('my_var')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should use the custom fieldIdPrefix for element ids', () => {
|
||||
const { container } = render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'token',
|
||||
label: 'Token',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ token: 'abc' }}
|
||||
onValueChange={onValueChange}
|
||||
fieldIdPrefix="custom-prefix"
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(container.querySelector('#custom-prefix-token')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render empty string for non-string fieldValue in text inputs', () => {
|
||||
render(
|
||||
<WorkflowHiddenInputFields
|
||||
hiddenVariables={[{
|
||||
variable: 'flag',
|
||||
label: 'Flag',
|
||||
type: InputVarType.textInput,
|
||||
hide: true,
|
||||
required: false,
|
||||
}]}
|
||||
values={{ flag: true as never }}
|
||||
onValueChange={onValueChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
const input = screen.getByLabelText('Flag')
|
||||
expect(input).toHaveValue('')
|
||||
})
|
||||
})
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user