Merge remote-tracking branch 'origin/main' into feat/vibe-wf

yyh 2026-01-16 23:27:07 +08:00
commit f97511d85c
28 changed files with 696 additions and 703 deletions


@ -16,14 +16,14 @@ jobs:
- name: Check Docker Compose inputs
id: docker-compose-changes
uses: tj-actions/changed-files@v46
uses: tj-actions/changed-files@v47
with:
files: |
docker/generate_docker_compose
docker/.env.example
docker/docker-compose-template.yaml
docker/docker-compose.yaml
- uses: actions/setup-python@v5
- uses: actions/setup-python@v6
with:
python-version: "3.11"


@ -112,7 +112,7 @@ jobs:
context: "web"
steps:
- name: Download digests
uses: actions/download-artifact@v4
uses: actions/download-artifact@v7
with:
path: /tmp/digests
pattern: digests-${{ matrix.context }}-*


@ -19,7 +19,7 @@ jobs:
github.event.workflow_run.head_branch == 'deploy/agent-dev'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
uses: appleboy/ssh-action@v1
with:
host: ${{ secrets.AGENT_DEV_SSH_HOST }}
username: ${{ secrets.SSH_USER }}


@ -16,7 +16,7 @@ jobs:
github.event.workflow_run.head_branch == 'deploy/dev'
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
uses: appleboy/ssh-action@v1
with:
host: ${{ secrets.SSH_HOST }}
username: ${{ secrets.SSH_USER }}


@ -20,7 +20,7 @@ jobs:
)
steps:
- name: Deploy to server
uses: appleboy/ssh-action@v0.1.8
uses: appleboy/ssh-action@v1
with:
host: ${{ secrets.HITL_SSH_HOST }}
username: ${{ secrets.SSH_USER }}


@ -18,7 +18,7 @@ jobs:
pull-requests: write
steps:
- uses: actions/stale@v5
- uses: actions/stale@v10
with:
days-before-issue-stale: 15
days-before-issue-close: 3


@ -109,12 +109,12 @@ jobs:
pnpm run lint:report
continue-on-error: true
- name: Annotate Code
if: steps.changed-files.outputs.any_changed == 'true'
uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae
with:
eslint-report: web/eslint_report.json
github-token: ${{ secrets.GITHUB_TOKEN }}
# - name: Annotate Code
# if: steps.changed-files.outputs.any_changed == 'true' && github.event_name == 'pull_request'
# uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae
# with:
# eslint-report: web/eslint_report.json
# github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Web type check
if: steps.changed-files.outputs.any_changed == 'true'


@ -21,7 +21,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v6
with:
fetch-depth: 0


@ -12,12 +12,8 @@ The codebase is split into:
## Backend Workflow
- Read `api/AGENTS.md` for details
- Run backend CLI commands through `uv run --project api <command>`.
- Before submission, all backend modifications must pass local checks: `make lint`, `make type-check`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
- Use Makefile targets for linting and formatting; `make lint` and `make type-check` cover the required checks.
- Integration tests are CI-only and are not expected to run in the local environment.
## Frontend Workflow


@ -61,7 +61,8 @@ check:
lint:
@echo "🔧 Running ruff format, check with fixes, import linter, and dotenv-linter..."
@uv run --project api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
@uv run --project api --dev ruff format ./api
@uv run --project api --dev ruff check --fix ./api
@uv run --directory api --dev lint-imports
@uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example
@echo "✅ Linting complete"
@ -73,7 +74,12 @@ type-check:
test:
@echo "🧪 Running backend unit tests..."
@uv run --project api --dev dev/pytest/pytest_unit_tests.sh
@if [ -n "$(TARGET_TESTS)" ]; then \
    echo "Target: $(TARGET_TESTS)"; \
    uv run --project api --dev pytest $(TARGET_TESTS); \
else \
    uv run --project api --dev dev/pytest/pytest_unit_tests.sh; \
fi
@echo "✅ Tests complete"
# Build Docker images
@ -125,7 +131,7 @@ help:
@echo " make check - Check code with ruff"
@echo " make lint - Format, fix, and lint code (ruff, imports, dotenv)"
@echo " make type-check - Run type checking with basedpyright"
@echo " make test - Run backend unit tests"
@echo " make test - Run backend unit tests (or TARGET_TESTS=./api/tests/<target_tests>)"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"

agent-notes/.gitkeep (new empty file)

@ -1,62 +1,236 @@
# Agent Skill Index
# API Agent Guide
## Agent Notes (must-check)
Before you start work on any backend file under `api/`, you MUST check whether a related note exists under:
- `agent-notes/<same-relative-path-as-target-file>.md`
Rules:
- **Path mapping**: for a target file `<path>/<name>.py`, the note must be `agent-notes/<path>/<name>.py.md` (same folder structure, same filename, plus `.md`).
- **Before working**:
- If the note exists, read it first and follow any constraints/decisions recorded there.
- If the note conflicts with the current code, or references an "origin" file/path that has been deleted, renamed, or migrated, treat the **code as the single source of truth** and update the note to match reality.
- If the note does not exist, create it with a short architecture/intent summary and any relevant invariants/edge cases.
- **During working**:
- Keep the note in sync as you discover constraints, make decisions, or change approach.
- If you move/rename a file, migrate its note to the new mapped path (and fix any outdated references inside the note).
- Record non-obvious edge cases, trade-offs, and the test/verification plan as you go (not just at the end).
- Keep notes **coherent**: integrate new findings into the relevant sections and rewrite for clarity; avoid append-only “recent fix” / changelog-style additions unless the note is explicitly intended to be a changelog.
- **When finishing work**:
- Update the related note(s) to reflect what changed, why, and any new edge cases/tests.
- If a file is deleted, remove or clearly deprecate the corresponding note so it cannot be mistaken as current guidance.
- Keep notes concise and accurate; they are meant to prevent repeated rediscovery.
## Skill Index
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
______________________________________________________________________
### Platform Foundations
## Platform Foundations
- **[Infrastructure Overview](agent_skills/infra.md)**\
When to read this:
#### [Infrastructure Overview](agent_skills/infra.md)
- **When to read this**
- You need to understand where a feature belongs in the architecture.
- You're wiring storage, Redis, vector stores, or OTEL.
- You're about to add CLI commands or async jobs.\
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
- You're about to add CLI commands or async jobs.
- **What it covers**
- Configuration stack (`configs/app_config.py`, remote settings)
- Storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`)
- Redis conventions (`extensions/ext_redis.py`)
- Plugin runtime topology
- Vector-store factory (`core/rag/datasource/vdb/*`)
- Observability hooks
- SSRF proxy usage
- Core CLI commands
- **[Coding Style](agent_skills/coding_style.md)**\
When to read this:
### Plugin & Extension Development
- You're writing or reviewing backend code and need the authoritative checklist.
- You're unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
- You want the exact lint/type/test commands used in PRs.\
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
______________________________________________________________________
## Plugin & Extension Development
- **[Plugin Systems](agent_skills/plugin.md)**\
When to read this:
#### [Plugin Systems](agent_skills/plugin.md)
- **When to read this**
- You're building or debugging a marketplace plugin.
- You need to know how manifests, providers, daemons, and migrations fit together.\
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
- You need to know how manifests, providers, daemons, and migrations fit together.
- **What it covers**
- Plugin manifests (`core/plugin/entities/plugin.py`)
- Installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands)
- Runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent)
- Daemon coordination (`core/plugin/entities/plugin_daemon.py`)
- How provider registries surface capabilities to the rest of the platform
- **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
When to read this:
#### [Plugin OAuth](agent_skills/plugin_oauth.md)
- **When to read this**
- You must integrate OAuth for a plugin or datasource.
- You're handling credential encryption or refresh flows.\
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
- You're handling credential encryption or refresh flows.
- **Topics**
- Credential storage
- Encryption helpers (`core/helper/provider_encryption.py`)
- OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`)
- How console/API layers expose the flows
______________________________________________________________________
### Workflow Entry & Execution
## Workflow Entry & Execution
#### [Trigger Concepts](agent_skills/trigger.md)
- **[Trigger Concepts](agent_skills/trigger.md)**\
When to read this:
- **When to read this**
- You're debugging why a workflow didn't start.
- You're adding a new trigger type or hook.
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.
- **Details**
- Start-node taxonomy
- Webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`)
- Async orchestration (`services/async_workflow_service.py`, Celery queues)
- Debug event bus
- Storage/logging interactions
______________________________________________________________________
## General Reminders
## Additional Notes for Agents
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
- All skill docs assume you follow the coding style rules below—run the lint/type/test commands before submitting changes.
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.
## Coding Style
This is the default standard for backend code in this repo. Follow it for new code and use it as the checklist when reviewing changes.
### Linting & Formatting
- Use Ruff for formatting and linting (follow `.ruff.toml`).
- Keep each line under 120 characters (including spaces).
### Naming Conventions
- Use `snake_case` for variables and functions.
- Use `PascalCase` for classes.
- Use `UPPER_CASE` for constants.
### Typing & Class Layout
- Code should usually include type annotations that match the repo's current Python version (avoid untyped public APIs and “mystery” values).
- Prefer modern typing forms (e.g. `list[str]`, `dict[str, int]`) and avoid `Any` unless there's a strong reason.
- For classes, declare member variables at the top of the class body (before `__init__`) so the class shape is obvious at a glance:
```python
from datetime import datetime


class Example:
    user_id: str
    created_at: datetime

    def __init__(self, user_id: str, created_at: datetime) -> None:
        self.user_id = user_id
        self.created_at = created_at
```
### General Rules
- Use Pydantic v2 conventions.
- Use `uv` for Python package management in this repo (usually with `--project api`).
- Prefer simple functions over small “utility classes” for lightweight helpers.
- Avoid implementing dunder methods unless it's clearly needed and matches existing patterns.
- Never start long-running services as part of agent work (`uv run app.py`, `flask run`, etc.); running tests is allowed.
- Keep files below ~800 lines; split when necessary.
- Keep code readable and explicit—avoid clever hacks.
### Architecture & Boundaries
- Mirror the layered architecture: controller → service → core/domain.
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
### Logging & Errors
- Never use `print`; use a module-level logger:
- `logger = logging.getLogger(__name__)`
- Include tenant/app/workflow identifiers in log context when relevant.
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate them into HTTP responses in controllers.
- Log retryable events at `warning`, terminal failures at `error`.
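As a sketch of these rules together (a minimal example; the function and its failure branches are illustrative, not an existing module):
```python
import logging

logger = logging.getLogger(__name__)  # module-level logger; never print()


def sync_workflow(tenant_id: str, app_id: str, workflow_id: str) -> None:
    # Keep tenant/app/workflow identifiers in the log context.
    logger.info("syncing workflow tenant=%s app=%s workflow=%s", tenant_id, app_id, workflow_id)
    try:
        ...  # illustrative domain call
    except TimeoutError:
        # Retryable event: log at warning and let the caller retry.
        logger.warning("workflow sync timed out, will retry workflow=%s", workflow_id)
        raise
    except ValueError:
        # Terminal failure: log at error, then translate to a domain exception in the controller.
        logger.error("workflow sync failed workflow=%s", workflow_id)
        raise
```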
### SQLAlchemy Patterns
- Models inherit from `models.base.TypeBase`; do not create ad-hoc metadata or engines.
- Open sessions with context managers:
```python
from sqlalchemy import select
from sqlalchemy.orm import Session

with Session(db.engine, expire_on_commit=False) as session:
    stmt = select(Workflow).where(
        Workflow.id == workflow_id,
        Workflow.tenant_id == tenant_id,
    )
    workflow = session.execute(stmt).scalar_one_or_none()
```
- Prefer SQLAlchemy expressions; avoid raw SQL unless necessary.
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
- Introduce repository abstractions only for very large tables (e.g., workflow executions) or when alternative storage strategies are required.
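A sketch of a guarded write path under these rules, reusing `Workflow` and `db.engine` from the session example above (`WorkflowNotFoundError` and the version bump are illustrative):
```python
from sqlalchemy import select
from sqlalchemy.orm import Session

with Session(db.engine, expire_on_commit=False) as session:
    # Lock the row (FOR UPDATE) and keep the query scoped to the tenant.
    stmt = (
        select(Workflow)
        .where(Workflow.id == workflow_id, Workflow.tenant_id == tenant_id)
        .with_for_update()
    )
    workflow = session.execute(stmt).scalar_one_or_none()
    if workflow is None:
        raise WorkflowNotFoundError(workflow_id)  # illustrative domain error
    workflow.version = new_version
    session.commit()
```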
### Storage & External I/O
- Access storage via `extensions.ext_storage.storage`.
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
- Background tasks that touch storage must be idempotent and should log relevant object identifiers.
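A minimal sketch of an idempotent storage task, assuming `storage` exposes `exists`/`save` and `ssrf_proxy` mirrors an httpx-style `get`; verify the actual helper signatures before relying on them:
```python
import logging

from core.helper import ssrf_proxy
from extensions.ext_storage import storage

logger = logging.getLogger(__name__)


def cache_remote_icon(tenant_id: str, icon_url: str, key: str) -> None:
    # Idempotent: a retried task must not duplicate work.
    if storage.exists(key):
        logger.info("icon already cached tenant=%s key=%s", tenant_id, key)
        return
    # Outbound fetches go through the SSRF proxy, never a bare HTTP client.
    response = ssrf_proxy.get(icon_url)
    response.raise_for_status()
    storage.save(key, response.content)
    logger.info("icon cached tenant=%s key=%s", tenant_id, key)
```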
### Pydantic Usage
- Define DTOs with Pydantic v2 models and forbid extras by default.
- Use `@field_validator` / `@model_validator` for domain rules.
Example:
```python
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator


class TriggerConfig(BaseModel):
    endpoint: HttpUrl
    secret: str

    model_config = ConfigDict(extra="forbid")

    @field_validator("secret")
    @classmethod
    def ensure_secret_prefix(cls, value: str) -> str:
        if not value.startswith("dify_"):
            raise ValueError("secret must start with dify_")
        return value
```
### Generics & Protocols
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
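For example, a behavioural contract plus a generic implementation might look like this (an illustrative sketch, not an existing interface):
```python
from typing import Generic, Protocol, TypeVar

T = TypeVar("T")


class Cache(Protocol[T]):
    """Behavioural contract: any structurally conforming cache can be swapped in."""

    def get(self, key: str) -> T | None: ...

    def set(self, key: str, value: T) -> None: ...


class InMemoryCache(Generic[T]):
    def __init__(self) -> None:
        self._data: dict[str, T] = {}

    def get(self, key: str) -> T | None:
        return self._data.get(key)

    def set(self, key: str, value: T) -> None:
        self._data[key] = value


def warm(cache: Cache[str], key: str, value: str) -> None:
    # Accepts anything satisfying the protocol, with no shared base class.
    if cache.get(key) is None:
        cache.set(key, value)
```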
### Tooling & Checks
Quick checks while iterating:
- Format: `make format`
- Lint (includes auto-fix): `make lint`
- Type check: `make type-check`
- Targeted tests: `make test TARGET_TESTS=./api/tests/<target_tests>`
Before opening a PR / submitting:
- `make lint`
- `make type-check`
- `make test`
### Controllers & Services
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
- Document non-obvious behaviour with concise comments.
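A minimal sketch of the split, with framework wiring elided (the payload model and `TagService.create_tag` are illustrative):
```python
from pydantic import BaseModel, ConfigDict

from services.tag_service import TagService  # create_tag below is illustrative


class CreateTagPayload(BaseModel):
    name: str

    model_config = ConfigDict(extra="forbid")


def create_tag_endpoint(raw_body: dict[str, object], tenant_id: str) -> tuple[dict[str, str], int]:
    # Controller: parse input via Pydantic, delegate to the service, serialise.
    payload = CreateTagPayload.model_validate(raw_body)
    tag = TagService.create_tag(tenant_id=tenant_id, name=payload.name)
    return {"id": tag.id, "name": tag.name}, 201
```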
### Miscellaneous
- Use `configs.dify_config` for configuration—never read environment variables directly.
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
- Keep experimental scripts under `dev/`; do not ship them in production builds.
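As a sketch of "explicit queue selection" in Celery terms (task and queue names are illustrative; real dispatch goes through `services/async_workflow_service`):
```python
from celery import shared_task


@shared_task(name="tasks.cache_icon_task")  # implemented under tasks/
def cache_icon_task(tenant_id: str, icon_url: str) -> None:
    """Body elided; would delegate to the service layer."""


def enqueue_icon_cache(tenant_id: str, icon_url: str) -> None:
    # Explicit queue selection at dispatch time.
    cache_icon_task.apply_async(args=(tenant_id, icon_url), queue="low_priority")
```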


@ -1,115 +0,0 @@
## Linter
- Always follow `.ruff.toml`.
- Run `uv run ruff check --fix --unsafe-fixes`.
- Keep each line under 100 characters (including spaces).
## Code Style
- `snake_case` for variables and functions.
- `PascalCase` for classes.
- `UPPER_CASE` for constants.
## Rules
- Use Pydantic v2 standard.
- Use `uv` for package management.
- Do not override dunder methods like `__init__`, `__iadd__`, etc.
- Never launch services (`uv run app.py`, `flask run`, etc.); running tests under `tests/` is allowed.
- Prefer simple functions over classes for lightweight helpers.
- Keep files below 800 lines; split when necessary.
- Keep code readable—no clever hacks.
- Never use `print`; log with `logger = logging.getLogger(__name__)`.
## Guiding Principles
- Mirror the project's layered architecture: controller → service → core/domain.
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
## SQLAlchemy Patterns
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
- Open sessions with context managers:
```python
from sqlalchemy import select
from sqlalchemy.orm import Session

with Session(db.engine, expire_on_commit=False) as session:
    stmt = select(Workflow).where(
        Workflow.id == workflow_id,
        Workflow.tenant_id == tenant_id,
    )
    workflow = session.execute(stmt).scalar_one_or_none()
```
- Use SQLAlchemy expressions; avoid raw SQL unless necessary.
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
## Storage & External IO
- Access storage via `extensions.ext_storage.storage`.
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
- Background tasks that touch storage must be idempotent and log the relevant object identifiers.
## Pydantic Usage
- Define DTOs with Pydantic v2 models and forbid extras by default.
- Use `@field_validator` / `@model_validator` for domain rules.
- Example:
```python
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator


class TriggerConfig(BaseModel):
    endpoint: HttpUrl
    secret: str

    model_config = ConfigDict(extra="forbid")

    @field_validator("secret")
    @classmethod
    def ensure_secret_prefix(cls, value: str) -> str:
        if not value.startswith("dify_"):
            raise ValueError("secret must start with dify_")
        return value
```
## Generics & Protocols
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
## Error Handling & Logging
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate to HTTP responses in controllers.
- Declare `logger = logging.getLogger(__name__)` at module top.
- Include tenant/app/workflow identifiers in log context.
- Log retryable events at `warning`, terminal failures at `error`.
## Tooling & Checks
- Format/lint: `uv run --project api --dev ruff format ./api` and `uv run --project api --dev ruff check --fix --unsafe-fixes ./api`.
- Type checks: `uv run --directory api --dev basedpyright`.
- Tests: `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
- Run all of the above before submitting your work.
## Controllers & Services
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
- Avoid repositories unless necessary; direct SQLAlchemy usage is preferred for typical tables.
- Document non-obvious behaviour with concise comments.
## Miscellaneous
- Use `configs.dify_config` for configuration—never read environment variables directly.
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
- Keep experimental scripts under `dev/`; do not ship them in production builds.


@ -30,6 +30,11 @@ class TagBindingRemovePayload(BaseModel):
type: Literal["knowledge", "app"] | None = Field(default=None, description="Tag type")
class TagListQueryParam(BaseModel):
type: Literal["knowledge", "app", ""] = Field("", description="Tag type filter")
keyword: str | None = Field(None, description="Search keyword")
register_schema_models(
console_ns,
TagBasePayload,
@ -43,12 +48,15 @@ class TagListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@console_ns.doc(
params={"type": 'Tag type filter. Can be "knowledge" or "app".', "keyword": "Search keyword for tag name."}
)
@marshal_with(dataset_tag_fields)
def get(self):
_, current_tenant_id = current_account_with_tenant()
tag_type = request.args.get("type", type=str, default="")
keyword = request.args.get("keyword", default=None, type=str)
tags = TagService.get_tags(tag_type, current_tenant_id, keyword)
raw_args = request.args.to_dict()
param = TagListQueryParam.model_validate(raw_args)
tags = TagService.get_tags(param.type, current_tenant_id, param.keyword)
return tags, 200


@ -69,8 +69,8 @@ class LLMGenerator:
response: LLMResult = model_instance.invoke_llm(
prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False
)
answer = cast(str, response.message.content)
if answer is None:
answer = response.message.get_text_content()
if answer == "":
return ""
try:
result_dict = json.loads(answer)
@ -182,7 +182,7 @@ class LLMGenerator:
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
)
rule_config["prompt"] = cast(str, response.message.content)
rule_config["prompt"] = response.message.get_text_content()
except InvokeError as e:
error = str(e)
@ -235,13 +235,11 @@ class LLMGenerator:
return rule_config
rule_config["prompt"] = cast(str, prompt_content.message.content)
rule_config["prompt"] = prompt_content.message.get_text_content()
if not isinstance(prompt_content.message.content, str):
raise NotImplementedError("prompt content is not a string")
parameter_generate_prompt = parameter_template.format(
inputs={
"INPUT_TEXT": prompt_content.message.content,
"INPUT_TEXT": prompt_content.message.get_text_content(),
},
remove_template_variables=False,
)
@ -251,7 +249,7 @@ class LLMGenerator:
statement_generate_prompt = statement_template.format(
inputs={
"TASK_DESCRIPTION": instruction,
"INPUT_TEXT": prompt_content.message.content,
"INPUT_TEXT": prompt_content.message.get_text_content(),
},
remove_template_variables=False,
)
@ -261,7 +259,7 @@ class LLMGenerator:
parameter_content: LLMResult = model_instance.invoke_llm(
prompt_messages=list(parameter_messages), model_parameters=model_parameters, stream=False
)
rule_config["variables"] = re.findall(r'"\s*([^"]+)\s*"', cast(str, parameter_content.message.content))
rule_config["variables"] = re.findall(r'"\s*([^"]+)\s*"', parameter_content.message.get_text_content())
except InvokeError as e:
error = str(e)
error_step = "generate variables"
@ -270,7 +268,7 @@ class LLMGenerator:
statement_content: LLMResult = model_instance.invoke_llm(
prompt_messages=list(statement_messages), model_parameters=model_parameters, stream=False
)
rule_config["opening_statement"] = cast(str, statement_content.message.content)
rule_config["opening_statement"] = statement_content.message.get_text_content()
except InvokeError as e:
error = str(e)
error_step = "generate conversation opener"
@ -342,7 +340,7 @@ class LLMGenerator:
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
)
generated_code = cast(str, response.message.content)
generated_code = response.message.get_text_content()
return {"code": generated_code, "language": code_language, "error": ""}
except InvokeError as e:
@ -378,7 +376,7 @@ class LLMGenerator:
raise TypeError("Expected LLMResult when stream=False")
response = result
answer = cast(str, response.message.content)
answer = response.message.get_text_content()
return answer.strip()
@classmethod
@ -402,10 +400,7 @@ class LLMGenerator:
prompt_messages=list(prompt_messages), model_parameters=model_parameters, stream=False
)
raw_content = response.message.content
if not isinstance(raw_content, str):
raise ValueError(f"LLM response content must be a string, got: {type(raw_content)}")
raw_content = response.message.get_text_content()
try:
parsed_content = json.loads(raw_content)


@ -65,15 +65,17 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
<div className="text-xs text-text-secondary">
{t('overview.disableTooltip.triggerMode', { ns: 'appOverview', feature: featureName })}
</div>
<div
className="cursor-pointer text-xs font-medium text-text-accent hover:underline"
<a
href={triggerDocUrl}
target="_blank"
rel="noopener noreferrer"
className="block cursor-pointer text-xs font-medium text-text-accent hover:underline"
onClick={(event) => {
event.stopPropagation()
window.open(triggerDocUrl, '_blank')
}}
>
{t('overview.appInfo.enableTooltip.learnMore', { ns: 'appOverview' })}
</div>
</a>
</div>
), [t, triggerDocUrl])


@ -20,6 +20,7 @@ const SearchInput: FC<SearchInputProps> = ({
white,
}) => {
const { t } = useTranslation()
const inputRef = useRef<HTMLInputElement>(null)
const [focus, setFocus] = useState<boolean>(false)
const isComposing = useRef<boolean>(false)
const [compositionValue, setCompositionValue] = useState<string>('')
@ -36,6 +37,7 @@ const SearchInput: FC<SearchInputProps> = ({
<RiSearchLine className="h-4 w-4 text-components-input-text-placeholder" aria-hidden="true" />
</div>
<input
ref={inputRef}
type="text"
name="query"
className={cn(
@ -65,14 +67,17 @@ const SearchInput: FC<SearchInputProps> = ({
autoComplete="off"
/>
{value && (
<div
className="group/clear flex h-4 w-4 shrink-0 cursor-pointer items-center justify-center"
<button
type="button"
aria-label={t('operation.clear', { ns: 'common' })}
className="group/clear flex h-4 w-4 shrink-0 cursor-pointer items-center justify-center border-none bg-transparent p-0"
onClick={() => {
onChange('')
inputRef.current?.focus()
}}
>
<RiCloseCircleFill className="h-4 w-4 text-text-quaternary group-hover/clear:text-text-tertiary" />
</div>
</button>
)}
</div>
)


@ -442,6 +442,10 @@ const Completed: FC<ICompletedProps> = ({
setFullScreen(!fullScreen)
}, [fullScreen])
const toggleCollapsed = useCallback(() => {
setIsCollapsed(prev => !prev)
}, [])
const viewNewlyAddedChunk = useCallback(async () => {
const totalPages = segmentListData?.total_pages || 0
const total = segmentListData?.total || 0
@ -578,15 +582,16 @@ const Completed: FC<ICompletedProps> = ({
return selectedStatus ? 1 : 0
}, [selectedStatus])
const contextValue = useMemo<SegmentListContextValue>(() => ({
isCollapsed,
fullScreen,
toggleFullScreen,
currSegment,
currChildChunk,
}), [isCollapsed, fullScreen, toggleFullScreen, currSegment, currChildChunk])
return (
<SegmentListContext.Provider value={{
isCollapsed,
fullScreen,
toggleFullScreen,
currSegment,
currChildChunk,
}}
>
<SegmentListContext.Provider value={contextValue}>
{/* Menu Bar */}
{!isFullDocMode && (
<div className={s.docSearchWrapper}>
@ -618,7 +623,7 @@ const Completed: FC<ICompletedProps> = ({
onClear={() => handleInputChange('')}
/>
<Divider type="vertical" className="mx-3 h-3.5" />
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={() => setIsCollapsed(!isCollapsed)} />
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={toggleCollapsed} />
</div>
)}
{/* Segment list */}


@ -1,4 +1,5 @@
import type { FC } from 'react'
import type { SegmentListContextValue } from '..'
import * as React from 'react'
import { Markdown } from '@/app/components/base/markdown'
import { cn } from '@/utils/classnames'
@ -14,13 +15,15 @@ type ChunkContentProps = {
className?: string
}
const selectIsCollapsed = (s: SegmentListContextValue) => s.isCollapsed
const ChunkContent: FC<ChunkContentProps> = ({
detail,
isFullDocMode,
className,
}) => {
const { answer, content, sign_content } = detail
const isCollapsed = useSegmentListContext(s => s.isCollapsed)
const isCollapsed = useSegmentListContext(selectIsCollapsed)
if (answer) {
return (


@ -1,4 +1,3 @@
import type { ActivePluginType } from './constants'
import type { PluginsSort, SearchParamsFromCollection } from './types'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'
import { useQueryState } from 'nuqs'
@ -17,32 +16,14 @@ export function useSetMarketplaceSort() {
return useSetAtom(marketplaceSortAtom)
}
/**
* Preserve the state for marketplace
*/
export const preserveSearchStateInQueryAtom = atom<boolean>(false)
const searchPluginTextAtom = atom<string>('')
const activePluginTypeAtom = atom<ActivePluginType>('all')
const filterPluginTagsAtom = atom<string[]>([])
export function useSearchPluginText() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('q', marketplaceSearchParamsParsers.q)
const atomState = useAtom(searchPluginTextAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('q', marketplaceSearchParamsParsers.q)
}
export function useActivePluginType() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('category', marketplaceSearchParamsParsers.category)
const atomState = useAtom(activePluginTypeAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('category', marketplaceSearchParamsParsers.category)
}
export function useFilterPluginTags() {
const preserveSearchStateInQuery = useAtomValue(preserveSearchStateInQueryAtom)
const queryState = useQueryState('tags', marketplaceSearchParamsParsers.tags)
const atomState = useAtom(filterPluginTagsAtom)
return preserveSearchStateInQuery ? queryState : atomState
return useQueryState('tags', marketplaceSearchParamsParsers.tags)
}
/**


@ -1,15 +0,0 @@
'use client'
import { useHydrateAtoms } from 'jotai/utils'
import { preserveSearchStateInQueryAtom } from './atoms'
export function HydrateMarketplaceAtoms({
preserveSearchStateInQuery,
children,
}: {
preserveSearchStateInQuery: boolean
children: React.ReactNode
}) {
useHydrateAtoms([[preserveSearchStateInQueryAtom, preserveSearchStateInQuery]])
return <>{children}</>
}


@ -1,7 +1,6 @@
import type { SearchParams } from 'nuqs'
import { TanstackQueryInitializer } from '@/context/query-client'
import Description from './description'
import { HydrateMarketplaceAtoms } from './hydration-client'
import { HydrateQueryClient } from './hydration-server'
import ListWrapper from './list/list-wrapper'
import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper'
@ -10,8 +9,7 @@ type MarketplaceProps = {
showInstallButton?: boolean
pluginTypeSwitchClassName?: string
/**
* Pass the search params from the request to prefetch data on the server
* and preserve the search params in the URL.
* Pass the search params from the request to prefetch data on the server.
*/
searchParams?: Promise<SearchParams>
}
@ -24,15 +22,13 @@ const Marketplace = async ({
return (
<TanstackQueryInitializer>
<HydrateQueryClient searchParams={searchParams}>
<HydrateMarketplaceAtoms preserveSearchStateInQuery={!!searchParams}>
<Description />
<StickySearchAndSwitchWrapper
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
/>
<ListWrapper
showInstallButton={showInstallButton}
/>
</HydrateMarketplaceAtoms>
<Description />
<StickySearchAndSwitchWrapper
pluginTypeSwitchClassName={pluginTypeSwitchClassName}
/>
<ListWrapper
showInstallButton={showInstallButton}
/>
</HydrateQueryClient>
</TanstackQueryInitializer>
)


@ -68,7 +68,7 @@ export const PluginPageContextProvider = ({
const options = useMemo(() => {
return enable_marketplace ? tabs : tabs.filter(tab => tab.value !== PLUGIN_PAGE_TABS_MAP.marketplace)
}, [tabs, enable_marketplace])
const [activeTab, setActiveTab] = useQueryState('category', {
const [activeTab, setActiveTab] = useQueryState('tab', {
defaultValue: options[0].value,
})


@ -5,6 +5,7 @@ import type { NodeOutPutVar, Variable } from '@/app/components/workflow/types'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useEffect, useRef, useState } from 'react'
import { createPortal } from 'react-dom'
import { useTranslation } from 'react-i18next'
import VarReferenceVars from '@/app/components/workflow/nodes/_base/components/variable/var-reference-vars'
import { cn } from '@/utils/classnames'
@ -147,7 +148,7 @@ const CodeEditor: FC<Props> = ({
onMount={onEditorMounted}
placeholder={t('common.jinjaEditorPlaceholder', { ns: 'workflow' })!}
/>
{isShowVarPicker && (
{isShowVarPicker && createPortal(
<div
ref={popupRef}
className="w-[228px] space-y-1 rounded-lg border border-components-panel-border bg-components-panel-bg p-1 shadow-lg"
@ -164,7 +165,8 @@ const CodeEditor: FC<Props> = ({
onChange={handleSelectVar}
isSupportFileVar={false}
/>
</div>
</div>,
document.body,
)}
</div>
)


@ -26,7 +26,8 @@ export default antfu(
'react-hooks/preserve-manual-memoization': 'warn',
'react-hooks/purity': 'warn',
'react-hooks/refs': 'warn',
'react-hooks/set-state-in-effect': 'warn',
// prefer react-hooks-extra/no-direct-set-state-in-use-effect
'react-hooks/set-state-in-effect': 'off',
'react-hooks/set-state-in-render': 'warn',
'react-hooks/static-components': 'warn',
'react-hooks/unsupported-syntax': 'warn',
@ -53,6 +54,14 @@ export default antfu(
},
},
},
{
files: ['**/*.ts', '**/*.tsx'],
settings: {
'react-x': {
additionalStateHooks: '/^use\\w*State(?:s)?|useAtom$/u',
},
},
},
// downgrade some rules from error to warn for gradual adoption
// we should fix these in following pull requests
{


@ -153,9 +153,9 @@
"zustand": "^5.0.9"
},
"devDependencies": {
"@antfu/eslint-config": "^6.7.3",
"@antfu/eslint-config": "^7.0.1",
"@chromatic-com/storybook": "^4.1.1",
"@eslint-react/eslint-plugin": "^2.3.13",
"@eslint-react/eslint-plugin": "^2.7.0",
"@mdx-js/loader": "^3.1.1",
"@mdx-js/react": "^3.1.1",
"@next/bundle-analyzer": "15.5.9",
@ -190,7 +190,7 @@
"@types/semver": "^7.7.1",
"@types/sortablejs": "^1.15.8",
"@types/uuid": "^10.0.0",
"@typescript-eslint/parser": "^8.50.0",
"@typescript-eslint/parser": "^8.53.0",
"@typescript/native-preview": "^7.0.0-dev",
"@vitejs/plugin-react": "^5.1.2",
"@vitest/coverage-v8": "4.0.16",
@ -202,7 +202,7 @@
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.26",
"eslint-plugin-sonarjs": "^3.0.5",
"eslint-plugin-storybook": "^10.1.10",
"eslint-plugin-storybook": "^10.1.11",
"eslint-plugin-tailwindcss": "^3.18.2",
"husky": "^9.1.7",
"jsdom": "^27.3.0",
@ -225,7 +225,6 @@
},
"pnpm": {
"overrides": {
"@eslint/plugin-kit@<0.3.4": "0.3.4",
"@monaco-editor/loader": "1.5.0",
"@nolyfill/safe-buffer": "npm:safe-buffer@^5.2.1",
"array-includes": "npm:@nolyfill/array-includes@^1",
@ -276,7 +275,6 @@
]
},
"resolutions": {
"@eslint/plugin-kit": "~0.3",
"@types/react": "~19.2.7",
"@types/react-dom": "~19.2.3",
"brace-expansion": "~2.0",

File diff suppressed because it is too large.


@ -24,8 +24,12 @@ export type FetchOptionType = Omit<RequestInit, 'body'> & {
}
const afterResponse204: AfterResponseHook = async (_request, _options, response) => {
if (response.status === 204)
return Response.json({ result: 'success' })
if (response.status === 204) {
return new Response(JSON.stringify({ result: 'success' }), {
status: 200,
headers: { 'Content-Type': ContentType.json },
})
}
}
export type ResponseError = {