diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 94e857f93a..21195c8f20 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -15,6 +15,10 @@ # Docs /docs/ @crazywoola +# CLI +/cli/ @langgenius/maintainers +/.github/workflows/cli-tests.yml @langgenius/maintainers + # Backend (default owner, more specific rules below will override) /api/ @QuantumGhost diff --git a/.github/workflows/cli-release.yml b/.github/workflows/cli-release.yml new file mode 100644 index 0000000000..5137855b87 --- /dev/null +++ b/.github/workflows/cli-release.yml @@ -0,0 +1,131 @@ +name: CLI Release + +on: + release: + types: [published] + workflow_dispatch: + inputs: + dify_release_tag: + description: "dify release tag to attach cli artifacts to (e.g. 1.14.0). Bare semver — dify tags are NOT v-prefixed." + type: string + required: true + +concurrency: + group: cli-release-${{ github.event.release.tag_name || inputs.dify_release_tag }} + cancel-in-progress: true + +jobs: + release: + runs-on: ubuntu-latest + if: >- + github.repository == 'langgenius/dify' && + (github.event_name == 'workflow_dispatch' || + (vars.CLI_AUTO_RELEASE == 'true' && !github.event.release.prerelease)) + env: + DIFY_TAG: ${{ github.event.release.tag_name || inputs.dify_release_tag }} + permissions: + contents: write + id-token: write + defaults: + run: + shell: bash + working-directory: ./cli + + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Setup web environment + uses: ./.github/actions/setup-web + + - name: Setup Node registry auth + uses: actions/setup-node@v4 + with: + node-version-file: .nvmrc + registry-url: 'https://registry.npmjs.org' + + - name: Read cli/package.json + id: manifest + run: | + version=$(node -p "require('./package.json').version") + channel=$(node -p "require('./package.json').difyctl.channel") + minDify=$(node -p 
"require('./package.json').difyctl.compat.minDify") + maxDify=$(node -p "require('./package.json').difyctl.compat.maxDify") + { + echo "version=$version" + echo "channel=$channel" + echo "minDify=$minDify" + echo "maxDify=$maxDify" + } >> "$GITHUB_OUTPUT" + + - name: Validate manifest + run: scripts/release-validate-manifest.sh + + - name: Bump guard (auto-path only) + if: github.event_name == 'release' + run: scripts/release-bump-guard.sh + env: + NEW_VERSION: ${{ steps.manifest.outputs.version }} + NEW_MIN_DIFY: ${{ steps.manifest.outputs.minDify }} + NEW_MAX_DIFY: ${{ steps.manifest.outputs.maxDify }} + + - name: Build cli + run: | + DIFYCTL_VERSION="${{ steps.manifest.outputs.version }}" \ + DIFYCTL_CHANNEL="${{ steps.manifest.outputs.channel }}" \ + DIFYCTL_MIN_DIFY="${{ steps.manifest.outputs.minDify }}" \ + DIFYCTL_MAX_DIFY="${{ steps.manifest.outputs.maxDify }}" \ + DIFYCTL_COMMIT="$(git rev-parse HEAD)" \ + DIFYCTL_BUILD_DATE="$(git log -1 --format=%cI HEAD)" \ + pnpm build + + - name: Pack tarballs + run: pnpm pack:tarballs + + - name: Verify target dify release exists + run: gh release view "$DIFY_TAG" --repo langgenius/dify > /dev/null + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish to npm (idempotent) + run: scripts/release-npm-publish.sh + env: + CHANNEL: ${{ steps.manifest.outputs.channel }} + NEW_VERSION: ${{ steps.manifest.outputs.version }} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + + - name: Generate sha256 checksum file + run: scripts/release-write-checksums.sh + env: + CLI_VERSION: ${{ steps.manifest.outputs.version }} + + - name: Install cosign + uses: sigstore/cosign-installer@v3 + + - name: Keyless-sign tarballs + checksum file (Sigstore) + run: scripts/release-cosign-sign.sh + env: + CLI_VERSION: ${{ steps.manifest.outputs.version }} + COSIGN_EXPERIMENTAL: '1' + + - name: Snapshot tarballs + checksum + signatures as workflow artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: difyctl-${{ 
steps.manifest.outputs.version }}-${{ env.DIFY_TAG }} + path: | + cli/dist/difyctl-v*.tar.xz + cli/dist/difyctl-v*-checksums.txt + cli/dist/difyctl-v*.sig + cli/dist/difyctl-v*.pem + retention-days: 90 + if-no-files-found: error + + - name: Upload tarballs + checksum + signatures to dify GH release (idempotent) + run: scripts/release-upload-tarballs.sh + env: + CLI_VERSION: ${{ steps.manifest.outputs.version }} + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cli-smoke.yml b/.github/workflows/cli-smoke.yml new file mode 100644 index 0000000000..07d19ada65 --- /dev/null +++ b/.github/workflows/cli-smoke.yml @@ -0,0 +1,57 @@ +name: CLI Smoke (live dify) + +on: + workflow_dispatch: + inputs: + dify_version: + description: "Dify image tag to test against (e.g. 1.7.0)" + type: string + required: true + cli_ref: + description: "Git ref to build the cli from (default: current branch)" + type: string + required: false + +jobs: + smoke: + runs-on: ubuntu-latest + timeout-minutes: 30 + defaults: + run: + shell: bash + steps: + - name: Checkout cli ref + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ inputs.cli_ref || github.ref }} + persist-credentials: false + + - name: Setup web environment + uses: ./.github/actions/setup-web + + - name: Bring up dify + env: + DIFY_VERSION: ${{ inputs.dify_version }} + run: | + cd docker + cp .env.example .env + DIFY_API_IMAGE_TAG="$DIFY_VERSION" \ + DIFY_WEB_IMAGE_TAG="$DIFY_VERSION" \ + docker compose up -d api worker web db redis + for i in $(seq 1 60); do + if curl -fsS http://localhost:5001/health >/dev/null 2>&1; then + echo "dify api ready after ${i}s" + break + fi + sleep 1 + done + + - name: Run smoke against live dify + working-directory: ./cli + run: pnpm exec tsx scripts/run-smoke.ts --base-url http://localhost:5001 + + - name: Dump dify logs on failure + if: failure() + run: | + cd docker + docker compose logs api worker web --tail=200 diff --git 
a/.github/workflows/cli-tests.yml b/.github/workflows/cli-tests.yml new file mode 100644 index 0000000000..e12a5dc5c3 --- /dev/null +++ b/.github/workflows/cli-tests.yml @@ -0,0 +1,46 @@ +name: CLI Tests + +on: + workflow_call: + secrets: + CODECOV_TOKEN: + required: false + +permissions: + contents: read + +concurrency: + group: cli-tests-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + test: + name: CLI Tests + runs-on: ubuntu-latest + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + defaults: + run: + shell: bash + working-directory: ./cli + + steps: + - name: Checkout code + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + + - name: Setup web environment + uses: ./.github/actions/setup-web + + - name: CI pipeline (typecheck, lint, coverage, build) + run: make ci + + - name: Report coverage + if: ${{ env.CODECOV_TOKEN != '' }} + uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0 + with: + directory: cli/coverage + flags: cli + env: + CODECOV_TOKEN: ${{ env.CODECOV_TOKEN }} diff --git a/.github/workflows/main-ci.yml b/.github/workflows/main-ci.yml index ba36b5c07a..bf5a443cd4 100644 --- a/.github/workflows/main-ci.yml +++ b/.github/workflows/main-ci.yml @@ -42,6 +42,7 @@ jobs: runs-on: ubuntu-latest outputs: api-changed: ${{ steps.changes.outputs.api }} + cli-changed: ${{ steps.changes.outputs.cli }} e2e-changed: ${{ steps.changes.outputs.e2e }} web-changed: ${{ steps.changes.outputs.web }} vdb-changed: ${{ steps.changes.outputs.vdb }} @@ -63,6 +64,16 @@ jobs: - 'docker/generate_docker_compose' - 'docker/ssrf_proxy/**' - 'docker/volumes/sandbox/conf/**' + cli: + - 'cli/**' + - 'package.json' + - 'pnpm-lock.yaml' + - 'pnpm-workspace.yaml' + - 'eslint.config.mjs' + - '.npmrc' + - '.nvmrc' + - '.github/workflows/cli-tests.yml' + - '.github/actions/setup-web/**' web: - 'web/**' - 'packages/**' @@ -186,6 +197,66 @@ jobs: echo "API tests were not 
required, but the skip job finished with result: $SKIP_RESULT" >&2 exit 1 + cli-tests-run: + name: Run CLI Tests + needs: + - pre_job + - check-changes + if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.cli-changed == 'true' + uses: ./.github/workflows/cli-tests.yml + secrets: inherit + + cli-tests-skip: + name: Skip CLI Tests + needs: + - pre_job + - check-changes + if: needs.pre_job.outputs.should_skip != 'true' && needs.check-changes.outputs.cli-changed != 'true' + runs-on: ubuntu-latest + steps: + - name: Report skipped CLI tests + run: echo "No CLI-related changes detected; skipping CLI tests." + + cli-tests: + name: CLI Tests + if: ${{ always() }} + needs: + - pre_job + - check-changes + - cli-tests-run + - cli-tests-skip + runs-on: ubuntu-latest + steps: + - name: Finalize CLI Tests status + env: + SHOULD_SKIP_WORKFLOW: ${{ needs.pre_job.outputs.should_skip }} + TESTS_CHANGED: ${{ needs.check-changes.outputs.cli-changed }} + RUN_RESULT: ${{ needs.cli-tests-run.result }} + SKIP_RESULT: ${{ needs.cli-tests-skip.result }} + run: | + if [[ "$SHOULD_SKIP_WORKFLOW" == 'true' ]]; then + echo "CLI tests were skipped because this workflow run duplicated a successful or newer run." + exit 0 + fi + + if [[ "$TESTS_CHANGED" == 'true' ]]; then + if [[ "$RUN_RESULT" == 'success' ]]; then + echo "CLI tests ran successfully." + exit 0 + fi + + echo "CLI tests were required but finished with result: $RUN_RESULT" >&2 + exit 1 + fi + + if [[ "$SKIP_RESULT" == 'success' ]]; then + echo "CLI tests were skipped because no CLI-related files changed." 
+ exit 0 + fi + + echo "CLI tests were not required, but the skip job finished with result: $SKIP_RESULT" >&2 + exit 1 + web-tests-run: name: Run Web Tests needs: diff --git a/.gitignore b/.gitignore index 3493a7c756..8870ad113c 100644 --- a/.gitignore +++ b/.gitignore @@ -115,6 +115,12 @@ venv/ ENV/ env.bak/ venv.bak/ + +# cli/ has a src/env/ module (DIFY_* registry) — don't treat it as a venv +!/cli/src/env/ +!/cli/src/commands/env/ +# cli/scripts/lib/ holds TS build helpers (resolve-buildinfo etc.) — don't treat as Python lib/ +!/cli/scripts/lib/ .conda/ # Spyder project settings @@ -240,6 +246,7 @@ scripts/stress-test/reports/ # settings *.local.json *.local.md +*.local.toml # Code Agent Folder .qoder/* diff --git a/api/app_factory.py b/api/app_factory.py index 48e50ceae9..d6fb70ab2e 100644 --- a/api/app_factory.py +++ b/api/app_factory.py @@ -159,6 +159,7 @@ def initialize_extensions(app: DifyApp): ext_logstore, ext_mail, ext_migrate, + ext_oauth_bearer, ext_orjson, ext_otel, ext_proxy_fix, @@ -203,6 +204,7 @@ def initialize_extensions(app: DifyApp): ext_enterprise_telemetry, ext_request_logging, ext_session_factory, + ext_oauth_bearer, ] for ext in extensions: short_name = ext.__name__.split(".")[-1] diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index ae49ae47d0..77cf59ce68 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -499,6 +499,35 @@ class HttpConfig(BaseSettings): def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]: return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",") + inner_OPENAPI_CORS_ALLOW_ORIGINS: str = Field( + description=( + "Comma-separated allowlist for /openapi/v1/* CORS. " + "Default empty = same-origin only. Browser-cookie routes within " + "the group reject cross-origin OPTIONS regardless of this list." 
+ ), + validation_alias=AliasChoices("OPENAPI_CORS_ALLOW_ORIGINS"), + default="", + ) + + @computed_field + def OPENAPI_CORS_ALLOW_ORIGINS(self) -> list[str]: + return [o for o in self.inner_OPENAPI_CORS_ALLOW_ORIGINS.split(",") if o] + + inner_OPENAPI_KNOWN_CLIENT_IDS: str = Field( + description=( + "Comma-separated client_id values accepted at " + "POST /openapi/v1/oauth/device/code. New CLIs / SDKs added here " + "without code changes. Unknown client_id returns 400 unsupported_client." + ), + validation_alias=AliasChoices("OPENAPI_KNOWN_CLIENT_IDS"), + default="difyctl", + ) + + @computed_field # type: ignore[misc] + @property + def OPENAPI_KNOWN_CLIENT_IDS(self) -> frozenset[str]: + return frozenset(c for c in self.inner_OPENAPI_KNOWN_CLIENT_IDS.split(",") if c) + HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field( ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10 ) @@ -874,6 +903,17 @@ class AuthConfig(BaseSettings): default=86400, ) + ENABLE_OAUTH_BEARER: bool = Field( + description="Enable OAuth bearer authentication (device-flow + Service API /v1/* bearer middleware).", + default=True, + ) + + OPENAPI_RATE_LIMIT_PER_TOKEN: PositiveInt = Field( + description="Per-token rate limit on /openapi/v1/* (requests per minute). 
" + "Bucket keyed on sha256(token), shared across api replicas via Redis.", + default=60, + ) + class ModerationConfig(BaseSettings): """ @@ -1148,6 +1188,14 @@ class CeleryScheduleTasksConfig(BaseSettings): description="Enable scheduled workflow run cleanup task", default=False, ) + ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK: bool = Field( + description="Enable scheduled cleanup of revoked/expired OAuth access-token rows past retention.", + default=True, + ) + OAUTH_ACCESS_TOKEN_RETENTION_DAYS: PositiveInt = Field( + description="Days to retain revoked OAuth access-token rows before deletion.", + default=30, + ) ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field( description="Enable mail clean document notify task", default=False, diff --git a/api/controllers/openapi/__init__.py b/api/controllers/openapi/__init__.py new file mode 100644 index 0000000000..f3afa706ad --- /dev/null +++ b/api/controllers/openapi/__init__.py @@ -0,0 +1,41 @@ +from flask import Blueprint +from flask_restx import Namespace + +from libs.device_flow_security import attach_anti_framing +from libs.external_api import ExternalApi + +bp = Blueprint("openapi", __name__, url_prefix="/openapi/v1") +attach_anti_framing(bp) + +api = ExternalApi( + bp, + version="1.0", + title="OpenAPI", + description="User-scoped programmatic API (bearer auth)", +) + +openapi_ns = Namespace("openapi", description="User-scoped operations", path="/") + +from . import ( + account, + app_run, + apps, + apps_permitted_external, + index, + oauth_device, + oauth_device_sso, + workspaces, +) + +__all__ = [ + "account", + "app_run", + "apps", + "apps_permitted_external", + "index", + "oauth_device", + "oauth_device_sso", + "workspaces", +] + +api.add_namespace(openapi_ns) diff --git a/api/controllers/openapi/_audit.py b/api/controllers/openapi/_audit.py new file mode 100644 index 0000000000..c31bae28ab --- /dev/null +++ b/api/controllers/openapi/_audit.py @@ -0,0 +1,66 @@ +"""Audit emission for openapi app-run endpoints. 
+ +Pattern: logger.info with extra={"audit": True, "event": "app.run.openapi", ...} +matches the existing oauth_device convention. The EE OTel exporter consults +its own allowlist to decide whether to ship the line. +""" + +from __future__ import annotations + +import logging + +logger = logging.getLogger(__name__) + +EVENT_APP_RUN_OPENAPI = "app.run.openapi" +EVENT_OPENAPI_WRONG_SURFACE_DENIED = "openapi.wrong_surface_denied" + + +def emit_app_run( + *, + app_id: str, + tenant_id: str, + caller_kind: str, + mode: str, + surface: str, +) -> None: + logger.info( + "audit: %s app_id=%s tenant_id=%s caller_kind=%s mode=%s surface=%s", + EVENT_APP_RUN_OPENAPI, + app_id, + tenant_id, + caller_kind, + mode, + surface, + extra={ + "audit": True, + "event": EVENT_APP_RUN_OPENAPI, + "app_id": app_id, + "tenant_id": tenant_id, + "caller_kind": caller_kind, + "mode": mode, + "surface": surface, + }, + ) + + +def emit_wrong_surface( + *, + subject_type: str | None, + attempted_path: str, + client_id: str | None, + token_id: str | None, +) -> None: + logger.warning( + "audit: %s subject_type=%s attempted_path=%s", + EVENT_OPENAPI_WRONG_SURFACE_DENIED, + subject_type, + attempted_path, + extra={ + "audit": True, + "event": EVENT_OPENAPI_WRONG_SURFACE_DENIED, + "subject_type": subject_type, + "attempted_path": attempted_path, + "client_id": client_id, + "token_id": token_id, + }, + ) diff --git a/api/controllers/openapi/_input_schema.py b/api/controllers/openapi/_input_schema.py new file mode 100644 index 0000000000..2ff99f6dfc --- /dev/null +++ b/api/controllers/openapi/_input_schema.py @@ -0,0 +1,143 @@ +"""Server-side JSON Schema derivation from Dify `user_input_form`.""" + +from __future__ import annotations + +from typing import Any, cast + +from controllers.service_api.app.error import AppUnavailableError +from models import App +from models.model import AppMode + +JSON_SCHEMA_DRAFT = "https://json-schema.org/draft/2020-12/schema" + +EMPTY_INPUT_SCHEMA: dict[str, Any] = { + 
"$schema": JSON_SCHEMA_DRAFT, + "type": "object", + "properties": {}, + "required": [], +} + +_CHAT_FAMILY = frozenset({AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}) + + +def _file_object_shape() -> dict[str, Any]: + """Single-file value shape. Forward-compat placeholder; refine when file-API contract pins.""" + return { + "type": "object", + "properties": { + "type": {"type": "string"}, + "transfer_method": {"type": "string"}, + "url": {"type": "string"}, + "upload_file_id": {"type": "string"}, + }, + "additionalProperties": True, + } + + +def _row_to_schema(row_type: str, row: dict[str, Any]) -> dict[str, Any] | None: + label = row.get("label") or row.get("variable", "") + base: dict[str, Any] = {"title": label} if label else {} + + if row_type in ("text-input", "paragraph"): + out = {"type": "string"} | base + max_length = row.get("max_length") + if isinstance(max_length, int) and max_length > 0: + out["maxLength"] = max_length + return out + + if row_type == "select": + return {"type": "string"} | base | {"enum": list(row.get("options") or [])} + + if row_type == "number": + return {"type": "number"} | base + + if row_type == "file": + return _file_object_shape() | base + + if row_type == "file-list": + return { + "type": "array", + "items": _file_object_shape(), + } | base + + return None + + +def _form_to_jsonschema(form: list[dict[str, Any]]) -> tuple[dict[str, Any], list[str]]: + """Translate a user_input_form row list into (properties, required-list). + + Each row is a single-key dict: `{"text-input": {variable, label, required, ...}}`. + Unknown variable types are skipped (forward-compat). 
+ """ + properties: dict[str, Any] = {} + required: list[str] = [] + for row in form: + if not isinstance(row, dict) or len(row) != 1: + continue + ((row_type, row_body),) = row.items() + if not isinstance(row_body, dict): + continue + variable = row_body.get("variable") + if not variable: + continue + schema = _row_to_schema(row_type, row_body) + if schema is None: + continue + properties[variable] = schema + if row_body.get("required"): + required.append(variable) + return properties, required + + +def resolve_app_config(app: App) -> tuple[dict[str, Any], list[dict[str, Any]]]: + """Resolve `(features_dict, user_input_form)` for parameters / schema derivation. + + Raises `AppUnavailableError` on misconfigured apps. + """ + if app.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}: + workflow = app.workflow + if workflow is None: + raise AppUnavailableError() + return ( + workflow.features_dict, + cast(list[dict[str, Any]], workflow.user_input_form(to_old_structure=True)), + ) + + app_model_config = app.app_model_config + if app_model_config is None: + raise AppUnavailableError() + features_dict = cast(dict[str, Any], app_model_config.to_dict()) + return features_dict, cast(list[dict[str, Any]], features_dict.get("user_input_form", [])) + + +def build_input_schema(app: App) -> dict[str, Any]: + """Derive Draft 2020-12 JSON Schema from `user_input_form` + app mode. + + chat / agent-chat / advanced-chat: top-level `query` (required, minLength=1) + `inputs` object. + completion / workflow: `inputs` object only. + Raises `AppUnavailableError` on misconfigured apps. 
+ """ + _, user_input_form = resolve_app_config(app) + inputs_props, inputs_required = _form_to_jsonschema(user_input_form) + + properties: dict[str, Any] = {} + required: list[str] = [] + + if app.mode in _CHAT_FAMILY: + properties["query"] = {"type": "string", "minLength": 1} + required.append("query") + + properties["inputs"] = { + "type": "object", + "properties": inputs_props, + "required": inputs_required, + "additionalProperties": False, + } + required.append("inputs") + + return { + "$schema": JSON_SCHEMA_DRAFT, + "type": "object", + "properties": properties, + "required": required, + } diff --git a/api/controllers/openapi/_models.py b/api/controllers/openapi/_models.py new file mode 100644 index 0000000000..758c32d77f --- /dev/null +++ b/api/controllers/openapi/_models.py @@ -0,0 +1,112 @@ +"""Shared response substructures for openapi endpoints.""" + +from __future__ import annotations + +from typing import Any, Literal + +from pydantic import BaseModel, Field + +# Server-side cap on `limit` query param for any /openapi/v1/* list endpoint. +# Sibling endpoints (`/apps`, `/account/sessions`, future routes) all clamp to +# this; do not introduce per-endpoint caps without raising the constant. 
+MAX_PAGE_LIMIT = 200 + + +class UsageInfo(BaseModel): + prompt_tokens: int = 0 + completion_tokens: int = 0 + total_tokens: int = 0 + + +class MessageMetadata(BaseModel): + usage: UsageInfo | None = None + retriever_resources: list[dict[str, Any]] = [] + + +class PaginationEnvelope[T](BaseModel): + """Canonical pagination envelope for `/openapi/v1/*` list endpoints.""" + + page: int + limit: int + total: int + has_more: bool + data: list[T] + + @classmethod + def build(cls, *, page: int, limit: int, total: int, items: list[T]) -> PaginationEnvelope[T]: + return cls(page=page, limit=limit, total=total, has_more=page * limit < total, data=items) + + +class AppListRow(BaseModel): + id: str + name: str + description: str | None = None + mode: str + tags: list[dict[str, str]] = [] + updated_at: str | None = None + created_by_name: str | None = None + workspace_id: str | None = None + workspace_name: str | None = None + + +class AppInfoResponse(BaseModel): + id: str + name: str + description: str | None = None + mode: str + author: str | None = None + tags: list[dict[str, str]] = [] + + +class AppDescribeInfo(AppInfoResponse): + updated_at: str | None = None + service_api_enabled: bool + + +class AppDescribeResponse(BaseModel): + info: AppDescribeInfo | None = None + parameters: dict[str, Any] | None = None + input_schema: dict[str, Any] | None = None + + +class ChatMessageResponse(BaseModel): + event: str + task_id: str + id: str + message_id: str + conversation_id: str + mode: str + answer: str + metadata: MessageMetadata = Field(default_factory=MessageMetadata) + created_at: int + + +class CompletionMessageResponse(BaseModel): + event: str + task_id: str + id: str + message_id: str + mode: str + answer: str + metadata: MessageMetadata = Field(default_factory=MessageMetadata) + created_at: int + + +class WorkflowRunData(BaseModel): + id: str + workflow_id: str + status: str + outputs: dict[str, Any] = Field(default_factory=dict) + error: str | None = None + 
elapsed_time: float | None = None + total_tokens: int | None = None + total_steps: int | None = None + created_at: int | None = None + finished_at: int | None = None + + +class WorkflowRunResponse(BaseModel): + workflow_run_id: str + task_id: str + mode: Literal["workflow"] = "workflow" + data: WorkflowRunData diff --git a/api/controllers/openapi/account.py b/api/controllers/openapi/account.py new file mode 100644 index 0000000000..76a60a23ea --- /dev/null +++ b/api/controllers/openapi/account.py @@ -0,0 +1,236 @@ +"""User-scoped account endpoints. /account is the bearer-authed +identity read; /account/sessions and /account/sessions/ manage +the user's active OAuth tokens. +""" + +from __future__ import annotations + +from datetime import UTC, datetime + +from flask import g, request +from flask_restx import Resource +from sqlalchemy import and_, select, update +from werkzeug.exceptions import BadRequest, NotFound + +from controllers.openapi import openapi_ns +from controllers.openapi._models import MAX_PAGE_LIMIT, PaginationEnvelope +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from libs.oauth_bearer import ( + ACCEPT_USER_ANY, + TOKEN_CACHE_KEY_FMT, + AuthContext, + SubjectType, + validate_bearer, +) +from libs.rate_limit import ( + LIMIT_ME_PER_ACCOUNT, + LIMIT_ME_PER_EMAIL, + enforce, +) +from models import Account, OAuthAccessToken, Tenant, TenantAccountJoin + + +@openapi_ns.route("/account") +class AccountApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + def get(self): + ctx = g.auth_ctx + + if ctx.subject_type == SubjectType.EXTERNAL_SSO: + enforce(LIMIT_ME_PER_EMAIL, key=f"subject:{ctx.subject_email}") + else: + enforce(LIMIT_ME_PER_ACCOUNT, key=f"account:{ctx.account_id}") + + if ctx.subject_type == SubjectType.EXTERNAL_SSO: + return { + "subject_type": ctx.subject_type, + "subject_email": ctx.subject_email, + "subject_issuer": ctx.subject_issuer, + "account": None, + "workspaces": [], + 
"default_workspace_id": None, + } + + account = ( + db.session.query(Account).filter(Account.id == ctx.account_id).one_or_none() if ctx.account_id else None + ) + memberships = _load_memberships(ctx.account_id) if ctx.account_id else [] + default_ws_id = _pick_default_workspace(memberships) + + return { + "subject_type": ctx.subject_type, + "subject_email": ctx.subject_email or (account.email if account else None), + "account": _account_payload(account) if account else None, + "workspaces": [_workspace_payload(m) for m in memberships], + "default_workspace_id": default_ws_id, + } + + +@openapi_ns.route("/account/sessions/self") +class AccountSessionsSelfApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + def delete(self): + ctx = g.auth_ctx + _require_oauth_subject(ctx) + _revoke_token_by_id(str(ctx.token_id)) + return {"status": "revoked"}, 200 + + +@openapi_ns.route("/account/sessions") +class AccountSessionsApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + def get(self): + ctx = g.auth_ctx + now = datetime.now(UTC) + page = int(request.args.get("page", "1")) + limit = min(int(request.args.get("limit", "100")), MAX_PAGE_LIMIT) + + all_rows = db.session.execute( + select( + OAuthAccessToken.id, + OAuthAccessToken.prefix, + OAuthAccessToken.client_id, + OAuthAccessToken.device_label, + OAuthAccessToken.created_at, + OAuthAccessToken.last_used_at, + OAuthAccessToken.expires_at, + ) + .where( + and_( + *_subject_match(ctx), + OAuthAccessToken.revoked_at.is_(None), + OAuthAccessToken.token_hash.is_not(None), + OAuthAccessToken.expires_at > now, + ) + ) + .order_by(OAuthAccessToken.created_at.desc()) + ).all() + + total = len(all_rows) + sliced = all_rows[(page - 1) * limit : page * limit] + + items = [ + { + "id": str(r.id), + "prefix": r.prefix, + "client_id": r.client_id, + "device_label": r.device_label, + "created_at": _iso(r.created_at), + "last_used_at": _iso(r.last_used_at), + "expires_at": _iso(r.expires_at), + } + for r in sliced + ] + + 
return ( + PaginationEnvelope.build(page=page, limit=limit, total=total, items=items).model_dump(mode="json"), + 200, + ) + + +@openapi_ns.route("/account/sessions/<string:session_id>") +class AccountSessionByIdApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + def delete(self, session_id: str): + ctx = g.auth_ctx + _require_oauth_subject(ctx) + + # Subject-match guard. 404 (not 403) on cross-subject so the + # endpoint doesn't leak token IDs that belong to other subjects. + owns = db.session.execute( + select(OAuthAccessToken.id).where( + and_( + OAuthAccessToken.id == session_id, + *_subject_match(ctx), + ) + ) + ).first() + if owns is None: + raise NotFound("session not found") + + _revoke_token_by_id(session_id) + return {"status": "revoked"}, 200 + + +def _subject_match(ctx: AuthContext) -> tuple: + """Where-clauses that scope a query to the bearer's subject. Works + for both account (account_id) and external_sso (email + issuer). + """ + if ctx.subject_type == SubjectType.ACCOUNT: + return (OAuthAccessToken.account_id == str(ctx.account_id),) + return ( + OAuthAccessToken.subject_email == ctx.subject_email, + OAuthAccessToken.subject_issuer == ctx.subject_issuer, + OAuthAccessToken.account_id.is_(None), + ) + + +def _require_oauth_subject(ctx: AuthContext) -> None: + if not ctx.source.startswith("oauth"): + raise BadRequest( + "this endpoint revokes OAuth bearer tokens; use /openapi/v1/personal-access-tokens/self for PATs" + ) + + +def _revoke_token_by_id(token_id: str) -> None: + # Snapshot pre-revoke hash for cache invalidation; UPDATE WHERE + # makes double-revoke idempotent. 
+ row = ( + db.session.query(OAuthAccessToken.token_hash) + .filter( + OAuthAccessToken.id == token_id, + OAuthAccessToken.revoked_at.is_(None), + ) + .one_or_none() + ) + pre_revoke_hash = row[0] if row else None + + stmt = ( + update(OAuthAccessToken) + .where( + OAuthAccessToken.id == token_id, + OAuthAccessToken.revoked_at.is_(None), + ) + .values(revoked_at=datetime.now(UTC), token_hash=None) + ) + db.session.execute(stmt) + db.session.commit() + + if pre_revoke_hash: + redis_client.delete(TOKEN_CACHE_KEY_FMT.format(hash=pre_revoke_hash)) + + +def _iso(dt: datetime | None) -> str | None: + if dt is None: + return None + if dt.tzinfo is None: + dt = dt.replace(tzinfo=UTC) + return dt.isoformat().replace("+00:00", "Z") + + +def _load_memberships(account_id): + return ( + db.session.query(TenantAccountJoin, Tenant) + .join(Tenant, Tenant.id == TenantAccountJoin.tenant_id) + .filter(TenantAccountJoin.account_id == account_id) + .all() + ) + + +def _pick_default_workspace(memberships) -> str | None: + if not memberships: + return None + for join, tenant in memberships: + if getattr(join, "current", False): + return str(tenant.id) + return str(memberships[0][1].id) + + +def _workspace_payload(row) -> dict: + join, tenant = row + return {"id": str(tenant.id), "name": tenant.name, "role": getattr(join, "role", "")} + + +def _account_payload(account) -> dict: + return {"id": str(account.id), "email": account.email, "name": account.name} diff --git a/api/controllers/openapi/app_run.py b/api/controllers/openapi/app_run.py new file mode 100644 index 0000000000..e1e5ccf97b --- /dev/null +++ b/api/controllers/openapi/app_run.py @@ -0,0 +1,200 @@ +"""POST /openapi/v1/apps//run — mode-agnostic runner.""" + +from __future__ import annotations + +import logging +from collections.abc import Callable, Iterator, Mapping +from contextlib import contextmanager +from typing import Any, Literal +from uuid import UUID + +from flask import request +from flask_restx import Resource +from 
pydantic import BaseModel, ValidationError, field_validator +from werkzeug.exceptions import BadRequest, HTTPException, InternalServerError, NotFound, UnprocessableEntity + +import services +from controllers.openapi import openapi_ns +from controllers.openapi._audit import emit_app_run +from controllers.openapi._models import ( + ChatMessageResponse, + CompletionMessageResponse, + WorkflowRunResponse, +) +from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE +from controllers.service_api.app.error import ( + AppUnavailableError, + CompletionRequestError, + ConversationCompletedError, + ProviderModelCurrentlyNotSupportError, + ProviderNotInitializeError, + ProviderQuotaExceededError, +) +from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError +from core.app.entities.app_invoke_entities import InvokeFrom +from core.errors.error import ( + ModelCurrentlyNotSupportError, + ProviderTokenNotInitError, + QuotaExceededError, +) +from core.model_runtime.errors.invoke import InvokeError +from libs import helper +from libs.helper import UUIDStrOrEmpty +from libs.oauth_bearer import Scope +from models.model import App, AppMode +from services.app_generate_service import AppGenerateService +from services.errors.app import ( + IsDraftWorkflowError, + WorkflowIdFormatError, + WorkflowNotFoundError, +) +from services.errors.llm import InvokeRateLimitError + +logger = logging.getLogger(__name__) + + +class AppRunRequest(BaseModel): + inputs: dict[str, Any] + query: str | None = None + files: list[dict[str, Any]] | None = None + response_mode: Literal["blocking", "streaming"] | None = None + conversation_id: UUIDStrOrEmpty | None = None + auto_generate_name: bool = True + workflow_id: str | None = None + workspace_id: UUIDStrOrEmpty | None = None + + @field_validator("conversation_id", mode="before") + @classmethod + def _normalize_conv(cls, value: str | UUID | None) -> str | None: + if isinstance(value, str): + value = value.strip() + 
if not value: + return None + try: + return helper.uuid_value(value) + except ValueError as exc: + raise ValueError("conversation_id must be a valid UUID") from exc + + +@contextmanager +def _translate_service_errors() -> Iterator[None]: + try: + yield + except WorkflowNotFoundError as ex: + raise NotFound(str(ex)) + except (IsDraftWorkflowError, WorkflowIdFormatError) as ex: + raise BadRequest(str(ex)) + except services.errors.conversation.ConversationNotExistsError: + raise NotFound("Conversation Not Exists.") + except services.errors.conversation.ConversationCompletedError: + raise ConversationCompletedError() + except services.errors.app_model_config.AppModelConfigBrokenError: + logger.exception("App model config broken.") + raise AppUnavailableError() + except ProviderTokenNotInitError as ex: + raise ProviderNotInitializeError(ex.description) + except QuotaExceededError: + raise ProviderQuotaExceededError() + except ModelCurrentlyNotSupportError: + raise ProviderModelCurrentlyNotSupportError() + except InvokeRateLimitError as ex: + raise InvokeRateLimitHttpError(ex.description) + except InvokeError as e: + raise CompletionRequestError(e.description) + + +def _unpack_blocking(response: Any) -> Mapping[str, Any]: + if isinstance(response, tuple): + response = response[0] + if not isinstance(response, Mapping): + raise InternalServerError("blocking generate returned non-mapping response") + return response + + +def _generate(app: App, caller: Any, args: dict[str, Any], streaming: bool): + return AppGenerateService.generate( + app_model=app, + user=caller, + args=args, + invoke_from=InvokeFrom.OPENAPI, + streaming=streaming, + ) + + +def _run_chat(app: App, caller: Any, payload: AppRunRequest, streaming: bool): + if not payload.query or not payload.query.strip(): + raise UnprocessableEntity("query_required_for_chat") + args = payload.model_dump(exclude_none=True) + with _translate_service_errors(): + response = _generate(app, caller, args, streaming) + if 
streaming: + return response, None + return None, ChatMessageResponse.model_validate(_unpack_blocking(response)).model_dump(mode="json") + + +def _run_completion(app: App, caller: Any, payload: AppRunRequest, streaming: bool): + args = payload.model_dump(exclude_none=True) + args["auto_generate_name"] = False + args.setdefault("query", "") + with _translate_service_errors(): + response = _generate(app, caller, args, streaming) + if streaming: + return response, None + return None, CompletionMessageResponse.model_validate(_unpack_blocking(response)).model_dump(mode="json") + + +def _run_workflow(app: App, caller: Any, payload: AppRunRequest, streaming: bool): + if payload.query is not None: + raise UnprocessableEntity("query_not_supported_for_workflow") + args = payload.model_dump(exclude={"query", "conversation_id", "auto_generate_name"}, exclude_none=True) + with _translate_service_errors(): + response = _generate(app, caller, args, streaming) + if streaming: + return response, None + return None, WorkflowRunResponse.model_validate(_unpack_blocking(response)).model_dump(mode="json") + + +_DISPATCH: dict[AppMode, Callable[[App, Any, AppRunRequest, bool], tuple[Any, dict[str, Any] | None]]] = { + AppMode.CHAT: _run_chat, + AppMode.AGENT_CHAT: _run_chat, + AppMode.ADVANCED_CHAT: _run_chat, + AppMode.COMPLETION: _run_completion, + AppMode.WORKFLOW: _run_workflow, +} + + +@openapi_ns.route("/apps//run") +class AppRunApi(Resource): + @OAUTH_BEARER_PIPELINE.guard(scope=Scope.APPS_RUN) + def post(self, app_id: str, app_model: App, caller, caller_kind: str): + body = request.get_json(silent=True) or {} + body.pop("user", None) + try: + payload = AppRunRequest.model_validate(body) + except ValidationError as exc: + raise UnprocessableEntity(exc.json()) + + handler = _DISPATCH.get(app_model.mode) + if handler is None: + raise UnprocessableEntity("mode_not_runnable") + + streaming = payload.response_mode == "streaming" + try: + stream_obj, blocking_body = handler(app_model, 
caller, payload, streaming) + except HTTPException: + raise + except Exception: + logger.exception("internal server error.") + raise InternalServerError() + + emit_app_run( + app_id=app_model.id, + tenant_id=app_model.tenant_id, + caller_kind=caller_kind, + mode=str(app_model.mode), + surface="apps", + ) + + if streaming: + return helper.compact_generate_response(stream_obj) + return blocking_body, 200 diff --git a/api/controllers/openapi/apps.py b/api/controllers/openapi/apps.py new file mode 100644 index 0000000000..4209233670 --- /dev/null +++ b/api/controllers/openapi/apps.py @@ -0,0 +1,330 @@ +"""GET /openapi/v1/apps and per-app reads. + +Decorator order: `method_decorators` is innermost-first. `validate_bearer` +is last → outermost → sets `g.auth_ctx` before `require_scope` reads it. +""" + +from __future__ import annotations + +import uuid as _uuid +from typing import Any + +import sqlalchemy as sa +from flask import g, request +from flask_restx import Resource +from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_validator +from werkzeug.exceptions import Conflict, NotFound, UnprocessableEntity + +from controllers.common.fields import Parameters +from controllers.openapi import openapi_ns +from controllers.openapi._input_schema import EMPTY_INPUT_SCHEMA, build_input_schema, resolve_app_config +from controllers.openapi._models import ( + MAX_PAGE_LIMIT, + AppDescribeInfo, + AppDescribeResponse, + AppListRow, + PaginationEnvelope, +) +from controllers.openapi.auth.surface_gate import accept_subjects +from controllers.service_api.app.error import AppUnavailableError +from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict +from extensions.ext_database import db +from libs.oauth_bearer import ( + ACCEPT_USER_ANY, + AuthContext, + Scope, + SubjectType, + require_scope, + require_workspace_member, + validate_bearer, +) +from models import App, Tenant +from models.model import AppMode +from 
services.app_service import AppService +from services.openapi.visibility import apply_openapi_gate, is_openapi_visible +from services.tag_service import TagService + +# method_decorators applies left-to-right innermost-first; flask_restx wraps +# in order, so the LAST entry is the outermost. Execution flows +# validate_bearer → accept_subjects → require_scope → handler. +_APPS_READ_DECORATORS = [ + require_scope(Scope.APPS_READ), + accept_subjects(SubjectType.ACCOUNT), + validate_bearer(accept=ACCEPT_USER_ANY), +] + +_ALLOWED_DESCRIBE_FIELDS: frozenset[str] = frozenset({"info", "parameters", "input_schema"}) + + +class AppDescribeQuery(BaseModel): + """`?fields=` allow-list for GET /apps//describe. + + Empty / omitted → all blocks. Unknown member → ValidationError → 422. + """ + + model_config = ConfigDict(extra="forbid") + + fields: set[str] | None = None + workspace_id: str | None = None + + @field_validator("workspace_id", mode="before") + @classmethod + def _validate_workspace_id(cls, v: object) -> str | None: + if v is None or v == "": + return None + if not isinstance(v, str): + raise ValueError("workspace_id must be a string") + try: + _uuid.UUID(v) + except ValueError: + raise ValueError("workspace_id must be a valid UUID") + return v + + @field_validator("fields", mode="before") + @classmethod + def _parse_fields(cls, v: object) -> set[str] | None: + if v is None or v == "": + return None + if not isinstance(v, str): + raise ValueError("fields must be a comma-separated string") + members = {m.strip() for m in v.split(",") if m.strip()} + unknown = members - _ALLOWED_DESCRIBE_FIELDS + if unknown: + raise ValueError(f"unknown field(s): {sorted(unknown)}") + return members + + +_EMPTY_PARAMETERS: dict[str, Any] = { + "opening_statement": None, + "suggested_questions": [], + "user_input_form": [], + "file_upload": None, + "system_parameters": {}, +} + + +class AppReadResource(Resource): + """Base for per-app read endpoints; subclasses call `_load()` for 
SSO/membership/exists checks.""" + + method_decorators = _APPS_READ_DECORATORS + + def _load(self, app_id: str, workspace_id: str | None = None) -> tuple[App, AuthContext]: + ctx: AuthContext = g.auth_ctx + + try: + parsed_uuid = _uuid.UUID(app_id) + is_uuid = True + except ValueError: + parsed_uuid = None + is_uuid = False + + if is_uuid: + app = db.session.get(App, str(parsed_uuid)) # normalised dashed form + if not app or app.status != "normal" or not is_openapi_visible(app): + raise NotFound("app not found") + else: + if not workspace_id: + raise UnprocessableEntity("workspace_id is required for name-based lookup") + matches = list( + db.session.execute( + apply_openapi_gate( + sa.select(App).where( + App.name == app_id, + App.tenant_id == workspace_id, + App.status == "normal", + ) + ) + ).scalars() + ) + if len(matches) == 0: + raise NotFound("app not found") + if len(matches) > 1: + lines = [f"app name {app_id!r} is ambiguous — re-run with a UUID:\n\n"] + lines.append(f" {'ID':<36} {'MODE':<12} NAME\n") + for m in matches: + lines.append(f" {str(m.id):<36} {str(m.mode.value):<12} {m.name}\n") + raise Conflict("".join(lines)) + app = matches[0] + + require_workspace_member(ctx, str(app.tenant_id)) + return app, ctx + + +def parameters_payload(app: App) -> dict: + """Mirrors service_api/app/app.py::AppParameterApi response body.""" + features_dict, user_input_form = resolve_app_config(app) + parameters = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form) + return Parameters.model_validate(parameters).model_dump(mode="json") + + +@openapi_ns.route("/apps//describe") +class AppDescribeApi(AppReadResource): + def get(self, app_id: str): + try: + query = AppDescribeQuery.model_validate(request.args.to_dict(flat=True)) + except ValidationError as exc: + raise UnprocessableEntity(exc.json()) + + app, _ = self._load(app_id, workspace_id=query.workspace_id) + + requested = query.fields + want_info = requested is None or 
"info" in requested + want_params = requested is None or "parameters" in requested + want_schema = requested is None or "input_schema" in requested + + info = ( + AppDescribeInfo( + id=str(app.id), + name=app.name, + mode=app.mode, + description=app.description, + tags=[{"name": t.name} for t in app.tags], + author=app.author_name, + updated_at=app.updated_at.isoformat() if app.updated_at else None, + service_api_enabled=bool(app.enable_api), + ) + if want_info + else None + ) + + parameters: dict[str, Any] | None = None + input_schema: dict[str, Any] | None = None + if want_params: + try: + parameters = parameters_payload(app) + except AppUnavailableError: + parameters = dict(_EMPTY_PARAMETERS) + if want_schema: + try: + input_schema = build_input_schema(app) + except AppUnavailableError: + input_schema = dict(EMPTY_INPUT_SCHEMA) + + return ( + AppDescribeResponse( + info=info, + parameters=parameters, + input_schema=input_schema, + ).model_dump(mode="json", exclude_none=False), + 200, + ) + + +class AppListQuery(BaseModel): + """`mode` is a closed enum — unknown values 422 instead of silently-empty data.""" + + workspace_id: str + page: int = Field(1, ge=1) + limit: int = Field(20, ge=1, le=MAX_PAGE_LIMIT) + mode: AppMode | None = None + name: str | None = Field(None, max_length=200) + tag: str | None = Field(None, max_length=100) + + +@openapi_ns.route("/apps") +class AppListApi(Resource): + method_decorators = _APPS_READ_DECORATORS + + def get(self): + ctx: AuthContext = g.auth_ctx + + try: + query = AppListQuery.model_validate(request.args.to_dict(flat=True)) + except ValidationError as exc: + raise UnprocessableEntity(exc.json()) + + workspace_id = query.workspace_id + require_workspace_member(ctx, workspace_id) + + empty = ( + PaginationEnvelope[AppListRow] + .build(page=query.page, limit=query.limit, total=0, items=[]) + .model_dump(mode="json"), + 200, + ) + + if query.name: + try: + parsed_uuid = _uuid.UUID(query.name) + except ValueError: + parsed_uuid = 
None + else: + parsed_uuid = None + + if parsed_uuid is not None: + app = db.session.get(App, str(parsed_uuid)) + if ( + not app + or app.status != "normal" + or str(app.tenant_id) != workspace_id + or not is_openapi_visible(app) + ): + return empty + tenant_name = db.session.execute( + sa.select(Tenant.name).where(Tenant.id == workspace_id) + ).scalar_one_or_none() + item = AppListRow( + id=str(app.id), + name=app.name, + description=app.description, + mode=app.mode, + tags=[{"name": t.name} for t in app.tags], + updated_at=app.updated_at.isoformat() if app.updated_at else None, + created_by_name=getattr(app, "author_name", None), + workspace_id=str(workspace_id), + workspace_name=tenant_name, + ) + env = PaginationEnvelope[AppListRow].build(page=1, limit=1, total=1, items=[item]) + return env.model_dump(mode="json"), 200 + + tag_ids: list[str] | None = None + if query.tag: + tags = TagService.get_tag_by_tag_name("app", workspace_id, query.tag) + if not tags: + return empty + tag_ids = [tag.id for tag in tags] + + args: dict[str, Any] = { + "page": query.page, + "limit": query.limit, + "mode": query.mode.value if query.mode else "", + "name": query.name, + "status": "normal", + # Visibility gate pushed into the query — pagination.total stays + # consistent across pages because invisible rows never count. 
+ "openapi_visible": True, + } + if tag_ids: + args["tag_ids"] = tag_ids + + pagination = AppService().get_paginate_apps(ctx.account_id, workspace_id, args) + if pagination is None: + return empty + + tenant_name: str | None = None + if pagination.items: + tenant_name = db.session.execute( + sa.select(Tenant.name).where(Tenant.id == workspace_id) + ).scalar_one_or_none() + + items = [ + AppListRow( + id=str(r.id), + name=r.name, + description=r.description, + mode=r.mode, + tags=[{"name": t.name} for t in r.tags], + updated_at=r.updated_at.isoformat() if r.updated_at else None, + created_by_name=getattr(r, "author_name", None), + workspace_id=str(workspace_id), + workspace_name=tenant_name, + ) + for r in pagination.items + ] + env = PaginationEnvelope[AppListRow].build( + page=query.page, + limit=query.limit, + total=int(pagination.total), + items=items, + ) + return env.model_dump(mode="json"), 200 diff --git a/api/controllers/openapi/apps_permitted_external.py b/api/controllers/openapi/apps_permitted_external.py new file mode 100644 index 0000000000..037705a0ca --- /dev/null +++ b/api/controllers/openapi/apps_permitted_external.py @@ -0,0 +1,121 @@ +"""GET /openapi/v1/permitted-external-apps — external-subject app discovery (EE only). + +`dfoe_` (External SSO) callers reach apps gated by ACL access-mode +(public / sso_verified). License-gated: CE deploys never enable the +EE blueprint chain so this module is unreachable there. 
+""" + +from __future__ import annotations + +import sqlalchemy as sa +from flask import request +from flask_restx import Resource +from pydantic import BaseModel, ConfigDict, Field, ValidationError +from werkzeug.exceptions import UnprocessableEntity + +from controllers.openapi import openapi_ns +from controllers.openapi._models import ( + MAX_PAGE_LIMIT, + AppListRow, + PaginationEnvelope, +) +from controllers.openapi.auth.surface_gate import accept_subjects +from extensions.ext_database import db +from libs.device_flow_security import enterprise_only +from libs.oauth_bearer import ( + ACCEPT_USER_ANY, + Scope, + SubjectType, + require_scope, + validate_bearer, +) +from models import App, Tenant +from models.model import AppMode +from services.enterprise.app_permitted_service import list_permitted_apps +from services.openapi.license_gate import license_required +from services.openapi.visibility import apply_openapi_gate + + +class PermittedExternalAppsListQuery(BaseModel): + """Strict (`extra='forbid'`) — rejects `workspace_id`/`tag`/etc. that are valid on /apps but not here.""" + + model_config = ConfigDict(extra="forbid") + + page: int = Field(1, ge=1) + limit: int = Field(20, ge=1, le=MAX_PAGE_LIMIT) + mode: AppMode | None = None + name: str | None = Field(None, max_length=200) + + +@openapi_ns.route("/permitted-external-apps") +class PermittedExternalAppsListApi(Resource): + # method_decorators applies left-to-right innermost-first; execution + # flows enterprise_only → validate_bearer → accept_subjects → + # license_required → require_scope → handler. validate_bearer is + # widened to ACCEPT_USER_ANY so accept_subjects can emit the + # `openapi.wrong_surface_denied` audit on dfoa_→external misses + # instead of validate_bearer rejecting silently with "subject type + # not accepted here". 
+ method_decorators = [ + require_scope(Scope.APPS_READ_PERMITTED_EXTERNAL), + license_required, + accept_subjects(SubjectType.EXTERNAL_SSO), + validate_bearer(accept=ACCEPT_USER_ANY), + enterprise_only, + ] + + def get(self): + try: + query = PermittedExternalAppsListQuery.model_validate(request.args.to_dict(flat=True)) + except ValidationError as exc: + raise UnprocessableEntity(exc.json()) + + page_result = list_permitted_apps( + page=query.page, + limit=query.limit, + mode=query.mode.value if query.mode else None, + name=query.name, + ) + + if not page_result.app_ids: + env = PaginationEnvelope[AppListRow].build( + page=query.page, limit=query.limit, total=page_result.total, items=[] + ) + return env.model_dump(mode="json"), 200 + + apps_by_id = { + str(a.id): a + for a in db.session.execute( + apply_openapi_gate(sa.select(App).where(App.id.in_(page_result.app_ids))) + ).scalars().all() + } + tenant_ids = list({a.tenant_id for a in apps_by_id.values()}) + tenants_by_id = { + str(t.id): t for t in db.session.execute(sa.select(Tenant).where(Tenant.id.in_(tenant_ids))).scalars().all() + } + + items: list[AppListRow] = [] + for app_id in page_result.app_ids: + app = apps_by_id.get(app_id) + if not app or app.status != "normal": + continue + tenant = tenants_by_id.get(str(app.tenant_id)) + items.append( + AppListRow( + id=str(app.id), + name=app.name, + description=app.description, + mode=app.mode, + tags=[], # tenant-scoped; not surfaced cross-tenant + updated_at=app.updated_at.isoformat() if app.updated_at else None, + created_by_name=None, # cross-tenant author leak prevention + workspace_id=str(app.tenant_id), + workspace_name=tenant.name if tenant else None, + ) + ) + + # total/has_more reflect the EE-side allow-list; len(items) may be < limit when local rows are dropped. 
+ env = PaginationEnvelope[AppListRow].build( + page=query.page, limit=query.limit, total=page_result.total, items=items + ) + return env.model_dump(mode="json"), 200 diff --git a/api/controllers/openapi/auth/__init__.py b/api/controllers/openapi/auth/__init__.py new file mode 100644 index 0000000000..17ac5493d0 --- /dev/null +++ b/api/controllers/openapi/auth/__init__.py @@ -0,0 +1,3 @@ +from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE + +__all__ = ["OAUTH_BEARER_PIPELINE"] diff --git a/api/controllers/openapi/auth/composition.py b/api/controllers/openapi/auth/composition.py new file mode 100644 index 0000000000..8bf39467c9 --- /dev/null +++ b/api/controllers/openapi/auth/composition.py @@ -0,0 +1,52 @@ +"""`OAUTH_BEARER_PIPELINE` — the auth scheme for openapi `/run` endpoints. + +Endpoints attach via `@OAUTH_BEARER_PIPELINE.guard(scope=…)`. No alternative +paths. Read endpoints (`/apps`, `/info`, `/parameters`, `/describe`) skip +the pipeline and use `validate_bearer + require_scope + require_workspace_member` +inline — they don't need `AppAuthzCheck`/`CallerMount`. +""" + +from __future__ import annotations + +from controllers.openapi.auth.pipeline import Pipeline +from controllers.openapi.auth.steps import ( + AppAuthzCheck, + AppResolver, + BearerCheck, + CallerMount, + ScopeCheck, + SurfaceCheck, + WorkspaceMembershipCheck, +) +from controllers.openapi.auth.strategies import ( + AccountMounter, + AclStrategy, + AppAuthzStrategy, + EndUserMounter, + MembershipStrategy, +) +from libs.oauth_bearer import SubjectType +from services.feature_service import FeatureService + + +def _resolve_app_authz_strategy() -> AppAuthzStrategy: + if FeatureService.get_system_features().webapp_auth.enabled: + return AclStrategy() + return MembershipStrategy() + + +# Pipeline currently serves only `/openapi/v1/apps//run` — an account +# (dfoa_) surface route. 
SurfaceCheck runs right after BearerCheck so +# pipeline-guarded routes get the same wrong_surface 403 + audit emit as +# the inline `@accept_subjects` decorator on read endpoints. When the +# external-surface run route lands, swap in an external-pipeline builder +# that constructs SurfaceCheck(accepted=frozenset({USER_EXT_SSO})). +OAUTH_BEARER_PIPELINE = Pipeline( + BearerCheck(), + SurfaceCheck(accepted=frozenset({SubjectType.ACCOUNT})), + ScopeCheck(), + AppResolver(), + WorkspaceMembershipCheck(), + AppAuthzCheck(_resolve_app_authz_strategy), + CallerMount(AccountMounter(), EndUserMounter()), +) diff --git a/api/controllers/openapi/auth/context.py b/api/controllers/openapi/auth/context.py new file mode 100644 index 0000000000..48a6fd6aeb --- /dev/null +++ b/api/controllers/openapi/auth/context.py @@ -0,0 +1,46 @@ +"""Mutable per-request context for the openapi auth pipeline. + +Every field starts None / empty and is filled in by a step. The pipeline +is the only thing that should construct or mutate Context — handlers +read populated values via the decorator's kwargs unpacking. 
+"""
+
+from __future__ import annotations
+
+import uuid
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import TYPE_CHECKING, Literal, Protocol
+
+from flask import Request
+
+from libs.oauth_bearer import Scope, SubjectType
+
+if TYPE_CHECKING:
+    from models import App, Tenant
+
+
+@dataclass
+class Context:
+    # Set at construction by Pipeline.guard:
+    request: Request
+    required_scope: Scope
+    # Populated by BearerCheck (identity of the bearer):
+    subject_type: SubjectType | None = None
+    subject_email: str | None = None
+    subject_issuer: str | None = None
+    account_id: uuid.UUID | None = None
+    scopes: frozenset[Scope] = field(default_factory=frozenset)
+    token_id: uuid.UUID | None = None
+    token_hash: str | None = None
+    cached_verified_tenants: dict[str, bool] | None = None
+    source: str | None = None
+    expires_at: datetime | None = None
+    # Populated by AppResolver:
+    app: App | None = None
+    tenant: Tenant | None = None
+    # Populated by CallerMount (the identity handed to the view):
+    caller: object | None = None
+    caller_kind: Literal["account", "end_user"] | None = None
+
+
+class Step(Protocol):
+    """One responsibility. Mutate ctx or raise to short-circuit."""
+
+    def __call__(self, ctx: Context) -> None: ...
diff --git a/api/controllers/openapi/auth/pipeline.py b/api/controllers/openapi/auth/pipeline.py
new file mode 100644
index 0000000000..1dbcfab9b2
--- /dev/null
+++ b/api/controllers/openapi/auth/pipeline.py
@@ -0,0 +1,41 @@
+"""Pipeline IS the auth scheme.
+
+`Pipeline.guard(scope=…)` is the only attachment point for endpoints —
+that is the design lock-in: forgetting an auth layer is structurally
+impossible because there is no "sometimes wrap, sometimes don't" choice.
+"""
+
+from __future__ import annotations
+
+from functools import wraps
+
+from flask import request
+
+from controllers.openapi.auth.context import Context, Step
+from libs.oauth_bearer import Scope
+
+
+class Pipeline:
+    def __init__(self, *steps: Step) -> None:
+        self._steps = steps
+
+    def run(self, ctx: Context) -> None:
+        # Steps run in construction order; any step may raise to abort the request.
+        for step in self._steps:
+            step(ctx)
+
+    def guard(self, *, scope: Scope):
+        # Decorator factory: builds a fresh Context per request, runs every
+        # step, then injects the populated app/caller into the view's kwargs.
+        def decorator(view):
+            @wraps(view)
+            def decorated(*args, **kwargs):
+                ctx = Context(request=request, required_scope=scope)
+                self.run(ctx)
+                kwargs.update(
+                    app_model=ctx.app,
+                    caller=ctx.caller,
+                    caller_kind=ctx.caller_kind,
+                )
+                return view(*args, **kwargs)
+
+            return decorated
+
+        return decorator
diff --git a/api/controllers/openapi/auth/steps.py b/api/controllers/openapi/auth/steps.py
new file mode 100644
index 0000000000..b60bc674ca
--- /dev/null
+++ b/api/controllers/openapi/auth/steps.py
@@ -0,0 +1,174 @@
+"""Pipeline steps. Each is one responsibility.
+
+`BearerCheck` is the only step that touches the token registry; downstream
+steps see only the populated `Context`. `BearerCheck` also assigns
+``g.auth_ctx`` (the same way ``validate_bearer`` does) so the surface gate
++ any handler reading the request-scoped context has a single source of
+truth across both auth-attach paths.
+""" + +from __future__ import annotations + +from collections.abc import Callable + +from flask import g +from werkzeug.exceptions import BadRequest, Forbidden, NotFound, Unauthorized + +from configs import dify_config +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.strategies import AppAuthzStrategy, CallerMounter +from controllers.openapi.auth.surface_gate import check_surface +from extensions.ext_database import db +from libs.oauth_bearer import ( + AuthContext, + InvalidBearerError, + Scope, + SubjectType, + _extract_bearer, # type: ignore[attr-defined] + check_workspace_membership, + get_authenticator, +) +from models import App, Tenant, TenantStatus + + +class BearerCheck: + """Resolve bearer → populate identity fields. Rate-limit is enforced + inside `BearerAuthenticator.authenticate`, so no separate step here. + Also attaches the resolved `AuthContext` to ``g.auth_ctx`` — same shape + the decorator-level ``validate_bearer`` writes — so the surface gate + + downstream readers don't see two different identity sources.""" + + def __call__(self, ctx: Context) -> None: + token = _extract_bearer(ctx.request) + if not token: + raise Unauthorized("bearer required") + + try: + authn = get_authenticator().authenticate(token) + except InvalidBearerError as e: + raise Unauthorized(str(e)) + + ctx.subject_type = authn.subject_type + ctx.subject_email = authn.subject_email + ctx.subject_issuer = authn.subject_issuer + ctx.account_id = authn.account_id + ctx.scopes = frozenset(authn.scopes) + ctx.source = authn.source + ctx.token_id = authn.token_id + ctx.expires_at = authn.expires_at + ctx.token_hash = authn.token_hash + ctx.cached_verified_tenants = dict(authn.verified_tenants) + + # Single source of truth for the request-scoped identity. Surface + # gate + handlers read `g.auth_ctx` regardless of whether the route + # ran the decorator path (`validate_bearer`) or the pipeline path. 
+        g.auth_ctx = authn
+
+
+class ScopeCheck:
+    """Verify ctx.scopes (already populated by BearerCheck) covers required."""
+
+    def __call__(self, ctx: Context) -> None:
+        # Scope.FULL is the wildcard grant; otherwise an exact scope match is required.
+        if Scope.FULL in ctx.scopes or ctx.required_scope in ctx.scopes:
+            return
+        raise Forbidden("insufficient_scope")
+
+
+class SurfaceCheck:
+    """Reject the request if `g.auth_ctx.subject_type` is not in `accepted`.
+
+    Delegates to `surface_gate.check_surface` so the inline decorator and
+    the pipeline step emit identical audit events. Relies on `BearerCheck`
+    (above) having set `g.auth_ctx`.
+    """
+
+    def __init__(self, *, accepted: frozenset[SubjectType]) -> None:
+        self._accepted = accepted
+
+    def __call__(self, ctx: Context) -> None:
+        # Reads g.auth_ctx (not ctx) so the check matches the decorator path exactly.
+        check_surface(self._accepted)
+
+
+class AppResolver:
+    """Read app_id from request.view_args, populate ctx.app + ctx.tenant.
+
+    Every endpoint using the OAuth bearer pipeline must declare
+    `<app_id>` in its route — that is the design lock-in (no body /
+    header coupling).
+    """
+
+    def __call__(self, ctx: Context) -> None:
+        app_id = (ctx.request.view_args or {}).get("app_id")
+        if not app_id:
+            raise BadRequest("app_id is required in path")
+        app = db.session.get(App, app_id)
+        if not app or app.status != "normal":
+            raise NotFound("app not found")
+        if not app.enable_api:
+            # App exists but its service API surface is switched off → 403, not 404.
+            raise Forbidden("service_api_disabled")
+        tenant = db.session.get(Tenant, app.tenant_id)
+        if tenant is None or tenant.status == TenantStatus.ARCHIVE:
+            raise Forbidden("workspace unavailable")
+        ctx.app, ctx.tenant = app, tenant
+
+
+class WorkspaceMembershipCheck:
+    """Layer 0 — workspace membership gate.
+
+    CE-only (skipped when ENTERPRISE_ENABLED). Account-subject bearers
+    (dfoa_) only — SSO subjects skip.
+ """ + + def __call__(self, ctx: Context) -> None: + if dify_config.ENTERPRISE_ENABLED: + return + if ctx.subject_type != SubjectType.ACCOUNT: + return + if ctx.account_id is None or ctx.tenant is None: + raise Unauthorized("account_id or tenant unset — BearerCheck or AppResolver did not run") + if ctx.token_hash is None: + raise Unauthorized("token_hash unset — BearerCheck did not run") + + check_workspace_membership( + account_id=ctx.account_id, + tenant_id=ctx.tenant.id, + token_hash=ctx.token_hash, + cached_verdicts=ctx.cached_verified_tenants or {}, + ) + + +class AppAuthzCheck: + def __init__(self, resolve_strategy: Callable[[], AppAuthzStrategy]) -> None: + self._resolve = resolve_strategy + + def __call__(self, ctx: Context) -> None: + if not self._resolve().authorize(ctx): + raise Forbidden("subject_no_app_access") + + +class CallerMount: + def __init__(self, *mounters: CallerMounter) -> None: + self._mounters = mounters + + def __call__(self, ctx: Context) -> None: + if ctx.subject_type is None: + raise Unauthorized("subject_type unset — BearerCheck did not run") + for m in self._mounters: + if m.applies_to(ctx.subject_type): + m.mount(ctx) + return + raise Unauthorized("no caller mounter for subject type") + + +# AuthContext re-export so callers reading `g.auth_ctx` after a pipeline +# run get a consistent import location next to the step that writes it. +__all__ = [ + "AppAuthzCheck", + "AppResolver", + "AuthContext", + "BearerCheck", + "CallerMount", + "ScopeCheck", + "SurfaceCheck", + "WorkspaceMembershipCheck", +] diff --git a/api/controllers/openapi/auth/strategies.py b/api/controllers/openapi/auth/strategies.py new file mode 100644 index 0000000000..0f4f67187d --- /dev/null +++ b/api/controllers/openapi/auth/strategies.py @@ -0,0 +1,188 @@ +"""Strategy classes for the openapi auth pipeline. 
+ +App authorization (Acl/Membership) and caller mounting (Account/EndUser) +vary along independent axes; each strategy is one class so the pipeline +composition stays a flat list. +""" + +from __future__ import annotations + +import uuid +from typing import Protocol + +from flask import current_app +from flask_login import user_logged_in +from sqlalchemy import select + +from controllers.openapi.auth.context import Context +from core.app.entities.app_invoke_entities import InvokeFrom +from extensions.ext_database import db +from libs.oauth_bearer import SubjectType +from models import Account, TenantAccountJoin +from services.end_user_service import EndUserService +from services.enterprise.enterprise_service import ( + EnterpriseService, + WebAppAccessMode, +) + + +class AppAuthzStrategy(Protocol): + def authorize(self, ctx: Context) -> bool: ... + + +class AclStrategy: + """Per-app ACL, evaluated in two stages. + + The EE gateway has already enforced tenancy and workspace membership + by the time this strategy runs, so AclStrategy only owns per-app ACL: + + 1. Subject vs access-mode compatibility (pure rule table). External-SSO + bearers belong to public-facing apps only; account bearers cover the + full set. A mismatch is an immediate deny — no IO. + 2. For modes that pair with the subject, decide whether the inner + permission API must run. Only `PRIVATE` (per-app selected-user list) + requires it; the remaining modes are pass-through. 
+ """ + + _ALLOWED_MODES_BY_SUBJECT: dict[SubjectType, frozenset[WebAppAccessMode]] = { + SubjectType.ACCOUNT: frozenset( + { + WebAppAccessMode.PUBLIC, + WebAppAccessMode.SSO_VERIFIED, + WebAppAccessMode.PRIVATE_ALL, + WebAppAccessMode.PRIVATE, + } + ), + SubjectType.EXTERNAL_SSO: frozenset( + { + WebAppAccessMode.PUBLIC, + WebAppAccessMode.SSO_VERIFIED, + } + ), + } + + _MODES_REQUIRING_INNER_CHECK: frozenset[WebAppAccessMode] = frozenset( + {WebAppAccessMode.PRIVATE} + ) + + def authorize(self, ctx: Context) -> bool: + if ctx.app is None: + return False + access_mode = self._fetch_access_mode(ctx.app.id) + if access_mode is None: + return False + if not self._subject_allowed_for_mode(ctx.subject_type, access_mode): + return False + if access_mode not in self._MODES_REQUIRING_INNER_CHECK: + return True + return self._inner_permission_check(ctx) + + @staticmethod + def _fetch_access_mode(app_id: str) -> WebAppAccessMode | None: + settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id) + if settings is None: + return None + try: + return WebAppAccessMode(settings.access_mode) + except ValueError: + return None + + @classmethod + def _subject_allowed_for_mode( + cls, subject_type: SubjectType, access_mode: WebAppAccessMode + ) -> bool: + return access_mode in cls._ALLOWED_MODES_BY_SUBJECT.get(subject_type, frozenset()) + + def _inner_permission_check(self, ctx: Context) -> bool: + if ctx.app is None: + return False + user_id = self._resolve_user_id(ctx) + if user_id is None: + return False + return EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp( + user_id=user_id, + app_id=ctx.app.id, + ) + + @staticmethod + def _resolve_user_id(ctx: Context) -> str | None: + if ctx.subject_type == SubjectType.ACCOUNT: + return str(ctx.account_id) if ctx.account_id is not None else None + if ctx.subject_email is None: + return None + account = db.session.execute( + select(Account).where(Account.email == ctx.subject_email), + 
).scalar_one_or_none() + return str(account.id) if account is not None else None + + +class MembershipStrategy: + """Tenant-membership fallback. + + Used when webapp-auth is disabled (CE deployment). Account-bearing + subjects pass if they have a TenantAccountJoin row; EXTERNAL_SSO is + denied (it requires the webapp-auth surface). + """ + + def authorize(self, ctx: Context) -> bool: + if ctx.subject_type == SubjectType.EXTERNAL_SSO: + return False + if ctx.tenant is None: + return False + return _has_tenant_membership(ctx.account_id, ctx.tenant.id) + + +def _has_tenant_membership(account_id: uuid.UUID | str | None, tenant_id: str) -> bool: + if not account_id: + return False + row = db.session.execute( + select(TenantAccountJoin.id).where( + TenantAccountJoin.tenant_id == tenant_id, + TenantAccountJoin.account_id == account_id, + ) + ).scalar_one_or_none() + return row is not None + + +def _login_as(user) -> None: + """Set Flask-Login request user so downstream services see the caller.""" + current_app.login_manager._update_request_context_with_user(user) + user_logged_in.send(current_app._get_current_object(), user=user) + + +class CallerMounter(Protocol): + def applies_to(self, subject_type: SubjectType) -> bool: ... + + def mount(self, ctx: Context) -> None: ... 
+ + +class AccountMounter: + def applies_to(self, subject_type: SubjectType) -> bool: + return subject_type == SubjectType.ACCOUNT + + def mount(self, ctx: Context) -> None: + if ctx.account_id is None: + raise RuntimeError("AccountMounter: account_id unset — BearerCheck did not run") + account = db.session.get(Account, ctx.account_id) + if account is None: + raise RuntimeError("AccountMounter: account row missing for resolved bearer") + account.current_tenant = ctx.tenant + _login_as(account) + ctx.caller, ctx.caller_kind = account, "account" + + +class EndUserMounter: + def applies_to(self, subject_type: SubjectType) -> bool: + return subject_type == SubjectType.EXTERNAL_SSO + + def mount(self, ctx: Context) -> None: + if ctx.tenant is None or ctx.app is None or ctx.subject_email is None: + raise RuntimeError("EndUserMounter: tenant/app/subject_email unset — earlier steps did not run") + end_user = EndUserService.get_or_create_end_user_by_type( + InvokeFrom.OPENAPI, + tenant_id=ctx.tenant.id, + app_id=ctx.app.id, + user_id=ctx.subject_email, + ) + _login_as(end_user) + ctx.caller, ctx.caller_kind = end_user, "end_user" diff --git a/api/controllers/openapi/auth/surface_gate.py b/api/controllers/openapi/auth/surface_gate.py new file mode 100644 index 0000000000..60b7819552 --- /dev/null +++ b/api/controllers/openapi/auth/surface_gate.py @@ -0,0 +1,90 @@ +"""Surface gate. + +`@accept_subjects(...)` is the route-level form. `SurfaceCheck` (pipeline +step) is the pipeline-level form. Both delegate to `check_surface` so the +audit emit + canonical-path message are single-sourced. + +Subjects come from `libs.oauth_bearer.SubjectType` directly — no parallel +vocabulary. Caller hits the wrong surface → 403 ``wrong_surface`` + audit +``openapi.wrong_surface_denied``. 
+""" + +from __future__ import annotations + +from collections.abc import Callable +from functools import wraps +from typing import TypeVar + +from flask import g, request +from werkzeug.exceptions import Forbidden + +from controllers.openapi._audit import emit_wrong_surface +from libs.oauth_bearer import SubjectType + +_CANONICAL_PATH: dict[SubjectType, str] = { + SubjectType.ACCOUNT: "/openapi/v1/apps", + SubjectType.EXTERNAL_SSO: "/openapi/v1/permitted-external-apps", +} + +F = TypeVar("F", bound=Callable[..., object]) + + +def check_surface(accepted: frozenset[SubjectType]) -> None: + """Enforce that ``g.auth_ctx.subject_type`` is in ``accepted``. + + Raises ``Forbidden`` with ``wrong_surface`` + canonical-path hint on + miss; emits ``openapi.wrong_surface_denied`` audit. If ``g.auth_ctx`` + is missing the bearer layer didn't run — that's a wiring bug, not a + user-driven failure, so surface it as a ``RuntimeError`` instead of + a silent 403. + """ + ctx = getattr(g, "auth_ctx", None) + if ctx is None: + raise RuntimeError( + "check_surface called without g.auth_ctx; " + "stack validate_bearer or BearerCheck above the surface gate" + ) + + subject = _coerce_subject_type(getattr(ctx, "subject_type", None)) + if subject in accepted: + return + + canonical = _CANONICAL_PATH.get(subject, "/openapi/v1/") if subject else "/openapi/v1/" + emit_wrong_surface( + subject_type=subject.value if subject else None, + attempted_path=request.path, + client_id=getattr(ctx, "client_id", None), + token_id=_stringify(getattr(ctx, "token_id", None)), + ) + raise Forbidden(description=f"wrong_surface (canonical: {canonical})") + + +def accept_subjects(*accepted: SubjectType) -> Callable[[F], F]: + accepted_set: frozenset[SubjectType] = frozenset(accepted) + + def deco(fn: F) -> F: + @wraps(fn) + def wrapper(*args: object, **kwargs: object) -> object: + check_surface(accepted_set) + return fn(*args, **kwargs) + + return wrapper # type: ignore[return-value] + + return deco + + +def 
_coerce_subject_type(raw: object) -> SubjectType | None: + if raw is None: + return None + if isinstance(raw, SubjectType): + return raw + try: + return SubjectType(raw) + except ValueError: + return None + + +def _stringify(value: object) -> str | None: + if value is None: + return None + return str(value) diff --git a/api/controllers/openapi/index.py b/api/controllers/openapi/index.py new file mode 100644 index 0000000000..a6626f9cc6 --- /dev/null +++ b/api/controllers/openapi/index.py @@ -0,0 +1,9 @@ +from flask_restx import Resource + +from controllers.openapi import openapi_ns + + +@openapi_ns.route("/_health") +class HealthApi(Resource): + def get(self): + return {"ok": True} diff --git a/api/controllers/openapi/oauth_device.py b/api/controllers/openapi/oauth_device.py new file mode 100644 index 0000000000..a2b6a5fa90 --- /dev/null +++ b/api/controllers/openapi/oauth_device.py @@ -0,0 +1,401 @@ +"""Device-flow endpoints under /openapi/v1/oauth/device/*. Two +sub-groups in one module: + + Protocol (RFC 8628, public + rate-limited): + POST /oauth/device/code + POST /oauth/device/token + GET /oauth/device/lookup + + Approval (account branch, console-cookie authed): + POST /oauth/device/approve + POST /oauth/device/deny + +SSO branch lives in oauth_device_sso.py. 
+""" + +from __future__ import annotations + +import logging + +from flask import request +from flask_login import login_required +from flask_restx import Resource +from pydantic import BaseModel, ValidationError +from werkzeug.exceptions import BadRequest + +from configs import dify_config +from controllers.console.wraps import account_initialization_required, setup_required +from controllers.openapi import openapi_ns +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from libs.helper import extract_remote_ip +from libs.login import current_account_with_tenant +from libs.oauth_bearer import MINTABLE_PROFILES, SubjectType, bearer_feature_required +from libs.rate_limit import ( + LIMIT_APPROVE_CONSOLE, + LIMIT_DEVICE_CODE_PER_IP, + LIMIT_LOOKUP_PUBLIC, + rate_limit, +) +from services.oauth_device_flow import ( + ACCOUNT_ISSUER_SENTINEL, + DEFAULT_POLL_INTERVAL_SECONDS, + DEVICE_FLOW_TTL_SECONDS, + DeviceFlowRedis, + DeviceFlowStatus, + InvalidTransitionError, + SlowDownDecision, + StateNotFoundError, + mint_oauth_token, + oauth_ttl_days, +) +from services.openapi.mint_policy import MintPolicyViolation, validate_mint_policy + +logger = logging.getLogger(__name__) + + +# ========================================================================= +# Request / query schemas +# ========================================================================= + + +class DeviceCodeRequest(BaseModel): + client_id: str + device_label: str + + +class DevicePollRequest(BaseModel): + device_code: str + client_id: str + + +class DeviceLookupQuery(BaseModel): + user_code: str + + +class DeviceMutateRequest(BaseModel): + user_code: str + + +def _validate_json[M: BaseModel](model: type[M]) -> M: + body = request.get_json(silent=True) or {} + try: + return model.model_validate(body) + except ValidationError as exc: + raise BadRequest(str(exc)) + + +def _validate_query[M: BaseModel](model: type[M]) -> M: + try: + return 
model.model_validate(request.args.to_dict(flat=True)) + except ValidationError as exc: + raise BadRequest(str(exc)) + + +# ========================================================================= +# Protocol endpoints — RFC 8628 (public + per-IP rate limit) +# ========================================================================= + + +@openapi_ns.route("/oauth/device/code") +class OAuthDeviceCodeApi(Resource): + @rate_limit(LIMIT_DEVICE_CODE_PER_IP) + def post(self): + payload = _validate_json(DeviceCodeRequest) + client_id = payload.client_id + device_label = payload.device_label + + if client_id not in dify_config.OPENAPI_KNOWN_CLIENT_IDS: + return {"error": "unsupported_client"}, 400 + + store = DeviceFlowRedis(redis_client) + ip = extract_remote_ip(request) + device_code, user_code, expires_in = store.start(client_id, device_label, created_ip=ip) + + return { + "device_code": device_code, + "user_code": user_code, + "verification_uri": _verification_uri(), + "expires_in": expires_in, + "interval": DEFAULT_POLL_INTERVAL_SECONDS, + }, 200 + + +@openapi_ns.route("/oauth/device/token") +class OAuthDeviceTokenApi(Resource): + """RFC 8628 poll.""" + + def post(self): + payload = _validate_json(DevicePollRequest) + device_code = payload.device_code + + store = DeviceFlowRedis(redis_client) + + # slow_down beats every other branch — polling-too-fast clients + # see only that response regardless of underlying state. 
+ if store.record_poll(device_code, DEFAULT_POLL_INTERVAL_SECONDS) is SlowDownDecision.SLOW_DOWN: + return {"error": "slow_down"}, 400 + + state = store.load_by_device_code(device_code) + if state is None: + return {"error": "expired_token"}, 400 + + if state.status is DeviceFlowStatus.PENDING: + return {"error": "authorization_pending"}, 400 + + terminal = store.consume_on_poll(device_code) + if terminal is None: + return {"error": "expired_token"}, 400 + + if terminal.status is DeviceFlowStatus.DENIED: + return {"error": "access_denied"}, 400 + + poll_payload = terminal.poll_payload or {} + if "token" not in poll_payload: + logger.error("device_flow: approved state missing poll_payload for %s", device_code) + return {"error": "expired_token"}, 400 + + _audit_cross_ip_if_needed(state) + return poll_payload, 200 + + +@openapi_ns.route("/oauth/device/lookup") +class OAuthDeviceLookupApi(Resource): + """Read-only — public for pre-validate before login. user_code is + high-entropy + short-TTL; per-IP rate limit blocks enumeration. 
+ """ + + @rate_limit(LIMIT_LOOKUP_PUBLIC) + def get(self): + payload = _validate_query(DeviceLookupQuery) + user_code = payload.user_code.strip().upper() + + store = DeviceFlowRedis(redis_client) + found = store.load_by_user_code(user_code) + if found is None: + return {"valid": False, "expires_in_remaining": 0, "client_id": None}, 200 + + _device_code, state = found + if state.status is not DeviceFlowStatus.PENDING: + return {"valid": False, "expires_in_remaining": 0, "client_id": state.client_id}, 200 + + return { + "valid": True, + "expires_in_remaining": DEVICE_FLOW_TTL_SECONDS, + "client_id": state.client_id, + }, 200 + + +# ========================================================================= +# Approval endpoints — account branch (cookie-authed) +# ========================================================================= + + +_APPROVE_GUARD_KEY_FMT = "device_code:{code}:approving" +_APPROVE_GUARD_TTL_SECONDS = 10 + + +@openapi_ns.route("/oauth/device/approve") +class DeviceApproveApi(Resource): + @setup_required + @login_required + @account_initialization_required + @bearer_feature_required + @rate_limit(LIMIT_APPROVE_CONSOLE) + def post(self): + payload = _validate_json(DeviceMutateRequest) + user_code = payload.user_code.strip().upper() + + account, tenant = current_account_with_tenant() + store = DeviceFlowRedis(redis_client) + + found = store.load_by_user_code(user_code) + if found is None: + return {"error": "expired_or_unknown"}, 404 + device_code, state = found + if state.status is not DeviceFlowStatus.PENDING: + return {"error": "already_resolved"}, 409 + + # SET NX guard — without it, two in-flight approves both pass + # PENDING, both mint, and the second upsert silently rotates the + # first caller into an already-revoked token. 
+ guard_key = _APPROVE_GUARD_KEY_FMT.format(code=device_code) + if not redis_client.set(guard_key, "1", nx=True, ex=_APPROVE_GUARD_TTL_SECONDS): + return {"error": "approve_in_progress"}, 409 + + try: + profile = MINTABLE_PROFILES[SubjectType.ACCOUNT] + try: + validate_mint_policy( + subject_type=profile.subject_type, + prefix=profile.prefix, + scopes=profile.scopes, + ) + except MintPolicyViolation as e: + raise BadRequest(description=str(e)) from None + ttl_days = oauth_ttl_days(tenant_id=tenant) + mint = mint_oauth_token( + db.session, + redis_client, + subject_email=account.email, + subject_issuer=ACCOUNT_ISSUER_SENTINEL, + account_id=str(account.id), + client_id=state.client_id, + device_label=state.device_label, + prefix=profile.prefix, + ttl_days=ttl_days, + ) + + poll_payload = _build_account_poll_payload(account, tenant, mint) + try: + store.approve( + device_code, + subject_email=account.email, + account_id=str(account.id), + subject_issuer=ACCOUNT_ISSUER_SENTINEL, + minted_token=mint.token, + token_id=str(mint.token_id), + poll_payload=poll_payload, + ) + except (StateNotFoundError, InvalidTransitionError): + # Row minted but state vanished — roll forward; the orphan + # token is revocable via auth devices list / Authorized Apps. 
+ logger.exception("device_flow: approve raced on %s", device_code) + return {"error": "state_lost"}, 409 + finally: + redis_client.delete(guard_key) + + _emit_approve_audit(state, account, tenant, mint) + return {"status": "approved"}, 200 + + +@openapi_ns.route("/oauth/device/deny") +class DeviceDenyApi(Resource): + @setup_required + @login_required + @account_initialization_required + @bearer_feature_required + @rate_limit(LIMIT_APPROVE_CONSOLE) + def post(self): + payload = _validate_json(DeviceMutateRequest) + user_code = payload.user_code.strip().upper() + + store = DeviceFlowRedis(redis_client) + found = store.load_by_user_code(user_code) + if found is None: + return {"error": "expired_or_unknown"}, 404 + device_code, state = found + if state.status is not DeviceFlowStatus.PENDING: + return {"error": "already_resolved"}, 409 + + try: + store.deny(device_code) + except (StateNotFoundError, InvalidTransitionError): + logger.exception("device_flow: deny raced on %s", device_code) + return {"error": "state_lost"}, 409 + + _emit_deny_audit(state) + return {"status": "denied"}, 200 + + +# ========================================================================= +# Helpers +# ========================================================================= + + +def _verification_uri() -> str: + base = getattr(dify_config, "CONSOLE_WEB_URL", None) + if base: + return f"{base.rstrip('/')}/device" + return f"{request.host_url.rstrip('/')}/device" + + +def _audit_cross_ip_if_needed(state) -> None: + poll_ip = extract_remote_ip(request) + if state.created_ip and poll_ip and poll_ip != state.created_ip: + logger.warning( + "audit: oauth.device_code_cross_ip_poll token_id=%s creation_ip=%s poll_ip=%s", + state.token_id, + state.created_ip, + poll_ip, + extra={ + "audit": True, + "token_id": state.token_id, + "creation_ip": state.created_ip, + "poll_ip": poll_ip, + }, + ) + + +def _build_account_poll_payload(account, tenant, mint) -> dict: + """Pre-render the poll-response body so 
the unauthenticated poll + handler doesn't re-query accounts/tenants for authz data. + """ + from models import Tenant, TenantAccountJoin + + rows = ( + db.session.query(Tenant, TenantAccountJoin) + .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id) + .filter(TenantAccountJoin.account_id == account.id) + .all() + ) + workspaces = [{"id": str(t.id), "name": t.name, "role": getattr(m, "role", "")} for t, m in rows] + # Prefer active session tenant → DB-flagged current join → first membership. + default_ws_id = None + if tenant and any(w["id"] == str(tenant) for w in workspaces): + default_ws_id = str(tenant) + if default_ws_id is None: + for _t, m in rows: + if getattr(m, "current", False): + default_ws_id = str(m.tenant_id) + break + if default_ws_id is None and workspaces: + default_ws_id = workspaces[0]["id"] + + return { + "token": mint.token, + "expires_at": mint.expires_at.isoformat(), + "subject_type": SubjectType.ACCOUNT, + "account": {"id": str(account.id), "email": account.email, "name": account.name}, + "workspaces": workspaces, + "default_workspace_id": default_ws_id, + "token_id": str(mint.token_id), + } + + +def _emit_approve_audit(state, account, tenant, mint) -> None: + logger.warning( + "audit: oauth.device_flow_approved token_id=%s subject=%s client_id=%s device_label=%s rotated=? 
expires_at=%s", + mint.token_id, + account.email, + state.client_id, + state.device_label, + mint.expires_at, + extra={ + "audit": True, + "event": "oauth.device_flow_approved", + "token_id": str(mint.token_id), + "subject_type": SubjectType.ACCOUNT, + "subject_email": account.email, + "account_id": str(account.id), + "tenant_id": tenant, + "client_id": state.client_id, + "device_label": state.device_label, + "scopes": ["full"], + "expires_at": mint.expires_at.isoformat(), + }, + ) + + +def _emit_deny_audit(state) -> None: + logger.warning( + "audit: oauth.device_flow_denied client_id=%s device_label=%s", + state.client_id, + state.device_label, + extra={ + "audit": True, + "event": "oauth.device_flow_denied", + "client_id": state.client_id, + "device_label": state.device_label, + }, + ) diff --git a/api/controllers/openapi/oauth_device_sso.py b/api/controllers/openapi/oauth_device_sso.py new file mode 100644 index 0000000000..6065cfe430 --- /dev/null +++ b/api/controllers/openapi/oauth_device_sso.py @@ -0,0 +1,369 @@ +"""SSO-branch device-flow endpoints under /openapi/v1/oauth/device/*. +EE-only. Browser flow: + + GET /oauth/device/sso-initiate → 302 to IdP authorize URL + GET /oauth/device/sso-complete → ACS callback, sets approval-grant cookie + GET /oauth/device/approval-context → SPA reads cookie claims (idempotent) + POST /oauth/device/approve-external → mints dfoe_ token + clears cookie + +Function-based (raw @bp.route) rather than Resource classes because the +handlers do redirects + cookie kwargs that don't fit the Resource shape. 
+""" + +from __future__ import annotations + +import logging +import secrets +from dataclasses import dataclass + +from flask import jsonify, make_response, redirect, request +from sqlalchemy import func, select +from werkzeug.exceptions import ( + BadGateway, + BadRequest, + Conflict, + Forbidden, + NotFound, + Unauthorized, +) + +from controllers.openapi import bp +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from libs import jws +from libs.device_flow_security import ( + APPROVAL_GRANT_COOKIE_NAME, + ApprovalGrantClaims, + approval_grant_cleared_cookie_kwargs, + approval_grant_cookie_kwargs, + consume_approval_grant_nonce, + consume_sso_assertion_nonce, + enterprise_only, + mint_approval_grant, + verify_approval_grant, +) +from libs.oauth_bearer import MINTABLE_PROFILES, SubjectType +from libs.rate_limit import ( + LIMIT_APPROVE_EXT_PER_EMAIL, + LIMIT_SSO_INITIATE_PER_IP, + enforce, + rate_limit, +) +from models import Account +from models.account import AccountStatus +from services.enterprise.enterprise_service import EnterpriseService +from services.oauth_device_flow import ( + DeviceFlowRedis, + DeviceFlowStatus, + InvalidTransitionError, + StateNotFoundError, + mint_oauth_token, + oauth_ttl_days, +) +from services.openapi.mint_policy import MintPolicyViolation, validate_mint_policy + +logger = logging.getLogger(__name__) + + +# Matches DEVICE_FLOW_TTL_SECONDS so the signed state can't outlive the +# device_code it references. +STATE_ENVELOPE_TTL_SECONDS = 15 * 60 + +# Canonical sso-complete path. IdP-side ACS callback URL must point here. 
+_SSO_COMPLETE_PATH = "/openapi/v1/oauth/device/sso-complete" + + +@bp.route("/oauth/device/sso-initiate", methods=["GET"]) +@enterprise_only +@rate_limit(LIMIT_SSO_INITIATE_PER_IP) +def sso_initiate(): + user_code = (request.args.get("user_code") or "").strip().upper() + if not user_code: + raise BadRequest("user_code required") + + store = DeviceFlowRedis(redis_client) + found = store.load_by_user_code(user_code) + if found is None: + raise BadRequest("invalid_user_code") + _, state = found + if state.status is not DeviceFlowStatus.PENDING: + raise BadRequest("invalid_user_code") + + keyset = jws.KeySet.from_shared_secret() + signed_state = jws.sign( + keyset, + payload={ + "redirect_url": "", + "app_code": "", + "intent": "device_flow", + "user_code": user_code, + "nonce": secrets.token_urlsafe(16), + "return_to": "", + "idp_callback_url": f"{request.host_url.rstrip('/')}{_SSO_COMPLETE_PATH}", + }, + aud=jws.AUD_STATE_ENVELOPE, + ttl_seconds=STATE_ENVELOPE_TTL_SECONDS, + ) + + try: + reply = EnterpriseService.initiate_device_flow_sso(signed_state) + except Exception as e: + logger.warning("sso-initiate: enterprise call failed: %s", e) + raise BadGateway("sso_initiate_failed") from e + + url = (reply or {}).get("url") + if not url: + raise BadGateway("sso_initiate_missing_url") + + # Clear stale approval-grant — defends against cross-tab/back-button mixing. 
+ resp = redirect(url, code=302) + resp.set_cookie(**approval_grant_cleared_cookie_kwargs()) + return resp + + +@bp.route("/oauth/device/sso-complete", methods=["GET"]) +@enterprise_only +def sso_complete(): + blob = request.args.get("sso_assertion") + if not blob: + raise BadRequest("sso_assertion required") + + keyset = jws.KeySet.from_shared_secret() + + try: + claims = jws.verify(keyset, blob, expected_aud=jws.AUD_EXT_SUBJECT_ASSERTION) + except jws.VerifyError as e: + logger.warning("sso-complete: rejected assertion: %s", e) + raise BadRequest("invalid_sso_assertion") from e + + if not consume_sso_assertion_nonce(redis_client, claims.get("nonce", "")): + raise BadRequest("invalid_sso_assertion") + + user_code = (claims.get("user_code") or "").strip().upper() + store = DeviceFlowRedis(redis_client) + found = store.load_by_user_code(user_code) + if found is None: + raise Conflict("user_code_not_pending") + _, state = found + if state.status is not DeviceFlowStatus.PENDING: + raise Conflict("user_code_not_pending") + + if _email_belongs_to_dify_account(claims["email"]): + _emit_external_rejection_audit( + state, + _RejectedClaims(subject_email=claims["email"], subject_issuer=claims["issuer"]), + reason="email_belongs_to_dify_account", + ) + return redirect("/device?sso_error=email_belongs_to_dify_account", code=302) + + iss = request.host_url.rstrip("/") + cookie_value, _ = mint_approval_grant( + keyset=keyset, + iss=iss, + subject_email=claims["email"], + subject_issuer=claims["issuer"], + user_code=user_code, + ) + + resp = redirect("/device?sso_verified=1", code=302) + resp.set_cookie(**approval_grant_cookie_kwargs(cookie_value)) + return resp + + +@bp.route("/oauth/device/approval-context", methods=["GET"]) +@enterprise_only +def approval_context(): + token = request.cookies.get(APPROVAL_GRANT_COOKIE_NAME) + if not token: + raise Unauthorized("no_session") + + keyset = jws.KeySet.from_shared_secret() + try: + claims = verify_approval_grant(keyset, token) + 
except jws.VerifyError as e: + logger.warning("approval-context: bad cookie: %s", e) + raise Unauthorized("no_session") from e + + return jsonify( + { + "subject_email": claims.subject_email, + "subject_issuer": claims.subject_issuer, + "user_code": claims.user_code, + "csrf_token": claims.csrf_token, + "expires_at": claims.expires_at.isoformat(), + } + ), 200 + + +@bp.route("/oauth/device/approve-external", methods=["POST"]) +@enterprise_only +def approve_external(): + token = request.cookies.get(APPROVAL_GRANT_COOKIE_NAME) + if not token: + raise Unauthorized("invalid_session") + + keyset = jws.KeySet.from_shared_secret() + try: + claims: ApprovalGrantClaims = verify_approval_grant(keyset, token) + except jws.VerifyError as e: + logger.warning("approve-external: bad cookie: %s", e) + raise Unauthorized("invalid_session") from e + + enforce(LIMIT_APPROVE_EXT_PER_EMAIL, key=f"subject:{claims.subject_email}") + + csrf_header = request.headers.get("X-CSRF-Token", "") + if not csrf_header or csrf_header != claims.csrf_token: + raise Forbidden("csrf_mismatch") + + data = request.get_json(silent=True) or {} + body_user_code = (data.get("user_code") or "").strip().upper() + if body_user_code != claims.user_code: + raise BadRequest("user_code_mismatch") + + store = DeviceFlowRedis(redis_client) + found = store.load_by_user_code(claims.user_code) + if found is None: + raise NotFound("user_code_not_pending") + device_code, state = found + if state.status is not DeviceFlowStatus.PENDING: + raise Conflict("user_code_not_pending") + + if _email_belongs_to_dify_account(claims.subject_email): + _emit_external_rejection_audit(state, claims, reason="email_belongs_to_dify_account") + raise Forbidden("email_belongs_to_dify_account") + + if not consume_approval_grant_nonce(redis_client, claims.nonce): + raise Unauthorized("session_already_consumed") + + profile = MINTABLE_PROFILES[SubjectType.EXTERNAL_SSO] + try: + validate_mint_policy( + subject_type=profile.subject_type, + 
prefix=profile.prefix, + scopes=profile.scopes, + ) + except MintPolicyViolation as e: + raise BadRequest(description=str(e)) from None + + ttl_days = oauth_ttl_days(tenant_id=None) + mint = mint_oauth_token( + db.session, + redis_client, + subject_email=claims.subject_email, + subject_issuer=claims.subject_issuer, + account_id=None, + client_id=state.client_id, + device_label=state.device_label, + prefix=profile.prefix, + ttl_days=ttl_days, + ) + + poll_payload = { + "token": mint.token, + "expires_at": mint.expires_at.isoformat(), + "subject_type": SubjectType.EXTERNAL_SSO, + "subject_email": claims.subject_email, + "subject_issuer": claims.subject_issuer, + "account": None, + "workspaces": [], + "default_workspace_id": None, + "token_id": str(mint.token_id), + } + + try: + store.approve( + device_code, + subject_email=claims.subject_email, + account_id=None, + subject_issuer=claims.subject_issuer, + minted_token=mint.token, + token_id=str(mint.token_id), + poll_payload=poll_payload, + ) + except (StateNotFoundError, InvalidTransitionError) as e: + logger.exception("approve-external: state transition raced") + raise Conflict("state_lost") from e + + _emit_approve_external_audit(state, claims, mint) + + resp = make_response(jsonify({"status": "approved"}), 200) + resp.set_cookie(**approval_grant_cleared_cookie_kwargs()) + return resp + + +@dataclass(frozen=True) +class _RejectedClaims: + """Minimal subject shape consumed by `_emit_external_rejection_audit`. + + Mirrors the attributes used from `ApprovalGrantClaims` so callers holding + only a raw JWS claims dict (e.g. `sso_complete`) can emit the same audit + event without reaching for the full dataclass. + """ + + subject_email: str + subject_issuer: str + + +def _email_belongs_to_dify_account(email: str) -> bool: + """External SSO subjects whose email matches an active Dify Account must + authenticate via the internal Dify login path (which mints dfoa_), not via + the external SSO device flow. 
Returning True here blocks dfoe_ minting. + + Pending/uninitialized/banned/closed accounts do not block: pending and + uninitialized users may complete invitation via SSO; banned and closed + accounts are handled by separate enforcement paths. + """ + if not email: + return False + normalized = email.strip().lower() + if not normalized: + return False + row = db.session.execute( + select(Account.id).where( + func.lower(Account.email) == normalized, + Account.status == AccountStatus.ACTIVE, + ), + ).scalar_one_or_none() + return row is not None + + +def _emit_external_rejection_audit(state, claims, *, reason: str) -> None: + logger.warning( + "audit: oauth.device_flow_rejected subject_type=%s subject_email=%s subject_issuer=%s reason=%s", + SubjectType.EXTERNAL_SSO, + claims.subject_email, + claims.subject_issuer, + reason, + extra={ + "audit": True, + "event": "oauth.device_flow_rejected", + "subject_type": SubjectType.EXTERNAL_SSO, + "subject_email": claims.subject_email, + "subject_issuer": claims.subject_issuer, + "reason": reason, + "client_id": state.client_id, + "device_label": state.device_label, + }, + ) + + +def _emit_approve_external_audit(state, claims, mint) -> None: + logger.warning( + "audit: oauth.device_flow_approved subject_type=%s subject_email=%s subject_issuer=%s token_id=%s", + SubjectType.EXTERNAL_SSO, + claims.subject_email, + claims.subject_issuer, + mint.token_id, + extra={ + "audit": True, + "event": "oauth.device_flow_approved", + "subject_type": SubjectType.EXTERNAL_SSO, + "subject_email": claims.subject_email, + "subject_issuer": claims.subject_issuer, + "token_id": str(mint.token_id), + "client_id": state.client_id, + "device_label": state.device_label, + "scopes": ["apps:run"], + "expires_at": mint.expires_at.isoformat(), + }, + ) diff --git a/api/controllers/openapi/workspaces.py b/api/controllers/openapi/workspaces.py new file mode 100644 index 0000000000..aba94a131d --- /dev/null +++ b/api/controllers/openapi/workspaces.py @@ -0,0 
+1,87 @@ +"""User-scoped workspace reads under /openapi/v1/workspaces. Bearer-authed +counterparts to the cookie-authed /console/api/workspaces endpoints. + +Account bearers (dfoa_) see every tenant they're a member of. External +SSO bearers (dfoe_) have no account_id and so see an empty list — that +matches /openapi/v1/account. +""" + +from __future__ import annotations + +from itertools import starmap + +from flask import g +from flask_restx import Resource +from sqlalchemy import select +from werkzeug.exceptions import NotFound + +from controllers.openapi import openapi_ns +from controllers.openapi.auth.surface_gate import accept_subjects +from extensions.ext_database import db +from libs.oauth_bearer import ( + ACCEPT_USER_ANY, + SubjectType, + validate_bearer, +) +from models import Tenant, TenantAccountJoin + + +@openapi_ns.route("/workspaces") +class WorkspacesApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + @accept_subjects(SubjectType.ACCOUNT) + def get(self): + ctx = g.auth_ctx + + rows = db.session.execute( + select(Tenant, TenantAccountJoin) + .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id) + .where(TenantAccountJoin.account_id == str(ctx.account_id)) + .order_by(Tenant.created_at.asc()) + ).all() + + return {"workspaces": list(starmap(_workspace_summary, rows))}, 200 + + +@openapi_ns.route("/workspaces/") +class WorkspaceByIdApi(Resource): + @validate_bearer(accept=ACCEPT_USER_ANY) + @accept_subjects(SubjectType.ACCOUNT) + def get(self, workspace_id: str): + ctx = g.auth_ctx + + row = db.session.execute( + select(Tenant, TenantAccountJoin) + .join(TenantAccountJoin, TenantAccountJoin.tenant_id == Tenant.id) + .where( + Tenant.id == workspace_id, + TenantAccountJoin.account_id == str(ctx.account_id), + ) + ).first() + # 404 (not 403) on non-member so workspace IDs don't leak across tenants. 
+ if row is None: + raise NotFound("workspace not found") + + tenant, membership = row + return _workspace_detail(tenant, membership), 200 + + +def _workspace_summary(tenant: Tenant, membership: TenantAccountJoin) -> dict: + return { + "id": str(tenant.id), + "name": tenant.name, + "role": getattr(membership, "role", ""), + "status": tenant.status, + "current": getattr(membership, "current", False), + } + + +def _workspace_detail(tenant: Tenant, membership: TenantAccountJoin) -> dict: + return { + "id": str(tenant.id), + "name": tenant.name, + "role": getattr(membership, "role", ""), + "status": tenant.status, + "current": getattr(membership, "current", False), + "created_at": tenant.created_at.isoformat() if tenant.created_at else None, + } diff --git a/api/controllers/web/wraps.py b/api/controllers/web/wraps.py index 11650fa4b5..ccc9c0f8f6 100644 --- a/api/controllers/web/wraps.py +++ b/api/controllers/web/wraps.py @@ -16,7 +16,7 @@ from libs.passport import PassportService from libs.token import extract_webapp_passport from models.model import App, EndUser, Site from services.app_service import AppService -from services.enterprise.enterprise_service import EnterpriseService, WebAppSettings +from services.enterprise.enterprise_service import EnterpriseService, WebAppAccessMode, WebAppSettings from services.feature_service import FeatureService from services.webapp_auth_service import WebAppAuthService @@ -74,7 +74,7 @@ def decode_jwt_token(app_code: str | None = None, user_id: str | None = None) -> webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id) if not webapp_settings: raise NotFound("Web app settings not found.") - app_web_auth_enabled = webapp_settings.access_mode != "public" + app_web_auth_enabled = webapp_settings.access_mode != WebAppAccessMode.PUBLIC _validate_webapp_token(decoded, app_web_auth_enabled, system_features.webapp_auth.enabled) _validate_user_accessibility( @@ -88,7 +88,8 @@ def decode_jwt_token(app_code: str | 
None = None, user_id: str | None = None) -> raise Unauthorized("Please re-login to access the web app.") app_id = AppService.get_app_id_by_code(app_code) app_web_auth_enabled = ( - EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id).access_mode != "public" + EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id=app_id).access_mode + != WebAppAccessMode.PUBLIC ) if app_web_auth_enabled: raise WebAppAuthRequiredError() diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 15645add57..23f040d838 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -685,6 +685,8 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): match invoke_from: case InvokeFrom.SERVICE_API: created_from = WorkflowAppLogCreatedFrom.SERVICE_API + case InvokeFrom.OPENAPI: + created_from = WorkflowAppLogCreatedFrom.OPENAPI case InvokeFrom.EXPLORE: created_from = WorkflowAppLogCreatedFrom.INSTALLED_APP case InvokeFrom.WEB_APP: diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index 09992f4bbf..0c4d184f1e 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -24,6 +24,7 @@ class UserFrom(StrEnum): class InvokeFrom(StrEnum): SERVICE_API = "service-api" + OPENAPI = "openapi" WEB_APP = "web-app" TRIGGER = "trigger" EXPLORE = "explore" diff --git a/api/extensions/ext_blueprints.py b/api/extensions/ext_blueprints.py index 7d13f0c061..92131a04ab 100644 --- a/api/extensions/ext_blueprints.py +++ b/api/extensions/ext_blueprints.py @@ -8,6 +8,8 @@ AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN) EMBED_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE) EXPOSED_HEADERS: tuple[str, ...] 
= ("X-Version", "X-Env", "X-Trace-Id") +OPENAPI_HEADERS: tuple[str, ...] = ("Authorization", "Content-Type", HEADER_NAME_CSRF_TOKEN) +OPENAPI_MAX_AGE_SECONDS: int = 600 def _apply_cors_once(bp, /, **cors_kwargs): @@ -29,6 +31,7 @@ def init_app(app: DifyApp): from controllers.files import bp as files_bp from controllers.inner_api import bp as inner_api_bp from controllers.mcp import bp as mcp_bp + from controllers.openapi import bp as openapi_bp from controllers.service_api import bp as service_api_bp from controllers.trigger import bp as trigger_bp from controllers.web import bp as web_bp @@ -41,6 +44,22 @@ def init_app(app: DifyApp): ) app.register_blueprint(service_api_bp) + # User-scoped programmatic API. Default empty allowlist = same-origin + # only; expand via OPENAPI_CORS_ALLOW_ORIGINS for third-party + # integrations. supports_credentials so cookie-authed approve/deny + # work; cross-origin OPTIONS without an allowed origin will fail + # the same as on the console blueprint. + _apply_cors_once( + openapi_bp, + resources={r"/*": {"origins": dify_config.OPENAPI_CORS_ALLOW_ORIGINS}}, + supports_credentials=True, + allow_headers=list(OPENAPI_HEADERS), + methods=["GET", "POST", "PATCH", "DELETE", "OPTIONS"], + expose_headers=list(EXPOSED_HEADERS), + max_age=OPENAPI_MAX_AGE_SECONDS, + ) + app.register_blueprint(openapi_bp) + _apply_cors_once( web_bp, resources={ diff --git a/api/extensions/ext_celery.py b/api/extensions/ext_celery.py index 340f514fcc..fce065eda9 100644 --- a/api/extensions/ext_celery.py +++ b/api/extensions/ext_celery.py @@ -222,6 +222,12 @@ def init_app(app: DifyApp) -> Celery: "task": "schedule.clean_workflow_runs_task.clean_workflow_runs_task", "schedule": crontab(minute="0", hour="0"), } + if dify_config.ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK: + imports.append("schedule.clean_oauth_access_tokens_task") + beat_schedule["clean_oauth_access_tokens_task"] = { + "task": "schedule.clean_oauth_access_tokens_task.clean_oauth_access_tokens_task", + 
"schedule": crontab(minute="0", hour="5", day_of_month=f"*/{day}"), + } if dify_config.ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: imports.append("schedule.workflow_schedule_task") beat_schedule["workflow_schedule_task"] = { diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index bc59eaca63..9f9372888f 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -12,7 +12,7 @@ from constants import HEADER_NAME_APP_CODE from dify_app import DifyApp from extensions.ext_database import db from libs.passport import PassportService -from libs.token import extract_access_token, extract_webapp_passport +from libs.token import extract_access_token, extract_console_cookie_token, extract_webapp_passport from models import Account, Tenant, TenantAccountJoin from models.model import AppMCPServer, EndUser from services.account_service import AccountService @@ -84,6 +84,24 @@ def load_user_from_request(request_from_flask_login: Request) -> LoginUser | Non logged_in_account = AccountService.load_logged_in_account(account_id=user_id) return logged_in_account + elif request.blueprint == "openapi": + # Account-branch device-flow approval routes (approve / deny / + # approval-context) sit under @login_required and authenticate via + # the console session cookie. Cookie-only on purpose — bearer + # tokens (dfoa_/dfoe_) live on the Authorization header and are + # validated by AppPipeline, not flask-login. 
+ cookie_token = extract_console_cookie_token(request) + if not cookie_token: + return None + try: + decoded = PassportService().verify(cookie_token) + except Exception: + return None + user_id = decoded.get("user_id") + source = decoded.get("token_source") + if source or not user_id: + return None + return AccountService.load_logged_in_account(account_id=user_id) elif request.blueprint == "web": app_code = request.headers.get(HEADER_NAME_APP_CODE) webapp_token = extract_webapp_passport(app_code, request) if app_code else None diff --git a/api/extensions/ext_oauth_bearer.py b/api/extensions/ext_oauth_bearer.py new file mode 100644 index 0000000000..58c2ac2d2c --- /dev/null +++ b/api/extensions/ext_oauth_bearer.py @@ -0,0 +1,23 @@ +"""Bind the bearer authenticator at startup. Must run after ext_database +and ext_redis (needs both factories). +""" + +from __future__ import annotations + +from configs import dify_config +from dify_app import DifyApp +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from libs.oauth_bearer import build_and_bind + + +def is_enabled() -> bool: + return dify_config.ENABLE_OAUTH_BEARER + + +def init_app(app: DifyApp) -> None: + # scoped_session isn't a context manager; request teardown closes it. + def session_factory(): + return db.session + + build_and_bind(session_factory=session_factory, redis_client=redis_client) diff --git a/api/libs/device_flow_security.py b/api/libs/device_flow_security.py new file mode 100644 index 0000000000..d973a0820b --- /dev/null +++ b/api/libs/device_flow_security.py @@ -0,0 +1,196 @@ +"""Device-flow security primitives: enterprise_only gate, approval-grant +cookie mint/verify/consume, and anti-framing headers. 
+""" + +from __future__ import annotations + +import logging +import secrets +from collections.abc import Callable +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +from functools import wraps + +from flask import Blueprint +from werkzeug.exceptions import NotFound + +from libs import jws +from libs.token import is_secure +from services.feature_service import FeatureService, LicenseStatus + +logger = logging.getLogger(__name__) + + +# ============================================================================ +# enterprise_only decorator +# ============================================================================ + + +# Fail-closed: any non-EE-active status (default NONE on CE, plus INACTIVE / EXPIRED / LOST) +# is denied. Future LicenseStatus values default to denial unless explicitly admitted. +_EE_ENABLED_STATUSES = {LicenseStatus.ACTIVE, LicenseStatus.EXPIRING} + + +def enterprise_only[**P, R](view: Callable[P, R]) -> Callable[P, R]: + """404 on CE, passthrough on EE. Apply before rate-limit so CE + responses don't consume the bucket. 
+ """ + + @wraps(view) + def decorated(*args: P.args, **kwargs: P.kwargs): + settings = FeatureService.get_system_features() + if settings.license.status not in _EE_ENABLED_STATUSES: + raise NotFound() + return view(*args, **kwargs) + + return decorated + + +# ============================================================================ +# approval_grant cookie +# ============================================================================ + + +APPROVAL_GRANT_COOKIE_NAME = "device_approval_grant" +APPROVAL_GRANT_COOKIE_PATH = "/openapi/v1/oauth/device" +APPROVAL_GRANT_COOKIE_TTL_SECONDS = 300 # 5 min +NONCE_TTL_SECONDS = 600 # 2x cookie TTL — defeats clock-skew late replay +NONCE_KEY_FMT = "device_approval_grant_nonce:{nonce}" +SSO_ASSERTION_NONCE_KEY_FMT = "sso_assertion_nonce:{nonce}" + + +@dataclass(frozen=True, slots=True) +class ApprovalGrantClaims: + subject_email: str + subject_issuer: str + user_code: str + nonce: str + csrf_token: str + expires_at: datetime + + +def mint_approval_grant( + *, + keyset: jws.KeySet, + iss: str, + subject_email: str, + subject_issuer: str, + user_code: str, +) -> tuple[str, ApprovalGrantClaims]: + """Use ``approval_grant_cookie_kwargs`` to set the cookie — single + source of truth for Path/HttpOnly/Secure/SameSite. 
+ """ + now = datetime.now(UTC) + exp = now + timedelta(seconds=APPROVAL_GRANT_COOKIE_TTL_SECONDS) + nonce = _random_opaque() + csrf_token = _random_opaque() + + payload = { + "iss": iss, + "subject_email": subject_email, + "subject_issuer": subject_issuer, + "user_code": user_code, + "nonce": nonce, + "csrf_token": csrf_token, + } + token = jws.sign(keyset, payload, aud=jws.AUD_APPROVAL_GRANT, ttl_seconds=APPROVAL_GRANT_COOKIE_TTL_SECONDS) + + return token, ApprovalGrantClaims( + subject_email=subject_email, + subject_issuer=subject_issuer, + user_code=user_code, + nonce=nonce, + csrf_token=csrf_token, + expires_at=exp, + ) + + +def verify_approval_grant(keyset: jws.KeySet, token: str) -> ApprovalGrantClaims: + """Sig + aud + exp only — nonce consumption is the caller's job.""" + data = jws.verify(keyset, token, expected_aud=jws.AUD_APPROVAL_GRANT) + return ApprovalGrantClaims( + subject_email=data["subject_email"], + subject_issuer=data["subject_issuer"], + user_code=data["user_code"], + nonce=data["nonce"], + csrf_token=data["csrf_token"], + expires_at=datetime.fromtimestamp(data["exp"], tz=UTC), + ) + + +def consume_approval_grant_nonce(redis_client, nonce: str) -> bool: + if not nonce: + return False + return bool( + redis_client.set( + NONCE_KEY_FMT.format(nonce=nonce), + "1", + nx=True, + ex=NONCE_TTL_SECONDS, + ) + ) + + +def consume_sso_assertion_nonce(redis_client, nonce: str) -> bool: + if not nonce: + return False + return bool( + redis_client.set( + SSO_ASSERTION_NONCE_KEY_FMT.format(nonce=nonce), + "1", + nx=True, + ex=NONCE_TTL_SECONDS, + ) + ) + + +def approval_grant_cookie_kwargs(value: str) -> dict: + """``secure`` follows is_secure() so HTTP-only deployments don't + silently drop the cookie. 
+ """ + return { + "key": APPROVAL_GRANT_COOKIE_NAME, + "value": value, + "max_age": APPROVAL_GRANT_COOKIE_TTL_SECONDS, + "path": APPROVAL_GRANT_COOKIE_PATH, + "secure": is_secure(), + "httponly": True, + "samesite": "Lax", + } + + +def approval_grant_cleared_cookie_kwargs() -> dict: + return { + "key": APPROVAL_GRANT_COOKIE_NAME, + "value": "", + "max_age": 0, + "path": APPROVAL_GRANT_COOKIE_PATH, + "secure": is_secure(), + "httponly": True, + "samesite": "Lax", + } + + +def _random_opaque() -> str: + return secrets.token_urlsafe(16) + + +# ============================================================================ +# Anti-framing headers +# ============================================================================ + + +_ANTI_FRAMING_HEADERS = { + "X-Frame-Options": "DENY", + "Content-Security-Policy": "frame-ancestors 'none'", +} + + +def attach_anti_framing(bp: Blueprint) -> None: + """X-Frame-Options + CSP on every response from ``bp`` (CI invariant #4).""" + + @bp.after_request + def _apply_headers(response): # pyright: ignore[reportUnusedFunction] + for name, value in _ANTI_FRAMING_HEADERS.items(): + response.headers.setdefault(name, value) + return response diff --git a/api/libs/external_api.py b/api/libs/external_api.py index f907d17750..10b503654e 100644 --- a/api/libs/external_api.py +++ b/api/libs/external_api.py @@ -75,6 +75,7 @@ def register_external_error_handlers(api: Api): def handle_value_error(e: ValueError): got_request_exception.send(current_app, exception=e) + current_app.logger.exception("value_error in request handler") status_code = 400 data = {"code": "invalid_param", "message": str(e), "status": status_code} return data, status_code diff --git a/api/libs/helper.py b/api/libs/helper.py index ac69a11084..47472c17da 100644 --- a/api/libs/helper.py +++ b/api/libs/helper.py @@ -542,3 +542,18 @@ class RateLimiter: self._redis_client.zadd(key, {member: current_time}) self._redis_client.expire(key, self.time_window * 2) + + def 
seconds_until_available(self, email: str) -> int: + """Seconds until the oldest in-window entry expires, freeing a slot. + + Defensive floor of 1 second. Caller should only invoke this after + is_rate_limited() returned True. + """ + key = self._get_key(email) + oldest = cast(Any, self._redis_client).zrange(key, 0, 0, withscores=True) + if not oldest: + return 1 + _member, score = oldest[0] + free_at = int(score) + self.time_window + remaining = free_at - int(time.time()) + return max(remaining, 1) diff --git a/api/libs/jws.py b/api/libs/jws.py new file mode 100644 index 0000000000..692ccb39fa --- /dev/null +++ b/api/libs/jws.py @@ -0,0 +1,108 @@ +"""HS256 compact JWS keyed on the shared Dify SECRET_KEY. Used by the SSO +state envelope, external subject assertion, and approval-grant cookie — +all three share one key-set so api ↔ enterprise can verify each other. +""" + +from __future__ import annotations + +from datetime import UTC, datetime, timedelta + +import jwt + +from configs import dify_config + +AUD_STATE_ENVELOPE = "api.sso.state_envelope" +AUD_EXT_SUBJECT_ASSERTION = "api.device_flow.external_subject_assertion" +AUD_APPROVAL_GRANT = "api.device_flow.approval_grant" + +ACTIVE_KID_V1 = "dify-shared-v1" + + +class KeySetError(Exception): + pass + + +class KeySet: + """``from_entries`` reserves multi-kid construction for rotation slots.""" + + def __init__(self, entries: dict[str, bytes], active_kid: str) -> None: + if active_kid not in entries: + raise KeySetError(f"active kid {active_kid!r} missing from key-set") + if not entries[active_kid]: + raise KeySetError(f"active kid {active_kid!r} has empty secret") + self._entries: dict[str, bytes] = {k: bytes(v) for k, v in entries.items()} + self._active_kid = active_kid + + @classmethod + def from_shared_secret(cls) -> KeySet: + secret = dify_config.SECRET_KEY + if not secret: + raise KeySetError("dify_config.SECRET_KEY is empty; cannot build key-set") + return cls({ACTIVE_KID_V1: secret.encode("utf-8")}, 
ACTIVE_KID_V1) + + @classmethod + def from_entries(cls, entries: dict[str, bytes], active_kid: str) -> KeySet: + return cls(entries, active_kid) + + @property + def active_kid(self) -> str: + return self._active_kid + + def lookup(self, kid: str) -> bytes | None: + return self._entries.get(kid) + + +def sign(keyset: KeySet, payload: dict, aud: str, ttl_seconds: int) -> str: + """``iat`` + ``exp`` are injected here; callers must not set them.""" + if "aud" in payload or "iat" in payload or "exp" in payload: + raise ValueError("reserved claim present in payload (aud/iat/exp)") + if ttl_seconds <= 0: + raise ValueError("ttl_seconds must be positive") + + kid = keyset.active_kid + secret = keyset.lookup(kid) + if secret is None: + raise KeySetError(f"active kid {kid!r} lookup miss") + + iat = datetime.now(UTC) + exp = iat + timedelta(seconds=ttl_seconds) + claims = {**payload, "aud": aud, "iat": iat, "exp": exp} + return jwt.encode( + claims, + secret, + algorithm="HS256", + headers={"kid": kid, "typ": "JWT"}, + ) + + +class VerifyError(Exception): + pass + + +def verify(keyset: KeySet, token: str, expected_aud: str) -> dict: + """Unknown kid is rejected — never fall back to the active kid, since + a past kid value would otherwise be forgeable by anyone who saw it. 
+ """ + try: + header = jwt.get_unverified_header(token) + except jwt.PyJWTError as e: + raise VerifyError(f"decode header: {e}") from e + kid = header.get("kid") + if not kid: + raise VerifyError("no kid in header") + secret = keyset.lookup(kid) + if secret is None: + raise VerifyError(f"unknown kid {kid!r}") + try: + return jwt.decode( + token, + secret, + algorithms=["HS256"], + audience=expected_aud, + ) + except jwt.ExpiredSignatureError as e: + raise VerifyError("token expired") from e + except jwt.InvalidAudienceError as e: + raise VerifyError("aud mismatch") from e + except jwt.PyJWTError as e: + raise VerifyError(f"decode: {e}") from e diff --git a/api/libs/oauth_bearer.py b/api/libs/oauth_bearer.py new file mode 100644 index 0000000000..73543c75b2 --- /dev/null +++ b/api/libs/oauth_bearer.py @@ -0,0 +1,650 @@ +"""OAuth bearer primitives. + +To add a token kind: write a Resolver, add a SubjectType + Accepts member, +append a TokenKind to build_registry, and update _SUBJECT_TO_ACCEPT. +Authenticator + validate_bearer stay untouched. 
+""" + +from __future__ import annotations + +import hashlib +import json +import logging +import uuid +from collections.abc import Callable, Iterable +from dataclasses import dataclass, field +from datetime import UTC, datetime +from enum import StrEnum +from functools import wraps +from typing import Literal, ParamSpec, Protocol, TypeVar + +from flask import g, request +from sqlalchemy import select, update +from sqlalchemy.orm import Session +from werkzeug.exceptions import Forbidden, ServiceUnavailable, Unauthorized + +from configs import dify_config +from extensions.ext_database import db +from extensions.ext_redis import redis_client +from libs.rate_limit import enforce_bearer_rate_limit +from models import Account, OAuthAccessToken, TenantAccountJoin + +logger = logging.getLogger(__name__) + + +# ============================================================================ +# Contract — types, enums, protocols +# ============================================================================ + + +class SubjectType(StrEnum): + ACCOUNT = "account" + EXTERNAL_SSO = "external_sso" + + +class Scope(StrEnum): + """Catalog of bearer scopes recognised by the openapi surface. + + `FULL` is the catch-all carried by `dfoa_` account tokens — it satisfies + any per-route `require_scope`. `dfoe_` tokens carry the per-feature scopes + (`APPS_RUN`, `APPS_READ_PERMITTED_EXTERNAL`). 
+ """ + + FULL = "full" + APPS_READ = "apps:read" + APPS_READ_PERMITTED_EXTERNAL = "apps:read:permitted-external" + APPS_RUN = "apps:run" + + +class Accepts(StrEnum): + """Subject types a route is willing to accept as caller.""" + + USER_ACCOUNT = "user_account" + USER_EXT_SSO = "user_ext_sso" + + +ACCEPT_USER_ANY: frozenset[Accepts] = frozenset({Accepts.USER_ACCOUNT, Accepts.USER_EXT_SSO}) +ACCEPT_USER_EXT_SSO: frozenset[Accepts] = frozenset({Accepts.USER_EXT_SSO}) + +_SUBJECT_TO_ACCEPT: dict[SubjectType, Accepts] = { + SubjectType.ACCOUNT: Accepts.USER_ACCOUNT, + SubjectType.EXTERNAL_SSO: Accepts.USER_EXT_SSO, +} + + +@dataclass(frozen=True, slots=True) +class AuthContext: + """Attached to ``g.auth_ctx``. ``scopes`` / ``subject_type`` / ``source`` + come from the TokenKind, not the DB — corrupt rows can't elevate scope. + + `verified_tenants` is a snapshot of the Layer-0 verdict cache at + authenticate time. Per-request mutations write through to Redis via + `record_layer0_verdict`; this snapshot is not updated in place (frozen). 
+ """ + + subject_type: SubjectType + subject_email: str | None + subject_issuer: str | None + account_id: uuid.UUID | None + client_id: str | None + scopes: frozenset[Scope] + token_id: uuid.UUID + source: str + expires_at: datetime | None + token_hash: str + verified_tenants: dict[str, bool] = field(default_factory=dict) + + +@dataclass(frozen=True, slots=True) +class ResolvedRow: + subject_email: str | None + subject_issuer: str | None + account_id: uuid.UUID | None + client_id: str | None + token_id: uuid.UUID + expires_at: datetime | None + verified_tenants: dict[str, bool] = field(default_factory=dict) + + def to_cache(self) -> dict: + return { + "subject_email": self.subject_email, + "subject_issuer": self.subject_issuer, + "account_id": str(self.account_id) if self.account_id else None, + "client_id": self.client_id, + "token_id": str(self.token_id), + "expires_at": self.expires_at.isoformat() if self.expires_at else None, + "verified_tenants": dict(self.verified_tenants), + } + + @classmethod + def from_cache(cls, data: dict) -> ResolvedRow: + return cls( + subject_email=data["subject_email"], + subject_issuer=data["subject_issuer"], + account_id=uuid.UUID(data["account_id"]) if data["account_id"] else None, + client_id=data.get("client_id"), + token_id=uuid.UUID(data["token_id"]), + expires_at=datetime.fromisoformat(data["expires_at"]) if data["expires_at"] else None, + verified_tenants=_coerce_verified_tenants(data.get("verified_tenants")), + ) + + +def _coerce_verified_tenants(raw: object) -> dict[str, bool]: + """Tolerate legacy entries that stored 'ok'/'denied' string verdicts. + + TODO(post-v1.0): remove once the AuthContext cache TTL has fully cycled + on all live deployments (60s TTL → safe to drop one release after rollout). 
+ """ + if not isinstance(raw, dict): + return {} + out: dict[str, bool] = {} + for k, v in raw.items(): + if isinstance(v, bool): + out[k] = v + elif v == "ok": + out[k] = True + elif v == "denied": + out[k] = False + return out + + +class Resolver(Protocol): + def resolve(self, token_hash: str) -> ResolvedRow | None: # pragma: no cover - contract + ... + + +@dataclass(frozen=True, slots=True) +class TokenKind: + prefix: str + subject_type: SubjectType + scopes: frozenset[Scope] + source: str + resolver: Resolver + + def matches(self, token: str) -> bool: + return token.startswith(self.prefix) + + +@dataclass(frozen=True, slots=True) +class MintProfile: + """Single source of truth for (subject_type, prefix, scopes) at mint time. + + Consumers: + - ``build_registry`` reads scopes here so the resolve-time TokenKind + cannot drift from the mint-time intent. + - Device-flow ``approve`` / ``approve-external`` read prefix + scopes + here when calling ``mint_oauth_token`` and ``validate_mint_policy``. + - ``services.openapi.mint_policy.validate_mint_policy`` cross-checks + the (subject_type, prefix, scopes) triple a caller intends to mint + against this table — a caller that assembles its own scope set + from a non-canonical source will fail closed at approve time. 
+ """ + + subject_type: SubjectType + prefix: str + scopes: frozenset[Scope] + + +MINTABLE_PROFILES: dict[SubjectType, MintProfile] = { + SubjectType.ACCOUNT: MintProfile( + subject_type=SubjectType.ACCOUNT, + prefix="dfoa_", + scopes=frozenset({Scope.FULL}), + ), + SubjectType.EXTERNAL_SSO: MintProfile( + subject_type=SubjectType.EXTERNAL_SSO, + prefix="dfoe_", + scopes=frozenset({Scope.APPS_RUN, Scope.APPS_READ_PERMITTED_EXTERNAL}), + ), +} + + +class InvalidBearerError(Exception): + """Token missing, unknown prefix, or no live row.""" + + +class TokenExpiredError(Exception): + """Hard-expire bookkeeping is the resolver's job before raising.""" + + +# ============================================================================ +# Registry +# ============================================================================ + + +class TokenKindRegistry: + def __init__(self, kinds: Iterable[TokenKind]) -> None: + self._kinds: tuple[TokenKind, ...] = tuple(kinds) + prefixes = [k.prefix for k in self._kinds] + if len(set(prefixes)) != len(prefixes): + raise ValueError(f"duplicate prefix in registry: {prefixes}") + + def find(self, token: str) -> TokenKind | None: + for k in self._kinds: + if k.matches(token): + return k + return None + + def kinds(self) -> tuple[TokenKind, ...]: + return self._kinds + + +# ============================================================================ +# Authenticator +# ============================================================================ + + +def sha256_hex(token: str) -> str: + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + +class BearerAuthenticator: + def __init__(self, registry: TokenKindRegistry) -> None: + self._registry = registry + + @property + def registry(self) -> TokenKindRegistry: + return self._registry + + def authenticate(self, token: str) -> AuthContext: + """Identity + per-token rate limit (single source). 
+ + Both the openapi pipeline (`BearerCheck`) and the decorator + (`validate_bearer`) call this — rate-limit fires exactly once per + request regardless of which path hosts the route. + """ + kind = self._registry.find(token) + if kind is None: + raise InvalidBearerError("unknown token prefix") + token_hash = sha256_hex(token) + row = kind.resolver.resolve(token_hash) + if row is None: + raise InvalidBearerError("token unknown or revoked") + enforce_bearer_rate_limit(token_hash) + return AuthContext( + subject_type=kind.subject_type, + subject_email=row.subject_email, + subject_issuer=row.subject_issuer, + account_id=row.account_id, + client_id=row.client_id, + scopes=kind.scopes, + token_id=row.token_id, + source=kind.source, + expires_at=row.expires_at, + token_hash=token_hash, + verified_tenants=dict(row.verified_tenants), + ) + + +# ============================================================================ +# OAuth access token resolver (PAT resolver would be a sibling class) +# ============================================================================ + +TOKEN_CACHE_KEY_FMT = "auth:token:{hash}" +POSITIVE_TTL_SECONDS = 60 +NEGATIVE_TTL_SECONDS = 10 +AUDIT_OAUTH_EXPIRED = "oauth.token_expired" + +ScopeVariant = Literal["account", "external_sso"] + + +class OAuthAccessTokenResolver: + """``.for_account()`` / ``.for_external_sso()`` are variant-scoped views + sharing DB + cache plumbing. 
+ """ + + def __init__( + self, + session_factory, + redis_client, + positive_ttl: int = POSITIVE_TTL_SECONDS, + negative_ttl: int = NEGATIVE_TTL_SECONDS, + ) -> None: + self.session_factory = session_factory + self._redis = redis_client + self._positive_ttl = positive_ttl + self._negative_ttl = negative_ttl + + def for_account(self) -> Resolver: + return _VariantResolver(self, variant="account") + + def for_external_sso(self) -> Resolver: + return _VariantResolver(self, variant="external_sso") + + def _cache_key(self, token_hash: str) -> str: + return TOKEN_CACHE_KEY_FMT.format(hash=token_hash) + + def cache_get(self, token_hash: str) -> ResolvedRow | None | Literal["invalid"]: + raw = self._redis.get(self._cache_key(token_hash)) + if raw is None: + return None + text = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw + if text == "invalid": + return "invalid" + try: + return ResolvedRow.from_cache(json.loads(text)) + except (ValueError, KeyError): + logger.warning("auth:token cache entry malformed; treating as miss") + return None + + def cache_set_positive(self, token_hash: str, row: ResolvedRow) -> None: + self._redis.setex( + self._cache_key(token_hash), + self._positive_ttl, + json.dumps(row.to_cache()), + ) + + def cache_set_negative(self, token_hash: str) -> None: + self._redis.setex(self._cache_key(token_hash), self._negative_ttl, "invalid") + + def hard_expire(self, session: Session, row_id: uuid.UUID | str, token_hash: str) -> None: + """Atomic CAS — only the worker that flips revoked_at emits audit; + replays are idempotent. 
+ """ + stmt = ( + update(OAuthAccessToken) + .where(OAuthAccessToken.id == row_id, OAuthAccessToken.revoked_at.is_(None)) + .values(revoked_at=datetime.now(UTC), token_hash=None) + ) + result = session.execute(stmt) + session.commit() + if result.rowcount == 1: + logger.warning( + "audit: %s token_id=%s", + AUDIT_OAUTH_EXPIRED, + row_id, + extra={"audit": True, "token_id": str(row_id)}, + ) + self._redis.delete(self._cache_key(token_hash)) + self.cache_set_negative(token_hash) + + +class _VariantResolver: + def __init__(self, parent: OAuthAccessTokenResolver, variant: ScopeVariant) -> None: + self._parent = parent + self._variant = variant + + def resolve(self, token_hash: str) -> ResolvedRow | None: + cached = self._parent.cache_get(token_hash) + if cached == "invalid": + return None + if cached is not None and not isinstance(cached, str): + if not self._matches_variant(cached): + return None + return cached + + # Flask-SQLAlchemy's scoped_session is request-bound and not a + # context manager; use it directly. 
+ session = self._parent.session_factory() + row = self._load_from_db(session, token_hash) + if row is None: + self._parent.cache_set_negative(token_hash) + return None + + now = datetime.now(UTC) + if row.expires_at is not None and row.expires_at <= now: + self._parent.hard_expire(session, row.id, token_hash) + return None + + if not self._matches_variant_model(row): + logger.error( + "internal_state_invariant: account_id/prefix mismatch token_id=%s prefix=%s", + row.id, + row.prefix, + ) + return None + + resolved = ResolvedRow( + subject_email=row.subject_email, + subject_issuer=row.subject_issuer, + account_id=uuid.UUID(str(row.account_id)) if row.account_id else None, + client_id=row.client_id, + token_id=uuid.UUID(str(row.id)), + expires_at=row.expires_at, + ) + self._parent.cache_set_positive(token_hash, resolved) + return resolved + + def _matches_variant(self, row: ResolvedRow) -> bool: + has_account = row.account_id is not None + if self._variant == "account": + return has_account + return not has_account + + def _matches_variant_model(self, row: OAuthAccessToken) -> bool: + has_account = row.account_id is not None + if self._variant == "account": + return has_account and row.prefix == "dfoa_" + return (not has_account) and row.prefix == "dfoe_" + + def _load_from_db(self, session: Session, token_hash: str) -> OAuthAccessToken | None: + return ( + session.query(OAuthAccessToken) + .filter( + OAuthAccessToken.token_hash == token_hash, + OAuthAccessToken.revoked_at.is_(None), + ) + .one_or_none() + ) + + +# ============================================================================ +# Layer 0 — workspace membership cache + helper +# ============================================================================ + + +def record_layer0_verdict(token_hash: str, tenant_id: str, verdict: bool) -> None: + """Merge a Layer-0 membership verdict into the AuthContext cache entry at + `auth:token:{hash}`. 
No-op if entry missing/expired/invalid — next request + rebuilds via authenticate() and re-runs Layer 0. + """ + cache_key = TOKEN_CACHE_KEY_FMT.format(hash=token_hash) + raw = redis_client.get(cache_key) + if raw is None: + return + text = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw + if text == "invalid": + return + try: + data = json.loads(text) + except (ValueError, KeyError): + return + ttl = redis_client.ttl(cache_key) + if ttl <= 0: + return + data.setdefault("verified_tenants", {})[tenant_id] = verdict + redis_client.setex(cache_key, ttl, json.dumps(data)) + + +def check_workspace_membership( + *, + account_id: uuid.UUID | str, + tenant_id: str, + token_hash: str, + cached_verdicts: dict[str, bool], +) -> None: + """Layer-0 enforcement core. Raises `Forbidden` on deny, returns on allow. + + Shared by the pipeline step (`WorkspaceMembershipCheck`) and the + inline helper (`require_workspace_member`). Caller is responsible for + short-circuiting on EE / SSO subjects before invoking — this function + runs the membership + active-status checks unconditionally. 
+ """ + cached = cached_verdicts.get(tenant_id) + if cached is True: + return + if cached is False: + raise Forbidden("workspace_membership_revoked") + + join = db.session.execute( + select(TenantAccountJoin.id).where( + TenantAccountJoin.account_id == account_id, + TenantAccountJoin.tenant_id == tenant_id, + ) + ).scalar_one_or_none() + if join is None: + record_layer0_verdict(token_hash, tenant_id, False) + raise Forbidden("workspace_membership_revoked") + + status = db.session.execute(select(Account.status).where(Account.id == account_id)).scalar_one_or_none() + if status != "active": + record_layer0_verdict(token_hash, tenant_id, False) + raise Forbidden("workspace_membership_revoked") + + record_layer0_verdict(token_hash, tenant_id, True) + + +def require_workspace_member(ctx: AuthContext, tenant_id: str) -> None: + """AuthContext-flavoured wrapper around `check_workspace_membership`. + + No-op on EE (gateway RBAC owns tenant isolation) and for SSO subjects + (no `tenant_account_joins` row by definition). 
+ """ + if dify_config.ENTERPRISE_ENABLED: + return + if ctx.subject_type != SubjectType.ACCOUNT or ctx.account_id is None: + return + check_workspace_membership( + account_id=ctx.account_id, + tenant_id=tenant_id, + token_hash=ctx.token_hash, + cached_verdicts=ctx.verified_tenants, + ) + + +# ============================================================================ +# Decorator — route-level bearer gate +# ============================================================================ + + +_authenticator: BearerAuthenticator | None = None + + +def bind_authenticator(authenticator: BearerAuthenticator) -> None: + global _authenticator + _authenticator = authenticator + + +def get_authenticator() -> BearerAuthenticator: + if _authenticator is None: + raise RuntimeError("BearerAuthenticator not bound; call bind_authenticator at startup") + return _authenticator + + +def _extract_bearer(req) -> str | None: + header = req.headers.get("Authorization", "") + scheme, _, value = header.partition(" ") + if scheme.lower() != "bearer" or not value: + return None + return value.strip() + + +_DP = ParamSpec("_DP") +_DR = TypeVar("_DR") + + +def validate_bearer(*, accept: frozenset[Accepts]) -> Callable[[Callable[_DP, _DR]], Callable[_DP, _DR]]: + """Opt-in: omitting it leaves the route unauthenticated. + + Resolves user-level OAuth bearers (``dfoa_`` / ``dfoe_``). Legacy + ``app-`` keys belong to ``service_api/wraps.py:validate_app_token`` + and are rejected here as the wrong auth scheme for this surface. 
+ """ + + def wrap(fn: Callable[_DP, _DR]) -> Callable[_DP, _DR]: + @wraps(fn) + def inner(*args: _DP.args, **kwargs: _DP.kwargs) -> _DR: + token = _extract_bearer(request) + if token is None: + raise Unauthorized("missing bearer token") + + if _authenticator is None: + raise ServiceUnavailable("bearer_auth_disabled: set ENABLE_OAUTH_BEARER=true to enable") + + try: + ctx = get_authenticator().authenticate(token) + except InvalidBearerError as e: + raise Unauthorized(str(e)) + + if _SUBJECT_TO_ACCEPT[ctx.subject_type] not in accept: + raise Forbidden("token subject type not accepted here") + + g.auth_ctx = ctx + return fn(*args, **kwargs) + + return inner + + return wrap + + +def bearer_feature_required[**P, R](fn: Callable[P, R]) -> Callable[P, R]: + """503 if ENABLE_OAUTH_BEARER is off — minted tokens would be unusable + without the authenticator, so fail fast instead of approving silently. + """ + + @wraps(fn) + def inner(*args: P.args, **kwargs: P.kwargs) -> R: + if not dify_config.ENABLE_OAUTH_BEARER: + raise ServiceUnavailable("bearer_auth_disabled: set ENABLE_OAUTH_BEARER=true to enable") + return fn(*args, **kwargs) + + return inner + + +def require_scope(scope: Scope) -> Callable: + """Route-level scope gate — must run AFTER validate_bearer so that + g.auth_ctx is set. Raises Forbidden('insufficient_scope: ') + when the bearer lacks both the requested scope and `Scope.FULL`. 
+ """ + + def wrap(fn: Callable) -> Callable: + @wraps(fn) + def inner(*args, **kwargs): + ctx = getattr(g, "auth_ctx", None) + if ctx is None: + raise RuntimeError( + "require_scope used without validate_bearer; stack @validate_bearer above @require_scope" + ) + if Scope.FULL not in ctx.scopes and scope not in ctx.scopes: + raise Forbidden(f"insufficient_scope: {scope}") + return fn(*args, **kwargs) + + return inner + + return wrap + + +# ============================================================================ +# Wiring — called once from the app factory +# ============================================================================ + + +def build_registry(session_factory, redis_client) -> TokenKindRegistry: + oauth = OAuthAccessTokenResolver(session_factory, redis_client) + account = MINTABLE_PROFILES[SubjectType.ACCOUNT] + external = MINTABLE_PROFILES[SubjectType.EXTERNAL_SSO] + return TokenKindRegistry( + [ + TokenKind( + prefix=account.prefix, + subject_type=account.subject_type, + scopes=account.scopes, + source="oauth_account", + resolver=oauth.for_account(), + ), + TokenKind( + prefix=external.prefix, + subject_type=external.subject_type, + scopes=external.scopes, + source="oauth_external_sso", + resolver=oauth.for_external_sso(), + ), + ] + ) + + +def build_and_bind(session_factory, redis_client) -> BearerAuthenticator: + registry = build_registry(session_factory, redis_client) + auth = BearerAuthenticator(registry) + bind_authenticator(auth) + return auth diff --git a/api/libs/rate_limit.py b/api/libs/rate_limit.py new file mode 100644 index 0000000000..2818898789 --- /dev/null +++ b/api/libs/rate_limit.py @@ -0,0 +1,140 @@ +"""Typed rate-limit decorator over ``libs.helper.RateLimiter`` (sliding- +window Redis ZSET). Apply after auth decorators so scopes can read +``g.auth_ctx``. Use :func:`enforce` when the bucket key is computed +in-handler. RFC-8628 ``slow_down`` is inline — its response shape isn't +generic 429. 
+""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import timedelta +from enum import StrEnum +from functools import wraps +from typing import ParamSpec, TypeVar + +from flask import g, jsonify, make_response, request, session +from werkzeug.exceptions import TooManyRequests + +from configs import dify_config +from libs.helper import RateLimiter, extract_remote_ip + + +class RateLimitScope(StrEnum): + IP = "ip" + SESSION = "session" + ACCOUNT = "account" + SUBJECT_EMAIL = "subject_email" + TOKEN_ID = "token_id" + + +@dataclass(frozen=True, slots=True) +class RateLimit: + limit: int + window: timedelta + scopes: tuple[RateLimitScope, ...] + + +LIMIT_DEVICE_CODE_PER_IP = RateLimit(60, timedelta(hours=1), (RateLimitScope.IP,)) +LIMIT_SSO_INITIATE_PER_IP = RateLimit(60, timedelta(hours=1), (RateLimitScope.IP,)) +LIMIT_APPROVE_EXT_PER_EMAIL = RateLimit(10, timedelta(hours=1), (RateLimitScope.SUBJECT_EMAIL,)) +LIMIT_APPROVE_CONSOLE = RateLimit(10, timedelta(hours=1), (RateLimitScope.SESSION,)) +LIMIT_LOOKUP_PUBLIC = RateLimit(60, timedelta(minutes=5), (RateLimitScope.IP,)) +LIMIT_ME_PER_ACCOUNT = RateLimit(60, timedelta(minutes=1), (RateLimitScope.ACCOUNT,)) +LIMIT_ME_PER_EMAIL = RateLimit(60, timedelta(minutes=1), (RateLimitScope.SUBJECT_EMAIL,)) +LIMIT_BEARER_PER_TOKEN = RateLimit( + limit=dify_config.OPENAPI_RATE_LIMIT_PER_TOKEN, + window=timedelta(minutes=1), + scopes=(RateLimitScope.TOKEN_ID,), # bucket key composed by caller from sha256(token) +) + + +def _one_key(scope: RateLimitScope) -> str: + match scope: + case RateLimitScope.IP: + return f"ip:{extract_remote_ip(request) or 'unknown'}" + case RateLimitScope.SESSION: + return f"session:{session.get('_id', 'anon')}" + case RateLimitScope.ACCOUNT: + ctx = getattr(g, "auth_ctx", None) + if ctx and ctx.account_id: + return f"account:{ctx.account_id}" + return "account:anon" + case RateLimitScope.SUBJECT_EMAIL: + ctx = getattr(g, 
"auth_ctx", None) + if ctx and ctx.subject_email: + return f"subject:{ctx.subject_email}" + return "subject:anon" + case RateLimitScope.TOKEN_ID: + ctx = getattr(g, "auth_ctx", None) + if ctx and ctx.token_id: + return f"token:{ctx.token_id}" + return "token:anon" + + +def _composite_key(scopes: tuple[RateLimitScope, ...]) -> str: + return "|".join(_one_key(s) for s in scopes) + + +def _limiter_prefix(scopes: tuple[RateLimitScope, ...]) -> str: + return "rl:" + "+".join(s.value for s in scopes) + + +def _build_limiter(spec: RateLimit) -> RateLimiter: + return RateLimiter( + prefix=_limiter_prefix(spec.scopes), + max_attempts=spec.limit, + time_window=int(spec.window.total_seconds()), + ) + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +def rate_limit(spec: RateLimit) -> Callable[[Callable[_P, _R]], Callable[_P, _R]]: + """Apply after auth decorators that the scopes read from.""" + limiter = _build_limiter(spec) + + def wrap(fn: Callable[_P, _R]) -> Callable[_P, _R]: + @wraps(fn) + def inner(*args: _P.args, **kwargs: _P.kwargs) -> _R: + key = _composite_key(spec.scopes) + if limiter.is_rate_limited(key): + raise TooManyRequests("rate_limited") + limiter.increment_rate_limit(key) + return fn(*args, **kwargs) + + return inner + + return wrap + + +def enforce(spec: RateLimit, *, key: str) -> None: + """Imperative form — caller composes the bucket key to match scope + semantics (the key is opaque here). + """ + limiter = _build_limiter(spec) + if limiter.is_rate_limited(key): + raise TooManyRequests("rate_limited") + limiter.increment_rate_limit(key) + + +def enforce_bearer_rate_limit(token_hash: str) -> None: + """Per-token rate limit on /openapi/v1/* bearer-authed routes. + + Bucket key = ``token:`` so the same token shares one + bucket across api replicas (Redis-backed sliding window). 
+ """ + limiter = _build_limiter(LIMIT_BEARER_PER_TOKEN) + key = f"token:{token_hash}" + if limiter.is_rate_limited(key): + retry_after = limiter.seconds_until_available(key) + response = make_response( + jsonify({"error": "rate_limited", "retry_after_ms": retry_after * 1000}), + 429, + ) + response.headers["Retry-After"] = str(retry_after) + raise TooManyRequests(response=response) + limiter.increment_rate_limit(key) diff --git a/api/libs/token.py b/api/libs/token.py index 5b043465ac..68048d8c7d 100644 --- a/api/libs/token.py +++ b/api/libs/token.py @@ -72,11 +72,15 @@ def extract_csrf_token_from_cookie(request: Request) -> str | None: return request.cookies.get(_real_cookie_name(COOKIE_NAME_CSRF_TOKEN)) -def extract_access_token(request: Request) -> str | None: - def _try_extract_from_cookie(request: Request) -> str | None: - return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN)) +def extract_console_cookie_token(request: Request) -> str | None: + """Cookie-only console session token. 
Used by /openapi/v1/oauth/device/* + approval routes, which must not fall through to the Authorization header + (that's where dfoa_/dfoe_ bearers live — they aren't JWTs).""" + return request.cookies.get(_real_cookie_name(COOKIE_NAME_ACCESS_TOKEN)) - return _try_extract_from_cookie(request) or _try_extract_from_header(request) + +def extract_access_token(request: Request) -> str | None: + return extract_console_cookie_token(request) or _try_extract_from_header(request) def extract_webapp_access_token(request: Request) -> str | None: diff --git a/api/migrations/versions/2026_04_23_2200-d4a5e1f3c9b7_add_oauth_access_tokens.py b/api/migrations/versions/2026_04_23_2200-d4a5e1f3c9b7_add_oauth_access_tokens.py new file mode 100644 index 0000000000..fbb2ef801e --- /dev/null +++ b/api/migrations/versions/2026_04_23_2200-d4a5e1f3c9b7_add_oauth_access_tokens.py @@ -0,0 +1,104 @@ +"""add oauth_access_tokens table + +Revision ID: d4a5e1f3c9b7 +Revises: 227822d22895, b69ca54b9208, 2a3aebbbf4bb +Create Date: 2026-04-23 22:00:00.000000 + +Merges the three open heads at time of authoring (add_workflow_comments_table, +add_chatbot_color_theme, add_app_tracing) into a single parent so the new +oauth_access_tokens table sits on a definite linear chain thereafter. + +Table stores user-level OAuth bearer tokens minted via the device-flow grant +(difyctl auth login). PAT storage (personal_access_tokens) is a separate +table not added in this migration. +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "d4a5e1f3c9b7" +down_revision = ("227822d22895", "b69ca54b9208", "2a3aebbbf4bb") +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "oauth_access_tokens", + sa.Column( + "id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + primary_key=True, + ), + sa.Column("subject_email", sa.Text(), nullable=False), + sa.Column("subject_issuer", sa.Text(), nullable=True), + sa.Column("account_id", postgresql.UUID(as_uuid=True), nullable=True), + sa.Column("client_id", sa.String(length=64), nullable=False), + sa.Column("device_label", sa.Text(), nullable=False), + sa.Column("prefix", sa.String(length=8), nullable=False), + sa.Column("token_hash", sa.String(length=64), nullable=True, unique=True), + sa.Column( + "created_at", + sa.TIMESTAMP(timezone=True), + server_default=sa.text("NOW()"), + nullable=False, + ), + sa.Column("last_used_at", sa.TIMESTAMP(timezone=True), nullable=True), + sa.Column("expires_at", sa.TIMESTAMP(timezone=True), nullable=False), + sa.Column("revoked_at", sa.TIMESTAMP(timezone=True), nullable=True), + sa.ForeignKeyConstraint( + ["account_id"], + ["accounts.id"], + name="fk_oauth_access_tokens_account_id", + ondelete="SET NULL", + ), + ) + + op.create_index( + "idx_oauth_subject_email", + "oauth_access_tokens", + ["subject_email"], + postgresql_where=sa.text("revoked_at IS NULL"), + ) + op.create_index( + "idx_oauth_account", + "oauth_access_tokens", + ["account_id"], + postgresql_where=sa.text("revoked_at IS NULL AND account_id IS NOT NULL"), + ) + op.create_index( + "idx_oauth_client", + "oauth_access_tokens", + ["subject_email", "client_id"], + postgresql_where=sa.text("revoked_at IS NULL"), + ) + op.create_index( + "idx_oauth_token_hash", + "oauth_access_tokens", + ["token_hash"], + postgresql_where=sa.text("revoked_at IS NULL"), + ) + # Partial unique index — rotate-in-place keyed on (subject, client, device). 
+ # The app always writes a non-NULL subject_issuer (account flow uses a + # sentinel, external-SSO uses the verified IdP issuer); without that the + # composite key would never collide because Postgres treats NULLs as + # distinct in unique indices. + op.create_index( + "uq_oauth_active_per_device", + "oauth_access_tokens", + ["subject_email", "subject_issuer", "client_id", "device_label"], + unique=True, + postgresql_where=sa.text("revoked_at IS NULL"), + ) + + +def downgrade(): + op.drop_index("uq_oauth_active_per_device", table_name="oauth_access_tokens") + op.drop_index("idx_oauth_token_hash", table_name="oauth_access_tokens") + op.drop_index("idx_oauth_client", table_name="oauth_access_tokens") + op.drop_index("idx_oauth_account", table_name="oauth_access_tokens") + op.drop_index("idx_oauth_subject_email", table_name="oauth_access_tokens") + op.drop_table("oauth_access_tokens") diff --git a/api/models/__init__.py b/api/models/__init__.py index 85be9ca3bd..4880f94779 100644 --- a/api/models/__init__.py +++ b/api/models/__init__.py @@ -73,7 +73,7 @@ from .model import ( TrialApp, UploadFile, ) -from .oauth import DatasourceOauthParamConfig, DatasourceProvider +from .oauth import DatasourceOauthParamConfig, DatasourceProvider, OAuthAccessToken from .provider import ( LoadBalancingModelConfig, Provider, @@ -177,6 +177,7 @@ __all__ = [ "MessageChain", "MessageFeedback", "MessageFile", + "OAuthAccessToken", "OperationLog", "PinnedConversation", "Provider", diff --git a/api/models/oauth.py b/api/models/oauth.py index bd04d890d3..f85448ea75 100644 --- a/api/models/oauth.py +++ b/api/models/oauth.py @@ -84,3 +84,35 @@ class DatasourceOauthTenantParamConfig(TypeBase): onupdate=func.current_timestamp(), init=False, ) + + +class OAuthAccessToken(TypeBase): + """Device-flow bearer. account_id NOT NULL ⇒ dfoa_ (Dify account, + subject_issuer = "dify:account" sentinel); account_id NULL + + subject_issuer = verified IdP issuer ⇒ dfoe_ (external SSO, EE-only). 
+ subject_issuer is non-NULL for all rows the app writes — Postgres + treats NULLs as distinct in unique indices, so the partial unique + index on (subject_email, subject_issuer, client_id, device_label) + WHERE revoked_at IS NULL would otherwise fail to rotate in place. + """ + + __tablename__ = "oauth_access_tokens" + __table_args__ = (sa.PrimaryKeyConstraint("id", name="oauth_access_tokens_pkey"),) + + id: Mapped[str] = mapped_column( + StringUUID, insert_default=lambda: str(uuidv7()), default_factory=lambda: str(uuidv7()), init=False + ) + subject_email: Mapped[str] = mapped_column(sa.Text, nullable=False) + client_id: Mapped[str] = mapped_column(sa.String(64), nullable=False) + device_label: Mapped[str] = mapped_column(sa.Text, nullable=False) + prefix: Mapped[str] = mapped_column(sa.String(8), nullable=False) + expires_at: Mapped[datetime] = mapped_column(sa.DateTime(timezone=True), nullable=False) + subject_issuer: Mapped[str | None] = mapped_column(sa.Text, nullable=True, default=None) + account_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None) + token_hash: Mapped[str | None] = mapped_column(sa.String(64), nullable=True, default=None) + last_used_at: Mapped[datetime | None] = mapped_column(sa.DateTime(timezone=True), nullable=True, default=None) + revoked_at: Mapped[datetime | None] = mapped_column(sa.DateTime(timezone=True), nullable=True, default=None) + + created_at: Mapped[datetime] = mapped_column( + sa.DateTime(timezone=True), nullable=False, server_default=func.now(), init=False + ) diff --git a/api/models/workflow.py b/api/models/workflow.py index d127244b0f..23133f51dd 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -1206,6 +1206,7 @@ class WorkflowAppLogCreatedFrom(StrEnum): SERVICE_API = "service-api" WEB_APP = "web-app" INSTALLED_APP = "installed-app" + OPENAPI = "openapi" @classmethod def value_of(cls, value: str) -> "WorkflowAppLogCreatedFrom": diff --git 
a/api/schedule/clean_oauth_access_tokens_task.py b/api/schedule/clean_oauth_access_tokens_task.py new file mode 100644 index 0000000000..10250e986e --- /dev/null +++ b/api/schedule/clean_oauth_access_tokens_task.py @@ -0,0 +1,54 @@ +"""DELETE oauth_access_tokens past retention. Revocation is UPDATE +(token_id stays for audits) so rows accumulate across re-logins, and +expired-but-never-presented rows have no hard-expire trigger — both get +pruned here. Spec: docs/specs/v1.0/server/tokens.md §Hard-expire. +""" + +from __future__ import annotations + +import logging +import time +from datetime import UTC, datetime, timedelta + +import click +from sqlalchemy import delete, or_, select + +import app +from configs import dify_config +from extensions.ext_database import db +from models.oauth import OAuthAccessToken + +logger = logging.getLogger(__name__) + +DELETE_BATCH_SIZE = 500 + + +@app.celery.task(queue="retention") +def clean_oauth_access_tokens_task(): + click.echo(click.style("Start clean oauth_access_tokens.", fg="green")) + retention_days = int(dify_config.OAUTH_ACCESS_TOKEN_RETENTION_DAYS) + cutoff = datetime.now(UTC) - timedelta(days=retention_days) + start_at = time.perf_counter() + + candidates = or_( + OAuthAccessToken.revoked_at < cutoff, + # Zombies: expired but never re-presented, so middleware never flipped them. 
+ (OAuthAccessToken.revoked_at.is_(None)) & (OAuthAccessToken.expires_at < cutoff), + ) + + total = 0 + while True: + ids = db.session.scalars(select(OAuthAccessToken.id).where(candidates).limit(DELETE_BATCH_SIZE)).all() + if not ids: + break + db.session.execute(delete(OAuthAccessToken).where(OAuthAccessToken.id.in_(ids))) + db.session.commit() + total += len(ids) + + end_at = time.perf_counter() + click.echo( + click.style( + f"Cleaned {total} oauth_access_tokens rows older than {retention_days}d in {end_at - start_at:.2f}s", + fg="green", + ) + ) diff --git a/api/services/app_service.py b/api/services/app_service.py index a046b909b3..28cd2c1db5 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -37,7 +37,7 @@ class AppService: Get app list with pagination :param user_id: user id :param tenant_id: tenant id - :param args: request args + :param args: request args. Optional keys: status (e.g. "normal") restricts App.status. :return: """ filters = [App.tenant_id == tenant_id, App.is_universal == False] @@ -53,6 +53,14 @@ class AppService: elif args["mode"] == "agent-chat": filters.append(App.mode == AppMode.AGENT_CHAT) + if args.get("status"): + filters.append(App.status == args["status"]) + # OpenAPI surface visibility gate. Pushed into the query so + # `pagination.total` reflects only apps the openapi caller can + # actually reach — post-filtering by enable_api after the page + # arrives would make `total` page-dependent. 
+ if args.get("openapi_visible"): + filters.append(App.enable_api.is_(True)) if args.get("is_created_by_me", False): filters.append(App.created_by == user_id) if args.get("name"): diff --git a/api/services/enterprise/app_permitted_service.py b/api/services/enterprise/app_permitted_service.py new file mode 100644 index 0000000000..77d6346995 --- /dev/null +++ b/api/services/enterprise/app_permitted_service.py @@ -0,0 +1,44 @@ +from __future__ import annotations + +import logging +from dataclasses import dataclass + +from werkzeug.exceptions import ServiceUnavailable + +from services.enterprise.enterprise_service import EnterpriseService +from services.errors.enterprise import EnterpriseAPIError + +logger = logging.getLogger(__name__) + + +@dataclass(frozen=True, slots=True) +class PermittedAppsPage: + app_ids: list[str] + total: int + has_more: bool + + +def list_permitted_apps( + *, + page: int, + limit: int, + mode: str | None = None, + name: str | None = None, +) -> PermittedAppsPage: + try: + body = EnterpriseService.WebAppAuth.list_externally_accessible_apps( + page=page, limit=limit, mode=mode, name=name + ) + except EnterpriseAPIError as exc: + logger.warning( + "permitted_apps EE call failed: status=%s message=%s", + getattr(exc, "status_code", None), + str(exc), + ) + raise ServiceUnavailable("permitted_apps_unavailable") from exc + + return PermittedAppsPage( + app_ids=[row["appId"] for row in body.get("data", [])], + total=int(body.get("total", 0)), + has_more=bool(body.get("hasMore", False)), + ) diff --git a/api/services/enterprise/enterprise_service.py b/api/services/enterprise/enterprise_service.py index 5040fcc7e3..be4f1782d9 100644 --- a/api/services/enterprise/enterprise_service.py +++ b/api/services/enterprise/enterprise_service.py @@ -1,5 +1,6 @@ from __future__ import annotations +import enum import logging import uuid from datetime import datetime @@ -23,10 +24,22 @@ VALID_LICENSE_CACHE_TTL = 600 # 10 minutes — valid licenses are stable 
INVALID_LICENSE_CACHE_TTL = 30 # 30 seconds — short so admin fixes are picked up quickly +class WebAppAccessMode(enum.StrEnum): + PUBLIC = "public" + PRIVATE = "private" + PRIVATE_ALL = "private_all" + SSO_VERIFIED = "sso_verified" + + +PERMISSION_CHECK_MODES: frozenset[WebAppAccessMode] = frozenset( + {WebAppAccessMode.PRIVATE, WebAppAccessMode.PRIVATE_ALL} +) + + class WebAppSettings(BaseModel): access_mode: str = Field( - description="Access mode for the web app. Can be 'public', 'private', 'private_all', 'sso_verified'", - default="private", + description=f"Access mode for the web app. One of: {', '.join(m.value for m in WebAppAccessMode)}", + default=WebAppAccessMode.PRIVATE.value, alias="accessMode", ) @@ -106,6 +119,15 @@ class EnterpriseService: def get_workspace_info(cls, tenant_id: str): return EnterpriseRequest.send_request("GET", f"/workspace/{tenant_id}/info") + @classmethod + def initiate_device_flow_sso(cls, signed_state: str) -> dict: + return EnterpriseRequest.send_request( + "POST", + "/device-flow/sso-initiate", + json={"signed_state": signed_state}, + raise_for_status=True, + ) + @classmethod def join_default_workspace(cls, *, account_id: str) -> DefaultWorkspaceJoinResult: """ @@ -217,8 +239,9 @@ class EnterpriseService: def update_app_access_mode(cls, app_id: str, access_mode: str): if not app_id: raise ValueError("app_id must be provided.") - if access_mode not in ["public", "private", "private_all"]: - raise ValueError("access_mode must be either 'public', 'private', or 'private_all'") + allowed = {WebAppAccessMode.PUBLIC, WebAppAccessMode.PRIVATE, WebAppAccessMode.PRIVATE_ALL} + if access_mode not in allowed: + raise ValueError(f"access_mode must be one of: {', '.join(m.value for m in allowed)}") data = {"appId": app_id, "accessMode": access_mode} @@ -234,6 +257,32 @@ class EnterpriseService: params = {"appId": app_id} EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params) + @classmethod + def 
list_externally_accessible_apps( + cls, + *, + page: int, + limit: int, + mode: str | None = None, + name: str | None = None, + ) -> dict: + """Call EE InnerListExternallyAccessibleApps; returns raw camelCase response. + + Response shape: ``{"data": [{"appId", "tenantId", "mode", "name", "updatedAt"}], + "total": int, "hasMore": bool}``. + """ + body: dict[str, str | int] = {"page": page, "limit": limit} + if mode is not None: + body["mode"] = mode + if name is not None: + body["name"] = name + return EnterpriseRequest.send_request( + "POST", + "/webapp/externally-accessible-apps", + json=body, + timeout=5.0, + ) + @classmethod def get_cached_license_status(cls) -> LicenseStatus | None: """Get enterprise license status with Redis caching to reduce HTTP calls. diff --git a/api/services/oauth_device_flow.py b/api/services/oauth_device_flow.py new file mode 100644 index 0000000000..11e92f8ae9 --- /dev/null +++ b/api/services/oauth_device_flow.py @@ -0,0 +1,467 @@ +"""Device-flow service layer: Redis state machine, OAuth token mint +(DB upsert + plaintext generation), and TTL policy. Specs: +docs/specs/v1.0/server/{device-flow.md, tokens.md}. 
+""" + +from __future__ import annotations + +import hashlib +import json +import logging +import os +import secrets +import time +import uuid +from dataclasses import asdict, dataclass, field +from datetime import UTC, datetime, timedelta +from enum import StrEnum + +from sqlalchemy import func, select +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.orm import Session, scoped_session + +from libs.oauth_bearer import TOKEN_CACHE_KEY_FMT +from models.oauth import OAuthAccessToken + +logger = logging.getLogger(__name__) + + +# ============================================================================ +# Redis state machine — device_code + user_code ephemeral state +# ============================================================================ + + +_DEVICE_CODE_KEY_PREFIX = "device_code:" +_USER_CODE_KEY_PREFIX = "user_code:" +DEVICE_CODE_KEY_FMT = _DEVICE_CODE_KEY_PREFIX + "{code}" +USER_CODE_KEY_FMT = _USER_CODE_KEY_PREFIX + "{code}" + +# Atomic GET → status-check → DEL(both keys). Two concurrent pollers must +# not both observe APPROVED — only the winner gets the plaintext token, +# the loser sees nil and the caller maps that to expired_token. +_CONSUME_ON_POLL_LUA = """ +local raw = redis.call('GET', KEYS[1]) +if not raw then return nil end +local ok, decoded = pcall(cjson.decode, raw) +if not ok then return nil end +if decoded.status == 'pending' then return nil end +if decoded.user_code then + redis.call('DEL', ARGV[1] .. 
decoded.user_code) +end +redis.call('DEL', KEYS[1]) +return raw +""" + +DEVICE_FLOW_TTL_SECONDS = 15 * 60 # RFC 8628 expires_in +APPROVED_TTL_SECONDS_MIN = 60 # plaintext-token lifetime floor + +USER_CODE_ALPHABET = "ABCDEFGHJKLMNPQRSTUVWXY3456789" # ambiguous chars dropped +USER_CODE_SEGMENT_LEN = 4 +USER_CODE_MAX_CLAIM_ATTEMPTS = 5 + +DEFAULT_POLL_INTERVAL_SECONDS = 5 # RFC 8628 minimum + + +class DeviceFlowStatus(StrEnum): + PENDING = "pending" + APPROVED = "approved" + DENIED = "denied" + + +class SlowDownDecision(StrEnum): + OK = "ok" + SLOW_DOWN = "slow_down" + + +@dataclass +class DeviceFlowState: + """``minted_token`` is plaintext between approve and the next poll; + DEL'd after the poll reads it. + """ + + user_code: str + client_id: str + device_label: str + status: DeviceFlowStatus + subject_email: str | None = None + account_id: str | None = None + subject_issuer: str | None = None + minted_token: str | None = None + token_id: str | None = None + created_at: str = "" + created_ip: str = "" + last_poll_at: str = "" + poll_payload: dict | None = field(default=None) + + def to_json(self) -> str: + return json.dumps(asdict(self)) + + @classmethod + def from_json(cls, raw: str) -> DeviceFlowState: + data = json.loads(raw) + if "status" in data: + data["status"] = DeviceFlowStatus(data["status"]) + return cls(**data) + + +def _random_device_code() -> str: + return "dc_" + secrets.token_urlsafe(24) + + +def _random_user_code_segment() -> str: + return "".join(secrets.choice(USER_CODE_ALPHABET) for _ in range(USER_CODE_SEGMENT_LEN)) + + +def _random_user_code() -> str: + return f"{_random_user_code_segment()}-{_random_user_code_segment()}" + + +class StateNotFoundError(Exception): + pass + + +class InvalidTransitionError(Exception): + pass + + +class UserCodeExhaustedError(Exception): + pass + + +class DeviceFlowRedis: + def __init__(self, redis_client) -> None: + self._redis = redis_client + self._consume_on_poll_script = 
redis_client.register_script(_CONSUME_ON_POLL_LUA) + + def start(self, client_id: str, device_label: str, created_ip: str) -> tuple[str, str, int]: + device_code = _random_device_code() + user_code = self._claim_user_code(device_code) + state = DeviceFlowState( + user_code=user_code, + client_id=client_id, + device_label=device_label, + status=DeviceFlowStatus.PENDING, + created_at=datetime.now(UTC).isoformat(), + created_ip=created_ip, + ) + self._redis.setex( + DEVICE_CODE_KEY_FMT.format(code=device_code), + DEVICE_FLOW_TTL_SECONDS, + state.to_json(), + ) + return device_code, user_code, DEVICE_FLOW_TTL_SECONDS + + def _claim_user_code(self, device_code: str) -> str: + for _ in range(USER_CODE_MAX_CLAIM_ATTEMPTS): + user_code = _random_user_code() + key = USER_CODE_KEY_FMT.format(code=user_code) + ok = self._redis.set(key, device_code, nx=True, ex=DEVICE_FLOW_TTL_SECONDS) + if ok: + return user_code + raise UserCodeExhaustedError("could not allocate a unique user_code in 5 attempts") + + def load_by_user_code(self, user_code: str) -> tuple[str, DeviceFlowState] | None: + raw_dc = self._redis.get(USER_CODE_KEY_FMT.format(code=user_code)) + if not raw_dc: + return None + device_code = raw_dc.decode() if isinstance(raw_dc, (bytes, bytearray)) else raw_dc + state = self._load_state(device_code) + if state is None: + return None + return device_code, state + + def load_by_device_code(self, device_code: str) -> DeviceFlowState | None: + return self._load_state(device_code) + + def _load_state(self, device_code: str) -> DeviceFlowState | None: + raw = self._redis.get(DEVICE_CODE_KEY_FMT.format(code=device_code)) + if not raw: + return None + text_ = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw + try: + return DeviceFlowState.from_json(text_) + except (ValueError, KeyError): + logger.exception("device_flow: corrupt state for %s", device_code) + return None + + def approve( + self, + device_code: str, + subject_email: str, + account_id: str | None, + 
minted_token: str, + token_id: str, + subject_issuer: str | None = None, + poll_payload: dict | None = None, + ) -> None: + state = self._load_state(device_code) + if state is None: + raise StateNotFoundError(device_code) + if state.status is not DeviceFlowStatus.PENDING: + raise InvalidTransitionError(f"cannot approve {state.status}") + + state.status = DeviceFlowStatus.APPROVED + state.subject_email = subject_email + state.account_id = account_id + state.subject_issuer = subject_issuer + state.minted_token = minted_token + state.token_id = token_id + state.poll_payload = poll_payload + + new_ttl = self._remaining_ttl(device_code, floor=APPROVED_TTL_SECONDS_MIN) + self._redis.setex(DEVICE_CODE_KEY_FMT.format(code=device_code), new_ttl, state.to_json()) + + def deny(self, device_code: str) -> None: + state = self._load_state(device_code) + if state is None: + raise StateNotFoundError(device_code) + if state.status is not DeviceFlowStatus.PENDING: + raise InvalidTransitionError(f"cannot deny {state.status}") + state.status = DeviceFlowStatus.DENIED + self._redis.setex( + DEVICE_CODE_KEY_FMT.format(code=device_code), + self._remaining_ttl(device_code, floor=1), + state.to_json(), + ) + + def consume_on_poll(self, device_code: str) -> DeviceFlowState | None: + """Race-safe via Lua EVAL: GET + status-check + DEL execute in a + single Redis transaction so only one of N concurrent pollers + observes the APPROVED state. Losers get None, mapped to + expired_token by the caller. 
+ """ + raw = self._consume_on_poll_script( + keys=[DEVICE_CODE_KEY_FMT.format(code=device_code)], + args=[_USER_CODE_KEY_PREFIX], + ) + if raw is None: + return None + text_ = raw.decode() if isinstance(raw, (bytes, bytearray)) else raw + try: + return DeviceFlowState.from_json(text_) + except (ValueError, KeyError): + logger.exception("device_flow: corrupt state on consume %s", device_code) + return None + + def record_poll(self, device_code: str, interval_seconds: int) -> SlowDownDecision: + now = time.time() + key = f"device_code:{device_code}:last_poll" + prev_raw = self._redis.get(key) + self._redis.setex(key, DEVICE_FLOW_TTL_SECONDS, str(now)) + if prev_raw is None: + return SlowDownDecision.OK + prev_s = prev_raw.decode() if isinstance(prev_raw, (bytes, bytearray)) else prev_raw + try: + prev = float(prev_s) + except ValueError: + return SlowDownDecision.OK + if now - prev < interval_seconds: + return SlowDownDecision.SLOW_DOWN + return SlowDownDecision.OK + + def _remaining_ttl(self, device_code: str, floor: int) -> int: + """``max(remaining, floor)`` — guarantees the CLI has at least + ``floor`` seconds to poll after a near-expiry approve. + """ + ttl = self._redis.ttl(DEVICE_CODE_KEY_FMT.format(code=device_code)) + if ttl is None or ttl < 0: + return floor + return max(int(ttl), floor) + + +# ============================================================================ +# Token mint — generate + upsert +# ============================================================================ + + +OAUTH_BODY_BYTES = 32 # ~256 bits entropy +PREFIX_OAUTH_ACCOUNT = "dfoa_" +PREFIX_OAUTH_EXTERNAL_SSO = "dfoe_" + +# Sentinel issuer for account-flow rows. Postgres' default partial unique +# index treats NULLs as distinct, which would let two live `dfoa_` rows +# share (email, client, device) and break rotate-in-place. Storing a +# non-empty literal makes the composite key collide as intended. 
+ACCOUNT_ISSUER_SENTINEL = "dify:account" + + +@dataclass(frozen=True, slots=True) +class MintResult: + """Plaintext token surfaces to the caller once.""" + + token: str + token_id: uuid.UUID + expires_at: datetime + + +@dataclass(frozen=True, slots=True) +class UpsertOutcome: + token_id: uuid.UUID + rotated: bool + old_hash: str | None + + +def generate_token(prefix: str) -> str: + return prefix + secrets.token_urlsafe(OAUTH_BODY_BYTES) + + +def sha256_hex(token: str) -> str: + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + +def mint_oauth_token( + # Accept either Session or Flask-SQLAlchemy's request-scoped wrapper — + # the wrapper proxies the same execute/commit surface. + session: Session | scoped_session, + redis_client, + *, + subject_email: str, + subject_issuer: str | None, + account_id: str | None, + client_id: str, + device_label: str, + prefix: str, + ttl_days: int, +) -> MintResult: + """Live row rotates in place via partial unique index + ``uq_oauth_active_per_device``; hard-expired rows are excluded by the + index predicate so re-login INSERTs fresh. Pre-rotate Redis entry is + deleted so stale AuthContext drops immediately. + """ + if prefix == PREFIX_OAUTH_ACCOUNT: + # Account flow always writes the sentinel — caller may pass None + # (for clarity) or the sentinel itself; nothing else is valid. + if subject_issuer not in (None, ACCOUNT_ISSUER_SENTINEL): + raise ValueError(f"account-flow token must use ACCOUNT_ISSUER_SENTINEL, got {subject_issuer!r}") + subject_issuer = ACCOUNT_ISSUER_SENTINEL + elif prefix == PREFIX_OAUTH_EXTERNAL_SSO: + # Defense in depth: enterprise canonicalises + rejects empty, + # but a regression there must not yield a NULL composite key here. 
+ if not subject_issuer or not subject_issuer.strip(): + raise ValueError("external-SSO token requires non-empty subject_issuer") + else: + raise ValueError(f"unknown oauth prefix: {prefix!r}") + + token = generate_token(prefix) + new_hash = sha256_hex(token) + expires_at = datetime.now(UTC) + timedelta(days=ttl_days) + + outcome = _upsert( + session, + subject_email=subject_email, + subject_issuer=subject_issuer, + account_id=account_id, + client_id=client_id, + device_label=device_label, + prefix=prefix, + new_hash=new_hash, + expires_at=expires_at, + ) + + if outcome.rotated and outcome.old_hash: + redis_client.delete(TOKEN_CACHE_KEY_FMT.format(hash=outcome.old_hash)) + + return MintResult(token=token, token_id=outcome.token_id, expires_at=expires_at) + + +def _upsert( + session: Session | scoped_session, + *, + subject_email: str, + subject_issuer: str | None, + account_id: str | None, + client_id: str, + device_label: str, + prefix: str, + new_hash: str, + expires_at: datetime, +) -> UpsertOutcome: + # Snapshot prior live row's hash for Redis invalidation post-rotate. + # subject_issuer is always non-null here (account flow uses sentinel, + # external-SSO is validated upstream), so equality matches the index. 
+ prior = session.execute( + select(OAuthAccessToken.id, OAuthAccessToken.token_hash) + .where( + OAuthAccessToken.subject_email == subject_email, + OAuthAccessToken.subject_issuer == subject_issuer, + OAuthAccessToken.client_id == client_id, + OAuthAccessToken.device_label == device_label, + OAuthAccessToken.revoked_at.is_(None), + ) + .limit(1) + ).first() + old_hash = prior.token_hash if prior else None + + insert_stmt = pg_insert(OAuthAccessToken).values( + subject_email=subject_email, + subject_issuer=subject_issuer, + account_id=account_id, + client_id=client_id, + device_label=device_label, + prefix=prefix, + token_hash=new_hash, + expires_at=expires_at, + ) + upsert_stmt = insert_stmt.on_conflict_do_update( + index_elements=["subject_email", "subject_issuer", "client_id", "device_label"], + index_where=OAuthAccessToken.revoked_at.is_(None), + set_={ + "token_hash": insert_stmt.excluded.token_hash, + "prefix": insert_stmt.excluded.prefix, + "account_id": insert_stmt.excluded.account_id, + "expires_at": insert_stmt.excluded.expires_at, + "created_at": func.now(), + "last_used_at": None, + }, + ).returning(OAuthAccessToken.id) + row = session.execute(upsert_stmt).first() + session.commit() + + if row is None: + raise RuntimeError("oauth_token upsert returned no row") + token_id = uuid.UUID(str(row.id)) + return UpsertOutcome( + token_id=token_id, + rotated=prior is not None, + old_hash=old_hash, + ) + + +# ============================================================================ +# TTL policy — days new OAuth tokens live +# ============================================================================ + + +DEFAULT_OAUTH_TTL_DAYS = 14 +MIN_TTL_DAYS = 1 +MAX_TTL_DAYS = 365 + +_TTL_ENV_VAR = "OAUTH_TTL_DAYS" + + +def oauth_ttl_days(tenant_id: str | None = None) -> int: + """``OAUTH_TTL_DAYS`` env, else default. EE tenant-level lookup + is deferred; when it lands it wins over the env (Redis-cached 60s). 
+ """ + _ = tenant_id + + raw = os.environ.get(_TTL_ENV_VAR) + if raw is None: + return DEFAULT_OAUTH_TTL_DAYS + try: + value = int(raw) + except ValueError: + logger.warning( + "%s=%r is not an int; falling back to %d", + _TTL_ENV_VAR, + raw, + DEFAULT_OAUTH_TTL_DAYS, + ) + return DEFAULT_OAUTH_TTL_DAYS + if value < MIN_TTL_DAYS: + logger.warning("%s=%d below min %d; clamping", _TTL_ENV_VAR, value, MIN_TTL_DAYS) + return MIN_TTL_DAYS + if value > MAX_TTL_DAYS: + logger.warning("%s=%d above max %d; clamping", _TTL_ENV_VAR, value, MAX_TTL_DAYS) + return MAX_TTL_DAYS + return value diff --git a/api/services/openapi/__init__.py b/api/services/openapi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/services/openapi/license_gate.py b/api/services/openapi/license_gate.py new file mode 100644 index 0000000000..f37462cd20 --- /dev/null +++ b/api/services/openapi/license_gate.py @@ -0,0 +1,54 @@ +"""License gate for the /openapi/v1/permitted-external-apps* surface. + +EE-only. CE deploys (``ENTERPRISE_ENABLED=false``) skip the gate entirely — +the EE blueprint chain is what gives CE deploys no callers on this surface +in practice, but the explicit short-circuit avoids any test/fixture that +flips the surface on without flipping the license. + +Reuses ``FeatureService.get_system_features()`` so the license status +travels the same path as the console reads. + +Companion to ``controllers.console.wraps.enterprise_license_required`` — +that one is for console (cookie-authed, force-logout 401). This one is +for bearer surface (token-authed, 403 ``license_required``). 
+""" + +from __future__ import annotations + +import logging +from collections.abc import Callable +from functools import wraps + +from werkzeug.exceptions import Forbidden + +from configs import dify_config +from services.feature_service import FeatureService, LicenseStatus + +logger = logging.getLogger(__name__) + +_VALID_LICENSE_STATUSES: frozenset[LicenseStatus] = frozenset( + {LicenseStatus.ACTIVE, LicenseStatus.EXPIRING} +) + + +def license_required[**P, R](view: Callable[P, R]) -> Callable[P, R]: + """Decorator form. Raises ``Forbidden('license_required')`` when the EE + deployment has no valid license. No-op on CE (``ENTERPRISE_ENABLED=false``). + """ + + @wraps(view) + def decorated(*args: P.args, **kwargs: P.kwargs) -> R: + if dify_config.ENTERPRISE_ENABLED and not _is_license_valid(): + raise Forbidden(description="license_required") + return view(*args, **kwargs) + + return decorated + + +def _is_license_valid() -> bool: + try: + features = FeatureService.get_system_features() + except Exception: + logger.exception("license_gate: FeatureService.get_system_features failed") + return False + return features.license.status in _VALID_LICENSE_STATUSES diff --git a/api/services/openapi/mint_policy.py b/api/services/openapi/mint_policy.py new file mode 100644 index 0000000000..6c374aa1a3 --- /dev/null +++ b/api/services/openapi/mint_policy.py @@ -0,0 +1,51 @@ +"""Hard mint policy. + +``validate_mint_policy`` cross-checks a (subject_type, prefix, scopes) +triple a caller intends to mint against ``MINTABLE_PROFILES`` — +the single source of truth in ``libs.oauth_bearer``. + +The defense-in-depth value: if a future caller assembles ``prefix`` or +``scopes`` from a non-canonical source (env, request body, plug-in +contribution), the mismatch fails closed at approve time before any +row hits the DB. 
When the caller reads straight from +``MINTABLE_PROFILES``, the check is a structural pin — it confirms the +table entry is well-formed and the caller picked the right key. +""" + +from __future__ import annotations + +from libs.oauth_bearer import MINTABLE_PROFILES, Scope, SubjectType + + +class MintPolicyViolation(Exception): # noqa: N818 — spec-defined name, used in BadRequest message + """Raised on a (subject_type, prefix, scopes) mismatch. Callers translate + to 400 ``mint_policy_violation``.""" + + +def validate_mint_policy( + *, + subject_type: SubjectType, + prefix: str, + scopes: frozenset[Scope], +) -> None: + """Raise ``MintPolicyViolation`` when the triple does not match the + canonical ``MINTABLE_PROFILES`` entry for ``subject_type``. + """ + profile = MINTABLE_PROFILES.get(subject_type) + if profile is None: + raise MintPolicyViolation( + f"mint_policy_violation: unknown subject_type={subject_type!r}" + ) + + drift = [] + if profile.prefix != prefix: + drift.append(f"prefix got={prefix!r} expected={profile.prefix!r}") + if frozenset(scopes) != profile.scopes: + got = sorted(s.value for s in scopes) + want = sorted(s.value for s in profile.scopes) + drift.append(f"scopes got={got} expected={want}") + + if drift: + raise MintPolicyViolation( + f"mint_policy_violation: subject_type={subject_type.value} — " + "; ".join(drift) + ) diff --git a/api/services/openapi/visibility.py b/api/services/openapi/visibility.py new file mode 100644 index 0000000000..ed665a768f --- /dev/null +++ b/api/services/openapi/visibility.py @@ -0,0 +1,32 @@ +"""Single-source visibility filter for the /openapi/v1/* surface. + +Keep every openapi-surface app query routed through ``_apply_openapi_gate``; +retiring or replacing the gate then becomes a one-line change here. + +The Service API (/v1/* app-key surface) does NOT use this helper — that +surface has its own per-request guard (``service_api_disabled``) wired +into the legacy ``validate_app_token`` decorator. 
+""" + +from __future__ import annotations + +from typing import Any + +from models.model import App + + +def apply_openapi_gate(query: Any) -> Any: + """Filter a SQLAlchemy Select/Query to apps visible on /openapi/v1/*. + + Works with both legacy ``Query.filter`` and 2.0-style ``Select.filter`` + (alias of ``.where``). + """ + return query.filter(App.enable_api.is_(True)) + + +def is_openapi_visible(app: App) -> bool: + """Per-row counterpart for code paths that fetch an App by primary key + (``session.get`` / ``session.scalar``) and need the same visibility check + the query gate would have applied. + """ + return bool(app.enable_api) diff --git a/api/services/webapp_auth_service.py b/api/services/webapp_auth_service.py index eaea79af2f..834d78011a 100644 --- a/api/services/webapp_auth_service.py +++ b/api/services/webapp_auth_service.py @@ -15,7 +15,7 @@ from models import Account, AccountStatus from models.model import App, EndUser, Site from services.account_service import AccountService from services.app_service import AppService -from services.enterprise.enterprise_service import EnterpriseService +from services.enterprise.enterprise_service import PERMISSION_CHECK_MODES, EnterpriseService, WebAppAccessMode from services.errors.account import AccountLoginError, AccountNotFoundError, AccountPasswordError from tasks.mail_email_code_login import send_email_code_login_mail_task @@ -137,12 +137,8 @@ class WebAppAuthService: """ Check if the app requires permission check based on its access mode. 
""" - modes_requiring_permission_check = [ - "private", - "private_all", - ] if access_mode: - return access_mode in modes_requiring_permission_check + return access_mode in PERMISSION_CHECK_MODES if not app_code and not app_id: raise ValueError("Either app_code or app_id must be provided.") @@ -153,7 +149,7 @@ class WebAppAuthService: raise ValueError("App ID could not be determined from the provided app_code.") webapp_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_id) - if webapp_settings and webapp_settings.access_mode in modes_requiring_permission_check: + if webapp_settings and webapp_settings.access_mode in PERMISSION_CHECK_MODES: return True return False @@ -166,11 +162,11 @@ class WebAppAuthService: raise ValueError("Either app_code or access_mode must be provided.") if access_mode: - if access_mode == "public": + if access_mode == WebAppAccessMode.PUBLIC: return WebAppAuthType.PUBLIC - elif access_mode in ["private", "private_all"]: + elif access_mode in PERMISSION_CHECK_MODES: return WebAppAuthType.INTERNAL - elif access_mode == "sso_verified": + elif access_mode == WebAppAccessMode.SSO_VERIFIED: return WebAppAuthType.EXTERNAL if app_code: diff --git a/api/tests/integration_tests/controllers/openapi/__init__.py b/api/tests/integration_tests/controllers/openapi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/controllers/openapi/conftest.py b/api/tests/integration_tests/controllers/openapi/conftest.py new file mode 100644 index 0000000000..19a8ab673b --- /dev/null +++ b/api/tests/integration_tests/controllers/openapi/conftest.py @@ -0,0 +1,125 @@ +"""Shared fixtures for /openapi/v1/* integration tests.""" + +from __future__ import annotations + +import hashlib +import uuid +from collections.abc import Generator +from datetime import UTC, datetime, timedelta + +import pytest +from flask import Flask + +from extensions.ext_database import db +from extensions.ext_redis import 
redis_client +from models import Account, App, OAuthAccessToken, Tenant, TenantAccountJoin +from models.account import AccountStatus + + +def _sha256(token: str) -> str: + return hashlib.sha256(token.encode("utf-8")).hexdigest() + + +@pytest.fixture(autouse=True) +def disable_enterprise(monkeypatch): + """Default to CE behaviour for /openapi/v1 tests. Tests that exercise the + EE branch override this with their own monkeypatch in-test.""" + from configs import dify_config + + monkeypatch.setattr(dify_config, "ENTERPRISE_ENABLED", False) + + +@pytest.fixture +def workspace_account(flask_app: Flask) -> Generator[tuple[Account, Tenant, TenantAccountJoin], None, None]: + with flask_app.app_context(): + tenant = Tenant(name="t1", status="normal") + account = Account(email="u@example.com", name="u") + db.session.add_all([tenant, account]) + db.session.commit() + account.status = AccountStatus.ACTIVE + join = TenantAccountJoin(tenant_id=tenant.id, account_id=account.id, role="owner") + db.session.add(join) + db.session.commit() + yield account, tenant, join + db.session.delete(join) + db.session.delete(account) + db.session.delete(tenant) + db.session.commit() + + +@pytest.fixture +def app_in_workspace(flask_app: Flask, workspace_account) -> Generator[App, None, None]: + _, tenant, _ = workspace_account + with flask_app.app_context(): + app = App(tenant_id=tenant.id, name="a", mode="chat", status="normal", enable_site=True, enable_api=True) + db.session.add(app) + db.session.commit() + yield app + db.session.delete(app) + db.session.commit() + + +@pytest.fixture +def mint_token(flask_app: Flask): + """Factory fixture; tracks minted rows and deletes them on teardown so + the auth-related test runs don't accumulate `oauth_access_tokens` rows.""" + minted: list[OAuthAccessToken] = [] + + def _mint( + token: str, + *, + account_id: str | None, + prefix: str, + subject_email: str, + subject_issuer: str | None, + ) -> OAuthAccessToken: + with flask_app.app_context(): + row = 
OAuthAccessToken( + token_hash=_sha256(token), + prefix=prefix, + account_id=account_id, + subject_email=subject_email, + subject_issuer=subject_issuer, + client_id="difyctl", + device_label="test-device", + expires_at=datetime.now(UTC) + timedelta(hours=1), + ) + db.session.add(row) + db.session.commit() + minted.append(row) + return row + + yield _mint + + with flask_app.app_context(): + for row in minted: + db.session.delete(db.session.merge(row)) + db.session.commit() + + +@pytest.fixture +def account_token(workspace_account, mint_token) -> str: + account, _, _ = workspace_account + token = "dfoa_" + uuid.uuid4().hex + mint_token( + token, + account_id=account.id, + prefix="dfoa_", + subject_email=account.email, + subject_issuer="dify:account", + ) + return token + + +@pytest.fixture(autouse=True) +def _flush_auth_redis(flask_app: Flask) -> Generator[None, None, None]: + def _flush(): + with flask_app.app_context(): + for k in redis_client.keys("auth:*"): + redis_client.delete(k) + for k in redis_client.keys("rl:*"): + redis_client.delete(k) + + _flush() + yield + _flush() diff --git a/api/tests/integration_tests/controllers/openapi/test_app_run.py b/api/tests/integration_tests/controllers/openapi/test_app_run.py new file mode 100644 index 0000000000..524fdf1310 --- /dev/null +++ b/api/tests/integration_tests/controllers/openapi/test_app_run.py @@ -0,0 +1,252 @@ +"""Integration tests for POST /openapi/v1/apps//run.""" + +from __future__ import annotations + +import uuid +from collections.abc import Generator + +import pytest +from flask import Flask + +from core.app.entities.app_invoke_entities import InvokeFrom +from extensions.ext_database import db +from models import App + + +def test_run_chat_dispatches_to_chat_handler(flask_app, account_token, app_in_workspace, monkeypatch): + captured = {} + + def _fake_generate(*, app_model, user, args, invoke_from, streaming): + captured["mode"] = app_model.mode + captured["args"] = args + captured["invoke_from"] = 
invoke_from + return { + "event": "message", + "task_id": "t", + "id": "m", + "message_id": "m", + "conversation_id": "c", + "mode": "chat", + "answer": "ok", + "created_at": 0, + } + + monkeypatch.setattr( + "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate) + ) + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"inputs": {}, "query": "hi", "response_mode": "blocking", "user": "spoof@x.com"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.get_json()["mode"] == "chat" + assert captured["mode"] == "chat" + assert captured["invoke_from"] == InvokeFrom.OPENAPI + assert "user" not in captured["args"], "server must strip body.user; identity comes from bearer" + + +@pytest.fixture +def app_with_mode(flask_app: Flask, workspace_account): + """Factory that creates an App row in the workspace_account tenant with + a specified mode. Tracks rows for teardown. 
+ """ + _, tenant, _ = workspace_account + created: list[App] = [] + + def _make(mode: str) -> App: + with flask_app.app_context(): + app = App( + tenant_id=tenant.id, + name=f"a-{mode}", + mode=mode, + status="normal", + enable_site=True, + enable_api=True, + ) + db.session.add(app) + db.session.commit() + db.session.refresh(app) + db.session.expunge(app) + created.append(app) + return app + + yield _make + + with flask_app.app_context(): + for app in created: + db.session.delete(db.session.merge(app)) + db.session.commit() + + +def test_run_chat_without_query_returns_422(flask_app, account_token, app_in_workspace, monkeypatch): + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"inputs": {}, "response_mode": "blocking"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 + assert b"query_required_for_chat" in res.data + + +def test_run_completion_dispatches_to_completion_handler( + flask_app, account_token, app_with_mode, monkeypatch +): + app = app_with_mode("completion") + + captured: dict = {} + + def _fake_generate(*, app_model, user, args, invoke_from, streaming): + captured["mode"] = app_model.mode + captured["args"] = args + return { + "event": "message", + "task_id": "t", + "id": "m", + "message_id": "m", + "mode": "completion", + "answer": "ok", + "created_at": 0, + } + + monkeypatch.setattr( + "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate) + ) + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app.id}/run", + json={"inputs": {}, "response_mode": "blocking"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.get_json()["mode"] == "completion" + assert captured["mode"] == "completion" + + +def test_run_workflow_with_query_returns_422(flask_app, account_token, app_with_mode, monkeypatch): + app = app_with_mode("workflow") + client = 
flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app.id}/run", + json={"inputs": {}, "query": "hi", "response_mode": "blocking"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 + assert b"query_not_supported_for_workflow" in res.data + + +def test_run_workflow_no_query_dispatches_to_workflow_handler( + flask_app, account_token, app_with_mode, monkeypatch +): + app = app_with_mode("workflow") + + def _fake_generate(*, app_model, user, args, invoke_from, streaming): + return { + "workflow_run_id": "wfr", + "task_id": "t", + "data": {"id": "wf-d", "workflow_id": "wf", "status": "succeeded"}, + } + + monkeypatch.setattr( + "controllers.openapi.app_run.AppGenerateService.generate", staticmethod(_fake_generate) + ) + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app.id}/run", + json={"inputs": {}, "response_mode": "blocking"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.get_json() + assert body["mode"] == "workflow" + assert body["workflow_run_id"] == "wfr" + + +def test_run_unsupported_mode_returns_422(flask_app, account_token, app_with_mode, monkeypatch): + app = app_with_mode("channel") + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app.id}/run", + json={"inputs": {}, "response_mode": "blocking"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 + assert b"mode_not_runnable" in res.data + + +def test_run_without_bearer_returns_401(flask_app, app_in_workspace): + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"inputs": {}, "query": "hi"}, + ) + assert res.status_code == 401 + + +def test_run_with_insufficient_scope_returns_403( + flask_app, account_token, app_in_workspace, monkeypatch +): + """Stub the authenticator to return an AuthContext with empty scopes.""" + from libs import 
oauth_bearer + + real_authenticate = oauth_bearer.BearerAuthenticator.authenticate + + def _stub_authenticate(self, token: str): + ctx = real_authenticate(self, token) + from dataclasses import replace + + return replace(ctx, scopes=frozenset()) + + monkeypatch.setattr(oauth_bearer.BearerAuthenticator, "authenticate", _stub_authenticate) + + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"inputs": {}, "query": "hi"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 403 + + +def test_run_with_unknown_app_returns_404(flask_app, account_token): + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{uuid.uuid4()}/run", + json={"inputs": {}, "query": "hi"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 404 + + +def test_run_streaming_returns_event_stream( + flask_app, account_token, app_in_workspace, monkeypatch +): + def _stream() -> Generator[str, None, None]: + yield "event: message\ndata: {\"x\": 1}\n\n" + + monkeypatch.setattr( + "controllers.openapi.app_run.AppGenerateService.generate", + staticmethod(lambda **kw: _stream()), + ) + + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"inputs": {}, "query": "hi", "response_mode": "streaming"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.headers["Content-Type"].startswith("text/event-stream") + assert b"event: message" in res.data + + +def test_run_without_inputs_returns_422(flask_app, account_token, app_in_workspace): + client = flask_app.test_client() + res = client.post( + f"/openapi/v1/apps/{app_in_workspace.id}/run", + json={"query": "hi"}, + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 diff --git a/api/tests/integration_tests/controllers/openapi/test_apps.py 
b/api/tests/integration_tests/controllers/openapi/test_apps.py new file mode 100644 index 0000000000..20ac46fbbd --- /dev/null +++ b/api/tests/integration_tests/controllers/openapi/test_apps.py @@ -0,0 +1,210 @@ +"""Integration tests for /openapi/v1/apps* read surface.""" + +from __future__ import annotations + +from flask.testing import FlaskClient + +from models import App + + +def test_apps_bare_id_route_404(test_client, app_in_workspace, account_token): + resp = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert resp.status_code == 404 + + +def test_apps_parameters_route_404(test_client, app_in_workspace, account_token): + resp = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/parameters", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert resp.status_code == 404 + + +def test_apps_info_route_404(test_client, app_in_workspace, account_token): + resp = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/info", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert resp.status_code == 404 + + +def test_apps_describe_returns_merged_shape( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"]["id"] == app_in_workspace.id + assert body["info"]["mode"] == "chat" + assert isinstance(body["parameters"], dict) + + +def test_apps_describe_full_includes_input_schema( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"] is not None + assert body["parameters"] is not None + assert 
body["input_schema"] is not None + assert body["input_schema"]["$schema"] == "https://json-schema.org/draft/2020-12/schema" + + +def test_apps_describe_fields_info_only( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"] is not None + assert body["parameters"] is None + assert body["input_schema"] is None + + +def test_apps_describe_fields_parameters_only( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=parameters", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"] is None + assert body["parameters"] is not None + assert body["input_schema"] is None + + +def test_apps_describe_fields_input_schema_only( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=input_schema", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"] is None + assert body["parameters"] is None + assert body["input_schema"] is not None + + +def test_apps_describe_fields_combined( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info,input_schema", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["info"] is not None + assert body["parameters"] is None + assert body["input_schema"] is not None + + +def test_apps_describe_fields_unknown_returns_422( + test_client: FlaskClient, + 
app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=garbage", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 + + +def test_apps_describe_fields_extra_param_returns_422( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +): + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/describe?fields=info&page=1", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 422 + + +def test_apps_list_returns_pagination_envelope( + test_client: FlaskClient, + workspace_account, + app_in_workspace: App, + account_token: str, +): + _, tenant, _ = workspace_account + res = test_client.get( + f"/openapi/v1/apps?workspace_id={tenant.id}&page=1&limit=20", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + body = res.json + assert body["page"] == 1 + assert body["limit"] == 20 + assert body["total"] >= 1 + assert any(d["id"] == app_in_workspace.id for d in body["data"]) + + +def test_apps_list_requires_workspace_id(test_client: FlaskClient, account_token: str): + res = test_client.get("/openapi/v1/apps", headers={"Authorization": f"Bearer {account_token}"}) + assert res.status_code == 400 + + +def test_apps_list_tag_no_match_returns_empty_data_not_400( + test_client: FlaskClient, + workspace_account, + app_in_workspace: App, + account_token: str, +): + _, tenant, _ = workspace_account + res = test_client.get( + f"/openapi/v1/apps?workspace_id={tenant.id}&tag=nonexistent", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.json["data"] == [] + + +def test_account_sessions_returns_envelope( + test_client: FlaskClient, + account_token: str, +): + res = test_client.get("/openapi/v1/account/sessions", headers={"Authorization": f"Bearer {account_token}"}) + assert res.status_code == 200 + body = 
res.json + # canonical envelope shape + assert isinstance(body["data"], list) + assert "page" in body + assert "limit" in body + assert "total" in body + assert "has_more" in body + # the bearer's own minted session must appear + assert any(s["prefix"] == "dfoa_" for s in body["data"]) + # legacy "sessions" key must NOT appear + assert "sessions" not in body diff --git a/api/tests/integration_tests/controllers/openapi/test_auth.py b/api/tests/integration_tests/controllers/openapi/test_auth.py new file mode 100644 index 0000000000..5f0727fbbe --- /dev/null +++ b/api/tests/integration_tests/controllers/openapi/test_auth.py @@ -0,0 +1,127 @@ +"""Integration tests for the /openapi/v1 bearer auth surface. + +Layer 0 (workspace membership), per-token rate limit, and read-scope (`apps:read`) +acceptance/rejection on app-scoped routes. +""" + +from __future__ import annotations + +from collections.abc import Generator + +import pytest +from flask import Flask +from flask.testing import FlaskClient + +from extensions.ext_database import db +from models import App, Tenant + + +def test_info_accepts_account_bearer_with_apps_read_scope( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +) -> None: + res = test_client.get( + f"/openapi/v1/apps/{app_in_workspace.id}/info", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.json["id"] == app_in_workspace.id + + +@pytest.fixture +def other_workspace_app(flask_app: Flask) -> Generator[App, None, None]: + """A fresh app under a *different* tenant — caller has no membership row.""" + with flask_app.app_context(): + other_tenant = Tenant(name="other", status="normal") + db.session.add(other_tenant) + db.session.commit() + app = App( + tenant_id=other_tenant.id, + name="b", + mode="chat", + status="normal", + enable_site=True, + enable_api=True, + ) + db.session.add(app) + db.session.commit() + yield app + db.session.delete(app) + 
db.session.delete(other_tenant) + db.session.commit() + + +def test_layer0_denies_account_bearer_without_membership( + test_client: FlaskClient, + account_token: str, + other_workspace_app: App, +) -> None: + """Account A bearer hitting an app under tenant B — Layer 0 denies on CE.""" + res = test_client.get( + f"/openapi/v1/apps/{other_workspace_app.id}/info", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 403 + assert res.json.get("message") == "workspace_membership_revoked" + + +def test_layer0_skipped_when_enterprise_enabled( + test_client: FlaskClient, + account_token: str, + other_workspace_app: App, + monkeypatch, +) -> None: + """On EE, Layer 0 short-circuits — gateway RBAC owns tenant isolation. + + /info uses validate_bearer + require_workspace_member inline (no + AppAuthzCheck), so a cross-tenant bearer reaches the app lookup and + gets 200 — gateway is expected to enforce isolation upstream. + """ + from configs import dify_config + + # Override the conftest autouse default for this test only. 
+ monkeypatch.setattr(dify_config, "ENTERPRISE_ENABLED", True) + + res = test_client.get( + f"/openapi/v1/apps/{other_workspace_app.id}/info", + headers={"Authorization": f"Bearer {account_token}"}, + ) + assert res.status_code == 200 + assert res.json.get("message") != "workspace_membership_revoked" + + +def test_rate_limit_returns_429_after_60_requests( + test_client: FlaskClient, + account_token: str, +) -> None: + """61st sequential GET to /account on the same bearer → 429 with Retry-After.""" + headers = {"Authorization": f"Bearer {account_token}"} + for i in range(60): + r = test_client.get("/openapi/v1/account", headers=headers) + assert r.status_code == 200, f"unexpected fail at i={i}" + + r = test_client.get("/openapi/v1/account", headers=headers) + assert r.status_code == 429 + assert r.headers.get("Retry-After"), "Retry-After header missing" + assert int(r.headers["Retry-After"]) >= 1 + body = r.json or {} + assert body.get("error") == "rate_limited" + assert isinstance(body.get("retry_after_ms"), int) + assert body["retry_after_ms"] >= 1000 + + +def test_rate_limit_bucket_shared_across_surfaces( + test_client: FlaskClient, + app_in_workspace: App, + account_token: str, +) -> None: + """30 calls to /account + 30 calls to /apps//info on same token → 61st 429s.""" + headers = {"Authorization": f"Bearer {account_token}"} + for _ in range(30): + assert test_client.get("/openapi/v1/account", headers=headers).status_code == 200 + for _ in range(30): + assert test_client.get(f"/openapi/v1/apps/{app_in_workspace.id}/info", headers=headers).status_code == 200 + + r = test_client.get("/openapi/v1/account", headers=headers) + assert r.status_code == 429 diff --git a/api/tests/unit_tests/controllers/openapi/__init__.py b/api/tests/unit_tests/controllers/openapi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/controllers/openapi/auth/__init__.py b/api/tests/unit_tests/controllers/openapi/auth/__init__.py new file mode 
100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_composition.py b/api/tests/unit_tests/controllers/openapi/auth/test_composition.py new file mode 100644 index 0000000000..aa6478dd97 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_composition.py @@ -0,0 +1,66 @@ +from unittest.mock import patch + +from controllers.openapi.auth.composition import OAUTH_BEARER_PIPELINE, _resolve_app_authz_strategy +from controllers.openapi.auth.pipeline import Pipeline +from controllers.openapi.auth.steps import ( + AppAuthzCheck, + AppResolver, + BearerCheck, + CallerMount, + ScopeCheck, + SurfaceCheck, + WorkspaceMembershipCheck, +) +from controllers.openapi.auth.strategies import ( + AccountMounter, + AclStrategy, + EndUserMounter, + MembershipStrategy, +) +from libs.oauth_bearer import SubjectType + + +def test_pipeline_is_composed(): + assert isinstance(OAUTH_BEARER_PIPELINE, Pipeline) + + +def test_pipeline_step_order(): + """BearerCheck → SurfaceCheck → ScopeCheck → AppResolver → + WorkspaceMembershipCheck → AppAuthzCheck → CallerMount. + SurfaceCheck enforces the dfoa_/dfoe_ surface split + emits + `openapi.wrong_surface_denied`. 
Rate-limit is enforced inside + `BearerAuthenticator.authenticate`, not as a separate pipeline step.""" + steps = OAUTH_BEARER_PIPELINE._steps + assert isinstance(steps[0], BearerCheck) + assert isinstance(steps[1], SurfaceCheck) + assert isinstance(steps[2], ScopeCheck) + assert isinstance(steps[3], AppResolver) + assert isinstance(steps[4], WorkspaceMembershipCheck) + assert isinstance(steps[5], AppAuthzCheck) + assert isinstance(steps[6], CallerMount) + + +def test_pipeline_surface_check_accepts_account_only(): + """Current pipeline serves /apps//run — account surface only.""" + surface = OAUTH_BEARER_PIPELINE._steps[1] + assert isinstance(surface, SurfaceCheck) + assert surface._accepted == frozenset({SubjectType.ACCOUNT}) + + +def test_caller_mount_has_both_mounters(): + cm = OAUTH_BEARER_PIPELINE._steps[6] + kinds = {type(m) for m in cm._mounters} + assert AccountMounter in kinds + assert EndUserMounter in kinds + + +@patch("controllers.openapi.auth.composition.FeatureService") +def test_strategy_resolver_picks_acl_when_enabled(fs): + fs.get_system_features.return_value.webapp_auth.enabled = True + assert isinstance(_resolve_app_authz_strategy(), AclStrategy) + + +@patch("controllers.openapi.auth.composition.FeatureService") +def test_strategy_resolver_picks_membership_when_disabled(fs): + fs.get_system_features.return_value.webapp_auth.enabled = False + assert isinstance(_resolve_app_authz_strategy(), MembershipStrategy) diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_context.py b/api/tests/unit_tests/controllers/openapi/auth/test_context.py new file mode 100644 index 0000000000..46e932af04 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_context.py @@ -0,0 +1,21 @@ +from unittest.mock import MagicMock + +from controllers.openapi.auth.context import Context + + +def test_context_starts_unpopulated(): + ctx = Context(request=MagicMock(), required_scope="apps:run") + assert ctx.subject_type is None + assert 
ctx.subject_email is None + assert ctx.account_id is None + assert ctx.scopes == frozenset() + assert ctx.app is None + assert ctx.tenant is None + assert ctx.caller is None + assert ctx.caller_kind is None + + +def test_context_fields_are_mutable(): + ctx = Context(request=MagicMock(), required_scope="apps:run") + ctx.scopes = frozenset({"full"}) + assert "full" in ctx.scopes diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_pipeline.py b/api/tests/unit_tests/controllers/openapi/auth/test_pipeline.py new file mode 100644 index 0000000000..cfeaf86cfe --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_pipeline.py @@ -0,0 +1,61 @@ +from unittest.mock import MagicMock + +import pytest +from flask import Flask + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.pipeline import Pipeline + + +def test_run_invokes_each_step_in_order(): + calls = [] + + class S: + def __init__(self, tag): + self.tag = tag + + def __call__(self, ctx): + calls.append(self.tag) + + Pipeline(S("a"), S("b"), S("c")).run(Context(request=MagicMock(), required_scope="x")) + assert calls == ["a", "b", "c"] + + +def test_run_short_circuits_on_raise(): + calls = [] + + class Boom: + def __call__(self, ctx): + raise RuntimeError("boom") + + class Tail: + def __call__(self, ctx): + calls.append("ran") + + with pytest.raises(RuntimeError): + Pipeline(Boom(), Tail()).run(Context(request=MagicMock(), required_scope="x")) + assert calls == [] + + +def test_guard_decorator_runs_pipeline_and_unpacks_handler_kwargs(): + seen = {} + + class FakeStep: + def __call__(self, ctx): + ctx.app = "APP" + ctx.caller = "CALLER" + ctx.caller_kind = "account" + + pipeline = Pipeline(FakeStep()) + + @pipeline.guard(scope="apps:run") + def handler(app_model, caller, caller_kind): + seen["app_model"] = app_model + seen["caller"] = caller + seen["caller_kind"] = caller_kind + return "ok" + + app = Flask(__name__) + with app.test_request_context("/x", 
method="POST"): + assert handler() == "ok" + assert seen == {"app_model": "APP", "caller": "CALLER", "caller_kind": "account"} diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_app_resolver.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_app_resolver.py new file mode 100644 index 0000000000..4d64f4b881 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_step_app_resolver.py @@ -0,0 +1,64 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest +from werkzeug.exceptions import BadRequest, Forbidden, NotFound + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import AppResolver +from models import TenantStatus + + +def _ctx(view_args): + req = MagicMock() + req.view_args = view_args + return Context(request=req, required_scope="apps:run") + + +def _app(*, status="normal", enable_api=True): + return SimpleNamespace(id="app1", tenant_id="t1", status=status, enable_api=enable_api) + + +def _tenant(*, status=TenantStatus.NORMAL): + return SimpleNamespace(id="t1", status=status) + + +def test_resolver_rejects_missing_path_param(): + with pytest.raises(BadRequest): + AppResolver()(_ctx({})) + + +def test_resolver_rejects_none_view_args(): + with pytest.raises(BadRequest): + AppResolver()(_ctx(None)) + + +@patch("controllers.openapi.auth.steps.db") +def test_resolver_404_when_app_missing(db): + db.session.get.side_effect = [None] + with pytest.raises(NotFound): + AppResolver()(_ctx({"app_id": "x"})) + + +@patch("controllers.openapi.auth.steps.db") +def test_resolver_403_when_disabled(db): + db.session.get.side_effect = [_app(enable_api=False)] + with pytest.raises(Forbidden) as exc: + AppResolver()(_ctx({"app_id": "x"})) + assert "service_api_disabled" in str(exc.value.description) + + +@patch("controllers.openapi.auth.steps.db") +def test_resolver_403_when_tenant_archived(db): + db.session.get.side_effect = [_app(), 
_tenant(status=TenantStatus.ARCHIVE)] + with pytest.raises(Forbidden): + AppResolver()(_ctx({"app_id": "x"})) + + +@patch("controllers.openapi.auth.steps.db") +def test_resolver_populates_app_and_tenant(db): + db.session.get.side_effect = [_app(), _tenant()] + ctx = _ctx({"app_id": "x"}) + AppResolver()(ctx) + assert ctx.app.id == "app1" + assert ctx.tenant.id == "t1" diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_authz.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_authz.py new file mode 100644 index 0000000000..4e221d4d06 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_step_authz.py @@ -0,0 +1,75 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest +from werkzeug.exceptions import Forbidden + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import AppAuthzCheck +from controllers.openapi.auth.strategies import AclStrategy, MembershipStrategy +from libs.oauth_bearer import SubjectType + + +def _ctx(*, subject_type, account_id="acc1"): + c = Context(request=MagicMock(), required_scope="apps:run") + c.subject_type = subject_type + c.subject_email = "alice@example.com" + c.account_id = account_id + c.app = SimpleNamespace(id="app1") + c.tenant = SimpleNamespace(id="t1") + return c + + +@patch("controllers.openapi.auth.strategies.EnterpriseService") +def test_acl_strategy_private_calls_inner_api(ent): + ent.WebAppAuth.get_app_access_mode_by_id.return_value = SimpleNamespace(access_mode="private") + ent.WebAppAuth.is_user_allowed_to_access_webapp.return_value = True + assert AclStrategy().authorize(_ctx(subject_type=SubjectType.ACCOUNT)) is True + ent.WebAppAuth.is_user_allowed_to_access_webapp.assert_called_once_with( + user_id="acc1", + app_id="app1", + ) + + +@pytest.mark.parametrize( + ("access_mode", "subject_type", "expected"), + [ + ("public", SubjectType.ACCOUNT, True), + ("public", SubjectType.EXTERNAL_SSO, True), + 
("sso_verified", SubjectType.ACCOUNT, True), + ("sso_verified", SubjectType.EXTERNAL_SSO, True), + ("private_all", SubjectType.ACCOUNT, True), + ("private_all", SubjectType.EXTERNAL_SSO, False), + ("private", SubjectType.EXTERNAL_SSO, False), + ], +) +@patch("controllers.openapi.auth.strategies.EnterpriseService") +def test_acl_strategy_subject_mode_matrix(ent, access_mode, subject_type, expected): + """Step 1 matrix: subject vs access-mode compatibility. No inner API call expected.""" + ent.WebAppAuth.get_app_access_mode_by_id.return_value = SimpleNamespace(access_mode=access_mode) + account_id = "acc1" if subject_type == SubjectType.ACCOUNT else None + assert AclStrategy().authorize(_ctx(subject_type=subject_type, account_id=account_id)) is expected + ent.WebAppAuth.is_user_allowed_to_access_webapp.assert_not_called() + + +@patch("controllers.openapi.auth.strategies._has_tenant_membership") +def test_membership_strategy_uses_join_lookup(member): + member.return_value = True + assert MembershipStrategy().authorize(_ctx(subject_type=SubjectType.ACCOUNT)) is True + member.assert_called_once_with("acc1", "t1") + + +def test_membership_strategy_rejects_external_sso(): + assert MembershipStrategy().authorize(_ctx(subject_type=SubjectType.EXTERNAL_SSO, account_id=None)) is False + + +def test_app_authz_check_raises_when_strategy_denies(): + deny = SimpleNamespace(authorize=lambda c: False) + with pytest.raises(Forbidden) as exc: + AppAuthzCheck(lambda: deny)(_ctx(subject_type=SubjectType.ACCOUNT)) + assert "subject_no_app_access" in str(exc.value.description) + + +def test_app_authz_check_passes_when_strategy_allows(): + allow = SimpleNamespace(authorize=lambda c: True) + AppAuthzCheck(lambda: allow)(_ctx(subject_type=SubjectType.ACCOUNT)) diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_bearer.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_bearer.py new file mode 100644 index 0000000000..5168a47204 --- /dev/null +++ 
b/api/tests/unit_tests/controllers/openapi/auth/test_step_bearer.py @@ -0,0 +1,67 @@ +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask, g +from werkzeug.exceptions import Unauthorized + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import BearerCheck +from libs.oauth_bearer import AuthContext, InvalidBearerError, Scope, SubjectType + + +def _ctx(headers): + req = MagicMock() + req.headers = headers + return Context(request=req, required_scope="apps:run") + + +def test_bearer_check_rejects_missing_header(): + app = Flask(__name__) + with app.test_request_context(), pytest.raises(Unauthorized): + BearerCheck()(_ctx({})) + + +@patch("controllers.openapi.auth.steps.get_authenticator") +def test_bearer_check_rejects_unknown_prefix(get_auth): + get_auth.return_value.authenticate.side_effect = InvalidBearerError("unknown token prefix") + app = Flask(__name__) + with app.test_request_context(), pytest.raises(Unauthorized): + BearerCheck()(_ctx({"Authorization": "Bearer xxx_abc"})) + + +@patch("controllers.openapi.auth.steps.get_authenticator") +def test_bearer_check_populates_context_and_g_auth_ctx(get_auth): + tok_id = uuid.uuid4() + authn = AuthContext( + subject_type=SubjectType.ACCOUNT, + subject_email="a@x.com", + subject_issuer=None, + account_id=None, + client_id="difyctl", + scopes=frozenset({Scope.FULL}), + token_id=tok_id, + source="oauth-account", + expires_at=datetime.now(UTC), + token_hash="hash-1", + verified_tenants={}, + ) + get_auth.return_value.authenticate.return_value = authn + + app = Flask(__name__) + ctx = _ctx({"Authorization": "Bearer dfoa_abc"}) + with app.test_request_context(): + BearerCheck()(ctx) + + assert ctx.subject_type == SubjectType.ACCOUNT + assert ctx.subject_email == "a@x.com" + assert ctx.scopes == frozenset({Scope.FULL}) + assert ctx.source == "oauth-account" + assert ctx.token_id == tok_id + assert 
ctx.token_hash == "hash-1" + # BearerCheck must also publish the same identity on `g.auth_ctx` + # so the surface gate + downstream handlers don't see two + # different identity sources between the decorator + pipeline paths. + assert g.auth_ctx is authn + assert g.auth_ctx.client_id == "difyctl" diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_layer0.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_layer0.py new file mode 100644 index 0000000000..4ae8f90246 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_step_layer0.py @@ -0,0 +1,157 @@ +"""Unit tests for WorkspaceMembershipCheck (Layer 0).""" + +from __future__ import annotations + +import uuid +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + +import pytest +from werkzeug.exceptions import Forbidden + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import WorkspaceMembershipCheck +from libs.oauth_bearer import SubjectType + + +def _ctx(*, subject_type, account_id, tenant_id, cached_verified_tenants=None, token_hash=None) -> Context: + c = Context(request=MagicMock(), required_scope="apps:read") + c.subject_type = subject_type + c.account_id = account_id + c.tenant = SimpleNamespace(id=tenant_id) if tenant_id else None + c.cached_verified_tenants = cached_verified_tenants + c.token_hash = token_hash + return c + + +@pytest.fixture +def step(): + return WorkspaceMembershipCheck() + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_skips_when_enterprise_enabled(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = True + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + account_id=str(uuid.uuid4()), + tenant_id=str(uuid.uuid4()), + cached_verified_tenants={}, + token_hash="hash-1", + ) + step(ctx) # no raise + mock_db.session.execute.assert_not_called() + 
mock_record.assert_not_called() + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_skips_for_external_sso(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + ctx = _ctx( + subject_type=SubjectType.EXTERNAL_SSO, + account_id=None, + tenant_id=str(uuid.uuid4()), + cached_verified_tenants={}, + token_hash="hash-1", + ) + step(ctx) # no raise + mock_db.session.execute.assert_not_called() + mock_record.assert_not_called() + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_uses_cached_ok(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + account_id="a1", + tenant_id="t1", + cached_verified_tenants={"t1": True}, + token_hash="hash-1", + ) + step(ctx) + mock_db.session.execute.assert_not_called() + mock_record.assert_not_called() + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_uses_cached_denied(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + account_id="a1", + tenant_id="t1", + cached_verified_tenants={"t1": False}, + token_hash="hash-1", + ) + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + step(ctx) + mock_db.session.execute.assert_not_called() + mock_record.assert_not_called() + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_denies_when_no_membership(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.return_value.scalar_one_or_none.return_value = None + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + 
account_id="a1", + tenant_id="t1", + cached_verified_tenants={}, + token_hash="hash-1", + ) + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + step(ctx) + mock_record.assert_called_once_with("hash-1", "t1", False) + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_denies_when_account_inactive(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.side_effect = [ + MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")), + MagicMock(scalar_one_or_none=MagicMock(return_value="banned")), + ] + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + account_id="a1", + tenant_id="t1", + cached_verified_tenants={}, + token_hash="hash-1", + ) + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + step(ctx) + mock_record.assert_called_once_with("hash-1", "t1", False) + + +@patch("controllers.openapi.auth.steps.dify_config") +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +def test_allows_active_member(mock_db, mock_record, mock_cfg, step): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.side_effect = [ + MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")), + MagicMock(scalar_one_or_none=MagicMock(return_value="active")), + ] + ctx = _ctx( + subject_type=SubjectType.ACCOUNT, + account_id="a1", + tenant_id="t1", + cached_verified_tenants={}, + token_hash="hash-1", + ) + step(ctx) # no raise + mock_record.assert_called_once_with("hash-1", "t1", True) diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_mount.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_mount.py new file mode 100644 index 0000000000..e3a4c6675b --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_step_mount.py @@ -0,0 +1,77 @@ +from types import SimpleNamespace +from unittest.mock import MagicMock, patch + 
+import pytest +from werkzeug.exceptions import Unauthorized + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import CallerMount +from controllers.openapi.auth.strategies import AccountMounter, EndUserMounter +from core.app.entities.app_invoke_entities import InvokeFrom +from libs.oauth_bearer import SubjectType + + +def _ctx(*, subject_type, account_id=None, subject_email=None): + c = Context(request=MagicMock(), required_scope="apps:run") + c.subject_type = subject_type + c.account_id = account_id + c.subject_email = subject_email + c.app = SimpleNamespace(id="app1") + c.tenant = SimpleNamespace(id="t1") + return c + + +@patch("controllers.openapi.auth.strategies._login_as") +@patch("controllers.openapi.auth.strategies.db") +def test_account_mounter(db, login): + account = SimpleNamespace() + db.session.get.return_value = account + ctx = _ctx(subject_type=SubjectType.ACCOUNT, account_id="acc1") + AccountMounter().mount(ctx) + assert ctx.caller is account + assert ctx.caller.current_tenant is ctx.tenant + assert ctx.caller_kind == "account" + login.assert_called_once_with(account) + + +@patch("controllers.openapi.auth.strategies._login_as") +@patch("controllers.openapi.auth.strategies.EndUserService") +def test_end_user_mounter(svc, login): + eu = SimpleNamespace() + svc.get_or_create_end_user_by_type.return_value = eu + ctx = _ctx(subject_type=SubjectType.EXTERNAL_SSO, subject_email="a@x.com") + EndUserMounter().mount(ctx) + svc.get_or_create_end_user_by_type.assert_called_once_with( + InvokeFrom.OPENAPI, + tenant_id="t1", + app_id="app1", + user_id="a@x.com", + ) + assert ctx.caller is eu + assert ctx.caller_kind == "end_user" + + +def test_caller_mount_dispatches_by_subject_type(): + seen = {} + + class Fake: + def __init__(self, st, tag): + self._st, self._tag = st, tag + + def applies_to(self, st): + return st == self._st + + def mount(self, ctx): + seen["who"] = self._tag + + cm = CallerMount( + 
Fake(SubjectType.ACCOUNT, "acct"), + Fake(SubjectType.EXTERNAL_SSO, "sso"), + ) + cm(_ctx(subject_type=SubjectType.EXTERNAL_SSO)) + assert seen == {"who": "sso"} + + +def test_caller_mount_raises_when_none_applies(): + with pytest.raises(Unauthorized): + CallerMount()(_ctx(subject_type=SubjectType.ACCOUNT)) diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_step_scope.py b/api/tests/unit_tests/controllers/openapi/auth/test_step_scope.py new file mode 100644 index 0000000000..6e3044d73f --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_step_scope.py @@ -0,0 +1,27 @@ +from unittest.mock import MagicMock + +import pytest +from werkzeug.exceptions import Forbidden + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import ScopeCheck + + +def _ctx(scopes, required): + c = Context(request=MagicMock(), required_scope=required) + c.scopes = frozenset(scopes) + return c + + +def test_scope_check_passes_on_full(): + ScopeCheck()(_ctx({"full"}, "apps:run")) + + +def test_scope_check_passes_on_explicit_match(): + ScopeCheck()(_ctx({"apps:run"}, "apps:run")) + + +def test_scope_check_rejects_when_missing(): + with pytest.raises(Forbidden) as exc: + ScopeCheck()(_ctx({"apps:read"}, "apps:run")) + assert "insufficient_scope" in str(exc.value.description) diff --git a/api/tests/unit_tests/controllers/openapi/auth/test_surface_gate.py b/api/tests/unit_tests/controllers/openapi/auth/test_surface_gate.py new file mode 100644 index 0000000000..389ea06dc1 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/auth/test_surface_gate.py @@ -0,0 +1,181 @@ +"""Surface gate tests. 
+ +The gate has two attachment forms — decorator (`accept_subjects`) and +pipeline step (`SurfaceCheck`) — and both must: +- 403 on mismatched subject type with a canonical-path hint +- emit `openapi.wrong_surface_denied` once with the right payload +- pass-through on match +- raise RuntimeError (not 403) if g.auth_ctx is missing — that's a + wiring bug, not a user-driven failure +""" + +from __future__ import annotations + +import uuid +from datetime import UTC, datetime +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask, g +from werkzeug.exceptions import Forbidden + +from controllers.openapi.auth.context import Context +from controllers.openapi.auth.steps import SurfaceCheck +from controllers.openapi.auth.surface_gate import accept_subjects, check_surface +from libs.oauth_bearer import AuthContext, Scope, SubjectType + + +def _account_ctx() -> AuthContext: + return AuthContext( + subject_type=SubjectType.ACCOUNT, + subject_email="user@example.com", + subject_issuer="dify:account", + account_id=uuid.uuid4(), + client_id="difyctl", + scopes=frozenset({Scope.FULL}), + token_id=uuid.uuid4(), + source="oauth_account", + expires_at=datetime.now(UTC), + token_hash="h1", + verified_tenants={}, + ) + + +def _sso_ctx() -> AuthContext: + return AuthContext( + subject_type=SubjectType.EXTERNAL_SSO, + subject_email="sso@partner.com", + subject_issuer="https://idp.partner.com", + account_id=None, + client_id="difyctl", + scopes=frozenset({Scope.APPS_RUN, Scope.APPS_READ_PERMITTED_EXTERNAL}), + token_id=uuid.uuid4(), + source="oauth_external_sso", + expires_at=datetime.now(UTC), + token_hash="h2", + verified_tenants={}, + ) + + +# --------------------------------------------------------------------------- +# check_surface — shared core +# --------------------------------------------------------------------------- + + +def test_check_surface_passes_when_subject_in_accepted(): + app = Flask(__name__) + with 
app.test_request_context("/openapi/v1/apps"): + g.auth_ctx = _account_ctx() + check_surface(frozenset({SubjectType.ACCOUNT})) # no raise + + +def test_check_surface_rejects_on_wrong_subject_and_emits_audit(): + app = Flask(__name__) + with app.test_request_context("/openapi/v1/permitted-external-apps"): + g.auth_ctx = _account_ctx() + with patch("controllers.openapi.auth.surface_gate.emit_wrong_surface") as emit: + with pytest.raises(Forbidden) as exc: + check_surface(frozenset({SubjectType.EXTERNAL_SSO})) + assert "wrong_surface" in exc.value.description + # canonical-path hint should point at the caller's surface, + # not the surface they were rejected from + assert "/openapi/v1/apps" in exc.value.description + emit.assert_called_once() + kwargs = emit.call_args.kwargs + assert kwargs["subject_type"] == SubjectType.ACCOUNT.value + assert kwargs["attempted_path"] == "/openapi/v1/permitted-external-apps" + assert kwargs["client_id"] == "difyctl" + assert kwargs["token_id"] is not None + + +def test_check_surface_rejects_sso_on_account_surface(): + app = Flask(__name__) + with app.test_request_context("/openapi/v1/apps"): + g.auth_ctx = _sso_ctx() + with patch("controllers.openapi.auth.surface_gate.emit_wrong_surface") as emit: + with pytest.raises(Forbidden): + check_surface(frozenset({SubjectType.ACCOUNT})) + kwargs = emit.call_args.kwargs + assert kwargs["subject_type"] == SubjectType.EXTERNAL_SSO.value + + +def test_check_surface_runtime_error_when_g_auth_ctx_missing(): + """Missing g.auth_ctx means the bearer layer didn't run — wiring bug, + not a user-driven failure. Surface as RuntimeError (loud) so a future + refactor doesn't accidentally let a route skip authentication and + return a 403 that looks identical to a legitimate wrong-surface deny. 
+ """ + app = Flask(__name__) + with app.test_request_context("/openapi/v1/apps"): + with pytest.raises(RuntimeError): + check_surface(frozenset({SubjectType.ACCOUNT})) + + +# --------------------------------------------------------------------------- +# @accept_subjects — decorator form +# --------------------------------------------------------------------------- + + +def _make_app() -> Flask: + app = Flask(__name__) + + @app.route("/account-only") + @accept_subjects(SubjectType.ACCOUNT) + def _account_only(): + return "ok" + + @app.route("/external-only") + @accept_subjects(SubjectType.EXTERNAL_SSO) + def _external_only(): + return "ok" + + return app + + +def test_accept_subjects_decorator_passes_on_match(): + app = _make_app() + with app.test_request_context("/account-only"): + g.auth_ctx = _account_ctx() + # Re-route through the decorated function by reaching for view_function + view = app.view_functions["_account_only"] + assert view() == "ok" + + +def test_accept_subjects_decorator_403_on_miss(): + app = _make_app() + with app.test_request_context("/external-only"): + g.auth_ctx = _account_ctx() + view = app.view_functions["_external_only"] + with patch("controllers.openapi.auth.surface_gate.emit_wrong_surface"): + with pytest.raises(Forbidden): + view() + + +# --------------------------------------------------------------------------- +# SurfaceCheck — pipeline step form +# --------------------------------------------------------------------------- + + +def _pipeline_ctx() -> Context: + req = MagicMock() + req.path = "/openapi/v1/apps//run" + return Context(request=req, required_scope=Scope.APPS_RUN) + + +def test_surface_check_passes_on_match(): + step = SurfaceCheck(accepted=frozenset({SubjectType.ACCOUNT})) + app = Flask(__name__) + with app.test_request_context("/openapi/v1/apps/x/run"): + g.auth_ctx = _account_ctx() + step(_pipeline_ctx()) # no raise + + +def test_surface_check_rejects_on_miss_and_emits_audit(): + step = 
SurfaceCheck(accepted=frozenset({SubjectType.EXTERNAL_SSO})) + app = Flask(__name__) + with app.test_request_context("/openapi/v1/apps/x/run"): + g.auth_ctx = _account_ctx() + with patch("controllers.openapi.auth.surface_gate.emit_wrong_surface") as emit: + with pytest.raises(Forbidden): + step(_pipeline_ctx()) + emit.assert_called_once() diff --git a/api/tests/unit_tests/controllers/openapi/conftest.py b/api/tests/unit_tests/controllers/openapi/conftest.py new file mode 100644 index 0000000000..9486ff6e94 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/conftest.py @@ -0,0 +1,15 @@ +import pytest + +from controllers.openapi.auth.pipeline import Pipeline + + +@pytest.fixture +def bypass_pipeline(monkeypatch): + """Stub Pipeline.run so endpoint decoration does not invoke real auth. + + Module-level @OAUTH_BEARER_PIPELINE.guard(...) captures the real + pipeline at import time; mocking the module attribute does not undo + that. Patching Pipeline.run on the class is the bypass that actually + works. 
+ """ + monkeypatch.setattr(Pipeline, "run", lambda self, ctx: None) diff --git a/api/tests/unit_tests/controllers/openapi/test_account.py b/api/tests/unit_tests/controllers/openapi/test_account.py new file mode 100644 index 0000000000..8a00b2a2a5 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_account.py @@ -0,0 +1,140 @@ +"""User-scoped identity + session endpoints under /openapi/v1/account.""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.account import ( + AccountApi, + AccountSessionByIdApi, + AccountSessionsApi, + AccountSessionsSelfApi, +) + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def _rule(app: Flask, path: str): + return next(r for r in app.url_map.iter_rules() if r.rule == path) + + +def test_account_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/account" in rules + + +def test_account_dispatches_to_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account") + assert openapi_app.view_functions[rule.endpoint].view_class is AccountApi + + +def test_account_sessions_self_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/account/sessions/self" in rules + + +def test_sessions_self_dispatches_to_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account/sessions/self") + assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionsSelfApi + + +def test_account_methods(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account") + assert "GET" in rule.methods + + +def 
test_sessions_self_methods(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account/sessions/self") + assert "DELETE" in rule.methods + + +def test_sessions_list_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/account/sessions" in rules + + +def test_sessions_list_dispatches_to_sessions_api(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account/sessions") + assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionsApi + assert "GET" in rule.methods + + +def test_session_by_id_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/account/sessions/" in rules + + +def test_session_by_id_dispatches_to_correct_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/account/sessions/") + assert openapi_app.view_functions[rule.endpoint].view_class is AccountSessionByIdApi + assert "DELETE" in rule.methods + + +def test_subject_match_for_account_filters_by_account_id(): + """Account subject scopes queries via account_id.""" + import uuid as _uuid + + from controllers.openapi.account import _subject_match + from libs.oauth_bearer import AuthContext, SubjectType + + aid = _uuid.uuid4() + ctx = AuthContext( + subject_type=SubjectType.ACCOUNT, + subject_email="user@example.com", + subject_issuer="dify:account", + account_id=aid, + client_id="difyctl", + scopes=frozenset({"full"}), + token_id=_uuid.uuid4(), + source="oauth_account", + expires_at=None, + token_hash="h1", + verified_tenants={}, + ) + clauses = _subject_match(ctx) + # One predicate, on account_id + assert len(clauses) == 1 + assert "account_id" in str(clauses[0]) + + +def test_subject_match_for_external_sso_filters_by_email_and_issuer(): + """External SSO subject scopes via (subject_email, subject_issuer) + AND account_id IS NULL — so a same-email account row from a + federated tenant cannot be revoked through an SSO 
bearer. + """ + import uuid as _uuid + + from controllers.openapi.account import _subject_match + from libs.oauth_bearer import AuthContext, SubjectType + + ctx = AuthContext( + subject_type=SubjectType.EXTERNAL_SSO, + subject_email="sso@partner.com", + subject_issuer="https://idp.partner.com", + account_id=None, + client_id="difyctl", + scopes=frozenset({"apps:run"}), + token_id=_uuid.uuid4(), + source="oauth_external_sso", + expires_at=None, + token_hash="h1", + verified_tenants={}, + ) + clauses = _subject_match(ctx) + assert len(clauses) == 3 + rendered = " ".join(str(c) for c in clauses) + assert "subject_email" in rendered + assert "subject_issuer" in rendered + assert "account_id IS NULL" in rendered diff --git a/api/tests/unit_tests/controllers/openapi/test_app_describe_query.py b/api/tests/unit_tests/controllers/openapi/test_app_describe_query.py new file mode 100644 index 0000000000..a6abdc95eb --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_app_describe_query.py @@ -0,0 +1,48 @@ +"""Unit tests for AppDescribeQuery (`?fields=` allow-list).""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from controllers.openapi.apps import AppDescribeQuery + + +def test_no_fields_returns_none() -> None: + q = AppDescribeQuery.model_validate({}) + assert q.fields is None + + +def test_empty_string_returns_none() -> None: + q = AppDescribeQuery.model_validate({"fields": ""}) + assert q.fields is None + + +def test_single_field() -> None: + q = AppDescribeQuery.model_validate({"fields": "info"}) + assert q.fields == {"info"} + + +def test_comma_list() -> None: + q = AppDescribeQuery.model_validate({"fields": "info,parameters"}) + assert q.fields == {"info", "parameters"} + + +def test_whitespace_tolerant() -> None: + q = AppDescribeQuery.model_validate({"fields": " info , input_schema "}) + assert q.fields == {"info", "input_schema"} + + +def test_unknown_member_rejected() -> None: + with 
pytest.raises(ValidationError): + AppDescribeQuery.model_validate({"fields": "garbage"}) + + +def test_unknown_among_known_rejected() -> None: + with pytest.raises(ValidationError): + AppDescribeQuery.model_validate({"fields": "info,garbage"}) + + +def test_extra_param_forbidden() -> None: + with pytest.raises(ValidationError): + AppDescribeQuery.model_validate({"fields": "info", "page": "1"}) diff --git a/api/tests/unit_tests/controllers/openapi/test_app_list_query.py b/api/tests/unit_tests/controllers/openapi/test_app_list_query.py new file mode 100644 index 0000000000..f7e8e9c73a --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_app_list_query.py @@ -0,0 +1,105 @@ +"""Unit tests for AppListQuery — the /apps query-param validator. + +Runs against the model directly, not the HTTP layer. Pins: +- defaults match the plan (page=1, limit=20). +- workspace_id is required. +- numeric bounds enforced (page >= 1, limit in [1, MAX_PAGE_LIMIT]). +- mode validates against the AppMode enum. +- name and tag have length caps. 
+""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from controllers.openapi._models import MAX_PAGE_LIMIT +from controllers.openapi.apps import AppListQuery + + +def test_defaults(): + q = AppListQuery.model_validate({"workspace_id": "ws-1"}) + assert q.workspace_id == "ws-1" + assert q.page == 1 + assert q.limit == 20 + assert q.mode is None + assert q.name is None + assert q.tag is None + + +def test_workspace_id_required(): + with pytest.raises(ValidationError): + AppListQuery.model_validate({}) + + +def test_page_must_be_positive(): + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "page": 0}) + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "page": -1}) + + +def test_page_rejects_non_integer_string(): + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "page": "abc"}) + + +def test_limit_must_be_positive(): + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "limit": 0}) + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "limit": -1}) + + +def test_limit_caps_at_max_page_limit(): + # Boundary accepts. + q = AppListQuery.model_validate({"workspace_id": "ws-1", "limit": MAX_PAGE_LIMIT}) + assert q.limit == MAX_PAGE_LIMIT + + # Just over rejects. + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "limit": MAX_PAGE_LIMIT + 1}) + + +def test_mode_whitelisted_against_app_mode(): + # Valid mode passes. + q = AppListQuery.model_validate({"workspace_id": "ws-1", "mode": "chat"}) + assert q.mode is not None + assert q.mode.value == "chat" + + # Invalid mode rejects. 
+ with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "mode": "not-a-mode"}) + + +def test_name_length_capped(): + AppListQuery.model_validate({"workspace_id": "ws-1", "name": "x" * 200}) + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "name": "x" * 201}) + + +def test_tag_length_capped(): + AppListQuery.model_validate({"workspace_id": "ws-1", "tag": "x" * 100}) + with pytest.raises(ValidationError): + AppListQuery.model_validate({"workspace_id": "ws-1", "tag": "x" * 101}) + + +def test_all_fields_accept_valid_values(): + """Pin the happy-path acceptance for every field in one place.""" + q = AppListQuery.model_validate( + { + "workspace_id": "ws-1", + "page": 5, + "limit": 50, + "mode": "workflow", + "name": "search", + "tag": "prod", + } + ) + assert q.workspace_id == "ws-1" + assert q.page == 5 + assert q.limit == 50 + assert q.mode is not None + assert q.mode.value == "workflow" + assert q.name == "search" + assert q.tag == "prod" diff --git a/api/tests/unit_tests/controllers/openapi/test_app_payloads.py b/api/tests/unit_tests/controllers/openapi/test_app_payloads.py new file mode 100644 index 0000000000..64cdc38250 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_app_payloads.py @@ -0,0 +1,55 @@ +"""Unit tests for app payload-rendering helpers — independent of +HTTP plumbing or DB. Pin the response shapes that are CLI contracts. 
+""" + +from __future__ import annotations + +from types import SimpleNamespace + +import pytest + +from controllers.openapi.apps import ( # pyright: ignore[reportPrivateUsage] + _EMPTY_PARAMETERS, + parameters_payload, +) +from controllers.service_api.app.error import AppUnavailableError + + +def _fake_app(**overrides): + base = { + "id": "app1", + "name": "X", + "description": "d", + "mode": "chat", + "author_name": "alice", + "tags": [SimpleNamespace(name="prod")], + "updated_at": None, + "enable_api": True, + "workflow": None, + "app_model_config": None, + } + base.update(overrides) + return SimpleNamespace(**base) + + +def test_parameters_payload_raises_app_unavailable_when_no_config(): + with pytest.raises(AppUnavailableError): + parameters_payload(_fake_app(mode="chat", app_model_config=None)) + + +def test_empty_parameters_constant_matches_describe_fallback_shape(): + """The fallback dict served by /describe when an app has no config + must match the spec's stated keys (opening_statement, suggested_questions, + user_input_form, file_upload, system_parameters).""" + assert set(_EMPTY_PARAMETERS.keys()) == { + "opening_statement", + "suggested_questions", + "user_input_form", + "file_upload", + "system_parameters", + } + assert _EMPTY_PARAMETERS["suggested_questions"] == [] + assert _EMPTY_PARAMETERS["user_input_form"] == [] + assert _EMPTY_PARAMETERS["opening_statement"] is None + assert _EMPTY_PARAMETERS["file_upload"] is None + assert _EMPTY_PARAMETERS["system_parameters"] == {} diff --git a/api/tests/unit_tests/controllers/openapi/test_app_run_dispatch.py b/api/tests/unit_tests/controllers/openapi/test_app_run_dispatch.py new file mode 100644 index 0000000000..1f20281e03 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_app_run_dispatch.py @@ -0,0 +1,45 @@ +import pytest +from werkzeug.exceptions import InternalServerError + +from controllers.openapi.app_run import ( + _DISPATCH, + AppRunRequest, + _unpack_blocking, +) +from models.model 
import AppMode + + +def test_dispatch_covers_runnable_modes(): + runnable = {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT, AppMode.COMPLETION, AppMode.WORKFLOW} + assert set(_DISPATCH) == runnable + + +def test_unpack_blocking_passes_through_mapping(): + assert _unpack_blocking({"a": 1}) == {"a": 1} + + +def test_unpack_blocking_unwraps_tuple(): + assert _unpack_blocking(({"a": 1}, 200)) == {"a": 1} + + +def test_unpack_blocking_rejects_non_mapping(): + with pytest.raises(InternalServerError): + _unpack_blocking("not a mapping") + + +def test_app_run_request_strips_blank_conversation_id(): + payload = AppRunRequest(inputs={}, conversation_id=" ") + assert payload.conversation_id is None + + +def test_app_run_request_rejects_invalid_uuid_conversation_id(): + from pydantic import ValidationError + with pytest.raises(ValidationError, match="conversation_id must be a valid UUID"): + AppRunRequest(inputs={}, conversation_id="not-a-uuid") + + +def test_app_run_request_accepts_valid_uuid_conversation_id(): + import uuid as _uuid + cid = str(_uuid.uuid4()) + payload = AppRunRequest(inputs={}, conversation_id=cid) + assert payload.conversation_id == cid diff --git a/api/tests/unit_tests/controllers/openapi/test_apps_permitted_external_query.py b/api/tests/unit_tests/controllers/openapi/test_apps_permitted_external_query.py new file mode 100644 index 0000000000..96873b04f4 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_apps_permitted_external_query.py @@ -0,0 +1,53 @@ +"""Unit tests for PermittedExternalAppsListQuery — the +/permitted-external-apps query validator. + +Strict ConfigDict(extra='forbid'): cross-tenant tag/workspace_id are +unresolvable, so the model must reject them as 422 instead of silently +dropping them. Mode/name/page/limit have the same shape as AppListQuery. 
+""" + +from __future__ import annotations + +import pytest +from pydantic import ValidationError + +from controllers.openapi.apps_permitted_external import PermittedExternalAppsListQuery + + +def test_query_defaults_match_apps_list(): + q = PermittedExternalAppsListQuery.model_validate({}) + assert q.page == 1 + assert q.limit == 20 + assert q.mode is None + assert q.name is None + + +def test_query_rejects_workspace_id(): + """workspace_id is meaningless for /permitted-external-apps (cross-tenant); + rejecting it forces CLI authors to drop the param rather than send it + silently.""" + with pytest.raises(ValidationError): + PermittedExternalAppsListQuery.model_validate({"workspace_id": "ws-1"}) + + +def test_query_rejects_tag(): + """Tags are tenant-scoped; cross-tenant tag resolution is undefined.""" + with pytest.raises(ValidationError): + PermittedExternalAppsListQuery.model_validate({"tag": "prod"}) + + +def test_query_validates_mode_against_app_mode(): + with pytest.raises(ValidationError): + PermittedExternalAppsListQuery.model_validate({"mode": "not-a-mode"}) + + +def test_query_clamps_limit_at_max(): + with pytest.raises(ValidationError): + PermittedExternalAppsListQuery.model_validate({"limit": 500}) + + +def test_query_accepts_valid_mode(): + """Pin the happy path: AppMode values pass.""" + q = PermittedExternalAppsListQuery.model_validate({"mode": "chat"}) + assert q.mode is not None + assert q.mode.value == "chat" diff --git a/api/tests/unit_tests/controllers/openapi/test_audit_app_run.py b/api/tests/unit_tests/controllers/openapi/test_audit_app_run.py new file mode 100644 index 0000000000..b2a115f955 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_audit_app_run.py @@ -0,0 +1,26 @@ +import logging + +from controllers.openapi._audit import EVENT_APP_RUN_OPENAPI, emit_app_run + + +def test_event_constant(): + assert EVENT_APP_RUN_OPENAPI == "app.run.openapi" + + +def test_emit_app_run_logs_with_audit_extra(caplog): + with 
caplog.at_level(logging.INFO, logger="controllers.openapi._audit"): + emit_app_run( + app_id="app1", + tenant_id="t1", + caller_kind="account", + mode="chat", + surface="apps", + ) + record = next(r for r in caplog.records if r.message and "app.run.openapi" in r.message) + assert record.audit is True + assert record.event == EVENT_APP_RUN_OPENAPI + assert record.app_id == "app1" + assert record.tenant_id == "t1" + assert record.caller_kind == "account" + assert record.mode == "chat" + assert record.surface == "apps" diff --git a/api/tests/unit_tests/controllers/openapi/test_cors.py b/api/tests/unit_tests/controllers/openapi/test_cors.py new file mode 100644 index 0000000000..895c685da1 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_cors.py @@ -0,0 +1,127 @@ +"""CORS posture for /openapi/v1/* — default empty allowlist (same-origin), +expandable via OPENAPI_CORS_ALLOW_ORIGINS. Cross-origin requests from +disallowed origins do not receive the Access-Control-Allow-Origin +header, which the browser then blocks. + +Tests use a fresh Blueprint + Flask-CORS per case because the production +blueprint is a module-level singleton and can't be reconfigured once +registered. +""" + +import builtins + +from flask import Blueprint, Flask +from flask.views import MethodView +from flask_cors import CORS +from flask_restx import Resource + +from configs import dify_config +from extensions.ext_blueprints import OPENAPI_HEADERS, OPENAPI_MAX_AGE_SECONDS +from libs.external_api import ExternalApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +def _make_app(allowed_origins: list[str], blueprint_name: str) -> Flask: + """Build a Flask app with a fresh openapi-style blueprint mirroring + production CORS settings, parameterised on the origin allowlist. 
+ """ + bp = Blueprint(blueprint_name, __name__, url_prefix="/openapi/v1") + api = ExternalApi(bp, version="1.0", title="OpenAPI Test", description="") + + @api.route("/_health") + class _Health(Resource): + def get(self): + return {"ok": True} + + CORS( + bp, + resources={r"/*": {"origins": allowed_origins}}, + supports_credentials=True, + allow_headers=list(OPENAPI_HEADERS), + methods=["GET", "POST", "PATCH", "DELETE", "OPTIONS"], + expose_headers=["X-Version"], + max_age=OPENAPI_MAX_AGE_SECONDS, + ) + + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(bp) + return app + + +def test_default_openapi_cors_allowlist_is_empty(): + """Default config admits no cross-origin until operator opts in.""" + assert dify_config.OPENAPI_CORS_ALLOW_ORIGINS == [] + + +def test_preflight_allowed_origin_returns_cors_headers(): + app = _make_app(["https://app.example.com"], "openapi_t1") + client = app.test_client() + response = client.options( + "/openapi/v1/_health", + headers={ + "Origin": "https://app.example.com", + "Access-Control-Request-Method": "GET", + }, + ) + + assert response.headers.get("Access-Control-Allow-Origin") == "https://app.example.com" + assert response.headers.get("Access-Control-Max-Age") == str(OPENAPI_MAX_AGE_SECONDS) + + +def test_preflight_disallowed_origin_omits_cors_headers(): + app = _make_app(["https://app.example.com"], "openapi_t2") + client = app.test_client() + response = client.options( + "/openapi/v1/_health", + headers={ + "Origin": "https://attacker.example", + "Access-Control-Request-Method": "GET", + }, + ) + + # flask-cors omits Allow-Origin for disallowed origins; browser blocks. 
+ assert "Access-Control-Allow-Origin" not in response.headers + + +def test_preflight_with_default_empty_allowlist_omits_cors_headers(): + app = _make_app([], "openapi_t3") + client = app.test_client() + response = client.options( + "/openapi/v1/_health", + headers={ + "Origin": "https://app.example.com", + "Access-Control-Request-Method": "GET", + }, + ) + + assert "Access-Control-Allow-Origin" not in response.headers + + +def test_same_origin_request_succeeds_without_origin_header(): + app = _make_app(["https://app.example.com"], "openapi_t4") + client = app.test_client() + # Browsers don't send Origin on same-origin GETs. + response = client.get("/openapi/v1/_health") + + assert response.status_code == 200 + assert response.get_json() == {"ok": True} + + +def test_authorization_header_is_in_allow_headers(): + """Bearer-authed routes need Authorization in the preflight response.""" + app = _make_app(["https://app.example.com"], "openapi_t5") + client = app.test_client() + response = client.options( + "/openapi/v1/_health", + headers={ + "Origin": "https://app.example.com", + "Access-Control-Request-Method": "GET", + "Access-Control-Request-Headers": "Authorization", + }, + ) + + allow_headers = response.headers.get("Access-Control-Allow-Headers", "").lower() + assert "authorization" in allow_headers diff --git a/api/tests/unit_tests/controllers/openapi/test_device_approve_deny.py b/api/tests/unit_tests/controllers/openapi/test_device_approve_deny.py new file mode 100644 index 0000000000..dbe2f7bfae --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_device_approve_deny.py @@ -0,0 +1,52 @@ +"""Account-branch device-flow approve/deny under /openapi/v1.""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.oauth_device import DeviceApproveApi, DeviceDenyApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = 
MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def _rule(app: Flask, path: str): + return next(r for r in app.url_map.iter_rules() if r.rule == path) + + +def test_approve_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/approve" in rules + + +def test_deny_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/deny" in rules + + +def test_approve_dispatches_to_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/approve") + assert openapi_app.view_functions[rule.endpoint].view_class is DeviceApproveApi + + +def test_deny_dispatches_to_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/deny") + assert openapi_app.view_functions[rule.endpoint].view_class is DeviceDenyApi + + +def test_approve_and_deny_methods(openapi_app: Flask): + approve = _rule(openapi_app, "/openapi/v1/oauth/device/approve") + deny = _rule(openapi_app, "/openapi/v1/oauth/device/deny") + assert "POST" in approve.methods + assert "POST" in deny.methods diff --git a/api/tests/unit_tests/controllers/openapi/test_device_code.py b/api/tests/unit_tests/controllers/openapi/test_device_code.py new file mode 100644 index 0000000000..821a423805 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_device_code.py @@ -0,0 +1,47 @@ +"""POST /openapi/v1/oauth/device/code is the canonical RFC 8628 device +authorization endpoint. + +Tests verify URL routing without invoking the handler — invoking would +require Redis, which the unit-test runtime does not initialise. 
+""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.oauth_device import OAuthDeviceCodeApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def test_openapi_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/code" in rules + + +def test_route_dispatches_to_class(openapi_app: Flask): + rule = next(r for r in openapi_app.url_map.iter_rules() if r.rule == "/openapi/v1/oauth/device/code") + assert openapi_app.view_functions[rule.endpoint].view_class is OAuthDeviceCodeApi + + +def test_route_accepts_post(openapi_app: Flask): + rule = next(r for r in openapi_app.url_map.iter_rules() if r.rule == "/openapi/v1/oauth/device/code") + assert "POST" in rule.methods + + +def test_known_client_ids_default_includes_difyctl(): + from configs import dify_config + + assert "difyctl" in dify_config.OPENAPI_KNOWN_CLIENT_IDS diff --git a/api/tests/unit_tests/controllers/openapi/test_device_lookup.py b/api/tests/unit_tests/controllers/openapi/test_device_lookup.py new file mode 100644 index 0000000000..5907378a73 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_device_lookup.py @@ -0,0 +1,36 @@ +"""GET /openapi/v1/oauth/device/lookup is the canonical user-code lookup.""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.oauth_device import OAuthDeviceLookupApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = 
Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def test_openapi_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/lookup" in rules + + +def test_route_dispatches_to_class(openapi_app: Flask): + rule = next(r for r in openapi_app.url_map.iter_rules() if r.rule == "/openapi/v1/oauth/device/lookup") + assert openapi_app.view_functions[rule.endpoint].view_class is OAuthDeviceLookupApi + + +def test_route_accepts_get(openapi_app: Flask): + rule = next(r for r in openapi_app.url_map.iter_rules() if r.rule == "/openapi/v1/oauth/device/lookup") + assert "GET" in rule.methods diff --git a/api/tests/unit_tests/controllers/openapi/test_device_sso.py b/api/tests/unit_tests/controllers/openapi/test_device_sso.py new file mode 100644 index 0000000000..95e4466a4f --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_device_sso.py @@ -0,0 +1,105 @@ +"""SSO-branch device-flow endpoints under /openapi/v1/oauth/device/.""" + +import builtins +from unittest.mock import MagicMock, patch + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.oauth_device_sso import ( + _email_belongs_to_dify_account, + approval_context, + approve_external, + sso_complete, + sso_initiate, +) + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def _rule(app: Flask, path: str): + return next(r for r in app.url_map.iter_rules() if r.rule == path) + + +def test_sso_initiate_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/sso-initiate" in rules + + +def 
test_sso_complete_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/sso-complete" in rules + + +def test_approval_context_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/approval-context" in rules + + +def test_approve_external_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/approve-external" in rules + + +def test_sso_initiate_dispatches_to_function(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/sso-initiate") + assert openapi_app.view_functions[rule.endpoint] is sso_initiate + + +def test_sso_complete_dispatches_to_function(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/sso-complete") + assert openapi_app.view_functions[rule.endpoint] is sso_complete + + +def test_approval_context_dispatches_to_function(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/approval-context") + assert openapi_app.view_functions[rule.endpoint] is approval_context + + +def test_approve_external_dispatches_to_function(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/oauth/device/approve-external") + assert openapi_app.view_functions[rule.endpoint] is approve_external + + +def test_sso_complete_idp_callback_url_uses_canonical_path(): + """sso_initiate hardcodes the IdP callback URL — must point at the + canonical /openapi/v1/ path so IdP-side ACS configuration matches. 
+ """ + from controllers.openapi import oauth_device_sso + + assert oauth_device_sso._SSO_COMPLETE_PATH == "/openapi/v1/oauth/device/sso-complete" + + +@pytest.mark.parametrize( + ("email", "row", "expected"), + [ + ("alice@example.com", "acc1", True), + ("alice@example.com", None, False), + ("Alice@Example.COM", "acc1", True), # case-insensitive lookup + (" alice@example.com ", "acc1", True), # surrounding whitespace stripped + ("", "acc1", False), + (" ", "acc1", False), + ("", None, False), + ], +) +@patch("controllers.openapi.oauth_device_sso.db") +def test_email_belongs_to_dify_account(db_mock, email, row, expected): + exec_result = MagicMock() + exec_result.scalar_one_or_none.return_value = row + db_mock.session.execute.return_value = exec_result + assert _email_belongs_to_dify_account(email) is expected + if email.strip(): + db_mock.session.execute.assert_called_once() + else: + db_mock.session.execute.assert_not_called() diff --git a/api/tests/unit_tests/controllers/openapi/test_device_token.py b/api/tests/unit_tests/controllers/openapi/test_device_token.py new file mode 100644 index 0000000000..8b83068856 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_device_token.py @@ -0,0 +1,31 @@ +"""POST /openapi/v1/oauth/device/token is the canonical poll endpoint.""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.oauth_device import OAuthDeviceTokenApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def test_openapi_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/oauth/device/token" in rules + + +def 
test_route_dispatches_to_class(openapi_app: Flask): + rule = next(r for r in openapi_app.url_map.iter_rules() if r.rule == "/openapi/v1/oauth/device/token") + assert openapi_app.view_functions[rule.endpoint].view_class is OAuthDeviceTokenApi diff --git a/api/tests/unit_tests/controllers/openapi/test_health.py b/api/tests/unit_tests/controllers/openapi/test_health.py new file mode 100644 index 0000000000..f59e4d9a97 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_health.py @@ -0,0 +1,33 @@ +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def test_health_returns_ok(app: Flask): + client = app.test_client() + response = client.get("/openapi/v1/_health") + + assert response.status_code == 200 + assert response.get_json() == {"ok": True} + + +def test_health_path_is_under_openapi_v1_prefix(app: Flask): + client = app.test_client() + assert client.get("/_health").status_code == 404 + assert client.get("/v1/_health").status_code == 404 + assert client.get("/openapi/v1/_health").status_code == 200 diff --git a/api/tests/unit_tests/controllers/openapi/test_input_schema.py b/api/tests/unit_tests/controllers/openapi/test_input_schema.py new file mode 100644 index 0000000000..73cb978ac1 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_input_schema.py @@ -0,0 +1,182 @@ +"""Unit tests for input_schema derivation.""" + +from __future__ import annotations + +import pytest + +from controllers.openapi._input_schema import _form_to_jsonschema + + +def _wrap(component: dict) -> list[dict]: + """user_input_form rows are single-key dicts: {"text-input": {...}}.""" + return [component] + + +def 
test_text_input_required() -> None: + form = _wrap({"text-input": {"variable": "industry", "label": "Industry", "required": True, "max_length": 200}}) + props, required = _form_to_jsonschema(form) + assert props == {"industry": {"type": "string", "title": "Industry", "maxLength": 200}} + assert required == ["industry"] + + +def test_paragraph_optional() -> None: + form = _wrap({"paragraph": {"variable": "context", "label": "Context", "required": False, "max_length": 4000}}) + props, required = _form_to_jsonschema(form) + assert props["context"] == {"type": "string", "title": "Context", "maxLength": 4000} + assert required == [] + + +def test_select_enum() -> None: + form = _wrap( + { + "select": { + "variable": "tier", + "label": "Tier", + "required": True, + "options": ["free", "pro", "enterprise"], + } + } + ) + props, required = _form_to_jsonschema(form) + assert props == {"tier": {"type": "string", "title": "Tier", "enum": ["free", "pro", "enterprise"]}} + assert required == ["tier"] + + +def test_number() -> None: + form = _wrap({"number": {"variable": "count", "label": "Count", "required": False}}) + props, _required = _form_to_jsonschema(form) + assert props["count"] == {"type": "number", "title": "Count"} + + +def test_file() -> None: + form = _wrap({"file": {"variable": "doc", "label": "Doc", "required": True}}) + props, required = _form_to_jsonschema(form) + assert props["doc"]["type"] == "object" + assert "title" in props["doc"] + assert required == ["doc"] + + +def test_file_list() -> None: + form = _wrap({"file-list": {"variable": "attachments", "label": "Attachments", "required": False}}) + props, _required = _form_to_jsonschema(form) + assert props["attachments"]["type"] == "array" + assert props["attachments"]["items"]["type"] == "object" + + +def test_unknown_type_skipped() -> None: + """Forward-compat: unknown variable types are skipped, not 500'd.""" + form = _wrap({"future-type": {"variable": "x", "label": "X", "required": False}}) + props, 
required = _form_to_jsonschema(form) + assert props == {} + assert required == [] + + +def test_required_order_preserved() -> None: + form = [ + {"text-input": {"variable": "a", "label": "A", "required": True}}, + {"text-input": {"variable": "b", "label": "B", "required": False}}, + {"text-input": {"variable": "c", "label": "C", "required": True}}, + ] + _props, required = _form_to_jsonschema(form) + assert required == ["a", "c"] + + +def test_max_length_omitted_when_zero() -> None: + form = _wrap({"text-input": {"variable": "x", "label": "X", "required": False, "max_length": 0}}) + props, _ = _form_to_jsonschema(form) + assert "maxLength" not in props["x"] + + +from unittest.mock import MagicMock + +from controllers.openapi._input_schema import EMPTY_INPUT_SCHEMA, build_input_schema +from controllers.service_api.app.error import AppUnavailableError +from models.model import AppMode + + +def _stub_app(mode: AppMode, *, form: list[dict] | None = None, has_workflow: bool | None = None): + """Returns a MagicMock whose .mode + workflow / app_model_config branch is wired up.""" + app = MagicMock() + app.mode = mode + if mode in (AppMode.WORKFLOW, AppMode.ADVANCED_CHAT): + if has_workflow is False: + app.workflow = None + else: + app.workflow = MagicMock() + app.workflow.user_input_form.return_value = form or [] + app.workflow.features_dict = {} + else: + if has_workflow is False: + app.app_model_config = None + else: + app.app_model_config = MagicMock() + app.app_model_config.to_dict.return_value = {"user_input_form": form or []} + return app + + +def test_chat_mode_includes_query() -> None: + app = _stub_app(AppMode.CHAT, form=[{"text-input": {"variable": "x", "label": "X", "required": True}}]) + schema = build_input_schema(app) + assert schema["$schema"] == "https://json-schema.org/draft/2020-12/schema" + assert "query" in schema["properties"] + assert schema["properties"]["query"]["type"] == "string" + assert schema["properties"]["query"]["minLength"] == 1 + assert 
"query" in schema["required"] + assert "inputs" in schema["required"] + assert schema["properties"]["inputs"]["additionalProperties"] is False + + +def test_agent_chat_mode_includes_query() -> None: + app = _stub_app(AppMode.AGENT_CHAT, form=[]) + schema = build_input_schema(app) + assert "query" in schema["properties"] + + +def test_advanced_chat_mode_includes_query() -> None: + app = _stub_app(AppMode.ADVANCED_CHAT, form=[]) + schema = build_input_schema(app) + assert "query" in schema["properties"] + + +def test_workflow_mode_omits_query() -> None: + app = _stub_app(AppMode.WORKFLOW, form=[]) + schema = build_input_schema(app) + assert "query" not in schema["properties"] + assert schema["required"] == ["inputs"] + + +def test_completion_mode_omits_query() -> None: + app = _stub_app(AppMode.COMPLETION, form=[]) + schema = build_input_schema(app) + assert "query" not in schema["properties"] + assert schema["required"] == ["inputs"] + + +def test_inputs_required_driven_by_form() -> None: + app = _stub_app( + AppMode.CHAT, + form=[ + {"text-input": {"variable": "industry", "label": "Industry", "required": True}}, + {"text-input": {"variable": "context", "label": "Context", "required": False}}, + ], + ) + schema = build_input_schema(app) + assert schema["properties"]["inputs"]["required"] == ["industry"] + + +def test_misconfigured_chat_raises_app_unavailable() -> None: + app = _stub_app(AppMode.CHAT, has_workflow=False) + with pytest.raises(AppUnavailableError): + build_input_schema(app) + + +def test_misconfigured_workflow_raises_app_unavailable() -> None: + app = _stub_app(AppMode.WORKFLOW, has_workflow=False) + with pytest.raises(AppUnavailableError): + build_input_schema(app) + + +def test_empty_input_schema_sentinel_shape() -> None: + assert EMPTY_INPUT_SCHEMA["type"] == "object" + assert EMPTY_INPUT_SCHEMA["properties"] == {} + assert EMPTY_INPUT_SCHEMA["required"] == [] diff --git a/api/tests/unit_tests/controllers/openapi/test_models.py 
b/api/tests/unit_tests/controllers/openapi/test_models.py new file mode 100644 index 0000000000..d29b592f6a --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_models.py @@ -0,0 +1,31 @@ +from controllers.openapi._models import MessageMetadata, UsageInfo + + +def test_usage_info_defaults_zero(): + u = UsageInfo() + assert u.prompt_tokens == 0 + assert u.completion_tokens == 0 + assert u.total_tokens == 0 + + +def test_message_metadata_accepts_partial(): + m = MessageMetadata(usage=UsageInfo(total_tokens=10)) + assert m.usage.total_tokens == 10 + assert m.retriever_resources == [] + + +def test_describe_response_all_blocks_optional() -> None: + from controllers.openapi._models import AppDescribeResponse + + payload = AppDescribeResponse().model_dump(mode="json", exclude_none=False) + assert payload == {"info": None, "parameters": None, "input_schema": None} + + +def test_describe_response_input_schema_field() -> None: + from controllers.openapi._models import AppDescribeResponse + + schema = {"$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object"} + payload = AppDescribeResponse(input_schema=schema).model_dump(mode="json", exclude_none=False) + assert payload["input_schema"] == schema + assert payload["info"] is None + assert payload["parameters"] is None diff --git a/api/tests/unit_tests/controllers/openapi/test_pagination_envelope.py b/api/tests/unit_tests/controllers/openapi/test_pagination_envelope.py new file mode 100644 index 0000000000..ff8a4c19aa --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_pagination_envelope.py @@ -0,0 +1,127 @@ +"""Unit tests for PaginationEnvelope generic Pydantic model.""" + +from __future__ import annotations + +from pydantic import BaseModel + +from controllers.openapi._models import PaginationEnvelope + + +class _Row(BaseModel): + id: str + name: str + + +def test_envelope_basic_fields(): + env = PaginationEnvelope[_Row](page=1, limit=20, total=42, has_more=True, 
data=[_Row(id="a", name="A")]) + dumped = env.model_dump(mode="json") + assert dumped == { + "page": 1, + "limit": 20, + "total": 42, + "has_more": True, + "data": [{"id": "a", "name": "A"}], + } + + +def test_envelope_empty_data_no_more(): + env = PaginationEnvelope[_Row](page=1, limit=20, total=0, has_more=False, data=[]) + assert env.model_dump(mode="json")["data"] == [] + assert env.model_dump(mode="json")["has_more"] is False + + +def test_envelope_has_more_true_when_total_exceeds_page_window(): + env = PaginationEnvelope[_Row].build(page=1, limit=20, total=42, items=[_Row(id="a", name="A")]) + assert env.has_more is True + + +def test_envelope_has_more_false_when_total_within_page_window(): + env = PaginationEnvelope[_Row].build(page=2, limit=20, total=22, items=[_Row(id="a", name="A")]) + assert env.has_more is False + + +def test_envelope_has_more_false_for_last_page(): + env = PaginationEnvelope[_Row].build(page=3, limit=20, total=42, items=[_Row(id="a", name="A")]) + assert env.has_more is False + + +def test_max_page_limit_is_200(): + from controllers.openapi._models import MAX_PAGE_LIMIT + + assert MAX_PAGE_LIMIT == 200 + + +def test_envelope_uses_pep695_generics(): + """Verify the class uses PEP 695 native generic syntax (not legacy Generic[T]).""" + from controllers.openapi._models import PaginationEnvelope + + # PEP 695 syntax populates __type_params__; the legacy Generic[T] form does not. 
+ assert PaginationEnvelope.__type_params__, "expected PEP 695 native generic syntax" + + fields = PaginationEnvelope.model_fields + assert {"page", "limit", "total", "has_more", "data"} <= set(fields) + + +def test_app_info_response_dump_matches_spec(): + from controllers.openapi._models import AppInfoResponse + + obj = AppInfoResponse( + id="app1", + name="X", + description="d", + mode="chat", + author="alice", + tags=[{"name": "prod"}], + ) + assert obj.model_dump(mode="json") == { + "id": "app1", + "name": "X", + "description": "d", + "mode": "chat", + "author": "alice", + "tags": [{"name": "prod"}], + } + + +def test_app_describe_response_nests_info_and_parameters(): + from controllers.openapi._models import AppDescribeInfo, AppDescribeResponse + + info = AppDescribeInfo( + id="app1", + name="X", + mode="chat", + description=None, + tags=[], + author=None, + updated_at="2026-05-05T00:00:00+00:00", + service_api_enabled=True, + ) + obj = AppDescribeResponse(info=info, parameters={"opening_statement": None}) + dumped = obj.model_dump(mode="json") + assert dumped["info"]["service_api_enabled"] is True + assert dumped["parameters"]["opening_statement"] is None + + +def test_response_models_dump_per_mode(): + from controllers.openapi._models import ( + ChatMessageResponse, + CompletionMessageResponse, + WorkflowRunData, + WorkflowRunResponse, + ) + chat = ChatMessageResponse( + event="message", task_id="t1", id="m1", message_id="m1", + conversation_id="c1", mode="chat", answer="hi", created_at=0, + ) + assert chat.model_dump(mode="json")["mode"] == "chat" + wf = WorkflowRunResponse( + workflow_run_id="r1", task_id="t1", + data=WorkflowRunData(id="r1", workflow_id="w1", status="succeeded"), + ) + assert wf.model_dump(mode="json")["data"]["status"] == "succeeded" + assert wf.model_dump(mode="json")["mode"] == "workflow" + comp = CompletionMessageResponse( + event="message", task_id="t2", id="m2", message_id="m2", + mode="completion", answer="ok", created_at=0, + ) + 
assert comp.model_dump(mode="json")["mode"] == "completion" diff --git a/api/tests/unit_tests/controllers/openapi/test_workspaces.py b/api/tests/unit_tests/controllers/openapi/test_workspaces.py new file mode 100644 index 0000000000..9cdc13a395 --- /dev/null +++ b/api/tests/unit_tests/controllers/openapi/test_workspaces.py @@ -0,0 +1,58 @@ +"""Phase E step 17: workspace reads at /openapi/v1/workspaces. Bearer-authed +list + member-gated detail. No legacy /v1/ equivalent — the cookie-authed +/console/api/workspaces is a separate consumer that stays in console. +""" + +import builtins + +import pytest +from flask import Flask +from flask.views import MethodView + +from controllers.openapi import bp as openapi_bp +from controllers.openapi.workspaces import WorkspaceByIdApi, WorkspacesApi + +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + + +@pytest.fixture +def openapi_app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + app.register_blueprint(openapi_bp) + return app + + +def _rule(app: Flask, path: str): + return next(r for r in app.url_map.iter_rules() if r.rule == path) + + +def test_workspaces_list_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/workspaces" in rules + + +def test_workspaces_list_dispatches_to_workspaces_api(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/workspaces") + assert openapi_app.view_functions[rule.endpoint].view_class is WorkspacesApi + assert "GET" in rule.methods + + +def test_workspace_by_id_route_registered(openapi_app: Flask): + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/openapi/v1/workspaces/" in rules + + +def test_workspace_by_id_dispatches_to_correct_class(openapi_app: Flask): + rule = _rule(openapi_app, "/openapi/v1/workspaces/") + assert openapi_app.view_functions[rule.endpoint].view_class is WorkspaceByIdApi + assert "GET" in 
rule.methods + + +def test_console_legacy_workspaces_route_not_remounted_on_openapi(openapi_app: Flask): + """Phase E only adds the bearer-authed mounts on /openapi/v1/. + The cookie-authed /console/api/workspaces stays where it is. + """ + rules = {r.rule for r in openapi_app.url_map.iter_rules()} + assert "/console/api/workspaces" not in rules diff --git a/api/tests/unit_tests/core/app/test_invoke_from.py b/api/tests/unit_tests/core/app/test_invoke_from.py new file mode 100644 index 0000000000..e0a8344d2f --- /dev/null +++ b/api/tests/unit_tests/core/app/test_invoke_from.py @@ -0,0 +1,9 @@ +from core.app.entities.app_invoke_entities import InvokeFrom + + +def test_openapi_variant_present(): + assert InvokeFrom.OPENAPI.value == "openapi" + + +def test_openapi_distinct_from_service_api(): + assert InvokeFrom.OPENAPI != InvokeFrom.SERVICE_API diff --git a/api/tests/unit_tests/libs/test_oauth_bearer.py b/api/tests/unit_tests/libs/test_oauth_bearer.py new file mode 100644 index 0000000000..1ce25a48f7 --- /dev/null +++ b/api/tests/unit_tests/libs/test_oauth_bearer.py @@ -0,0 +1,29 @@ +"""Unit tests for the openapi bearer-scope catalog and TokenKind registry.""" + +from __future__ import annotations + +from unittest.mock import MagicMock + + +def test_apps_read_permitted_external_scope_present(): + from libs.oauth_bearer import Scope + + assert Scope.APPS_READ_PERMITTED_EXTERNAL.value == "apps:read:permitted-external" + + +def test_dfoe_token_kind_carries_apps_read_permitted_external(): + from libs.oauth_bearer import Scope, build_registry + + registry = build_registry(MagicMock(), MagicMock()) + dfoe = next(k for k in registry.kinds() if k.prefix == "dfoe_") + assert Scope.APPS_READ_PERMITTED_EXTERNAL in dfoe.scopes + + +def test_dfoa_token_kind_does_not_carry_apps_read_permitted_external(): + """dfoa_ relies on Scope.FULL umbrella; the explicit permitted scope + is reserved for dfoe_.""" + from libs.oauth_bearer import Scope, build_registry + + registry = 
build_registry(MagicMock(), MagicMock()) + dfoa = next(k for k in registry.kinds() if k.prefix == "dfoa_") + assert Scope.APPS_READ_PERMITTED_EXTERNAL not in dfoa.scopes diff --git a/api/tests/unit_tests/libs/test_oauth_bearer_layer0_cache.py b/api/tests/unit_tests/libs/test_oauth_bearer_layer0_cache.py new file mode 100644 index 0000000000..0023f17119 --- /dev/null +++ b/api/tests/unit_tests/libs/test_oauth_bearer_layer0_cache.py @@ -0,0 +1,94 @@ +"""Unit tests for record_layer0_verdict — merge L0 verdict into AuthContext cache.""" + +from __future__ import annotations + +import json +from unittest.mock import MagicMock, patch + +import pytest + +from libs.oauth_bearer import record_layer0_verdict + + +@pytest.fixture +def mock_redis(): + return MagicMock() + + +@patch("libs.oauth_bearer.redis_client") +def test_no_op_when_cache_entry_missing(mock_redis): + mock_redis.get.return_value = None + record_layer0_verdict("h1", "t1", True) + mock_redis.setex.assert_not_called() + + +@patch("libs.oauth_bearer.redis_client") +def test_no_op_when_cache_entry_invalid_marker(mock_redis): + mock_redis.get.return_value = b"invalid" + record_layer0_verdict("h1", "t1", True) + mock_redis.setex.assert_not_called() + + +@patch("libs.oauth_bearer.redis_client") +def test_no_op_when_json_malformed(mock_redis): + mock_redis.get.return_value = b"not json" + record_layer0_verdict("h1", "t1", True) + mock_redis.setex.assert_not_called() + + +@patch("libs.oauth_bearer.redis_client") +def test_no_op_when_ttl_expired(mock_redis): + mock_redis.get.return_value = json.dumps( + { + "subject_email": "e", + "subject_issuer": None, + "account_id": None, + "token_id": "tid", + "expires_at": None, + } + ).encode() + mock_redis.ttl.return_value = -1 + record_layer0_verdict("h1", "t1", True) + mock_redis.setex.assert_not_called() + + +@patch("libs.oauth_bearer.redis_client") +def test_merges_new_tenant_verdict(mock_redis): + mock_redis.get.return_value = json.dumps( + { + "subject_email": "e", + 
"subject_issuer": None, + "account_id": None, + "token_id": "tid", + "expires_at": None, + "verified_tenants": {"t0": True}, + } + ).encode() + mock_redis.ttl.return_value = 42 + + record_layer0_verdict("h1", "t1", False) + + mock_redis.setex.assert_called_once() + args = mock_redis.setex.call_args + assert args.args[0] == "auth:token:h1" + assert args.args[1] == 42 # remaining TTL preserved + written = json.loads(args.args[2]) + assert written["verified_tenants"] == {"t0": True, "t1": False} + + +@patch("libs.oauth_bearer.redis_client") +def test_merges_when_field_absent_from_legacy_entry(mock_redis): + """Backward compat: legacy cache entry without verified_tenants field.""" + mock_redis.get.return_value = json.dumps( + { + "subject_email": "e", + "subject_issuer": None, + "account_id": None, + "token_id": "tid", + "expires_at": None, + } + ).encode() + mock_redis.ttl.return_value = 42 + record_layer0_verdict("h1", "t1", True) + written = json.loads(mock_redis.setex.call_args.args[2]) + assert written["verified_tenants"] == {"t1": True} diff --git a/api/tests/unit_tests/libs/test_oauth_bearer_require_scope.py b/api/tests/unit_tests/libs/test_oauth_bearer_require_scope.py new file mode 100644 index 0000000000..f58471904c --- /dev/null +++ b/api/tests/unit_tests/libs/test_oauth_bearer_require_scope.py @@ -0,0 +1,85 @@ +"""require_scope is a route-level gate run after validate_bearer. +Tests use a fake auth_ctx attached directly to flask.g — no +authenticator wiring needed. 
+""" + +from __future__ import annotations + +import uuid + +import pytest +from flask import Flask, g +from werkzeug.exceptions import Forbidden + +from libs.oauth_bearer import ( + AuthContext, + Scope, + SubjectType, + require_scope, +) + + +@pytest.fixture +def app() -> Flask: + app = Flask(__name__) + app.config["TESTING"] = True + return app + + +def _ctx(scopes) -> AuthContext: + return AuthContext( + subject_type=SubjectType.ACCOUNT, + subject_email="user@example.com", + subject_issuer="dify:account", + account_id=uuid.uuid4(), + client_id="difyctl", + scopes=scopes, + token_id=uuid.uuid4(), + source="oauth_account", + expires_at=None, + token_hash="h1", + verified_tenants={}, + ) + + +def test_require_scope_allows_when_scope_present(app: Flask): + @require_scope("apps:read") + def view(): + return "ok" + + with app.test_request_context(): + g.auth_ctx = _ctx(frozenset({"apps:read"})) + assert view() == "ok" + + +def test_require_scope_rejects_when_scope_missing(app: Flask): + @require_scope("apps:write") + def view(): + return "ok" + + with app.test_request_context(): + g.auth_ctx = _ctx(frozenset({"apps:read"})) + with pytest.raises(Forbidden) as exc: + view() + assert "insufficient_scope: apps:write" in str(exc.value.description) + + +def test_require_scope_full_passes_any_check(app: Flask): + @require_scope("apps:write") + def view(): + return "ok" + + with app.test_request_context(): + g.auth_ctx = _ctx(frozenset({Scope.FULL})) + assert view() == "ok" + + +def test_require_scope_without_validate_bearer_raises_runtime_error(app: Flask): + @require_scope("apps:read") + def view(): + return "ok" + + with app.test_request_context(): + # No g.auth_ctx — validate_bearer was forgotten + with pytest.raises(RuntimeError, match="stack @validate_bearer above @require_scope"): + view() diff --git a/api/tests/unit_tests/libs/test_rate_limit_bearer.py b/api/tests/unit_tests/libs/test_rate_limit_bearer.py new file mode 100644 index 0000000000..b204575ccb --- 
/dev/null +++ b/api/tests/unit_tests/libs/test_rate_limit_bearer.py @@ -0,0 +1,74 @@ +"""Unit tests for the per-token bearer rate limit primitive.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +import pytest +from werkzeug.exceptions import TooManyRequests + +from libs.helper import RateLimiter +from libs.rate_limit import ( + LIMIT_BEARER_PER_TOKEN, + enforce_bearer_rate_limit, +) + + +@pytest.fixture +def mock_redis(): + return MagicMock() + + +def test_limit_bearer_per_token_uses_60_per_minute_default(): + assert LIMIT_BEARER_PER_TOKEN.limit == 60 + assert LIMIT_BEARER_PER_TOKEN.window == timedelta(minutes=1) + + +def test_seconds_until_available_returns_remaining_window(mock_redis): + """ZSET oldest entry score = 100; window = 60s; now = 130s → remaining = 30s.""" + rl = RateLimiter("rl:bearer:token", max_attempts=60, time_window=60, redis_client=mock_redis) + mock_redis.zrange.return_value = [(b"member-1", 100.0)] + with patch("libs.helper.time.time", return_value=130): + assert rl.seconds_until_available("k1") == 30 + + +def test_seconds_until_available_floor_one_second(mock_redis): + """Even when math says <1s remaining, return at least 1 so client backs off measurably.""" + rl = RateLimiter("rl:bearer:token", max_attempts=60, time_window=60, redis_client=mock_redis) + mock_redis.zrange.return_value = [(b"member-1", 119.5)] + with patch("libs.helper.time.time", return_value=180): + # window expired (180 > 119.5+60=179.5 by 0.5s) — bucket is actually free now + # but this method only called when is_rate_limited() == True; defensive floor. 
+ assert rl.seconds_until_available("k1") >= 1 + + +def test_seconds_until_available_empty_bucket(mock_redis): + """No entries → 1s sentinel (defensive; should not be reached when limited).""" + rl = RateLimiter("rl:bearer:token", max_attempts=60, time_window=60, redis_client=mock_redis) + mock_redis.zrange.return_value = [] + assert rl.seconds_until_available("k1") == 1 + + +@patch("libs.rate_limit._build_limiter") +def test_enforce_bearer_rate_limit_passes_under_limit(mock_build): + limiter = MagicMock() + limiter.is_rate_limited.return_value = False + mock_build.return_value = limiter + enforce_bearer_rate_limit("hash-1") + limiter.increment_rate_limit.assert_called_once_with("token:hash-1") + + +@patch("libs.rate_limit._build_limiter") +def test_enforce_bearer_rate_limit_raises_429_with_retry_after(mock_build): + limiter = MagicMock() + limiter.is_rate_limited.return_value = True + limiter.seconds_until_available.return_value = 23 + mock_build.return_value = limiter + with pytest.raises(TooManyRequests) as exc: + enforce_bearer_rate_limit("hash-1") + headers = dict(exc.value.get_response().headers) + assert headers.get("Retry-After") == "23" + body = exc.value.get_response().get_json() or {} + assert body.get("error") == "rate_limited" + assert body.get("retry_after_ms") == 23000 diff --git a/api/tests/unit_tests/libs/test_workspace_member_helper.py b/api/tests/unit_tests/libs/test_workspace_member_helper.py new file mode 100644 index 0000000000..540e19ad9e --- /dev/null +++ b/api/tests/unit_tests/libs/test_workspace_member_helper.py @@ -0,0 +1,94 @@ +"""Unit tests for require_workspace_member.""" + +from __future__ import annotations + +import uuid +from unittest.mock import MagicMock, patch + +import pytest +from werkzeug.exceptions import Forbidden + +from libs.oauth_bearer import AuthContext, Scope, SubjectType, require_workspace_member + + +def _ctx(verified: dict[str, bool] | None = None, *, account: bool = True) -> AuthContext: + return AuthContext( + 
subject_type=SubjectType.ACCOUNT if account else SubjectType.EXTERNAL_SSO, + subject_email="e@example.com", + subject_issuer=None, + account_id=uuid.uuid4() if account else None, + client_id="difyctl", + scopes=frozenset({Scope.FULL}), + token_id=uuid.uuid4(), + source="oauth_account", + expires_at=None, + token_hash="h1", + verified_tenants=dict(verified or {}), + ) + + +@patch("libs.oauth_bearer.dify_config") +def test_skips_when_enterprise_enabled(mock_cfg): + mock_cfg.ENTERPRISE_ENABLED = True + require_workspace_member(_ctx(), "t1") + + +@patch("libs.oauth_bearer.dify_config") +def test_skips_for_external_sso(mock_cfg): + mock_cfg.ENTERPRISE_ENABLED = False + require_workspace_member(_ctx(account=False), "t1") + + +@patch("libs.oauth_bearer.db") +@patch("libs.oauth_bearer.dify_config") +def test_uses_cached_ok_no_db_access(mock_cfg, mock_db): + mock_cfg.ENTERPRISE_ENABLED = False + require_workspace_member(_ctx({"t1": True}), "t1") + mock_db.session.execute.assert_not_called() + + +@patch("libs.oauth_bearer.db") +@patch("libs.oauth_bearer.dify_config") +def test_uses_cached_denied(mock_cfg, mock_db): + mock_cfg.ENTERPRISE_ENABLED = False + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + require_workspace_member(_ctx({"t1": False}), "t1") + mock_db.session.execute.assert_not_called() + + +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +@patch("libs.oauth_bearer.dify_config") +def test_denies_when_no_membership(mock_cfg, mock_db, mock_record): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.return_value.scalar_one_or_none.return_value = None + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + require_workspace_member(_ctx({}), "t1") + mock_record.assert_called_once_with("h1", "t1", False) + + +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +@patch("libs.oauth_bearer.dify_config") +def test_denies_when_account_inactive(mock_cfg, 
mock_db, mock_record): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.side_effect = [ + MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")), + MagicMock(scalar_one_or_none=MagicMock(return_value="banned")), + ] + with pytest.raises(Forbidden, match="workspace_membership_revoked"): + require_workspace_member(_ctx({}), "t1") + mock_record.assert_called_once_with("h1", "t1", False) + + +@patch("libs.oauth_bearer.record_layer0_verdict") +@patch("libs.oauth_bearer.db") +@patch("libs.oauth_bearer.dify_config") +def test_allows_active_member(mock_cfg, mock_db, mock_record): + mock_cfg.ENTERPRISE_ENABLED = False + mock_db.session.execute.side_effect = [ + MagicMock(scalar_one_or_none=MagicMock(return_value="join-id")), + MagicMock(scalar_one_or_none=MagicMock(return_value="active")), + ] + require_workspace_member(_ctx({}), "t1") + mock_record.assert_called_once_with("h1", "t1", True) diff --git a/api/tests/unit_tests/services/enterprise/test_app_permitted_service.py b/api/tests/unit_tests/services/enterprise/test_app_permitted_service.py new file mode 100644 index 0000000000..339f783ca8 --- /dev/null +++ b/api/tests/unit_tests/services/enterprise/test_app_permitted_service.py @@ -0,0 +1,57 @@ +from unittest.mock import patch + +import pytest + +from services.enterprise.app_permitted_service import PermittedAppsPage, list_permitted_apps +from services.errors.enterprise import EnterpriseAPIError + +WRAPPER = "services.enterprise.app_permitted_service.EnterpriseService.WebAppAuth.list_externally_accessible_apps" + + +def test_list_permitted_apps_decodes_camelcase_response(): + fake_body = { + "data": [{"appId": "a"}, {"appId": "b"}], + "total": 2, + "hasMore": False, + } + with patch(WRAPPER, return_value=fake_body) as m: + page = list_permitted_apps(page=1, limit=10) + + assert isinstance(page, PermittedAppsPage) + assert page.total == 2 + assert page.has_more is False + assert page.app_ids == ["a", "b"] + m.assert_called_once_with(page=1, 
limit=10, mode=None, name=None) + + +def test_list_permitted_apps_passes_filters_to_wrapper(): + fake_body = {"data": [], "total": 0, "hasMore": False} + with patch(WRAPPER, return_value=fake_body) as m: + list_permitted_apps(page=2, limit=5, mode="workflow", name="alpha") + + m.assert_called_once_with(page=2, limit=5, mode="workflow", name="alpha") + + +def test_list_permitted_apps_503_on_ee_error(): + with patch(WRAPPER, side_effect=EnterpriseAPIError("boom", status_code=500)): + from werkzeug.exceptions import ServiceUnavailable + + with pytest.raises(ServiceUnavailable): + list_permitted_apps(page=1, limit=10) + + +def test_list_permitted_apps_503_on_status_error(): + with patch(WRAPPER, side_effect=EnterpriseAPIError("bad key", status_code=401)): + from werkzeug.exceptions import ServiceUnavailable + + with pytest.raises(ServiceUnavailable): + list_permitted_apps(page=1, limit=10) + + +def test_list_permitted_apps_handles_empty_response(): + fake_body = {"data": [], "total": 0, "hasMore": False} + with patch(WRAPPER, return_value=fake_body): + page = list_permitted_apps(page=1, limit=10) + assert page.app_ids == [] + assert page.total == 0 + assert page.has_more is False diff --git a/api/tests/unit_tests/services/enterprise/test_enterprise_service.py b/api/tests/unit_tests/services/enterprise/test_enterprise_service.py index 6ad6a490b0..599a9a7b95 100644 --- a/api/tests/unit_tests/services/enterprise/test_enterprise_service.py +++ b/api/tests/unit_tests/services/enterprise/test_enterprise_service.py @@ -188,6 +188,31 @@ class TestWebAppAuth: req.send_request.assert_called_once_with("DELETE", "/webapp/clean", params={"appId": "a1"}) + def test_list_externally_accessible_apps_minimal_call(self): + with patch(f"{MODULE}.EnterpriseRequest") as req: + req.send_request.return_value = {"data": [], "total": 0, "hasMore": False} + result = EnterpriseService.WebAppAuth.list_externally_accessible_apps(page=1, limit=10) + + assert result == {"data": [], "total": 0, 
"hasMore": False} + req.send_request.assert_called_once_with( + "POST", + "/webapp/externally-accessible-apps", + json={"page": 1, "limit": 10}, + timeout=5.0, + ) + + def test_list_externally_accessible_apps_with_filters(self): + with patch(f"{MODULE}.EnterpriseRequest") as req: + req.send_request.return_value = {"data": [], "total": 0, "hasMore": False} + EnterpriseService.WebAppAuth.list_externally_accessible_apps(page=2, limit=5, mode="workflow", name="alpha") + + req.send_request.assert_called_once_with( + "POST", + "/webapp/externally-accessible-apps", + json={"page": 2, "limit": 5, "mode": "workflow", "name": "alpha"}, + timeout=5.0, + ) + class TestJoinDefaultWorkspace: def test_join_default_workspace_success(self): diff --git a/api/tests/unit_tests/services/openapi/__init__.py b/api/tests/unit_tests/services/openapi/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/unit_tests/services/openapi/test_mint_policy.py b/api/tests/unit_tests/services/openapi/test_mint_policy.py new file mode 100644 index 0000000000..7409a064a9 --- /dev/null +++ b/api/tests/unit_tests/services/openapi/test_mint_policy.py @@ -0,0 +1,126 @@ +"""Tests for the mint-policy validator. + +Cross-checks the (subject_type, prefix, scopes) triple a caller intends +to mint against ``MINTABLE_PROFILES``. The validator's defense-in-depth +value kicks in when a caller wires scopes or prefix from a non-canonical +source — the well-formed canonical path is the no-violation case. 
+""" + +from __future__ import annotations + +import pytest + +from libs.oauth_bearer import MINTABLE_PROFILES, Scope, SubjectType +from services.openapi.mint_policy import MintPolicyViolation, validate_mint_policy + + +def test_canonical_account_profile_passes(): + profile = MINTABLE_PROFILES[SubjectType.ACCOUNT] + validate_mint_policy( + subject_type=profile.subject_type, + prefix=profile.prefix, + scopes=profile.scopes, + ) + + +def test_canonical_external_sso_profile_passes(): + profile = MINTABLE_PROFILES[SubjectType.EXTERNAL_SSO] + validate_mint_policy( + subject_type=profile.subject_type, + prefix=profile.prefix, + scopes=profile.scopes, + ) + + +def test_wrong_prefix_rejected(): + with pytest.raises(MintPolicyViolation) as exc: + validate_mint_policy( + subject_type=SubjectType.ACCOUNT, + prefix="dfoe_", # SSO prefix on an account subject + scopes=frozenset({Scope.FULL}), + ) + assert "prefix" in str(exc.value) + + +def test_wrong_scopes_rejected(): + with pytest.raises(MintPolicyViolation) as exc: + validate_mint_policy( + subject_type=SubjectType.ACCOUNT, + prefix="dfoa_", + scopes=frozenset({Scope.APPS_RUN}), # account should be {FULL} + ) + assert "scopes" in str(exc.value) + + +def test_external_sso_with_full_scope_rejected(): + with pytest.raises(MintPolicyViolation): + validate_mint_policy( + subject_type=SubjectType.EXTERNAL_SSO, + prefix="dfoe_", + scopes=frozenset({Scope.FULL}), # FULL never applies to dfoe_ + ) + + +def test_message_carries_both_drift_reasons(): + """Mismatched prefix AND mismatched scopes both surface in one error.""" + with pytest.raises(MintPolicyViolation) as exc: + validate_mint_policy( + subject_type=SubjectType.ACCOUNT, + prefix="dfoe_", + scopes=frozenset({Scope.APPS_RUN}), + ) + msg = str(exc.value) + assert "prefix" in msg + assert "scopes" in msg + + +def test_license_required_decorator_skips_on_ce(): + from unittest.mock import patch + + from services.openapi.license_gate import license_required + + @license_required 
+ def view(): + return "ok" + + with patch("services.openapi.license_gate.dify_config") as cfg: + cfg.ENTERPRISE_ENABLED = False + assert view() == "ok" + + +def test_license_required_decorator_403_on_invalid_ee_license(): + from unittest.mock import patch + + from werkzeug.exceptions import Forbidden + + from services.openapi.license_gate import license_required + + @license_required + def view(): + return "ok" + + with ( + patch("services.openapi.license_gate.dify_config") as cfg, + patch("services.openapi.license_gate._is_license_valid", return_value=False), + ): + cfg.ENTERPRISE_ENABLED = True + with pytest.raises(Forbidden) as exc: + view() + assert "license_required" in exc.value.description + + +def test_license_required_decorator_passes_on_valid_ee_license(): + from unittest.mock import patch + + from services.openapi.license_gate import license_required + + @license_required + def view(): + return "ok" + + with ( + patch("services.openapi.license_gate.dify_config") as cfg, + patch("services.openapi.license_gate._is_license_valid", return_value=True), + ): + cfg.ENTERPRISE_ENABLED = True + assert view() == "ok" diff --git a/cli/.gitignore b/cli/.gitignore new file mode 100644 index 0000000000..5edc280fdc --- /dev/null +++ b/cli/.gitignore @@ -0,0 +1,7 @@ +dist/ +coverage/ +node_modules/ +oclif.manifest.json +*.tsbuildinfo +.vitest-cache/ +docs/specs/ diff --git a/cli/AGENTS.md b/cli/AGENTS.md new file mode 100644 index 0000000000..ec929c689f --- /dev/null +++ b/cli/AGENTS.md @@ -0,0 +1,99 @@ +# AGENTS.md — difyctl (TypeScript CLI) + +TypeScript port of difyctl. Stack: oclif 4.x, Node 22+, ESM, ky for HTTP, vitest, eslint via @antfu/eslint-config. + +> Architecture patterns, scaffolding recipe, printer chain, strategy pattern, testing conventions, anti-patterns: see **[`ARD.md`]**. + +## Code rules + +- **Spaces, not tabs.** +- **Minimum comments.** Code speak for self. 
Comment only non-obvious WHY — hidden constraints, subtle invariants, bug-workaround notes. Never restate code. Never reference tasks, PRs, current callers. +- **No magic strings or numbers.** Enums or named constants for bounded value sets. +- **No long positional arg lists.** Use options objects. +- **No long if/switch ladders on discriminator.** Polymorphism, dispatch tables, or strategy pattern. Name concept, let implementations plug in. +- **No `any`. No `unknown` outside genuine wire boundaries** (HTTP body parse, env vars). Narrow types everywhere else. +- **Avoid `!` non-null assertions.** Narrow instead. +- **`readonly` on inputs not mutated.** +- **Discriminated unions** for variant data (SSE events, run outputs, error shapes), not optional-field bags. +- **No backwards-compat shims.** No re-exports of old names, no `// removed:` markers, no deprecation notes. Delete, update callers. +- **No new dependencies without explicit approval.** +- **No CLI behavior changes in refactor commit.** Same flags, same output, same exit codes. +- **Every leaf command extends `DifyCommand`.** Add `static agentGuide` string when command benefits from agent workflow docs — see `src/commands/AGENTS.md`. + +## Layering + +| Layer | Path | Role | +| --------- | -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------- | +| commands | `src/commands/` | oclif command shells. Only place oclif imports run. | +| domain | `src/run/`, `src/get/`, etc. | Plain TS modules. Take typed deps via options. Testable without oclif. | +| api | `src/api/` | One typed client per resource. Each takes `KyInstance`. | +| http | `src/http/` | `createClient` + middleware (auth, retry, logging, error mapping). Only place ky runs. | +| io | `src/io/` | Streams + spinner. Fence between data-out and progress UI. 
| +| printers | `src/printers/` | `CompositePrintFlags` + `-o {json,yaml,name,wide,text}` matrix. | +| errors | `src/errors/` | `BaseError`, `ErrorCode` enum, `ExitCode` enum, dispatch table, `formatErrorForCli`. | +| guide | `src/commands/**//guide.ts` | Per-command agent guide string. Export `agentGuide`, assign `static agentGuide = agentGuide` in command class. Surfaced via `--help`. | +| cache | `src/cache/` | On-disk caches (app-info, etc.). | +| auth | `src/auth/` | Hosts file, token store, login flow. | +| config | `src/config/` | XDG dir resolution, config.yml load/save. | +| workspace | `src/workspace/` | Resolver: flag → env → bundle. | +| types | `src/types/` | Pure data + zod schemas for server contracts. No runtime imports outward. | + +## Command Structure + +Scaffold recipe + checklist: see `ARD.md §New command scaffold`. Full folder convention (subcommands, guide.ts): see `src/commands/AGENTS.md`. + +Layer rules: + +- Commands thin shells. Use `this.authedCtx(opts)` for bearer context; delegate to domain function. +- Domain receives deps via options; never imports oclif. +- Only `src/http/client.ts` and `src/api/*` import ky at runtime; elsewhere use `import type { KyInstance }`. +- `process.*` lives in `src/io/`, `src/config/dir.ts`, `src/util/browser.ts`. Nowhere else. +- No circular imports. `types/` pure leaf. + +## Dev commands + +```sh +pnpm install # one-time +pnpm dev [args...] # run CLI from source (no -- separator) +pnpm test # vitest +pnpm test:coverage # with coverage +pnpm type-check # tsc, no emit +pnpm lint # eslint +pnpm lint:fix # eslint --fix +pnpm build # production bundle + oclif manifest +pnpm manifest # regenerate oclif.manifest.json only +``` + +`make` covers `build` / `test` / `release` / `ci` as no-arg targets. Dev runs use `pnpm dev` directly. + +## Tests + +- Behavior tests run against real Hono mock at `test/fixtures/dify-mock/`. No `nock`, `msw`, or `fetchMock` — every test exercises real HTTP. 
+- Test files co-located: `foo.test.ts` next to `foo.ts`.
+- Type-check, lint, full test suite must be green before any commit.
+
+## Spec docs (`docs/specs/`)
+
+Behavior contracts. Living tree — amended in place, no version subfolders.
+
+**Keep:** HTTP wire shape (req/resp JSON, headers, status codes), SQL DDL, Redis keys + TTL, state transitions, audit event names + payload, error/exit codes, rate-limit values, JWS/cookie envelope claims.
+
+**Cut:** language type decls, internal helper sigs, decorator snippets, file-path tables, pseudocode mirroring code, "Open items"/"Handler walk"/"CI guard"/"Migration" sections, rationale (`Rejected:`/`Why X not Y`/`Historical note:`/product comparisons), release-pipeline lines, version-pinning (`in v1.0`, `post-v1.0`, milestone codes), frontmatter `date`/`status`/`author`.
+
+**Test:** "rewrite in Rust tomorrow, does spec hold?" HTTP/SQL/Redis stays; type defs go.
+
+**Rules:** behavior, not rationale. One topic per file; cross-refs = `auth.md §Storage`. Tables beat prose. Code wins on drift — update spec.
+
+## Out of scope for unrelated work
+
+Do not modify in passing:
+
+- `test/fixtures/dify-mock/` public surface (endpoints, JSON shapes, status codes, scenario names) — that's the dify-api contract.
+- `bin/`, `scripts/`, `Makefile`, `eslint.config.js`, `tsconfig*.json`, `package.json` (unless the change is required by the task).
+
+## Commits
+
+- One concern per commit. Style: `<type>(<scope>): <subject>` lowercase. Body explains why if non-obvious.
+- Never push, amend, force-push, or skip hooks (`--no-verify`) without explicit user approval.
+
+[`ARD.md`]: ARD.md
diff --git a/cli/ARD.md b/cli/ARD.md
new file mode 100644
index 0000000000..25242ba949
--- /dev/null
+++ b/cli/ARD.md
@@ -0,0 +1,344 @@
+# ARD — Architecture & Design Reference
+
+Onboarding ref for `dify/cli/` contributors. Covers canonical patterns, layer contracts, scaffolding recipe, dev workflow, anti-patterns. Read before adding command or touching shared infra.
+
+Spec authority: [`docs/specs/`]. Specs own HTTP wire shape + server behavior; this file owns CLI code structure.
+
+---
+
+## Project layout
+
+```
+src/
+  commands/    one folder per command leaf
+  api/         HTTP client wrappers (one file per resource)
+  auth/        hosts.yml read/write
+  cache/       app-info cache
+  config/      config.yml read/write
+  errors/      BaseError, ErrorCode, exit codes
+  http/        ky client factory + middleware
+  io/          IOStreams, spinner, printer chain
+  limit/       --limit flag parsing
+  types/       shared TypeScript types
+  util/        small pure helpers
+  workspace/   workspace ID resolution
+```
+
+---
+
+## New command scaffold
+
+Recipe for adding command leaf. Follow order.
+
+**1. Create folder**
+
+```
+src/commands/<group>/<command>/
+```
+
+Examples: `get/app/`, `auth/devices/revoke/`, `describe/app/`.
+
+**2. Mandatory files**
+
+| File       | Responsibility                                                                           |
+| ---------- | ---------------------------------------------------------------------------------------- |
+| `index.ts` | oclif `Command` subclass. Flag/arg declaration + `run()` wiring only. No business logic. |
+| `run.ts`   | Pure async function. Typed options + deps. Returns string. No `@oclif/core` imports.     |
+
+**3. Optional files — add as needed**
+
+| File               | Purpose                                             |
+| ------------------ | --------------------------------------------------- |
+| `handlers.ts`      | Output format handlers (text, table, etc.)          |
+| `print-flags.ts`   | `--output` flag → printer resolution                |
+| `payload-shape.ts` | Response type narrowing/transformation              |
+| `run.test.ts`      | Behavior tests against `run.ts`                     |
+| `guide.ts`         | Agent onboarding text — exports `agentGuide` string |
+
+**4. 
Checklist**
+
+- [ ] `index.ts` extends `DifyCommand`
+- [ ] Authed command calls `this.authedCtx()`; non-authed skips
+- [ ] No try/catch in `run()` — `DifyCommand.catch()` handles `BaseError`
+- [ ] `run.ts` returns string; no direct stdout write
+- [ ] `run.ts` no `@oclif/core` imports
+- [ ] HTTP client via factory dep, not direct
+- [ ] `run.test.ts` written before impl (test-first)
+- [ ] `pnpm manifest` run after adding command (updates `oclif.manifest.json`)
+- [ ] README command table updated by hand
+
+---
+
+## DifyCommand base class
+
+All commands extend `DifyCommand`, not `Command`.
+
+```typescript
+export default class MyCommand extends DifyCommand {
+  async run(): Promise<void> {
+    const { args, flags } = await this.parse(MyCommand)
+
+    // Authed: authedCtx() sets outputFormat + builds context
+    const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], format: flags.output })
+
+    process.stdout.write(await runMyThing({ /* args */ }, { bundle: ctx.bundle, http: ctx.http, io: ctx.io }))
+  }
+}
+```
+
+**`authedCtx(opts)`** — wraps `buildAuthedContext`. Sets `this.outputFormat` as side effect. Required for any command needing bearer token.
+
+**`catch(err)` override** — auto-handles `BaseError` with format-aware serialization. Never wrap `run()` in try/catch. Throw `BaseError`; base class catches.
+
+---
+
+## Error handling
+
+Throw `BaseError`. Never throw raw `Error` for domain failures.
+
+```typescript
+import { BaseError } from '../../errors/base.js'
+import { ErrorCode } from '../../errors/codes.js'
+
+throw new BaseError({
+  code: ErrorCode.UsageMissingArg,
+  message: 'workspace id required',
+  hint: 'pass --workspace or run \'difyctl auth use <workspace>\'',
+})
+```
+
+`ErrorCode` exhaustive const object — never use raw strings. `exitFor(code)` maps to exit codes auto. `DifyCommand.catch()` calls `formatErrorForCli` with `outputFormat` so JSON/YAML consumers get machine-readable error output.
+ +| Exit | Meaning | +| ---- | ----------------------------------------- | +| 0 | Success | +| 1 | Generic error | +| 2 | Usage error (bad flag, missing arg) | +| 4 | Auth error (not logged in, token expired) | +| 6 | Version/compat error | + +New error code: add to `ErrorCode` + map to `ExitCode` in `codes.ts`. Never scatter exit codes inline. + +--- + +## IOStreams + +I/O context passed through every layer. Carries stdout, stderr, stdin, TTY flags, `outputFormat`. + +```typescript +export type IOStreams = { + out: NodeJS.WritableStream + err: NodeJS.WritableStream + in: NodeJS.ReadableStream + isOutTTY: boolean + isErrTTY: boolean + outputFormat: string // 'json' | 'yaml' | 'name' | 'wide' | '' +} +``` + +| Factory | When | +| --------------------- | --------------------------------- | +| `realStreams(format)` | Production — wraps `process.std*` | +| `bufferStreams()` | Tests — captures output in memory | +| `nullStreams()` | When IO irrelevant | + +`outputFormat` set at construction. Do not mutate. Do not pass `format` as separate arg downstream — put in `IOStreams`, pass struct. + +--- + +## Spinner + +`runWithSpinner` wraps async call with animated spinner on stderr. Auto-disables for structured output — no manual `enabled:` flag needed. + +```typescript +const result = await runWithSpinner( + { io, label: 'Fetching apps' }, + () => client.list(params), +) +``` + +`STRUCTURED_FORMATS = new Set(['json', 'yaml', 'name'])` drives disable check. New structured format = add to this set only — no other callsites change. + +Only override `enabled` for intentional suppression (e.g., tests using `bufferStreams` already suppress via `isErrTTY: false`). + +--- + +## Printer chain + +Output rendering separated from data fetching. + +1. `run.ts` returns string — rendered result. +1. `handlers.ts` defines format handlers (`TextHandler`, `TableHandler`, etc.). +1. `print-flags.ts` maps `--output` value to correct handler. 
+ +```typescript +// run.ts +const printer = new AppPrintFlags().toPrinter(format) +return printer.print(data) +``` + +New output format: implement handler interface, register in `print-flags.ts`. Never add `if (format === 'json')` branches in `run.ts`. + +--- + +## Strategy pattern (mode dispatch) + +Singleton strategies + picker function. No switch ladders on discriminator. + +```typescript +export type RunStrategy = { + execute: (ctx: RunContext) => Promise +} + +const blocking = new BlockingStrategy() +const streamingText = new StreamingTextStrategy() +const streamingStructured = new StreamingStructuredStrategy() + +export function pickStrategy(useStream: boolean, isText: boolean): RunStrategy { + if (!useStream) + return blocking + return isText ? streamingText : streamingStructured +} +``` + +New mode = new class + one line in picker. Singletons avoid per-call allocation. + +--- + +## HTTP clients + +One file per resource under `src/api/`. Each exports class wrapping `KyInstance`. + +```typescript +export class AppsClient { + private readonly http: KyInstance + constructor(http: KyInstance) { this.http = http } + + async list(params: ListParams): Promise { /* ... */ throw new Error('elided') } + async describe(id: string, workspaceId: string, fields: string[]): Promise { /* ... */ throw new Error('elided') } +} +``` + +Inject via factory dep in `run.ts` for testability: + +```typescript +type GetAppDeps = { + appsFactory?: (http: KyInstance) => AppsClient +} +// default: (h) => new AppsClient(h) +``` + +Never instantiate clients in `index.ts`. + +--- + +## Testing + +**Test-first.** Write failing test, run to confirm fail, then implement. + +Tests live in `run.test.ts` alongside command. Test `run.ts` direct — never oclif `Command` class. 
+ +```typescript +const io = bufferStreams() +const result = await runGetApp( + { format: 'json', appId: 'app-1' }, + { bundle, http: mockHttp, io, appsFactory: () => fakeClient }, +) +expect(JSON.parse(result).data).toHaveLength(1) +``` + +### dify-mock fixture server + +`test/fixtures/dify-mock/server.ts` mirrors `/openapi/v1/*`. Each test starts isolated instance: + +```typescript +import { startMock } from '../../../test/fixtures/dify-mock/server.js' + +const mock = await startMock({ scenario: 'happy' }) +// ... test against mock.url ... +await mock.stop() +``` + +| Scenario | Effect | +| ----------------- | ----------------------------------------------------------------------------- | +| `happy` (default) | Standard fixtures: 4 apps across 2 workspaces, 2 workspaces, 1 active session | +| `sso` | `/workspaces` returns empty (external-SSO bearer model) | +| `expired` | All authenticated routes return 401 `auth_expired` | +| `pagination` | `/apps` honors `?page=` + `?limit=`, total > one page | +| `slow` | Adds `Retry-After: 1` to GETs to test ky retry behavior | + +New scenario: extend `Scenario` union in `scenarios.ts`, branch in relevant handler. No per-test mocks — one fixture surface keeps tests aligned with real API. + +### Assertions + +Inline string/regex/JSON checks — no golden files. 
+ +```typescript +expect(out).toMatch(/^ID\s+NAME\s+ROLE/) +expect(JSON.parse(out).workspaces).toHaveLength(2) +``` + +--- + +## Scripts + +| Command | When to run | +| ----------------------- | -------------------------------------------------- | +| `pnpm dev [args]` | Run CLI from source during dev | +| `pnpm test` | Full vitest suite — run before every commit | +| `pnpm test:coverage` | Coverage report | +| `pnpm type-check` | `tsc --noEmit` — catches type errors without build | +| `pnpm lint` | ESLint check | +| `pnpm lint:fix` | ESLint auto-fix (perfectionist sort, chaining) | +| `pnpm build` | Production bundle + `oclif manifest` | +| `pnpm manifest` | Regenerate `oclif.manifest.json` only | +| `pnpm pack:tarballs` | Build distributable tarballs (release only) | + +**`pnpm manifest` rule:** run after adding, removing, renaming any command, flag, or arg. Manifest = runtime command registry — stale manifest causes silent flag failures at runtime. + +**README hand-maintained.** `oclif readme` incompatible with this monorepo setup. When adding command, update command table in `README.md` manually. + +--- + +## Lint rules that catch contributors + +Repo runs `@antfu/eslint-config` + perfectionist + unicorn. + +| Rule | What it catches | +| ---------------------------------- | -------------------------------------------------- | +| `perfectionist/sort-named-imports` | Alphabetical, case-insensitive | +| `perfectionist/sort-imports` | Relative imports last; `import type` first | +| `antfu/consistent-chaining` | Long `.foo().bar().baz()` must split across lines | +| `unicorn/no-new-array` | Use `Array.from({ length: n })` not `new Array(n)` | +| `noUncheckedIndexedAccess` (tsc) | `arr[i]` is `T \| undefined`; guard before use | + +`pnpm lint:fix` resolves perfectionist + chaining auto. + +--- + +## PR conventions + +- One feature, one PR. Bundle test + impl + doc update. +- Branch off `feat/cli`. Never target `main`. +- Commit style: `(cli): `. 
Types: `feat`, `fix`, `refactor`, `docs`, `chore`. Body explains why if non-obvious.
+- Plan/spec/superpowers files do not ship in CLI commits.
+- Verify diff before committing — `.local.json` and `.vitest-cache/` gitignored but check anyway.
+
+---
+
+## Anti-patterns
+
+| Pattern                                                              | Do instead                                              |
+| -------------------------------------------------------------------- | ------------------------------------------------------- |
+| `if (format === 'json') { ... }` in `run.ts`                         | Printer handler per format                              |
+| `try { ... } catch (e) { if (isBaseError(e)) ... }` in every command | Throw `BaseError`; `DifyCommand.catch()` handles        |
+| Raw string error codes `'not_logged_in'`                             | `ErrorCode.NotLoggedIn`                                 |
+| `enabled: !isHuman` in `runWithSpinner`                              | Set `outputFormat` on `IOStreams`; spinner auto-detects |
+| Long positional arg lists                                            | Options struct                                          |
+| `Record<string, Strategy>` dispatch map                              | Named singletons + picker function                      |
+| `@oclif/core` import in `run.ts`                                     | Keep oclif in `index.ts` only                           |
+| `buildAuthedContext(this, opts)` in command body                     | `this.authedCtx(opts)`                                  |
+| `console.log` in `src/`                                              | Return string from `run.ts`; write in `index.ts`        |
+| New dependency without approval                                      | Check first                                             |
+
+[`docs/specs/`]: docs/specs/
diff --git a/cli/Dockerfile b/cli/Dockerfile
new file mode 100644
index 0000000000..846f543787
--- /dev/null
+++ b/cli/Dockerfile
@@ -0,0 +1,19 @@
+# difyctl container image.
+#
+# Build:
+# docker build --build-arg VERSION=<version> -f cli/Dockerfile -t ghcr.io/langgenius/difyctl:<version> cli/
+#
+# The image installs from the published npm package, so multi-arch is handled by
+# the base image (node:22-alpine ships linux/amd64 + linux/arm64) and the npm
+# package contains the platform-agnostic ESM bundle plus napi-rs prebuilds for
+# both targets.
+ +FROM node:22-alpine + +ARG VERSION +RUN test -n "$VERSION" || (echo "VERSION build-arg is required" >&2 && exit 1) \ + && npm install -g --no-fund --no-audit "@langgenius/difyctl@${VERSION}" \ + && npm cache clean --force + +ENTRYPOINT ["difyctl"] +CMD ["--help"] diff --git a/cli/Makefile b/cli/Makefile new file mode 100644 index 0000000000..aba79803ad --- /dev/null +++ b/cli/Makefile @@ -0,0 +1,58 @@ +# Difyctl +# +# `make help` lists everything. +# +# Usage examples: +# make install # install dependencies +# make build # production bundle (dist/) + manifest +# pnpm dev run app id "hi" # run the CLI from source (use pnpm, not make) +# make test # vitest +# make ci # full CI pipeline +# make release VERSION=1.2.3 # multi-platform tarballs +# make version # print resolved buildinfo +# make clean # delete build artifacts + + +PNPM ?= pnpm + +.PHONY: help install build test coverage lint fix typecheck manifest version ci release clean + +help: ## Show this help. + @awk 'BEGIN{FS = ":.*## "; printf "Targets:\n"} /^[a-zA-Z_-]+:.*## / {printf " \033[36m%-12s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) + +install: ## Install dependencies (pnpm install). + @$(PNPM) install + +build: ## Production bundle into dist/ + oclif manifest. + @$(PNPM) build + +test: ## Run unit + integration tests (vitest). + @$(PNPM) test + +coverage: ## Run tests with coverage report. + @$(PNPM) test:coverage + +lint: ## Lint without auto-fix. + @$(PNPM) lint + +fix: ## Lint with auto-fix. + @$(PNPM) lint:fix + +typecheck: ## TypeScript type-check (no emit). + @$(PNPM) type-check + +manifest: ## Regenerate oclif command manifest. + @$(PNPM) manifest + +version: ## Print resolved buildinfo (what a build would inject). + @$(PNPM) exec tsx scripts/print-buildinfo.ts + +ci: typecheck lint coverage build ## Full CI pipeline (typecheck, lint, coverage, build). + +release: ## Multi-platform tarballs. Reads channel/version from cli/package.json. + @scripts/release.sh + +clean: ## Remove build artifacts. 
+ @rm -rf dist oclif.manifest.json node_modules/.cache + +.DEFAULT_GOAL := help diff --git a/cli/README.md b/cli/README.md new file mode 100644 index 0000000000..c2cfb9a06d --- /dev/null +++ b/cli/README.md @@ -0,0 +1,117 @@ +# difyctl + +CLI client for [Dify] platform. Browser device-flow signin, list/inspect apps, run with structured input, parse output as JSON, YAML, or human text. + +## Install + +### npm + +```sh +npm install -g @langgenius/difyctl +``` + +### Tarball + +```sh +# macOS arm64 +curl -fsSL https://github.com/langgenius/dify/releases/latest/download/difyctl-darwin-arm64.tar.xz | tar xJ -C /usr/local +ln -sf /usr/local/difyctl/bin/difyctl /usr/local/bin/difyctl + +# Linux x64 +curl -fsSL https://github.com/langgenius/dify/releases/latest/download/difyctl-linux-x64.tar.xz | tar xJ -C /opt +ln -sf /opt/difyctl/bin/difyctl /usr/local/bin/difyctl +``` + +Other targets: `darwin-x64`, `linux-arm64`, `win32-x64`. + +### Container + +```sh +docker run --rm -it -v "$HOME/.config/difyctl:/root/.config/difyctl" \ + ghcr.io/langgenius/difyctl:latest version +``` + +## Quickstart + +```sh +difyctl auth login # opens browser; paste the device code shown +difyctl get app # list apps in default workspace +difyctl describe app # inspect parameters +difyctl run app "hello" # run, blocking +difyctl run app "hello" -o json | jq .answer # JSON output +difyctl run app --input name=world --input topic=cats # workflow inputs +``` + +Background docs: `difyctl help account`, `difyctl help external`, `difyctl help environment`. + +## Commands + +| Group | Commands | +| ---------- | -------------------------------------------------------------------------------------------------- | +| `auth` | `login`, `logout`, `status`, `whoami`, `use `, `devices list/revoke` | +| `get` | `get app [] [-A] [--mode] [--name] [--tag] [-o json\|yaml\|name\|wide]`, `get workspace` | +| `describe` | `describe app [--refresh] [-o json\|yaml]` | +| `run` | `run app [] [--input k=v]... 
[--conversation ] [--stream] [-o json\|yaml\|text]` | +| `config` | `view`, `get `, `set `, `unset `, `path` | +| `env` | `list` | +| `help` | `account`, `external`, `environment` | +| `version` | `version [--json]` | + +Run `difyctl --help` for per-command reference. + +## Output formats + +| Flag | Behavior | +| --------- | ------------------------------------------------------ | +| (none) | Human table, columns auto-sized to terminal. | +| `-o wide` | Same as table, no column truncation. | +| `-o json` | Pretty-printed JSON, machine-parseable, stable shape. | +| `-o yaml` | YAML mirror of `-o json`. | +| `-o name` | IDs only, newline-separated — pipes into `xargs`. | +| `-o text` | kubectl-describe style human text (`describe`, `run`). | + +Errors emit JSON envelope to stderr in `-o json` mode; else human message. Exit codes deterministic. + +## Configuration + +| OS | Config path | +| ------- | -------------------------------------------- | +| Linux | `${XDG_CONFIG_HOME:-$HOME/.config}/difyctl/` | +| macOS | `$HOME/.config/difyctl/` | +| Windows | `%APPDATA%\difyctl\` | + +Override with `DIFY_CONFIG_DIR=/some/path`. Files written `0600`, directory `0700`. Tokens use OS keychain by default, fall back to sealed file on hosts without one. + +For every env var `difyctl` reads, run `difyctl env list` (machine-readable) or `difyctl help environment` (narrative). + +## Streaming + +`run app` uses blocking transport by default. For long-running apps (likely exceed ~30s) pass `--stream`: + +```sh +difyctl run app app-1 "tell me about cats" --stream +``` + +Agent apps (`mode === 'agent-chat'` or `is_agent` flag set) stream regardless — Dify backend rejects blocking requests for agent mode. Combining `--stream` with `-o json` or `-o yaml` aggregates SSE events into same envelope shape as blocking response, so structured output identical regardless of transport. 
+ +## HTTP retry + +Idempotent requests (`GET`, `PUT`, `DELETE`) retry on transient network/DNS failures with exponential backoff. Default count: **3**. `POST` and `PATCH` never retry — side effects possible. + +| Knob | Effect | +| ------------------------ | ---------------------------------------------- | +| `--http-retry ` | Per-invocation override. `0` disables retries. | +| `DIFYCTL_HTTP_RETRY=` | Process-level default. | + +Resolution: flag → env → 3. + +## Contributing + +See [`ARD.md`] for architecture patterns, scaffolding recipe, dev workflow. + +## License + +Apache-2.0. + +[Dify]: https://dify.ai +[`ARD.md`]: ARD.md diff --git a/cli/bin/dev.js b/cli/bin/dev.js new file mode 100755 index 0000000000..5ab99f9a76 --- /dev/null +++ b/cli/bin/dev.js @@ -0,0 +1,12 @@ +#!/usr/bin/env -S node --import tsx + +import { execute } from '@oclif/core' + +globalThis.__DIFYCTL_VERSION__ = process.env.DIFYCTL_VERSION ?? '0.0.0-dev' +globalThis.__DIFYCTL_COMMIT__ = process.env.DIFYCTL_COMMIT ?? 'HEAD' +globalThis.__DIFYCTL_BUILD_DATE__ = process.env.DIFYCTL_BUILD_DATE ?? new Date().toISOString() +globalThis.__DIFYCTL_CHANNEL__ = process.env.DIFYCTL_CHANNEL ?? 'dev' +globalThis.__DIFYCTL_MIN_DIFY__ = process.env.DIFYCTL_MIN_DIFY ?? '0.0.0' +globalThis.__DIFYCTL_MAX_DIFY__ = process.env.DIFYCTL_MAX_DIFY ?? '0.0.0' + +await execute({ development: true, dir: import.meta.url }) diff --git a/cli/bin/run.js b/cli/bin/run.js new file mode 100755 index 0000000000..92b78ec67a --- /dev/null +++ b/cli/bin/run.js @@ -0,0 +1,5 @@ +#!/usr/bin/env node + +import { execute } from '@oclif/core' + +await execute({ dir: import.meta.url }) diff --git a/cli/docs/auth-storage.md b/cli/docs/auth-storage.md new file mode 100644 index 0000000000..da531f5d62 --- /dev/null +++ b/cli/docs/auth-storage.md @@ -0,0 +1,82 @@ +# difyctl — token storage backends + +How `difyctl` decides where to store the OAuth bearer it acquires from the Dify API service. 
Two backends, OS-aware probe, deterministic fallback. + +> Spec authority for the auth model itself: [`docs/specs/auth.md`]. This file documents the **storage layer** only — what backend is selected, when, and how to override it. + +## Backends + +| Backend | Module | When selected | What's stored | +| -------------- | ---------------------------- | ---------------------------------------- | ----------------------------------------- | +| OS keychain | `@napi-rs/keyring` | Probe succeeds (default) | Bearer + refresh blob, opaque to disk | +| Encrypted file | `cli/src/auth/file-store.ts` | Probe fails or `DIFY_TOKEN_STORAGE=file` | `0600` JSON at `/tokens.json` | + +The file backend is **not** plaintext: token bytes are sealed via a key derived from the OS user's HOME path and a process-stable salt. It exists so that `difyctl` works on minimal Linux containers without a Secret Service / keyctl, and on macOS hosts where Keychain access is administratively blocked. + +## Selection algorithm + +``` +1. read DIFY_TOKEN_STORAGE — values: keychain | file | auto (default) +2. if value == file: use file backend, exit +3. if value == keychain: probe; on failure exit 4 (auth_expired) + hint +4. if value == auto/unset: + a. probe keychain + b. if probe ok: use keychain + c. if probe ENOTSUP: fall back to file (silent) + d. if probe ESERVICE: fall back to file + stderr warning +``` + +The probe is a no-op write of a sentinel record under the `difyctl` service name, followed by read-back and delete. It runs once per process; the result is cached on the bundle so command bodies do not re-probe. 
+ +## Per-OS expectations + +| Platform | Backend | Notes | +| ------------------------------- | ------------------------------ | ------------------------------------------------------------------ | +| macOS 12+ | Keychain via Keychain Services | One entry per `(host, subject_id)` under service `difyctl` | +| Linux + GNOME / KDE / KeePassXC | Secret Service via `libsecret` | Falls back to file in headless containers | +| Linux + keyctl-only kernels | File | `@napi-rs/keyring` does not bind keyctl; no probe attempted | +| Windows 10/11 | Credential Manager | Service: `difyctl/` | +| WSL | Same as Linux | The probe inherits whatever Secret Service the WSL distro provides | + +## Prebuild matrix + +`@napi-rs/keyring` ships native binaries for the targets the CLI distribution covers. The CI matrix below mirrors the oclif tarball matrix: + +| os/arch | binary | source | +| ----------------- | ------------------------------ | ----------- | +| `darwin-arm64` | `keyring.darwin-arm64.node` | npm package | +| `darwin-x64` | `keyring.darwin-x64.node` | npm package | +| `linux-x64-gnu` | `keyring.linux-x64-gnu.node` | npm package | +| `linux-arm64-gnu` | `keyring.linux-arm64-gnu.node` | npm package | +| `win32-x64` | `keyring.win32-x64-msvc.node` | npm package | + +If a platform / arch lacks a prebuild and `@napi-rs/keyring` cannot install at runtime, the file backend takes over without user intervention. + +## Manual override + +Operators force a backend via env var: + +```sh +DIFY_TOKEN_STORAGE=keychain difyctl auth login # require keychain (fail loud) +DIFY_TOKEN_STORAGE=file difyctl auth login # force file backend +``` + +Use the `file` form on shared CI runners where the Secret Service is unavailable but you still want a per-runner token. Use `keychain` on developer machines to fail fast if the keychain is locked or denied. 
+ +## Failure modes + +| Symptom | Cause | Recovery | +| ------------------------------------------------------------- | -------------------------------------------------------------------------------------- | --------------------------------------------------------------- | +| `auth login` succeeds but `auth status` says "no credentials" | Keychain wrote, file path probed at read time | Set `DIFY_TOKEN_STORAGE=file` to pin one backend | +| `keychain probe failed (errno -2)` | Linux container without `libsecret`, no fallback because `DIFY_TOKEN_STORAGE=keychain` | Unset the env var or set `=file` | +| Multiple `difyctl/` entries in Keychain | Hosts changed but old entries not pruned | `auth logout --all-hosts` (post-v1.0); for v1.0, prune manually | +| Tokens disappear on macOS after every reboot | Keychain locked or in iCloud-only mode | Either unlock the login keychain or fall back to file | + +## Source pointers + +- Backend selection: `cli/src/auth/storage.ts` +- Keychain wrapper: `cli/src/auth/keychain.ts` (uses `@napi-rs/keyring`) +- File backend: `cli/src/auth/file-store.ts` +- Probe gate: `cli/src/auth/probe.ts` + +[`docs/specs/auth.md`]: specs/auth.md diff --git a/cli/docs/build.md b/cli/docs/build.md new file mode 100644 index 0000000000..ff3a4f1fef --- /dev/null +++ b/cli/docs/build.md @@ -0,0 +1,115 @@ +# difyctl — build + +How to build, package, and ship `difyctl` from the dify monorepo. + +> Spec authority for distribution: [`docs/specs/README.md`]. This file documents the **mechanics** — commands, define knobs, tarball production. 
+ +## Toolchain + +| Tool | Version | Source | +| ---------------- | ----------- | -------------------------------------------- | +| Node | `^22.22.1` | `package.json#engines` | +| pnpm | `10.33.0` | `package.json#packageManager` | +| TypeScript | catalog pin | `pnpm catalog` | +| vite-plus (`vp`) | catalog pin | `pnpm catalog` — wraps Vite for tests + pack | +| oclif | catalog pin | command discovery, manifest, tarball builder | + +Versions above are what the build pipeline targets. Install Node + pnpm via whatever method works for your environment. + +## Quick start + +```sh +# inside dify/ monorepo root +pnpm install +pnpm --filter @langgenius/difyctl build +``` + +This produces: + +- `cli/dist/` — TypeScript output (commands, source map, .d.ts) +- `cli/oclif.manifest.json` — oclif command index (used at runtime to skip filesystem scan) + +## Scripts + +| Script | What it does | +| ------------------------------------------------- | ------------------------------------------------------------------- | +| `pnpm --filter @langgenius/difyctl build` | `vp pack && oclif manifest` — production bundle + manifest refresh | +| `pnpm --filter @langgenius/difyctl dev` | `tsx bin/dev.js` — runs CLI from TS source, no build step | +| `pnpm --filter @langgenius/difyctl test` | `vp test` — vitest suite (40+ files, ~400+ behavior tests) | +| `pnpm --filter @langgenius/difyctl lint` | eslint — antfu config + perfectionist sort + unicorn rules | +| `pnpm --filter @langgenius/difyctl type-check` | `tsc` — strict mode, `noUncheckedIndexedAccess` | +| `pnpm --filter @langgenius/difyctl manifest` | `oclif manifest` — refresh `oclif.manifest.json` only | +| `pnpm --filter @langgenius/difyctl readme` | `oclif readme` — regenerate command reference in `cli/README.md` | +| `pnpm --filter @langgenius/difyctl pack:tarballs` | `oclif pack tarballs --xz --parallel` — multi-target tarball matrix | + +## Build-time defines (vite-plus) + +`vite.config.ts` injects a small set of constants at 
pack time. These replace what the Go port did via `-ldflags -X`: + +| Define | Source | Read from | +| ------------------------ | ----------------------------------------------------------------------------- | ------------------------- | +| `__DIFYCTL_VERSION__` | `DIFYCTL_VERSION` env var (else `package.json#version`) | `cli/src/version/info.ts` | +| `__DIFYCTL_COMMIT__` | `DIFYCTL_COMMIT` env var (else `git rev-parse HEAD`) | `cli/src/version/info.ts` | +| `__DIFYCTL_BUILD_DATE__` | `DIFYCTL_BUILD_DATE` env var (else `new Date().toISOString()`) | `cli/src/version/info.ts` | +| `__DIFYCTL_CHANNEL__` | `DIFYCTL_CHANNEL` env var (default `stable`; values: `stable`, `beta`, `dev`) | `cli/src/version/info.ts` | + +In dev (`bin/dev.js`), `globalThis.__DIFYCTL_*__` are set from the same env vars at startup, so `difyctl version` reflects the local checkout without a rebuild. + +## Tarball production + +```sh +# default — every supported (os, arch) target +pnpm --filter @langgenius/difyctl pack:tarballs + +# subset — single target +pnpm --filter @langgenius/difyctl exec \ + oclif pack tarballs --xz --targets=darwin-arm64 +``` + +Outputs land in `cli/dist/`. Each tarball bundles the Node binary for that target (oclif fetches from `nodejs.org`), the compiled JS, and the prebuild for `@napi-rs/keyring` matching that target. + +Supported targets (matches `auth-storage.md` matrix): + +- `darwin-arm64` +- `darwin-x64` +- `linux-x64-gnu` +- `linux-arm64-gnu` +- `win32-x64` + +## Container image + +```sh +docker build \ + --build-arg VERSION=$(node -p "require('./cli/package.json').version") \ + -f cli/Dockerfile \ + -t ghcr.io/langgenius/difyctl:dev cli/ +``` + +The Dockerfile uses `node:22-alpine` and `npm install -g @langgenius/difyctl@${VERSION}` so the CI release pipeline does not need to ship multi-arch tarballs separately for container users — it just publishes to npm and rebuilds the image. 
+ +## Release flow + +The dify release workflow ships a `release-cli` job that fans out from the dify version tag: + +1. `pnpm --filter @langgenius/difyctl build` +1. `oclif manifest` (already part of build) +1. `oclif pack tarballs --xz --parallel` +1. `pnpm publish --access public --tag latest` (requires `NPM_TOKEN`) +1. `softprops/action-gh-release` attaches tarballs + install scripts to the GitHub release +1. `docker buildx build --push` for the container image + +CLI version equals dify version; parity enforced at tag time. + +## Local install for smoke testing + +```sh +# from a fresh clone — verify the build works end to end +pnpm install +pnpm --filter @langgenius/difyctl build +pnpm --filter @langgenius/difyctl exec oclif pack tarballs --targets=$(uname -s | tr 'A-Z' 'a-z')-$(uname -m | sed 's/x86_64/x64/;s/aarch64/arm64/') +ls cli/dist/*.tar.xz +``` + +The resulting tarball is a self-contained Node + difyctl bundle — extract anywhere, point `$PATH` at `bin/`, and `difyctl --version` works without the host having Node installed. 
+ +[`docs/specs/README.md`]: specs/README.md diff --git a/cli/docs/release-smoke-checklist.md b/cli/docs/release-smoke-checklist.md new file mode 100644 index 0000000000..baf437b399 --- /dev/null +++ b/cli/docs/release-smoke-checklist.md @@ -0,0 +1,15 @@ +# Release Smoke Checklist + +Before promoting an rc to stable (or merging the release commit on `main`), +the release author runs through the following against a local dify at the +target tag: + +- [ ] `docker compose up` dify-api and dify-web at the target version +- [ ] `difyctl auth login` completes oauth and stores a token +- [ ] `difyctl get workspace` lists workspaces, current marked +- [ ] `difyctl get apps` returns apps for the current workspace +- [ ] `difyctl describe app ` returns info + parameters + input_schema +- [ ] `difyctl run app --input "input=hi"` blocking returns echoed answer +- [ ] `difyctl run app --input "input=hi" --stream` streams events +- [ ] `difyctl version` shows correct version, channel, and compat range +- [ ] Uninstall the previous version, install the new tarball via install-cli.sh, repeat the above diff --git a/cli/docs/specs/apps.md b/cli/docs/specs/apps.md new file mode 100644 index 0000000000..5347401daa --- /dev/null +++ b/cli/docs/specs/apps.md @@ -0,0 +1,313 @@ +--- +title: apps (get / describe / run) +--- + +# apps + +> Implementation: see [`cli/src/`](../../src/). Build & test: see [`cli/README.md`](../../README.md). + +App-resource commands. Split into two CLI surfaces matching server subject_type separation: + +| Subject | CLI surface | Server surface | +|---|---|---| +| `dfoa_` (account) | `get apps`, `get app`, `run app` | `/openapi/v1/apps*` | +| `dfoe_` (external SSO, EE only) | `get permitted-external-apps`, `get permitted-external-app`, `run permitted-external-app` | `/openapi/v1/permitted-external-apps*` | + +CLI dispatches client-side based on the cached `subject_type` from `hosts.yml` (written at `auth login` from `GET /openapi/v1/account`). 
Cross-surface invocation errors before any network call:
+
+- `dfoa_` token + `get permitted-*` → `error: 'permitted-external-apps' commands are for external SSO sessions; use 'get apps' instead`. Exit 2.
+- `dfoe_` token + `get app*` / `run app` → `error: 'apps' commands are for account sessions; use 'get permitted-external-apps' instead`. Exit 2.
+
+Companion: `auth.md §HTTP contract`, `server/endpoints.md §OpenAPI — app (two surfaces, strict subject_type separation)`. Agent onboarding text (`help account` / `help external`) in `guide.md`.
+
+## Command surface
+
+### dfoa_ surface (account sessions; CE + EE)
+
+| Command | Verb | Purpose |
+|---|---|---|
+| `difyctl get apps` | get | List apps in target workspace |
+| `difyctl get app <app_id>` | get | Single-app metadata fetch (slim) |
+| `difyctl describe app <app_id>` | describe | Rich detail: info + parameter schema + JSON Schema |
+| `difyctl run app <app_id> [message]` | run | Invoke app; server dispatches by `apps.mode` |
+
+### dfoe_ surface (external SSO; EE only)
+
+| Command | Verb | Purpose |
+|---|---|---|
+| `difyctl get permitted-external-apps` | get | List apps the SSO subject can access (no workspace) |
+| `difyctl get permitted-external-app <app_id>` | get | Single permitted-external-app metadata fetch |
+| `difyctl describe permitted-external-app <app_id>` | describe | Rich detail for a permitted app |
+| `difyctl run permitted-external-app <app_id> [message]` | run | Invoke a permitted app |
+
+## `get apps` (dfoa_)
+
+```
+difyctl get apps [-w <workspace_id>] [-A] [--mode <mode>] [--name <substr>] [--tag <tag>...] [--limit N] [-o <format>]
+```
+
+Pagination is hidden by default — auto-paginates internally. Users specify `--limit` (total rows wanted); the client issues as many server pages as needed and stops at `--limit` or when `has_more=false`. No user-facing `--page` flag; see §Pagination below.
+
+### Flow
+
+1. Read active session from `hosts.yml` (bearer + subject_type + current workspace).
+2. Pre-flight: `subject_type == "account"`? Else → cross-surface error (exit 2).
+3. Resolve `workspace_id`: `-w/--workspace` flag → `DIFY_WORKSPACE_ID` env → `hosts.yml.workspace.id`. Unresolved → exit 2 `workspace_required`.
+4. `GET /openapi/v1/apps?workspace_id=<workspace_id>&page=N&limit=N&mode=...&name=...&tag=...`. Bearer auth, scope `apps:read`.
+5. If `-A/--all-workspaces`: fan out across `available_workspaces`, max 4 concurrent. Concatenate.
+6. Render per `-o`.
+
+### `get app <app_id>` (dfoa_, single)
+
+```
+difyctl get app <app_id> [-w <workspace_id>] [-o <format>]
+```
+
+1. Pre-flight: `subject_type == "account"`. Else cross-surface error.
+2. Resolve `workspace_id` (required by server on this surface).
+3. `GET /openapi/v1/apps/<app_id>/describe?fields=info&workspace_id=<workspace_id>`. Bearer auth, scope `apps:read`. Slim variant — `parameters` + `input_schema` skipped server-side.
+4. Render per `-o`.
+
+### Flags
+
+| Flag | Default | Notes |
+|---|---|---|
+| `-w, --workspace` | resolved per chain | Override current workspace. Required for `get app <app_id>` |
+| `-A, --all-workspaces` | false | Client-side fan-out across all member workspaces. Visits each workspace with same auth, applies same scope + ACL filter |
+| `--mode` | (all) | `chat` / `completion` / `workflow` / `agent-chat` / `advanced-chat` |
+| `--name` | — | Substring match |
+| `--tag` (repeatable) | — | Tag name (not UUID); server resolves within workspace |
+| `--limit` | `defaults.limit` (50) | Total rows across auto-paged calls. Max 10000 |
+| `-o, --output` | `defaults.format` (table) | `table` / `json` / `yaml` / `wide` / `name` |
+
+## `get permitted-external-apps` (dfoe_, EE only)
+
+```
+difyctl get permitted-external-apps [--mode <mode>] [--name <substr>] [--limit N] [-o <format>]
+```
+
+No workspace concept. No `-w` flag. No `-A` (single deployment-wide query). No `--tag` (tags are tenant-scoped; dfoe_ is cross-tenant).
+
+### Flow
+
+1. Pre-flight: `subject_type == "external_sso"`. Else cross-surface error.
+2. `GET /openapi/v1/permitted-external-apps?page=N&limit=N&mode=...&name=...`. Bearer auth, scope `apps:read:permitted-external`.
Strict server validator — extra params → 422. +3. Auto-paginate until `--limit` or `has_more=false`. +4. Render per `-o`. + +### `get permitted-external-app ` (dfoe_, single) + +``` +difyctl get permitted-external-app [-o ] +``` + +1. Pre-flight: `subject_type == "external_sso"`. +2. `GET /openapi/v1/permitted-external-apps/`. Slim metadata. 404 if app not in permitted set. + +### Flags (dfoe_ list) + +| Flag | Default | Notes | +|---|---|---| +| `--mode` | (all) | Same enum as dfoa_ surface | +| `--name` | — | Substring match | +| `--limit` | 50 | Total rows. Max 10000 | +| `-o, --output` | table | `table` / `json` / `yaml` / `name` (no `wide` — no workspace columns to show) | + +### Pagination + +Pagination is implementation detail, not user vocabulary. The flag is `--limit N` (total rows wanted). The client loops the server's `{page, limit, total, has_more, data}` envelope until either `--limit` rows have been collected or the server reports `has_more=false`. + +Server-side: each `/openapi/v1/apps` call is capped at `MAX_PAGE_LIMIT` (200) rows per page. The CLI computes per-call `limit` as `min(remaining, MAX_PAGE_LIMIT)` and increments `page` until done. + +Behavior summary: + +- `--limit 10` → 1 server call, `?page=1&limit=10`. Returns ≤10 rows. +- `--limit 50` (default) → 1 server call, `?page=1&limit=50`. Returns ≤50 rows. +- `--limit 200` → 1 server call, `?page=1&limit=200`. +- `--limit 500` → up to 3 server calls (`page=1&limit=200`, `page=2&limit=200`, `page=3&limit=100`); short-circuits if any page returns `has_more=false`. +- `--limit 0` → reserved (rejected with `config_invalid_value`); use a positive integer or omit for the default. + +Single-page raw fetch is not exposed. Agents that need page-by-page parsing should consume the JSON envelope from the server directly (`-o json` always returns the assembled `data: [...]` after auto-paging — no `page`/`has_more` keys leak into the CLI's stdout). 
+
+Output format implications:
+
+- `table` / `wide` / `name`: rows stream to stdout as each server page arrives, with one final summary line on stderr (`fetched 312 of 312 rows`) when the loop ends.
+- `json` / `yaml`: assembled into a single payload at end-of-loop and emitted once. The wire-level `{page, limit, total, has_more}` keys do not appear in CLI output; only the merged `data: [...]` array.
+
+Errors mid-loop:
+
+- 429 from server: respect `Retry-After` header, sleep, retry up to 3 times. After exhaustion → exit 1 with the rows already collected printed (`-o table`/`wide`/`name`) or a partial-payload error envelope (`-o json`/`yaml`).
+- 5xx mid-loop: same treatment as 429 (exponential backoff, retry).
+- 4xx mid-loop (other than 429): abort, surface error to stderr, exit per `auth.md §Error handling`.
+
+### Output — list
+
+Default text/table:
+
+```
+ID        MODE      NAME          DESCRIPTION
+app_a1b2  chat      Customer FAQ  Answers product FAQ from KB...
+app_c3d4  workflow  Daily Report  Generates daily ops summary...
+```
+
+Description column truncates to fit terminal width (terminal width minus other column widths). Trailing ellipsis. Sort: `updated_at DESC`.
+
+| Format | Columns / payload |
+| ----------------- | -------------------------------------------------------------------------------------------------- |
+| `table` (default) | `ID MODE NAME DESCRIPTION` (description truncated) |
+| `wide` | `ID MODE NAME DESCRIPTION TAGS UPDATED AUTHOR` (no truncation) |
+| `name` | IDs only, one per line. Pipeable: `difyctl get apps -o name \| xargs -I{} difyctl describe app {}` |
+| `json` | Full server payload, indented |
+| `yaml` | Full server payload |
+
+With `-A`: prepend `WORKSPACE` column to table/wide; JSON/YAML wrap each row with `{workspace_id, ...row}`.
+
+### Output — single
+
+Same shape as list except `data` is a one-element array. `name` format emits the single ID.
+ +### Error handling + +| Server code | CLI behavior | +| ------------------------ | -------------------------------------------------------------------------------------------------------- | +| 403 `wrong_surface` | `error: this surface accepts only account sessions; you're signed in as .` Exit 1. Should be caught client-side; surfacing it means subject_type cache is stale — CLI clears and prompts re-login. | +| 403 `insufficient_scope` | `error: token lacks scope for this operation (required=apps:read).` Exit 1 | +| 402 `license_required` (dfoe_ surface only) | `error: this deployment's enterprise license does not cover external SSO apps surface.` Exit 1 | +| 401 | Per `auth.md §No token refresh` — clear creds, exit 4 | +| 404 (single fetch) | `error: app not found (app_id=).` Exit 1 | + +## `describe app` (dfoa_) / `describe permitted-external-app` (dfoe_) + +``` +difyctl describe app [-w ] [-o ] # dfoa_ +difyctl describe permitted-external-app [-o ] # dfoe_ +``` + +Surface-specific. Cross-surface invocation errors client-side (exit 2). + +### Flow — `describe app` (dfoa_) + +1. Pre-flight: `subject_type == "account"`. Else cross-surface error. +2. Resolve `workspace_id` (required by server on this surface). +3. `GET /openapi/v1/apps//describe?workspace_id=`. Bearer auth, scope `apps:read`. +4. Render per `-o`. + +### Flow — `describe permitted-external-app` (dfoe_, EE only) + +1. Pre-flight: `subject_type == "external_sso"`. Else cross-surface error. +2. `GET /openapi/v1/permitted-external-apps/`. Bearer auth, scope `apps:read:permitted-external`. +3. Render per `-o`. EE returns the same `{info, parameters, input_schema}` shape; tenant resolved from app row. + +### Default text output (kubectl-describe style) + +``` +ID: app_a1b2c3 +Mode: chat +Name: Customer FAQ +Description: Answers product FAQ from knowledge base. 
+Author: alice@example.com +Tags: [prod, customer-facing] + +Inputs: + industry (string, required) + Options: retail | finance | health + context (paragraph, optional) + Max length: 4000 + +File upload: + Image: enabled (max 5, ≤10MB) + Document: disabled + +Conversation features: + Opening statement: "Hi! I can help you with product questions." + Suggested questions: 3 items +``` + +### Output formats + +| Format | Payload | +| ----------------- | ------------------------------------------------ | +| `table` (default) | Sectioned indented label:value text | +| `json` | Server `/describe` response, indented | +| `yaml` | Same, YAML | + +`wide` and `name` not applicable to describe (single resource); return `NoCompatiblePrinterError`. + +**Server endpoint:** `GET /openapi/v1/apps//describe` — canonical "what is this app" surface. Returns `{info, parameters, input_schema}`; the openapi-namespace has no `/info` or `/parameters` routes (callers requiring slim subsets use `?fields=...`). `input_schema` is JSON Schema (Draft 2020-12) derived server-side from `user_input_form`, intended for agent tool-call payload generation. Single-request failure mode: no partial-state handling needed. + +## `run app` (dfoa_) / `run permitted-external-app` (dfoe_) + +``` +difyctl run app [message] [-w ] [--input ]... [--conversation ] [--stream] [-o text|json|yaml] +difyctl run permitted-external-app [message] [--input ]... [--conversation ] [--stream] [-o text|json|yaml] +``` + +Two commands, one runner under the hood. Surface routing identical to read paths — `run app` hits `/apps//run`, `run permitted-external-app` hits `/permitted-external-apps//run`. Cross-surface invocation errors client-side (exit 2). + +### Flow — `run app` (dfoa_) + +1. Pre-flight: `subject_type == "account"`. Else cross-surface error. +2. Resolve bearer + metadata from `hosts.yml`. Bearer absent → `not_logged_in`, exit 4. +3. Resolve `workspace_id` (`-w` → env → ctx). 
Sent in `AppRunRequest` body (informational).
+4. `POST /openapi/v1/apps/<app_id>/run`. Bearer auth, scope `apps:run`.
+5. Render response.
+
+### Flow — `run permitted-external-app` (dfoe_, EE only)
+
+1. Pre-flight: `subject_type == "external_sso"`.
+2. Resolve bearer. Bearer absent → exit 4.
+3. `POST /openapi/v1/permitted-external-apps/<app_id>/run`. Bearer auth, scope `apps:run`. No `workspace_id` in body — server resolves tenant from app row.
+4. Render response.
+
+CLI does NOT pre-fetch to dispatch by mode — server owns mode dispatch. CLI's job: build `AppRunRequest` from `<app_id>` + `--input` + `--conversation` + `--stream`; render `AppRunResponse` per mode shape returned. The `app_meta` cache (full describe blob) is consulted opportunistically for client-side input validation; cache miss non-fatal — server validates authoritatively and returns 422 with required-input names if anything is wrong.
+
+### Universal `enable_api` gate
+
+Both surfaces filter through `_apply_openapi_gate` (`api/services/openapi/visibility.py`). App where `enable_api = false` → 404 on both surfaces (no existence leak). See `server/middleware.md §Universal openapi gate`. No console escape hatch.
+
+### Headers
+
+| Header | Value |
+| --------------- | ---------------- |
+| `Authorization` | `Bearer <token>` |
+
+Server resolves tenant from `apps.tenant_id`. App id is in URL path (no `X-Dify-App-Id` header here).
+
+### Input
+
+- Positional `<message>` — for chat / agent-chat / advanced-chat modes, the user message. Workflow mode rejects positional message; use `--input`.
+- `--input <key>=<value>` (repeatable) — app inputs (variables configured in the app). For workflow runs, these are the workflow inputs.
+- `--conversation <conversation_id>` — chat-mode only; resume an existing conversation. Stderr-printed conversation hint after first chat invocation: `--conversation <conversation_id>`.
+- `--stream` — request SSE streaming. `agent-chat` always streams (the upstream agent step requires it); for other modes, blocking when omitted.
Text output streams deltas as they arrive; `-o json` / `-o yaml` aggregate into the blocking-shape envelope at end-of-stream. + +### Output + +Format selected by `-o|--output`. Default `text` (human-readable). + +| Format | Behavior | +| ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `text` (default) | Per-mode rendering — chat / agent-chat / advanced-chat: assistant reply to stdout, conversation hint to stderr; completion: assistant reply to stdout; workflow: `data.outputs` JSON to stdout | +| `json` | Raw server response to stdout, indented JSON | +| `yaml` | Raw server response to stdout, YAML | + +`wide` and `name` reserved for collection commands; return `NoCompatiblePrinterError` on `run app`. + +**Not supported:** `--jq`, mermaid output, table output for run results, live progress display, usage metrics breakdown. + +### Error handling + +| Server code | CLI behavior | +| -------------------------- | --------------------------------------------------------------------------------- | +| 403 `wrong_surface` | Subject_type mismatch (should be caught client-side). Clear subject_type cache, prompt re-login. Exit 1 | +| 403 app ACL deny | `error: you do not have access to this app (app_id=).` Exit 1 | +| 403 `insufficient_scope` | `error: token lacks scope for this operation (required=apps:run).` Exit 1 | +| 402 `license_required` (dfoe_ surface only) | `error: this deployment's enterprise license does not cover external SSO apps surface.` Exit 1 | +| 404 app not found | `error: app not found (app_id=).` Exit 1 | +| 422 validation | Print server's validation message; surface required-input names as hints. 
Exit 1 | +| 401 | Per `auth.md §No token refresh` — clear creds, exit 4 | + +## App-info cache + +Single 1h-TTL store holds the full `{info, parameters, input_schema}` blob keyed by `(host, app_id)`. Slim `?fields=info` calls populate / read the same key — partial blob is upgraded on the next full fetch. Shared by `get app `, `describe app`, and `run app` (no per-command duplicate caches). + +Default sort: `updated_at DESC`. Tag-name match (`--tag prod`) hits every tag with that name; `-o json` disambiguates via tag IDs. `get apps -A` caps parallel workspace fan-out at 4 concurrent calls to stay under per-token rate limits. CLI renderer keys on `parameters.user_input_form` (not `input_schema`). diff --git a/cli/docs/specs/auth.md b/cli/docs/specs/auth.md new file mode 100644 index 0000000000..1df29424d1 --- /dev/null +++ b/cli/docs/specs/auth.md @@ -0,0 +1,469 @@ +--- +title: auth +--- + +# auth + +> Implementation: see [`cli/src/`](../../src/). Build & test: see [`cli/README.md`](../../README.md). + +CLI auth: login, logout, status, whoami, devices. Credential storage. Bearer HTTP contract. Error model. + +Companion: `server/tokens.md` (storage + prefixes), `server/device-flow.md` (server flow), `server/endpoints.md` (API contracts). + +## Commands + +| Command | Purpose | Output | +|---|---|---| +| `difyctl auth login` | Interactive device flow | Prompts → `Logged in as …` | +| `difyctl auth logout` | Server revoke + local clear | One-line confirm | +| `difyctl auth status [-v] [--json]` | Identity dashboard | Multi-line human or JSON | +| `difyctl auth whoami [--json]` | Account identity only | One line or JSON | +| `difyctl auth use ` | Switch active workspace (account only) | One-line confirm | +| `difyctl auth devices list [--json]` | OAuth sessions across devices | Table or JSON | +| `difyctl auth devices revoke [--all] [--yes]` | Revoke one or all OAuth sessions | One-line confirm | + +Interactive device flow only. 
No `--with-token` / `DIFY_PAT` / `--token` — PAT not supported. + +## Session model + +**Single active host.** `auth login` replaces prior session. `--host` on individual commands reaches non-active hosts without changing the active one. Credential store shape allows future migration to a per-host map if multi-host becomes real. + +**Workspace.** Server returns the user's workspaces on login + marks default. CLI stores `default_workspace_id` in `hosts.yml` as the active workspace. Every resource command accepts `--workspace ` override. `difyctl auth use ` switches active workspace (writes `current_workspace_id` to `hosts.yml`). See `workspaces.md §Resolution chain`. + +**Account switch on same host.** Re-login with a different account → drop all prior metadata (workspace, account, available_workspaces), adopt new account's server default. Sequence: + +1. New device flow completes. +2. Compare returned `account.id` vs stored. +3. Different → clear all metadata + bearer. +4. Best-effort revoke old bearer: `DELETE /openapi/v1/account/sessions/self`. Fire-and-forget. +5. Write new bundle, stderr `note: previous account signed out`. + + +## Login + +Interactive device flow. + +### Interactive + +``` +$ difyctl auth login +? Dify host: https://dify.internal +! Copy this one-time code: ABCD-1234 +Press Enter to open dify.internal/device in your browser... + +Waiting for authorization... done +Logged in as gareth@dify.ai (Gareth Chen) +Workspace: Acme Corp +``` + +**Flow:** + +1. **Host.** Skipped if `--host` given. Else prior host shown as default. CLI normalizes scheme to `https://`, strips trailing slash. Non-HTTPS rejected unless `--insecure`. +2. **Device flow.** `POST /openapi/v1/oauth/device/code` with `client_id=difyctl` + `device_label`. Server returns `{device_code, user_code, verification_uri, expires_in, interval}`. +3. **Show code + URL on stderr** (always — stays visible for manual recovery). +4. **Browser open decision** (see below). 
Auto-open prompts `Press Enter to open …` → `open` / `xdg-open` / `cmd /c start`. Launch failure → `note: couldn't open browser; open the URL above manually`. +5. **Poll** `POST /openapi/v1/oauth/device/token` every `interval` sec. Spinner + countdown (interactive); silent (plain/structured). Handle `authorization_pending` / `slow_down` / `expired_token` / `access_denied`. +6. **Workspace resolution.** Server-returned default becomes active. No prompt. + +**Flags:** + +| Flag | Effect | +|---|---| +| `--host ` | Skip host prompt | +| `--no-browser` | Force skip-open even when auto-open qualifies | +| `--insecure` | Allow `http://`. Stderr warns. For `auth login` specifically, warns that `device_code` + `user_code` travel plaintext — any on-path MITM can intercept, poll the token endpoint, and race the legitimate user's approval. Local-dev / loopback only | + +**Error states:** + +| Server code | CLI behavior | +|---|---| +| `authorization_pending` | Keep polling at current interval. No stderr noise. | +| `slow_down` | Double current interval (`new = min(prev * 2, 60s)`), keep polling. Stderr at debug level only. Per RFC 8628 §3.5. | +| `expired_token` | `error: code expired before authorization; run 'difyctl auth login' to try again`. Exit 4 | +| `access_denied` | `error: authorization denied`. Exit 4 | +| any other / unknown error code | `error: unexpected device-flow error: `. Exit 1. Treat as transient bug, do not retry. | +| transport 5xx / network timeout on poll | Retry up to 5× with exponential backoff (1s → 16s, capped). Exhaust → `error: device-flow poll unavailable`. Exit 1. | + +### Browser-open decision + +Skip auto-open if any condition matches: + +| Condition | Check | +|---|---| +| User opted out | `--no-browser` set | +| SSH | `$SSH_CONNECTION` or `$SSH_TTY` set | +| Headless Linux | Linux + both `$DISPLAY` and `$WAYLAND_DISPLAY` unset | +| Non-interactive | stdout or stderr not a TTY | + +Else attempt auto-open. 
Failure non-fatal — code + URL already on stderr. Windows-over-SSH (OpenSSH / WSL) sets `$SSH_CONNECTION` same as POSIX.
+
+SSH example:
+
+```
+$ difyctl auth login --host https://dify.internal
+! Detected SSH session — opening the browser on this machine is skipped.
+! Open this URL on any device with a browser:
+!   https://dify.internal/device
+! When prompted, enter this one-time code (expires in 15 minutes):
+!   ABCD-1234
+
+Waiting for authorization... done
+Logged in as gareth@dify.ai (Gareth Chen)
+Workspace: Acme Corp
+```
+
+### Re-login / host switch
+
+`auth login` while logged in replaces session. Different host → stderr note:
+
+```
+note: switching from <old-host> to <new-host>; previous session will be cleared
+```
+
+Old session cleared only on new-login success. Failed re-login preserves old.
+
+### First-run
+
+No prior host → prompt first. No magic discovery (users know their Dify URL — it's how they reach the web console). Cloud users may see `https://cloud.dify.ai` as the default suggestion — exact copy at implementation.
+
+## Logout
+
+Revoke server-side + clear local. Best-effort; never blocks on network.
+
+1. Read bearer.
+2. `DELETE /openapi/v1/account/sessions/self`. Bearer in `Authorization` header.
+3. Non-200 → stderr: `warning: server revoke failed (<status> <error_code>); local credentials cleared anyway`.
+4. Delete keychain entry + rewrite `hosts.yml` without bearer.
+5. `Logged out of <host>` to stdout. Exit 0 even on step-2 failure.
+
+## Status + identity output
+
+`auth status` default = minimal identity. `-v` = extra metadata. **Never print token details** — expiry, refresh timing, raw token. That's the middleware's concern, not the user's.
+ +**Compact (default):** + +``` +Logged in to dify.internal as gareth@dify.ai (Gareth Chen) + Workspace: Acme Corp + Session: Dify account — full access +``` + +**Verbose (`-v`):** + +``` +dify.internal + Account: gareth@dify.ai (Gareth Chen, acc_6c8a1f) + Workspace: Acme Corp (ws_abc123, role: owner) + Available: 2 workspaces + Session: Dify account — full access (scope: full) + Surface: apps (dfoa_) + Storage: keychain +``` + +**Session tier line** — one-line summary of what the user can do. Shown on compact + verbose. `Surface:` line names the server surface the session targets (`apps` for `dfoa_`, `permitted-external-apps` for `dfoe_`). + +| Subject | Line | +|---|---| +| Account (`dfoa_`, scope `full`) | `Session: Dify account — full access` | +| External SSO (`dfoe_`, scopes `apps:run` + `apps:read:permitted-external`) | `Session: External SSO — can run permitted apps and discover them, cannot access workspace surface` | + +**Logged out:** `Not logged in. Run 'difyctl auth login' to sign in.` Exit 4. + +**JSON (`--json`):** + +```json +{ + "host": "dify.internal", + "logged_in": true, + "account": { "id": "acc_6c8a1f", "email": "gareth@dify.ai", "name": "Gareth Chen" }, + "workspace": { "id": "ws_abc123", "name": "Acme Corp", "role": "owner" }, + "available_workspaces_count": 2, + "storage": "keychain" +} +``` + +Logged-out JSON: `{"host": null, "logged_in": false}`. 
+
+**`auth whoami`:**
+- Human: `gareth@dify.ai (Gareth Chen)`
+- JSON: `{"id": "acc_6c8a1f", "email": "gareth@dify.ai", "name": "Gareth Chen"}`
+
+### External SSO rendering
+
+`dfoe_` token (`subject_email + subject_issuer` populated, `account_id = NULL`) → no workspace lines (subject isn't a workspace member; no workspace concept):
+
+```
+Logged in to dify.internal as sso-user@partner.com (via https://idp.partner.com)
+  Surface: permitted-external-apps (external SSO)
+  Scopes: apps:run, apps:read:permitted-external
+```
+
+`auth whoami`:
+- Human: `sso-user@partner.com (external SSO, issuer: https://idp.partner.com)`
+- JSON: `{"subject_type": "external_sso", "email": "sso-user@partner.com", "issuer": "https://idp.partner.com"}`
+
+## Devices (multi-device management)
+
+Users sign in from multiple machines simultaneously. Server stores one row per `(subject_email, subject_issuer, client_id, device_label)` via partial unique index; same-device re-login rotates in place. `device_label` auto-derived from hostname (`"difyctl on gareth-mbp"`).
+
+### `auth devices list`
+
+```
+$ difyctl auth devices list
+DEVICE                   CREATED     LAST USED  CURRENT
+difyctl on gareth-mbp    2026-03-15  5m ago     *
+difyctl on ci-runner-01  2026-02-01  17h ago
+difyctl on old-thinkpad  2025-11-02  98d ago
+```
+
+- `GET /openapi/v1/account/sessions` with current bearer.
+- `CURRENT` flags the row where `id == local token_id`.
+- `--json` → raw array.
+
+### `auth devices revoke <device>`
+
+```
+$ difyctl auth devices revoke "difyctl on old-thinkpad"
+Revoked: difyctl on old-thinkpad
+```
+
+- Resolution: exact `device_label` → UUID → unique substring. Ambiguous → exit 2 with disambiguation hint.
+- `DELETE /openapi/v1/account/sessions/<token_id>`. Server enforces subject-match → 403 otherwise. Cross-user revoke is admin-only and out of scope here.
+- **Self-revoke shortcut.** If the resolved id matches current session's `token_id`, behave like `auth logout` — server revoke + local clear.
+- **`--all`** revokes every OAuth token for this user *except* current device. Confirm prompt unless `--yes`. + +## Credential storage + +Bearer in OS keychain (preferred). Metadata in YAML. Keychain unavailable → YAML also holds bearer at `0600`. Same model as `gh`. + +### File path + +| OS | Default | +|---|---| +| Linux | `$XDG_CONFIG_HOME/difyctl/hosts.yml` else `~/.config/difyctl/hosts.yml` | +| macOS | `~/.config/difyctl/hosts.yml` (not `~/Library/…` — matches `gh`/`docker`/`kubectl`/`git`) | +| Windows | `%AppData%\difyctl\hosts.yml` | + +`DIFY_CONFIG_DIR` overrides. Dir `0700`, files `0600` POSIX; Windows ACL user-only. Unexpected mode on read → stderr warning. + +### `hosts.yml` schema + +```yaml +current_host: dify.internal +subject_type: account # OR "external_sso" — drives CLI command dispatch +account: + id: acc_6c8a... + email: gareth@dify.ai + name: Gareth Chen +workspace: # only present when subject_type == "account" + id: ws_abc123 + name: Acme Corp + role: owner +available_workspaces: # empty list when subject_type == "external_sso" + - id: ws_abc123 + name: Acme Corp + role: owner + - id: ws_def456 + name: Side Project + role: member +external_sso: # only present when subject_type == "external_sso" + email: sso-user@partner.com + issuer: https://idp.partner.com +token_storage: keychain # OR "file" when keychain unavailable +token_id: oat_abc123... # for revocation DELETE +token_expires_at: null # usually null (gh-shape) +# token kind (OAuth account / OAuth ExtSSO) discriminated by prefix: +# dfoa_ / dfoe_. subject_type field is the authoritative dispatch key. +# Only present when token_storage == "file": +tokens: + bearer: "dfoa_..." # OR "dfoe_..." +``` + +CLI dispatch reads `subject_type` to decide which commands are valid for this session — `dfoa_` allows `get apps` / `get app` / `run app`; `dfoe_` allows `get permitted-external-apps` / `get permitted-external-app` / `run permitted-external-app`. 
Cross-surface invocation errors client-side before any network call. Surface field on the token itself is implicit from prefix; `subject_type` is the canonical field.
+
+### Keychain entry
+
+When `token_storage: keychain`:
+
+- Service: `difyctl`
+- Account: `<host>` (e.g. `dify.internal`)
+- Password: JSON blob:
+
+```json
+{
+  "bearer": "dfoa_ab2f...",
+  "source": "oauth",
+  "token_id": "oat_abc...",
+  "expires_at": null
+}
+```
+
+### Storage-mode detection
+
+1. First login: probe keychain via Set → Get → Delete sentinel (`difyctl-probe:<random>`).
+2. Probe OK → `token_storage: keychain`. Probe fail → `token_storage: file` + stderr: `info: OS keychain unavailable; token will be stored in ~/.config/difyctl/hosts.yml (0600).`
+3. Mode persisted in `hosts.yml`; respected subsequently.
+4. Force file: `DIFY_CREDENTIAL_STORAGE=file`.
+
+### Source of truth
+
+- Metadata → `hosts.yml` authoritative. Keychain bearer without matching config → treated as logged out.
+- Bearer in keychain mode → keychain authoritative. Config says keychain but missing keychain entry → logged out.
+- Manual edits to `tokens:` under `token_storage: keychain` are ignored.
+
+### Env escape hatch
+
+`DIFY_TOKEN` + `DIFY_HOST` + `DIFY_WORKSPACE_ID` all present → skip storage reads; bearer env-driven, never persisted. Undocumented in `--help`. Emergencies only.
+
+**All-or-none.** Partial set (e.g., `DIFY_TOKEN` alone) → exit 2 with `error: env escape hatch requires all of DIFY_TOKEN, DIFY_HOST, DIFY_WORKSPACE_ID; missing: <vars>`. CLI does not silently fall back to storage when one var is set.
+
+`DIFY_TOKEN` accepts `dfoa_` / `dfoe_` only. `app-` and `dfp_` rejected with the same prefix-validation error as device-flow ingestion.
+
+### File-mode security
+
+Plain-text bearer in `~/.config/difyctl/hosts.yml` (`0600`) = as secure as an SSH key on the same disk. Backups, fs attackers, misconfigured ACLs leak it. First file-mode write emits stderr notice making the trade-off explicit.
+
+## HTTP contract
+
+### Bearer
+
+Every authenticated request:
+
+```
+Authorization: Bearer <token>
+```
+
+`<token>` = `dfoa_…` / `dfoe_…`. No cookies, no `X-CSRF-Token`, no jar. CLI never mints or accepts `dfp_`.
+
+**Single surface.** All bearer traffic targets the service API — CLI base URL = `/v1`. Bearer tokens never reach `/console/api/*`. Details: `server/middleware.md §Coexistence`.
+
+### App-context headers
+
+Commands acting on a specific app:
+
+| Header | Required | Purpose |
+|---|---|---|
+| `X-Dify-App-Id: <app_id>` | yes | Target app. Server resolves tenant from `app.tenant_id` |
+| `X-Dify-Env` | no | Static CLI traffic identifier; sent by CLI to distinguish CLI-originated requests. Not user-configurable. |
+
+App-scoped `app-` keys ignore both (app is in the key). Identity calls (`auth login/whoami/status`) hitting `GET /openapi/v1/account` send neither.
+
+### Identification headers
+
+Every request — authenticated + device flow:
+
+| Header | Value |
+|---|---|
+| `User-Agent` | `difyctl/<version> (<os>; <arch>; <channel>)` — e.g. `difyctl/1.0.0 (darwin; arm64; stable)` |
+
+Admins filter CLI traffic via User-Agent regex.
+
+**No `X-Dify-Client`** — redundant with User-Agent parse, and client-controlled inputs without server enforcement are noise.
+
+**No `X-CSRF-Token`** — bearer requests bypass CSRF server-side.
+
+**Not included:** CLI request-id (server-side IDs suffice), fingerprint, telemetry opt-in.
+
+### No token refresh
+
+Bearer tokens are long-lived. They live until the user revokes or a user-set `expires_at` is reached. CLI never rotates.
+
+**On 401:**
+
+1. Don't retry.
+2. Clear local creds (token + metadata).
+3. Typed error: `error: session expired or revoked; run 'difyctl auth login' to sign in again.`
+4. Exit 4.
+
+No mutex, no storage-reload dance, no cross-process race.
+
+### Middleware chain
+
+`RequestLogger → UserAgent → BearerAuth → ErrorParser`. `BearerAuth` injects `Authorization: Bearer <token>`. Streaming handlers don't handle mid-stream refresh (it doesn't exist).
+ +`/console/api/refresh-token` is not called by difyctl. Web console uses it unchanged. + +## Bearer token kinds + +Two subject variants. Full storage + scope details in `server/tokens.md`. + +| Token | Prefix | Minted by | Subject | Scope | Surface | +|---|---|---|---|---|---| +| OAuth account | `dfoa_…` | Device flow, account branch | Dify account | `[full]` | `/openapi/v1/apps*` | +| OAuth External SSO | `dfoe_…` | Device flow, SSO branch (EE only) | SSO-verified email, no account | `[apps:run, apps:read:permitted-external]` | `/openapi/v1/permitted-external-apps*` | + +Surface is bound by subject_type. Cross-surface requests → 403 `wrong_surface`. CLI caches `subject_type` in `hosts.yml` at login and dispatches commands client-side; cross-surface command invocation errors before any network call. + +Wire format identical: `Authorization: Bearer `. CLI stores kind + subject locally so logout + `auth status` render correctly. + +CLI rejects `dfp_` at every ingestion point. + +**CLI-side OAuth plaintext defense:** + +1. No raw-bearer export command. +2. Device-flow response consumed directly by CLI, written to keychain. User never sees OAuth plaintext at login. +3. Same-device re-login rotates in place → prior exfiltrated plaintext invalid. +4. `app-` and `dfp_` rejected at every CLI ingestion point (`DIFY_TOKEN` env, credential-store load). + +## Login-time behavior + +### Device flow + +1. `POST /openapi/v1/oauth/device/code` with `client_id=difyctl` + `device_label`. +2. Prompt user with URL + user_code. +3. Poll `POST /openapi/v1/oauth/device/token` until success. +4. Response: + - Account branch: `{token: "dfoa_...", account, workspaces, default_workspace_id, expires_at}` + - External SSO branch: `{token: "dfoe_...", subject_type: "external_sso", subject_email, subject_issuer, account: null, workspaces: []}` + Prefix encodes subject type. +5. Write bearer → keychain (or file). Write metadata → `hosts.yml`. +6. Print `Logged in as …`. 
+ +Server-side endpoint details: `server/endpoints.md §OpenAPI — identity + sessions`. + +## Error model + exit codes + +gh's 5-bucket model. Errors also emit structured JSON when `--json` / `--jq` is active — agents branch on `code` string independently of exit int. + +### Exit codes + +| Code | Meaning | Triggers | +|---|---|---| +| `0` | Success | — | +| `1` | Generic / unexpected | Network failures, server 5xx, uncaught exceptions, unparseable responses | +| `2` | Usage error | Unknown flag, invalid arg, missing required input, conflicting flags | +| `4` | Auth error | Not logged in, session expired, PAT rejected, server-revoked | +| `6` | Version / compat | Server version outside `SupportedRange`, unsupported endpoint, schema break | + +No `127` shell-style (overloaded). No per-HTTP-status exit codes — 403/404/409 all → 1; agents read `http_status` from JSON body. + +### JSON error envelope + +Single-line JSON to stderr + mapped exit code: + +```json +{"error":{"code":"auth_expired","message":"session expired","hint":"run 'difyctl auth login'","http_status":401}} +``` + +### Stable `code` strings + +| Code | Exit | +|---|---| +| `not_logged_in`, `auth_expired`, `token_expired` | 4 | +| `version_skew`, `unsupported_endpoint` | 6 | +| `usage_invalid_flag`, `usage_missing_arg`, `config_invalid_key`, `config_invalid_value` | 2 | +| `config_schema_unsupported` | 6 | +| `network_timeout`, `network_dns`, `server_5xx`, `server_4xx_other` | 1 | +| `unknown` | 1 | + +### Human output + +Non-JSON mode → stderr, up to two lines: + +``` +error: +hint: +``` + +Hint optional. + diff --git a/cli/docs/specs/config.md b/cli/docs/specs/config.md new file mode 100644 index 0000000000..45d5d4e31e --- /dev/null +++ b/cli/docs/specs/config.md @@ -0,0 +1,212 @@ +--- +title: config +--- + +# config + +> Implementation: see [`cli/src/`](../../src/). Build & test: see [`cli/README.md`](../../README.md). 
+
+Owns `config.yml`, `difyctl config` commands, env-var registry, `--env` / `X-Dify-Env` plumbing.
+
+Identity (`hosts.yml`) lives in `auth.md §Credential storage`.
+
+## Commands
+
+| Command | Purpose |
+|---|---|
+| `difyctl config view [--json]` | Print full `config.yml` |
+| `difyctl config get <key> [--json]` | Print one value; dotted path |
+| `difyctl config set <key> <value>` | Set known key to validated value; atomic write |
+| `difyctl config unset <key>` | Remove key → revert to built-in default; idempotent |
+| `difyctl config path` | Print absolute path of `config.yml` |
+| `difyctl env list [--json]` | Enumerate all env vars |
+| `difyctl help environment` | Topic page — narrative over the env registry |
+
+## File layout
+
+```
+~/.config/difyctl/
+├── hosts.yml identity + tokens (owned by auth.md)
+└── config.yml preferences + state (this doc)
+```
+
+Path resolution (matches `gh`):
+
+1. `DIFY_CONFIG_DIR` env — verbatim override.
+2. Per-OS default:
+
+| OS | Default |
+|---|---|
+| Linux | `$XDG_CONFIG_HOME/difyctl` else `~/.config/difyctl` |
+| macOS | `~/.config/difyctl` (not `~/Library/…`) |
+| Windows | `%AppData%\difyctl` |
+
+Single resolver lives in `cli/src/config/dir.ts`; `auth.md` reads `hosts.yml` from the same dir. Dir `0700`, files `0600`.
+
+## `config.yml` schema (v1)
+
+```yaml
+version: 1
+defaults:
+  format: table # table | json | yaml
+  limit: 50 # pagination default
+state:
+  current_app: null # reserved; not yet written by any command
+```
+
+### Field ownership
+
+| Field | Owner | Notes |
+|---|---|---|
+| `version` | config | Schema migration |
+| `defaults.format` | config writes, output layer reads | `table` / `json` / `yaml` |
+| `defaults.limit` | config writes, list commands read | Int 1–200 |
+| `state.current_app` | reserved; not yet written by any command | |
+
+**No `default_host` / `default_workspace` in config.yml.** Both live in `hosts.yml` (identity state). Workspace switching via `difyctl auth use` — see `workspaces.md`.
+
+### First run
+
+Missing `config.yml` → treat as empty, return built-in defaults. File materializes only on first `config set` / `app use`. Zero-install still works.
+
+## Precedence
+
+Every setting resolves through the same chain. Higher wins.
+
+1. Command-line flag (`--format json`, `--limit 50`, `--host …`)
+2. Env var (`DIFY_FORMAT`, `DIFY_LIMIT`, `DIFY_HOST`, …)
+3. `config.yml` defaults
+4. `hosts.yml` session identity (read-only from this module)
+5. Built-in default
+
+**No duplication rule.** Anything in `hosts.yml` (host, workspace, account) invalid in `config.yml`. `config set default_host …` rejected.
+
+**Per-call, not global.** Flags don't mutate files. File writes = explicit `config set` / `unset` / `app use`.
+
+## Command details
+
+### `config view`
+
+```
+$ difyctl config view
+version: 1
+defaults:
+  format: table
+  limit: 50
+state:
+  current_app: null
+```
+
+`--json` → same structure as JSON. File absent → built-in defaults + stderr note.
+
+### `config get <key>`
+
+```
+$ difyctl config get defaults.format
+table
+```
+
+Dotted path, single scalar. Unknown key → exit 2 `config_invalid_key`. Null/unset → blank line + exit 0 (scripts branch on exit). `--json` → `{"key": "...", "value": ...}`.
+
+### `config set <key> <value>`
+
+Flow: resolve key → validate value → atomic temp-file+rename → one-line confirm. Unknown key → `config_invalid_key`. Invalid value → `config_invalid_value`. Both exit 2.
+
+### `config unset <key>`
+
+Remove key → revert on next resolution. Unknown → exit 2. Not set → exit 0 (idempotent).
+
+### `config path`
+
+Bare absolute path. Honors `DIFY_CONFIG_DIR`. Prints even if file missing. Useful: `$EDITOR "$(difyctl config path)"`.
+ +### Known-keys registry + +| Key | Type | Allowed | Default | +|---|---|---|---| +| `defaults.format` | string | `table` \| `json` \| `yaml` | `table` | +| `defaults.limit` | int | 1–200 | `50` | +| `state.current_app` | string | app id (written only by R1) | `null` | + +**Limit out-of-range = error, not silent clamp.** `defaults.limit` and `--limit` outside `[1, 200]` → exit 2 with `config_invalid_value` (`config set`) or `usage_invalid_flag` (`--limit` flag). Same rule for `DIFY_LIMIT` env at command resolution. CLI never silently truncates user intent. + +Registry compiled into the bundle. + +## Env-var registry + +Compiled into the bundle. Runtime export via `difyctl env list`; narrative via `difyctl help environment`. + +| Variable | Owner | Overrides | Meaning | +|---|---|---|---| +| `DIFY_CONFIG_DIR` | auth | Per-OS default | Config dir override for both files | +| `DIFY_HOST` | auth | `--host` / `hosts.yml.current_host` | Single-invocation host | +| `DIFY_WORKSPACE_ID` | auth | `--workspace` / `hosts.yml.workspace.id` | Single-invocation workspace | +| `DIFY_CREDENTIAL_STORAGE` | auth | Auto-detected backend | `file` forces file-mode | +| `DIFY_TOKEN` | auth | Stored bearer | Env escape hatch (see `auth.md §Env escape hatch`) — undocumented in `--help`. Accepts `dfoa_` / `dfoe_` only | +| `DIFY_FORMAT` | config | `defaults.format` | `table` \| `json` \| `yaml` | +| `DIFY_LIMIT` | config | `defaults.limit` | Pagination default | +| `NO_COLOR` | output | Auto color | Standard — disables all color | +| `CLICOLOR_FORCE` | output | Auto color | Force color in non-TTY | +| `DIFY_NO_PROGRESS` | output | Auto progress | Suppress spinners | +| `DIFY_PLAIN` | output | Display mode | Force plain mode | +| `DIFY_NO_VERSION_CHECK` | version | Version probe | `1` skips version check | + +No `DIFY_NO_REFRESH`, `DIFY_ACCESS_TOKEN`, `DIFY_REFRESH_TOKEN`, `DIFY_CSRF_TOKEN` — refresh doesn't exist (bearer auth). 
+ +### `difyctl env list` + +Human → table matching the registry above. `--json`: + +```json +[{"name":"DIFY_HOST","owner":"auth","default":"...","description":"..."}] +``` + +### `difyctl help environment` + +Topic page grouped by owner + narrative, reusing the registry data. + +## Validation + schema versioning + +### Strict keys + +`config set` fails on: + +- Unknown key → `config_invalid_key`, exit 2 +- Wrong type → `config_invalid_value`, exit 2 +- Known key owned by `hosts.yml` (e.g. `default_host`) → `config_invalid_key` + +### Strict values + +Enums checked. Ints parsed + range-checked. Unrecognized YAML fields (hand-edit path) → ignored with stderr warning: + +``` +warning: unknown config field 'defaults.color'; ignored +``` + +### Schema versioning + +Every `config.yml` carries `version: ` at root. + +| Stored | CLI supports | Behavior | +|---|---|---| +| equal | v1 = v1 | Read as-is | +| older | v1 → v2 | Step-by-step migrator on read, write back on next mutation, log `info: migrated config.yml from v1 to v2` once | +| newer | v2 > v1 | Refuse. Exit 6, `config_schema_unsupported`, stderr: `error: config.yml was written by a newer difyctl (version 2); upgrade this CLI or edit the file manually` | + +Migrator framework ships as no-op until a v2 schema lands. Silent truncation = worse than a clear error. + +## Concurrent writes + +No file lock on `config.yml`. Mutations are user-interactive (`config set` / `app use`); the race window is tiny. Accept: two simultaneous `config set` can lose one write; later `config view` makes it obvious. + +`hosts.yml` keeps `flock` as cheap insurance for any future write contention; this doesn't apply to `config.yml`. + +## Error model + +Exit codes follow `auth.md §Error model`. 
This module adds: + +| Code | Exit | Trigger | +|---|---|---| +| `config_invalid_key` | 2 | `config set/get/unset` on unknown key | +| `config_invalid_value` | 2 | `config set` value outside enum/range | +| `config_schema_unsupported` | 6 | `config.yml version` > CLI supports | diff --git a/cli/docs/specs/guide.md b/cli/docs/specs/guide.md new file mode 100644 index 0000000000..466acc4d6d --- /dev/null +++ b/cli/docs/specs/guide.md @@ -0,0 +1,110 @@ +--- +title: guide +--- + +# guide + +> Implementation: see [`cli/src/`](../../src/). Build & test: see [`cli/README.md`](../../README.md). + +Agent onboarding quickstart. Three subcommands under the `help` topic — one per bearer token type plus `help environment`: + +``` +difyctl help account +difyctl help external +difyctl help environment +``` + +Root `difyctl --help` directs agents here: + +``` +For AI agents: run 'difyctl help account' (dfoa_ token) or +'difyctl help external' (dfoe_ token). +``` + +--- + +## `difyctl help account` + +For `dfoa_` (OAuth account) bearers. Workspace-scoped access: discovery, workspace management, invocation. + +Output (stdout, static text): + +``` +difyctl Agent Guide — Account Token (dfoa_) + +Surface: /openapi/v1/apps* (workspace-scoped). + +Discovery: + difyctl get apps # list apps in current workspace + difyctl get apps --mode chat # filter by mode + difyctl get apps --name "keyword" # filter by name + difyctl get apps --tag "prod" # filter by tag + difyctl get apps -A # fan out across all member workspaces + difyctl get app # single app slim metadata + difyctl describe app # parameters + input schema + +Workspace: + difyctl get workspace # list your workspaces + difyctl auth use # switch active workspace + +Invocation: + difyctl run app "message" # chat or completion + difyctl run app --input k=v # workflow inputs (repeatable) + difyctl run app -o json # structured output + +Cross-surface: + 'permitted-external-apps' commands are for external SSO sessions only. 
+ Running them on a dfoa_ session errors with exit 2. + +Error handling: + All errors emit JSON to stderr when -o json is active. + Branch on the 'code' field. Exit codes: + 0 = success + 1 = server or network error + 2 = usage / bad input + 4 = auth error (re-run 'difyctl auth login') +``` + +--- + +## `difyctl help external` + +For `dfoe_` (External SSO) bearers. EE-only. No workspace concept. Surface: `/openapi/v1/permitted-external-apps*`. + +Output (stdout, static text): + +``` +difyctl Agent Guide — External SSO Token (dfoe_) + +Surface: /openapi/v1/permitted-external-apps* (no workspace). +Scopes: apps:run + apps:read:permitted-external. +Visibility: apps where access_mode is 'public' or 'sso_verified'. + +Discovery: + difyctl get permitted-external-apps # list permitted apps + difyctl get permitted-external-apps --mode chat # filter by mode + difyctl get permitted-external-apps --name "kw" # filter by name + difyctl get permitted-external-app # single permitted-external-app metadata + difyctl describe permitted-external-app # parameters + input schema + +Invocation: + difyctl run permitted-external-app "message" # chat or completion + difyctl run permitted-external-app --input k=v # workflow inputs + difyctl run permitted-external-app -o json # structured output + +Cross-surface: + 'apps' and workspace commands are for account sessions only. + Running them on a dfoe_ session errors with exit 2. + +Error handling: + All errors emit JSON to stderr when -o json is active. + Branch on the 'code' field. Exit codes: + 0 = success + 1 = server or network error + 2 = usage / bad input + 4 = auth error (re-run 'difyctl auth login') +``` + +--- + +No auth gate — onboarding runs even when not logged in. Text is static, not generated from server state. 
diff --git a/cli/docs/specs/server/device-flow.md b/cli/docs/specs/server/device-flow.md new file mode 100644 index 0000000000..bd73df8472 --- /dev/null +++ b/cli/docs/specs/server/device-flow.md @@ -0,0 +1,554 @@ +--- +title: server — device flow +--- + +# device flow + +OAuth 2.0 Device Authorization Grant (RFC 8628) for `difyctl auth login`. Two branches — account and External SSO — sharing one code-entry page and one Redis state machine. + +Companion: `tokens.md` (storage), `middleware.md` (post-mint auth), `endpoints.md` (full endpoint table), `security.md` (rate limits + audit + anti-frame). + +## Shape + +CLI shows a one-time code + URL; user opens the URL on any device with a browser; server polls for approval. No PKCE + localhost callback. + +## Ephemeral state (Redis) + +Each attempt = short-lived state machine, 15-min TTL, single-use. + +``` +device_code:{device_code_value} → JSON: + { + "user_code": "ABCD-1234", + "client_id": "difyctl", + "device_label": "difyctl on gareth-mbp", + "status": "pending" | "approved" | "denied", + "subject_email": null | "", // set on approval + "account_id": null | "", // may stay null for SSO-only + "minted_token": null | "dfoa_<43 chars>" | "dfoe_<43 chars>", + "token_id": null | "", // oauth_access_tokens.id after mint + "created_at": "", + "created_ip": "", + "last_poll_at": "" // for slow_down + } + +user_code:{user_code_value} → "" (reverse lookup) +``` + +Both keys `EX 900` (15 min). Matches RFC 8628 `expires_in`. + +**Code format:** + +- `device_code` = `dc_<32 base64url chars>` (~256 bit). Never user-facing. +- `user_code` = 8 chars `XXXX-XXXX`, uppercase, reduced alphabet (Crockford-style, ambiguous chars stripped). Low entropy by design — humans type it. Defended by rate-limit + 15-min TTL + single-use. + +**Alphabet (literal, 30 chars):** + +``` +3 4 5 6 7 8 9 A B C D E F G H J K L M N P Q R S T U V W X Y +``` + +Excluded: `0` (vs `O`), `1` (vs `I`/`l`), `2` (vs `Z`), `O` (vs `0`), `I` (vs `1`), `Z` (vs `2`). 
Server normalizes input — uppercases, strips hyphen, rejects any char outside the alphabet with `400 invalid_user_code`. + +**Collision handling.** 30⁸ ≈ 6.5 × 10¹¹ combinations. Silent overwrite would cross-authorize users. `/device/code` atomically claims `user_code` via Redis `SET NX EX` in a 5-attempt retry loop. After 5 collisions → `503 user_code_exhausted` (operator alarm — never seen in normal traffic). `device_code` entropy high enough that no such check is needed. + +**State transitions:** + +``` +pending → user clicks Authorize at /device → approved +pending → user clicks Cancel → denied +pending → 900s TTL elapses → evicted + +approved → CLI poll reads minted_token → DEL both keys +denied → CLI poll reads status=denied → DEL both keys +``` + +**Mint-at-approve semantics.** `oauth_access_tokens` row is written when the user clicks Authorize, not during CLI poll. Redis `minted_token` holds plaintext until the CLI poll retrieves it; then full state DEL'd (plaintext lives in Redis for seconds). On transition to `approved`, `EXPIRE` shrinks the key to `max(remaining_ttl, 60s)`. User-aborted approve (CLI never polls) leaves an orphaned row; user revokes via `auth devices list/revoke`. + +## Account branch + +User authenticates via password / email-code / social OAuth / account-SSO at `/signin`, returns to `/device`, clicks Authorize → mints `dfoa_`. 
+ +```mermaid +sequenceDiagram + autonumber + actor User + participant CLI as difyctl + participant Browser + participant API as Dify API Service (Flask) + participant EE as Enterprise Go + participant IdP + participant KC as OS Keychain + + User->>CLI: difyctl auth login + CLI->>API: POST /openapi/v1/oauth/device/code + API-->>CLI: {device_code, user_code=ABCD-1234} + CLI->>Browser: open /device (no code in URL — user types it) + + loop every `interval` sec + CLI->>API: POST /openapi/v1/oauth/device/token + API-->>CLI: authorization_pending + end + + Browser->>Browser: GET /device + Note over Browser: Code-entry page. + User->>Browser: type ABCD-1234, click Continue + Note over Browser: SPA holds user_code in state.
No console session → render LoginForm. + User->>Browser: click "Sign in with Dify account" + Note over Browser: setPostLoginRedirect('/device?user_code=ABCD-1234')
router.push('/signin') + + Browser->>Browser: GET /signin + User->>Browser: click login method (password / email-code / SSO / …) + + alt Account-SSO (return_to plumbed via signed state) + Browser->>EE: GET /enterprise/sso/saml/login?intent=account_login&return_to=/device?user_code=… + Note over EE: Sign state envelope (JWS):
{intent, return_to, nonce} + EE-->>Browser: IdP authorize URL (state=signed JWS) + Browser->>IdP: authorize + User->>IdP: credentials + MFA + IdP-->>Browser: 302 → Enterprise ACS + Browser->>EE: POST /sso/saml/acs + Note over EE: Verify state JWS + consume state nonce.
dispatchSSOCallback: Intent=account_login
→ mintWebappPassport → 302 state.ReturnTo + EE-->>Browser: 302 → /device?user_code=… (Set-Cookie: console_session) + else Password / email-code / social OAuth (no return_to plumb) + Note over Browser: User lands on /signin default post-login destination.
Manually navigates back to /device URL from CLI terminal. + end + + Browser->>Browser: GET /device?user_code=… + Note over Browser: Console session present.
Render Authorize with device_label + email + user_code. + User->>Browser: click Authorize + + Browser->>API: POST /openapi/v1/oauth/device/approve
Cookie: console_session + Note over API: Validate session. Mint dfoa_.
INSERT/UPDATE oauth_access_tokens
(account_id=[user], scope=[full]).
Redis device state → approved. + API-->>Browser: {status: "approved"} + + CLI->>API: POST /openapi/v1/oauth/device/token (next poll) + API-->>CLI: {access_token: "dfoa_...", account, workspaces, scopes:[full]} + CLI->>KC: store dfoa_ + metadata + CLI-->>User: ✓ Signed in as user@example.com +``` + +### Account-branch endpoints + +All endpoint contracts (request/response, rate limits, auth): `endpoints.md`. + +- `POST /openapi/v1/oauth/device/code` (unauthenticated) — CLI initiates. Response `interval` = **5 (RFC 8628 default)**, hardcoded server-side. CLI polls every `interval` seconds; clamps to `[1, 60]` defensively, treats `0` / negative / absent as `5`. +- `POST /openapi/v1/oauth/device/token` (unauthenticated, rate-limited) — CLI polls. +- `GET /openapi/v1/oauth/device/lookup` (public + rate-limit) — web validates typed code. +- `POST /openapi/v1/oauth/device/approve` (session) — web mints `dfoa_`. +- `POST /openapi/v1/oauth/device/deny` (session) — web denies. + +### Approve implementation + +`POST /openapi/v1/oauth/device/approve`: + +1. `GET user_code:{user_code}` → device_code. Miss → 404. +2. `GET device_code:{device_code}`. Status ≠ pending → 409. +3. Resolve subject from session: `subject_email = session.email`; `account_id` = matching account, or NULL. +4. Read TTL: `ttl_days = Policy.OAuthTTLDays()`. +5. Generate `dfoa_` token. SHA-256. +6. **Upsert** `oauth_access_tokens` keyed on `(subject_email, subject_issuer, client_id, device_label)`. 
`device_label` from Redis state; `subject_issuer = NULL` on account branch: + + ```sql + -- Caller normalizes :issuer before this query: + -- account branch → :issuer = 'dify:account' (sentinel) + -- SSO branch → :issuer = + + -- Capture old hash to invalidate Redis cache after upsert + SELECT token_hash AS old_hash INTO + FROM oauth_access_tokens + WHERE subject_email = :email + AND subject_issuer = :issuer + AND client_id = :client AND device_label = :label AND revoked_at IS NULL; + + INSERT INTO oauth_access_tokens + (subject_email, subject_issuer, account_id, client_id, device_label, prefix, token_hash, expires_at) + VALUES (:email, :issuer, :account_id, :client, :label, :prefix, :new_hash, + NOW() + (:ttl_days || ' days')::interval) + ON CONFLICT (subject_email, subject_issuer, client_id, device_label) WHERE revoked_at IS NULL + DO UPDATE SET + token_hash = EXCLUDED.token_hash, + prefix = EXCLUDED.prefix, + account_id = EXCLUDED.account_id, -- handles CE→EE account provisioning + expires_at = EXCLUDED.expires_at, -- rotate refreshes TTL from current policy + created_at = NOW(), + last_used_at = NULL + RETURNING id; + ``` + + `ON CONFLICT` matches the partial unique index `uq_oauth_active_per_device` (see `tokens.md §oauth_access_tokens`). Account branch writes the `'dify:account'` sentinel into `subject_issuer` at mint time so the column is never NULL — Postgres' default NULL-as-distinct semantics don't apply, and the plain partial unique index enforces "one active row per (email, issuer, client, device)" without needing a COALESCE expression index. Rows hard-expired via `tokens.md §Detection + hard-expire` (`revoked_at IS NOT NULL`) are excluded — so re-login after hard-expire takes the INSERT branch. + + - First login from this device → INSERT (new row, new `id`). + - Re-login same device → UPDATE (same `id`, fresh `token_hash` + `created_at`). Old plaintext invalid at commit. + - Login from different device → INSERT (new row, independent). + +7. 
**Invalidate old Redis on rotation:** `DEL auth:token:{old_hash}`. No-op if no prior row. Without this, old cached entry could stay valid up to 60 s. +8. Update Redis `device_code:{device_code}` → `{status=approved, subject_email, account_id, minted_token=dfoa_..., token_id, ...}`. Scope not persisted; computed at CLI-poll response time. `EXPIRE` to `max(remaining_ttl, 60s)`. +9. **Mint policy validation.** For account branch: scope = `[full]`. SSO branch (see §External SSO branch): scope = `[apps:run, apps:read:permitted-external]`. Cross-subject scope minting → 400 `mint_policy_violation` before INSERT/UPDATE. CE deploys reject `dfoe_` mint entirely. +10. Emit audit `oauth.device_flow_approved` (payload: `subject_email`, `account_id` nullable, `client_id`, `device_label`, `scopes`, `token_id`, `subject_type`, `rotated: true|false`, `expires_at`). +11. Return `{ status: "approved" }`. + +### Deny + +`POST /openapi/v1/oauth/device/deny` — lookup same as approve, update Redis `{status=denied, …}` keeping TTL. Emit `oauth.device_flow_denied`. Return `{ status: "denied" }`. + +### Poll + +`POST /openapi/v1/oauth/device/token`: + +1. `GET device_code:{device_code}`. Miss → `{error: "expired_token"}`. +2. If `last_poll_at < interval` sec ago → `{error: "slow_down"}`. Update `last_poll_at`. +3. Dispatch on `status`: + - `pending` → `{error: "authorization_pending"}`. + - `denied` → `{error: "access_denied"}`. `DEL` both keys. + - `approved` → proceed. +4. **Validate minted row still live:** `SELECT 1 FROM oauth_access_tokens WHERE id=:token_id AND revoked_at IS NULL AND expires_at > NOW() AND token_hash IS NOT NULL`. Miss → token was revoked or hard-expired between approve and poll. Return `{error: "access_denied"}`, `DEL` both keys. +5. **Cross-IP audit:** if request IP ≠ `/device/code` creation IP, emit `oauth.device_code_cross_ip_poll` (payload: `token_id`, `subject_email`, `creation_ip`, `poll_ip`). 
Does not block — RFC 8628 allows this; audit enables admin detection. +6. Return success body. `DEL` both keys. + +Success (account subject): + +```json +{ + "token": "dfoa_...", + "expires_at": null, + "account": { "id": "acc_...", "email": "...", "name": "..." }, + "workspaces": [{ "id": "ws_...", "name": "...", "role": "owner" }], + "default_workspace_id": "ws_..." +} +``` + +External SSO subject: `token = dfoe_...`, `account: null`, `workspaces: []`, plus `subject_type: "external_sso"`, `subject_email`, `subject_issuer`. + +## External SSO branch + +EE-only. SSO-verified IdP users without a Dify `accounts` row authenticate at the IdP, return with a signed external-subject assertion, accept a short-lived cookie, then click Authorize → mints `dfoe_`. + +All four External-SSO API Service endpoints (`sso-initiate`, `sso-complete`, `approval-context`, `approve-external`) are gated by the `@enterprise_only` decorator. CE builds short-circuit to 404 before any business logic runs. Account-branch endpoints (`/openapi/v1/oauth/device/{code,token,lookup,approve,deny}`) are **not** decorated. + +```mermaid +sequenceDiagram + autonumber + actor User + participant CLI as difyctl + participant Browser + participant API as Dify API Service (Flask) + participant EE as Enterprise Go + participant IdP + participant KC as OS Keychain + + User->>CLI: difyctl auth login + CLI->>API: POST /openapi/v1/oauth/device/code + API-->>CLI: {device_code, user_code} + + CLI->>Browser: open /device + loop every `interval` sec + CLI->>API: POST /openapi/v1/oauth/device/token + API-->>CLI: authorization_pending + end + + Browser->>API: GET /device + API-->>Browser: Code-entry page + User->>Browser: type ABCD-1234, click Continue + Note over Browser: SPA holds user_code in state.
No session → render LoginForm. + User->>Browser: click "Sign in with SSO" + + Browser->>API: GET /openapi/v1/oauth/device/sso-initiate?user_code=ABCD-1234 + API->>EE: SSOInitiate(intent="device_flow", user_code, redirect_url) + Note over EE: Sign state envelope (JWS):
{Intent, UserCode, Nonce, IdPCallbackURL} + EE-->>API: IdP authorize URL (redirect_uri = existing Enterprise callback,
state = signed JWS) + API-->>Browser: 302 → IdP authorize URL + + Browser->>IdP: authorize + User->>IdP: credentials + MFA + IdP-->>Browser: 302 → Enterprise callback (state echoed) + + Browser->>EE: POST /sso/saml/acs + Note over EE: Verify state JWS + consume state nonce.
dispatchSSOCallback:
Intent="device_flow" → signExternalSubjectAssertion
(email, issuer, user_code, nonce, kid). + EE-->>Browser: 302 → /openapi/v1/oauth/device/sso-complete?sso_assertion=[JWS] + + Browser->>API: GET /openapi/v1/oauth/device/sso-complete?sso_assertion=… + Note over API: Validate blob (JWS+kid, 5-min TTL,
consume nonce, verify user_code pending).
Mint device_approval_grant cookie
(HttpOnly, Path=/openapi/v1/oauth/device, 5-min TTL,
csrf_token inside). + API-->>Browser: Set-Cookie: device_approval_grant
302 → /device?sso_verified=1 + + Browser->>API: GET /openapi/v1/oauth/device/approval-context (cookie auto-attached) + API-->>Browser: {subject_email, subject_issuer, user_code, csrf_token, expires_at} + Note over Browser: Render "Authorize difyctl as sso-user@partner.com?" + User->>Browser: click Authorize + + Browser->>API: POST /openapi/v1/oauth/device/approve-external
Cookie + X-CSRF-Token + Note over API: Validate cookie + CSRF.
Verify body user_code == cookie user_code.
Consume nonce. Mint dfoe_.
INSERT/UPDATE oauth_access_tokens
(account_id=NULL, email, issuer, scope=[apps:run, apps:read:permitted-external]).
Redis device state → approved. Clear cookie. + API-->>Browser: {status: "approved"} + + CLI->>API: POST /openapi/v1/oauth/device/token (next poll) + API-->>CLI: {token: "dfoe_...", subject_type:"external_sso", subject_email, subject_issuer, scopes:[apps:run, apps:read:permitted-external]} + CLI->>KC: store dfoe_ + metadata + CLI-->>User: ✓ Signed in as sso-user@partner.com +``` + +### Enterprise: SSO state envelope + +State passed to the IdP (SAML RelayState / OIDC `state` / OAuth2 `state`) is a compact JWS envelope, signed HS256 with the shared Dify secret (`SECRET_KEY` on api / `DIFY_SECRET_KEY` on Enterprise). `kid` header selects the active key. One secret backs state envelope + subject assertion + approval cookie. + +Envelope claims: + +| Claim | Meaning | +|---|---| +| `intent` | `"webapp"` (legacy) / `"account_login"` / `"device_flow"`. Empty = `"webapp"` | +| `user_code` | Populated when `intent = "device_flow"` | +| `nonce` | Per-initiate; consumed at callback via `SET NX` to defeat state-JWS replay | +| `return_to` | Post-login target (e.g., `/device?user_code=X`); exact-path whitelisted | +| `idp_callback_url` | Existing Enterprise-registered callback (IdP-facing) | +| `app_code` | Empty unless `intent = "webapp"` | +| `redirect_url` | API Service redirect target | + +Enterprise's callbacks reject any state whose signature fails. Signed state is mandatory — no phased rollout flag. + +**Three intents (plus legacy empty/`"webapp"`):** + +| `intent` | Behavior | +|---|---| +| `""` / `"webapp"` | Existing webapp-passport flow. `return_to` ignored. | +| `"account_login"` | Account-branch device-flow handoff. Mints console session, then 302s to `return_to` instead of default `/apps`. | +| `"device_flow"` | SSO-only device-flow handoff. Skips console-session/passport mint; signs external subject assertion; 302s to `idp_callback_url?sso_assertion=<jws>`. 
| + +Initiate handlers (SAML / OIDC / OAuth2 + external variants): + +- `intent="device_flow"`: skip the webapp readiness check, allow empty `app_code`, populate `intent`/`user_code`/`nonce`. +- `intent="account_login"`: populate `intent`; require non-empty `return_to`. +- `return_to` present → validate via exact-path whitelist (`path == "/device"`, query keys ⊆ `{user_code, sso_verified}`). Anything else → 400 `invalid_return_to`. +- Sign state JWS with `kid`, attach to outbound IdP `state` / RelayState. + +Callbacks verify state signature + `kid`, consume state nonce (`SET NX EX state_nonce:{nonce} 600` — defeats re-POST replay on ACS), then dispatch on `intent`: + +- `webapp` → `mintWebappPassport`. +- `account_login` → `mintWebappPassport`, override redirect target with `return_to`. +- `device_flow` → sign short-lived external subject assertion (no `end_user` row, no webapp passport). + +`device_flow` branch signs a short-lived external subject assertion: + +``` +302 Location: <idp_callback_url>?sso_assertion=<jws> + +Signed blob (compact JWS, HS256, shared Dify `SECRET_KEY`): +{ + "sub_type": "external_sso", + "email": "<subject_email>", + "issuer": "<subject_issuer>", + "user_code": "<user_code>", + "nonce": "<128-bit CSPRNG>", + "kid": "api-ee-shared-v1", + "iat": "<unix-ts>", + "exp": "<unix-ts>", + "aud": "api.device_flow.external_subject_assertion" +} +``` + +No `WebSSOLogin` / `WebSSOExternalLogin` call on device_flow path — no `end_user` row, no webapp passport. Enterprise's only job: verify IdP assertion, hand API Service verified identity. + +### API Service: `sso-initiate` + +`GET /openapi/v1/oauth/device/sso-initiate?user_code=<user_code>`. API-internal, not IdP-registered. + +1. **Clear any stale `device_approval_grant` cookie** — `Set-Cookie: device_approval_grant=; Max-Age=0; Path=/openapi/v1/oauth/device`. Defends against cross-tab mixing and stale cookies from Back-button navigation. +2. Validate `user_code` maps to device_code in `pending`. Absent / unknown / not-pending → 400 `invalid_user_code`. +3. Read workspace-wide SSO config. 
None configured → 404 `sso_not_configured`. +4. Determine configured IdP type (exactly one per workspace today — SAML OR OIDC OR OAuth2). +5. Call matching Enterprise initiate with `intent="device_flow"` + `redirect_url="/openapi/v1/oauth/device/sso-complete"` + `user_code` + no `app_code`. +6. Enterprise returns IdP auth URL with signed state attached. API Service 302s user to IdP. + +**Decorator order.** `@enterprise_only` must run **before** `@rate_limit("60/hour/ip")` — otherwise CE 404s consume the bucket. Flask stack: `@enterprise_only → @rate_limit → handler`. + +### API Service: `sso-complete` + +`GET /openapi/v1/oauth/device/sso-complete?sso_assertion=`. + +1. Validate `sso_assertion` JWS signature with key identified by blob `kid` header. Invalid / expired (>5 min) / wrong `aud` / unknown `kid` → 400 `invalid_sso_assertion`. +2. Consume nonce: `SET NX EX sso_assertion_nonce:{nonce} 600`. Replay → 400. +3. Extract `subject_email`, `subject_issuer`, `user_code` from blob. +4. Verify `user_code` still maps to device_code in `pending`. Not pending → 409 — user retries from `/device` without burning another IdP round-trip. +5. **Email-collision reject.** If `subject_email` matches an active Dify Account row (case-insensitive — `func.lower(Account.email) == normalized`, filtered to `AccountStatus.ACTIVE`) → emit `oauth.device_flow_rejected` audit (payload: `subject_type="external_sso"`, `subject_email`, `subject_issuer`, `reason="email_belongs_to_dify_account"`), 302 → `/device?sso_error=email_belongs_to_dify_account`. The SSO branch is reserved for IdP users without a Dify account; account-SSO users must take Button 1. +6. Mint `device_approval_grant` cookie (see §Approval grant cookie). Fresh `nonce`, fresh `csrf_token`, 5-min TTL, signed with active API-side key. +7. `Set-Cookie: device_approval_grant=; HttpOnly; Secure; SameSite=Lax; Path=/openapi/v1/oauth/device; Max-Age=300`. +8. 302 → `/device?sso_verified=1`. 
+ +Cookie-then-redirect means the SPA detects SSO completion via a lookup call, not URL-fragment parsing. No JWT ever reaches page JS. + +### API Service: `approval-context` + +`GET /openapi/v1/oauth/device/approval-context`. No body. Browser attaches `device_approval_grant` cookie automatically (path-match). + +1. Read + validate cookie (signature, `aud`, `exp`, kid resolvable). Missing / invalid → 401 `no_session`. +2. Return `{ subject_email, subject_issuer, user_code, csrf_token, expires_at }`. + +Nonce NOT consumed here. Lookup idempotent — SPA may fetch on mount, refresh, React strict-mode double-render. + +### API Service: `approve-external` + +`POST /openapi/v1/oauth/device/approve-external`. Cookie-authed + CSRF double-submit. + +**Subject invariant:** only External SSO subjects (no `accounts` row) reach this endpoint. The email-collision check at `sso-complete` (step 5 above) plus the explicit re-check here defend in depth — a cookie surviving an aborted `sso-complete` cannot promote an account email to an external SSO token. + +Request headers: `Cookie: device_approval_grant=` + `X-CSRF-Token: `. +Request body: `{ "user_code": "ABCD-1234" }`. + +1. Validate cookie: signature, `aud == "api.device_flow.approval_grant"`, `exp > now()`, kid resolvable. Fail → 401 `invalid_session`. +2. Validate CSRF: header `X-CSRF-Token` == cookie claim `csrf_token`. Mismatch / absent → 403 `csrf_mismatch`. +3. Validate binding: body `user_code` == cookie claim `user_code`. Mismatch → 400 `user_code_mismatch`. +4. `GET user_code:{user_code}` → device_code. Miss → 404. +5. `GET device_code:{device_code}`. Status ≠ pending → 409. +6. **Email-collision reject (defense in depth).** If cookie `subject_email` matches an active Dify Account row (case-insensitive `func.lower(Account.email) == normalized`, filtered to `AccountStatus.ACTIVE`) → emit `oauth.device_flow_rejected` audit, 403 `email_belongs_to_dify_account`. +7. 
Claim cookie nonce: `SET NX EX device_approval_grant_nonce:{nonce} 600`. Already claimed → 401 `session_already_consumed`. +8. Resolve subject from cookie claims: `subject_email`, `subject_issuer`, `account_id = NULL`. +9. Read TTL: `ttl_days = Policy.OAuthTTLDays()`. +10. **Mint policy validation.** `dfoe_` mint locked to `scopes = [apps:run, apps:read:permitted-external]`. Any other requested scope → 400 `mint_policy_violation`. Cross-subject (e.g., approve-external attempting `[full]`) blocked here. +11. Generate `dfoe_` token, hash. Upsert `oauth_access_tokens` — same `ON CONFLICT` upsert as account branch, keyed on `(subject_email, subject_issuer, client_id, device_label)` with `account_id = NULL` and `subject_issuer` populated from cookie claim. `device_label` from Redis `device_code:{device_code}`. +12. `DEL auth:token:{old_hash}` on rotation. +13. Update Redis `device_code:{device_code}` → `{status=approved, subject_email, account_id:null, minted_token, token_id, …}`. `EXPIRE` to `max(remaining_ttl, 60s)`. +14. Emit `oauth.device_flow_approved` with `subject_type: "external_sso"`, `subject_email`, `subject_issuer`, `client_id`, `device_label`, `scopes: [apps:run, apps:read:permitted-external]`, `rotated`, `expires_at`. +15. Respond: `Set-Cookie: device_approval_grant=; Max-Age=0; Path=/openapi/v1/oauth/device`. Body `{ status: "approved" }`. + +CLI poll at `POST /openapi/v1/oauth/device/token` picks up the token. Response: `account: null`, `workspaces: []`, `subject_email` populated. + +## Approval grant cookie + +SSO branch needs to carry IdP-authenticated identity from SSO callback to approve-external endpoint **without** granting console / webapp / `/v1/*` access. Existing webapp-SSO JWT is app-scoped — unsuitable. + +`device_approval_grant` = short-lived compact JWS cookie (HS256, shared Dify `SECRET_KEY`), path-scoped to `/openapi/v1/oauth/device`. Zero authority beyond approving the specific device_code it's bound to. 
`HttpOnly` + `Path=/openapi/v1/oauth/device` + `SameSite=Lax`. + +**Cookie envelope:** + +```json +{ + "iss": "", + "aud": "api.device_flow.approval_grant", + "subject_email": "user@example.com", + "subject_issuer": "https://idp.example.com", + "user_code": "ABCD-1234", + "nonce": "<128-bit CSPRNG>", + "csrf_token": "<128-bit CSPRNG>", + "kid": "api-ee-shared-v1", + "exp": "<unix-ts>", + "iat": "<unix-ts>" +} +``` + +**Cookie attributes:** + +``` +Set-Cookie: device_approval_grant=<jws>; + HttpOnly; Secure; SameSite=Lax; + Path=/openapi/v1/oauth/device; Max-Age=300 +``` + +**Isolation:** + +| Session | Valid on | TTL | Reusable | +|---|---|---|---| +| Console account session | `/console/api/*` | hours, refreshable | yes | +| Webapp passport | `/passport` + webapp routes, scoped to `app_code` | per-app-configured | yes | +| **`device_approval_grant`** | **`/openapi/v1/oauth/device/approval-context` + `/openapi/v1/oauth/device/approve-external` only** | **5 min, one-shot** | **no** (single nonce, bound to single `user_code`) | + +**Enforcement:** + +1. **Path scoping.** `Path=/openapi/v1/oauth/device` — browser does not attach to other URLs. Console / webapp / `/v1/*` / other `/openapi/v1/*` middlewares never see this cookie. +2. **Audience binding.** Validator checks `aud == "api.device_flow.approval_grant"`. Any future cookie with different `aud` → cross-reject. +3. **One-shot nonce.** `SET NX EX device_approval_grant_nonce:{nonce} 600`. Replay → 401 `session_already_consumed`. Nonce burned at approve-external success, not at lookup — user can hit lookup repeatedly without burning. +4. **User-code binding.** Body `user_code` must equal cookie claim. Prevents leaked cookie from approving a different pending device_code. +5. **CSRF double-submit.** Approve must include `X-CSRF-Token` matching cookie claim `csrf_token`. Cookie alone insufficient. `csrf_token` + `nonce` = ≥16 bytes (128-bit) CSPRNG. +6. **Short TTL.** 5 min — covers human approval delay, bounds leak exposure. 
+ +**Nonce TTL 2×.** Redis nonce keys use 600 s (10 min) while cookie / assertion lifetimes are 300 s (5 min). The 2× ratio defeats late-replay if clock skew between Redis and JWS issuer allows a just-expired cookie to verify as non-expired when Redis sees the key gone. + +**Three-nonce model.** Each nonce defends a distinct hop. Removing any one opens a replay class. + +| Nonce | Origin | Consumed at | Redis key | Defeats | +|---|---|---|---|---| +| `state.Nonce` | Enterprise `sso-initiate` | Enterprise ACS / callback (`SET NX`) | `state_nonce:{n}` on Enterprise | Re-POST replay on IdP callback | +| subject-assertion `nonce` | Enterprise `dispatchSSOCallback` | API Service `/openapi/v1/oauth/device/sso-complete` (`SET NX`) | `sso_assertion_nonce:{n}` on API Service | Replay of leaked `?sso_assertion=…` URL | +| cookie `nonce` | API Service `/openapi/v1/oauth/device/sso-complete` | API Service `/openapi/v1/oauth/device/approve-external` (`SET NX`) | `device_approval_grant_nonce:{n}` on API Service | Replay of approval-grant cookie after prior approve | + +**What the cookie cannot do:** reach `/console/api/*` / `/passport` / `/v1/*` / other `/openapi/v1/*` (browser doesn't send it outside `/openapi/v1/oauth/device/*`); approve a different `user_code` (bound at mint); replay after approve (nonce consumed); be read by JS (`HttpOnly`); persist past 5 min. + +**Key rotation.** State envelope + subject assertion + cookie all carry `kid` and use HS256 with the shared Dify `SECRET_KEY` key-set (same secret already shared between API Service and Enterprise — no dedicated signing-key env var). Rotation = append new `kid` to config, overlap window (1 h covers any in-flight 5-min blob), retire old `kid`. Both services reload key-set at process boot and on config reload. One secret, three uses, one rotation. + +## Web UI contract + +One new surface on `dify/web` (Next.js): the `/device` two-button page. 
Account-page management of CLI sessions is CLI-only (`auth devices list/revoke`). Full security headers: `security.md §Anti-framing`. + +### `/device` — two-button login + +Top-level page, unauthenticated entry allowed. User self-selects branch based on identity type. + +Renders the same `LoginForm` React component used by `/signin` with a `variant="device-authorization"` prop. Only the dispatch targets differ per variant. + +``` + { + setPostLoginRedirect('/device?user_code=' + entered) + router.push('/signin') + }} + onSSOLogin={() => redirect('/openapi/v1/oauth/device/sso-initiate?user_code=' + entered)} +/> +``` + +**SSO availability gate.** `/device` derives `ssoAvailable` from `systemFeatures.webapp_auth`: + +```ts +const ssoAvailable = + systemFeatures.webapp_auth.enabled && + systemFeatures.webapp_auth.allow_sso && + Boolean(systemFeatures.webapp_auth.sso_config.protocol) +``` + +Same triplet the existing `/device` page already evaluates (see `dify/web/app/device/page.tsx`). All three fields ship server-side via `GET /console/api/system-features` regardless of edition; CE deploys (`ENTERPRISE_ENABLED=false`) never populate `sso_config.protocol`, so the SSO button never renders. No new system-features field needed. + +**States:** + +1. **Code entry.** Text input, label "Enter the code shown in your terminal", placeholder `ABCD-1234`. Button "Continue". Auto-uppercase, auto-hyphenate. Required before either login button enables. +2. **Login chooser.** Shown if user not authenticated after code entry. + - **Button 1 — "Sign in with Dify account"** (covers password + email-code + GitHub / Google social OAuth + account-SSO). Dispatch: `setPostLoginRedirect('/device?user_code=')` + `router.push('/signin')`. Target persists via sessionStorage (tab-scoped, survives same-tab cross-origin bounces). 
Every login-success handler — password, email-code verify, social-OAuth callback landing via `app-initializer`, account-SSO callback landing via `app-initializer` — consumes `resolvePostLoginRedirect()` before falling to `/apps` default. Account-SSO additionally plumbs `return_to` through IdP state (see below) because signed state is required for IdP cross-origin preservation in principle, but sessionStorage covers the browser-side path. + - **Button 2 — "Sign in with SSO"** (External SSO IdP users, no `accounts` row). Hidden when workspace-wide SSO not configured. Dispatch: `/openapi/v1/oauth/device/sso-initiate?user_code=` → state-intent dispatch → `/openapi/v1/oauth/device/sso-complete` sets `device_approval_grant` cookie → 302 → `/device?sso_verified=1`. SPA calls `GET /openapi/v1/oauth/device/approval-context` to render Authorize. +3. **Authorize screen.** + - Heading: "Authorize Dify CLI" + - Body: `Dify CLI (difyctl) is requesting access to your account. If you did not start this from your terminal, click Cancel.` + - Signed-in-as: `Signed in as ` (session or cookie claim) + - Workspace (account path only): `Default workspace: ` + - Buttons: `Authorize` (primary) + `Cancel` (secondary). No scope checkboxes, no role pickers. +4. **Success.** Heading: "You're signed in". Body: "Return to your terminal to continue." No auto-close, no summary, no revoke. +5. **Error / expired.** Heading: "This code is no longer valid". Body: "The code may have expired or already been used. Run `difyctl auth login` again to get a new one." No retry input. + +**`postLoginRedirect` helper.** `web/app/signin/utils/post-login-redirect.ts` — sessionStorage-backed, 15-min TTL. `setPostLoginRedirect(target)` validates same-origin + exact-path whitelist (`/device` with `{user_code, sso_verified}` query keys; `/account/oauth/authorize` with OAuth-dance keys) before storing. `resolvePostLoginRedirect()` re-validates on read. Tab-scoped — concurrent `/device` tabs don't clobber each other. 
Stale values expire after 15 min. + +**Account-SSO `return_to` plumbing.** Web `sso-auth.tsx` snapshots `postLoginRedirect` into a local `const` on the first synchronous tick of the click handler (defeats React strict-mode double-invoke and tab-duplication races), passes as `return_to` to `/enterprise/sso/{saml,oidc,oauth2}/login?intent=account_login&return_to=`. Enterprise validates exact path, signs into state, honors on callback. + +**Covered sign-in flows:** password, email-code, GitHub, Google, account-SSO — all preserve `/device?user_code=...` via sessionStorage through in-tab navigation and cross-origin callback bounces, consumed by `app-initializer.tsx` or the signin-form success handlers. + +**Known gap:** signup via email-verification link opened in a new tab loses sessionStorage (new browsing context). Signup flow falls to `/apps` default; user manually reopens the CLI-printed `/device` URL. + +### Shared + +- Existing console layout, typography, locale files. +- EN + ZH at launch. + +## Rate limits + +See `security.md §Rate limits` for the full table. Key values: + +- `POST /openapi/v1/oauth/device/code` — 60 / hr / IP. +- `POST /openapi/v1/oauth/device/token` — 1 / `interval` / device_code (RFC 8628 `slow_down`). +- `GET /openapi/v1/oauth/device/sso-initiate` — 60 / hr / IP (`@enterprise_only` gate runs first). +- `POST /openapi/v1/oauth/device/approve-external` — 10 / hr / `subject_email`. +- `POST /openapi/v1/oauth/device/approve` — 10 / hr / session. + +## Audit + +See `security.md §Audit events`. Device-flow-specific events: + +- `oauth.device_flow_approved` — on mint (both branches), carries `rotated`, `subject_type`, `subject_issuer`. +- `oauth.device_flow_denied` — on cancel. +- `oauth.device_flow_rejected` — email-collision reject on SSO branch (`sso-complete` or `approve-external`). +- `oauth.device_code_cross_ip_poll` — CLI polled from different IP than `/device/code` caller. 
diff --git a/cli/docs/specs/server/endpoints.md b/cli/docs/specs/server/endpoints.md new file mode 100644 index 0000000000..689cc2f679 --- /dev/null +++ b/cli/docs/specs/server/endpoints.md @@ -0,0 +1,327 @@ +--- +title: server — endpoints +--- + +# endpoints + +Flat reference of every HTTP endpoint under `/openapi/v1/*` and adjacent surfaces. + +Companion: `middleware.md` (auth behavior), `tokens.md` (storage), `device-flow.md` (flow logic), `security.md` (rate limits + audit). + +## Regions + +| Region | URL prefix | Host | Auth style | +|---|---|---|---| +| Console API (cookie) | `/console/api/*` | API Service (Flask) | Browser console session cookie. No bearer surface | +| OpenAPI (user-scoped programmatic) | `/openapi/v1/*` | API Service (Flask) | User-level bearer (`dfoa_` / `dfoe_`). `dfp_` rejected. Hosts device-flow protocol + approval, identity reads, session management, workspace reads | +| Service API | `/v1/*` | API Service (Flask) | App-scoped key (`app-…`) only. User-level bearers go to `/openapi/v1/*` | +| Enterprise Inner API | Internal Go service | Enterprise Go | Server-to-server only; never called by clients | + +The full `/openapi/v1/*` surface lives in `openapi.md`; this file is the flat HTTP reference. Gateway routing (nginx in dify, Caddy in dify-enterprise, dify-helm chart) all proxy `/openapi/*` to the api backend. + +## Personal Access Tokens + +Not supported. `dfp_` prefix on `/openapi/v1/*` returns 401 `unknown_token_prefix`. No `/console/api/personal-access-tokens` surface, no `personal_access_tokens` table. See `tokens.md §Wire format`. + +## OAuth device flow — account branch + +Approve / deny live under `/openapi/v1/*` and authenticate with the **console session cookie** (the user clicks Authorize from the dashboard). Same handler classes; only the URL prefix differs from cookie-only console routes. 
+ +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/oauth/device/lookup` | Public + rate-limit | Validate entered `user_code`. Returns `{ valid, expires_in_remaining, client_id }` | +| POST | `/openapi/v1/oauth/device/approve` | Console session + CSRF | Approve device flow (account branch — mints `dfoa_`). Body: `{ user_code }` | +| POST | `/openapi/v1/oauth/device/deny` | Console session + CSRF | Deny device flow. Body: `{ user_code }` | + +OAuth-token management lives under `/openapi/v1/account/sessions` (see §Identity + sessions). Token inventory is bearer-authed via OpenAPI — no `/console/api/oauth/authorizations*` surface. + +## OAuth device flow — SSO branch (`@enterprise_only`) + +All four endpoints gated by `@enterprise_only` — CE returns 404. The IdP-side ACS callback URL is the canonical `sso-complete` path below; reconfigure each configured IdP to point at this URL. + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/oauth/device/sso-initiate` | Unauthenticated | Build IdP auth URL via Enterprise initiate. Query `?user_code=`. 302s to IdP | +| GET | `/openapi/v1/oauth/device/sso-complete` | Signed external-subject assertion (5-min TTL, nonce-consumed) | Consume assertion, set `device_approval_grant` cookie (path-scoped to `/openapi/v1/oauth/device`), 302 → `/device?sso_verified=1` | +| GET | `/openapi/v1/oauth/device/approval-context` | `device_approval_grant` cookie | SPA reads session claims. Returns `{ subject_email, subject_issuer, user_code, csrf_token, expires_at }`. Idempotent — nonce not consumed | +| POST | `/openapi/v1/oauth/device/approve-external` | `device_approval_grant` cookie + `X-CSRF-Token` | Approve device_code as External SSO subject. Body `{ user_code }` must match cookie claim. Mints `dfoe_` | + +Decorator order: `@enterprise_only → @rate_limit → handler`. 
+ +## OpenAPI — RFC 8628 protocol (unauthenticated / bearer) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| POST | `/openapi/v1/oauth/device/code` | Public + rate-limit | Device flow: request code. Body `{ client_id, device_label }`. Returns `{ device_code, user_code, verification_uri, expires_in, interval }` | +| POST | `/openapi/v1/oauth/device/token` | Public + rate-limit | Device flow: poll. Body `{ device_code, client_id }`. Per RFC 8628 error codes (`authorization_pending`, `slow_down`, `expired_token`, `access_denied`) | + +## OpenAPI — identity + sessions (bearer) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/account` | Bearer (any user-level) | Polymorphic by subject. Used by post-device-flow validation + `auth status -v` refresh | +| GET | `/openapi/v1/account/sessions` | Bearer | List user's active OAuth tokens. Filters `revoked_at IS NULL AND expires_at > NOW() AND token_hash IS NOT NULL`. Used by `auth devices list` | +| DELETE | `/openapi/v1/account/sessions/self` | Bearer | Revoke session backing this request. Used by `auth logout` | +| DELETE | `/openapi/v1/account/sessions/` | Bearer + subject-match | Revoke specific session by id. See `tokens.md §Subject-match on revoke-by-id`. Used by `auth devices revoke` | + +### `GET /openapi/v1/account` response + +Account subject: + +```json +{ + "subject_type": "account", + "subject_email": "user@example.com", + "account": { "id": "acc_...", "email": "user@example.com", "name": "..." }, + "workspaces": [{ "id": "ws_...", "name": "...", "role": "owner" }], + "default_workspace_id": "ws_..." +} +``` + +External SSO subject (EE): + +```json +{ + "subject_type": "external_sso", + "subject_email": "sso-user@partner.com", + "subject_issuer": "https://idp.partner.com", + "account": null, + "workspaces": [], + "default_workspace_id": null +} +``` + +`subject_type` always present. Absent fields are explicit `null` / `[]`, not omitted — strict-schema agents don't fail. 
+ +## OpenAPI — workspaces (bearer) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/workspaces` | Bearer | List user's workspaces. External SSO subjects (no account) get `[]` | +| GET | `/openapi/v1/workspaces/` | Bearer + member | Workspace details. Non-member returns 404 (not 403 — avoids cross-tenant id leak) | + +## OpenAPI — app (two surfaces, strict subject_type separation) + +Bearer auth via `dfoa_` / `dfoe_`. App is in the URL path — no `X-Dify-App-Id` header. Surface gate (`@accept_subjects(...)`) rejects wrong subject_type before scope check. + +### dfoa_ surface — `/openapi/v1/apps*` (CE + EE) + +| Method | Path | Scope | Deployment | Request / Response | +|---|---|---|---|---| +| GET | `/openapi/v1/apps?workspace_id=` | `apps:read` | CE + EE | `AppListQuery` → `AppPagination`. Params: `workspace_id` **(required)**, `page, limit, mode, name, tag`. List filtered through `_apply_openapi_gate` + AclStrategy (CE: workspace membership only; EE: access_mode allowlist + inner-API for `internal`) | +| GET | `/openapi/v1/apps//describe?workspace_id=` | `apps:read` | CE + EE | `AppDescribeQuery` → `AppDescribeResponse`. Canonical "what is this app". Slim subset via `?fields=info`. See §`/describe` shape | +| POST | `/openapi/v1/apps//run` | `apps:run` | CE + EE | `AppRunRequest` → `AppRunResponse` (or SSE). Mode-agnostic — server dispatches on `apps.mode`. See §`/run` shape | + +Surface: `dfoa_` only. `dfoe_` → 403 `wrong_surface` before Layer 0. `workspace_id` missing on list/describe → 422 `workspace_id_required`. + +### dfoe_ surface — `/openapi/v1/permitted-external-apps*` (EE only) + +| Method | Path | Scope | Deployment | Request / Response | +|---|---|---|---|---| +| GET | `/openapi/v1/permitted-external-apps` | `apps:read:permitted-external` | EE only | `AppPermittedListQuery` → `AppPagination`. Params: `page, limit, mode, name`. Strict validator (`extra='forbid'`) — `workspace_id`, `tag` → 422. 
List filtered through `_apply_openapi_gate` + access-mode allowlist `{public, sso_verified}` | +| GET | `/openapi/v1/permitted-external-apps/` | `apps:read:permitted-external` | EE only | Single-app metadata. Same visibility filter as list. 404 if app not in `dfoe_`'s permitted set | +| POST | `/openapi/v1/permitted-external-apps//run` | `apps:run` | EE only | Same `AppRunRequest` shape as `dfoa_`. Tenant resolved from app row. No `workspace_id` in body | + +Surface: `dfoe_` only. `dfoa_` → 403 `wrong_surface`. Blueprint registered only when `ENTERPRISE_ENABLED=true`; on CE, routes return 404 (absent — no blueprint). + +### Pipeline + +Run + describe routes attach via `@OAUTH_BEARER_PIPELINE.guard(scope=...)`. Pipeline: `BearerCheck → ScopeCheck → SurfaceGate → AppResolver → WorkspaceMembershipCheck (dfoa_ only) → AppAuthzCheck → CallerMount`. Per-token rate limit is enforced inside `BearerAuthenticator.authenticate` (called by `BearerCheck`). Server-side dispatch on `apps.mode` happens after `AppResolver`: + +```text +mode == chat | agent-chat | advanced-chat → existing chat-messages handler +mode == completion → existing completion-messages handler +mode == workflow → existing workflows/run handler +``` + +List routes attach `@validate_bearer + @accept_subjects + @require_scope`. The `_apply_openapi_gate` helper in `api/services/openapi/visibility.py` is the single source for the `enable_api=true` filter — removing it retires the gate. See `middleware.md §Universal openapi gate`. 
+ +### Subject capability matrix + +| Surface | `dfoa_` | `dfoe_` | +|---|---|---| +| `GET /apps` | ✅ scope + Layer-0 membership; `workspace_id` required | ❌ 403 `wrong_surface` | +| `GET /apps//describe` | ✅ scope + Layer-0 + Layer-1 ACL | ❌ 403 `wrong_surface` | +| `POST /apps//run` | ✅ scope + Layer-0 + Layer-1 ACL | ❌ 403 `wrong_surface` | +| `GET /permitted-external-apps` | ❌ 403 `wrong_surface` (even with `full`) | ✅ scope + access-mode filter (EE only) | +| `GET /permitted-external-apps/` | ❌ 403 `wrong_surface` | ✅ scope + access-mode filter (EE only) | +| `POST /permitted-external-apps//run` | ❌ 403 `wrong_surface` | ✅ scope + Layer-1 binary access-mode gate (EE only) | + +### Visibility rules per surface + +**dfoa_ list (`GET /apps`):** + +``` +1. Workspace membership check (caller ∈ tenant_account_joins for workspace_id) — else 403 +2. base_query = apps WHERE workspace_id = W +3. _apply_openapi_gate(base_query) -- enforces enable_api=true +4. on CE: return query -- no ACL filter +5. on EE: + visible = query WHERE access_mode IN {public, internal_all, sso_verified} + internal_set = query WHERE access_mode = 'internal' + permitted = inner_api.batch_check(caller, internal_set.ids) + return visible UNION permitted +``` + +**dfoe_ list (`GET /permitted-external-apps`, EE only):** + +``` +1. base_query = apps WHERE access_mode IN {public, sso_verified} +2. _apply_openapi_gate(base_query) -- enforces enable_api=true +3. return query +``` + +No workspace check, no inner-API. Cross-tenant by design — `dfoe_` is a global SSO identity, not a tenant resident. + +### License gate + +EE-specific surface (`/permitted-external-apps*`) consults existing console/api license helper at request-handling time. License absent / expired → 402 `license_required`. CE deploys do not register the surface and never emit `license_required`. No new env var; reuses existing `ENTERPRISE_API_URL` + license module. 
+ +**`AppRunRequest` shape (mode-agnostic):** + +```json +{ + "inputs": { "key": "value" }, + "query": "user message (chat / agent-chat / advanced-chat only — workflow rejects)", + "files": [ /* optional file refs */ ], + "response_mode": "blocking" | "streaming", + "conversation_id": "conv_abc (chat-family only)", + "auto_generate_name": false, + "workflow_id": "wf_abc (workflow mode only)", + "workspace_id": "ws_abc (informational; audit + future env routing)" +} +``` + +Server pops the caller subject from auth context — clients do not send a `user` field. Server validates per-mode constraints: `query` required for chat-family + rejected for workflow; `inputs` required for workflow; `conversation_id` ignored outside chat-family. Invalid mode/field combo → 422. + +**`AppRunResponse` shape:** matches the existing per-mode response of the handler the server dispatched to (`ChatMessageResponse` / `CompletionMessageResponse` / `WorkflowRunResponse`). CLI renders per-mode using the `mode` echoed from the response envelope. + +**`/describe` shape:** + +```json +{ + "info": { "id", "name", "mode", "description", "tags", "author", "updated_at", "service_api_enabled" }, + "parameters": { "opening_statement", "suggested_questions", "user_input_form", "file_upload", "system_parameters" }, + "input_schema": { "type": "object", "properties": { ... }, "required": [ ... ] } +} +``` + +Canonical "what is this app" surface. Consolidates info + parameters + agent-friendly JSON Schema in one round-trip. `parameters` carries Dify-native `user_input_form` (semantic labels, render hints) for human/CLI rendering. `input_schema` is JSON Schema (Draft 2020-12) derived server-side from `user_input_form` + mode-specific top-level fields (`query` for chat-family, `inputs` for workflow), agent-consumed for tool-call payload generation. All sub-objects always present; absent fields are explicit `null` / `[]`. 
+ +**`AppDescribeQuery` — query params:** + +| Param | Type | Default | Behaviour | +|---|---|---|---| +| `fields` | comma-separated string | omit = all | Allow-list: `info`, `parameters`, `input_schema`. Unknown member → 422. Empty/omitted returns full payload | +| `workspace_id` | UUID | **required on `/apps//describe`** (dfoa_ surface) | Surface gate enforces dfoa_ subject; Layer 0 needs `workspace_id` for membership check. Missing → 422 `workspace_id_required`. Not accepted on `/permitted-external-apps/` (strict validator rejects) | + +Strict validator (`extra='forbid'`). Server skips computation for unrequested blocks: `parameters_payload(app)` runs only if `parameters` or `input_schema` requested; `app_info_payload(app)` only if `info` requested. + +**Slim variants:** + +| Call | Returns | +|---|---| +| `GET /apps//describe` | Full `{info, parameters, input_schema}` | +| `GET /apps//describe?fields=info` | `{info}` only — replaces former `/info` | +| `GET /apps//describe?fields=info,parameters` | Subset, no `input_schema` derivation | + +CLI default fetch is full (single cache entry, 1h TTL — see `apps.md`); slim variant exists for forward-compat external consumers and as a `?fields=info` quick-lookup path. + +**Pagination envelope (`/apps`, `/permitted-external-apps`, `/account/sessions`):** + +```json +{ + "page": 1, + "limit": 20, + "total": 42, + "has_more": true, + "data": [ /* row objects, type-specific */ ] +} +``` + +`data` is the literal field name on these routes. `GET /openapi/v1/workspaces` is the exception: it returns `{"workspaces": [...]}` — no pagination, no envelope. Clients should treat the workspace list as unpaginated until that route migrates. 
+ +**`GET /openapi/v1/apps` data row:** + +```json +{ + "id": "app-abc", + "name": "Support Bot", + "description": "...", + "mode": "chat", + "tags": [{ "name": "prod" }], + "updated_at": "2026-04-27T10:00:00Z", + "created_by_name": "gareth@dify.ai", + "workspace_id": "ws-xyz", + "workspace_name": "Acme Inc." +} +``` + +`workspace_id` + `workspace_name` populated on `/apps` rows (drives `difyctl get apps -A` cross-workspace fan-out cosmetics). `tag` query param resolved by name within target workspace; no-match = empty `data` (not 400). + +**`GET /openapi/v1/permitted-external-apps` — response shape:** + +Same `PaginationEnvelope` shape as `/apps`. `tags` always `[]` on `/permitted-external-apps` rows — tags are tenant-scoped and `dfoe_` is cross-tenant. `created_by_name` always null — author identity not part of the externally-visible surface. `workspace_id` / `workspace_name` omitted — `dfoe_` has no workspace concept. + +Query params: `page`, `limit`, `mode`, `name`. Strict validator (`extra='forbid'`) — `workspace_id`, `tag`, or any unknown param → 422. + +Blueprint registered only when `ENTERPRISE_ENABLED=true`. CE → 404 (route absent). Implementation calls Enterprise inner-API `POST /inner/api/webapp/externally-accessible-apps` (see §Inner APIs); on 5xx the route returns 503 fail-closed. License absent → 402 `license_required`. + +**Snapshot semantics.** The `total` count reflects the EE-side cached list at request time. Access-mode mutations between auto-paginated calls can shift `total`; the CLI should treat `has_more=false` as authoritative and not assume `total` is monotonic across pages. EE invalidates its cache on every access-mode write and falls back to a 10-minute TTL safety net. 
+ +**Service-API `/v1/parameters` — unchanged.** + +The legacy app-key surface at `GET /v1/parameters` (handled by `service_api/app/app.py:ParametersApi`) keeps the existing `Parameters` shape (`opening_statement`, `suggested_questions`, `user_input_form`, `file_upload`, `system_parameters`). User-bearer callers use `/openapi/v1/apps//describe` instead. + +## Service API — run-slice (app-scoped key) + +`/v1/*` is now app-scoped-key only. Subject to Service API toggle on `apps.service_api_enabled` (see `middleware.md §Service API toggle`). User-level bearers (`dfoa_` / `dfoe_`) hit `POST /openapi/v1/apps//run` (see §OpenAPI — app). + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| POST | `/v1/chat-messages` | App key | Chat apps | +| POST | `/v1/completion-messages` | App key | Completion apps | +| POST | `/v1/workflows/run` | App key | Workflow apps | +| Existing | `/v1/files/upload`, `/v1/meta`, `/v1/parameters`, `/v1/info` | App key | Existing app-key surface | + +### Request headers (bearer on `/openapi/v1/*` + `/v1/*` app-key) + +| Header | Required | Purpose | +|---|---|---| +| `Authorization: Bearer ` | yes | Identifies subject. `app-…` only on `/v1/*`; `dfoa_` / `dfoe_` only on `/openapi/v1/*`. `dfp_` rejected | +| `X-Dify-Env: ` | reserved | CLI may send; server accepts + ignores | +| `X-Dify-Workspace-Id: ` | reserved | Accepts + ignores | +| `User-Agent: difyctl/ (; ; )` | yes | Attribution in access logs | + +**Reserved headers.** `X-Dify-Env` and `X-Dify-Workspace-Id` are accepted (no 400) and ignored. App id travels in the URL path — no `X-Dify-App-Id` header. + +## `/console/api/*` bearer + +Not supported. `/console/api/*` stays cookie-only. User-scoped programmatic features live under `/openapi/v1/*` — see `openapi.md`. + +## Inner APIs + +All `/inner/api/*` endpoints authenticate via `Enterprise-Api-Secret-Key` header (= `INNER_API_KEY` env on the receiving end). 
Failure shape is the simple `{"error": "..."}` form, NOT the user-facing `{ code, message, hint }` envelope — inner APIs are gateway/s2s-internal. + +### Hosted by enterprise svc (gateway / api → EE) + +| Method | Path | Caller | Purpose | +|---|---|---|---| +| GET | `/inner/api/webapp/permission?appId=<app_id>&userId=<user_id>` | dify api middleware | Layer-1 ACL check for account subjects on `internal` access mode. Returns `{ result: bool }`. Not called for other modes | +| POST | `/inner/api/webapp/permission/batch` | dify api `/apps` list handler | Batch variant of the above. Body `{ user_id, app_ids: [...] }` → `{ permitted: [app_ids] }`. Used by list-time visibility filter on EE so a workspace scan with many `internal` apps is one round-trip | +| POST | `/inner/api/webapp/externally-accessible-apps` | dify api `/permitted-external-apps` handler | Deployment-wide list of apps with `access_mode` ∈ `{public, sso_verified}`. Body `{ page, limit, mode?, name? }` → `{ data: [{ app_id, tenant_id, mode, name, updated_at }], total, has_more }`. No subject in the request — same result for every caller. EE caches the merged list under a single Redis key with explicit invalidation on every access-mode write + 10-min TTL safety net. Fail-closed: dify-api translates 5xx to 503 `permitted_external_apps_unavailable` | +| POST | `/inner/api/rbac/check-access` | EE gateway (`dify_rbac` Caddy module) | Workspace-role RBAC check. Body `{ account_id, tenant_id, scene, resource_type, resource_id }` → `{ allowed, reason, ... }` | +| POST | `/inner/api/auth/check-access-oauth` | EE gateway (`dify_rbac` Caddy module) | Token resolve for EE gateway. Body `{ token }` → `{ account_id, tenant_id, subject_type, client_id, scope, expires_at[, subject_email, subject_issuer] }`. New endpoint mirroring RBAC check-access pattern. 
See `gateway.md §Inner API — auth check-access (OAuth)` | + +### Hosted by dify api (EE → api) + +| Method | Path | Caller | Purpose | +|---|---|---|---| +| GET | `/inner/api/policy/oauth-ttl?tenantId=<tenant_id>` | enterprise svc | Return `{ ttl_days: <int> }` for tenant. dify api Redis-caches 60 s | +| POST | `/inner/api/enterprise/apps/batch-metadata` | EE `WebAppUsecase.ListExternallyAccessibleApps` | Hydrate app metadata for a batch of app ids. Body `{ ids: [<= 500] }` → `{ data: [{ id, tenant_id, mode, name, updated_at }] }`. Filters out non-`status=normal` apps server-side. Used to merge EE-side `web_app_settings` rows with api-side `apps` columns before caching | + +Fallback when EE unreachable or CE deployment: env var `OAUTH_TTL_DAYS` → hardcoded `14`. + +## Request flow summary + +See `middleware.md §Request flow` for the full pipeline applied to all `/v1/*` endpoints. diff --git a/cli/docs/specs/server/gateway.md b/cli/docs/specs/server/gateway.md new file mode 100644 index 0000000000..ad3bdfee16 --- /dev/null +++ b/cli/docs/specs/server/gateway.md @@ -0,0 +1,190 @@ +--- +title: server — gateway +--- + +# gateway + +EE gateway integration for `/openapi/v1/*` user-level bearers. RBAC enforcement, token resolve via inner-API, path filter, error envelope translation. + +Active on EE deployments only. CE (`ENTERPRISE_ENABLED=false` in dify api) has no gateway — `/openapi/v1/*` reaches dify api directly; api middleware does Layer 0 workspace-membership in its place. EE (`ENTERPRISE_ENABLED=true`) routes through gateway → api; api middleware skips Layer 0. + +Companion: `middleware.md §Authorization` (api authz pipeline, including CE-only Layer 0), `tokens.md` (storage + cache), `security.md §Inner API trust boundary` (invariants), `endpoints.md §Inner APIs` (HTTP surface). 
+ +## Scope + +| Deployment | Gateway | RBAC | Token resolve | +|---|---|---|---| +| CE | absent | — (api Layer 0 covers workspace membership only) | api middleware direct | +| EE | dify-enterprise gateway (Caddy + `dify_rbac` module) | gateway interceptor | gateway → enterprise svc inner-API + api middleware re-resolve | + +External SSO `dfoe_` subjects: gateway skips RBAC. Bounded by L2 token scopes (`apps:run`, `apps:read:permitted-external`) + L1 ACL `sso_verified` access-mode gate. + +## Request flow + +``` +CLI ─Authorization: Bearer dfoa_xxxxx──→ Gateway (Caddy + dify_rbac patched) + │ + ├─ filter chain (audit-style): + │ skip /openapi/v1/oauth/device/* + │ + ├─ POST /inner/api/auth/check-access-oauth + │ header: Enterprise-Api-Secret-Key + │ ←── enterprise svc + │ (Go: sha256 → Redis auth:token:{hash} → dify_db read view) + │ on 5xx/timeout → 503 fail-closed + │ + ├─ POST /inner/api/rbac/check-access + │ ←── enterprise svc (existing rbac branch) + │ on 5xx/timeout → 503 fail-closed + │ + ├─ on 401/403: translate {error} → {code,message,hint} + │ + └─ forward request UNCHANGED → dify api + │ + ├─ middleware step 3a-e + │ sha256 → Redis auth:token:{hash} → dify_db + │ + ├─ Layer 0 (CE only): workspace membership + ├─ Layer 1: Resource ACL + └─ Layer 2: Token scope → handler +``` + +The gateway does NOT inject identity headers. Api always re-resolves from the token store on the request hot path. The two resolves (gateway-side via inner-API, api-side direct) are independent — both Redis-cached against the shared `auth:token:{hash}` key. + +## Path filter + +Mirrors the existing `dify_audit` filter pattern (`pkg/gateway/audit/filter.go` — `Chain` of `Filter`s, all-must-pass). The `dify_rbac` module exposes a hardcoded `DefaultChain` with a single `PrefixFilter` whitelist. Whitelist miss → no RBAC, forward to api as-is. 
+ +**Whitelist (RBAC enforced):** + +``` +/openapi/v1/account +/openapi/v1/workspaces +/openapi/v1/apps +/openapi/v1/runs +``` + +**Implicit skip (whitelist miss → no RBAC):** + +| Path | Why skipped | +|---|---| +| `/openapi/v1/oauth/device/code` | Bearer-less — device-flow start | +| `/openapi/v1/oauth/device/token` | Bearer-less — device-flow poll | +| `/openapi/v1/oauth/device/sso-complete` | Browser-side SSO continuation | +| `/openapi/v1/oauth/device/approval-context` | Cookie-authed self-action | +| `/openapi/v1/oauth/device/approve` | Cookie-authed self-action | +| `/openapi/v1/oauth/device/approve-external` | Cookie-authed self-action | +| `/openapi/v1/oauth/device/deny` | Cookie-authed self-action | + +Whitelist lives in module source, not Caddyfile — paths are dify-specific, no operator config needed. + +Caddyfile stays single-block: + +```caddyfile +handle /openapi/v1/* { + dify_rbac + reverse_proxy http://api:5001 +} +``` + +## Inner API — auth check-access (OAuth) + +Gateway → enterprise svc. Plain `http.Handler`, `Enterprise-Api-Secret-Key` header for caller auth, snake_case JSON, `{"error": "..."}` failure shape. + +``` +POST /inner/api/auth/check-access-oauth +Headers: + Enterprise-Api-Secret-Key: + Content-Type: application/json +Body: + { "token": "dfoa_xxxxxxxxxxxxxxxxxx" } +``` + +Success body (200): + +```json +{ + "account_id": "", + "tenant_id": "", + "subject_type": "account", + "client_id": "difyctl", + "scope": ["full"], + "expires_at": 1750000000, + "subject_email": "", + "subject_issuer": "" +} +``` + +`account_id`, `subject_email`, `subject_issuer` are `omitempty`. SSO subjects emit `account_id=""` + populated `subject_email` / `subject_issuer`; account subjects emit `account_id` populated + empty SSO fields. + +### Resolve logic + +Same as api-side middleware step 4: + +1. `sha256(token)` for lookup key. +2. Redis cache: `GET auth:token:{hash}`. Hit → return cached struct. +3. 
DB read of `oauth_access_tokens` on cache miss (`Skip: true` ent schema). +4. Cache result on DB hit (`SETEX auth:token:{hash} 60 <serialized AuthContext>`); cache `"invalid"` (10 s TTL) on miss / expired / revoked. + +### Hard-expire + +On `expires_at <= NOW()`, enterprise svc inner-API performs the same hard-expire as dify api Python middleware (CAS `UPDATE oauth_access_tokens SET revoked_at = NOW(), token_hash = NULL WHERE id = :id AND revoked_at IS NULL`, `DEL auth:token:{hash}`, `SETEX auth:token:{hash} 10 "invalid"`, audit emit `oauth.token_expired`, return 401 `token_expired`). Same flow as `tokens.md §Detection + hard-expire on middleware hit`. Idempotent CAS makes concurrent hits safe. + +### Failure responses + +| HTTP | Body | Cause | +|---|---|---| +| 405 | `{"error": "method not allowed"}` | Non-POST request | +| 401 | `{"error": "invalid inner api key"}` | `Enterprise-Api-Secret-Key` missing or mismatched | +| 400 | `{"error": "invalid request body: ..."}` | JSON decode failed | +| 401 | `{"error": "invalid_token"}` | Hash miss in token store | +| 401 | `{"error": "token_expired"}` | Row `expires_at` past (mutation performed — see Hard-expire) | +| 401 | `{"error": "token_revoked"}` | Row `revoked_at IS NOT NULL` | +| 500 | `{"error": "inner api secret key not configured"}` | Server `INNER_API_KEY` env empty | + +Gateway side translates these to the user-facing `{code, message, hint}` envelope before responding to CLI — see §Error envelope. + +## Cache + +| Layer | Key | TTL | Purpose | +|---|---|---|---| +| Api-side AuthContext cache | `auth:token:{hash}` | 60 s | Hot path on every request — gateway forwards do not bypass it | + +Cache key `auth:token:{hash}` shared across dify api and enterprise svc on the same Redis instance. + +## Failure modes + +Both inner-API calls (token resolve + RBAC check) are fail-closed: any 5xx, network error, or timeout from `/inner/api/auth/check-access-oauth` or `RBAC_INNER_CHECK_URL` → 503 to the client. 
Matches existing `dify_rbac` behavior in `pkg/gateway/rbac/rbac.go` (`writeRBACError(w, http.StatusServiceUnavailable, ...)` on checker error). + +| Failure | Gateway response | +|---|---| +| Resolve inner-API timeout / 5xx | 503 `{"error": "auth resolve unavailable"}` | +| RBAC inner-API timeout / 5xx | 503 `{"error": "rbac check unavailable"}` (existing) | +| Resolve returns 401 (token invalid / expired / revoked) | 401 — see §Inner API failure table | +| RBAC returns `allowed: false` | 403 `{"error": ""}` (existing) | + +Stale-cache fallback deferred. Operators rely on health monitoring and redundancy rather than gateway-side fault tolerance. CLI surfaces 503 as a transient error and retries with backoff (see `auth.md §HTTP error handling`). + +## Error envelope + +Gateway translates inner-API `{"error": "..."}` responses into the user-facing `{ code, message, hint }` envelope (see `endpoints.md`) before responding to CLI clients on `/openapi/v1/*`. CLI error handling stays uniform regardless of which layer denied the request. + +Translation lives in the `dify_rbac` module's `ServeHTTP` exit path — same place as the existing `writeRBACError`, but keyed on the inner-API source. + +| Source | HTTP | Inner-API `error` | CLI `code` | CLI `message` | CLI `hint` | +|---|---|---|---|---|---| +| resolve | 401 | `invalid_token` | `invalid_token` | Bearer token not recognized. | Run `difyctl auth login` to mint a fresh token. | +| resolve | 401 | `token_expired` | `token_expired` | Bearer token has expired. | Run `difyctl auth login` to mint a fresh token. | +| resolve | 401 | `token_revoked` | `token_revoked` | Bearer token was revoked. | The owner revoked this token. Re-authenticate. | +| RBAC | 403 | `` | `rbac_denied` | `` (echoed) | Ask your workspace admin to grant your role permission for this action. | + +Transient 5xx not translated. 
`auth resolve unavailable` and `rbac check unavailable` keep the existing `{"error": "..."}` shape — matches `dify_rbac` today and lets CLI treat them as transient and retry with backoff (see `auth.md §HTTP error handling`). + +Inner-API auth failures (`invalid inner api key`) are operator-facing, not user-facing — gateway logs and returns 503 `auth resolve unavailable` to the client. Never leaks the inner-key state. + +## Deployment invariants + +See `security.md §Inner API trust boundary` for the canonical invariants table. Two load-bearing for the gateway: + +- EE deploys MUST keep `/openapi/v1/*` reachable only through the gateway. +- `/inner/api/*` MUST NOT be exposed on public ingress in any deploy. diff --git a/cli/docs/specs/server/middleware.md b/cli/docs/specs/server/middleware.md new file mode 100644 index 0000000000..dc5bb22747 --- /dev/null +++ b/cli/docs/specs/server/middleware.md @@ -0,0 +1,244 @@ +--- +title: server — middleware +--- + +# middleware + +Bearer middleware for user-level tokens. Prefix dispatch, three-layer authorization pipeline. + +Runs against a registered prefix allowlist — it never touches `/console/api/*`. + +Companion: `tokens.md` (storage + cache), `endpoints.md` (HTTP surfaces), `device-flow.md` (mint paths). + +## Allowlist + +Middleware enters only on: + +- `/openapi/v1/*` — user-scoped programmatic API (bearer, never cookie) +- `/v1/*` — Service API (app-scoped key only) + +`/console/api/*` stays cookie-only. + +## Request flow + +The `/openapi/v1/*` blueprint exposes auth as a composable pipeline (`api/controllers/openapi/auth/`). Run + describe endpoints (`/apps//run`, `/apps//describe`, `/permitted-external-apps/`, `/permitted-external-apps//run`) attach the full pipeline; list + identity endpoints attach `validate_bearer + require_scope + require_workspace_member` as inline decorators that compose to the same effective gate. + +1. **Parse `Authorization` header.** Missing / malformed → 401 `missing_bearer_token`. +2. 
**Prefix dispatch.** + - `app-` on `/v1/*` → existing app-scoped key path (see §Coexistence). + - `app-` on `/openapi/v1/*` → 401 `invalid_prefix`. + - `dfoa_` / `dfoe_` on `/openapi/v1/*` → continue. + - `dfp_` → 401 `unknown_token_prefix` (PAT not supported). + - Else → 401. +3. **Feature gate.** `ENABLE_OAUTH_BEARER=false` or authenticator singleton unbound → 503 `bearer_auth_disabled`. +4. **Bearer authenticate.** + - Hash = `sha256(token)`. + - Redis `GET auth:token:{hash}` — `"invalid"` → 401; cached `AuthContext` → skip DB. + - DB fallback (cache miss) reads `oauth_access_tokens` filtered on `token_hash + revoked_at IS NULL` (no `expires_at` filter). Live row → build `AuthContext` + `SETEX auth:token:{hash} 60 `. Past-expiry row → atomic hard-expire (`UPDATE … SET revoked_at=NOW(), token_hash=NULL`), `DEL` Redis entry, emit `oauth.token_expired`, write 10 s `"invalid"` negative cache, return 401 `token_expired`. Missing row → 10 s `"invalid"` negative cache, 401 `invalid_token`. +5. **Subject + scope.** Prefix → subject + scope, computed not stored: + - `dfoa_` → `AccountContext`, scopes = `[full]`. + - `dfoe_` → `SSOIdentityContext`, scopes = `[apps:run, apps:read:permitted-external]`. + - Sanity: `row.account_id IS NULL` ↔ `dfoe_`. Mismatch → 500 `internal_state_invariant` + audit. Scope check derives from prefix on every request; mint endpoints do not accept a `scopes` field. +6. **Surface gate.** Each `/openapi/v1/*` route declares accepted subject types. Wrong subject_type → 403 `wrong_surface` (e.g., `dfoa_` on `/permitted-external-apps`, `dfoe_` on `/apps`). Gate runs before workspace check. +7. **Workspace membership (Layer 0; CE only, dfoa_ surface only).** See §Authorization Layer 0. +8. **App resolution** (describe + run endpoints). + - Read `app_id` from URL path (`` view arg). No `X-Dify-App-Id` header. + - Load app row. Absent → 404. + - Universal openapi gate: `_apply_openapi_gate` helper enforces `apps.enable_api = true`. 
Gate-fail → 404 (no existence leak). See §Universal openapi gate. + - Attach `AppContext`. +9. **App ACL (Layer 1).** See §Authorization Layer 1. Two-step: subject-vs-access-mode rule table; inner-API call only when mode = `internal` for an account subject. +10. **Scope enforce (Layer 2).** `required_scope ⊆ context.scopes` with `full` umbrella (`full` ⊇ every narrower scope). +11. **Handler.** Receives `AuthContext` (+ optional `AppContext`). + +`X-Dify-Env` is accepted and ignored — env-aware ACL is not wired. See `endpoints.md §Request headers`. + +**Subject-type gate:** two layers. (a) Surface gate at step 6 rejects wrong-subject-type before anything else runs. (b) Scope is authoritative within accepted surface. External subjects are mint-policy-locked to `[apps:run, apps:read:permitted-external]` → cannot reach `@require_scope(full)` endpoints even if they bypass step 6 by some bug. Defense in depth. + +## Coexistence with app-scoped keys + +Each surface accepts only its own token kinds: + +| Surface | Accepted prefix | Rejected | +|---|---|---| +| `/v1/*` | `app-` | `dfoa_`, `dfoe_`, `dfp_` | +| `/openapi/v1/apps*` | `dfoa_` only | `dfoe_` → 403 `wrong_surface`, `app-` → 401 `invalid_prefix`, `dfp_` → 401 `unknown_token_prefix` | +| `/openapi/v1/permitted-external-apps*` (EE only) | `dfoe_` only | `dfoa_` → 403 `wrong_surface`, others same as above | +| `/openapi/v1/{account,workspaces,oauth/*}` | `dfoa_` (+ public for device-flow protocol) | `dfoe_` accepted on `/account` for identity readback only | + +Shared service-layer code (use-cases) accepts whichever context, operates on resolved app/tenant. `/v1/api-keys` management surface untouched. + +## Service API toggle (legacy) + +On `/v1/*` (app-key surface), when `apps.enable_api = false`, every request for that app is rejected with `service_api_disabled`. No token-type bypasses the toggle. No console escape hatch — admin must flip the toggle. 
+ +For `/openapi/v1/*` (user-bearer surface), the same column is consulted via §Universal openapi gate — not as a separate service-API toggle, just as one of the filter conditions in `_apply_openapi_gate`. + +## Universal openapi gate + +Every `/openapi/v1/*` visibility path applies `enable_api` through one helper. No inline filters scattered across handlers. + +```python +# api/services/openapi/visibility.py + +def _apply_openapi_gate(query): + """Universal gate for /openapi/v1/* surface. Filter to apps reachable + through the user-bearer surface. Remove this filter to retire the gate.""" + return query.filter(App.enable_api.is_(True)) + + +def visible_apps_for_subject(subject, **constraints): + q = base_app_query(**constraints) + q = _apply_openapi_gate(q) + return AclStrategy.for_subject(subject).filter(q, subject) +``` + +Entry points routed through this helper: + +- `GET /apps`, `GET /apps//describe`, `POST /apps//run` (dfoa_ surface) +- `GET /permitted-external-apps`, `GET /permitted-external-apps/`, `POST /permitted-external-apps//run` (dfoe_ surface) + +To remove the `enable_api` filter in the future: delete the helper body / make it a no-op. One file, one function. No grep across handlers needed. + +## CSRF — credential-based, not path-based + +CSRF is required for requests authenticated by **ambient cookies** (console session, `device_approval_grant`). Bearer-authenticated requests are CSRF-exempt — no ambient credentials, attacker can't cause the browser to attach a bearer via `Authorization`. 
+ +| Surface | Credential | CSRF | +|---|---|---| +| `/v1/*` with `app-` key | none ambient | exempt | +| `/openapi/v1/*` with bearer (`dfoa_` / `dfoe_`) | none ambient | exempt | +| `/openapi/v1/oauth/device/{approve,deny}` | console session cookie | required (existing console CSRF token) | +| `/openapi/v1/oauth/device/approve-external` | `device_approval_grant` cookie | required (per-flow CSRF baked into approval-context) | +| `/console/api/*` with cookie session | cookie | required (existing) | + +Rule is credential-based — surface alone doesn't determine CSRF posture; what matters is whether the browser attaches a credential the user didn't explicitly send. + +## Authorization + +Every user-level bearer request passes orthogonal layers in api middleware, coarsest-to-narrowest. AND semantics — all must pass. Deny at any layer → 403. + +``` +S. Surface gate (subject_type allowed on this URL?) enforced for all bearer routes +0. Workspace membership (account active in tenant?) dfoa_ surface only (CE always; EE for the dfoa_ surface) +1. Resource ACL (subject ∈ access_mode-permitted set?) enforced for list + describe + run +2. Token scope (bearer's scope ⊇ required_scope?) enforced +``` + +RBAC (workspace role → action allowed?) is NOT in the api auth pipeline. RBAC is enforced upstream at the EE gateway. See `gateway.md`. The `internal` access-mode inner-API call below is *not* RBAC — it's an EE-specific app-ACL check. + +### Surface gate (Layer S) + +First gate. Each `/openapi/v1/*` route declares accepted subject types via decorator (`@accept_subjects(USER_ACCOUNT)` or `@accept_subjects(USER_EXT_SSO)`). Wrong subject_type → 403 `wrong_surface`. 
+ +Routes: + +| Path prefix | Accepted | Rejected | +|---|---|---| +| `/openapi/v1/apps*` | `dfoa_` | `dfoe_` → 403 `wrong_surface` | +| `/openapi/v1/permitted-external-apps*` (EE only) | `dfoe_` | `dfoa_` → 403 `wrong_surface` | +| `/openapi/v1/workspaces*` | `dfoa_` | `dfoe_` → 403 `wrong_surface` (no workspace concept) | +| `/openapi/v1/account` | both | — (identity readback) | + +CE-only deploys never register `/permitted-external-apps*` blueprints — `dfoe_` minting is disabled at the device-flow mint endpoint when `ENTERPRISE_ENABLED=false`. + +### Layer 0 — Workspace membership (dfoa_ surface only) + +Only the `/apps*` and `/workspaces*` surface runs Layer 0. The `/permitted-external-apps*` surface has no workspace concept — Layer 0 skipped entirely. + +For account-subject bearers (`dfoa_`), the layer verifies (a) an active `tenant_account_joins` row exists for `(account_id, tenant_id)` (tenant resolved from `?workspace_id=` query or `app.tenant_id`) and (b) `accounts.status = 'active'`. Either fails → 403 `workspace_membership_revoked`. + +On EE deploys, gateway RBAC interceptor enforces stricter semantics in addition. + +Cache: same Redis `auth:token:{hash}` AuthContext entry stores membership on a `verified_tenants: { tenant_id: bool }` map (60 s TTL). + +### Layer 1 — Resource ACL + +Applied to list + describe + run on both surfaces. Strategy evaluates two steps in order, gated on `(subject_type, deploy, web_app_settings.access_mode)`. + +**Step 1 — subject vs access-mode rule table.** Pure dispatch, no IO. EE-specific behavior surfaces here; CE has no ACL (app `access_mode` column has no `internal` / `internal_all` / `sso_verified` values on CE, no inner-API to consult). 
+ +| `access_mode` | dfoa_ on CE | dfoa_ on EE | dfoe_ on EE (CE has no dfoe_) | +|---|---|---|---| +| `public` | allow | allow | allow | +| `internal_all` | (n/a) | allow | deny | +| `sso_verified` | (n/a) | allow | allow | +| `internal` | (n/a) | **call inner API** (Step 2) | deny | + +Visibility for list endpoints applies the same rule table to filter rows. Describe/run reject the request after row-load. + +**Step 2 — inner-API permission check** (only for dfoa_ + `internal` mode on EE): + +``` +GET /inner/api/webapp/permission?appId=<app_id>&userId=<user_id> +→ { result: bool } +``` + +For list endpoints, the handler issues a batch variant (`POST /inner/api/webapp/permission/batch` with `[app_ids]`) so a workspace-scan over many `internal` apps is one round-trip. + +Failure (network error / 5xx / timeout) → 503 to client. No fallback to "allow", no stale-cache reuse. Mirrors gateway-side `dify_rbac` behavior (see `gateway.md §Failure modes`). Modes that never reach Step 2 are immune to inner-API outage. + +`X-Dify-Env` is accepted and ignored — the inner API takes `app_id + user_id` only; no env dimension exists. + +**App API keys (`app-` prefix)** bypass Layer 1 entirely — key was created by the app owner who vouches for its callers. + +### Layer 2 — Token scope + +Narrowest layer; ceiling on what the bearer can attempt. + +Endpoints declare required scope at registration; the scope check enforces `required_scope ⊆ context.scopes`. `full` is the umbrella — it satisfies every check. Endpoints without explicit scope declaration implicitly require `full`. Tokens without the required scope → 403 `insufficient_scope`. + +**Prefix-derived scopes (no per-token storage):** + +| Token | Subject | Scopes | +|---|---|---| +| `dfoa_` | account | `[full]` | +| `dfoe_` | External SSO | `[apps:run, apps:read:permitted-external]` | + +Derivation happens at request-flow step 5 — prefix → scopes directly, no row inspection. 
Wire format is `colon:lower`; SCREAMING_CASE is enum-implementation detail. + +**Mint policy.** Hard rejection of cross-subject scopes at device-flow mint: + +- `apps:read:permitted-external` → minted only on EE, only for `dfoe_`. Cross-mint → 400 `mint_policy_violation`. +- `dfoa_` mint default: `[full]`. Future PAT may narrow. +- `dfoe_` mint default: `[apps:run, apps:read:permitted-external]`. No alternatives. + +### Concrete results + +For `POST /openapi/v1/apps//run` (dfoa_): + +| Subject | Surface | L0 | L1 ACL | L2 Scope | +|---|---|---|---|---| +| dfoa_ on CE | accept | workspace member required | n/a (no ACL on CE) | `full` ⊇ `apps:run` → allow | +| dfoa_ on EE | accept | workspace member required | rule table; inner API only for `internal` | `full` ⊇ `apps:run` → allow | +| dfoe_ | 403 `wrong_surface` | — | — | — | + +For `POST /openapi/v1/permitted-external-apps//run` (dfoe_, EE only): + +| Subject | Surface | L0 | L1 ACL | L2 Scope | +|---|---|---|---|---| +| dfoa_ | 403 `wrong_surface` | — | — | — | +| dfoe_ on EE | accept | skipped (no workspace) | binary gate (`public` / `sso_verified` only) | `apps:run` ⊇ `apps:run` → allow | + +## Rate limit + +Bearer-authenticated `/openapi/v1/*` requests gate through a per-token bucket — default **60 req/min**, configurable via `OPENAPI_RATE_LIMIT_PER_TOKEN` env. Bucket is a **shared Redis counter** keyed on `sha256(token)`, applied across all api instances (multi-replica deploys share the limit, not multiply it). Exceed → 429 with `Retry-After` header. Per-IP limits on unauthenticated device-flow endpoints unchanged — see `security.md §Rate limits`. + +## OAuth `client_id` + +`dfoa_` / `dfoe_` tokens carry `client_id` (always `"difyctl"` until an admin-registered client allowlist exists; controlled by `OPENAPI_KNOWN_CLIENT_IDS` env, default `"difyctl"`). Used for: + +- **Scope-policy dispatch at middleware** (rules key on `client_id + subject_type`). +- **CLI grouping** in `auth devices list`. 
+- **Audit attribution** — every `oauth.*` event carries `client_id`. + +Server does not bind inbound requests to a specific client identifier: + +- Bearer tokens cannot reach `/console/api/*` — surfaces are disjoint. +- Account OAuth scope = `full`; extracting and using from curl is functionally identical to the user's authenticated console session. +- External SSO OAuth = `apps:run` + `apps:read:permitted-external` + SSO access gate — misuse from curl bounded to what the subject is already permitted. + +CLI-side defense (no raw-bearer export command, keychain storage, same-device rotate-in-place) remains. Details: `../auth.md §Bearer token kinds`. + +## X-Dify-Workspace-Id + +Reserved header. Accepted and ignored by resource endpoints. diff --git a/cli/docs/specs/server/openapi.md b/cli/docs/specs/server/openapi.md new file mode 100644 index 0000000000..538878b422 --- /dev/null +++ b/cli/docs/specs/server/openapi.md @@ -0,0 +1,241 @@ +--- +title: server — openapi +--- + +# openapi + +The `/openapi/v1/*` endpoint group: user-scoped, bearer-authed, programmatic-API surface. Hosts everything difyctl, third-party scripts, and integrations talk to. + +Companion: `endpoints.md` (flat HTTP reference), `tokens.md` (storage + prefixes), `device-flow.md` (RFC 8628 logic), `middleware.md` (request pipeline). + +## Surface boundaries + +| Group | Auth | Role | +|---|---|---| +| `/openapi/v1/*` | Bearer (`dfoa_` / `dfoe_`) | User-scoped programmatic surface — identity, sessions, device flow, workspaces, apps | +| `/v1/*` | App-scoped key (`app-`) | Service API, app-key-only | +| `/console/api/*` | Browser cookie | Dashboard — no bearer surface | +| `/inner/api/*` | `Enterprise-Api-Secret-Key` header | Server-to-server only | + +## URL prefix + +``` +/openapi/v1/... +``` + +Distinct from `/v1/` (service_api per-app keys), `/console/api/` (browser cookie), `/inner/api/` (s2s). + +Versioned at the prefix level — `/openapi/v2/` is the future major-version path. 
No mid-version breakage. + +## Auth model + +**Bearer only.** `Authorization: Bearer `. + +Accepted token prefixes: + +| Prefix | Subject | Status | +|---|---|---| +| `dfoa_` | Dify account (device-flow approved from console) | accepted | +| `dfoe_` | External SSO account (EE, IdP-approved) | accepted (EE-gated routes) | +| `dfp_` | Personal Access Token | rejected — 401 `unknown_token_prefix` | +| `app-…` | App-scoped service_api key | rejected — `/openapi/v1/*` routes to `/v1/*` | + +## Scope model + +`AuthContext.scopes` (frozenset on `g.auth_ctx`) gates routes: + +| Token kind | Scopes | +|---|---| +| `dfoa_` | `[full]` | +| `dfoe_` | `[apps:run, apps:read:permitted-external]` | + +Scopes derive from prefix on every request; mint endpoints do not accept a `scopes` field. Endpoints declare required scope at registration; the check returns 403 `insufficient_scope` with the missing scope name in the body. `full` is the umbrella — it satisfies every check within accepted surface. + +Scope catalog (wire format, `colon:lower`): + +| Scope | Holders | Grants | +|---|---|---| +| `full` | `dfoa_` only | Superuser within the dfoa_ surface | +| `apps:read` | `dfoa_` only | List + describe via `/openapi/v1/apps*` | +| `apps:run` | both | Run via the surface matching the holder's subject_type | +| `apps:read:permitted-external` | `dfoe_` only (EE) | List + describe via `/openapi/v1/permitted-external-apps*` | + +**Mint policy.** Hard rejection at device-flow mint endpoint: + +- `dfoa_` may receive `[full]`, `[apps:read]`, `[apps:run]`, or combinations. +- `dfoe_` may receive only `[apps:run, apps:read:permitted-external]`. +- Cross-subject scope minting → 400 `mint_policy_violation`. CE deploys reject `dfoe_` mint entirely. + +`full` does **not** umbrella `apps:read:permitted-external` across surface — even a `full`-bearing `dfoa_` hitting `/permitted-external-apps*` is rejected with 403 `wrong_surface` at the surface gate, before scope check runs. 
Surface gate is independent of scope semantics. + +## Endpoint surface + +### Identity + sessions + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/account` | Bearer | Polymorphic by subject. Replaces `/v1/me` | +| GET | `/openapi/v1/account/sessions` | Bearer | **New** — list user's active OAuth tokens (no current `/v1/` equivalent). See §Sessions list shape | +| DELETE | `/openapi/v1/account/sessions/self` | Bearer | Revoke session backing this request. Replaces `/v1/oauth/authorizations/self` | +| DELETE | `/openapi/v1/account/sessions/` | Bearer + subject-match | **New** — revoke specific session | + +`GET /openapi/v1/account` shape: + +```json +{ + "subject_type": "account" | "external_sso", + "subject_email": "...", + "subject_issuer": null | "https://idp.partner.com", + "account": null | { "id", "email", "name" }, + "workspaces": [{ "id", "name", "role" }], + "default_workspace_id": null | "ws_..." +} +``` + +`subject_type` always present. Absent fields are explicit `null` / `[]`. + +### Sessions list shape + +`GET /openapi/v1/account/sessions` filters `revoked_at IS NULL AND expires_at > NOW() AND token_hash IS NOT NULL` — hard-expired rows must not surface as phantom devices. 
+ +Returns the canonical pagination envelope (see `endpoints.md §`/openapi/v1/apps` — list shape`); session row shape: + +```json +{ + "id": "tok_...", + "prefix": "dfoa_ab2f", + "client_id": "difyctl", + "device_label": "difyctl on alice-mbp", + "created_at": "2026-04-20T10:00:00Z", + "last_used_at": "2026-04-26T08:30:00Z", + "expires_at": "2026-05-04T10:00:00Z" +} +``` + +### Device flow (RFC 8628 protocol) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| POST | `/openapi/v1/oauth/device/code` | Public + rate-limit | Request device + user code | +| POST | `/openapi/v1/oauth/device/token` | Public + rate-limit | Poll for token | +| GET | `/openapi/v1/oauth/device/lookup` | Public + rate-limit | Validate `user_code` from /device page | + +These three are RFC 8628 protocol endpoints — intentionally unauthenticated. Rate-limits stay at current per-IP / per-`device_code` levels (see `security.md`). + +### Device flow (user approval) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| POST | `/openapi/v1/oauth/device/approve` | Browser cookie + CSRF | Approve device flow (account branch — mints `dfoa_`) | +| POST | `/openapi/v1/oauth/device/deny` | Browser cookie + CSRF | Deny device flow | + +Cookie-authed because the user is approving from the dashboard. + +### Device flow (SSO branch, EE-only) + +| Method | Path | Auth | Purpose | +|---|---|---|---| +| GET | `/openapi/v1/oauth/device/sso-initiate` | Public, `@enterprise_only` | Build IdP auth URL, 302 to IdP | +| GET | `/openapi/v1/oauth/device/sso-complete` | Signed assertion (5-min TTL, nonce-consumed) | Set `device_approval_grant` cookie (path-scoped to `/openapi/v1/oauth/device`), 302 → `/device?sso_verified=1`. 
**IdP-side ACS callback URL must point here.** | +| GET | `/openapi/v1/oauth/device/approval-context` | `device_approval_grant` cookie | SPA reads claims (idempotent — nonce not consumed) | +| POST | `/openapi/v1/oauth/device/approve-external` | `device_approval_grant` cookie + CSRF | Mint `dfoe_` for External SSO subject | + +CE: `@enterprise_only` returns 404. EE: gated by entitlement. + +### Workspaces (dfoa_ surface) + +| Method | Path | Auth | +|---|---|---| +| GET | `/openapi/v1/workspaces` | Bearer (`dfoa_` only — `dfoe_` 403 `wrong_surface`) | +| GET | `/openapi/v1/workspaces/` | Bearer + member | + +### Apps — dfoa_ surface (CE + EE) + +`workspace_id` required on every request. List/describe/run subject to Layer 0 (workspace membership) + Layer 1 ACL. + +| Method | Path | Auth | +|---|---|---| +| GET | `/openapi/v1/apps?workspace_id=` | Bearer + `apps:read` | +| GET | `/openapi/v1/apps//describe?workspace_id=` | Bearer + `apps:read` — canonical "what is this app". Supports `?fields=info,parameters,input_schema` | +| POST | `/openapi/v1/apps//run` | Bearer + `apps:run` — server dispatches by `apps.mode`. See `endpoints.md §OpenAPI — app` | + +### Permitted apps — dfoe_ surface (EE only) + +No workspace concept — `dfoe_` has no `tenant_account_joins` row. Tenant resolved from app. Layer 0 skipped. Layer 1 enforced (binary access-mode gate). + +| Method | Path | Auth | +|---|---|---| +| GET | `/openapi/v1/permitted-external-apps` | Bearer + `apps:read:permitted-external` | +| GET | `/openapi/v1/permitted-external-apps/` | Bearer + `apps:read:permitted-external` | +| POST | `/openapi/v1/permitted-external-apps//run` | Bearer + `apps:run` | + +Blueprint registered only when `ENTERPRISE_ENABLED=true`. CE deploys return 404 (route absent, not 403). 
+ +## Error model + +Inherits the `apierrors.Typed` shape used by other groups: + +```json +{ + "code": "snake_case_code", + "message": "human-readable", + "hint": "optional next-action" +} +``` + +| HTTP | Code (sample) | When | +|---|---|---| +| 400 | `invalid_request` | Malformed body / missing required field | +| 401 | `bearer_missing` / `bearer_invalid` / `bearer_expired` | Auth failures | +| 403 | `wrong_surface` | Subject_type hit a surface reserved for the other subject_type (`dfoa_` → `/permitted-external-apps*`, `dfoe_` → `/apps*` or `/workspaces*`). Surface gate at request-flow step 6 | +| 403 | `insufficient_scope` (with `required_scope`) | Scope gate failed within accepted surface | +| 403 | `license_required` | EE surface (`/permitted-external-apps*`, `internal`-mode `internal-API`) reached but EE license absent or expired. CE deploys never emit this | +| 404 | `not_found` | Resource not found OR route doesn't exist on this group (e.g. `/permitted-external-apps*` on CE) | +| 429 | `rate_limited` (with `retry_after_ms`) | Per-IP / per-token throttle | +| 503 | `bearer_auth_disabled` | `ENABLE_OAUTH_BEARER=false` | + +## CORS posture + +Distinct from service_api (which is permissive for embedded use). `/openapi/v1/*` allows: + +- `Authorization`, `Content-Type`, `X-CSRF-Token` request headers +- `GET POST PATCH DELETE OPTIONS` methods +- `*` origin **only when** `ENABLE_OAUTH_BEARER=true` AND `OPENAPI_CORS_ALLOW_ORIGINS=*`; otherwise an explicit allowlist +- `Access-Control-Max-Age: 600` + +Cookie-authed routes within the group (approve / deny / approve-external) require same-origin and reject cross-origin OPTIONS. + +## Rate limit posture + +- Public device-flow endpoints: per-IP token bucket (existing settings preserved) +- Bearer-authed routes: per-token bucket, default 60 req/min, configurable via `OPENAPI_RATE_LIMIT_PER_TOKEN`. Shared Redis bucket per `sha256(token)` across all api instances. 
Details: `middleware.md §Rate limit` +- 429 response includes `Retry-After` header + `retry_after_ms` in body + +## Relationship to other groups + +| Group | State | +|---|---| +| `service_api/` (`/v1/*`) | App-scoped keys only. `service_api/oauth.py` deleted — `/v1/me`, `/v1/oauth/...` retired | +| `console/api/*` | Cookie-authed dashboard only. `console/auth/oauth_device.py` deleted | +| `inner_api/` | Unchanged — internal s2s | +| `controllers/oauth_device_sso.py` (root file) | Deleted — content lives in `controllers/openapi/oauth_device_sso.py` | +| `controllers/fastopenapi.py` | Unrelated — exports the `fastopenapi` library's `FlaskRouter` for console-side schema generation. Naming collision is cosmetic; file stays. | + +## Gateway routing + +Every gateway in front of `api:5001` must route `/openapi/*` to it; without a rule, requests fall through to the web frontend and 404. + +| Deployment | File | Rule | +|---|---|---| +| dify docker-compose | `docker/nginx/conf.d/default.conf.template` | `location /openapi { proxy_pass http://api:5001; include proxy.conf; }` | +| dify-enterprise gateway | `server/hack/configs/gateway/Caddyfile` | `handle /openapi/* { reverse_proxy http://api:5001 }` inside `console.dify.local` only — cookie-authed routes are scoped to that host | +| dify-helm chart | `charts/dify/templates/gateway/caddy-config.yaml` | Same Caddy `handle /openapi/* { reverse_proxy {{ $apiSvc }} }` inside both `consoleApiDomain` blocks (the chart has two variants depending on whether console-api and console-web share a domain) | + +`/openapi/*` is intentionally absent from `enterprise.dify.local`, `app.dify.local`, `serviceApiDomain`, and `api.dify.local`: cookie-authed routes (approve / deny / approval-context / sso-complete) only work on the host that mints the console session cookie, and the IdP-side ACS callback pins a single hostname. 
+ +## Out of scope + +- Admin / billing / setup / init endpoints — owner-only, browser ctx, stay in console +- Plugin marketplace, tool providers — extension management +- Webhook triggers — already separate blueprint +- App-key features (`/v1/chat-messages` etc.) — stay in service_api diff --git a/cli/docs/specs/server/security.md b/cli/docs/specs/server/security.md new file mode 100644 index 0000000000..067a05b840 --- /dev/null +++ b/cli/docs/specs/server/security.md @@ -0,0 +1,150 @@ +--- +title: server — security +--- + +# security + +Rate limits, secret-scanner prefixes, audit events, anti-framing, CI-enforced invariants, log redaction, migration + coexistence notes. + +Companion: `tokens.md`, `middleware.md`, `device-flow.md`, `endpoints.md`. + +## Rate limits + +| Endpoint | Limit | Scope | +| ------------------------------------------------ | ---------------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `POST /openapi/v1/oauth/device/code` | 60 / hr / IP | Prevents device-code spam | +| `POST /openapi/v1/oauth/device/token` | 1 / `interval` / device_code | Per RFC 8628 (`slow_down` on violation) | +| `GET /openapi/v1/oauth/device/sso-initiate` | 60 / hr / IP | SSO-initiate flood protection | +| `POST /openapi/v1/oauth/device/approve-external` | 10 / hr / subject_email | Mirror per-account limit on account-branch approve | +| `POST /openapi/v1/oauth/device/approve` | 10 / hr / session | Account-branch approve | +| `GET /openapi/v1/account` | 60 / min / account | Validation-call spam | +| Bearer-authenticated requests on `/openapi/v1/*` | Per-token bucket | Default 60 req/min; `OPENAPI_RATE_LIMIT_PER_TOKEN` env. Shared Redis bucket per `sha256(token)` across instances | + +## Secret-scanner prefixes + +Two distinct prefixes → two patterns. 
Assign severity per prefix: + +- `dfoa_[A-Za-z0-9_-]{43}` — high (full scope, account session) +- `dfoe_[A-Za-z0-9_-]{43}` — medium (`apps:run` + `apps:read:permitted-external`, SSO-only surface) + +Coordinate with: + +- GitHub Advanced Security (push-protection partner program) +- GitLab +- BitBucket +- TruffleHog + +Partner match → Dify endpoint receives leaked-token notification → revoke matching row + email owner. + +GitHub partner program approval can take weeks. Initiate early. + +## Audit events + +| Event | Trigger | Payload | +| --------------------------------- | ----------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `oauth.device_flow_approved` | Device-flow approval success | `subject_email`, `account_id` nullable, `subject_issuer` (for External SSO), `client_id`, `device_label`, `scopes`, `subject_type` (`account` / `external_sso`), `rotated`, `expires_at`, `token_id` | +| `oauth.device_flow_denied` | Explicit denial | `subject_email`, `client_id`, `device_label` | +| `oauth.device_flow_rejected` | SSO branch email-collision reject (`sso-complete` or `approve-external`) | `subject_type`, `subject_email`, `subject_issuer`, `reason` | +| `oauth.token_expired` | OAuth hard-expired on middleware hit | `token_id`, `subject`, `reason: "ttl"` | +| `oauth.device_code_cross_ip_poll` | Device-flow poll succeeded from IP different from `/device/code` creation IP | `token_id`, `subject_email`, `creation_ip`, `poll_ip` | +| `app.run.openapi` | `POST /openapi/v1/apps//run` or `POST /openapi/v1/permitted-external-apps//run` called | `app_id`, `tenant_id`, `subject` (subject_type + account_id or subject_email+issuer), `surface` (`apps` / `permitted-external-apps`), `source` (`oauth_account` / `oauth_sso`), `token_id` | +| 
`openapi.wrong_surface_denied` | Surface gate rejected request (caller hit the surface for the other subject_type) | `subject_type`, `attempted_path`, `client_id`, `token_id` | + +`oauth.token_expired` fires from both Python middleware and EE inner-API resolve; idempotent CAS makes concurrent emit safe. See `tokens.md §Detection + hard-expire`. + +## Inner API trust boundary + +Enterprise-svc inner endpoints serving the EE gateway: + +- `POST /inner/api/rbac/check-access` — workspace-role RBAC check +- `POST /inner/api/auth/check-access-oauth` — token resolve + +Both authenticate via `Enterprise-Api-Secret-Key` header (= `INNER_API_KEY` env). Token tables accessed via `Skip: true` ent schemas in `pkg/data/dify/schema/` (read for resolve; write for hard-expire on `expires_at <= NOW()` only). + +### Invariants + +| # | Invariant | +| --- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| 1 | Single env (`INNER_API_KEY`) gates every inner-API endpoint. No per-endpoint secrets. | +| 2 | `/inner/api/*` MUST NOT be internet-facing. Caddyfiles, nginx, and helm configs MUST NOT proxy this path from the public ingress. | +| 3 | Gateway MUST NOT inject resolved-identity headers downstream. Api always re-resolves from the token store. | +| 4 | Enterprise svc inner-API performs the same hard-expire mutation as dify api Python middleware on `expires_at <= NOW()` (`UPDATE oauth_access_tokens SET revoked_at=NOW(), token_hash=NULL`, idempotent CAS, audit emit, Redis invalidate). | +| 5 | EE deploys MUST keep `/openapi/v1/*` reachable only through the gateway. | + +CE deployments have no gateway: `/inner/api/auth/check-access-oauth` receives no traffic. Dify api Python middleware does its own resolve. 
+ +## Anti-framing + +Every response under `/openapi/v1/*` carries: + +``` +X-Frame-Options: DENY +Content-Security-Policy: frame-ancestors 'none' +``` + +`/device` (Next.js) also emits the same pair. Without this, an attacker's page can iframe `/device` (SPA post-`sso_verified=1`) and UI-trick a victim with a valid `device_approval_grant` cookie into clicking Approve — functionally equivalent to CSRF, bypasses double-submit. Deny framing outright — no trusted embedder exists. + +## Log redaction — device-flow secrets + +Flask access logs, request-body capture (debug mode), Sentry breadcrumbs, and any 3rd-party APM (Datadog, New Relic) capture request/response bodies by default. `device_code` and `user_code` travel plaintext on the routes listed below and **must not** land in any long-lived log store. + +**Routes carrying plaintext:** + +- `POST /openapi/v1/oauth/device/code` (response: `device_code`, `user_code`) +- `POST /openapi/v1/oauth/device/token` (request: `device_code`) +- `GET /openapi/v1/oauth/device/lookup` (query param: `user_code`) +- `POST /openapi/v1/oauth/device/approve` (request: `user_code`) +- `POST /openapi/v1/oauth/device/deny` (request: `user_code`) +- `GET /openapi/v1/oauth/device/sso-initiate` (query: `user_code`) +- `POST /openapi/v1/oauth/device/approve-external` (request: `user_code`) + +**Filter.** Register a Flask request/response log hook that redacts `device_code` + `user_code` fields from: + +- request body (JSON + form) +- query string +- response body (JSON) +- Sentry breadcrumbs (`before_send` hook — replace values with `[REDACTED]`) +- any structured-log emitter (e.g., `structlog` processors) + +Apply by **exact key-name match**, across every route (not route-scoped — cheap belt-and-braces). Same filter redacts `access_token` and `minted_token` keys. + +Minted OAuth plaintext (`dfoa_…`, `dfoe_…`) also covered: belt-and-braces, even though they normally travel only in `Authorization` headers. 
+ +Rate-limit 503 / 4xx error bodies echo back `device_code` / `user_code` in some error shapes — filter covers those too (key-name match, not status-code scoped). + +## Fingerprinting constraints + +- Never log full token at any layer. +- Never log token hash to external system (hash is DB lookup key). +- Audit events carry `token_id` (UUID), not hashes. + +## Operator env vars + +| Var | Default | Effect | +| --------------------------------------- | ----------- | ----------------------------------------------------------------------------------------------------------------------------------------------------- | +| `ENABLE_OAUTH_BEARER` | `true` | Kill switch. `false` → `/openapi/v1/*` bearer routes return 503 `bearer_auth_disabled`. Legacy `app-` keys unaffected. | +| `OAUTH_TTL_DAYS` | `14` | TTL applied to newly minted OAuth tokens. Range `[1, 365]`. | +| `ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK` | `true` | Daily 05:00 retention sweep on `oauth_access_tokens`. | +| `OAUTH_ACCESS_TOKEN_RETENTION_DAYS` | `30` | Rows where `revoked_at` OR `expires_at` is older than this are DELETEd by the retention task. | +| `OPENAPI_KNOWN_CLIENT_IDS` | `"difyctl"` | Comma-separated allowlist of accepted `client_id` values at `/openapi/v1/oauth/device/code`. Unknown clients rejected. | +| `OPENAPI_RATE_LIMIT_PER_TOKEN` | `60` | Per-token request budget per minute on bearer-authed `/openapi/v1/*` routes. Shared Redis bucket per `sha256(token)`. | + +## Enterprise parity + +### External SSO subjects + +EE SSO-verified identities mint `dfoe_` via `/device` SSO branch. `subject_email` populated, `account_id = NULL`, scopes `[apps:run, apps:read:permitted-external]`. + +Surface routing: `dfoe_` tokens reach only `/openapi/v1/permitted-external-apps*` (and `/openapi/v1/account` for identity readback). Surface gate (`@accept_subjects(USER_EXT_SSO)`) rejects `dfoe_` on the `/apps*` and `/workspaces*` surfaces with 403 `wrong_surface`. See `middleware.md §Surface gate`. 
+
+ACL = binary access-mode gate. `app.access_mode ∈ {public, sso_verified}` only. No per-email whitelist, no group evaluation. No tenant concept — `dfoe_` is a global SSO identity; tenant resolved from each app row at request time.
+
+- SSO-only users can run any app with `access_mode` `public` or `sso_verified` (subject to `_apply_openapi_gate` `enable_api=true` filter). `internal` / `internal_all` apps invisible (filtered out of list, 404 on describe).
+- Admin restricts via IdP controls (who can authenticate) or access-mode toggles.
+- `subject_issuer` persists on `oauth_access_tokens` (surfaces on `auth devices list`, `GET /openapi/v1/account`, revoke-by-id) and travels on audit events. Not persisted on `end_users`.
+
+### License / quota
+
+Bearer traffic attributed by `account_id` (account) or `subject_email` (External SSO), same way app-scoped-key traffic is attributed by tenant.
+
+EE-specific surface (`/permitted-external-apps*`) gated by license module — license absent / expired → 403 `license_required` (matches the `openapi.md §Error model` table). CE deploys skip license check (CE blueprint absence is the gate). Follows existing console/api license pattern; no new env var (reuses `ENTERPRISE_API_URL` + license helper).
diff --git a/cli/docs/specs/server/tokens.md b/cli/docs/specs/server/tokens.md
new file mode 100644
index 0000000000..40e29d4e66
--- /dev/null
+++ b/cli/docs/specs/server/tokens.md
@@ -0,0 +1,249 @@
+---
+title: server — tokens
+---
+
+# tokens
+
+Server-side primitives for user-level bearer tokens: one Postgres table, three wire prefixes, Redis caches, TTL policy, revocation paths.
+
+Companion: `middleware.md` (request flow + authz), `device-flow.md` (mint paths), `endpoints.md` (HTTP surfaces).
+ +## Token surface + +Prefix-dispatched alongside existing app-scoped keys: + +| Prefix | Subject | App context source | +| ------------------ | ----------------------------------------------- | ------------------------------- | +| `app-…` (existing) | App | Fixed by key row → `apps.tenant_id` | +| `dfoa_` (new) | Dify account (OAuth) | URL path `` | +| `dfoe_` (new) | SSO-verified email, no Dify account (EE, OAuth) | URL path `` | + +**Prefix carries subject type.** Middleware short-circuits scope dispatch at prefix check — no DB/Redis read needed to know "is this caller External SSO?" + +`dfp_` (PAT) is not supported — 401 `unknown_token_prefix`. No model, service, controller, or migration for PAT ships. + +**Tenant never carried by token.** Always looked up from `apps` row at request time. + +**`X-Dify-Env`** is accepted and ignored. + +## Token types + +One kind (OAuth) × two subject variants. Scope derived from prefix, not stored. + +| Kind | Prefix | Subject | Scope | Created by | Storage | UI | +| -------------------- | ------- | ----------------------------------- | -------------- | --------------------------- | ----------------------------------------------------------------------- | --------------------------------------------- | +| OAuth (account) | `dfoa_` | Dify account | `[full]` | Device flow, account branch | `oauth_access_tokens` (`account_id` populated) | CLI `auth devices list/revoke` | +| OAuth (External SSO) | `dfoe_` | SSO-verified email, no account (EE) | `[apps:run, apps:read:permitted-external]` | Device flow, SSO branch | `oauth_access_tokens` (`account_id = NULL`, `subject_issuer` populated) | CLI `auth devices list/revoke` | + +### OAuth access token + +- Created via OAuth Device Flow (`difyctl auth login`). Details: `device-flow.md`. 
+- Subject determined at approval:
+  - Account → email matches Dify account → `account_id` populated → scope `[full]`
+  - External SSO → SSO-verified, no Dify account → `account_id = NULL`, `subject_issuer` populated → scope `[apps:run, apps:read:permitted-external]`
+- Auto-derived `device_label` = `"difyctl on <hostname>"` (client-supplied, server-stored).
+- `client_id` allowlist controlled by `OPENAPI_KNOWN_CLIENT_IDS` env (default `"difyctl"`). Unknown clients rejected at `/device/code`.
+- Plaintext never rendered in UI or printed to terminal — flows directly from Redis → CLI poll response → OS keychain.
+
+### Mint policy (hard-enforced at device-flow approve)
+
+- `dfoa_` mint: `[full]` (v1.0 default). Future PAT may narrow to subsets like `[apps:read, apps:run]`.
+- `dfoe_` mint: always `[apps:run, apps:read:permitted-external]`. No alternatives.
+- Cross-subject scope minting → 400 `mint_policy_violation`. Device-flow approve handler validates `(subject_type, requested_scope)` pairs against this table before INSERT/UPDATE.
+- CE deploys reject `dfoe_` minting entirely (the SSO branch endpoints are `@enterprise_only`).
+- Surface gate at request time is independent of scope check (see `middleware.md §Surface gate`) — `full` does **not** umbrella `apps:read:permitted-external` across surfaces.
+
+### Wire format
+
+```
+dfoa_<43 base64url chars>   # OAuth account (5 + 43)
+dfoe_<43 base64url chars>   # OAuth External SSO (5 + 43)
+
+regex: ^(dfoa|dfoe)_[A-Za-z0-9_-]{43}$
+```
+
+Server rejects any `dfp_` bearer with 401 `unknown_token_prefix`.
+
+~256 bits entropy after prefix. Base64url alphabet — safe in URLs, shell vars, YAML.
+
+### Hashing
+
+SHA-256 at creation. Only hash stored; `token_hash varchar(64)` uniquely indexed.
+
+OAuth plaintext is never rendered. Lives in Redis for seconds during approve → poll, travels CLI poll response, written directly to OS keychain. If keychain unavailable, CLI falls back to `hosts.yml` at `0600` with a prominent stderr warning.
+ +## Storage + +One table: `oauth_access_tokens`. + +### `oauth_access_tokens` + +Identified primarily by email. `account_id` populated when email matches Dify account; NULL for SSO-only (EE). + +```sql +CREATE TABLE oauth_access_tokens ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + + -- Subject + subject_email TEXT NOT NULL, -- primary identity + subject_issuer TEXT, -- NULL for account; IdP entity_id / OIDC issuer URL for SSO-only + account_id UUID REFERENCES accounts(id) ON DELETE SET NULL, -- NULL for SSO-only + + -- Client + client_id VARCHAR(64) NOT NULL, -- allowlisted via OPENAPI_KNOWN_CLIENT_IDS + device_label TEXT NOT NULL, -- 'difyctl on gareth-mbp' + + -- Credential + prefix VARCHAR(8) NOT NULL, -- 'dfoa_' (account) | 'dfoe_' (ExtSSO) + token_hash VARCHAR(64) NULL UNIQUE, -- NULL after hard-expire + + -- Lifecycle + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + last_used_at TIMESTAMPTZ, + expires_at TIMESTAMPTZ NOT NULL, -- mandatory. Set to NOW() + oauth_ttl_days at mint/rotate + revoked_at TIMESTAMPTZ +); + +-- No `scopes` column. Middleware derives from (prefix, account_id IS NULL). + +CREATE INDEX idx_oauth_subject_email ON oauth_access_tokens (subject_email) WHERE revoked_at IS NULL; +CREATE INDEX idx_oauth_account ON oauth_access_tokens (account_id) WHERE revoked_at IS NULL AND account_id IS NOT NULL; +CREATE INDEX idx_oauth_client ON oauth_access_tokens (subject_email, client_id) WHERE revoked_at IS NULL; +CREATE INDEX idx_oauth_token_hash ON oauth_access_tokens (token_hash) WHERE revoked_at IS NULL; + +-- Rotate-in-place per (subject, client, device). +-- Re-login from same device rotates the row. Re-login after hard-expire takes the INSERT branch +-- (expired row has revoked_at IS NOT NULL, so not a candidate for ON CONFLICT). +-- subject_issuer participates so two IdPs asserting same email land in different rows. +-- Account branch writes a sentinel issuer ('dify:account') instead of NULL — application +-- normalizes at mint time. 
With no NULLs in the indexed column, the standard partial unique +-- index enforces "one active row per (email, issuer, client, device)" without needing a +-- COALESCE expression index or PG15 NULLS NOT DISTINCT. +CREATE UNIQUE INDEX uq_oauth_active_per_device + ON oauth_access_tokens (subject_email, subject_issuer, client_id, device_label) + WHERE revoked_at IS NULL; +``` + +**`ACCOUNT_ISSUER_SENTINEL = 'dify:account'`.** Constant lives in api token service. Mint path normalizes account-branch issuer to this sentinel before every INSERT/UPDATE; SSO branch passes the IdP issuer URL verbatim. Resolve / hard-expire / revoke read the sentinel as opaque — no special-case branching in middleware. Sentinel chosen to be a non-URL (URI scheme reserved for Dify-internal use, no IdP can match) so cannot collide with a real IdP issuer. + +**Multi-device semantics.** `device_label` = per-device identifier. Unique index permits exactly one active row per `(subject_email, subject_issuer, client_id, device_label)`. Different devices = independent rows. Soft-deleted rows excluded from uniqueness. + +## Redis cache + +Middleware hits every request; Postgres on every call = wasteful. Two caches. + +### Token-context cache + +``` +auth:token:{sha256_of_token} → JSON AuthContext + { + "email": "user@example.com", + "account_id": "acc_..." | null, + "subject_type": "account" | "external_sso", + "scopes": ["full"] | ["apps:run", "apps:read:permitted-external"], + "token_id": "oat_...", + "source": "oauth", + "expires_at": "...iso..." | null + } +``` + +- **Positive TTL:** 60 s. Short enough that revokes propagate fast; long enough to deflect hot-token load. +- **Negative TTL:** 10 s. Invalid / revoked / expired tokens cached as `"invalid"` — prevents 401-storm from a broken CI thrashing Postgres. +- **Invalidation on revoke:** every revoke path must `UPDATE revoked_at` AND `DEL auth:token:{hash}`. RPC returns only after both succeed. 
+- **Expiry:** cached entry carries `expires_at`; middleware double-checks on cache hit. Triggers hard-expire path if TTL tripped mid-cache.
+- **Memory:** ~200 bytes/entry. Negligible.
+
+No `tenant_id` in cache — always looked up per request from the `apps` row.
+
+### ACL cache
+
+Layer-1 authorization result (see `middleware.md §Authorization`).
+
+```
+acl:webapp:{subject}:{app_id} → "allow" | "deny"   (60 s allow / 10 s deny)
+```
+
+`{subject}` = `account_id` for account bearer, `sha256(subject_email)` for External SSO. No env dimension.
+
+Invalidation: TTL-only (no active push). ACL edits propagate within 60 s worst-case.
+
+## `last_used_at`
+
+Currently NULL on new mints. Middleware does not update `last_used_at` on request — sync per-request UPDATE would double the DB write rate and cancel the middleware cache win. `auth devices list` renders blank in the LAST USED column.
+
+## TTL policy (OAuth)
+
+All OAuth tokens carry mandatory `expires_at`:
+
+```
+ttl_days = Policy.OAuthTTLDays()
+expires_at = NOW() + ttl_days * 86400s
+```
+
+| Scope | Source | Default | Range |
+| ---------------------------------- | ----------------------------------------------------------------------------- | ------- | ---------- |
+| Enterprise tenant | EE Inner API `GET /inner/api/policy/oauth-ttl?tenantId=X` (Redis-cached 60 s) | 14 days | `[1, 365]` |
+| CE (no EE) / Inner API unreachable | env var `OAUTH_TTL_DAYS`, falls through to hardcoded `14` | 14 days | `[1, 365]` |
+
+Applies to every `oauth_access_tokens` row at mint and rotate.
+
+**Policy change semantics:** new TTL applies to new mints/rotates only. Existing rows keep their originally-written `expires_at`. Tightening 30→7 → effect on next rotate / natural expiry. No background sweep.
+
+### Detection + hard-expire on middleware hit
+
+Middleware reads the row **without** `expires_at` filter. Behavior on resolve:
+
+1. `row.expires_at > NOW()` → valid (`expires_at` is NOT NULL — no NULL branch); cache `AuthContext` 60 s.
+2. 
`row.expires_at <= NOW()` → atomic CAS revoke (`UPDATE oauth_access_tokens SET revoked_at = NOW(), token_hash = NULL WHERE id = :id AND revoked_at IS NULL`), `DEL auth:token:{hash}`, emit `oauth.token_expired` audit (only when CAS hit a row, `rows_affected == 1`), write 10 s `"invalid"` negative cache, return 401 `token_expired`. + +**Hard-expire fires from both paths.** The api Python middleware and the Enterprise inner API (`/inner/api/auth/check-access-oauth`, see `gateway.md §Inner API — auth check-access (OAuth)`) perform the same revoke + invalidate + audit on `expires_at <= NOW()`. Idempotent CAS (`WHERE id = :id AND revoked_at IS NULL`) makes concurrent hits safe. + +`token_hash = NULL` releases the column UNIQUE so same-device re-login can issue a fresh hash without conflict. Row retained for audit (90-day sweep). + +**Re-login lifecycle after hard-expire.** The hard-expired row has `revoked_at IS NOT NULL`; partial unique index `uq_oauth_active_per_device` excludes it. Re-login therefore takes the INSERT branch — new row, new `id`. Consequence: `auth devices list` and `GET /openapi/v1/account/sessions` must filter `(revoked_at IS NULL AND expires_at > NOW() AND token_hash IS NOT NULL)` or dead rows surface as phantom devices. + +**Edge case — token never presented after expiry:** row stays alive with `revoked_at IS NULL` + `expires_at < NOW()`. A scheduled retention task (`schedule/clean_oauth_access_tokens_task.py`, daily 05:00 via Celery beat) DELETEs rows past `OAUTH_ACCESS_TOKEN_RETENTION_DAYS` (default 30) under either: + +```sql +revoked_at < NOW() - INTERVAL 'N days' + OR (revoked_at IS NULL AND expires_at < NOW() - INTERVAL 'N days') +``` + +Kill switch: `ENABLE_CLEAN_OAUTH_ACCESS_TOKENS_TASK=false`. Live unexpired rows are never touched. + +## Revocation paths + +All paths: soft-delete Postgres (`revoked_at = NOW()`) **AND** `DEL auth:token:{hash}`. Return success only after both succeed. 
+ +| Trigger | Endpoint | Auth | +| ---------------------------------- | ------------------------------------------ | ---------------------- | +| `difyctl auth logout` (OAuth) | `DELETE /openapi/v1/account/sessions/self` | Bearer being revoked | +| `difyctl auth devices revoke ` | `DELETE /openapi/v1/account/sessions/` | Bearer + subject-match | + +**Subject-match on revoke-by-id:** + +``` +if requester is AccountContext: + require target.account_id IS NOT NULL AND target.account_id == requester.account_id +elif requester is SSOIdentityContext: + require target.account_id IS NULL + AND target.subject_email == requester.subject_email + AND target.subject_issuer == requester.subject_issuer +else: + 403 +``` + +Two identities sharing an email (account + SSO for `foo@x.com`) do **not** cross-revoke. + +## CLI ingestion contract + +OAuth tokens (`dfoa_` / `dfoe_`) arrive only via device-flow response and are written directly to keychain. CLI never accepts a raw bearer through stdin / flag. `app-` and `dfp_` are rejected at every ingestion point. Details: `../auth.md §Bearer token kinds`. + +## Secret scanner + +Two distinct prefixes → two patterns for GitHub Advanced Security / GitLab / TruffleHog. Severity by prefix: + +- `dfoa_` = full scope, account session → high severity +- `dfoe_` = `apps:run` + `apps:read:permitted-external`, SSO-only → medium severity + +See `security.md §Secret-scanner prefixes` for enrollment detail. diff --git a/cli/docs/specs/workspaces.md b/cli/docs/specs/workspaces.md new file mode 100644 index 0000000000..0e44edf288 --- /dev/null +++ b/cli/docs/specs/workspaces.md @@ -0,0 +1,87 @@ +--- +title: workspaces +--- + +# workspaces + +> Implementation: see [`cli/src/`](../../src/). Build & test: see [`cli/README.md`](../../README.md). + +Workspace discovery and context management for account users. + +``` +difyctl get workspace [flags] +difyctl auth use +``` + +Companion: `auth.md §Session model`, `server/endpoints.md §OpenAPI — workspaces`. 
+ +--- + +## Workspace model + +An account user belongs to N workspaces (tenants). At login, `default_workspace_id` from `/account` becomes the active workspace, stored in `hosts.yml`. dfoa_-surface commands (`get apps`, `get app`, `describe app`, `run app`) scope to the active workspace unless overridden. + +**External SSO users (`dfoe_`)** have no workspace concept. The `get workspace`, `auth use`, and `--workspace` flag are all command-level errors on a dfoe_ session (CLI rejects before any network call). Server-side, `/openapi/v1/workspaces*` returns 403 `wrong_surface` for `dfoe_` (the surface gate rejects). + +### Resolution chain (account users) + +Every resource command resolves workspace through: + +| Priority | Source | +| -------- | -------------------------------------------------------------------------------------- | +| 1 | `--workspace ` flag | +| 2 | `DIFY_WORKSPACE_ID` env var | +| 3 | `current_workspace_id` in `hosts.yml` (set by `auth use`) | +| 4 | `default_workspace_id` from last login response | +| 5 | Error: `error: no workspace selected; run 'difyctl auth use ' or pass --workspace` | + +**Env precedence is explicit.** `DIFY_WORKSPACE_ID` (priority 2) wins over `hosts.yml` (priority 3 + 4) by design — env vars override on-disk state per `config.md §Precedence`. CLI does not validate ID syntax client-side; malformed IDs (e.g., random-string env value) hit the next resource command and surface server's 404 (`workspace not found`) → exit 1. + +--- + +## `difyctl get workspace` — list + +``` +$ difyctl get workspace +ID NAME ROLE +ws-abc123 Acme Corp owner +ws-def456 Side Project member +``` + +**Flags:** + +| Flag | Purpose | +| --------------------- | ------------- | +| `-o json\|yaml\|name` | Output format | + +`-o name` → workspace IDs one per line. + +**Server:** `GET /openapi/v1/workspaces` — bearer auth, `dfoa_` only. `dfoe_` → 403 `wrong_surface` at the surface gate. 
+ +**Active workspace** marked with `*` in the `NAME` column when stored current matches a row. + +--- + +## `difyctl auth use ` — switch active workspace + +``` +$ difyctl auth use ws-def456 +Switched to workspace: Side Project (ws-def456) +``` + +Writes `current_workspace_id` to `hosts.yml`. Subsequent resource commands resolve to this workspace until changed. + +**External SSO subjects:** `error: workspace context unavailable for external SSO sessions`. Exit 2. + +**Unknown ID:** does not validate against server at write time (avoids extra RTT). Invalid IDs produce 404 on the next resource command. + +**Flags:** none. No `--yes` needed — non-destructive state write. + +--- + +## `--workspace` flag + +Every dfoa_-surface resource command (`get apps`, `get app`, `describe app`, `run app`, `get workspace`) accepts `--workspace `. Overrides active workspace for that invocation only; does not persist. + +External SSO sessions: flag rejected client-side with `error: --workspace is not supported on external SSO sessions; the permitted-external-apps surface has no workspace concept`. Exit 2. 
+ diff --git a/cli/package.json b/cli/package.json new file mode 100644 index 0000000000..10be60fba5 --- /dev/null +++ b/cli/package.json @@ -0,0 +1,128 @@ +{ + "name": "@langgenius/difyctl", + "type": "module", + "version": "0.1.0-rc.1", + "description": "Dify command-line interface", + "difyctl": { + "channel": "rc", + "compat": { + "minDify": "1.6.0", + "maxDify": "1.7.0" + } + }, + "license": "Apache-2.0", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + } + }, + "bin": { + "difyctl": "./bin/run.js" + }, + "files": [ + "README.md", + "bin", + "dist", + "oclif.manifest.json" + ], + "engines": { + "node": "^22.22.1" + }, + "scripts": { + "build": "vp pack && oclif manifest", + "dev": "tsx bin/dev.js", + "test": "vp test", + "test:coverage": "vp test --coverage", + "lint": "eslint", + "lint:fix": "eslint --fix", + "type-check": "tsc", + "manifest": "oclif manifest", + "pack:tarballs": "oclif pack tarballs --xz --parallel" + }, + "dependencies": { + "@napi-rs/keyring": "catalog:", + "@oclif/core": "catalog:", + "@oclif/plugin-autocomplete": "catalog:", + "@oclif/plugin-help": "catalog:", + "@oclif/plugin-not-found": "catalog:", + "@oclif/plugin-version": "catalog:", + "@oclif/plugin-warn-if-update-available": "catalog:", + "cli-table3": "catalog:", + "eventsource-parser": "catalog:", + "js-yaml": "catalog:", + "ky": "catalog:", + "open": "catalog:", + "ora": "catalog:", + "picocolors": "catalog:", + "std-semver": "catalog:", + "zod": "catalog:" + }, + "devDependencies": { + "@dify/tsconfig": "workspace:*", + "@hono/node-server": "catalog:", + "@types/js-yaml": "catalog:", + "@types/node": "catalog:", + "@vitest/coverage-v8": "catalog:", + "eslint": "catalog:", + "hono": "catalog:", + "oclif": "catalog:", + "tsx": "catalog:", + "typescript": "catalog:", + "vite": "catalog:", + "vite-plus": "catalog:", + "vitest": "catalog:" + }, + "oclif": { + "bin": "difyctl", + "dirname": "difyctl", + "helpClass": "./dist/help-class", + 
"commands": { + "strategy": "pattern", + "target": "./dist/commands", + "globPatterns": [ + "**/index.+(js|cjs|mjs|ts)", + "!**/_*/**", + "!**/*.+(d.ts|test.ts|test.js|test.mjs|spec.ts|spec.js|spec.mjs|d.mts|d.cts|d.mjs)?(x)" + ] + }, + "topicSeparator": " ", + "plugins": [ + "@oclif/plugin-help", + "@oclif/plugin-not-found", + "@oclif/plugin-warn-if-update-available", + "@oclif/plugin-autocomplete", + "@oclif/plugin-version" + ], + "topics": { + "auth": { + "description": "Authentication and session management" + }, + "auth devices": { + "description": "Manage active OAuth sessions" + }, + "get": { + "description": "List or fetch resources" + }, + "describe": { + "description": "Detailed resource view" + }, + "run": { + "description": "Invoke an app" + }, + "config": { + "description": "Local config management" + }, + "env": { + "description": "Environment variable introspection" + }, + "help": { + "description": "Long-form topic guides" + } + }, + "warn-if-update-available": { + "timeoutInDays": 7, + "message": "<%= chalk.yellow('A newer dify is available — upgrade dify and CLI together. See dify release notes.') %>" + } + } +} diff --git a/cli/scripts/install-cli.ps1 b/cli/scripts/install-cli.ps1 new file mode 100644 index 0000000000..da4a895edb --- /dev/null +++ b/cli/scripts/install-cli.ps1 @@ -0,0 +1,149 @@ +#Requires -Version 5.1 +<# +.SYNOPSIS + One-line difyctl installer for Windows. Verifies sha256 before extract. +.PARAMETER Version + Dify release tag. Defaults to the latest release. +.PARAMETER Prefix + Install root. Defaults to $env:LOCALAPPDATA\difyctl. +.PARAMETER Repo + Release source repo. Defaults to langgenius/dify. 
+#> +[CmdletBinding()] +param( + [string]$Version = $env:DIFYCTL_VERSION, + [string]$Prefix = $env:DIFYCTL_PREFIX, + [string]$Repo = $env:DIFYCTL_REPO +) + +$ErrorActionPreference = 'Stop' + +if ([string]::IsNullOrEmpty($Version)) { $Version = 'latest' } +if ([string]::IsNullOrEmpty($Prefix)) { $Prefix = Join-Path $env:LOCALAPPDATA 'difyctl' } +if ([string]::IsNullOrEmpty($Repo)) { $Repo = 'langgenius/dify' } + +function Fail($msg) { Write-Error "install-cli: $msg"; exit 1 } +function Need($cmd) { + if (-not (Get-Command $cmd -ErrorAction SilentlyContinue)) { Fail "$cmd is required" } +} +Need tar + +switch ($env:PROCESSOR_ARCHITECTURE) { + 'AMD64' { $arch = 'x64' } + 'ARM64' { $arch = 'arm64' } + default { Fail "unsupported arch: $env:PROCESSOR_ARCHITECTURE" } +} +$target = "win32-$arch" + +if ($Version -eq 'latest') { + $api = "https://api.github.com/repos/$Repo/releases/latest" +} else { + $api = "https://api.github.com/repos/$Repo/releases/tags/$Version" +} + +try { + $release = Invoke-RestMethod -Uri $api -UseBasicParsing +} catch { + Fail "could not fetch release metadata from $api $($_.Exception.Message)" +} + +$tag = $release.tag_name +if ([string]::IsNullOrEmpty($tag)) { Fail "release has no tag_name" } + +$assetRegex = "^difyctl-v[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z.-]+)?-$target\.tar\.xz$" +$matches = @($release.assets | Where-Object { $_.name -match $assetRegex }) + +if ($matches.Count -eq 0) { Fail "no difyctl asset for $target on $tag" } +if ($matches.Count -gt 1) { + $names = ($matches | ForEach-Object { $_.name }) -join ', ' + Fail "expected exactly 1 difyctl asset for $target on $tag, found $($matches.Count): $names" +} +$asset = $matches[0].name + +$suffix = "-$target.tar.xz" +$cliV = $asset.Substring('difyctl-'.Length, $asset.Length - 'difyctl-'.Length - $suffix.Length) +$checksums = "difyctl-$cliV-checksums.txt" + +$checksumAsset = $release.assets | Where-Object { $_.name -eq $checksums } | Select-Object -First 1 +if ($null -eq $checksumAsset) { + 
Fail "checksum file $checksums missing on $tag; refusing to install unverified binary" +} + +$url = "https://github.com/$Repo/releases/download/$tag/$asset" +$sumsUrl = "https://github.com/$Repo/releases/download/$tag/$checksums" + +$tmp = Join-Path ([System.IO.Path]::GetTempPath()) ("difyctl-install-" + [guid]::NewGuid().ToString('N')) +New-Item -ItemType Directory -Path $tmp -Force | Out-Null +$tarPath = Join-Path $tmp $asset +$sumPath = Join-Path $tmp $checksums + +try { + Write-Host "downloading $asset" + Invoke-WebRequest -Uri $url -OutFile $tarPath -UseBasicParsing + Invoke-WebRequest -Uri $sumsUrl -OutFile $sumPath -UseBasicParsing + + $expected = (Get-Content $sumPath | Where-Object { $_ -match " $([Regex]::Escape($asset))$" } | Select-Object -First 1) + if ([string]::IsNullOrEmpty($expected)) { Fail "no checksum entry for $asset in $checksums" } + $expectedHash = ($expected -split '\s+')[0].ToLower() + $actualHash = (Get-FileHash -Algorithm SHA256 -Path $tarPath).Hash.ToLower() + if ($expectedHash -ne $actualHash) { + Fail "checksum mismatch for $asset (expected $expectedHash, got $actualHash)" + } + + if (Get-Command cosign -ErrorAction SilentlyContinue) { + $sigUrl = "$url.sig" + $pemUrl = "$url.pem" + $sigPath = Join-Path $tmp "$asset.sig" + $pemPath = Join-Path $tmp "$asset.pem" + try { + Invoke-WebRequest -Uri $sigUrl -OutFile $sigPath -UseBasicParsing + Invoke-WebRequest -Uri $pemUrl -OutFile $pemPath -UseBasicParsing + } catch { + Fail "tarball signature/cert missing on $tag; refusing to install (cosign present): $($_.Exception.Message)" + } + $env:COSIGN_EXPERIMENTAL = '1' + & cosign verify-blob ` + --certificate $pemPath ` + --signature $sigPath ` + --certificate-identity-regexp '^https://github.com/langgenius/dify/' ` + --certificate-oidc-issuer 'https://token.actions.githubusercontent.com' ` + $tarPath + if ($LASTEXITCODE -ne 0) { Fail "cosign verification failed for $asset" } + Write-Host "cosign: verified $asset" + } else { + Write-Host "note: 
cosign not installed; skipping signature verification (sha256 still enforced)" + } + + $shareDir = Join-Path $Prefix 'share' + $binDir = Join-Path $Prefix 'bin' + New-Item -ItemType Directory -Path $shareDir -Force | Out-Null + New-Item -ItemType Directory -Path $binDir -Force | Out-Null + + Write-Host "extracting to $shareDir" + & tar.exe -xJf $tarPath -C $shareDir --strip-components=1 + if ($LASTEXITCODE -ne 0) { Fail "tar.exe failed with exit $LASTEXITCODE" } + + $sourceBin = Join-Path $shareDir 'bin\difyctl.cmd' + if (-not (Test-Path $sourceBin)) { $sourceBin = Join-Path $shareDir 'bin\difyctl.exe' } + if (-not (Test-Path $sourceBin)) { Fail "expected binary at bin\difyctl.{cmd,exe} after extract" } + + $shimSrc = Get-Item $sourceBin + Copy-Item -Path $sourceBin -Destination (Join-Path $binDir $shimSrc.Name) -Force +} +finally { + if (Test-Path $tmp) { Remove-Item -Recurse -Force $tmp } +} + +Write-Host "" +Write-Host "difyctl $cliV installed: $binDir" + +$userPath = [System.Environment]::GetEnvironmentVariable('Path', 'User') +if ($null -eq $userPath) { $userPath = '' } +if (-not ($userPath -split ';' | Where-Object { $_ -ieq $binDir })) { + $newPath = if ($userPath) { "$userPath;$binDir" } else { $binDir } + [System.Environment]::SetEnvironmentVariable('Path', $newPath, 'User') + Write-Host "added $binDir to user PATH (open a new terminal to pick it up)" +} +else { + Write-Host "verify: run 'difyctl version' in a new terminal" +} diff --git a/cli/scripts/install-cli.sh b/cli/scripts/install-cli.sh new file mode 100755 index 0000000000..c6eab92d52 --- /dev/null +++ b/cli/scripts/install-cli.sh @@ -0,0 +1,132 @@ +#!/bin/sh +# install-cli.sh — one-line difyctl installer for Linux and macOS. +# +# usage: +# curl -fsSL https://raw.githubusercontent.com/langgenius/dify/main/cli/scripts/install-cli.sh | sh +# +# env: DIFYCTL_VERSION (default latest), DIFYCTL_PREFIX (default $HOME/.local), +# DIFYCTL_REPO (default langgenius/dify). 
+# requires: curl, tar (xz), uname, jq, sha256sum or shasum. + +set -eu + +REPO="${DIFYCTL_REPO:-langgenius/dify}" +VERSION="${DIFYCTL_VERSION:-latest}" +PREFIX="${DIFYCTL_PREFIX:-${HOME}/.local}" + +err() { printf '%s\n' "install-cli: $*" >&2; } +die() { err "$*"; exit 1; } +need() { command -v "$1" >/dev/null 2>&1 || die "$1 is required"; } + +need curl +need tar +need uname +need jq + +if command -v sha256sum >/dev/null 2>&1; then + HASH="sha256sum" +elif command -v shasum >/dev/null 2>&1; then + HASH="shasum -a 256" +else + die "need sha256sum or shasum" +fi + +case "$(uname -s)" in + Linux*) os=linux ;; + Darwin*) os=darwin ;; + *) die "unsupported OS: $(uname -s)" ;; +esac + +case "$(uname -m)" in + x86_64|amd64) arch=x64 ;; + arm64|aarch64) arch=arm64 ;; + *) die "unsupported arch: $(uname -m)" ;; +esac + +target="${os}-${arch}" + +if [ "$VERSION" = "latest" ]; then + api="https://api.github.com/repos/${REPO}/releases/latest" +else + api="https://api.github.com/repos/${REPO}/releases/tags/${VERSION}" +fi + +release=$(curl -fsSL "$api") || die "could not fetch release metadata from ${api}" +tag=$(printf '%s' "$release" | jq -r '.tag_name') +[ -n "$tag" ] && [ "$tag" != "null" ] || die "release has no tag_name" + +matches=$(printf '%s' "$release" \ + | jq -r --arg t "$target" '.assets[].name | select(test("^difyctl-v[0-9]+\\.[0-9]+\\.[0-9]+(-[0-9A-Za-z.-]+)?-\($t)\\.tar\\.xz$"))') +count=$(printf '%s' "$matches" | grep -c . 
|| true) +case "$count" in + 0) die "no difyctl asset for ${target} on ${tag}" ;; + 1) asset="$matches" ;; + *) die "expected exactly 1 difyctl asset for ${target} on ${tag}, found ${count}: ${matches}" ;; +esac + +no_target="${asset%-${target}.tar.xz}" +cli_v="${no_target#difyctl-}" +checksums="difyctl-${cli_v}-checksums.txt" + +printf '%s' "$release" | jq -e --arg c "$checksums" '.assets[] | select(.name == $c)' >/dev/null \ + || die "checksum file ${checksums} missing on ${tag}; refusing to install unverified binary" + +url="https://github.com/${REPO}/releases/download/${tag}/${asset}" +sums_url="https://github.com/${REPO}/releases/download/${tag}/${checksums}" + +tmp=$(mktemp -d 2>/dev/null || mktemp -d -t difyctl-install) +trap 'rm -rf "$tmp"' EXIT INT TERM + +printf 'downloading %s\n from %s\n' "$asset" "$url" +curl -fsSL --retry 3 "$url" -o "${tmp}/${asset}" +curl -fsSL --retry 3 "$sums_url" -o "${tmp}/${checksums}" + +( + cd "$tmp" + grep " ${asset}\$" "$checksums" | $HASH -c - +) || die "checksum mismatch for ${asset}" + +if command -v cosign >/dev/null 2>&1; then + sig_url="${url}.sig" + pem_url="${url}.pem" + curl -fsSL --retry 3 "$sig_url" -o "${tmp}/${asset}.sig" \ + || die "tarball signature missing on ${tag}; refusing to install (cosign present)" + curl -fsSL --retry 3 "$pem_url" -o "${tmp}/${asset}.pem" \ + || die "tarball cert missing on ${tag}; refusing to install (cosign present)" + COSIGN_EXPERIMENTAL=1 cosign verify-blob \ + --certificate "${tmp}/${asset}.pem" \ + --signature "${tmp}/${asset}.sig" \ + --certificate-identity-regexp '^https://github.com/langgenius/dify/' \ + --certificate-oidc-issuer 'https://token.actions.githubusercontent.com' \ + "${tmp}/${asset}" \ + || die "cosign verification failed for ${asset}" + printf 'cosign: verified %s\n' "$asset" +else + printf 'note: cosign not installed; skipping signature verification (sha256 still enforced)\n' >&2 +fi + +share_dir="${PREFIX}/share/difyctl" +bin_dir="${PREFIX}/bin" +mkdir -p 
"$share_dir" "$bin_dir" + +printf 'extracting to %s\n' "$share_dir" +tar -xJf "${tmp}/${asset}" -C "$share_dir" --strip-components=1 + +target_bin="${share_dir}/bin/difyctl" +[ -x "$target_bin" ] || die "expected binary at ${target_bin} after extract" + +ln -sf "$target_bin" "${bin_dir}/difyctl" + +printf '\ndifyctl %s installed: %s/difyctl\n' "$cli_v" "$bin_dir" + +case ":${PATH}:" in + *":${bin_dir}:"*) + "${bin_dir}/difyctl" version >/dev/null 2>&1 \ + && printf 'verify: run "difyctl version"\n' \ + || err "binary present but failed to execute; check ${bin_dir}/difyctl" + ;; + *) + printf '\n%s is not on your PATH. Add this to your shell profile:\n' "$bin_dir" + printf ' export PATH="%s:$PATH"\n' "$bin_dir" + ;; +esac diff --git a/cli/scripts/lib/common.sh b/cli/scripts/lib/common.sh new file mode 100755 index 0000000000..3d32cb703d --- /dev/null +++ b/cli/scripts/lib/common.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +# scripts/lib/common.sh — shared shell helpers for cli/ scripts. + +[[ -n "${DIFYCTL_LIB_COMMON_SH:-}" ]] && return 0 +readonly DIFYCTL_LIB_COMMON_SH=1 + +log::info() { printf '\033[36m[info]\033[0m %s\n' "$*" >&2; } +log::warn() { printf '\033[33m[warn]\033[0m %s\n' "$*" >&2; } +log::err() { printf '\033[31m[err ]\033[0m %s\n' "$*" >&2; } + +die() { log::err "$*"; exit 1; } + +# Resolve the cli/ directory (parent of scripts/). +cli::root() { + local dir + dir="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
&& pwd)" + printf '%s' "$dir" +} + +require() { + command -v "$1" >/dev/null 2>&1 || die "missing dependency: $1${2:+ — $2}" +} diff --git a/cli/scripts/lib/resolve-buildinfo.ts b/cli/scripts/lib/resolve-buildinfo.ts new file mode 100644 index 0000000000..247c5ed1c0 --- /dev/null +++ b/cli/scripts/lib/resolve-buildinfo.ts @@ -0,0 +1,66 @@ +import type { ExecSyncOptions } from 'node:child_process' +import { execSync } from 'node:child_process' + +export const BUILD_CHANNELS = ['dev', 'rc', 'stable'] as const +export type BuildChannel = (typeof BUILD_CHANNELS)[number] + +export type BuildInfo = { + version: string + commit: string + buildDate: string + channel: BuildChannel + minDify: string + maxDify: string +} + +export type Env = Record + +export type GitProbe = (cmd: string) => string | null + +const GIT_PROBE_OPTS: ExecSyncOptions = { + stdio: ['ignore', 'pipe', 'ignore'], +} + +export const defaultGitProbe: GitProbe = (cmd) => { + try { + return execSync(cmd, GIT_PROBE_OPTS).toString().trim() || null + } + catch { + return null + } +} + +export type ResolveOptions = { + env?: Env + git?: GitProbe + now?: () => Date +} + +export function resolveBuildInfo(opts: ResolveOptions = {}): BuildInfo { + const env = opts.env ?? process.env + const git = opts.git ?? defaultGitProbe + const now = opts.now ?? (() => new Date()) + + const channel = env.DIFYCTL_CHANNEL ?? 'dev' + if (!(BUILD_CHANNELS as readonly string[]).includes(channel)) { + throw new Error( + `invalid DIFYCTL_CHANNEL: ${channel} (expected ${BUILD_CHANNELS.join(' | ')})`, + ) + } + + const version + = env.DIFYCTL_VERSION + ?? git('git describe --tags --dirty --always') + ?? '0.0.0-dev' + + const commit + = env.DIFYCTL_COMMIT + ?? git('git rev-parse HEAD') + ?? 'none' + + const buildDate = env.DIFYCTL_BUILD_DATE ?? now().toISOString() + const minDify = env.DIFYCTL_MIN_DIFY ?? '0.0.0' + const maxDify = env.DIFYCTL_MAX_DIFY ?? 
'0.0.0' + + return { version, commit, buildDate, channel: channel as BuildChannel, minDify, maxDify } +} diff --git a/cli/scripts/print-buildinfo.ts b/cli/scripts/print-buildinfo.ts new file mode 100644 index 0000000000..69f448c8d4 --- /dev/null +++ b/cli/scripts/print-buildinfo.ts @@ -0,0 +1,9 @@ +import { resolveBuildInfo } from './lib/resolve-buildinfo.js' + +const info = resolveBuildInfo() +process.stdout.write( + `version: ${info.version}\n` + + `commit: ${info.commit}\n` + + `built: ${info.buildDate}\n` + + `channel: ${info.channel}\n`, +) diff --git a/cli/scripts/release-bump-guard.sh b/cli/scripts/release-bump-guard.sh new file mode 100755 index 0000000000..293b23e594 --- /dev/null +++ b/cli/scripts/release-bump-guard.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash +# scripts/release-bump-guard.sh — auto-path only. Refuse if version+compat +# both unchanged vs. the channel-matching npm dist-tag. +# +# Required env: NEW_VERSION, NEW_MIN_DIFY, NEW_MAX_DIFY. + +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +require node +require jq +require npm + +cd "$(cli::root)" + +: "${NEW_VERSION:?NEW_VERSION is required}" +: "${NEW_MIN_DIFY:?NEW_MIN_DIFY is required}" +: "${NEW_MAX_DIFY:?NEW_MAX_DIFY is required}" + +channel=$(node -p "require('./package.json').difyctl.channel") +case "$channel" in + stable) dist_tag=latest ;; + rc) dist_tag=next ;; + *) die "unsupported channel for publish: ${channel}" ;; +esac + +dist_tags_json=$(npm view @langgenius/difyctl dist-tags --json 2>/dev/null || echo '{}') +prev_version=$(echo "$dist_tags_json" | jq -r --arg t "$dist_tag" '.[$t] // ""') + +if [[ -z "$prev_version" ]]; then + log::info "no prior release on dist-tag '${dist_tag}'; skipping bump guard" + exit 0 +fi + +if [[ "$prev_version" == "$NEW_VERSION" ]]; then + echo "::warning title=cli version not bumped::package.json version ${NEW_VERSION} is already published on dist-tag 
'${dist_tag}'. If this is a deliberate re-run (dify release re-cut, retry after failure), ignore. If you intended to ship new cli bytes, bump cli/package.json#version and re-run." + log::info "same version as npm dist-tag '${dist_tag}' (${prev_version}); skipping bump guard" + exit 0 +fi + +prev_meta=$(npm view "@langgenius/difyctl@${prev_version}" --json) +prev_min=$(echo "$prev_meta" | jq -r '.difyctl.compat.minDify') +prev_max=$(echo "$prev_meta" | jq -r '.difyctl.compat.maxDify') + +[[ "$NEW_VERSION" != "$prev_version" ]] \ + || die "version unchanged from npm dist-tag '${dist_tag}' (${prev_version}); bump cli/package.json" + +[[ "$NEW_MIN_DIFY" != "$prev_min" || "$NEW_MAX_DIFY" != "$prev_max" ]] \ + || die "compat unchanged from npm @${prev_version} on dist-tag '${dist_tag}' (${prev_min}..${prev_max}); bump in cli/package.json" + +log::info "bump guard passed: ${prev_version} → ${NEW_VERSION}, compat ${prev_min}..${prev_max} → ${NEW_MIN_DIFY}..${NEW_MAX_DIFY}" diff --git a/cli/scripts/release-cosign-sign.sh b/cli/scripts/release-cosign-sign.sh new file mode 100755 index 0000000000..5060c605c6 --- /dev/null +++ b/cli/scripts/release-cosign-sign.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# scripts/release-cosign-sign.sh — keyless cosign sign of tarballs + checksum +# manifest using GitHub Actions OIDC (Sigstore Fulcio cert + Rekor log entry). +# +# Required env: CLI_VERSION. Workflow must export id-token: write and set +# COSIGN_EXPERIMENTAL=1 (cli-release.yml does both). 
+ +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +require cosign + +: "${CLI_VERSION:?CLI_VERSION is required}" + +cd "$(cli::root)/dist" + +shopt -s nullglob +targets=(difyctl-v"${CLI_VERSION}"-*.tar.xz "difyctl-v${CLI_VERSION}-checksums.txt") +shopt -u nullglob + +[[ ${#targets[@]} -gt 0 ]] || die "no files to sign in dist/ for CLI_VERSION=${CLI_VERSION}" + +for f in "${targets[@]}"; do + [[ -f "$f" ]] || continue + cosign sign-blob --yes \ + --output-signature "${f}.sig" \ + --output-certificate "${f}.pem" \ + "$f" + log::info "signed ${f} → ${f}.sig + ${f}.pem" +done diff --git a/cli/scripts/release-npm-publish.sh b/cli/scripts/release-npm-publish.sh new file mode 100755 index 0000000000..4c5cf44edf --- /dev/null +++ b/cli/scripts/release-npm-publish.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# scripts/release-npm-publish.sh — channel-aware npm publish with +# EPUBLISHCONFLICT no-op trap. +# +# Required env: CHANNEL (stable | rc), NEW_VERSION. + +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +require npm +require node + +cd "$(cli::root)" + +: "${CHANNEL:?CHANNEL is required}" +: "${NEW_VERSION:?NEW_VERSION is required}" + +case "$CHANNEL" in + stable) dist_tag=latest ;; + rc) dist_tag=next ;; + *) die "unsupported channel for publish: ${CHANNEL}" ;; +esac + +pkg_version=$(node -p "require('./package.json').version") +[[ "$pkg_version" == "$NEW_VERSION" ]] \ + || die "package.json version (${pkg_version}) != NEW_VERSION (${NEW_VERSION})" + +set +e +output=$(npm publish --access public --provenance --tag "$dist_tag" 2>&1) +status=$? 
+set -e + +if [[ $status -eq 0 ]]; then + log::info "PUBLISHED @langgenius/difyctl@${NEW_VERSION} --tag ${dist_tag}" + printf '%s\n' "$output" + exit 0 +fi + +if printf '%s' "$output" | grep -qE 'EPUBLISHCONFLICT|cannot publish over the previously published versions'; then + log::warn "NO-OP: @langgenius/difyctl@${NEW_VERSION} already on registry (idempotent re-run)" + exit 0 +fi + +printf '%s\n' "$output" >&2 +die "npm publish failed (exit ${status}); see output above" diff --git a/cli/scripts/release-upload-tarballs.sh b/cli/scripts/release-upload-tarballs.sh new file mode 100755 index 0000000000..ac95176f8d --- /dev/null +++ b/cli/scripts/release-upload-tarballs.sh @@ -0,0 +1,99 @@ +#!/usr/bin/env bash +# scripts/release-upload-tarballs.sh — idempotent gh release upload of +# tarballs + checksum file (sha256-strict; skip on match, fail on mismatch) +# and cosign .sig/.pem signatures (overwrite-allowed; bytes vary per run). +# +# Required env: DIFY_TAG, CLI_VERSION, GH_TOKEN. + +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +require gh +require jq + +: "${DIFY_TAG:?DIFY_TAG is required}" +: "${CLI_VERSION:?CLI_VERSION is required}" + +cd "$(cli::root)" + +REPO_FLAG=(--repo langgenius/dify) + +if command -v sha256sum >/dev/null 2>&1; then + hash_cmd() { sha256sum "$1" | awk '{print $1}'; } +elif command -v shasum >/dev/null 2>&1; then + hash_cmd() { shasum -a 256 "$1" | awk '{print $1}'; } +else + die "no sha256 hasher found (need sha256sum or shasum)" +fi + +remote_json=$(gh release view "$DIFY_TAG" "${REPO_FLAG[@]}" --json assets -q '.assets') + +upload_one() { + local file="$1" + local mode="${2:-strict}" # strict | clobber + local name + name=$(basename "$file") + local local_sha + local_sha=$(hash_cmd "$file") + local remote_entry + remote_entry=$(printf '%s' "$remote_json" | jq -c --arg n "$name" '.[] | select(.name == $n)') + + if [[ -z "$remote_entry" ]]; then 
+ log::info "uploading ${name}" + gh release upload "$DIFY_TAG" "$file" "${REPO_FLAG[@]}" + return + fi + + if [[ "$mode" == "clobber" ]]; then + log::info "overwriting ${name} (clobber mode — cosign sig/cert)" + gh release upload "$DIFY_TAG" "$file" "${REPO_FLAG[@]}" --clobber + return + fi + + local remote_digest remote_sha="" + remote_digest=$(printf '%s' "$remote_entry" | jq -r '.digest // ""') + if [[ "$remote_digest" == sha256:* ]]; then + remote_sha="${remote_digest#sha256:}" + else + local tmp download_url + tmp=$(mktemp) + download_url=$(printf '%s' "$remote_entry" | jq -r '.url') + gh api -H 'Accept: application/octet-stream' "$download_url" > "$tmp" + remote_sha=$(hash_cmd "$tmp") + rm -f "$tmp" + fi + + if [[ "$local_sha" == "$remote_sha" ]]; then + log::info "skip ${name} (sha256 matches)" + return + fi + + die "asset ${name} already on ${DIFY_TAG} with different sha256 (local=${local_sha}, remote=${remote_sha}); refusing to overwrite" +} + +shopt -s nullglob +tars=(dist/difyctl-v"${CLI_VERSION}"-*.tar.xz) +checksum_file="dist/difyctl-v${CLI_VERSION}-checksums.txt" +sigs=(dist/difyctl-v"${CLI_VERSION}"-*.sig dist/difyctl-v"${CLI_VERSION}"-checksums.txt.sig) +pems=(dist/difyctl-v"${CLI_VERSION}"-*.pem dist/difyctl-v"${CLI_VERSION}"-checksums.txt.pem) +shopt -u nullglob + +[[ ${#tars[@]} -gt 0 ]] || die "no tarballs in dist/ matching difyctl-v${CLI_VERSION}-*.tar.xz" +[[ -f "$checksum_file" ]] || die "checksum file missing: ${checksum_file}" + +for f in "${tars[@]}" "$checksum_file"; do + upload_one "$f" strict +done + +# Cosign signatures + certs are keyless and re-generated per run with fresh +# timestamps; their bytes change each run but verify the same blob. Allow +# overwrite via --clobber so re-runs converge cleanly. 
+for f in "${sigs[@]}" "${pems[@]}"; do + [[ -f "$f" ]] || continue + upload_one "$f" clobber +done + +log::info "uploaded ${#tars[@]} tarballs + checksums.txt + signatures to dify release ${DIFY_TAG}" diff --git a/cli/scripts/release-validate-manifest.sh b/cli/scripts/release-validate-manifest.sh new file mode 100755 index 0000000000..44e88700fa --- /dev/null +++ b/cli/scripts/release-validate-manifest.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# scripts/release-validate-manifest.sh — validate cli/package.json release fields. + +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +cd "$(cli::root)" + +SEMVER_RE='^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$' + +version=$(node -p "require('./package.json').version") +channel=$(node -p "require('./package.json').difyctl.channel") +min_dify=$(node -p "require('./package.json').difyctl.compat.minDify") +max_dify=$(node -p "require('./package.json').difyctl.compat.maxDify") + +[[ "$version" =~ $SEMVER_RE ]] || die "invalid version: ${version}" + +case "$channel" in + rc|stable) ;; + *) die "invalid difyctl.channel: ${channel} (expected rc | stable)" ;; +esac + +[[ "$min_dify" =~ $SEMVER_RE ]] || die "invalid difyctl.compat.minDify: ${min_dify}" +[[ "$max_dify" =~ $SEMVER_RE ]] || die "invalid difyctl.compat.maxDify: ${max_dify}" + +case "$min_dify" in *[xX*]*) die "wildcards not allowed in minDify: ${min_dify}" ;; esac +case "$max_dify" in *[xX*]*) die "wildcards not allowed in maxDify: ${max_dify}" ;; esac + +cmp=$(node -e " +const a = process.argv[1].split('-')[0].split('.').map(Number) +const b = process.argv[2].split('-')[0].split('.').map(Number) +for (let i = 0; i < 3; i++) { + if (a[i] !== b[i]) { console.log(a[i] < b[i] ? 
-1 : 1); process.exit(0) } +} +console.log(0) +" "$min_dify" "$max_dify") + +[[ "$cmp" -le 0 ]] || die "minDify (${min_dify}) > maxDify (${max_dify})" + +log::info "manifest valid: version=${version} channel=${channel} compat=${min_dify}..${max_dify}" diff --git a/cli/scripts/release-write-checksums.sh b/cli/scripts/release-write-checksums.sh new file mode 100755 index 0000000000..2729d9a0b4 --- /dev/null +++ b/cli/scripts/release-write-checksums.sh @@ -0,0 +1,37 @@ +#!/usr/bin/env bash +# scripts/release-write-checksums.sh — write sha256 manifest for tarballs. +# +# Required env: CLI_VERSION (e.g. 0.1.0-rc.1). Output: +# cli/dist/difyctl-v<version>-checksums.txt + +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +: "${CLI_VERSION:?CLI_VERSION is required}" + +cd "$(cli::root)/dist" + +manifest="difyctl-v${CLI_VERSION}-checksums.txt" +> "$manifest" + +if command -v sha256sum >/dev/null 2>&1; then + hash_cmd="sha256sum" +elif command -v shasum >/dev/null 2>&1; then + hash_cmd="shasum -a 256" +else + die "no sha256 hasher found (need sha256sum or shasum)" +fi + +found=0 +for tar in difyctl-v"${CLI_VERSION}"-*.tar.xz; do + [[ -f "$tar" ]] || continue + $hash_cmd "$tar" >> "$manifest" + found=$((found + 1)) +done + +[[ "$found" -gt 0 ]] || die "no tarballs matching difyctl-v${CLI_VERSION}-*.tar.xz in dist/" + +log::info "wrote ${manifest} (${found} entries)" diff --git a/cli/scripts/release.sh b/cli/scripts/release.sh new file mode 100755 index 0000000000..cb6666d8dd --- /dev/null +++ b/cli/scripts/release.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +# scripts/release.sh — local-developer release build. +# +# Reads cli/package.json, validates, exports DIFYCTL_* env, runs pnpm build + +# oclif pack tarballs. cli-release.yml does NOT call this; the workflow inlines +# the same env contract.
+ +set -euo pipefail + +_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +# shellcheck source=lib/common.sh +source "${_dir}/lib/common.sh" + +require pnpm 'install with `corepack enable && corepack prepare pnpm@latest --activate`' +require node + +cd "$(cli::root)" + +scripts/release-validate-manifest.sh + +PKG_VERSION=$(node -p "require('./package.json').version") +CHANNEL=$(node -p "require('./package.json').difyctl.channel") +MIN_DIFY=$(node -p "require('./package.json').difyctl.compat.minDify") +MAX_DIFY=$(node -p "require('./package.json').difyctl.compat.maxDify") + +export DIFYCTL_VERSION="${PKG_VERSION}" +export DIFYCTL_CHANNEL="${CHANNEL}" +export DIFYCTL_MIN_DIFY="${MIN_DIFY}" +export DIFYCTL_MAX_DIFY="${MAX_DIFY}" +export DIFYCTL_COMMIT="$(git rev-parse HEAD)" +export DIFYCTL_BUILD_DATE="$(git log -1 --format=%cI HEAD)" + +log::info "release ${DIFYCTL_VERSION} (channel=${DIFYCTL_CHANNEL}, compat=${MIN_DIFY}..${MAX_DIFY})" +pnpm build +pnpm pack:tarballs + +log::info "artifacts in dist/" +ls -lh dist/ 2>/dev/null | tail -n +2 >&2 || true diff --git a/cli/scripts/run-smoke.ts b/cli/scripts/run-smoke.ts new file mode 100644 index 0000000000..e83f35a4fb --- /dev/null +++ b/cli/scripts/run-smoke.ts @@ -0,0 +1,44 @@ +#!/usr/bin/env -S node --import tsx +import { execSync } from 'node:child_process' + +type Check = { name: string, run: () => void } + +const baseUrlIdx = process.argv.indexOf('--base-url') +const baseUrl = baseUrlIdx > -1 ? 
process.argv[baseUrlIdx + 1] : 'http://localhost:5001' +if (!baseUrl) { + console.error('usage: run-smoke.ts --base-url <url>') + process.exit(2) +} + +const env = { ...process.env, DIFY_BASE_URL: baseUrl } + +function cli(args: string): string { + return execSync(`pnpm exec tsx bin/dev.js ${args}`, { env, encoding: 'utf8' }) +} + +const checks: Check[] = [ + { name: 'config show', run: () => { cli('config show') } }, + { name: 'get workspace', run: () => { + if (!cli('get workspace').includes('id')) + throw new Error('no workspace listed') + } }, + { name: 'get apps', run: () => { cli('get apps') } }, + { name: 'difyctl version prints compat', run: () => { + if (!cli('version').includes('compat:')) + throw new Error('no compat line') + } }, +] + +let failed = 0 +for (const c of checks) { + try { + c.run() + console.log(`[x] ${c.name}`) + } + catch (err) { + failed++ + console.log(`[ ] ${c.name} — ${(err as Error).message}`) + } +} +console.log(`\n${checks.length - failed}/${checks.length} checks passed`) +process.exit(failed > 0 ?
1 : 0) diff --git a/cli/src/api/account-sessions.ts b/cli/src/api/account-sessions.ts new file mode 100644 index 0000000000..a64d8d8ade --- /dev/null +++ b/cli/src/api/account-sessions.ts @@ -0,0 +1,24 @@ +import type { KyInstance } from 'ky' +import type { SessionListResponse } from '../types/account-session.js' +import { SessionListResponseSchema } from '../types/account-session.js' + +export class AccountSessionsClient { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async list(): Promise { + const raw = await this.http.get('account/sessions').json() + return SessionListResponseSchema.parse(raw) + } + + async revoke(sessionId: string): Promise { + await this.http.delete(`account/sessions/${encodeURIComponent(sessionId)}`) + } + + async revokeSelf(): Promise { + await this.http.delete('account/sessions/self') + } +} diff --git a/cli/src/api/account.ts b/cli/src/api/account.ts new file mode 100644 index 0000000000..13192d3fa3 --- /dev/null +++ b/cli/src/api/account.ts @@ -0,0 +1,16 @@ +import type { KyInstance } from 'ky' +import type { AccountResponse } from '../types/account.js' +import { AccountResponseSchema } from '../types/account.js' + +export class AccountClient { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async get(): Promise { + const raw = await this.http.get('account').json() + return AccountResponseSchema.parse(raw) + } +} diff --git a/cli/src/api/app-meta.test.ts b/cli/src/api/app-meta.test.ts new file mode 100644 index 0000000000..5a61c34429 --- /dev/null +++ b/cli/src/api/app-meta.test.ts @@ -0,0 +1,91 @@ +import type { DifyMock } from '../../test/fixtures/dify-mock/server.js' +import { mkdtemp, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { startMock } from '../../test/fixtures/dify-mock/server.js' +import { 
loadAppInfoCache } from '../cache/app-info.js' +import { createClient } from '../http/client.js' +import { FieldInfo, FieldParameters } from '../types/app.js' +import { AppMetaClient } from './app-meta.js' +import { AppsClient } from './apps.js' + +describe('AppMetaClient', () => { + let mock: DifyMock + let dir: string + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + dir = await mkdtemp(join(tmpdir(), 'difyctl-meta-')) + }) + afterEach(async () => { + await mock.stop() + await rm(dir, { recursive: true, force: true }) + }) + + it('cache miss → fetch → populate; warm hit skips network', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const apps = new AppsClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const spy = vi.spyOn(apps, 'describe') + const client = new AppMetaClient({ apps, host: mock.url, cache }) + + const m1 = await client.get('app-1', 'ws-1', [FieldInfo]) + expect(m1.info?.id).toBe('app-1') + expect(spy).toHaveBeenCalledTimes(1) + + const m2 = await client.get('app-1', 'ws-1', [FieldInfo]) + expect(m2.info?.id).toBe('app-1') + expect(spy).toHaveBeenCalledTimes(1) + }) + + it('slim hit + full request triggers fresh fetch + merges', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const apps = new AppsClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const spy = vi.spyOn(apps, 'describe') + const client = new AppMetaClient({ apps, host: mock.url, cache }) + + await client.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(1) + + const full = await client.get('app-1', 'ws-1', [FieldInfo, FieldParameters]) + expect(spy).toHaveBeenCalledTimes(2) + expect(full.coveredFields.has(FieldParameters)).toBe(true) + }) + + it('expired cache entry refetches', async () => { + const cache = await loadAppInfoCache({ configDir: dir, ttlMs: 100, now: () => new Date('2026-05-09T00:00:00Z') }) + const apps = new AppsClient(createClient({ host: 
mock.url, bearer: 'dfoa_test' })) + const spy = vi.spyOn(apps, 'describe') + const client = new AppMetaClient({ apps, host: mock.url, cache, now: () => new Date('2026-05-09T00:00:00Z') }) + + await client.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(1) + + const client2 = new AppMetaClient({ apps, host: mock.url, cache, now: () => new Date('2026-05-09T00:00:01Z') }) + await client2.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(2) + }) + + it('invalidate forces next get to fetch', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const apps = new AppsClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const spy = vi.spyOn(apps, 'describe') + const client = new AppMetaClient({ apps, host: mock.url, cache }) + + await client.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(1) + + await client.invalidate('app-1') + await client.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(2) + }) + + it('no cache: each call hits network', async () => { + const apps = new AppsClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const spy = vi.spyOn(apps, 'describe') + const client = new AppMetaClient({ apps, host: mock.url }) + + await client.get('app-1', 'ws-1', [FieldInfo]) + await client.get('app-1', 'ws-1', [FieldInfo]) + expect(spy).toHaveBeenCalledTimes(2) + }) +}) diff --git a/cli/src/api/app-meta.ts b/cli/src/api/app-meta.ts new file mode 100644 index 0000000000..8d70385043 --- /dev/null +++ b/cli/src/api/app-meta.ts @@ -0,0 +1,45 @@ +import type { AppInfoCache } from '../cache/app-info.js' +import type { AppMeta, AppMetaFieldKey } from '../types/app-meta.js' +import type { AppsClient } from './apps.js' +import { covers, fromDescribe, mergeMeta } from '../types/app-meta.js' + +export type AppMetaClientOptions = { + readonly apps: AppsClient + readonly host: string + readonly cache?: AppInfoCache + readonly now?: () => Date +} + +export 
class AppMetaClient { + private readonly apps: AppsClient + private readonly host: string + private readonly cache: AppInfoCache | undefined + private readonly now: () => Date + + constructor(opts: AppMetaClientOptions) { + this.apps = opts.apps + this.host = opts.host + this.cache = opts.cache + this.now = opts.now ?? (() => new Date()) + } + + async get(appId: string, workspaceId: string, fields: readonly AppMetaFieldKey[] = []): Promise { + const cached = this.cache?.get(this.host, appId) + if (cached !== undefined && this.cache?.isFresh(cached, this.now()) === true && covers(cached.meta, fields)) + return cached.meta + + const resp = await this.apps.describe(appId, workspaceId, fields.length === 0 ? undefined : fields) + const fresh = fromDescribe(resp, fields) + const merged = cached !== undefined && this.cache?.isFresh(cached, this.now()) === true + ? mergeMeta(cached.meta, fresh) + : fresh + if (this.cache !== undefined) + await this.cache.set(this.host, appId, merged) + return merged + } + + async invalidate(appId: string): Promise { + if (this.cache !== undefined) + await this.cache.delete(this.host, appId) + } +} diff --git a/cli/src/api/app-run.test.ts b/cli/src/api/app-run.test.ts new file mode 100644 index 0000000000..5b576845bf --- /dev/null +++ b/cli/src/api/app-run.test.ts @@ -0,0 +1,122 @@ +import type { DifyMock } from '../../test/fixtures/dify-mock/server.js' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../test/fixtures/dify-mock/server.js' +import { createClient } from '../http/client.js' +import { AppRunClient, buildRunBody } from './app-run.js' + +describe('buildRunBody', () => { + it('sets response_mode=blocking by default', () => { + expect(buildRunBody({}).response_mode).toBe('blocking') + }) + + it('omits query when message empty', () => { + expect('query' in buildRunBody({})).toBe(false) + }) + + it('includes query when message present', () => { + expect(buildRunBody({ message: 'hi' 
}).query).toBe('hi') + }) + + it('passes through inputs', () => { + const body = buildRunBody({ inputs: { a: '1' } }) + expect(body.inputs).toEqual({ a: '1' }) + }) + + it('omits conversation_id when missing/empty', () => { + expect('conversation_id' in buildRunBody({ conversationId: '' })).toBe(false) + }) + + it('includes workspace_id when set', () => { + expect(buildRunBody({ workspaceId: 'ws-1' }).workspace_id).toBe('ws-1') + }) +}) + +describe('AppRunClient.runBlocking', () => { + let mock: DifyMock + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + }) + afterEach(async () => { + await mock.stop() + }) + + it('returns chat-shaped envelope for chat app', async () => { + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const out = await c.runBlocking('app-1', buildRunBody({ message: 'hi' })) + expect(out.mode).toBe('chat') + expect(out.answer).toBe('echo: hi') + }) + + it('returns workflow-shaped envelope for workflow app', async () => { + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const out = await c.runBlocking('app-2', buildRunBody({ inputs: { x: '1' } })) + expect((out.data as { status: string }).status).toBe('succeeded') + }) + + it('404 unknown app surfaces as error', async () => { + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 })) + await expect(c.runBlocking('nope', buildRunBody({}))).rejects.toThrow() + }) +}) + +describe('buildRunBody response_mode override', () => { + it('sets response_mode=streaming when requested', () => { + expect(buildRunBody({ responseMode: 'streaming' }).response_mode).toBe('streaming') + }) +}) + +describe('AppRunClient.runStream', () => { + let mock: DifyMock + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + }) + afterEach(async () => { + await mock.stop() + }) + + it('yields events for chat app', async () => { + const c = new 
AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const iter = await c.runStream('app-1', buildRunBody({ message: 'hi', responseMode: 'streaming' })) + const dec = new TextDecoder() + const names: string[] = [] + const datas: string[] = [] + for await (const ev of iter) { + names.push(ev.name) + datas.push(dec.decode(ev.data)) + } + expect(names).toEqual(['message', 'message', 'message_end']) + expect(datas[0]).toContain('"answer":"echo: "') + expect(datas[1]).toContain('"answer":"hi"') + }) + + it('throws typed BaseError on non-2xx open', async () => { + mock.setScenario('server-5xx') + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 })) + await expect( + c.runStream('app-1', buildRunBody({ message: 'hi', responseMode: 'streaming' })), + ).rejects.toMatchObject({ code: 'server_5xx' }) + }) + + it('aborts when signal fires', async () => { + expect.assertions(1) + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const ctrl = new AbortController() + const iter = await c.runStream('app-1', buildRunBody({ message: 'hi', responseMode: 'streaming' }), { signal: ctrl.signal }) + ctrl.abort() + try { + for await (const _ of iter) { /* drain */ } + } + catch (e) { + expect((e as Error).name).toBe('AbortError') + } + }) + + it('derives event name from JSON event field when SSE event line absent', async () => { + const c = new AppRunClient(createClient({ host: mock.url, bearer: 'dfoa_test' })) + const iter = await c.runStream('app-2', buildRunBody({ inputs: { x: '1' }, responseMode: 'streaming' })) + const names: string[] = [] + for await (const ev of iter) + names.push(ev.name) + expect(names).toEqual(['workflow_started', 'node_started', 'node_finished', 'workflow_finished']) + }) +}) diff --git a/cli/src/api/app-run.ts b/cli/src/api/app-run.ts new file mode 100644 index 0000000000..7c79a88345 --- /dev/null +++ b/cli/src/api/app-run.ts @@ -0,0 +1,64 @@ +import type { 
KyInstance } from 'ky' +import type { SseEvent } from '../http/sse.js' +import { normalizeDifyStream } from '../http/sse-dify.js' +import { parseSSE } from '../http/sse.js' + +export type RunResponse = Record + +export type ResponseMode = 'blocking' | 'streaming' + +export type RunBodyArgs = { + readonly message?: string + readonly inputs?: Readonly> + readonly conversationId?: string + readonly workspaceId?: string + readonly responseMode?: ResponseMode +} + +export function buildRunBody(args: RunBodyArgs): Record { + const body: Record = { + inputs: args.inputs ?? {}, + response_mode: args.responseMode ?? 'blocking', + } + if (args.message !== undefined && args.message !== '') + body.query = args.message + if (args.conversationId !== undefined && args.conversationId !== '') + body.conversation_id = args.conversationId + if (args.workspaceId !== undefined && args.workspaceId !== '') + body.workspace_id = args.workspaceId + return body +} + +export type RunStreamOptions = { + signal?: AbortSignal +} + +export class AppRunClient { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async runBlocking(appId: string, body: Record): Promise { + const raw = await this.http.post(`apps/${encodeURIComponent(appId)}/run`, { json: body }).json() + return raw as RunResponse + } + + async runStream( + appId: string, + body: Record, + opts: RunStreamOptions = {}, + ): Promise> { + const path = `apps/${encodeURIComponent(appId)}/run` + const res = await this.http.post(path, { + json: body, + headers: { Accept: 'text/event-stream' }, + retry: { limit: 0 }, + signal: opts.signal, + }) + if (res.body === null) + throw new Error('streaming response body missing') + return normalizeDifyStream(parseSSE(res.body, opts.signal)) + } +} diff --git a/cli/src/api/apps.ts b/cli/src/api/apps.ts new file mode 100644 index 0000000000..196392423c --- /dev/null +++ b/cli/src/api/apps.ts @@ -0,0 +1,44 @@ +import type { KyInstance } from 'ky' +import 
type { DescribeResponse, ListResponse } from '../types/app.js' +import { DescribeResponseSchema, ListResponseSchema } from '../types/app.js' + +export type ListQuery = { + readonly workspaceId: string + readonly page?: number + readonly limit?: number + readonly mode?: string + readonly name?: string + readonly tag?: string +} + +export class AppsClient { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async list(q: ListQuery): Promise { + const params = new URLSearchParams() + params.set('workspace_id', q.workspaceId) + params.set('page', String(q.page ?? 1)) + params.set('limit', String(q.limit ?? 20)) + if (q.mode !== undefined && q.mode !== '') + params.set('mode', q.mode) + if (q.name !== undefined && q.name !== '') + params.set('name', q.name) + if (q.tag !== undefined && q.tag !== '') + params.set('tag', q.tag) + const raw = await this.http.get('apps', { searchParams: params }).json() + return ListResponseSchema.parse(raw) + } + + async describe(appId: string, workspaceId: string, fields?: readonly string[]): Promise { + const params = new URLSearchParams() + params.set('workspace_id', workspaceId) + if (fields !== undefined && fields.length > 0) + params.set('fields', fields.join(',')) + const raw = await this.http.get(`apps/${encodeURIComponent(appId)}/describe`, { searchParams: params }).json() + return DescribeResponseSchema.parse(raw) + } +} diff --git a/cli/src/api/device-flow.test.ts b/cli/src/api/device-flow.test.ts new file mode 100644 index 0000000000..e850add937 --- /dev/null +++ b/cli/src/api/device-flow.test.ts @@ -0,0 +1,215 @@ +import type { AddressInfo } from 'node:net' +import type { DifyMock } from '../../test/fixtures/dify-mock/server.js' +import type { CodeResponse } from './oauth-device.js' +import { Buffer } from 'node:buffer' +import * as http from 'node:http' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from 
'../../test/fixtures/dify-mock/server.js' +import { isBaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { createClient } from '../http/client.js' +import { DEFAULT_CLIENT_ID, DeviceFlowApi } from './oauth-device.js' + +type StubServer = { + url: string + stop: () => Promise +} + +function startStub(handler: (req: http.IncomingMessage, res: http.ServerResponse) => void): Promise { + return new Promise((resolve, reject) => { + const server = http.createServer(handler) + server.listen(0, '127.0.0.1', () => { + const addr = server.address() as AddressInfo + resolve({ + url: `http://127.0.0.1:${addr.port}`, + stop: () => new Promise((res, rej) => server.close(err => err ? rej(err) : res())), + }) + }) + server.on('error', reject) + }) +} + +function jsonStub(status: number, body: unknown): (req: http.IncomingMessage, res: http.ServerResponse) => void { + return (_req, res) => { + const payload = JSON.stringify(body) + res.writeHead(status, { 'content-type': 'application/json', 'content-length': Buffer.byteLength(payload) }) + res.end(payload) + } +} + +function makeApi(mock: DifyMock): DeviceFlowApi { + return new DeviceFlowApi(createClient({ host: mock.url })) +} + +describe('DeviceFlowApi.requestCode', () => { + let mock: DifyMock + + beforeEach(async () => { + mock = await startMock() + }) + + afterEach(async () => { + await mock.stop() + }) + + it('POSTs to /openapi/v1/oauth/device/code with default client_id', async () => { + const api = makeApi(mock) + const out = await api.requestCode({ device_label: 'difyctl on host' }) + expect(out.user_code).toBe('ABCD-1234') + expect(out.device_code).toBeDefined() + expect(DEFAULT_CLIENT_ID).toBe('difyctl') + }) + + it('strips trailing slash from host', async () => { + const api = new DeviceFlowApi(createClient({ host: `${mock.url}/` })) + const out = await api.requestCode({ device_label: 'l' }) + expect(out.device_code).toBeDefined() + }) + + it('throws 
BaseError(unsupported_endpoint) on 404', async () => { + let stub: StubServer | undefined + try { + stub = await startStub(jsonStub(404, {})) + const api = new DeviceFlowApi(createClient({ host: stub.url })) + let caught: unknown + try { + await api.requestCode({ device_label: 'l' }) + } + catch (e) { + caught = e + } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.UnsupportedEndpoint) + } + finally { + await stub?.stop() + } + }) + + it('rejects empty device_label', async () => { + const api = makeApi(mock) + await expect(api.requestCode({ device_label: '' })).rejects.toThrow(/device_label/) + }) +}) + +describe('DeviceFlowApi.pollOnce', () => { + let mock: DifyMock + + beforeEach(async () => { + mock = await startMock() + }) + + afterEach(async () => { + await mock.stop() + }) + + it('returns approved with token on 200', async () => { + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('approved') + if (r.status === 'approved') + expect(r.success.token).toBe('dfoa_test') + }) + + it('maps authorization_pending to pending', async () => { + let stub: StubServer | undefined + try { + stub = await startStub(jsonStub(400, { error: 'authorization_pending' })) + const api = new DeviceFlowApi(createClient({ host: stub.url })) + const r = await api.pollOnce({ device_code: 'dc' }) + expect(r.status).toBe('pending') + } + finally { + await stub?.stop() + } + }) + + it('maps slow_down to slow_down', async () => { + mock.setScenario('slow-down') + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('slow_down') + }) + + it('maps expired_token to expired', async () => { + mock.setScenario('expired') + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('expired') + }) + + it('maps access_denied to denied', async () => { + 
mock.setScenario('denied') + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('denied') + }) + + it('throws BaseError(unsupported_endpoint) on 404', async () => { + let stub: StubServer | undefined + try { + stub = await startStub(jsonStub(404, {})) + const api = new DeviceFlowApi(createClient({ host: stub.url })) + await expect(api.pollOnce({ device_code: 'dc' })).rejects.toThrow(/device flow/i) + } + finally { + await stub?.stop() + } + }) + + it('signals retryable on 5xx', async () => { + mock.setScenario('server-5xx') + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('retry_5xx') + }) + + it('rejects 200 with empty body', async () => { + let stub: StubServer | undefined + try { + stub = await startStub(jsonStub(200, {})) + const api = new DeviceFlowApi(createClient({ host: stub.url })) + await expect(api.pollOnce({ device_code: 'dc' })).rejects.toThrow(/no OAuth envelope|token/i) + } + finally { + await stub?.stop() + } + }) + + it('rejects unknown error code', async () => { + let stub: StubServer | undefined + try { + stub = await startStub(jsonStub(400, { error: 'something_else' })) + const api = new DeviceFlowApi(createClient({ host: stub.url })) + await expect(api.pollOnce({ device_code: 'dc' })).rejects.toThrow(/unknown poll error/) + } + finally { + await stub?.stop() + } + }) + + it('preserves dfoe_ token kind in approved branch', async () => { + mock.setScenario('sso') + const api = makeApi(mock) + const r = await api.pollOnce({ device_code: 'devcode-1' }) + expect(r.status).toBe('approved') + if (r.status === 'approved') { + expect(r.success.token).toBe('dfoe_test') + expect(r.success.subject_type).toBe('external_sso') + } + }) +}) + +describe('DeviceFlowApi types', () => { + it('CodeResponse has required fields', () => { + const r: CodeResponse = { + device_code: 'd', + user_code: 'u', + verification_uri: 'v', + expires_in: 1, + 
interval: 1, + } + expect(r.device_code).toBe('d') + }) +}) diff --git a/cli/src/api/oauth-device.ts b/cli/src/api/oauth-device.ts new file mode 100644 index 0000000000..1368a7617b --- /dev/null +++ b/cli/src/api/oauth-device.ts @@ -0,0 +1,142 @@ +import type { KyInstance } from 'ky' +import { BaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' + +export const DEFAULT_CLIENT_ID = 'difyctl' + +export type CodeRequest = { + client_id?: string + device_label: string +} + +export type CodeResponse = { + device_code: string + user_code: string + verification_uri: string + expires_in: number + interval: number +} + +export type PollRequest = { + client_id?: string + device_code: string +} + +export type PollAccount = { + id: string + email: string + name: string +} + +export type PollWorkspace = { + id: string + name: string + role: string +} + +export type PollSuccess = { + token: string + expires_at?: string + subject_type?: string + subject_email?: string + subject_issuer?: string + account?: PollAccount + workspaces?: readonly PollWorkspace[] + default_workspace_id?: string + token_id?: string +} + +export type PollResult + = | { status: 'pending' } + | { status: 'slow_down' } + | { status: 'expired' } + | { status: 'denied' } + | { status: 'retry_5xx' } + | { status: 'approved', success: PollSuccess } + +const POLL_ERROR_TO_STATUS: Record = { + authorization_pending: 'pending', + slow_down: 'slow_down', + expired_token: 'expired', + access_denied: 'denied', +} + +export class DeviceFlowApi { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async requestCode(req: CodeRequest): Promise { + if (req.device_label === '') { + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: 'device_label is required', + }) + } + const body = { client_id: req.client_id ?? 
DEFAULT_CLIENT_ID, device_label: req.device_label } + const res = await this.http.post('oauth/device/code', { json: body, throwHttpErrors: false, context: { skipClassify: true } }) + if (res.status === 404) + throw versionSkew() + if (!res.ok) { + throw new BaseError({ + code: ErrorCode.Server4xxOther, + message: `device/code: HTTP ${res.status}`, + httpStatus: res.status, + }) + } + return await res.json() as CodeResponse + } + + async pollOnce(req: PollRequest): Promise<PollResult> { + if (req.device_code === '') { + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: 'device_code is required', + }) + } + const body = { client_id: req.client_id ?? DEFAULT_CLIENT_ID, device_code: req.device_code } + const res = await this.http.post('oauth/device/token', { json: body, throwHttpErrors: false, context: { skipClassify: true } }) + if (res.status === 404) + throw versionSkew() + if (res.status >= 500) + return { status: 'retry_5xx' } + let payload: { error?: string } & Partial<PollSuccess> = {} + try { + const text = await res.text() + payload = text === '' ?
{} : JSON.parse(text) as typeof payload + } + catch (err) { + throw new BaseError({ + code: ErrorCode.Unknown, + message: `decode poll response: ${(err as Error).message}`, + }) + } + if (typeof payload.error === 'string' && payload.error !== '') { + const status = POLL_ERROR_TO_STATUS[payload.error] + if (status === undefined) { + throw new BaseError({ + code: ErrorCode.Unknown, + message: `unknown poll error "${payload.error}"`, + }) + } + return { status } as PollResult + } + if (typeof payload.token !== 'string' || payload.token === '') { + throw new BaseError({ + code: ErrorCode.Unknown, + message: `poll: ${res.status} with no OAuth envelope`, + }) + } + return { status: 'approved', success: payload as PollSuccess } + } +} + +function versionSkew(): BaseError { + return new BaseError({ + code: ErrorCode.UnsupportedEndpoint, + message: 'this Dify host does not implement the OAuth device flow', + httpStatus: 404, + }) +} diff --git a/cli/src/api/workspaces.ts b/cli/src/api/workspaces.ts new file mode 100644 index 0000000000..309a7892e1 --- /dev/null +++ b/cli/src/api/workspaces.ts @@ -0,0 +1,16 @@ +import type { KyInstance } from 'ky' +import type { WorkspaceListResponse } from '../types/workspace.js' +import { WorkspaceListResponseSchema } from '../types/workspace.js' + +export class WorkspacesClient { + private readonly http: KyInstance + + constructor(http: KyInstance) { + this.http = http + } + + async list(): Promise { + const raw = await this.http.get('workspaces').json() + return WorkspaceListResponseSchema.parse(raw) + } +} diff --git a/cli/src/auth/file-backend.test.ts b/cli/src/auth/file-backend.test.ts new file mode 100644 index 0000000000..65ee66f6a9 --- /dev/null +++ b/cli/src/auth/file-backend.test.ts @@ -0,0 +1,101 @@ +import { mkdtemp, rm, stat, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { FILE_PERM } from 
'../config/dir.js' +import { FileBackend, TOKENS_FILE_NAME } from './file-backend.js' + +describe('FileBackend', () => { + let dir: string + let backend: FileBackend + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-tokens-')) + backend = new FileBackend(dir) + }) + + afterEach(async () => { + await rm(dir, { recursive: true, force: true }) + }) + + it('returns undefined when file is missing', async () => { + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) + + it('returns empty list when file is missing', async () => { + expect(await backend.list('cloud.dify.ai')).toEqual([]) + }) + + it('round-trips put/get for a single token', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_abc') + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBe('dfoa_abc') + }) + + it('list returns accountIds for the given host', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + await backend.put('cloud.dify.ai', 'acct-2', 'dfoa_b') + await backend.put('self.example.com', 'acct-3', 'dfoa_c') + const ids = await backend.list('cloud.dify.ai') + expect([...ids].sort()).toEqual(['acct-1', 'acct-2']) + }) + + it('list returns empty array for unknown host', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + expect(await backend.list('other.example.com')).toEqual([]) + }) + + it('delete removes the entry', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + await backend.delete('cloud.dify.ai', 'acct-1') + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) + + it('delete is a no-op for missing entries', async () => { + await expect(backend.delete('cloud.dify.ai', 'missing')).resolves.toBeUndefined() + }) + + it('delete prunes empty host entries', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + await backend.delete('cloud.dify.ai', 'acct-1') + expect(await backend.list('cloud.dify.ai')).toEqual([]) + }) + + 
it('overwrites existing token for same host+accountId', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_old') + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_new') + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBe('dfoa_new') + }) + + it('writes file with mode 0600', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + const info = await stat(join(dir, TOKENS_FILE_NAME)) + expect(info.mode & 0o777).toBe(FILE_PERM) + }) + + it('rewrites existing file with mode 0600 even if previously permissive', async () => { + const path = join(dir, TOKENS_FILE_NAME) + await writeFile(path, 'hosts: {}\n', { mode: 0o644 }) + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + const info = await stat(path) + expect(info.mode & 0o777).toBe(FILE_PERM) + }) + + it('writes valid YAML readable by a fresh backend', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + const fresh = new FileBackend(dir) + expect(await fresh.get('cloud.dify.ai', 'acct-1')).toBe('dfoa_a') + }) + + it('persists multiple hosts simultaneously', async () => { + await backend.put('cloud.dify.ai', 'acct-1', 'dfoa_a') + await backend.put('self.example.com', 'acct-2', 'dfoa_b') + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBe('dfoa_a') + expect(await backend.get('self.example.com', 'acct-2')).toBe('dfoa_b') + }) + + it('treats malformed YAML as empty', async () => { + const path = join(dir, TOKENS_FILE_NAME) + await writeFile(path, 'not: valid: yaml: [\n', { mode: FILE_PERM }) + expect(await backend.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) +}) diff --git a/cli/src/auth/file-backend.ts b/cli/src/auth/file-backend.ts new file mode 100644 index 0000000000..49bf4d44ed --- /dev/null +++ b/cli/src/auth/file-backend.ts @@ -0,0 +1,99 @@ +import type { TokenStore } from './store.js' +import { mkdir, readFile, rename, stat, unlink, writeFile } from 'node:fs/promises' +import { join } from 'node:path' +import yaml from 
'js-yaml' +import { DIR_PERM, FILE_PERM } from '../config/dir.js' + +export const TOKENS_FILE_NAME = 'tokens.yml' + +type AccountMap = Record +type HostMap = Record +type TokensFile = { hosts?: HostMap } + +export class FileBackend implements TokenStore { + private readonly dir: string + private readonly path: string + + constructor(dir: string) { + this.dir = dir + this.path = join(dir, TOKENS_FILE_NAME) + } + + async put(host: string, accountId: string, token: string): Promise { + const file = await this.read() + const hosts = file.hosts ?? {} + const accounts = hosts[host] ?? {} + accounts[accountId] = token + hosts[host] = accounts + await this.write({ hosts }) + } + + async get(host: string, accountId: string): Promise { + const file = await this.read() + return file.hosts?.[host]?.[accountId] + } + + async delete(host: string, accountId: string): Promise { + const file = await this.read() + const accounts = file.hosts?.[host] + if (accounts === undefined || !(accountId in accounts)) + return + delete accounts[accountId] + if (Object.keys(accounts).length === 0 && file.hosts !== undefined) + delete file.hosts[host] + await this.write(file) + } + + async list(host: string): Promise { + const file = await this.read() + const accounts = file.hosts?.[host] + return accounts === undefined ? 
[] : Object.keys(accounts) + } + + private async read(): Promise { + let raw: string + try { + raw = await readFile(this.path, 'utf8') + } + catch (err) { + if ((err as NodeJS.ErrnoException).code === 'ENOENT') + return {} + throw err + } + let parsed: unknown + try { + parsed = yaml.load(raw) + } + catch { + return {} + } + if (parsed === null || typeof parsed !== 'object') + return {} + return parsed as TokensFile + } + + private async write(file: TokensFile): Promise { + await mkdir(this.dir, { recursive: true, mode: DIR_PERM }) + const body = yaml.dump(file, { lineWidth: -1, noRefs: true }) + const tmp = `${this.path}.tmp.${process.pid}.${Date.now()}` + try { + await writeFile(tmp, body, { mode: FILE_PERM }) + await rename(tmp, this.path) + } + catch (err) { + try { + await unlink(tmp) + } + catch { /* tmp may not exist */ } + throw err + } + try { + const info = await stat(this.path) + if ((info.mode & 0o777) !== FILE_PERM) { + const { chmod } = await import('node:fs/promises') + await chmod(this.path, FILE_PERM) + } + } + catch { /* best-effort permission tighten */ } + } +} diff --git a/cli/src/auth/hosts.test.ts b/cli/src/auth/hosts.test.ts new file mode 100644 index 0000000000..2bc1b2fea9 --- /dev/null +++ b/cli/src/auth/hosts.test.ts @@ -0,0 +1,131 @@ +import { mkdtemp, readFile, rm, stat, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { FILE_PERM } from '../config/dir.js' +import { HOSTS_FILE_NAME, HostsBundleSchema, loadHosts, saveHosts } from './hosts.js' + +describe('HostsBundleSchema', () => { + it('parses a minimal logged-out bundle', () => { + const parsed = HostsBundleSchema.parse({}) + expect(parsed.current_host).toBe('') + expect(parsed.token_storage).toBe('file') + }) + + it('parses a logged-in keychain bundle', () => { + const parsed = HostsBundleSchema.parse({ + current_host: 'cloud.dify.ai', + account: { id: 
'acct-1', email: 'a@b.c', name: 'A' }, + workspace: { id: 'ws-1', name: 'My Space', role: 'owner' }, + token_storage: 'keychain', + token_id: 'tok_xyz', + }) + expect(parsed.token_storage).toBe('keychain') + expect(parsed.tokens).toBeUndefined() + }) + + it('parses a logged-in file bundle with bearer', () => { + const parsed = HostsBundleSchema.parse({ + current_host: 'cloud.dify.ai', + token_storage: 'file', + tokens: { bearer: 'dfoa_xxx' }, + }) + expect(parsed.tokens?.bearer).toBe('dfoa_xxx') + }) + + it('rejects unknown token_storage values', () => { + expect(() => HostsBundleSchema.parse({ token_storage: 'cloud' })).toThrow() + }) + + it('keeps available_workspaces when provided', () => { + const parsed = HostsBundleSchema.parse({ + available_workspaces: [ + { id: 'a', name: 'A', role: 'owner' }, + { id: 'b', name: 'B', role: 'member' }, + ], + }) + expect(parsed.available_workspaces).toHaveLength(2) + }) +}) + +describe('loadHosts/saveHosts', () => { + let dir: string + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-hosts-')) + }) + + afterEach(async () => { + await rm(dir, { recursive: true, force: true }) + }) + + it('returns undefined when file is missing', async () => { + expect(await loadHosts(dir)).toBeUndefined() + }) + + it('round-trips bundle through YAML', async () => { + await saveHosts(dir, { + current_host: 'cloud.dify.ai', + account: { id: 'acct-1', email: 'a@b.c', name: 'A' }, + workspace: { id: 'ws-1', name: 'My Space', role: 'owner' }, + token_storage: 'keychain', + token_id: 'tok_xyz', + }) + const loaded = await loadHosts(dir) + expect(loaded?.current_host).toBe('cloud.dify.ai') + expect(loaded?.account?.email).toBe('a@b.c') + expect(loaded?.token_storage).toBe('keychain') + }) + + it('writes file with mode 0600', async () => { + await saveHosts(dir, { current_host: 'cloud.dify.ai', token_storage: 'file' }) + const info = await stat(join(dir, HOSTS_FILE_NAME)) + expect(info.mode & 0o777).toBe(FILE_PERM) + }) + + 
it('rewrites permissive existing file with mode 0600', async () => { + const path = join(dir, HOSTS_FILE_NAME) + await writeFile(path, 'current_host: ""\ntoken_storage: file\n', { mode: 0o644 }) + await saveHosts(dir, { current_host: 'cloud.dify.ai', token_storage: 'file' }) + const info = await stat(path) + expect(info.mode & 0o777).toBe(FILE_PERM) + }) + + it('atomic write: temp file does not survive on success', async () => { + await saveHosts(dir, { current_host: 'cloud.dify.ai', token_storage: 'file' }) + const { readdir } = await import('node:fs/promises') + const entries = await readdir(dir) + expect(entries.filter(n => n.includes('.tmp.'))).toHaveLength(0) + }) + + it('drops unknown top-level fields', async () => { + const path = join(dir, HOSTS_FILE_NAME) + await writeFile(path, 'current_host: cloud.dify.ai\nfuture_field: 42\ntoken_storage: file\n', { mode: FILE_PERM }) + const loaded = await loadHosts(dir) + expect(loaded?.current_host).toBe('cloud.dify.ai') + expect((loaded as Record | undefined)?.future_field).toBeUndefined() + }) + + it('throws on malformed YAML', async () => { + const path = join(dir, HOSTS_FILE_NAME) + await writeFile(path, ': : :\n', { mode: FILE_PERM }) + await expect(loadHosts(dir)).rejects.toThrow() + }) + + it('throws when YAML contradicts schema', async () => { + const path = join(dir, HOSTS_FILE_NAME) + await writeFile(path, 'token_storage: cloud\n', { mode: FILE_PERM }) + await expect(loadHosts(dir)).rejects.toThrow() + }) + + it('produces YAML with stable keys', async () => { + await saveHosts(dir, { + current_host: 'cloud.dify.ai', + token_storage: 'file', + tokens: { bearer: 'dfoa_x' }, + }) + const raw = await readFile(join(dir, HOSTS_FILE_NAME), 'utf8') + expect(raw).toContain('current_host: cloud.dify.ai') + expect(raw).toContain('bearer: dfoa_x') + }) +}) diff --git a/cli/src/auth/hosts.ts b/cli/src/auth/hosts.ts new file mode 100644 index 0000000000..fc90b3238c --- /dev/null +++ b/cli/src/auth/hosts.ts @@ -0,0 +1,100 
@@ +import { mkdir, readFile, rename, unlink, writeFile } from 'node:fs/promises' +import { join } from 'node:path' +import yaml from 'js-yaml' +import { z } from 'zod' +import { DIR_PERM, FILE_PERM } from '../config/dir.js' + +export const HOSTS_FILE_NAME = 'hosts.yml' + +const StorageModeSchema = z.enum(['keychain', 'file']) +export type StorageMode = z.infer + +export const AccountSchema = z.object({ + id: z.string().optional(), + email: z.string().default(''), + name: z.string().default(''), +}) +export type Account = z.infer + +export const WorkspaceSchema = z.object({ + id: z.string(), + name: z.string(), + role: z.string(), +}) +export type Workspace = z.infer + +export const ExternalSubjectSchema = z.object({ + email: z.string(), + issuer: z.string(), +}) +export type ExternalSubject = z.infer + +export const TokensSchema = z.object({ + bearer: z.string(), +}) +export type Tokens = z.infer + +export const HostsBundleSchema = z.object({ + current_host: z.string().default(''), + scheme: z.string().optional(), + account: AccountSchema.optional(), + workspace: WorkspaceSchema.optional(), + available_workspaces: z.array(WorkspaceSchema).optional(), + token_storage: StorageModeSchema.default('file'), + token_id: z.string().optional(), + token_expires_at: z.string().optional(), + tokens: TokensSchema.optional(), + external_subject: ExternalSubjectSchema.optional(), +}) +export type HostsBundle = z.infer + +export async function loadHosts(dir: string): Promise { + const path = join(dir, HOSTS_FILE_NAME) + let raw: string + try { + raw = await readFile(path, 'utf8') + } + catch (err) { + if ((err as NodeJS.ErrnoException).code === 'ENOENT') + return undefined + throw err + } + const parsed = yaml.load(raw) + return HostsBundleSchema.parse(parsed ?? 
{}) +} + +export async function saveHosts(dir: string, bundle: HostsBundle): Promise { + await mkdir(dir, { recursive: true, mode: DIR_PERM }) + const validated = HostsBundleSchema.parse(bundle) + const body = yaml.dump(stripUndefined(validated), { lineWidth: -1, noRefs: true, sortKeys: false }) + const target = join(dir, HOSTS_FILE_NAME) + const tmp = `${target}.tmp.${process.pid}.${Date.now()}` + try { + await writeFile(tmp, body, { mode: FILE_PERM }) + await rename(tmp, target) + } + catch (err) { + try { + await unlink(tmp) + } + catch { /* tmp may not exist */ } + throw err + } + const { chmod, stat } = await import('node:fs/promises') + try { + const info = await stat(target) + if ((info.mode & 0o777) !== FILE_PERM) + await chmod(target, FILE_PERM) + } + catch { /* best-effort */ } +} + +function stripUndefined>(input: T): Record { + const out: Record = {} + for (const [k, v] of Object.entries(input)) { + if (v === undefined) + continue + out[k] = v + } + return out +} diff --git a/cli/src/auth/keyring-backend.test.ts b/cli/src/auth/keyring-backend.test.ts new file mode 100644 index 0000000000..19e0153916 --- /dev/null +++ b/cli/src/auth/keyring-backend.test.ts @@ -0,0 +1,111 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const passwords = new Map() +const setPassword = vi.fn() +const getPassword = vi.fn() +const deletePassword = vi.fn() + +class FakeAsyncEntry { + private readonly key: string + constructor(service: string, username: string) { + this.key = `${service}::${username}` + } + + async setPassword(value: string): Promise { + setPassword(this.key, value) + passwords.set(this.key, value) + } + + async getPassword(): Promise { + getPassword(this.key) + return passwords.get(this.key) + } + + async deletePassword(): Promise { + deletePassword(this.key) + if (!passwords.has(this.key)) + return false + passwords.delete(this.key) + return true + } +} + +vi.mock('@napi-rs/keyring', () => ({ + AsyncEntry: FakeAsyncEntry, +})) + +const { 
KEYRING_SERVICE, KeyringBackend } = await import('./keyring-backend.js') + +beforeEach(() => { + passwords.clear() + setPassword.mockClear() + getPassword.mockClear() + deletePassword.mockClear() +}) + +describe('KeyringBackend', () => { + it('uses service name "difyctl"', () => { + expect(KEYRING_SERVICE).toBe('difyctl') + }) + + it('returns undefined when no password is stored', async () => { + const k = new KeyringBackend() + expect(await k.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) + + it('round-trips put/get', async () => { + const k = new KeyringBackend() + await k.put('cloud.dify.ai', 'acct-1', 'dfoa_x') + expect(await k.get('cloud.dify.ai', 'acct-1')).toBe('dfoa_x') + }) + + it('keys by host::accountId', async () => { + const k = new KeyringBackend() + await k.put('cloud.dify.ai', 'acct-1', 'A') + await k.put('cloud.dify.ai', 'acct-2', 'B') + expect(await k.get('cloud.dify.ai', 'acct-1')).toBe('A') + expect(await k.get('cloud.dify.ai', 'acct-2')).toBe('B') + }) + + it('delete removes the entry', async () => { + const k = new KeyringBackend() + await k.put('cloud.dify.ai', 'acct-1', 'A') + await k.delete('cloud.dify.ai', 'acct-1') + expect(await k.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) + + it('delete is a no-op for missing entries', async () => { + const k = new KeyringBackend() + await expect(k.delete('cloud.dify.ai', 'gone')).resolves.toBeUndefined() + }) + + it('list returns empty array (keyring does not enumerate)', async () => { + const k = new KeyringBackend() + await k.put('cloud.dify.ai', 'acct-1', 'A') + expect(await k.list('cloud.dify.ai')).toEqual([]) + }) + + it('swallows getPassword exceptions and returns undefined', async () => { + const k = new KeyringBackend() + getPassword.mockImplementationOnce(() => { + throw new Error('NoEntry') + }) + expect(await k.get('cloud.dify.ai', 'acct-1')).toBeUndefined() + }) + + it('swallows delete exceptions', async () => { + const k = new KeyringBackend() + 
deletePassword.mockImplementationOnce(() => { + throw new Error('NoEntry') + }) + await expect(k.delete('cloud.dify.ai', 'acct-1')).resolves.toBeUndefined() + }) + + it('lets put propagate exceptions (caller decides fallback)', async () => { + const k = new KeyringBackend() + setPassword.mockImplementationOnce(() => { + throw new Error('keyring locked') + }) + await expect(k.put('cloud.dify.ai', 'acct-1', 'tok')).rejects.toThrow(/keyring locked/) + }) +}) diff --git a/cli/src/auth/keyring-backend.ts b/cli/src/auth/keyring-backend.ts new file mode 100644 index 0000000000..8e3dc75ab2 --- /dev/null +++ b/cli/src/auth/keyring-backend.ts @@ -0,0 +1,35 @@ +import type { TokenStore } from './store.js' +import { AsyncEntry } from '@napi-rs/keyring' + +export const KEYRING_SERVICE = 'difyctl' + +function username(host: string, accountId: string): string { + return `${host}::${accountId}` +} + +export class KeyringBackend implements TokenStore { + async put(host: string, accountId: string, token: string): Promise { + await new AsyncEntry(KEYRING_SERVICE, username(host, accountId)).setPassword(token) + } + + async get(host: string, accountId: string): Promise { + try { + const v = await new AsyncEntry(KEYRING_SERVICE, username(host, accountId)).getPassword() + return v ?? 
undefined + } + catch { + return undefined + } + } + + async delete(host: string, accountId: string): Promise { + try { + await new AsyncEntry(KEYRING_SERVICE, username(host, accountId)).deletePassword() + } + catch { /* missing entry is fine */ } + } + + async list(_host: string): Promise { + return [] + } +} diff --git a/cli/src/auth/store.test.ts b/cli/src/auth/store.test.ts new file mode 100644 index 0000000000..21498ae9c0 --- /dev/null +++ b/cli/src/auth/store.test.ts @@ -0,0 +1,75 @@ +import type { TokenStore } from './store.js' +import { describe, expect, it, vi } from 'vitest' +import { selectStore } from './store.js' + +function memBackend(label: string): TokenStore & { _label: string } { + const map = new Map() + const k = (h: string, a: string) => `${h}::${a}` + return { + _label: label, + async put(h, a, t) { map.set(k(h, a), t) }, + async get(h, a) { return map.get(k(h, a)) }, + async delete(h, a) { map.delete(k(h, a)) }, + async list() { return [] }, + } +} + +describe('selectStore', () => { + it('returns keychain when probe succeeds', async () => { + const k = memBackend('keyring') + const f = memBackend('file') + const result = await selectStore({ + configDir: '/tmp/x', + factory: { keyring: () => k, file: () => f }, + }) + expect(result.mode).toBe('keychain') + expect(result.store).toBe(k) + }) + + it('falls back to file when keyring put throws', async () => { + const k = memBackend('keyring') + const f = memBackend('file') + k.put = vi.fn().mockRejectedValue(new Error('locked')) + const result = await selectStore({ + configDir: '/tmp/x', + factory: { keyring: () => k, file: () => f }, + }) + expect(result.mode).toBe('file') + expect(result.store).toBe(f) + }) + + it('falls back to file when probe round-trip mismatches', async () => { + const k = memBackend('keyring') + const f = memBackend('file') + k.get = vi.fn().mockResolvedValue('something-else') + const result = await selectStore({ + configDir: '/tmp/x', + factory: { keyring: () => k, file: 
() => f }, + }) + expect(result.mode).toBe('file') + expect(result.store).toBe(f) + }) + + it('falls back to file when keyring constructor throws', async () => { + const f = memBackend('file') + const result = await selectStore({ + configDir: '/tmp/x', + factory: { + keyring: () => { throw new Error('no backend') }, + file: () => f, + }, + }) + expect(result.mode).toBe('file') + expect(result.store).toBe(f) + }) + + it('cleans up probe entry after successful probe', async () => { + const k = memBackend('keyring') + const f = memBackend('file') + await selectStore({ + configDir: '/tmp/x', + factory: { keyring: () => k, file: () => f }, + }) + expect(await k.get('__difyctl_probe__', '__probe__')).toBeUndefined() + }) +}) diff --git a/cli/src/auth/store.ts b/cli/src/auth/store.ts new file mode 100644 index 0000000000..1be2a0606c --- /dev/null +++ b/cli/src/auth/store.ts @@ -0,0 +1,40 @@ +import { FileBackend } from './file-backend.js' +import { KeyringBackend } from './keyring-backend.js' + +export type TokenStore = { + put: (host: string, accountId: string, token: string) => Promise + get: (host: string, accountId: string) => Promise + delete: (host: string, accountId: string) => Promise + list: (host: string) => Promise +} + +export type StorageMode = 'keychain' | 'file' + +export type SelectStoreOptions = { + readonly configDir: string + readonly factory?: { + readonly keyring?: () => TokenStore + readonly file?: (dir: string) => TokenStore + } +} + +const PROBE_HOST = '__difyctl_probe__' +const PROBE_ACCOUNT = '__probe__' +const PROBE_VALUE = 'probe-v1' + +export async function selectStore(opts: SelectStoreOptions): Promise<{ store: TokenStore, mode: StorageMode }> { + const fileFactory = opts.factory?.file ?? ((dir: string) => new FileBackend(dir)) + const keyringFactory = opts.factory?.keyring ?? 
(() => new KeyringBackend()) + try { + const k = keyringFactory() + await k.put(PROBE_HOST, PROBE_ACCOUNT, PROBE_VALUE) + const got = await k.get(PROBE_HOST, PROBE_ACCOUNT) + await k.delete(PROBE_HOST, PROBE_ACCOUNT) + if (got !== PROBE_VALUE) + throw new Error('keyring round-trip mismatch') + return { store: k, mode: 'keychain' } + } + catch { + return { store: fileFactory(opts.configDir), mode: 'file' } + } +} diff --git a/cli/src/cache/app-info.test.ts b/cli/src/cache/app-info.test.ts new file mode 100644 index 0000000000..c3b7c8161c --- /dev/null +++ b/cli/src/cache/app-info.test.ts @@ -0,0 +1,111 @@ +import type { AppMeta } from '../types/app-meta.js' +import { mkdtemp, readFile, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { FieldInfo, FieldParameters } from '../types/app.js' +import { APP_INFO_TTL_MS, cachePath, loadAppInfoCache } from './app-info.js' + +function metaInfoOnly(): AppMeta { + return { + info: { + id: 'app-1', + name: 'Greeter', + description: '', + mode: 'chat', + author: 'tester', + tags: [], + updated_at: null, + service_api_enabled: false, + is_agent: false, + }, + parameters: null, + inputSchema: null, + coveredFields: new Set([FieldInfo]), + } +} + +describe('app-info disk cache', () => { + let dir: string + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-cache-')) + }) + afterEach(async () => { + await rm(dir, { recursive: true, force: true }) + }) + + it('round-trips an entry across reloads', async () => { + const c1 = await loadAppInfoCache({ configDir: dir }) + await c1.set('http://localhost:9999', 'app-1', metaInfoOnly()) + + const c2 = await loadAppInfoCache({ configDir: dir }) + const got = c2.get('http://localhost:9999', 'app-1') + expect(got).toBeDefined() + expect(got?.meta.info?.id).toBe('app-1') + expect(got?.meta.coveredFields.has(FieldInfo)).toBe(true) + }) + + it('isFresh 
respects TTL', async () => { + const now = new Date('2026-05-09T00:00:00Z') + const c = await loadAppInfoCache({ configDir: dir, now: () => now }) + await c.set('h', 'app-1', metaInfoOnly()) + const r = c.get('h', 'app-1') + expect(r).toBeDefined() + expect(c.isFresh(r!, now)).toBe(true) + expect(c.isFresh(r!, new Date(now.getTime() + APP_INFO_TTL_MS - 1))).toBe(true) + expect(c.isFresh(r!, new Date(now.getTime() + APP_INFO_TTL_MS))).toBe(false) + expect(c.isFresh(r!, new Date(now.getTime() + APP_INFO_TTL_MS + 60_000))).toBe(false) + }) + + it('keys by (host, app_id) — different hosts isolate', async () => { + const c = await loadAppInfoCache({ configDir: dir }) + await c.set('h1', 'app-1', metaInfoOnly()) + expect(c.get('h2', 'app-1')).toBeUndefined() + expect(c.get('h1', 'app-1')).toBeDefined() + }) + + it('delete removes entry from disk', async () => { + const c1 = await loadAppInfoCache({ configDir: dir }) + await c1.set('h', 'app-1', metaInfoOnly()) + await c1.delete('h', 'app-1') + + const c2 = await loadAppInfoCache({ configDir: dir }) + expect(c2.get('h', 'app-1')).toBeUndefined() + }) + + it('writes file with 0600 permission', async () => { + const c = await loadAppInfoCache({ configDir: dir }) + await c.set('h', 'app-1', metaInfoOnly()) + const { stat } = await import('node:fs/promises') + const s = await stat(cachePath(dir)) + if (process.platform !== 'win32') + expect(s.mode & 0o777).toBe(0o600) + }) + + it('missing cache file is not an error', async () => { + const c = await loadAppInfoCache({ configDir: dir }) + expect(c.get('h', 'app-1')).toBeUndefined() + }) + + it('corrupt cache file is treated as empty', async () => { + const { mkdir, writeFile } = await import('node:fs/promises') + await mkdir(join(dir, 'cache'), { recursive: true }) + await writeFile(cachePath(dir), '{not json', 'utf8') + const c = await loadAppInfoCache({ configDir: dir }) + expect(c.get('h', 'app-1')).toBeUndefined() + }) + + it('updates same key in place (no growth)', async 
() => { + const c = await loadAppInfoCache({ configDir: dir }) + await c.set('h', 'app-1', metaInfoOnly()) + const slim: AppMeta = { + ...metaInfoOnly(), + coveredFields: new Set([FieldInfo, FieldParameters]), + parameters: { opening_statement: 'hi' }, + } + await c.set('h', 'app-1', slim) + const raw = await readFile(cachePath(dir), 'utf8') + const parsed = JSON.parse(raw) as { entries: Record } + expect(Object.keys(parsed.entries)).toHaveLength(1) + }) +}) diff --git a/cli/src/cache/app-info.ts b/cli/src/cache/app-info.ts new file mode 100644 index 0000000000..0345d61b07 --- /dev/null +++ b/cli/src/cache/app-info.ts @@ -0,0 +1,138 @@ +import type { AppMeta, AppMetaCacheRecord, AppMetaFieldKey } from '../types/app-meta.js' +import { mkdir, readFile, rename, writeFile } from 'node:fs/promises' +import { dirname, join } from 'node:path' +import { DIR_PERM, FILE_PERM } from '../config/dir.js' +import { FieldInfo, FieldInputSchema, FieldParameters } from '../types/app.js' + +const CACHE_FILE = 'app-info.json' +export const APP_INFO_TTL_MS = 60 * 60 * 1000 + +type DiskShape = { + entries: Record +} + +type DiskEntry = { + meta: SerializedMeta + fetched_at: string +} + +type SerializedMeta = { + info: AppMeta['info'] + parameters: unknown + input_schema: unknown + covered_fields: AppMetaFieldKey[] +} + +export type AppInfoCache = { + get: (host: string, appId: string) => AppMetaCacheRecord | undefined + set: (host: string, appId: string, meta: AppMeta) => Promise + delete: (host: string, appId: string) => Promise + isFresh: (record: AppMetaCacheRecord, now?: Date) => boolean +} + +type State = { + entries: Map +} + +export type AppInfoCacheOptions = { + readonly configDir: string + readonly ttlMs?: number + readonly now?: () => Date +} + +export async function loadAppInfoCache(opts: AppInfoCacheOptions): Promise { + const path = cachePath(opts.configDir) + const ttlMs = opts.ttlMs ?? 
APP_INFO_TTL_MS + const state: State = { entries: new Map() } + await readDisk(path, state) + return { + get: (host, appId) => state.entries.get(key(host, appId)), + set: async (host, appId, meta) => { + const record: AppMetaCacheRecord = { meta, fetchedAt: (opts.now ?? (() => new Date()))().toISOString() } + state.entries.set(key(host, appId), record) + await persist(path, state) + }, + delete: async (host, appId) => { + state.entries.delete(key(host, appId)) + await persist(path, state) + }, + isFresh: (record, now) => { + const t = (now ?? new Date()).getTime() - new Date(record.fetchedAt).getTime() + return t >= 0 && t < ttlMs + }, + } +} + +export function cachePath(configDir: string): string { + return join(configDir, 'cache', CACHE_FILE) +} + +function key(host: string, appId: string): string { + return `${host}::${appId}` +} + +async function readDisk(path: string, state: State): Promise { + let raw: string + try { + raw = await readFile(path, 'utf8') + } + catch (err) { + if ((err as NodeJS.ErrnoException).code === 'ENOENT') + return + throw err + } + let parsed: DiskShape + try { + parsed = JSON.parse(raw) as DiskShape + } + catch { + return + } + if (parsed.entries === undefined) + return + for (const [k, e] of Object.entries(parsed.entries)) { + state.entries.set(k, deserialize(e)) + } +} + +function deserialize(e: DiskEntry): AppMetaCacheRecord { + const covered = new Set(filterFields(e.meta.covered_fields)) + return { + meta: { + info: e.meta.info, + parameters: e.meta.parameters, + inputSchema: e.meta.input_schema, + coveredFields: covered, + }, + fetchedAt: e.fetched_at, + } +} + +function filterFields(input: unknown): AppMetaFieldKey[] { + if (!Array.isArray(input)) + return [] + const valid = new Set([FieldInfo, FieldParameters, FieldInputSchema]) + return input.filter((s): s is AppMetaFieldKey => typeof s === 'string' && valid.has(s as AppMetaFieldKey)) +} + +function serialize(record: AppMetaCacheRecord): DiskEntry { + return { + meta: { + info: 
record.meta.info, + parameters: record.meta.parameters, + input_schema: record.meta.inputSchema, + covered_fields: [...record.meta.coveredFields], + }, + fetched_at: record.fetchedAt, + } +} + +async function persist(path: string, state: State): Promise { + const dir = dirname(path) + await mkdir(dir, { recursive: true, mode: DIR_PERM }) + const disk: DiskShape = { entries: {} } + for (const [k, v] of state.entries) disk.entries[k] = serialize(v) + const tmp = `${path}.${process.pid}.${Date.now()}.tmp` + await writeFile(tmp, JSON.stringify(disk), { mode: FILE_PERM }) + await rename(tmp, path) +} diff --git a/cli/src/commands/AGENTS.md b/cli/src/commands/AGENTS.md new file mode 100644 index 0000000000..2b86403255 --- /dev/null +++ b/cli/src/commands/AGENTS.md @@ -0,0 +1,66 @@ +# AGENTS.md — `src/commands/` + +Per-command agent-optimized usage and structure guide. + +## Command folder convention + +Every command is a folder. `index.ts` is the only oclif entrypoint. All related +code — business logic, helpers, tests, and optional agent guide — colocates inside +the folder. Subcommands are subfolders. + +``` +src/commands/ + / + / + index.ts ← oclif command entrypoint (the ONLY file oclif discovers) + run.ts ← business logic (not a command, invisible to oclif) + handlers.ts ← helpers + guide.ts ← agent guide string (optional) + *.test.ts ← tests + / ← subcommand (e.g. auth/devices/list/) + index.ts + _shared/ ← intra-topic shared code (only when needed by 2+ siblings) + .ts +``` + +oclif discovers commands only via `**/index.+(js|cjs|mjs)`. All other files in +command folders are invisible to oclif — add freely without glob exclusions. +Folders prefixed with `_` (e.g. `_shared/`, `_strategies/`) are also excluded +from oclif discovery and from coverage checks. + +## Adding a new command + +1. Create `src/commands///index.ts` extending `DifyCommand`. +1. Add business logic in sibling files (e.g. `run.ts`, `handlers.ts`). +1. 
Run `pnpm build` to regenerate the oclif manifest. +1. Run `pnpm test` to verify coverage. + +## Adding an agent guide + +1. Create `src/commands/<topic>/<command>/guide.ts` exporting a plain string: + ```ts + export const agentGuide = ` + WORKFLOW + 1. ... + + ERROR RECOVERY + ... + ` + ``` +1. Import and assign in `index.ts`: + ```ts + import { agentGuide } from './guide.js' + + export default class MyCmd extends DifyCommand { + static agentGuide = agentGuide + } + ``` +1. The guide appears at the bottom of `difyctl --help` automatically. +1. Agents call `difyctl --help` to read both structural help and workflow guidance. + +## Shared utilities + +Code used by two or more commands lives in `src/<domain>/` (e.g. `src/auth/`, +`src/api/`, `src/errors/`). Do not put broadly shared code inside a command folder. +Intra-topic shared code (used only within one topic's commands) uses `_shared/` +within that topic folder. diff --git a/cli/src/commands/_shared/authed-command.ts b/cli/src/commands/_shared/authed-command.ts new file mode 100644 index 0000000000..9e6193b8c8 --- /dev/null +++ b/cli/src/commands/_shared/authed-command.ts @@ -0,0 +1,58 @@ +import type { Command } from '@oclif/core' +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../auth/hosts.js' +import type { AppInfoCache } from '../../cache/app-info.js' +import type { IOStreams } from '../../io/streams.js' +import { loadHosts } from '../../auth/hosts.js' +import { loadAppInfoCache } from '../../cache/app-info.js' +import { resolveConfigDir } from '../../config/dir.js' +import { BaseError } from '../../errors/base.js' +import { ErrorCode } from '../../errors/codes.js' +import { formatErrorForCli } from '../../errors/format.js' +import { createClient } from '../../http/client.js' +import { realStreams } from '../../io/streams.js' +import { hostWithScheme } from '../../util/host.js' +import { resolveRetryAttempts } from './global-flags.js' + +export type AuthedContext = { + readonly bundle: HostsBundle +
readonly http: KyInstance + readonly host: string + readonly io: IOStreams + readonly configDir: string + readonly cache?: AppInfoCache +} + +export type AuthedContextOptions = { + readonly retryFlag: number | undefined + readonly withCache?: boolean + readonly format?: string +} + +export async function buildAuthedContext( + cmd: Pick<Command, 'error'>, + opts: AuthedContextOptions, +): Promise<AuthedContext> { + const configDir = resolveConfigDir() + const bundle = await loadHosts(configDir) + if (bundle === undefined || bundle.tokens?.bearer === undefined || bundle.tokens.bearer === '') { + const err = new BaseError({ + code: ErrorCode.NotLoggedIn, + message: 'not logged in', + hint: 'run \'difyctl auth login\'', + }) + cmd.error(formatErrorForCli(err, { format: opts.format }), { exit: err.exit() }) + } + + const host = hostWithScheme(bundle.current_host, bundle.scheme) + const retryAttempts = resolveRetryAttempts({ + flag: opts.retryFlag, + env: (k: string) => process.env[k], + }) + const http = createClient({ host, bearer: bundle.tokens.bearer, retryAttempts }) + const io = realStreams(opts.format ?? '') + + const cache = opts.withCache === true ?
await loadAppInfoCache({ configDir }) : undefined + + return { bundle, http, host, io, configDir, cache } +} diff --git a/cli/src/commands/_shared/dify-command.ts b/cli/src/commands/_shared/dify-command.ts new file mode 100644 index 0000000000..c1cb67ac3a --- /dev/null +++ b/cli/src/commands/_shared/dify-command.ts @@ -0,0 +1,21 @@ +import type { CommandError } from '@oclif/core/interfaces' +import type { AuthedContext, AuthedContextOptions } from './authed-command.js' +import { Command } from '@oclif/core' +import { isBaseError } from '../../errors/base.js' +import { formatErrorForCli } from '../../errors/format.js' +import { buildAuthedContext } from './authed-command.js' + +export abstract class DifyCommand extends Command { + protected outputFormat = '' + + protected async authedCtx(opts: AuthedContextOptions): Promise<AuthedContext> { + this.outputFormat = opts.format ?? '' + return buildAuthedContext(this, opts) + } + + protected override async catch(err: CommandError): Promise<void> { + if (isBaseError(err)) + this.error(formatErrorForCli(err, { format: this.outputFormat }), { exit: err.exit() }) + throw err + } +} diff --git a/cli/src/commands/_shared/global-flags.test.ts b/cli/src/commands/_shared/global-flags.test.ts new file mode 100644 index 0000000000..2fe678726b --- /dev/null +++ b/cli/src/commands/_shared/global-flags.test.ts @@ -0,0 +1,44 @@ +import { describe, expect, it } from 'vitest' +import { resolveRetryAttempts } from './global-flags.js' + +describe('resolveRetryAttempts', () => { + it('returns flag value when given', () => { + expect(resolveRetryAttempts({ flag: 1, env: () => undefined })).toBe(1) + }) + + it('returns 0 when flag is 0', () => { + expect(resolveRetryAttempts({ flag: 0, env: () => undefined })).toBe(0) + }) + + it('falls back to DIFYCTL_HTTP_RETRY env when flag missing', () => { + expect(resolveRetryAttempts({ flag: undefined, env: () => '5' })).toBe(5) + }) + + it('falls back to default 3 when flag and env missing', () => {
expect(resolveRetryAttempts({ flag: undefined, env: () => undefined })).toBe(3) + }) + + it('throws typed BaseError with UsageInvalidFlag on non-numeric env', () => { + let caught: unknown + try { + resolveRetryAttempts({ flag: undefined, env: () => 'foo' }) + } + catch (e) { + caught = e + } + expect((caught as { code: string }).code).toBe('usage_invalid_flag') + expect((caught as Error).message).toMatch(/DIFYCTL_HTTP_RETRY/) + }) + + it('throws typed BaseError with UsageInvalidFlag on negative env', () => { + let caught: unknown + try { + resolveRetryAttempts({ flag: undefined, env: () => '-1' }) + } + catch (e) { + caught = e + } + expect((caught as { code: string }).code).toBe('usage_invalid_flag') + expect((caught as Error).message).toMatch(/DIFYCTL_HTTP_RETRY/) + }) +}) diff --git a/cli/src/commands/_shared/global-flags.ts b/cli/src/commands/_shared/global-flags.ts new file mode 100644 index 0000000000..af29fd7951 --- /dev/null +++ b/cli/src/commands/_shared/global-flags.ts @@ -0,0 +1,29 @@ +import { Flags } from '@oclif/core' +import { newError } from '../../errors/base.js' +import { ErrorCode } from '../../errors/codes.js' + +export const HTTP_RETRY_DEFAULT = 3 + +export const httpRetryFlag = Flags.integer({ + description: 'HTTP retry attempts for GET/PUT/DELETE on transient errors. 0 disables. 
Overrides DIFYCTL_HTTP_RETRY.', + helpGroup: 'GLOBAL', +}) + +export type ResolveRetryAttemptsOpts = { + flag: number | undefined + env: (k: string) => string | undefined +} + +export function resolveRetryAttempts(opts: ResolveRetryAttemptsOpts): number { + if (opts.flag !== undefined) + return opts.flag + const raw = opts.env('DIFYCTL_HTTP_RETRY') + if (raw === undefined || raw === '') + return HTTP_RETRY_DEFAULT + if (!/^-?\d+$/.test(raw)) + throw newError(ErrorCode.UsageInvalidFlag, `DIFYCTL_HTTP_RETRY: ${JSON.stringify(raw)} is not a non-negative integer`) + const n = Number(raw) + if (n < 0) + throw newError(ErrorCode.UsageInvalidFlag, `DIFYCTL_HTTP_RETRY: ${n} is negative`) + return n +} diff --git a/cli/src/commands/auth/devices/_shared/devices.test.ts b/cli/src/commands/auth/devices/_shared/devices.test.ts new file mode 100644 index 0000000000..92e9bc8826 --- /dev/null +++ b/cli/src/commands/auth/devices/_shared/devices.test.ts @@ -0,0 +1,189 @@ +import type { DifyMock } from '../../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../../auth/hosts.js' +import type { TokenStore } from '../../../../auth/store.js' +import { mkdtemp, readFile, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../../test/fixtures/dify-mock/server.js' +import { saveHosts } from '../../../../auth/hosts.js' +import { createClient } from '../../../../http/client.js' +import { bufferStreams } from '../../../../io/streams.js' +import { runDevicesList, runDevicesRevoke } from './devices.js' + +class MemStore implements TokenStore { + readonly entries = new Map() + async put(host: string, accountId: string, token: string): Promise { + this.entries.set(`${host}::${accountId}`, token) + } + + async get(host: string, accountId: string): Promise { + return this.entries.get(`${host}::${accountId}`) + } + 
+ async delete(host: string, accountId: string): Promise { + this.entries.delete(`${host}::${accountId}`) + } + + async list(host: string): Promise { + const prefix = `${host}::` + return Array.from(this.entries.keys()).filter(k => k.startsWith(prefix)) + } +} + +function bundleFor(host: string, tokenId = 'tok-1'): HostsBundle { + return { + current_host: host, + scheme: 'http', + token_storage: 'file', + token_id: tokenId, + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +describe('runDevicesList', () => { + let mock: DifyMock + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + }) + afterEach(async () => { + await mock.stop() + }) + + it('table: marks current with *', async () => { + const io = bufferStreams() + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + await runDevicesList({ io, bundle: bundleFor(mock.url, 'tok-1'), http }) + const out = io.outBuf() + expect(out).toContain('DEVICE') + expect(out).toContain('difyctl on laptop') + expect(out).toContain('difyctl on desktop') + const lines = out.trim().split('\n') + const laptopLine = lines.find(l => l.includes('difyctl on laptop'))! 
+ expect(laptopLine).toMatch(/\*\s*$/) + }) + + it('json: emits PaginationEnvelope unchanged', async () => { + const io = bufferStreams() + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + await runDevicesList({ io, bundle: bundleFor(mock.url), http, json: true }) + const parsed = JSON.parse(io.outBuf()) as Record + expect(parsed.page).toBe(1) + expect(Array.isArray(parsed.data)).toBe(true) + expect((parsed.data as unknown[]).length).toBe(3) + }) + + it('not-logged-in: throws NotLoggedIn', async () => { + const io = bufferStreams() + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + await expect(runDevicesList({ io, bundle: undefined, http })) + .rejects + .toThrow(/not logged in/) + }) +}) + +describe('runDevicesRevoke', () => { + let mock: DifyMock + let configDir: string + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + configDir = await mkdtemp(join(tmpdir(), 'difyctl-devrevoke-')) + }) + afterEach(async () => { + await mock.stop() + await rm(configDir, { recursive: true, force: true }) + }) + + it('exact device_label: revokes one + leaves local creds', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + await store.put(b.current_host, 'acct-1', 'dfoa_test') + await saveHosts(configDir, b) + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'difyctl on desktop', all: false }) + expect(io.outBuf()).toContain('Revoked 1 session(s)') + expect(store.entries.size).toBe(1) + }) + + it('exact id: revokes one', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'tok-2', all: false }) + expect(io.outBuf()).toContain('Revoked 1 session(s)') + }) 
+ + it('substring: unique match revokes', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'web', all: false }) + expect(io.outBuf()).toContain('Revoked 1 session(s)') + }) + + it('substring: ambiguous throws', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await expect(runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'difyctl', all: false })) + .rejects + .toThrow(/matches multiple/) + }) + + it('no match throws', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await expect(runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'nonexistent', all: false })) + .rejects + .toThrow(/no session matches/) + }) + + it('--all: revokes everything except current', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runDevicesRevoke({ configDir, io, bundle: b, http, store, all: true }) + expect(io.outBuf()).toContain('Revoked 2 session(s)') + }) + + it('revoking current id clears local creds', async () => { + const io = bufferStreams() + const store = new MemStore() + const b = bundleFor(mock.url, 'tok-1') + await store.put(b.current_host, 'acct-1', 'dfoa_test') + await saveHosts(configDir, b) + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runDevicesRevoke({ configDir, io, bundle: b, http, store, target: 'tok-1', all: false }) + expect(store.entries.size).toBe(0) + await 
expect(readFile(join(configDir, 'hosts.yml'), 'utf8')).rejects.toThrow(/ENOENT/) + }) + + it('no target + no --all: throws UsageMissingArg', async () => { + const io = bufferStreams() + const store = new MemStore() + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + await expect(runDevicesRevoke({ configDir, io, bundle: bundleFor(mock.url), http, store, all: false })) + .rejects + .toThrow(/specify a device label/) + }) +}) diff --git a/cli/src/commands/auth/devices/_shared/devices.ts b/cli/src/commands/auth/devices/_shared/devices.ts new file mode 100644 index 0000000000..c4879787ef --- /dev/null +++ b/cli/src/commands/auth/devices/_shared/devices.ts @@ -0,0 +1,159 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../../auth/hosts.js' +import type { TokenStore } from '../../../../auth/store.js' +import type { IOStreams } from '../../../../io/streams.js' +import type { SessionRow } from '../../../../types/account-session.js' +import { unlink } from 'node:fs/promises' +import { join } from 'node:path' +import { AccountSessionsClient } from '../../../../api/account-sessions.js' +import { HOSTS_FILE_NAME } from '../../../../auth/hosts.js' +import { BaseError } from '../../../../errors/base.js' +import { ErrorCode } from '../../../../errors/codes.js' +import { colorEnabled, colorScheme } from '../../../../io/color.js' +import { runWithSpinner } from '../../../../io/spinner.js' + +export type DevicesListOptions = { + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly http: KyInstance + readonly json?: boolean +} + +export async function runDevicesList(opts: DevicesListOptions): Promise<void> { + const b = requireLogin(opts.bundle) + const sessions = new AccountSessionsClient(opts.http) + const env = await runWithSpinner( + { io: opts.io, label: 'Fetching devices' }, + () => sessions.list(), + ) + + if (opts.json === true) { + opts.io.out.write(`${JSON.stringify(env)}\n`) + return + } + +
opts.io.out.write(renderTable(env.data, b.token_id ?? '')) +} + +export type DevicesRevokeOptions = { + readonly configDir: string + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly http: KyInstance + readonly store: TokenStore + readonly target?: string + readonly all: boolean + readonly yes?: boolean +} + +export async function runDevicesRevoke(opts: DevicesRevokeOptions): Promise<void> { + const cs = colorScheme(colorEnabled(opts.io.isErrTTY)) + const b = requireLogin(opts.bundle) + if (!opts.all && (opts.target === undefined || opts.target === '')) { + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: 'specify a device label / id, or pass --all', + hint: 'see \'difyctl auth devices list\'', + }) + } + + const sessions = new AccountSessionsClient(opts.http) + const env = await sessions.list() + const { ids, selfHit } = pickTargets(env.data, opts, b.token_id ?? '') + if (ids.length === 0) { + opts.io.out.write('no sessions to revoke\n') + return + } + + for (const id of ids) + await sessions.revoke(id) + + if (selfHit) + await clearLocal(opts.configDir, b, opts.store) + + opts.io.out.write(`${cs.successIcon()} Revoked ${ids.length} session(s)\n`) +} + +function requireLogin(b: HostsBundle | undefined): HostsBundle { + if (b === undefined || b.current_host === '' || b.tokens?.bearer === undefined || b.tokens.bearer === '') { + throw new BaseError({ + code: ErrorCode.NotLoggedIn, + message: 'not logged in', + hint: 'run \'difyctl auth login\'', + }) + } + return b +} + +export type PickResult = { + ids: readonly string[] + selfHit: boolean +} + +export function pickTargets(rows: readonly SessionRow[], opts: { target?: string, all: boolean }, currentId: string): PickResult { + if (opts.all) { + const ids = rows.filter(r => r.id !== currentId).map(r => r.id) + return { ids, selfHit: false } + } + const target = opts.target ??
'' + const byLabel = rows.filter(r => r.device_label === target) + if (byLabel.length > 1) + throw ambiguous(target, byLabel) + const onlyLabel = byLabel[0] + if (onlyLabel !== undefined) + return { ids: [onlyLabel.id], selfHit: onlyLabel.id === currentId } + + const byId = rows.find(r => r.id === target) + if (byId !== undefined) + return { ids: [byId.id], selfHit: byId.id === currentId } + + const needle = target.toLowerCase() + const bySub = rows.filter(r => r.device_label.toLowerCase().includes(needle)) + if (bySub.length > 1) + throw ambiguous(target, bySub) + const onlySub = bySub[0] + if (onlySub !== undefined) + return { ids: [onlySub.id], selfHit: onlySub.id === currentId } + + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: `no session matches "${target}"`, + }) +} + +function ambiguous(target: string, rows: readonly SessionRow[]): BaseError { + const labels = rows.map(r => `${r.device_label} (${r.id})`).join(', ') + return new BaseError({ + code: ErrorCode.UsageInvalidFlag, + message: `"${target}" matches multiple sessions: ${labels}; pass an exact id to disambiguate`, + }) +} + +function renderTable(rows: readonly SessionRow[], currentId: string): string { + const header = ['DEVICE', 'CREATED', 'LAST USED', 'CURRENT'] + const body = rows.map(r => [ + r.device_label !== '' ? r.device_label : r.id, + r.created_at ?? '', + r.last_used_at ?? '', + r.id === currentId ? '*' : '', + ]) + const widths = header.map((h, i) => Math.max(h.length, ...body.map(row => (row[i] ?? '').length))) + const fmt = (cells: readonly string[]): string => + cells.map((c, i) => c.padEnd(widths[i] ?? 0)).join(' ').trimEnd() + return body.length === 0 ? `${fmt(header)}\n` : `${[fmt(header), ...body.map(fmt)].join('\n')}\n` +} + +async function clearLocal(configDir: string, bundle: HostsBundle, store: TokenStore): Promise<void> { + const accountId = bundle.account?.id ?? bundle.external_subject?.email ??
'default' + try { + await store.delete(bundle.current_host, accountId) + } + catch { /* best-effort */ } + try { + await unlink(join(configDir, HOSTS_FILE_NAME)) + } + catch (err) { + if ((err as NodeJS.ErrnoException).code !== 'ENOENT') + throw err + } +} diff --git a/cli/src/commands/auth/devices/list/index.ts b/cli/src/commands/auth/devices/list/index.ts new file mode 100644 index 0000000000..0f12ac1004 --- /dev/null +++ b/cli/src/commands/auth/devices/list/index.ts @@ -0,0 +1,25 @@ +import { Flags } from '@oclif/core' +import { DifyCommand } from '../../../_shared/dify-command.js' +import { httpRetryFlag } from '../../../_shared/global-flags.js' +import { runDevicesList } from '../_shared/devices.js' + +export default class DevicesList extends DifyCommand { + static override description = 'List active sessions for the current bearer' + + static override examples = [ + '<%= config.bin %> auth devices list', + '<%= config.bin %> auth devices list --json', + ] + + static override flags = { + 'http-retry': httpRetryFlag, + 'json': Flags.boolean({ description: 'emit JSON', default: false }), + } + + async run(): Promise<void> { + const { flags } = await this.parse(DevicesList) + const format = flags.json ?
'json' : '' + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], format }) + await runDevicesList({ io: ctx.io, bundle: ctx.bundle, http: ctx.http, json: flags.json }) + } +} diff --git a/cli/src/commands/auth/devices/revoke/index.ts b/cli/src/commands/auth/devices/revoke/index.ts new file mode 100644 index 0000000000..9cd1faa8d4 --- /dev/null +++ b/cli/src/commands/auth/devices/revoke/index.ts @@ -0,0 +1,40 @@ +import { Args, Flags } from '@oclif/core' +import { selectStore } from '../../../../auth/store.js' +import { DifyCommand } from '../../../_shared/dify-command.js' +import { httpRetryFlag } from '../../../_shared/global-flags.js' +import { runDevicesRevoke } from '../_shared/devices.js' + +export default class DevicesRevoke extends DifyCommand { + static override description = 'Revoke one or all session devices' + + static override examples = [ + '<%= config.bin %> auth devices revoke "difyctl on laptop"', + '<%= config.bin %> auth devices revoke --all', + ] + + static override args = { + target: Args.string({ description: 'device label / id to revoke', required: false }), + } + + static override flags = { + 'all': Flags.boolean({ description: 'revoke every session except the current one', default: false }), + 'http-retry': httpRetryFlag, + 'yes': Flags.boolean({ description: 'skip confirmation prompt', default: false }), + } + + async run(): Promise<void> { + const { args, flags } = await this.parse(DevicesRevoke) + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'] }) + const { store } = await selectStore({ configDir: ctx.configDir }) + await runDevicesRevoke({ + configDir: ctx.configDir, + io: ctx.io, + bundle: ctx.bundle, + http: ctx.http, + store, + target: args.target, + all: flags.all, + yes: flags.yes, + }) + } +} diff --git a/cli/src/commands/auth/login/device-flow.test.ts b/cli/src/commands/auth/login/device-flow.test.ts new file mode 100644 index 0000000000..4a27788329 --- /dev/null +++
b/cli/src/commands/auth/login/device-flow.test.ts @@ -0,0 +1,173 @@ +import type { CodeResponse, PollRequest, PollResult, PollSuccess } from '../../../api/oauth-device.js' +import type { Clock } from './device-flow.js' +import { describe, expect, it, vi } from 'vitest' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { + awaitAuthorization, + DEFAULT_INTERVAL_MS, + MAX_INTERVAL_MS, + POLL_RETRY_ATTEMPTS, + POLL_RETRY_CAP_MS, + POLL_RETRY_INITIAL_MS, +} from './device-flow.js' + +const successPayload: PollSuccess = { + token: 'dfoa_xyz', + account: { id: 'a', email: 'e', name: 'n' }, + workspaces: [{ id: 'w', name: 'W', role: 'owner' }], + default_workspace_id: 'w', + token_id: 't', +} + +class FakeClock implements Clock { + sleeps: number[] = [] + cancelled = false + cancelAt: number | undefined + + async sleepMs(ms: number): Promise { + this.sleeps.push(ms) + if (this.cancelAt !== undefined && this.sleeps.length >= this.cancelAt) + this.cancelled = true + } + + isCancelled(): boolean { + return this.cancelled + } +} + +function fakeApi(scripted: PollResult[]): { pollOnce: (req: PollRequest) => Promise } { + let i = 0 + return { + pollOnce: async () => { + const r = scripted[i++] + if (r === undefined) + throw new Error('scripted-api: out of responses') + return r + }, + } +} + +const code: CodeResponse = { + device_code: 'dc', + user_code: 'ABCD-1234', + verification_uri: 'https://dify.example/device', + expires_in: 900, + interval: 1, +} + +describe('awaitAuthorization', () => { + it('returns success on first approved poll', async () => { + const api = fakeApi([{ status: 'approved', success: successPayload }]) + const clock = new FakeClock() + const result = await awaitAuthorization(api, code, { clock }) + expect(result.token).toBe('dfoa_xyz') + expect(clock.sleeps).toHaveLength(0) + }) + + it('keeps polling on pending then returns approved', async () => { + const api = fakeApi([ + { status: 
'pending' }, + { status: 'pending' }, + { status: 'approved', success: successPayload }, + ]) + const clock = new FakeClock() + const result = await awaitAuthorization(api, code, { clock }) + expect(result.token).toBe('dfoa_xyz') + expect(clock.sleeps).toEqual([1000, 1000]) + }) + + it('doubles interval on slow_down (capped at max)', async () => { + const api = fakeApi([ + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'approved', success: successPayload }, + ]) + const clock = new FakeClock() + const result = await awaitAuthorization(api, code, { clock }) + expect(result.token).toBe('dfoa_xyz') + expect(clock.sleeps).toEqual([2000, 4000]) + }) + + it('caps interval at MAX_INTERVAL_MS', async () => { + const api = fakeApi([ + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'slow_down' }, + { status: 'approved', success: successPayload }, + ]) + const clock = new FakeClock() + await awaitAuthorization(api, { ...code, interval: 10 }, { clock }) + const last = clock.sleeps[clock.sleeps.length - 1]! 
+ expect(last).toBe(MAX_INTERVAL_MS) + }) + + it('throws BaseError on expired', async () => { + const api = fakeApi([{ status: 'expired' }]) + const clock = new FakeClock() + await expect(awaitAuthorization(api, code, { clock })).rejects.toThrow(/expired/) + }) + + it('throws BaseError on denied', async () => { + const api = fakeApi([{ status: 'denied' }]) + const clock = new FakeClock() + await expect(awaitAuthorization(api, code, { clock })).rejects.toThrow(/denied/) + }) + + it('uses default interval when CodeResponse.interval is 0', async () => { + const api = fakeApi([ + { status: 'pending' }, + { status: 'approved', success: successPayload }, + ]) + const clock = new FakeClock() + await awaitAuthorization(api, { ...code, interval: 0 }, { clock }) + expect(clock.sleeps[0]).toBe(DEFAULT_INTERVAL_MS) + }) + + it('rejects when clock signals cancelled', async () => { + const api = fakeApi([ + { status: 'pending' }, + { status: 'pending' }, + { status: 'pending' }, + { status: 'pending' }, + { status: 'approved', success: successPayload }, + ]) + const clock = new FakeClock() + clock.cancelAt = 2 + await expect(awaitAuthorization(api, code, { clock })).rejects.toThrow(/expired|cancel/) + }) + + it('exposes constants matching Go reference', () => { + expect(POLL_RETRY_ATTEMPTS).toBe(5) + expect(POLL_RETRY_INITIAL_MS).toBe(1000) + expect(POLL_RETRY_CAP_MS).toBe(16_000) + expect(MAX_INTERVAL_MS).toBe(60_000) + expect(DEFAULT_INTERVAL_MS).toBe(5000) + }) + + it('preserves dfoe_ token kind through state machine', async () => { + const externalSuccess: PollSuccess = { + token: 'dfoe_xxx', + subject_type: 'external_sso', + subject_email: 'sso@x.com', + subject_issuer: 'https://issuer', + } + const api = fakeApi([{ status: 'approved', success: externalSuccess }]) + const clock = new FakeClock() + const result = await awaitAuthorization(api, code, { clock }) + expect(result.token).toBe('dfoe_xxx') + expect(result.subject_type).toBe('external_sso') + }) + + it('propagates 
BaseError thrown by api.pollOnce', async () => { + const api = { + pollOnce: vi.fn().mockRejectedValue(new BaseError({ code: ErrorCode.UnsupportedEndpoint, message: 'old server' })), + } + const clock = new FakeClock() + await expect(awaitAuthorization(api, code, { clock })).rejects.toThrow(/old server/) + }) +}) diff --git a/cli/src/commands/auth/login/device-flow.ts b/cli/src/commands/auth/login/device-flow.ts new file mode 100644 index 0000000000..bf319dccec --- /dev/null +++ b/cli/src/commands/auth/login/device-flow.ts @@ -0,0 +1,107 @@ +import type { CodeResponse, PollRequest, PollResult, PollSuccess } from '../../../api/oauth-device.js' +import { DEFAULT_CLIENT_ID } from '../../../api/oauth-device.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' + +export const DEFAULT_INTERVAL_MS = 5_000 +export const MAX_INTERVAL_MS = 60_000 +export const POLL_RETRY_ATTEMPTS = 5 +export const POLL_RETRY_INITIAL_MS = 1_000 +export const POLL_RETRY_CAP_MS = 16_000 + +export type Clock = { + sleepMs: (ms: number) => Promise<void> + isCancelled: () => boolean +} + +export type DeviceFlowApiSubset = { + pollOnce: (req: PollRequest) => Promise<PollResult> +} + +export type AwaitOptions = { + clock: Clock + clientId?: string +} + +export async function awaitAuthorization( + api: DeviceFlowApiSubset, + code: CodeResponse, + opts: AwaitOptions, +): Promise<PollSuccess> { + if (code.device_code === '') + throw expired() + + const baseInterval = code.interval > 0 ? code.interval * 1000 : DEFAULT_INTERVAL_MS + let interval = baseInterval + const req: PollRequest = { + device_code: code.device_code, + client_id: opts.clientId ??
DEFAULT_CLIENT_ID, + } + + while (true) { + if (opts.clock.isCancelled()) + throw expired() + const result = await pollWithRetry(api, req, opts.clock) + switch (result.status) { + case 'approved': + return result.success + case 'pending': + break + case 'slow_down': + interval = Math.min(interval * 2, MAX_INTERVAL_MS) + break + case 'expired': + throw expired() + case 'denied': + throw new BaseError({ + code: ErrorCode.AccessDenied, + message: 'authorization denied', + }) + case 'retry_5xx': + throw new BaseError({ + code: ErrorCode.Server5xx, + message: 'device-flow poll unavailable after retries', + }) + } + await opts.clock.sleepMs(interval) + if (opts.clock.isCancelled()) + throw expired() + } +} + +async function pollWithRetry( + api: DeviceFlowApiSubset, + req: PollRequest, + clock: Clock, +): Promise<PollResult> { + let backoff = POLL_RETRY_INITIAL_MS + for (let attempt = 1; attempt <= POLL_RETRY_ATTEMPTS; attempt++) { + const result = await api.pollOnce(req) + if (result.status !== 'retry_5xx') + return result + if (attempt === POLL_RETRY_ATTEMPTS) + break + await clock.sleepMs(backoff) + backoff = Math.min(backoff * 2, POLL_RETRY_CAP_MS) + } + return { status: 'retry_5xx' } +} + +function expired(): BaseError { + return new BaseError({ + code: ErrorCode.ExpiredToken, + message: 'code expired before authorization', + }) +} + +export function realClock(): Clock { + const cancelled = false + return { + async sleepMs(ms) { + await new Promise<void>(r => setTimeout(r, ms)) + }, + isCancelled() { + return cancelled + }, + } +} diff --git a/cli/src/commands/auth/login/index.ts b/cli/src/commands/auth/login/index.ts new file mode 100644 index 0000000000..2ed172f8e0 --- /dev/null +++ b/cli/src/commands/auth/login/index.ts @@ -0,0 +1,41 @@ +import { Flags } from '@oclif/core' +import { resolveConfigDir } from '../../../config/dir.js' +import { realStreams } from '../../../io/streams.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runLogin } from
'./login.js' + +export default class Login extends DifyCommand { + static override description = 'Sign in to Dify via OAuth device flow' + + static override examples = [ + '<%= config.bin %> auth login', + '<%= config.bin %> auth login --host https://cloud.dify.ai', + '<%= config.bin %> auth login --no-browser', + ] + + static override flags = { + 'host': Flags.string({ + description: 'Dify host URL', + default: '', + }), + 'no-browser': Flags.boolean({ + description: 'do not auto-open the browser', + default: false, + }), + 'insecure': Flags.boolean({ + description: 'allow http:// hosts (local-dev only)', + default: false, + }), + } + + async run(): Promise { + const { flags } = await this.parse(Login) + await runLogin({ + configDir: resolveConfigDir(), + io: realStreams(), + host: flags.host, + noBrowser: flags['no-browser'], + insecure: flags.insecure, + }) + } +} diff --git a/cli/src/commands/auth/login/login.test.ts b/cli/src/commands/auth/login/login.test.ts new file mode 100644 index 0000000000..522623982b --- /dev/null +++ b/cli/src/commands/auth/login/login.test.ts @@ -0,0 +1,185 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { TokenStore } from '../../../auth/store.js' +import type { Clock } from './device-flow.js' +import { mkdtemp, readFile, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { DeviceFlowApi } from '../../../api/oauth-device.js' +import { createClient } from '../../../http/client.js' +import { bufferStreams } from '../../../io/streams.js' +import { runLogin } from './login.js' + +const noopClock: Clock = { + sleepMs: async () => { /* immediate */ }, + isCancelled: () => false, +} + +const noopBrowser = async (): Promise => { /* skip OS open */ } + +class MemStore implements TokenStore { + 
readonly entries = new Map() + async put(host: string, accountId: string, token: string): Promise { + this.entries.set(`${host}::${accountId}`, token) + } + + async get(host: string, accountId: string): Promise { + return this.entries.get(`${host}::${accountId}`) + } + + async delete(host: string, accountId: string): Promise { + this.entries.delete(`${host}::${accountId}`) + } + + async list(host: string): Promise { + const prefix = `${host}::` + return Array.from(this.entries.keys()) + .filter(k => k.startsWith(prefix)) + .map(k => k.slice(prefix.length)) + } +} + +describe('runLogin', () => { + let mock: DifyMock + let configDir: string + + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + configDir = await mkdtemp(join(tmpdir(), 'difyctl-login-')) + }) + + afterEach(async () => { + await mock.stop() + await rm(configDir, { recursive: true, force: true }) + }) + + it('happy: stores bearer + writes hosts.yml + greets account user', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = await runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: true, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + }) + expect(bundle.tokens?.bearer).toBe('dfoa_test') + expect(bundle.account?.email).toBe('tester@dify.ai') + expect(bundle.workspace?.id).toBe('ws-1') + expect(bundle.available_workspaces).toHaveLength(2) + const stored = await store.get(bundle.current_host, 'acct-1') + expect(stored).toBe('dfoa_test') + + const hostsRaw = await readFile(join(configDir, 'hosts.yml'), 'utf8') + expect(hostsRaw).toContain('current_host:') + expect(hostsRaw).toContain('tester@dify.ai') + + expect(io.outBuf()).toContain('Logged in to') + expect(io.outBuf()).toContain('tester@dify.ai') + expect(io.outBuf()).toContain('Default') + expect(io.errBuf()).toContain('ABCD-1234') + }) 
+ + it('sso: stores dfoe_ token + greets external SSO subject (no account)', async () => { + mock.setScenario('sso') + const io = bufferStreams() + const store = new MemStore() + const bundle = await runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: true, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + }) + expect(bundle.tokens?.bearer).toBe('dfoe_test') + expect(bundle.account).toBeUndefined() + expect(bundle.external_subject?.email).toBe('sso@dify.ai') + expect(bundle.external_subject?.issuer).toBe('https://issuer.example') + expect(io.outBuf()).toContain('external SSO') + expect(io.outBuf()).toContain('sso@dify.ai') + }) + + it('denied: throws DeviceFlowError + leaves config dir empty', async () => { + mock.setScenario('denied') + const io = bufferStreams() + const store = new MemStore() + await expect(runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: true, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + })).rejects.toThrow(/denied/) + expect(store.entries.size).toBe(0) + await expect(readFile(join(configDir, 'hosts.yml'), 'utf8')).rejects.toThrow(/ENOENT/) + }) + + it('expired: throws DeviceFlowError', async () => { + mock.setScenario('expired') + const io = bufferStreams() + const store = new MemStore() + await expect(runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: true, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + })).rejects.toThrow(/expired/) + }) + + it('rejects http:// host without --insecure', async () => { + const io = bufferStreams() + const store = new MemStore() + await 
expect(runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: false, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + })).rejects.toThrow(/https:\/\//) + }) + + it('emits skip-reason to stderr when --no-browser', async () => { + const io = bufferStreams() + const store = new MemStore() + await runLogin({ + configDir, + io, + host: mock.url, + noBrowser: true, + insecure: true, + deviceLabel: 'difyctl on test', + api: new DeviceFlowApi(createClient({ host: mock.url })), + store: { store, mode: 'file' }, + clock: noopClock, + browserOpener: noopBrowser, + }) + expect(io.errBuf()).toContain('--no-browser requested') + }) +}) diff --git a/cli/src/commands/auth/login/login.ts b/cli/src/commands/auth/login/login.ts new file mode 100644 index 0000000000..de05f52997 --- /dev/null +++ b/cli/src/commands/auth/login/login.ts @@ -0,0 +1,172 @@ +import type { CodeResponse, PollSuccess } from '../../../api/oauth-device.js' +import type { HostsBundle, StorageMode, Workspace } from '../../../auth/hosts.js' +import type { TokenStore } from '../../../auth/store.js' +import type { IOStreams } from '../../../io/streams.js' +import type { BrowserEnv, BrowserOpener } from '../../../util/browser.js' +import type { Clock } from './device-flow.js' +import * as os from 'node:os' +import * as readline from 'node:readline' +import { DeviceFlowApi } from '../../../api/oauth-device.js' +import { saveHosts } from '../../../auth/hosts.js' +import { selectStore } from '../../../auth/store.js' +import { createClient } from '../../../http/client.js' +import { colorEnabled, colorScheme } from '../../../io/color.js' +import { decideOpen, OpenDecision, openUrl, realEnv } from '../../../util/browser.js' +import { bareHost, DEFAULT_HOST, resolveHost, validateVerificationURI } from '../../../util/host.js' +import { awaitAuthorization, realClock } from 
'./device-flow.js' + +export type LoginOptions = { + readonly configDir: string + readonly io: IOStreams + readonly host?: string + readonly noBrowser?: boolean + readonly insecure?: boolean + readonly deviceLabel?: string + readonly store?: { readonly store: TokenStore, readonly mode: StorageMode } + readonly api?: DeviceFlowApi + readonly browserEnv?: BrowserEnv + readonly browserOpener?: BrowserOpener + readonly clock?: Clock +} + +export async function runLogin(opts: LoginOptions): Promise { + const cs = colorScheme(colorEnabled(opts.io.isErrTTY)) + const insecure = opts.insecure ?? false + + const host = await resolveLoginHost(opts, insecure) + const label = opts.deviceLabel ?? defaultDeviceLabel() + + const api = opts.api ?? new DeviceFlowApi(createClient({ host })) + const code = await api.requestCode({ device_label: label }) + + renderCodePrompt(opts.io.err, cs, code) + validateVerificationURI(code.verification_uri, insecure) + + const env = opts.browserEnv ?? realEnv() + const decision = decideOpen(env, opts.noBrowser ?? false) + if (decision === OpenDecision.Auto) { + const opener = opts.browserOpener ?? openUrl + try { + await opener(code.verification_uri) + } + catch (err) { + opts.io.err.write(`${cs.warningIcon()} couldn't open browser (${(err as Error).message}); open the URL above manually\n`) + } + } + else { + opts.io.err.write(`${cs.warningIcon()} ${decision} — open the URL above manually\n`) + } + + const success = await awaitAuthorization(api, code, { clock: opts.clock ?? realClock() }) + + const storeBundle = opts.store ?? 
await selectStore({ configDir: opts.configDir }) + const bundle = bundleFromSuccess(host, success, storeBundle.mode) + + await storeBundle.store.put(bundle.current_host, accountKey(bundle), success.token) + await saveHosts(opts.configDir, bundle) + + renderLoggedIn(opts.io.out, cs, host, success) + return bundle +} + +async function resolveLoginHost(opts: LoginOptions, insecure: boolean): Promise { + let raw = opts.host?.trim() ?? '' + if (raw === '') + raw = await promptHost(opts.io) + return resolveHost({ raw, insecure }) +} + +async function promptHost(io: IOStreams): Promise { + io.err.write(`? Dify host [${DEFAULT_HOST}]: `) + const rl = readline.createInterface({ input: io.in, output: io.err, terminal: false }) + try { + const line: string = await new Promise(resolve => rl.once('line', resolve)) + return line.trim() + } + finally { + rl.close() + } +} + +function defaultDeviceLabel(): string { + const host = os.hostname() + return `difyctl on ${host !== '' ? host : 'unknown-host'}` +} + +function renderCodePrompt(w: NodeJS.WritableStream, cs: ReturnType, code: CodeResponse): void { + w.write(`${cs.warningIcon()} Copy this one-time code: ${cs.bold(code.user_code)}\n`) + w.write(` Open: ${code.verification_uri}\n`) +} + +function renderLoggedIn(w: NodeJS.WritableStream, cs: ReturnType, host: string, s: PollSuccess): void { + const display = bareHost(host) + if (s.account !== undefined && s.account.email !== '') { + w.write(`${cs.successIcon()} Logged in to ${display} as ${cs.bold(s.account.email)} (${s.account.name})\n`) + const ws = findDefaultWorkspace(s) + if (ws !== undefined) + w.write(` Workspace: ${ws.name}\n`) + return + } + if (s.subject_email !== undefined && s.subject_email !== '') { + if (s.subject_issuer !== undefined && s.subject_issuer !== '') + w.write(`${cs.successIcon()} Logged in to ${display} as ${cs.bold(s.subject_email)} (external SSO, issuer: ${s.subject_issuer})\n`) + else + w.write(`${cs.successIcon()} Logged in to ${display} as 
${cs.bold(s.subject_email)} (external SSO)\n`) + return + } + w.write(`${cs.successIcon()} Logged in to ${display}\n`) +} + +function findDefaultWorkspace(s: PollSuccess): { id: string, name: string, role: string } | undefined { + if (s.default_workspace_id === undefined || s.default_workspace_id === '') + return undefined + return s.workspaces?.find(w => w.id === s.default_workspace_id) +} + +function bundleFromSuccess(host: string, s: PollSuccess, mode: StorageMode): HostsBundle { + const display = bareHost(host) + let scheme: string | undefined + try { + const u = new URL(host) + if (u.protocol !== 'https:') + scheme = u.protocol.replace(':', '') + } + catch { /* keep undefined */ } + + const bundle: HostsBundle = { + current_host: display, + scheme, + token_storage: mode, + token_id: s.token_id, + tokens: { bearer: s.token }, + } + if (s.account !== undefined) { + bundle.account = { id: s.account.id, email: s.account.email, name: s.account.name } + } + if (s.subject_email !== undefined && s.subject_email !== '' + && (s.account === undefined || s.account.id === '')) { + bundle.external_subject = { + email: s.subject_email, + issuer: s.subject_issuer ?? 
'', + } + } + const def = findDefaultWorkspace(s) + if (def !== undefined) + bundle.workspace = def + if (s.workspaces !== undefined && s.workspaces.length > 0) { + bundle.available_workspaces = s.workspaces.map(w => ({ + id: w.id, + name: w.name, + role: w.role, + })) + } + return bundle +} + +function accountKey(b: HostsBundle): string { + if (b.account?.id !== undefined && b.account.id !== '') + return b.account.id + if (b.external_subject?.email !== undefined && b.external_subject.email !== '') + return b.external_subject.email + return 'default' +} diff --git a/cli/src/commands/auth/logout/index.ts b/cli/src/commands/auth/logout/index.ts new file mode 100644 index 0000000000..3049575ca5 --- /dev/null +++ b/cli/src/commands/auth/logout/index.ts @@ -0,0 +1,39 @@ +import type { KyInstance } from 'ky' +import { loadHosts } from '../../../auth/hosts.js' +import { selectStore } from '../../../auth/store.js' +import { resolveConfigDir } from '../../../config/dir.js' +import { createClient } from '../../../http/client.js' +import { runWithSpinner } from '../../../io/spinner.js' +import { realStreams } from '../../../io/streams.js' +import { hostWithScheme } from '../../../util/host.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runLogout } from './logout.js' + +export default class Logout extends DifyCommand { + static override description = 'Log out of the active Dify host' + + static override examples = [ + '<%= config.bin %> auth logout', + ] + + async run(): Promise { + const configDir = resolveConfigDir() + const bundle = await loadHosts(configDir) + const { store } = await selectStore({ configDir }) + + let http: KyInstance | undefined + if (bundle !== undefined && bundle.current_host !== '' && bundle.tokens?.bearer !== undefined && bundle.tokens.bearer !== '') { + http = createClient({ + host: hostWithScheme(bundle.current_host, bundle.scheme), + bearer: bundle.tokens.bearer, + retryAttempts: 0, + }) + } + + const io = realStreams() 
+ await runWithSpinner( + { io, label: 'Signing out', enabled: true, style: 'dify-dim' }, + () => runLogout({ configDir, io, bundle, http, store }), + ) + } +} diff --git a/cli/src/commands/auth/logout/logout.test.ts b/cli/src/commands/auth/logout/logout.test.ts new file mode 100644 index 0000000000..4fd3f53e8b --- /dev/null +++ b/cli/src/commands/auth/logout/logout.test.ts @@ -0,0 +1,143 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { TokenStore } from '../../../auth/store.js' +import { mkdtemp, readFile, rm, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { saveHosts } from '../../../auth/hosts.js' +import { createClient } from '../../../http/client.js' +import { bufferStreams } from '../../../io/streams.js' +import { runLogout } from './logout.js' + +class MemStore implements TokenStore { + readonly entries = new Map() + async put(host: string, accountId: string, token: string): Promise { + this.entries.set(`${host}::${accountId}`, token) + } + + async get(host: string, accountId: string): Promise { + return this.entries.get(`${host}::${accountId}`) + } + + async delete(host: string, accountId: string): Promise { + this.entries.delete(`${host}::${accountId}`) + } + + async list(host: string): Promise { + const prefix = `${host}::` + return Array.from(this.entries.keys()) + .filter(k => k.startsWith(prefix)) + .map(k => k.slice(prefix.length)) + } +} + +function fixtureBundle(host: string): HostsBundle { + return { + current_host: host, + scheme: 'http', + token_storage: 'file', + token_id: 'tok-1', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + workspace: { id: 'ws-1', name: 'Default', 
role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +describe('runLogout', () => { + let mock: DifyMock + let configDir: string + + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + configDir = await mkdtemp(join(tmpdir(), 'difyctl-logout-')) + }) + + afterEach(async () => { + await mock.stop() + await rm(configDir, { recursive: true, force: true }) + }) + + it('happy: revokes server side, clears local store + hosts.yml', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = fixtureBundle(mock.url) + await store.put(bundle.current_host, 'acct-1', 'dfoa_test') + await saveHosts(configDir, bundle) + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runLogout({ configDir, io, bundle, http, store }) + + expect(store.entries.size).toBe(0) + await expect(readFile(join(configDir, 'hosts.yml'), 'utf8')).rejects.toThrow(/ENOENT/) + expect(io.outBuf()).toContain('Logged out of') + expect(io.errBuf()).toBe('') + }) + + it('not-logged-in: throws BaseError', async () => { + const io = bufferStreams() + const store = new MemStore() + await expect(runLogout({ configDir, io, bundle: undefined, store })).rejects.toThrow(/not logged in/) + }) + + it('hosts.yml absent: still completes locally + emits success', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = fixtureBundle(mock.url) + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runLogout({ configDir, io, bundle, http, store }) + + expect(io.outBuf()).toContain('Logged out of') + }) + + it('server revoke fails: warns to stderr but still clears local + exits 0', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = fixtureBundle(mock.url) + await store.put(bundle.current_host, 'acct-1', 'dfoa_test') + await saveHosts(configDir, bundle) + 
mock.setScenario('server-5xx') + const http = createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 }) + + await runLogout({ configDir, io, bundle, http, store }) + + expect(store.entries.size).toBe(0) + expect(io.errBuf()).toContain('server revoke failed') + expect(io.outBuf()).toContain('Logged out of') + }) + + it('skips server revoke for non-OAuth bearer (e.g. dfp_)', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = fixtureBundle(mock.url) + bundle.tokens = { bearer: 'dfp_personal_token' } + await store.put(bundle.current_host, 'acct-1', 'dfp_personal_token') + await saveHosts(configDir, bundle) + const http = createClient({ host: mock.url, bearer: 'dfp_personal_token' }) + + await runLogout({ configDir, io, bundle, http, store }) + + expect(io.errBuf()).toBe('') + expect(store.entries.size).toBe(0) + }) + + it('preserves unrelated files in configDir', async () => { + const io = bufferStreams() + const store = new MemStore() + const bundle = fixtureBundle(mock.url) + await saveHosts(configDir, bundle) + await writeFile(join(configDir, 'config.yml'), 'foo: bar\n', 'utf8') + const http = createClient({ host: mock.url, bearer: 'dfoa_test' }) + + await runLogout({ configDir, io, bundle, http, store }) + + const cfg = await readFile(join(configDir, 'config.yml'), 'utf8') + expect(cfg).toContain('foo: bar') + }) +}) diff --git a/cli/src/commands/auth/logout/logout.ts b/cli/src/commands/auth/logout/logout.ts new file mode 100644 index 0000000000..48660b6b35 --- /dev/null +++ b/cli/src/commands/auth/logout/logout.ts @@ -0,0 +1,70 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { TokenStore } from '../../../auth/store.js' +import type { IOStreams } from '../../../io/streams.js' +import { unlink } from 'node:fs/promises' +import { join } from 'node:path' +import { AccountSessionsClient } from '../../../api/account-sessions.js' +import { 
HOSTS_FILE_NAME } from '../../../auth/hosts.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { colorEnabled, colorScheme } from '../../../io/color.js' + +export type LogoutOptions = { + readonly configDir: string + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly http?: KyInstance + readonly store: TokenStore +} + +export async function runLogout(opts: LogoutOptions): Promise { + const cs = colorScheme(colorEnabled(opts.io.isErrTTY)) + const bundle = opts.bundle + if (bundle === undefined || bundle.current_host === '' || bundle.tokens?.bearer === undefined || bundle.tokens.bearer === '') { + throw new BaseError({ + code: ErrorCode.NotLoggedIn, + message: 'not logged in', + hint: 'run \'difyctl auth login\'', + }) + } + + let revokeWarning = '' + if (revokeAllowed(bundle.tokens.bearer) && opts.http !== undefined) { + try { + const sessions = new AccountSessionsClient(opts.http) + await sessions.revokeSelf() + } + catch (err) { + revokeWarning = `${cs.warningIcon()} server revoke failed (${(err as Error).message}); local credentials cleared anyway\n` + } + } + + await clearLocal(opts.configDir, bundle, opts.store) + + if (revokeWarning !== '') + opts.io.err.write(revokeWarning) + opts.io.out.write(`${cs.successIcon()} Logged out of ${bundle.current_host}\n`) +} + +const REVOCABLE_PREFIXES = ['dfoa_', 'dfoe_'] as const + +function revokeAllowed(bearer: string): boolean { + return REVOCABLE_PREFIXES.some(p => bearer.startsWith(p)) +} + +async function clearLocal(configDir: string, bundle: HostsBundle, store: TokenStore): Promise { + const accountId = bundle.account?.id ?? bundle.external_subject?.email ?? 
'default' + try { + await store.delete(bundle.current_host, accountId) + } + catch { /* best-effort */ } + const hostsPath = join(configDir, HOSTS_FILE_NAME) + try { + await unlink(hostsPath) + } + catch (err) { + if ((err as NodeJS.ErrnoException).code !== 'ENOENT') + throw err + } +} diff --git a/cli/src/commands/auth/status/index.ts b/cli/src/commands/auth/status/index.ts new file mode 100644 index 0000000000..f4f4393cef --- /dev/null +++ b/cli/src/commands/auth/status/index.ts @@ -0,0 +1,28 @@ +import { Flags } from '@oclif/core' +import { loadHosts } from '../../../auth/hosts.js' +import { resolveConfigDir } from '../../../config/dir.js' +import { realStreams } from '../../../io/streams.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runStatus } from './status.js' + +export default class Status extends DifyCommand { + static override description = 'Show authentication status for the active host' + + static override examples = [ + '<%= config.bin %> auth status', + '<%= config.bin %> auth status -v', + '<%= config.bin %> auth status --json', + ] + + static override flags = { + verbose: Flags.boolean({ char: 'v', description: 'show account/workspace ids and storage mode', default: false }), + json: Flags.boolean({ description: 'emit JSON', default: false }), + } + + async run(): Promise { + const { flags } = await this.parse(Status) + const configDir = resolveConfigDir() + const bundle = await loadHosts(configDir) + await runStatus({ io: realStreams(), bundle, verbose: flags.verbose, json: flags.json }) + } +} diff --git a/cli/src/commands/auth/status/status.test.ts b/cli/src/commands/auth/status/status.test.ts new file mode 100644 index 0000000000..0000e9cd59 --- /dev/null +++ b/cli/src/commands/auth/status/status.test.ts @@ -0,0 +1,94 @@ +import type { HostsBundle } from '../../../auth/hosts.js' +import { describe, expect, it } from 'vitest' +import { bufferStreams } from '../../../io/streams.js' +import { runStatus } from 
'./status.js' + +function accountBundle(): HostsBundle { + return { + current_host: 'cloud.dify.ai', + token_storage: 'keychain', + token_id: 'tok-1', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +function ssoBundle(): HostsBundle { + return { + current_host: 'cloud.dify.ai', + token_storage: 'file', + token_id: 'tok-sso-1', + tokens: { bearer: 'dfoe_test' }, + external_subject: { email: 'sso@dify.ai', issuer: 'https://issuer.example' }, + } +} + +describe('runStatus', () => { + it('logged-out: prints message + throws NotLoggedIn', async () => { + const io = bufferStreams() + await expect(runStatus({ io, bundle: undefined })).rejects.toThrow(/not logged in/) + expect(io.outBuf()).toContain('Not logged in') + }) + + it('logged-out json: emits {logged_in: false}', async () => { + const io = bufferStreams() + await expect(runStatus({ io, bundle: undefined, json: true })).rejects.toThrow(/not logged in/) + expect(JSON.parse(io.outBuf())).toEqual({ host: null, logged_in: false }) + }) + + it('account: human compact', async () => { + const io = bufferStreams() + await runStatus({ io, bundle: accountBundle() }) + const out = io.outBuf() + expect(out).toContain('Logged in to cloud.dify.ai as tester@dify.ai (Test Tester)') + expect(out).toContain('Workspace: Default') + expect(out).toContain('full access') + }) + + it('account verbose: shows ids + storage + workspace count', async () => { + const io = bufferStreams() + await runStatus({ io, bundle: accountBundle(), verbose: true }) + const out = io.outBuf() + expect(out).toContain('cloud.dify.ai') + expect(out).toContain('Account:') + expect(out).toContain('acct-1') + expect(out).toContain('Workspace: Default (ws-1, role: owner)') + 
expect(out).toContain('Available: 2 workspaces') + expect(out).toContain('Storage: keychain') + }) + + it('sso: human compact mentions issuer', async () => { + const io = bufferStreams() + await runStatus({ io, bundle: ssoBundle() }) + const out = io.outBuf() + expect(out).toContain('sso@dify.ai (via https://issuer.example)') + expect(out).toContain('apps:run') + }) + + it('account json: matches schema with workspace + workspace count', async () => { + const io = bufferStreams() + await runStatus({ io, bundle: accountBundle(), json: true }) + const parsed = JSON.parse(io.outBuf()) as Record + expect(parsed.host).toBe('cloud.dify.ai') + expect(parsed.logged_in).toBe(true) + expect(parsed.storage).toBe('keychain') + expect(parsed.account).toEqual({ id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }) + expect(parsed.workspace).toEqual({ id: 'ws-1', name: 'Default', role: 'owner' }) + expect(parsed.available_workspaces_count).toBe(2) + }) + + it('sso json: subject_type external_sso + email + issuer, no account', async () => { + const io = bufferStreams() + await runStatus({ io, bundle: ssoBundle(), json: true }) + const parsed = JSON.parse(io.outBuf()) as Record + expect(parsed.subject_type).toBe('external_sso') + expect(parsed.subject_email).toBe('sso@dify.ai') + expect(parsed.subject_issuer).toBe('https://issuer.example') + expect(parsed.account).toBeUndefined() + }) +}) diff --git a/cli/src/commands/auth/status/status.ts b/cli/src/commands/auth/status/status.ts new file mode 100644 index 0000000000..c666b08b0a --- /dev/null +++ b/cli/src/commands/auth/status/status.ts @@ -0,0 +1,91 @@ +import type { HostsBundle } from '../../../auth/hosts.js' +import type { IOStreams } from '../../../io/streams.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' + +export type StatusOptions = { + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly verbose?: boolean + readonly json?: boolean 
+} + +export async function runStatus(opts: StatusOptions): Promise { + const bundle = opts.bundle + if (bundle === undefined || bundle.current_host === '' || bundle.tokens?.bearer === undefined || bundle.tokens.bearer === '') { + if (opts.json === true) { + opts.io.out.write(`${JSON.stringify({ host: null, logged_in: false })}\n`) + } + else { + opts.io.out.write('Not logged in. Run \'difyctl auth login\' to sign in.\n') + } + throw new BaseError({ code: ErrorCode.NotLoggedIn, message: 'not logged in' }) + } + + if (opts.json === true) { + opts.io.out.write(`${renderJson(bundle)}\n`) + return + } + opts.io.out.write(renderHuman(bundle, opts.verbose ?? false)) +} + +function renderHuman(b: HostsBundle, verbose: boolean): string { + const lines: string[] = [] + if (!verbose) { + if (b.external_subject !== undefined) { + const sub = b.external_subject + lines.push(sub.issuer !== '' + ? `Logged in to ${b.current_host} as ${sub.email} (via ${sub.issuer})` + : `Logged in to ${b.current_host} as ${sub.email} (via SSO)`) + lines.push(' Scope: apps:run') + return `${lines.join('\n')}\n` + } + const acc = b.account ?? { id: '', email: '', name: '' } + lines.push(`Logged in to ${b.current_host} as ${acc.email} (${acc.name})`) + if (b.workspace?.name !== undefined && b.workspace.name !== '') + lines.push(` Workspace: ${b.workspace.name}`) + lines.push(' Session: Dify account — full access') + return `${lines.join('\n')}\n` + } + + if (b.external_subject !== undefined) { + const sub = b.external_subject + lines.push(b.current_host) + lines.push(sub.issuer !== '' + ? ` Subject: ${sub.email} (external SSO, issuer: ${sub.issuer})` + : ` Subject: ${sub.email} (external SSO)`) + lines.push(' Session: External SSO — can run apps, cannot manage workspace resources (scope: apps:run)') + lines.push(` Storage: ${b.token_storage}`) + return `${lines.join('\n')}\n` + } + const acc = b.account ?? 
{ id: '', email: '', name: '' } + lines.push(b.current_host) + lines.push(` Account: ${acc.email} (${acc.name}, ${acc.id ?? ''})`) + if (b.workspace?.id !== undefined && b.workspace.id !== '') + lines.push(` Workspace: ${b.workspace.name} (${b.workspace.id}, role: ${b.workspace.role})`) + lines.push(` Available: ${b.available_workspaces?.length ?? 0} workspaces`) + lines.push(' Session: Dify account — full access (scope: full)') + lines.push(` Storage: ${b.token_storage}`) + return `${lines.join('\n')}\n` +} + +function renderJson(b: HostsBundle): string { + const out: Record = { + host: b.current_host, + logged_in: true, + storage: b.token_storage, + } + if (b.external_subject !== undefined) { + out.subject_type = 'external_sso' + out.subject_email = b.external_subject.email + out.subject_issuer = b.external_subject.issuer + } + else if (b.account !== undefined) { + out.account = { id: b.account.id ?? '', email: b.account.email, name: b.account.name } + if (b.workspace?.id !== undefined && b.workspace.id !== '') { + out.workspace = { id: b.workspace.id, name: b.workspace.name, role: b.workspace.role } + } + out.available_workspaces_count = b.available_workspaces?.length ?? 
0 + } + return JSON.stringify(out, null, 2) +} diff --git a/cli/src/commands/auth/use/index.ts b/cli/src/commands/auth/use/index.ts new file mode 100644 index 0000000000..994c9b00db --- /dev/null +++ b/cli/src/commands/auth/use/index.ts @@ -0,0 +1,25 @@ +import { Args } from '@oclif/core' +import { loadHosts } from '../../../auth/hosts.js' +import { resolveConfigDir } from '../../../config/dir.js' +import { realStreams } from '../../../io/streams.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runUse } from './use.js' + +export default class Use extends DifyCommand { + static override description = 'Switch the active workspace for the current host' + + static override examples = [ + '<%= config.bin %> auth use ws-abc123', + ] + + static override args = { + workspaceId: Args.string({ description: 'workspace id to activate', required: true }), + } + + async run(): Promise { + const { args } = await this.parse(Use) + const configDir = resolveConfigDir() + const bundle = await loadHosts(configDir) + await runUse({ configDir, io: realStreams(), bundle, workspaceId: args.workspaceId }) + } +} diff --git a/cli/src/commands/auth/use/use.test.ts b/cli/src/commands/auth/use/use.test.ts new file mode 100644 index 0000000000..178785a630 --- /dev/null +++ b/cli/src/commands/auth/use/use.test.ts @@ -0,0 +1,71 @@ +import type { HostsBundle } from '../../../auth/hosts.js' +import { mkdtemp, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { loadHosts, saveHosts } from '../../../auth/hosts.js' +import { bufferStreams } from '../../../io/streams.js' +import { runUse } from './use.js' + +function accountBundle(): HostsBundle { + return { + current_host: 'cloud.dify.ai', + token_storage: 'file', + token_id: 'tok-1', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + 
workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +describe('runUse', () => { + let configDir: string + beforeEach(async () => { + configDir = await mkdtemp(join(tmpdir(), 'difyctl-use-')) + }) + afterEach(async () => { + await rm(configDir, { recursive: true, force: true }) + }) + + it('switches workspace + persists hosts.yml', async () => { + const io = bufferStreams() + const b = accountBundle() + await saveHosts(configDir, b) + const next = await runUse({ configDir, io, bundle: b, workspaceId: 'ws-2' }) + expect(next.workspace).toEqual({ id: 'ws-2', name: 'Other', role: 'normal' }) + const reloaded = await loadHosts(configDir) + expect(reloaded?.workspace?.id).toBe('ws-2') + expect(io.outBuf()).toContain('Switched to workspace Other (ws-2)') + }) + + it('not-logged-in: throws NotLoggedIn', async () => { + const io = bufferStreams() + await expect(runUse({ configDir, io, bundle: undefined, workspaceId: 'ws-1' })) + .rejects + .toThrow(/not logged in/) + }) + + it('sso: throws workspace-unavailable', async () => { + const io = bufferStreams() + const b: HostsBundle = { + current_host: 'cloud.dify.ai', + token_storage: 'file', + tokens: { bearer: 'dfoe_test' }, + external_subject: { email: 'sso@dify.ai', issuer: 'https://issuer.example' }, + } + await expect(runUse({ configDir, io, bundle: b, workspaceId: 'ws-1' })) + .rejects + .toThrow(/workspace context unavailable/) + }) + + it('unknown workspace: throws UsageMissingArg', async () => { + const io = bufferStreams() + await expect(runUse({ configDir, io, bundle: accountBundle(), workspaceId: 'ws-bogus' })) + .rejects + .toThrow(/ws-bogus.*not found/) + }) +}) diff --git a/cli/src/commands/auth/use/use.ts b/cli/src/commands/auth/use/use.ts new file mode 100644 index 0000000000..04454785b2 --- /dev/null +++ b/cli/src/commands/auth/use/use.ts @@ -0,0 +1,49 @@ +import 
type { HostsBundle, Workspace } from '../../../auth/hosts.js' +import type { IOStreams } from '../../../io/streams.js' +import { saveHosts } from '../../../auth/hosts.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { colorEnabled, colorScheme } from '../../../io/color.js' + +export type UseOptions = { + readonly configDir: string + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly workspaceId: string +} + +export async function runUse(opts: UseOptions): Promise { + const cs = colorScheme(colorEnabled(opts.io.isErrTTY)) + const b = opts.bundle + if (b === undefined || b.tokens?.bearer === undefined || b.tokens.bearer === '') { + throw new BaseError({ + code: ErrorCode.NotLoggedIn, + message: 'not logged in', + hint: 'run \'difyctl auth login\'', + }) + } + if (b.external_subject !== undefined) { + throw new BaseError({ + code: ErrorCode.UsageInvalidFlag, + message: 'workspace context unavailable for external SSO sessions', + hint: 'external SSO subjects don\'t carry tenant memberships in difyctl', + }) + } + + const found = (b.available_workspaces ?? 
[]).find(w => w.id === opts.workspaceId) + if (found === undefined) { + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: `workspace "${opts.workspaceId}" not found in available_workspaces; run 'difyctl auth status' to list`, + }) + } + + const next: HostsBundle = { ...b, workspace: pickWorkspace(found) } + await saveHosts(opts.configDir, next) + opts.io.out.write(`${cs.successIcon()} Switched to workspace ${found.name} (${found.id})\n`) + return next +} + +function pickWorkspace(w: Workspace): Workspace { + return { id: w.id, name: w.name, role: w.role } +} diff --git a/cli/src/commands/auth/whoami/index.ts b/cli/src/commands/auth/whoami/index.ts new file mode 100644 index 0000000000..88be053fde --- /dev/null +++ b/cli/src/commands/auth/whoami/index.ts @@ -0,0 +1,26 @@ +import { Flags } from '@oclif/core' +import { loadHosts } from '../../../auth/hosts.js' +import { resolveConfigDir } from '../../../config/dir.js' +import { realStreams } from '../../../io/streams.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runWhoami } from './whoami.js' + +export default class Whoami extends DifyCommand { + static override description = 'Print the active subject\'s identity' + + static override examples = [ + '<%= config.bin %> auth whoami', + '<%= config.bin %> auth whoami --json', + ] + + static override flags = { + json: Flags.boolean({ description: 'emit JSON', default: false }), + } + + async run(): Promise { + const { flags } = await this.parse(Whoami) + const configDir = resolveConfigDir() + const bundle = await loadHosts(configDir) + await runWhoami({ io: realStreams(), bundle, json: flags.json }) + } +} diff --git a/cli/src/commands/auth/whoami/whoami.test.ts b/cli/src/commands/auth/whoami/whoami.test.ts new file mode 100644 index 0000000000..f38a4b634f --- /dev/null +++ b/cli/src/commands/auth/whoami/whoami.test.ts @@ -0,0 +1,72 @@ +import type { HostsBundle } from '../../../auth/hosts.js' +import { describe, expect, it } 
from 'vitest' +import { bufferStreams } from '../../../io/streams.js' +import { runWhoami } from './whoami.js' + +function accountBundle(): HostsBundle { + return { + current_host: 'cloud.dify.ai', + token_storage: 'keychain', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + } +} + +describe('runWhoami', () => { + it('logged-out: throws NotLoggedIn', async () => { + const io = bufferStreams() + await expect(runWhoami({ io, bundle: undefined })).rejects.toThrow(/not logged in/) + }) + + it('account human: emits "email (name)"', async () => { + const io = bufferStreams() + await runWhoami({ io, bundle: accountBundle() }) + expect(io.outBuf()).toBe('tester@dify.ai (Test Tester)\n') + }) + + it('account human, no name: emits email only', async () => { + const io = bufferStreams() + const b = accountBundle() + b.account!.name = '' + await runWhoami({ io, bundle: b }) + expect(io.outBuf()).toBe('tester@dify.ai\n') + }) + + it('account json: emits {id, email, name}', async () => { + const io = bufferStreams() + await runWhoami({ io, bundle: accountBundle(), json: true }) + expect(JSON.parse(io.outBuf())).toEqual({ + id: 'acct-1', + email: 'tester@dify.ai', + name: 'Test Tester', + }) + }) + + it('sso human: emits email + issuer', async () => { + const io = bufferStreams() + const b: HostsBundle = { + current_host: 'cloud.dify.ai', + token_storage: 'file', + tokens: { bearer: 'dfoe_test' }, + external_subject: { email: 'sso@dify.ai', issuer: 'https://issuer.example' }, + } + await runWhoami({ io, bundle: b }) + expect(io.outBuf()).toBe('sso@dify.ai (external SSO, issuer: https://issuer.example)\n') + }) + + it('sso json: emits {subject_type, email, issuer}', async () => { + const io = bufferStreams() + const b: HostsBundle = { + current_host: 'cloud.dify.ai', + token_storage: 'file', + tokens: { bearer: 'dfoe_test' }, + external_subject: { email: 'sso@dify.ai', issuer: 'https://issuer.example' }, + } + await 
runWhoami({ io, bundle: b, json: true }) + expect(JSON.parse(io.outBuf())).toEqual({ + subject_type: 'external_sso', + email: 'sso@dify.ai', + issuer: 'https://issuer.example', + }) + }) +}) diff --git a/cli/src/commands/auth/whoami/whoami.ts b/cli/src/commands/auth/whoami/whoami.ts new file mode 100644 index 0000000000..fca750ae86 --- /dev/null +++ b/cli/src/commands/auth/whoami/whoami.ts @@ -0,0 +1,46 @@ +import type { HostsBundle } from '../../../auth/hosts.js' +import type { IOStreams } from '../../../io/streams.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' + +export type WhoamiOptions = { + readonly io: IOStreams + readonly bundle: HostsBundle | undefined + readonly json?: boolean +} + +export async function runWhoami(opts: WhoamiOptions): Promise { + const b = opts.bundle + if (b === undefined || b.tokens?.bearer === undefined || b.tokens.bearer === '') { + throw new BaseError({ + code: ErrorCode.NotLoggedIn, + message: 'not logged in', + hint: 'run \'difyctl auth login\'', + }) + } + + if (b.external_subject !== undefined) { + if (opts.json === true) { + opts.io.out.write(`${JSON.stringify({ + subject_type: 'external_sso', + email: b.external_subject.email, + issuer: b.external_subject.issuer, + })}\n`) + return + } + const sub = b.external_subject + opts.io.out.write(sub.issuer !== '' + ? `${sub.email} (external SSO, issuer: ${sub.issuer})\n` + : `${sub.email} (external SSO)\n`) + return + } + + const acc = b.account ?? { id: '', email: '', name: '' } + if (opts.json === true) { + opts.io.out.write(`${JSON.stringify({ id: acc.id ?? '', email: acc.email, name: acc.name })}\n`) + return + } + opts.io.out.write(acc.name !== '' + ? 
`${acc.email} (${acc.name})\n` + : `${acc.email}\n`) +} diff --git a/cli/src/commands/config/get/index.ts b/cli/src/commands/config/get/index.ts new file mode 100644 index 0000000000..655595f9d7 --- /dev/null +++ b/cli/src/commands/config/get/index.ts @@ -0,0 +1,21 @@ +import { Args } from '@oclif/core' +import { resolveConfigDir } from '../../../config/dir.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runConfigGet } from './run.js' + +export default class ConfigGet extends DifyCommand { + static override description = 'Print one config key\'s value' + + static override examples = [ + '<%= config.bin %> config get defaults.format', + ] + + static override args = { + key: Args.string({ description: 'config key', required: true }), + } + + async run(): Promise { + const { args } = await this.parse(ConfigGet) + process.stdout.write(await runConfigGet({ dir: resolveConfigDir(), key: args.key })) + } +} diff --git a/cli/src/commands/config/get/run.test.ts b/cli/src/commands/config/get/run.test.ts new file mode 100644 index 0000000000..7274a6e624 --- /dev/null +++ b/cli/src/commands/config/get/run.test.ts @@ -0,0 +1,52 @@ +import { mkdtemp, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { beforeEach, describe, expect, it } from 'vitest' +import { FILE_NAME } from '../../../config/schema.js' +import { isBaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { runConfigGet } from './run.js' + +describe('runConfigGet', () => { + let dir: string + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-get-')) + }) + + it('returns set value with trailing newline', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: yaml\n', + 'utf8', + ) + const out = await runConfigGet({ dir, key: 'defaults.format' }) + expect(out).toBe('yaml\n') + }) + + it('returns empty line when key 
is unset (matches Go fmt.Fprintln)', async () => { + const out = await runConfigGet({ dir, key: 'defaults.format' }) + expect(out).toBe('\n') + }) + + it('throws BaseError(config_invalid_key) on unknown key', async () => { + let caught: unknown + try { + await runConfigGet({ dir, key: 'bogus.key' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidKey) + }) + + it('returns numeric limit as string', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n limit: 75\n', + 'utf8', + ) + const out = await runConfigGet({ dir, key: 'defaults.limit' }) + expect(out).toBe('75\n') + }) +}) diff --git a/cli/src/commands/config/get/run.ts b/cli/src/commands/config/get/run.ts new file mode 100644 index 0000000000..0f43213318 --- /dev/null +++ b/cli/src/commands/config/get/run.ts @@ -0,0 +1,15 @@ +import type { ConfigFile } from '../../../config/schema.js' +import { getKey } from '../../../config/keys.js' +import { loadConfig } from '../../../config/loader.js' +import { emptyConfig } from '../../../config/schema.js' + +export type RunConfigGetOptions = { + readonly key: string + readonly dir: string +} + +export async function runConfigGet(opts: RunConfigGetOptions): Promise { + const loaded = await loadConfig(opts.dir) + const config: ConfigFile = loaded.found ? 
loaded.config : emptyConfig() + return `${getKey(config, opts.key)}\n` +} diff --git a/cli/src/commands/config/path/index.ts b/cli/src/commands/config/path/index.ts new file mode 100644 index 0000000000..577a365041 --- /dev/null +++ b/cli/src/commands/config/path/index.ts @@ -0,0 +1,15 @@ +import { resolveConfigDir } from '../../../config/dir.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runConfigPath } from './run.js' + +export default class ConfigPath extends DifyCommand { + static override description = 'Print the resolved config.yml path' + + static override examples = [ + '<%= config.bin %> config path', + ] + + async run(): Promise { + process.stdout.write(runConfigPath({ dir: resolveConfigDir() })) + } +} diff --git a/cli/src/commands/config/path/run.test.ts b/cli/src/commands/config/path/run.test.ts new file mode 100644 index 0000000000..e9df22f85a --- /dev/null +++ b/cli/src/commands/config/path/run.test.ts @@ -0,0 +1,14 @@ +import { describe, expect, it } from 'vitest' +import { runConfigPath } from './run.js' + +describe('runConfigPath', () => { + it('joins dir and config.yml with trailing newline', () => { + const out = runConfigPath({ dir: '/tmp/x' }) + expect(out).toBe('/tmp/x/config.yml\n') + }) + + it('handles trailing slash on dir', () => { + const out = runConfigPath({ dir: '/tmp/x/' }) + expect(out).toBe('/tmp/x/config.yml\n') + }) +}) diff --git a/cli/src/commands/config/path/run.ts b/cli/src/commands/config/path/run.ts new file mode 100644 index 0000000000..88c03bc14d --- /dev/null +++ b/cli/src/commands/config/path/run.ts @@ -0,0 +1,10 @@ +import { join } from 'node:path' +import { FILE_NAME } from '../../../config/schema.js' + +export type RunConfigPathOptions = { + readonly dir: string +} + +export function runConfigPath(opts: RunConfigPathOptions): string { + return `${join(opts.dir, FILE_NAME)}\n` +} diff --git a/cli/src/commands/config/set/index.ts b/cli/src/commands/config/set/index.ts new file mode 100644 
index 0000000000..867bfdf463 --- /dev/null +++ b/cli/src/commands/config/set/index.ts @@ -0,0 +1,23 @@ +import { Args } from '@oclif/core' +import { resolveConfigDir } from '../../../config/dir.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runConfigSet } from './run.js' + +export default class ConfigSet extends DifyCommand { + static override description = 'Set a config key (validates value)' + + static override examples = [ + '<%= config.bin %> config set defaults.format json', + '<%= config.bin %> config set defaults.limit 50', + ] + + static override args = { + key: Args.string({ description: 'config key', required: true }), + value: Args.string({ description: 'config value', required: true }), + } + + async run(): Promise { + const { args } = await this.parse(ConfigSet) + process.stdout.write(await runConfigSet({ dir: resolveConfigDir(), key: args.key, value: args.value })) + } +} diff --git a/cli/src/commands/config/set/run.test.ts b/cli/src/commands/config/set/run.test.ts new file mode 100644 index 0000000000..959b331344 --- /dev/null +++ b/cli/src/commands/config/set/run.test.ts @@ -0,0 +1,88 @@ +import { mkdtemp, readFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { beforeEach, describe, expect, it } from 'vitest' +import { FILE_NAME } from '../../../config/schema.js' +import { isBaseError } from '../../../errors/base.js' +import { ErrorCode, ExitCode } from '../../../errors/codes.js' +import { runConfigSet } from './run.js' + +describe('runConfigSet', () => { + let dir: string + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-set-')) + }) + + it('writes config.yml and returns "set k = v\\n"', async () => { + const out = await runConfigSet({ dir, key: 'defaults.format', value: 'json' }) + expect(out).toBe('set defaults.format = json\n') + const raw = await readFile(join(dir, FILE_NAME), 'utf8') + expect(raw).toContain('format: json') + }) + + 
it('rejects invalid format value with config_invalid_value', async () => { + let caught: unknown + try { + await runConfigSet({ dir, key: 'defaults.format', value: 'csv' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidValue) + }) + + it('rejects unknown key with config_invalid_key', async () => { + let caught: unknown + try { + await runConfigSet({ dir, key: 'bogus', value: 'x' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidKey) + }) + + it('preserves prior keys when setting a new one', async () => { + await runConfigSet({ dir, key: 'defaults.format', value: 'yaml' }) + await runConfigSet({ dir, key: 'defaults.limit', value: '40' }) + const raw = await readFile(join(dir, FILE_NAME), 'utf8') + expect(raw).toContain('format: yaml') + expect(raw).toContain('limit: 40') + }) + + it('exit code for invalid value is Usage (2)', async () => { + let caught: unknown + try { + await runConfigSet({ dir, key: 'defaults.format', value: 'csv' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.exit()).toBe(ExitCode.Usage) + }) + + it('exit code for unknown key is Usage (2)', async () => { + let caught: unknown + try { + await runConfigSet({ dir, key: 'bogus', value: 'x' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.exit()).toBe(ExitCode.Usage) + }) + + it('typed wrap chain: invalid defaults.limit surfaces ConfigInvalidValue (not UsageInvalidFlag)', async () => { + let caught: unknown + try { + await runConfigSet({ dir, key: 'defaults.limit', value: 'abc' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) { + expect(caught.code).toBe(ErrorCode.ConfigInvalidValue) + 
expect(caught.exit()).toBe(ExitCode.Usage) + } + }) +}) diff --git a/cli/src/commands/config/set/run.ts b/cli/src/commands/config/set/run.ts new file mode 100644 index 0000000000..d59b065a4d --- /dev/null +++ b/cli/src/commands/config/set/run.ts @@ -0,0 +1,19 @@ +import type { ConfigFile } from '../../../config/schema.js' +import { setKey } from '../../../config/keys.js' +import { loadConfig } from '../../../config/loader.js' +import { emptyConfig } from '../../../config/schema.js' +import { saveConfig } from '../../../config/writer.js' + +export type RunConfigSetOptions = { + readonly key: string + readonly value: string + readonly dir: string +} + +export async function runConfigSet(opts: RunConfigSetOptions): Promise { + const loaded = await loadConfig(opts.dir) + const config: ConfigFile = loaded.found ? loaded.config : emptyConfig() + const next = setKey(config, opts.key, opts.value) + await saveConfig(opts.dir, next) + return `set ${opts.key} = ${opts.value}\n` +} diff --git a/cli/src/commands/config/unset/index.ts b/cli/src/commands/config/unset/index.ts new file mode 100644 index 0000000000..6ee8f8d875 --- /dev/null +++ b/cli/src/commands/config/unset/index.ts @@ -0,0 +1,21 @@ +import { Args } from '@oclif/core' +import { resolveConfigDir } from '../../../config/dir.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runConfigUnset } from './run.js' + +export default class ConfigUnset extends DifyCommand { + static override description = 'Reset a config key to its zero value' + + static override examples = [ + '<%= config.bin %> config unset defaults.format', + ] + + static override args = { + key: Args.string({ description: 'config key', required: true }), + } + + async run(): Promise { + const { args } = await this.parse(ConfigUnset) + process.stdout.write(await runConfigUnset({ dir: resolveConfigDir(), key: args.key })) + } +} diff --git a/cli/src/commands/config/unset/run.test.ts b/cli/src/commands/config/unset/run.test.ts new 
file mode 100644 index 0000000000..e67753149d --- /dev/null +++ b/cli/src/commands/config/unset/run.test.ts @@ -0,0 +1,47 @@ +import { mkdtemp, readFile, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { beforeEach, describe, expect, it } from 'vitest' +import { FILE_NAME } from '../../../config/schema.js' +import { isBaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { runConfigUnset } from './run.js' + +describe('runConfigUnset', () => { + let dir: string + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-unset-')) + }) + + it('clears the requested key, leaves others intact', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: json\n limit: 25\n', + 'utf8', + ) + const out = await runConfigUnset({ dir, key: 'defaults.format' }) + expect(out).toBe('unset defaults.format\n') + const raw = await readFile(join(dir, FILE_NAME), 'utf8') + expect(raw).not.toContain('format:') + expect(raw).toContain('limit: 25') + }) + + it('is a no-op (writes empty config) when key was already unset', async () => { + const out = await runConfigUnset({ dir, key: 'defaults.format' }) + expect(out).toBe('unset defaults.format\n') + const raw = await readFile(join(dir, FILE_NAME), 'utf8') + expect(raw).toContain('schema_version: 1') + }) + + it('rejects unknown key', async () => { + let caught: unknown + try { + await runConfigUnset({ dir, key: 'bogus' }) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidKey) + }) +}) diff --git a/cli/src/commands/config/unset/run.ts b/cli/src/commands/config/unset/run.ts new file mode 100644 index 0000000000..8bd0a512a5 --- /dev/null +++ b/cli/src/commands/config/unset/run.ts @@ -0,0 +1,18 @@ +import type { ConfigFile } from '../../../config/schema.js' +import { 
unsetKey } from '../../../config/keys.js' +import { loadConfig } from '../../../config/loader.js' +import { emptyConfig } from '../../../config/schema.js' +import { saveConfig } from '../../../config/writer.js' + +export type RunConfigUnsetOptions = { + readonly key: string + readonly dir: string +} + +export async function runConfigUnset(opts: RunConfigUnsetOptions): Promise { + const loaded = await loadConfig(opts.dir) + const config: ConfigFile = loaded.found ? loaded.config : emptyConfig() + const next = unsetKey(config, opts.key) + await saveConfig(opts.dir, next) + return `unset ${opts.key}\n` +} diff --git a/cli/src/commands/config/view/index.ts b/cli/src/commands/config/view/index.ts new file mode 100644 index 0000000000..04a2003972 --- /dev/null +++ b/cli/src/commands/config/view/index.ts @@ -0,0 +1,22 @@ +import { Flags } from '@oclif/core' +import { resolveConfigDir } from '../../../config/dir.js' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runConfigView } from './run.js' + +export default class ConfigView extends DifyCommand { + static override description = 'Print the resolved config' + + static override examples = [ + '<%= config.bin %> config view', + '<%= config.bin %> config view --json', + ] + + static override flags = { + json: Flags.boolean({ description: 'emit JSON', default: false }), + } + + async run(): Promise { + const { flags } = await this.parse(ConfigView) + process.stdout.write(await runConfigView({ dir: resolveConfigDir(), json: flags.json })) + } +} diff --git a/cli/src/commands/config/view/run.test.ts b/cli/src/commands/config/view/run.test.ts new file mode 100644 index 0000000000..b3bc93115e --- /dev/null +++ b/cli/src/commands/config/view/run.test.ts @@ -0,0 +1,70 @@ +import { mkdtemp, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { FILE_NAME } from 
'../../../config/schema.js' +import { runConfigView } from './run.js' + +describe('runConfigView', () => { + let dir: string + + beforeEach(async () => { + dir = await mkdtemp(join(tmpdir(), 'difyctl-view-')) + }) + + afterEach(async () => { + // tmpdir cleanup is best-effort + }) + + it('text format: empty config returns empty string', async () => { + const out = await runConfigView({ dir }) + expect(out).toBe('') + }) + + it('text format: emits "key = value" lines for set keys only', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: json\n limit: 50\nstate:\n current_app: app-1\n', + 'utf8', + ) + const out = await runConfigView({ dir }) + expect(out).toBe( + 'defaults.format = json\ndefaults.limit = 50\nstate.current_app = app-1\n', + ) + }) + + it('text format: skips unset keys', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: yaml\n', + 'utf8', + ) + const out = await runConfigView({ dir }) + expect(out).toBe('defaults.format = yaml\n') + expect(out).not.toContain('defaults.limit') + expect(out).not.toContain('state.current_app') + }) + + it('json format: empty config returns "{}\\n"', async () => { + const out = await runConfigView({ dir, json: true }) + expect(out).toBe('{}\n') + }) + + it('json format: defaults.limit is numeric, others are strings', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: table\n limit: 100\nstate:\n current_app: app-x\n', + 'utf8', + ) + const out = await runConfigView({ dir, json: true }) + const parsed = JSON.parse(out) as Record + expect(parsed['defaults.format']).toBe('table') + expect(parsed['defaults.limit']).toBe(100) + expect(parsed['state.current_app']).toBe('app-x') + }) + + it('json format: trailing newline matches Go encoder.Encode', async () => { + const out = await runConfigView({ dir, json: true }) + expect(out.endsWith('\n')).toBe(true) + }) +}) diff --git 
a/cli/src/commands/config/view/run.ts b/cli/src/commands/config/view/run.ts new file mode 100644 index 0000000000..bda070ef46 --- /dev/null +++ b/cli/src/commands/config/view/run.ts @@ -0,0 +1,46 @@ +import type { ConfigFile } from '../../../config/schema.js' +import { knownKeyNames, lookupKey } from '../../../config/keys.js' +import { loadConfig } from '../../../config/loader.js' +import { emptyConfig } from '../../../config/schema.js' + +export type RunConfigViewOptions = { + readonly json?: boolean + readonly dir: string +} + +type ViewOut = Record + +export async function runConfigView(opts: RunConfigViewOptions): Promise { + const loaded = await loadConfig(opts.dir) + const config: ConfigFile = loaded.found ? loaded.config : emptyConfig() + const out = collect(config) + if (opts.json) + return `${JSON.stringify(out, null, 2)}\n` + let text = '' + for (const k of knownKeyNames()) { + if (!(k in out)) + continue + text += `${k} = ${out[k]}\n` + } + return text +} + +function collect(config: ConfigFile): ViewOut { + const out: ViewOut = {} + for (const k of knownKeyNames()) { + const spec = lookupKey(k) + if (spec === undefined) + continue + const v = spec.get(config) + if (v === '') + continue + if (k === 'defaults.limit') { + const n = Number.parseInt(v, 10) + if (Number.isFinite(n)) + out[k] = n + continue + } + out[k] = v + } + return out +} diff --git a/cli/src/commands/coverage.test.ts b/cli/src/commands/coverage.test.ts new file mode 100644 index 0000000000..0304eb9966 --- /dev/null +++ b/cli/src/commands/coverage.test.ts @@ -0,0 +1,22 @@ +import { describe, expect, it } from 'vitest' + +const INDEX_MODULES = import.meta.glob<{ default?: unknown }>( + './**/index.ts', + { eager: true }, +) + +const COMMAND_MODULES = Object.fromEntries( + Object.entries(INDEX_MODULES).filter(([path]) => !path.includes('/_')), +) + +describe('command folder coverage', () => { + it('discovers at least one command index', () => { + 
expect(Object.keys(COMMAND_MODULES).length).toBeGreaterThan(0) + }) + + describe.each(Object.entries(COMMAND_MODULES))('%s', (path, mod) => { + it('default export exists', () => { + expect(mod.default, `${path}: missing default export`).toBeDefined() + }) + }) +}) diff --git a/cli/src/commands/describe/app/handlers.ts b/cli/src/commands/describe/app/handlers.ts new file mode 100644 index 0000000000..c828de78e3 --- /dev/null +++ b/cli/src/commands/describe/app/handlers.ts @@ -0,0 +1,72 @@ +import type { TextHandler } from '../../../printers/format-text.js' +import type { AppMeta } from '../../../types/app-meta.js' +import type { DescribeInfo, Tag } from '../../../types/app.js' + +export const APP_DESCRIBE_MODE_KEY = 'app-describe' + +export type AppDescribePayload = { + info: DescribeInfo | null + parameters: unknown + input_schema: unknown +} + +export type AppDescribeObject = { + mode: () => string + raw: () => AppDescribePayload +} + +export function newAppDescribeObject(meta: AppMeta): AppDescribeObject { + const payload: AppDescribePayload = { + info: meta.info, + parameters: meta.parameters, + input_schema: meta.inputSchema, + } + return { + mode: () => APP_DESCRIBE_MODE_KEY, + raw: () => payload, + } +} + +export const appDescribeTextHandler: TextHandler = { + render(raw): string { + const payload = raw as AppDescribePayload + const lines: string[] = [] + if (payload.info !== null) { + const info = payload.info + const rows: [string, string][] = [ + ['Name', info.name], + ['ID', info.id], + ['Mode', info.mode], + ['Author', info.author], + ['Updated', info.updated_at ?? ''], + ['Service API', info.service_api_enabled ? 
'true' : 'false'], + ['Tags', joinTags(info.tags)], + ] + if (info.description !== '') + rows.push(['Description', info.description]) + if (info.is_agent) + rows.push(['Agent', 'true']) + lines.push(...alignedRows(rows)) + } + if (payload.parameters !== null && payload.parameters !== undefined) { + lines.push('Parameters:') + const indented = JSON.stringify(payload.parameters, null, 2) + .split('\n') + .map(l => ` ${l}`) + .join('\n') + lines.push(indented) + } + return `${lines.join('\n')}\n` + }, +} + +function joinTags(tags: readonly Tag[]): string { + if (tags.length === 0) + return '' + return tags.map(t => t.name).join(',') +} + +function alignedRows(rows: readonly [string, string][]): string[] { + const widest = rows.reduce((m, [k]) => Math.max(m, k.length), 0) + return rows.map(([k, v]) => `${`${k}:`.padEnd(widest + 2)}${v}`) +} diff --git a/cli/src/commands/describe/app/index.ts b/cli/src/commands/describe/app/index.ts new file mode 100644 index 0000000000..85628a4580 --- /dev/null +++ b/cli/src/commands/describe/app/index.ts @@ -0,0 +1,35 @@ +import { Args, Flags } from '@oclif/core' +import { DifyCommand } from '../../_shared/dify-command.js' +import { httpRetryFlag } from '../../_shared/global-flags.js' +import { runDescribeApp } from './run.js' + +export default class DescribeApp extends DifyCommand { + static override description = 'Describe a single app (kubectl-describe-style)' + + static override examples = [ + '<%= config.bin %> describe app app-1', + '<%= config.bin %> describe app app-1 -o json', + '<%= config.bin %> describe app app-1 --refresh', + ] + + static override args = { + id: Args.string({ description: 'app id', required: true }), + } + + static override flags = { + 'workspace': Flags.string({ description: 'workspace id (overrides DIFY_WORKSPACE_ID and stored default)' }), + 'http-retry': httpRetryFlag, + 'output': Flags.string({ char: 'o', description: 'output format (json|yaml|text)', default: '' }), + 'refresh': Flags.boolean({ 
description: 'bypass app-info cache and fetch fresh', default: false }), + } + + async run(): Promise { + const { args, flags } = await this.parse(DescribeApp) + const format = flags.output + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], withCache: true, format }) + process.stdout.write(await runDescribeApp( + { appId: args.id, workspace: flags.workspace, format, refresh: flags.refresh }, + { bundle: ctx.bundle, http: ctx.http, host: ctx.host, io: ctx.io, cache: ctx.cache }, + )) + } +} diff --git a/cli/src/commands/describe/app/print-flags.ts b/cli/src/commands/describe/app/print-flags.ts new file mode 100644 index 0000000000..0a2f47737f --- /dev/null +++ b/cli/src/commands/describe/app/print-flags.ts @@ -0,0 +1,19 @@ +import type { PrintFlags } from '../../../printers/printer.js' +import { JsonYamlPrintFlags } from '../../../printers/format-json-yaml.js' +import { TextPrintFlags } from '../../../printers/format-text.js' +import { CompositePrintFlags } from '../../../printers/printer.js' +import { APP_DESCRIBE_MODE_KEY, appDescribeTextHandler } from './handlers.js' + +export class AppDescribePrintFlags extends CompositePrintFlags { + private readonly jsonYaml = new JsonYamlPrintFlags() + private readonly text = new TextPrintFlags() + + constructor() { + super() + this.text.register(appDescribeTextHandler, APP_DESCRIBE_MODE_KEY) + } + + protected families(): readonly PrintFlags[] { + return [this.jsonYaml, this.text] + } +} diff --git a/cli/src/commands/describe/app/run.test.ts b/cli/src/commands/describe/app/run.test.ts new file mode 100644 index 0000000000..1f06d765e7 --- /dev/null +++ b/cli/src/commands/describe/app/run.test.ts @@ -0,0 +1,121 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../auth/hosts.js' +import { mkdtemp, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, 
expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { loadAppInfoCache } from '../../../cache/app-info.js' +import { createClient } from '../../../http/client.js' +import { runDescribeApp } from './run.js' + +function bundle(): HostsBundle { + return { + current_host: 'http://localhost', + token_storage: 'file', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 't@d.ai', name: 'T' }, + workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +describe('runDescribeApp', () => { + let mock: DifyMock + let dir: string + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + dir = await mkdtemp(join(tmpdir(), 'difyctl-desc-')) + }) + afterEach(async () => { + await mock.stop() + await rm(dir, { recursive: true, force: true }) + }) + + it('text: renders kubectl-describe-style for chat app', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const out = await runDescribeApp( + { appId: 'app-1' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + expect(out).toContain('Name:') + expect(out).toContain('Greeter') + expect(out).toContain('ID:') + expect(out).toContain('app-1') + expect(out).toContain('Mode:') + expect(out).toContain('chat') + expect(out).toContain('Service API:') + expect(out).toContain('Tags:') + expect(out).toContain('demo') + expect(out).toContain('Description:') + expect(out).toContain('Parameters:') + }) + + it('text: agent app shows Agent: true', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const out = await runDescribeApp( + { appId: 'app-4', workspace: 'ws-2' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + 
expect(out).toContain('Agent:') + expect(out).toContain('true') + }) + + it('json: passes through DescribeResponse-shaped meta', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const out = await runDescribeApp( + { appId: 'app-1', format: 'json' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + const parsed = JSON.parse(out) as { info: { id: string }, parameters: unknown } + expect(parsed.info.id).toBe('app-1') + expect(parsed.parameters).toBeDefined() + }) + + it('yaml: renders YAML', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + const out = await runDescribeApp( + { appId: 'app-1', format: 'yaml' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + expect(out).toContain('info:') + expect(out).toContain('id: app-1') + }) + + it('refresh: bypasses cache', async () => { + const cache = await loadAppInfoCache({ configDir: dir }) + await runDescribeApp( + { appId: 'app-1' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + const before = cache.get(mock.url, 'app-1') + expect(before).toBeDefined() + await runDescribeApp( + { appId: 'app-1', refresh: true }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, cache }, + ) + const after = cache.get(mock.url, 'app-1') + expect(after?.fetchedAt).not.toBe(before?.fetchedAt ?? 
'') + }) + + it('rejects unknown format', async () => { + await expect(runDescribeApp( + { appId: 'app-1', format: 'bogus' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url }, + )).rejects.toThrow(/not supported/) + }) + + it('unknown app id surfaces as error', async () => { + await expect(runDescribeApp( + { appId: 'nope' }, + { + bundle: bundle(), + http: createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 }), + host: mock.url, + }, + )).rejects.toThrow() + }) +}) diff --git a/cli/src/commands/describe/app/run.ts b/cli/src/commands/describe/app/run.ts new file mode 100644 index 0000000000..1a851bad60 --- /dev/null +++ b/cli/src/commands/describe/app/run.ts @@ -0,0 +1,47 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { AppInfoCache } from '../../../cache/app-info.js' +import type { IOStreams } from '../../../io/streams.js' +import { AppMetaClient } from '../../../api/app-meta.js' +import { AppsClient } from '../../../api/apps.js' +import { runWithSpinner } from '../../../io/spinner.js' +import { nullStreams } from '../../../io/streams.js' +import { FieldInfo, FieldInputSchema, FieldParameters } from '../../../types/app.js' +import { resolveWorkspaceId } from '../../../workspace/resolver.js' +import { newAppDescribeObject } from './handlers.js' +import { AppDescribePrintFlags } from './print-flags.js' + +export type DescribeAppOptions = { + readonly appId: string + readonly workspace?: string + readonly format?: string + readonly refresh?: boolean +} + +export type DescribeAppDeps = { + readonly bundle: HostsBundle + readonly http: KyInstance + readonly host: string + readonly io?: IOStreams + readonly cache?: AppInfoCache + readonly envLookup?: (k: string) => string | undefined +} + +export async function runDescribeApp(opts: DescribeAppOptions, deps: DescribeAppDeps): Promise { + const env = deps.envLookup ?? 
((k: string) => process.env[k]) + const wsId = resolveWorkspaceId({ flag: opts.workspace, env: env('DIFY_WORKSPACE_ID'), bundle: deps.bundle }) + const apps = new AppsClient(deps.http) + const meta = new AppMetaClient({ apps, host: deps.host, cache: deps.cache }) + const format = opts.format ?? '' + const io = deps.io ?? nullStreams() + const result = await runWithSpinner( + { io, label: 'Fetching app details' }, + async () => { + if (opts.refresh === true) + await meta.invalidate(opts.appId) + return meta.get(opts.appId, wsId, [FieldInfo, FieldParameters, FieldInputSchema]) + }, + ) + const printer = new AppDescribePrintFlags().toPrinter(format) + return printer.print(newAppDescribeObject(result)) +} diff --git a/cli/src/commands/env/list/index.ts b/cli/src/commands/env/list/index.ts new file mode 100644 index 0000000000..c825dccaa2 --- /dev/null +++ b/cli/src/commands/env/list/index.ts @@ -0,0 +1,21 @@ +import { Flags } from '@oclif/core' +import { DifyCommand } from '../../_shared/dify-command.js' +import { runEnvList } from './run-list.js' + +export default class EnvList extends DifyCommand { + static override description = 'Show every DIFY_* env var difyctl reads' + + static override examples = [ + '<%= config.bin %> env list', + '<%= config.bin %> env list --json', + ] + + static override flags = { + json: Flags.boolean({ description: 'emit JSON', default: false }), + } + + async run(): Promise { + const { flags } = await this.parse(EnvList) + process.stdout.write(runEnvList({ json: flags.json })) + } +} diff --git a/cli/src/commands/env/list/run-list.test.ts b/cli/src/commands/env/list/run-list.test.ts new file mode 100644 index 0000000000..01e6dfcfd0 --- /dev/null +++ b/cli/src/commands/env/list/run-list.test.ts @@ -0,0 +1,60 @@ +import { describe, expect, it } from 'vitest' +import { runEnvList } from './run-list.js' + +const stub = (overrides: Record = {}) => (name: string) => overrides[name] + +describe('runEnvList', () => { + it('text: header is NAME 
VALUE DESCRIPTION', () => { + const out = runEnvList({ lookup: stub() }) + expect(out.split('\n')[0]).toMatch(/^NAME\s+VALUE\s+DESCRIPTION$/) + }) + + it('text: for unset non-sensitive var', () => { + const out = runEnvList({ lookup: stub() }) + const hostLine = out.split('\n').find(l => l.startsWith('DIFY_HOST'))! + expect(hostLine).toContain('') + }) + + it('text: prints actual value for set non-sensitive var', () => { + const out = runEnvList({ lookup: stub({ DIFY_HOST: 'https://acme' }) }) + const hostLine = out.split('\n').find(l => l.startsWith('DIFY_HOST'))! + expect(hostLine).toContain('https://acme') + }) + + it('text: for set sensitive var (token never echoed)', () => { + const out = runEnvList({ lookup: stub({ DIFY_TOKEN: 'dfoa_secret' }) }) + const tokLine = out.split('\n').find(l => l.startsWith('DIFY_TOKEN'))! + expect(tokLine).toContain('') + expect(tokLine).not.toContain('dfoa_secret') + }) + + it('text: for unset sensitive var', () => { + const out = runEnvList({ lookup: stub() }) + const tokLine = out.split('\n').find(l => l.startsWith('DIFY_TOKEN'))! + expect(tokLine).toContain('') + }) + + it('text: rows are sorted alphabetically by name', () => { + const out = runEnvList({ lookup: stub() }) + const lines = out.trim().split('\n').slice(1).map(l => l.split(/\s+/)[0]) + const sorted = [...lines].sort() + expect(lines).toEqual(sorted) + }) + + it('json: emits array with name/description/sensitive/value fields', () => { + const out = runEnvList({ json: true, lookup: stub({ DIFY_HOST: 'https://acme', DIFY_TOKEN: 'dfoa_x' }) }) + const parsed = JSON.parse(out) as Array<{ name: string, sensitive: boolean, value: string }> + expect(parsed.length).toBeGreaterThan(0) + const host = parsed.find(r => r.name === 'DIFY_HOST')! + expect(host.sensitive).toBe(false) + expect(host.value).toBe('https://acme') + const tok = parsed.find(r => r.name === 'DIFY_TOKEN')! 
+ expect(tok.sensitive).toBe(true) + expect(tok.value).toBe('') + }) + + it('json: trailing newline matches Go encoder.Encode', () => { + const out = runEnvList({ json: true, lookup: stub() }) + expect(out.endsWith('\n')).toBe(true) + }) +}) diff --git a/cli/src/commands/env/list/run-list.ts b/cli/src/commands/env/list/run-list.ts new file mode 100644 index 0000000000..2fce948fc5 --- /dev/null +++ b/cli/src/commands/env/list/run-list.ts @@ -0,0 +1,73 @@ +import { ENV_REGISTRY } from '../../../env/registry.js' + +export type EnvLookup = (name: string) => string | undefined + +export type RunEnvListOptions = { + readonly json?: boolean + readonly lookup?: EnvLookup +} + +export type EnvListJsonRow = { + name: string + description: string + sensitive: boolean + value: string +} + +const COLUMN_PADDING = 2 + +export function runEnvList(opts: RunEnvListOptions = {}): string { + const lookup = opts.lookup ?? defaultLookup + if (opts.json) { + const rows: EnvListJsonRow[] = ENV_REGISTRY.map(v => ({ + name: v.name, + description: v.description, + sensitive: v.sensitive ?? false, + value: displayValue(v.name, v.sensitive ?? false, lookup), + })) + return `${JSON.stringify(rows, null, 2)}\n` + } + const header: readonly string[] = ['NAME', 'VALUE', 'DESCRIPTION'] + const dataRows = ENV_REGISTRY.map(v => [ + v.name, + displayValue(v.name, v.sensitive ?? false, lookup), + v.description, + ]) + return renderTable([header, ...dataRows]) +} + +function displayValue(name: string, sensitive: boolean, lookup: EnvLookup): string { + const raw = lookup(name) ?? '' + if (sensitive) + return raw === '' ? '' : '' + return raw === '' ? '' : raw +} + +function renderTable(rows: readonly (readonly string[])[]): string { + if (rows.length === 0) + return '' + const cols = rows[0]?.length ?? 0 + const widths: number[] = Array.from({ length: cols }, () => 0) + for (const row of rows) { + for (let i = 0; i < cols; i++) { + const cell = row[i] ?? '' + if (cell.length > (widths[i] ?? 
0)) + widths[i] = cell.length + } + } + let out = '' + for (const row of rows) { + const parts: string[] = [] + for (let i = 0; i < cols; i++) { + const cell = row[i] ?? '' + const pad = i === cols - 1 ? '' : ' '.repeat((widths[i] ?? 0) - cell.length + COLUMN_PADDING) + parts.push(`${cell}${pad}`) + } + out += `${parts.join('').trimEnd()}\n` + } + return out +} + +function defaultLookup(name: string): string | undefined { + return process.env[name] +} diff --git a/cli/src/commands/get/app/handlers.ts b/cli/src/commands/get/app/handlers.ts new file mode 100644 index 0000000000..736828df74 --- /dev/null +++ b/cli/src/commands/get/app/handlers.ts @@ -0,0 +1,58 @@ +import type { TableColumn, TableHandler, TableRow } from '../../../printers/format-table.js' +import type { ListResponse, Tag } from '../../../types/app.js' +import { isPayloadShape } from './payload-shape.js' + +export const APP_MODE_KEY = 'app' + +export type AppObject = { + mode: () => string + raw: () => ListResponse +} + +export function newAppObject(env: ListResponse): AppObject { + return { + mode: () => APP_MODE_KEY, + raw: () => env, + } +} + +const APP_COLUMNS: readonly TableColumn[] = [ + { name: 'NAME', priority: 0 }, + { name: 'ID', priority: 0 }, + { name: 'MODE', priority: 0 }, + { name: 'TAGS', priority: 0 }, + { name: 'UPDATED', priority: 0 }, + { name: 'AUTHOR', priority: 1 }, + { name: 'WORKSPACE', priority: 1 }, +] + +export const appTableHandler: TableHandler = { + columns: () => APP_COLUMNS, + rows: (raw): readonly TableRow[] => { + if (!isPayloadShape(raw, 'data')) + throw new Error('get/app table: unexpected payload shape') + return raw.data.map(r => [ + r.name, + r.id, + r.mode, + joinTags(r.tags), + r.updated_at ?? '', + r.created_by_name ?? '', + r.workspace_name ?? 
'', + ]) + }, +} + +export const appNameHandler = { + id(raw: unknown): string { + if (!isPayloadShape(raw, 'data')) + throw new Error('get/app name: unexpected payload shape') + if (raw.data.length === 0) + return '' + return raw.data.map(r => r.id).join('\n') + }, +} + +function joinTags(tags: readonly Tag[]): string { + return tags.map(t => t.name).join(',') +} diff --git a/cli/src/commands/get/app/index.ts b/cli/src/commands/get/app/index.ts new file mode 100644 index 0000000000..a330d8dc08 --- /dev/null +++ b/cli/src/commands/get/app/index.ts @@ -0,0 +1,52 @@ +import { Args, Flags } from '@oclif/core' +import { DifyCommand } from '../../_shared/dify-command.js' +import { httpRetryFlag } from '../../_shared/global-flags.js' +import { runGetApp } from './run.js' + +export default class GetApp extends DifyCommand { + static override description = 'List apps or describe one app\'s basic info' + + static override examples = [ + '<%= config.bin %> get app', + '<%= config.bin %> get app app-1', + '<%= config.bin %> get app -o json', + '<%= config.bin %> get app -A', + ] + + static override args = { + id: Args.string({ description: 'app id', required: false }), + } + + static override flags = { + 'workspace': Flags.string({ description: 'workspace id (overrides DIFY_WORKSPACE_ID and stored default)' }), + 'all-workspaces': Flags.boolean({ + char: 'A', + description: 'list apps across every workspace the bearer can see', + default: false, + }), + 'page': Flags.integer({ description: 'page number', default: 1 }), + 'limit': Flags.string({ description: 'page size [1..200]' }), + 'mode': Flags.string({ description: 'filter by app mode (chat|completion|workflow|agent-chat|advanced-chat)' }), + 'name': Flags.string({ description: 'filter by app name (server-side substring)' }), + 'tag': Flags.string({ description: 'filter by tag name (server-side exact match)' }), + 'http-retry': httpRetryFlag, + 'output': Flags.string({ char: 'o', description: 'output format 
(json|yaml|name|wide)', default: '' }), + } + + async run(): Promise { + const { args, flags } = await this.parse(GetApp) + const format = flags.output + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], format }) + process.stdout.write(await runGetApp({ + appId: args.id, + workspace: flags.workspace, + allWorkspaces: flags['all-workspaces'], + page: flags.page, + limitRaw: flags.limit, + mode: flags.mode, + name: flags.name, + tag: flags.tag, + format, + }, { bundle: ctx.bundle, http: ctx.http, io: ctx.io })) + } +} diff --git a/cli/src/commands/get/app/payload-shape.ts b/cli/src/commands/get/app/payload-shape.ts new file mode 100644 index 0000000000..53f638eb86 --- /dev/null +++ b/cli/src/commands/get/app/payload-shape.ts @@ -0,0 +1,5 @@ +export function isPayloadShape(value: unknown, requiredKey: keyof T): value is T { + return typeof value === 'object' + && value !== null + && requiredKey in value +} diff --git a/cli/src/commands/get/app/print-flags.ts b/cli/src/commands/get/app/print-flags.ts new file mode 100644 index 0000000000..5813312052 --- /dev/null +++ b/cli/src/commands/get/app/print-flags.ts @@ -0,0 +1,22 @@ +import type { PrintFlags } from '../../../printers/printer.js' +import { JsonYamlPrintFlags } from '../../../printers/format-json-yaml.js' +import { NamePrintFlags } from '../../../printers/format-name.js' +import { TablePrintFlags } from '../../../printers/format-table.js' +import { CompositePrintFlags } from '../../../printers/printer.js' +import { APP_MODE_KEY, appNameHandler, appTableHandler } from './handlers.js' + +export class AppPrintFlags extends CompositePrintFlags { + private readonly jsonYaml = new JsonYamlPrintFlags() + private readonly table = new TablePrintFlags() + private readonly name = new NamePrintFlags() + + constructor() { + super() + this.table.register(appTableHandler, APP_MODE_KEY) + this.name.register(appNameHandler, APP_MODE_KEY) + } + + protected families(): readonly PrintFlags[] { + return 
[this.jsonYaml, this.name, this.table] + } +} diff --git a/cli/src/commands/get/app/run.test.ts b/cli/src/commands/get/app/run.test.ts new file mode 100644 index 0000000000..ea2408ea8a --- /dev/null +++ b/cli/src/commands/get/app/run.test.ts @@ -0,0 +1,118 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../auth/hosts.js' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { createClient } from '../../../http/client.js' +import { runGetApp } from './run.js' + +const baseBundle: HostsBundle = { + current_host: '127.0.0.1', + scheme: 'http', + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + token_storage: 'file', + tokens: { bearer: 'dfoa_test' }, +} + +describe('runGetApp', () => { + let mock: DifyMock + + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + }) + + afterEach(async () => { + await mock.stop() + }) + + function http() { + return createClient({ host: mock.url, bearer: 'dfoa_test' }) + } + + it('list (no id, default format) renders table with NAME ID MODE TAGS UPDATED', async () => { + const out = await runGetApp({}, { bundle: baseBundle, http: http() }) + expect(out).toMatch(/^NAME\s+ID\s+MODE\s+TAGS\s+UPDATED/) + expect(out).toContain('Greeter') + expect(out).toContain('app-1') + expect(out).toContain('chat') + expect(out).toContain('demo') + expect(out).toContain('Workflow') + expect(out).not.toContain('app-3') + }) + + it('by-id (single) renders 1-row table', async () => { + const out = await runGetApp({ appId: 'app-1' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('Greeter') + expect(out).toContain('app-1') + 
expect(out).not.toContain('Workflow') + }) + + it('--mode filters server-side', async () => { + const out = await runGetApp({ mode: 'workflow' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('Workflow') + expect(out).not.toContain('Greeter') + }) + + it('--tag filters server-side', async () => { + const out = await runGetApp({ tag: 'demo' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('Greeter') + expect(out).not.toContain('Workflow') + }) + + it('-A all-workspaces aggregates across workspaces sorted by id', async () => { + const out = await runGetApp({ allWorkspaces: true }, { bundle: baseBundle, http: http() }) + expect(out).toContain('app-1') + expect(out).toContain('app-2') + expect(out).toContain('app-3') + const idxApp1 = out.indexOf('app-1') + const idxApp3 = out.indexOf('app-3') + expect(idxApp1).toBeLessThan(idxApp3) + }) + + it('-o json emits parseable JSON envelope', async () => { + const out = await runGetApp({ format: 'json' }, { bundle: baseBundle, http: http() }) + const parsed = JSON.parse(out) as { data: Array<{ id: string }>, total: number } + expect(parsed.data).toHaveLength(2) + expect(parsed.data.map(r => r.id).sort()).toEqual(['app-1', 'app-2']) + }) + + it('-o yaml emits YAML envelope', async () => { + const out = await runGetApp({ format: 'yaml' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('data:') + expect(out).toContain('id: app-1') + }) + + it('-o name emits ids one per line', async () => { + const out = await runGetApp({ format: 'name' }, { bundle: baseBundle, http: http() }) + expect(out.trim().split('\n').sort()).toEqual(['app-1', 'app-2']) + }) + + it('-o wide includes AUTHOR and WORKSPACE columns', async () => { + const out = await runGetApp({ format: 'wide' }, { bundle: baseBundle, http: http() }) + expect(out).toMatch(/^NAME\s+ID\s+MODE\s+TAGS\s+UPDATED\s+AUTHOR\s+WORKSPACE/) + expect(out).toContain('tester') + expect(out).toContain('Default') + }) + + it('rejects unknown 
format', async () => { + await expect(runGetApp({ format: 'bogus' }, { bundle: baseBundle, http: http() })) + .rejects + .toThrow(/not supported/) + }) + + it('--workspace flag overrides bundle default', async () => { + const out = await runGetApp({ workspace: 'ws-2' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('app-3') + expect(out).toContain('OtherWS Bot') + expect(out).not.toContain('Greeter') + }) + + it('throws NotLoggedIn-equivalent when no workspace can be resolved', async () => { + const minimal: HostsBundle = { current_host: 'h', token_storage: 'file' } + await expect(runGetApp({}, { bundle: minimal, http: http() })).rejects.toThrow(/no workspace/) + }) +}) diff --git a/cli/src/commands/get/app/run.ts b/cli/src/commands/get/app/run.ts new file mode 100644 index 0000000000..908b6ca540 --- /dev/null +++ b/cli/src/commands/get/app/run.ts @@ -0,0 +1,165 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { IOStreams } from '../../../io/streams.js' +import type { DescribeResponse, ListResponse } from '../../../types/app.js' +import { AppsClient } from '../../../api/apps.js' +import { WorkspacesClient } from '../../../api/workspaces.js' +import { runWithSpinner } from '../../../io/spinner.js' +import { nullStreams } from '../../../io/streams.js' +import { LIMIT_DEFAULT, parseLimit } from '../../../limit/limit.js' +import { resolveWorkspaceId } from '../../../workspace/resolver.js' +import { newAppObject } from './handlers.js' +import { AppPrintFlags } from './print-flags.js' + +export type GetAppOptions = { + readonly appId?: string + readonly workspace?: string + readonly allWorkspaces?: boolean + readonly page?: number + readonly limitRaw?: string + readonly mode?: string + readonly name?: string + readonly tag?: string + readonly format?: string +} + +export type GetAppDeps = { + readonly bundle: HostsBundle + readonly http: KyInstance + readonly io?: IOStreams + readonly 
envLookup?: (k: string) => string | undefined + readonly appsFactory?: (http: KyInstance) => AppsClient + readonly workspacesFactory?: (http: KyInstance) => WorkspacesClient +} + +const ALL_WORKSPACES_CONCURRENCY = 4 + +export async function runGetApp(opts: GetAppOptions, deps: GetAppDeps): Promise { + const env = deps.envLookup ?? ((k: string) => process.env[k]) + const appsFactory = deps.appsFactory ?? ((h: KyInstance) => new AppsClient(h)) + const wsFactory = deps.workspacesFactory ?? ((h: KyInstance) => new WorkspacesClient(h)) + + const apps = appsFactory(deps.http) + const pageSize = resolveLimit(opts.limitRaw, env) + const page = opts.page === undefined || opts.page <= 0 ? 1 : opts.page + const format = opts.format ?? '' + const label = opts.appId !== undefined && opts.appId !== '' ? 'Fetching app' : 'Fetching apps' + const io = deps.io ?? nullStreams() + + const envelope = await runWithSpinner( + { io, label }, + async (): Promise => { + if (opts.allWorkspaces === true) { + const ws = wsFactory(deps.http) + return runAllWorkspaces(apps, ws, opts, page, pageSize) + } + if (opts.appId !== undefined && opts.appId !== '') { + const wsId = resolveWorkspaceId({ flag: opts.workspace, env: env('DIFY_WORKSPACE_ID'), bundle: deps.bundle }) + const wsName = workspaceNameForId(deps.bundle, wsId) + const desc = await apps.describe(opts.appId, wsId, ['info']) + return describeToEnvelope(desc, wsId, wsName) + } + const wsId = resolveWorkspaceId({ flag: opts.workspace, env: env('DIFY_WORKSPACE_ID'), bundle: deps.bundle }) + return apps.list({ + workspaceId: wsId, + page, + limit: pageSize, + mode: opts.mode, + name: opts.name, + tag: opts.tag, + }) + }, + ) + + const printer = new AppPrintFlags().toPrinter(format) + return printer.print(newAppObject(envelope)) +} + +function resolveLimit(raw: string | undefined, env: (k: string) => string | undefined): number { + if (raw !== undefined && raw !== '') + return parseLimit(raw, '--limit') + const envValue = env('DIFY_LIMIT') + 
if (envValue !== undefined && envValue !== '') + return parseLimit(envValue, 'DIFY_LIMIT') + return LIMIT_DEFAULT +} + +function describeToEnvelope(desc: DescribeResponse, wsId: string, wsName: string): ListResponse { + if (desc.info === null) { + return { page: 1, limit: 1, total: 0, has_more: false, data: [] } + } + return { + page: 1, + limit: 1, + total: 1, + has_more: false, + data: [{ + id: desc.info.id, + name: desc.info.name, + description: desc.info.description, + mode: desc.info.mode, + tags: desc.info.tags, + updated_at: desc.info.updated_at, + created_by_name: desc.info.author === '' ? null : desc.info.author, + workspace_id: wsId, + workspace_name: wsName === '' ? null : wsName, + }], + } +} + +function workspaceNameForId(b: HostsBundle, id: string): string { + if (id === '') + return '' + if (b.workspace?.id === id) + return b.workspace.name + for (const w of b.available_workspaces ?? []) { + if (w.id === id) + return w.name + } + return '' +} + +async function runAllWorkspaces( + apps: AppsClient, + ws: WorkspacesClient, + opts: GetAppOptions, + page: number, + limit: number, +): Promise { + const wsResp = await ws.list() + if (wsResp.workspaces.length === 0) + return { page: 1, limit, total: 0, has_more: false, data: [] } + + const merged: ListResponse = { page: 1, limit, total: 0, has_more: false, data: [] } + const queue = [...wsResp.workspaces] + const workers: Promise[] = [] + + const fetchOne = async (wsId: string): Promise => { + const env = await apps.list({ + workspaceId: wsId, + page, + limit, + mode: opts.mode, + name: opts.name, + tag: opts.tag, + }) + merged.total += env.total + merged.data = [...merged.data, ...env.data] + } + + const runner = async (): Promise => { + while (true) { + const next = queue.shift() + if (next === undefined) + return + await fetchOne(next.id) + } + } + + const N = Math.min(ALL_WORKSPACES_CONCURRENCY, wsResp.workspaces.length) + for (let i = 0; i < N; i++) workers.push(runner()) + await Promise.all(workers) + 
+ merged.data = [...merged.data].sort((a, b) => a.id.localeCompare(b.id)) + return merged +} diff --git a/cli/src/commands/get/workspace/handlers.test.ts b/cli/src/commands/get/workspace/handlers.test.ts new file mode 100644 index 0000000000..8d4b870e3a --- /dev/null +++ b/cli/src/commands/get/workspace/handlers.test.ts @@ -0,0 +1,47 @@ +import type { WorkspaceListResponse } from '../../../types/workspace.js' +import { describe, expect, it } from 'vitest' +import { newWorkspaceObject, WORKSPACE_MODE_KEY, workspaceNameHandler, workspaceTableHandler } from './handlers.js' + +function env(): WorkspaceListResponse { + return { + workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner', status: 'normal', current: true }, + { id: 'ws-2', name: 'Other', role: 'normal', status: 'normal', current: false }, + ], + } +} + +describe('get/workspace handlers', () => { + it('newWorkspaceObject mode = workspace + raw passthrough', () => { + const obj = newWorkspaceObject(env()) + expect(obj.mode()).toBe(WORKSPACE_MODE_KEY) + expect(obj.raw().workspaces[0]?.id).toBe('ws-1') + }) + + it('workspaceTableHandler marks current via server flag', () => { + const rows = workspaceTableHandler('').rows(env()) + expect(rows[0]?.at(-1)).toBe('*') + expect(rows[1]?.at(-1)).toBe('') + }) + + it('workspaceTableHandler marks current via currentId fallback', () => { + const e: WorkspaceListResponse = { + workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner', status: 'normal', current: false }, + { id: 'ws-2', name: 'Other', role: 'normal', status: 'normal', current: false }, + ], + } + const rows = workspaceTableHandler('ws-2').rows(e) + expect(rows[0]?.at(-1)).toBe('') + expect(rows[1]?.at(-1)).toBe('*') + }) + + it('workspaceTableHandler emits ID NAME ROLE STATUS CURRENT row order', () => { + const rows = workspaceTableHandler('').rows(env()) + expect(rows[0]).toEqual(['ws-1', 'Default', 'owner', 'normal', '*']) + }) + + it('workspaceNameHandler returns ids joined by newline', () => { 
+ expect(workspaceNameHandler.id(env())).toBe('ws-1\nws-2') + }) +}) diff --git a/cli/src/commands/get/workspace/handlers.ts b/cli/src/commands/get/workspace/handlers.ts new file mode 100644 index 0000000000..2167ae8fd8 --- /dev/null +++ b/cli/src/commands/get/workspace/handlers.ts @@ -0,0 +1,52 @@ +import type { NameHandler } from '../../../printers/format-name.js' +import type { TableColumn, TableHandler, TableRow } from '../../../printers/format-table.js' +import type { WorkspaceListResponse } from '../../../types/workspace.js' +import { isPayloadShape } from '../app/payload-shape.js' + +export const WORKSPACE_MODE_KEY = 'workspace' +const CURRENT_MARKER = '*' + +export type WorkspaceObject = { + mode: () => string + raw: () => WorkspaceListResponse +} + +export function newWorkspaceObject(env: WorkspaceListResponse): WorkspaceObject { + return { + mode: () => WORKSPACE_MODE_KEY, + raw: () => env, + } +} + +const WORKSPACE_COLUMNS: readonly TableColumn[] = [ + { name: 'ID', priority: 0 }, + { name: 'NAME', priority: 0 }, + { name: 'ROLE', priority: 0 }, + { name: 'STATUS', priority: 0 }, + { name: 'CURRENT', priority: 0 }, +] + +export function workspaceTableHandler(currentId: string): TableHandler { + return { + columns: () => WORKSPACE_COLUMNS, + rows: (raw): readonly TableRow[] => { + if (!isPayloadShape(raw, 'workspaces')) + throw new Error('get/workspace table: unexpected payload shape') + return raw.workspaces.map(w => [ + w.id, + w.name, + w.role, + w.status, + w.current || (currentId !== '' && w.id === currentId) ? 
CURRENT_MARKER : '', + ]) + }, + } +} + +export const workspaceNameHandler: NameHandler = { + id(raw: unknown): string { + if (!isPayloadShape(raw, 'workspaces')) + throw new Error('get/workspace name: unexpected payload shape') + return raw.workspaces.map(w => w.id).join('\n') + }, +} diff --git a/cli/src/commands/get/workspace/index.ts b/cli/src/commands/get/workspace/index.ts new file mode 100644 index 0000000000..de66a0b6d1 --- /dev/null +++ b/cli/src/commands/get/workspace/index.ts @@ -0,0 +1,26 @@ +import { Flags } from '@oclif/core' +import { DifyCommand } from '../../_shared/dify-command.js' +import { httpRetryFlag } from '../../_shared/global-flags.js' +import { runGetWorkspace } from './run.js' + +export default class GetWorkspace extends DifyCommand { + static override description = 'List workspaces visible to the current bearer' + + static override examples = [ + '<%= config.bin %> get workspace', + '<%= config.bin %> get workspace -o json', + '<%= config.bin %> get workspace -o name', + ] + + static override flags = { + 'http-retry': httpRetryFlag, + 'output': Flags.string({ char: 'o', description: 'output format (json|yaml|name|wide)', default: '' }), + } + + async run(): Promise { + const { flags } = await this.parse(GetWorkspace) + const format = flags.output + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], format }) + process.stdout.write(await runGetWorkspace({ format }, { bundle: ctx.bundle, http: ctx.http, io: ctx.io })) + } +} diff --git a/cli/src/commands/get/workspace/print-flags.ts b/cli/src/commands/get/workspace/print-flags.ts new file mode 100644 index 0000000000..260edb8428 --- /dev/null +++ b/cli/src/commands/get/workspace/print-flags.ts @@ -0,0 +1,22 @@ +import type { PrintFlags } from '../../../printers/printer.js' +import { JsonYamlPrintFlags } from '../../../printers/format-json-yaml.js' +import { NamePrintFlags } from '../../../printers/format-name.js' +import { TablePrintFlags } from 
'../../../printers/format-table.js' +import { CompositePrintFlags } from '../../../printers/printer.js' +import { WORKSPACE_MODE_KEY, workspaceNameHandler, workspaceTableHandler } from './handlers.js' + +export class WorkspacePrintFlags extends CompositePrintFlags { + private readonly jsonYaml = new JsonYamlPrintFlags() + private readonly table = new TablePrintFlags() + private readonly name = new NamePrintFlags() + + constructor(currentId: string) { + super() + this.table.register(workspaceTableHandler(currentId), WORKSPACE_MODE_KEY) + this.name.register(workspaceNameHandler, WORKSPACE_MODE_KEY) + } + + protected families(): readonly PrintFlags[] { + return [this.jsonYaml, this.name, this.table] + } +} diff --git a/cli/src/commands/get/workspace/run.test.ts b/cli/src/commands/get/workspace/run.test.ts new file mode 100644 index 0000000000..1f09eb20a0 --- /dev/null +++ b/cli/src/commands/get/workspace/run.test.ts @@ -0,0 +1,98 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../auth/hosts.js' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { createClient } from '../../../http/client.js' +import { EMPTY_WORKSPACES_MESSAGE, runGetWorkspace } from './run.js' + +const baseBundle: HostsBundle = { + current_host: '127.0.0.1', + scheme: 'http', + account: { id: 'acct-1', email: 'tester@dify.ai', name: 'Test Tester' }, + workspace: { id: 'ws-1', name: 'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + token_storage: 'file', + tokens: { bearer: 'dfoa_test' }, +} + +describe('runGetWorkspace', () => { + let mock: DifyMock + + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + }) + + afterEach(async () => { + await mock.stop() + }) + + function http() { + return 
createClient({ host: mock.url, bearer: 'dfoa_test' }) + } + + it('default format renders ID NAME ROLE STATUS CURRENT table', async () => { + const out = await runGetWorkspace({}, { bundle: baseBundle, http: http() }) + expect(out).toMatch(/^ID\s+NAME\s+ROLE\s+STATUS\s+CURRENT/) + expect(out).toContain('ws-1') + expect(out).toContain('ws-2') + expect(out).toContain('Default') + expect(out).toContain('owner') + expect(out).toContain('normal') + }) + + it('marks the current workspace with *', async () => { + const out = await runGetWorkspace({}, { bundle: baseBundle, http: http() }) + for (const line of out.split('\n')) { + if (line.includes('ws-1')) + expect(line).toContain('*') + else if (line.includes('ws-2')) + expect(line).not.toContain('*') + } + }) + + it('falls back to bundle workspace.id when server current=false', async () => { + const overridden: HostsBundle = { ...baseBundle, workspace: { id: 'ws-2', name: 'Other', role: 'normal' } } + const out = await runGetWorkspace({}, { bundle: overridden, http: http() }) + for (const line of out.split('\n')) { + if (line.includes('ws-2')) + expect(line).toContain('*') + } + }) + + it('-o json emits a parseable workspaces envelope', async () => { + const out = await runGetWorkspace({ format: 'json' }, { bundle: baseBundle, http: http() }) + const parsed = JSON.parse(out) as { workspaces: Array<{ id: string, status: string, current: boolean }> } + expect(parsed.workspaces).toHaveLength(2) + expect(parsed.workspaces.map(w => w.id).sort()).toEqual(['ws-1', 'ws-2']) + expect(parsed.workspaces[0]?.status).toBe('normal') + expect(parsed.workspaces[0]?.current).toBe(true) + }) + + it('-o yaml emits "workspaces:" header', async () => { + const out = await runGetWorkspace({ format: 'yaml' }, { bundle: baseBundle, http: http() }) + expect(out).toContain('workspaces:') + expect(out).toContain('ws-1') + }) + + it('-o name emits ids joined by newline', async () => { + const out = await runGetWorkspace({ format: 'name' }, { bundle: 
baseBundle, http: http() }) + expect(out.trim().split('\n').sort()).toEqual(['ws-1', 'ws-2']) + }) + + it('empty workspaces (sso scenario) prints external-SSO message regardless of format', async () => { + mock.setScenario('sso') + const out = await runGetWorkspace({}, { bundle: baseBundle, http: http() }) + expect(out).toBe(EMPTY_WORKSPACES_MESSAGE) + const jsonOut = await runGetWorkspace({ format: 'json' }, { bundle: baseBundle, http: http() }) + expect(jsonOut).toBe(EMPTY_WORKSPACES_MESSAGE) + }) + + it('rejects unknown -o format', async () => { + await expect(runGetWorkspace({ format: 'csv' }, { bundle: baseBundle, http: http() })) + .rejects + .toThrow(/csv|not supported|format/i) + }) +}) diff --git a/cli/src/commands/get/workspace/run.ts b/cli/src/commands/get/workspace/run.ts new file mode 100644 index 0000000000..d2bbecffcb --- /dev/null +++ b/cli/src/commands/get/workspace/run.ts @@ -0,0 +1,37 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { IOStreams } from '../../../io/streams.js' +import { WorkspacesClient } from '../../../api/workspaces.js' +import { runWithSpinner } from '../../../io/spinner.js' +import { nullStreams } from '../../../io/streams.js' +import { newWorkspaceObject } from './handlers.js' +import { WorkspacePrintFlags } from './print-flags.js' + +export const EMPTY_WORKSPACES_MESSAGE + = 'No workspaces visible to this bearer (external-SSO subjects see empty data).\n' + +export type GetWorkspaceOptions = { + readonly format?: string +} + +export type GetWorkspaceDeps = { + readonly bundle: HostsBundle + readonly http: KyInstance + readonly io?: IOStreams + readonly workspacesFactory?: (http: KyInstance) => WorkspacesClient +} + +export async function runGetWorkspace(opts: GetWorkspaceOptions, deps: GetWorkspaceDeps): Promise { + const wsFactory = deps.workspacesFactory ?? ((h: KyInstance) => new WorkspacesClient(h)) + const format = opts.format ?? '' + const io = deps.io ?? 
nullStreams() + const env = await runWithSpinner( + { io, label: 'Fetching workspaces' }, + () => wsFactory(deps.http).list(), + ) + if (env.workspaces.length === 0) + return EMPTY_WORKSPACES_MESSAGE + const currentId = deps.bundle.workspace?.id ?? '' + const printer = new WorkspacePrintFlags(currentId).toPrinter(format) + return printer.print(newWorkspaceObject(env)) +} diff --git a/cli/src/commands/help/account/account.test.ts b/cli/src/commands/help/account/account.test.ts new file mode 100644 index 0000000000..162fbda78a --- /dev/null +++ b/cli/src/commands/help/account/account.test.ts @@ -0,0 +1,21 @@ +import { describe, expect, it } from 'vitest' +import { runHelpAccount } from './account.js' + +describe('runHelpAccount', () => { + it('mentions auth login device flow', () => { + expect(runHelpAccount()).toContain('difyctl auth login') + }) + + it('mentions get/describe/run app commands', () => { + const out = runHelpAccount() + expect(out).toContain('difyctl get app') + expect(out).toContain('difyctl describe app') + expect(out).toContain('difyctl run app') + }) + + it('mentions --workspace and env list pointers', () => { + const out = runHelpAccount() + expect(out).toContain('--workspace') + expect(out).toContain('difyctl env list') + }) +}) diff --git a/cli/src/commands/help/account/account.ts b/cli/src/commands/help/account/account.ts new file mode 100644 index 0000000000..8cbf5e28e0 --- /dev/null +++ b/cli/src/commands/help/account/account.ts @@ -0,0 +1,23 @@ +export const ACCOUNT_HELP_TEXT = `difyctl: account-bearer onboarding + + 1. Sign in interactively (browser device flow): + difyctl auth login + + 2. List accessible apps in your default workspace: + difyctl get app + + 3. Describe one app to see its parameters: + difyctl describe app + + 4. Run an app and capture structured output: + difyctl run app "hello" -o json + +Tips: + * Pass --workspace when you need to target a non-default workspace. 
+ * Use --stream for long-running workflow calls (post-v1.0 milestone). + * 'difyctl env list' shows every env var difyctl reads. +` + +export function runHelpAccount(): string { + return ACCOUNT_HELP_TEXT +} diff --git a/cli/src/commands/help/account/index.ts b/cli/src/commands/help/account/index.ts new file mode 100644 index 0000000000..f63324eb0d --- /dev/null +++ b/cli/src/commands/help/account/index.ts @@ -0,0 +1,14 @@ +import { DifyCommand } from '../../_shared/dify-command.js' +import { runHelpAccount } from './account.js' + +export default class HelpAccount extends DifyCommand { + static override description = 'Agent-onboarding text for account bearers (dfoa_)' + + static override examples = [ + '<%= config.bin %> help account', + ] + + async run(): Promise { + process.stdout.write(runHelpAccount()) + } +} diff --git a/cli/src/commands/help/environment/environment.test.ts b/cli/src/commands/help/environment/environment.test.ts new file mode 100644 index 0000000000..d6aad702b4 --- /dev/null +++ b/cli/src/commands/help/environment/environment.test.ts @@ -0,0 +1,25 @@ +import { describe, expect, it } from 'vitest' +import { ENV_REGISTRY } from '../../../env/registry.js' +import { runHelpEnvironment } from './environment.js' + +describe('runHelpEnvironment', () => { + it('starts with the ENVIRONMENT VARIABLES header', () => { + expect(runHelpEnvironment().startsWith('ENVIRONMENT VARIABLES\n\n')).toBe(true) + }) + + it('lists every var from ENV_REGISTRY with its description', () => { + const out = runHelpEnvironment() + for (const v of ENV_REGISTRY) { + expect(out).toContain(v.name) + expect(out).toContain(v.description) + } + }) + + it('marks sensitive vars with a never-echoed notice', () => { + const out = runHelpEnvironment() + expect(out).toContain('(treat as secret; never echoed)') + const sensitiveCount = ENV_REGISTRY.filter(v => v.sensitive).length + const noticeCount = (out.match(/treat as secret/g) ?? 
[]).length + expect(noticeCount).toBe(sensitiveCount) + }) +}) diff --git a/cli/src/commands/help/environment/environment.ts b/cli/src/commands/help/environment/environment.ts new file mode 100644 index 0000000000..279c7de6d6 --- /dev/null +++ b/cli/src/commands/help/environment/environment.ts @@ -0,0 +1,12 @@ +import { ENV_REGISTRY } from '../../../env/registry.js' + +export function runHelpEnvironment(): string { + let out = 'ENVIRONMENT VARIABLES\n\n' + for (const v of ENV_REGISTRY) { + out += ` ${v.name}\n ${v.description}\n` + if (v.sensitive) + out += ' (treat as secret; never echoed)\n' + out += '\n' + } + return out +} diff --git a/cli/src/commands/help/environment/index.ts b/cli/src/commands/help/environment/index.ts new file mode 100644 index 0000000000..3e67667f07 --- /dev/null +++ b/cli/src/commands/help/environment/index.ts @@ -0,0 +1,14 @@ +import { DifyCommand } from '../../_shared/dify-command.js' +import { runHelpEnvironment } from './environment.js' + +export default class HelpEnvironment extends DifyCommand { + static override description = 'Long-form documentation for every DIFY_* env var' + + static override examples = [ + '<%= config.bin %> help environment', + ] + + async run(): Promise { + process.stdout.write(runHelpEnvironment()) + } +} diff --git a/cli/src/commands/help/external/external.test.ts b/cli/src/commands/help/external/external.test.ts new file mode 100644 index 0000000000..9925fc6c76 --- /dev/null +++ b/cli/src/commands/help/external/external.test.ts @@ -0,0 +1,15 @@ +import { describe, expect, it } from 'vitest' +import { runHelpExternal } from './external.js' + +describe('runHelpExternal', () => { + it('mentions external bearer prefix and login flag', () => { + const out = runHelpExternal() + expect(out).toContain('dfoe_') + expect(out).toContain('--external') + expect(out).toContain('DIFY_TOKEN') + }) + + it('explains workspace empty-list expectation', () => { + expect(runHelpExternal()).toContain('get workspace') + }) +}) 
diff --git a/cli/src/commands/help/external/external.ts b/cli/src/commands/help/external/external.ts new file mode 100644 index 0000000000..19763a8b91 --- /dev/null +++ b/cli/src/commands/help/external/external.ts @@ -0,0 +1,26 @@ +export const EXTERNAL_HELP_TEXT = `difyctl: external-SSO bearer onboarding + + Most agents authenticate as a human account (see 'difyctl help account'). + External-SSO bearers (dfoe_) skip the human flow and exchange an upstream + identity for a Dify token. The CLI surfaces the same commands but a + smaller dataset: + + 1. Acquire a token through your SSO provider (out of band). + 2. Hand it to the CLI: + difyctl auth login --external --token "$DIFY_TOKEN" + + 3. List apps your subject is permitted to invoke: + difyctl get app + + 4. Run an app: + difyctl run app "hello" -o json + +Notes: + * 'difyctl get workspace' returns an empty list for external bearers — that + is expected; external subjects have no workspace membership. + * Tokens are best stored in DIFY_TOKEN; difyctl reads it on every command. 
+` + +export function runHelpExternal(): string { + return EXTERNAL_HELP_TEXT +} diff --git a/cli/src/commands/help/external/index.ts b/cli/src/commands/help/external/index.ts new file mode 100644 index 0000000000..b476cb20cb --- /dev/null +++ b/cli/src/commands/help/external/index.ts @@ -0,0 +1,14 @@ +import { DifyCommand } from '../../_shared/dify-command.js' +import { runHelpExternal } from './external.js' + +export default class HelpExternal extends DifyCommand { + static override description = 'Agent-onboarding text for external-SSO bearers (dfoe_)' + + static override examples = [ + '<%= config.bin %> help external', + ] + + async run(): Promise { + process.stdout.write(runHelpExternal()) + } +} diff --git a/cli/src/commands/run/app/_strategies/blocking.ts b/cli/src/commands/run/app/_strategies/blocking.ts new file mode 100644 index 0000000000..cfb30a9a17 --- /dev/null +++ b/cli/src/commands/run/app/_strategies/blocking.ts @@ -0,0 +1,29 @@ +import type { RunContext, RunStrategy } from './index.js' +import { buildRunBody } from '../../../../api/app-run.js' +import { runWithSpinner } from '../../../../io/spinner.js' +import { chatConversationHint, newAppRunObject, RUN_MODES } from '../handlers.js' + +const CHAT_MODES: ReadonlySet = new Set([RUN_MODES.Chat, RUN_MODES.AgentChat, RUN_MODES.AdvancedChat]) + +export class BlockingStrategy implements RunStrategy { + async execute(ctx: RunContext): Promise { + const { opts, deps, mode, format, printFlags } = ctx + const body = buildRunBody({ + message: opts.message, + inputs: opts.inputs, + conversationId: opts.conversationId, + workspaceId: opts.workspace, + }) + const resp = await runWithSpinner( + { io: deps.io, label: 'Running app', enabled: ctx.isText }, + () => ctx.runClient.runBlocking(opts.appId, body), + ) + const respMode = typeof resp.mode === 'string' && resp.mode !== '' ? 
resp.mode : mode
+    deps.io.out.write(printFlags.toPrinter(format).print(newAppRunObject(respMode, resp)))
+    if (ctx.isText && CHAT_MODES.has(respMode)) {
+      const hint = chatConversationHint(resp)
+      if (hint !== undefined)
+        deps.io.err.write(hint)
+    }
+  }
+}
diff --git a/cli/src/commands/run/app/_strategies/index.ts b/cli/src/commands/run/app/_strategies/index.ts
new file mode 100644
index 0000000000..6997d66e19
--- /dev/null
+++ b/cli/src/commands/run/app/_strategies/index.ts
@@ -0,0 +1,31 @@
+import type { AppRunClient } from '../../../../api/app-run.js'
+import type { AppRunPrintFlags } from '../print-flags.js'
+import type { RunAppDeps, RunAppOptions } from '../run.js'
+import { BlockingStrategy } from './blocking.js'
+import { StreamingStructuredStrategy } from './streaming-structured.js'
+import { StreamingTextStrategy } from './streaming-text.js'
+
+export type RunContext = {
+  readonly opts: RunAppOptions
+  readonly deps: RunAppDeps
+  readonly mode: string
+  readonly isAgent: boolean
+  readonly format: string
+  readonly isText: boolean
+  readonly runClient: AppRunClient
+  readonly printFlags: AppRunPrintFlags
+}
+
+export type RunStrategy = {
+  execute: (ctx: RunContext) => Promise<void>
+}
+
+const blocking = new BlockingStrategy()
+const streamingText = new StreamingTextStrategy()
+const streamingStructured = new StreamingStructuredStrategy()
+
+export function pickStrategy(useStream: boolean, isText: boolean): RunStrategy {
+  if (!useStream)
+    return blocking
+  return isText ?
streamingText : streamingStructured
+}
diff --git a/cli/src/commands/run/app/_strategies/streaming-structured.ts b/cli/src/commands/run/app/_strategies/streaming-structured.ts
new file mode 100644
index 0000000000..9b53c0b874
--- /dev/null
+++ b/cli/src/commands/run/app/_strategies/streaming-structured.ts
@@ -0,0 +1,28 @@
+import type { RunContext, RunStrategy } from './index.js'
+import { buildRunBody } from '../../../../api/app-run.js'
+import { newAppRunObject } from '../handlers.js'
+import { collect } from '../sse-collector.js'
+
+export class StreamingStructuredStrategy implements RunStrategy {
+  async execute(ctx: RunContext): Promise<void> {
+    const { opts, deps, mode, format, printFlags } = ctx
+    const ctrl = new AbortController()
+    const body = buildRunBody({
+      message: opts.message,
+      inputs: opts.inputs,
+      conversationId: opts.conversationId,
+      workspaceId: opts.workspace,
+      responseMode: 'streaming',
+    })
+    let resp: Record<string, unknown>
+    try {
+      const events = await ctx.runClient.runStream(opts.appId, body, { signal: ctrl.signal })
+      resp = await collect(events, mode)
+    }
+    catch (err) {
+      ctrl.abort()
+      throw err
+    }
+    deps.io.out.write(printFlags.toPrinter(format).print(newAppRunObject(mode, resp)))
+  }
+}
diff --git a/cli/src/commands/run/app/_strategies/streaming-text.ts b/cli/src/commands/run/app/_strategies/streaming-text.ts
new file mode 100644
index 0000000000..1d2ebab88e
--- /dev/null
+++ b/cli/src/commands/run/app/_strategies/streaming-text.ts
@@ -0,0 +1,33 @@
+import type { RunContext, RunStrategy } from './index.js'
+import { buildRunBody } from '../../../../api/app-run.js'
+import { decodeStreamError } from '../sse-collector.js'
+
+export class StreamingTextStrategy implements RunStrategy {
+  async execute(ctx: RunContext): Promise<void> {
+    const { opts, deps, mode, printFlags } = ctx
+    const ctrl = new AbortController()
+    const body = buildRunBody({
+      message: opts.message,
+      inputs: opts.inputs,
+      conversationId: opts.conversationId,
+      workspaceId:
opts.workspace, + responseMode: 'streaming', + }) + try { + const events = await ctx.runClient.runStream(opts.appId, body, { signal: ctrl.signal }) + const sp = printFlags.toStreamPrinter(mode) + for await (const ev of events) { + if (ev.name === 'ping') + continue + if (ev.name === 'error') + throw decodeStreamError(ev.data) + sp.onEvent(deps.io.out, deps.io.err, ev) + } + sp.onEnd(deps.io.out, deps.io.err) + } + catch (err) { + ctrl.abort() + throw err + } + } +} diff --git a/cli/src/commands/run/app/agent-guide.test.ts b/cli/src/commands/run/app/agent-guide.test.ts new file mode 100644 index 0000000000..be1deb2683 --- /dev/null +++ b/cli/src/commands/run/app/agent-guide.test.ts @@ -0,0 +1,20 @@ +import { describe, expect, it } from 'vitest' +import RunApp from './index.js' + +describe('run app agentGuide', () => { + it('exposes non-empty agentGuide string', () => { + const guide = (RunApp as unknown as { agentGuide?: string }).agentGuide + expect(typeof guide).toBe('string') + expect(guide!.length).toBeGreaterThan(0) + }) + + it('agentGuide mentions WORKFLOW section', () => { + const guide = (RunApp as unknown as { agentGuide?: string }).agentGuide! + expect(guide).toContain('WORKFLOW') + }) + + it('agentGuide mentions ERROR RECOVERY section', () => { + const guide = (RunApp as unknown as { agentGuide?: string }).agentGuide! + expect(guide).toContain('ERROR RECOVERY') + }) +}) diff --git a/cli/src/commands/run/app/guide.ts b/cli/src/commands/run/app/guide.ts new file mode 100644 index 0000000000..707a1b52a4 --- /dev/null +++ b/cli/src/commands/run/app/guide.ts @@ -0,0 +1,32 @@ +export const agentGuide = ` +WORKFLOW + 1. Discover app id and mode: + difyctl get app -o json + difyctl describe app -o json | jq '.info.mode' + + 2. Run the app: + difyctl run app "your message" + difyctl run app "your message" -o json + +APP MODES + chat / advanced-chat Conversational. Accepts --conversation to + resume an existing thread. + completion Single-turn. 
Ignores --conversation. + workflow Multi-step graph. Use --input key=val for each + input variable the workflow declares. + agent-chat Always streams regardless of --stream flag. + +FLAGS + --input key=val Pass named inputs. Repeatable. Required for + workflow apps that declare input variables. + --input language=English --input topic="AI safety" + --stream Request SSE streaming. Recommended for runs + exceeding ~30s. Agent apps stream regardless. + --conversation Resume a conversation (chat/advanced-chat only). + --workspace Target a specific workspace. + +ERROR RECOVERY + not logged in difyctl auth login + app not found (404) difyctl get app + workspace required difyctl get workspace +` diff --git a/cli/src/commands/run/app/handlers.ts b/cli/src/commands/run/app/handlers.ts new file mode 100644 index 0000000000..1853957297 --- /dev/null +++ b/cli/src/commands/run/app/handlers.ts @@ -0,0 +1,66 @@ +import type { TextHandler } from '../../../printers/format-text.js' + +export const RUN_MODES = { + Chat: 'chat', + AgentChat: 'agent-chat', + AdvancedChat: 'advanced-chat', + Completion: 'completion', + Workflow: 'workflow', +} as const + +export type RunMode = typeof RUN_MODES[keyof typeof RUN_MODES] + +export type AppRunObject = { + mode: () => string + raw: () => Record +} + +export function newAppRunObject(mode: string, resp: Record): AppRunObject { + const filled = resp.mode === undefined || resp.mode === '' ? { ...resp, mode } : resp + return { mode: () => mode, raw: () => filled } +} + +export const chatTextHandler: TextHandler = { + render(raw): string { + const resp = raw as Record + const out: string[] = [] + const answer = pickString(resp, 'answer') + if (answer !== undefined) + out.push(answer) + out.push('') + return out.join('\n') + }, +} + +export const completionTextHandler: TextHandler = { + render(raw): string { + const resp = raw as Record + const answer = pickString(resp, 'answer') + return `${answer ?? 
''}\n`
+  },
+}
+
+export const workflowTextHandler: TextHandler = {
+  render(raw): string {
+    const resp = raw as Record<string, unknown>
+    const data = resp.data
+    if (data !== null && typeof data === 'object' && 'outputs' in data) {
+      const { outputs } = data as { outputs: unknown }
+      if (outputs !== undefined)
+        return `${JSON.stringify(outputs)}\n`
+    }
+    return `${JSON.stringify(resp)}\n`
+  },
+}
+
+export function chatConversationHint(resp: Record<string, unknown>): string | undefined {
+  const cid = pickString(resp, 'conversation_id')
+  if (cid === undefined || cid === '')
+    return undefined
+  return `hint: continue this conversation with --conversation ${cid}\n`
+}
+
+function pickString(o: Record<string, unknown>, key: string): string | undefined {
+  const v = o[key]
+  return typeof v === 'string' ? v : undefined
+}
diff --git a/cli/src/commands/run/app/index.ts b/cli/src/commands/run/app/index.ts
new file mode 100644
index 0000000000..dc19470f6a
--- /dev/null
+++ b/cli/src/commands/run/app/index.ts
@@ -0,0 +1,72 @@
+import { Args, Flags } from '@oclif/core'
+import { BaseError } from '../../../errors/base.js'
+import { ErrorCode } from '../../../errors/codes.js'
+import { DifyCommand } from '../../_shared/dify-command.js'
+import { httpRetryFlag } from '../../_shared/global-flags.js'
+import { agentGuide } from './guide.js'
+import { runApp } from './run.js'
+
+export default class RunApp extends DifyCommand {
+  static override description = 'Run an app and print the response'
+  static agentGuide = agentGuide
+
+  static override examples = [
+    '<%= config.bin %> run app app-1 "hello"',
+    '<%= config.bin %> run app app-1 --input name=world',
+    '<%= config.bin %> run app app-1 --stream',
+    '<%= config.bin %> run app app-1 -o json',
+  ]
+
+  static override args = {
+    id: Args.string({ description: 'app id', required: true }),
+    message: Args.string({ description: 'user message (chat/agent-chat/advanced-chat/completion)', required: false }),
+  }
+
+  static override flags = {
+    'input': Flags.string({
description: 'app input (--input k=v, repeatable)', multiple: true, default: [] }), + 'conversation': Flags.string({ description: 'resume a chat conversation by id' }), + 'workspace': Flags.string({ description: 'workspace id (overrides DIFY_WORKSPACE_ID and stored default)' }), + 'stream': Flags.boolean({ + description: 'request streaming SSE; recommended for runs that may exceed ~30s. Agent apps stream regardless.', + default: false, + }), + 'http-retry': httpRetryFlag, + 'output': Flags.string({ char: 'o', description: 'output format (json|yaml|text)', default: '' }), + } + + async run(): Promise { + const { args, flags, raw } = await this.parse(RunApp) + const format = flags.output + const ctx = await this.authedCtx({ retryFlag: flags['http-retry'], withCache: true, format }) + const inputs = parseInputs(flags.input) + const streamSetExplicitly = raw.some(t => t.type === 'flag' && t.flag === 'stream') + await runApp( + { + appId: args.id, + message: args.message, + inputs, + conversationId: flags.conversation, + workspace: flags.workspace, + format, + stream: flags.stream, + streamSetExplicitly, + }, + { bundle: ctx.bundle, http: ctx.http, host: ctx.host, io: ctx.io, cache: ctx.cache }, + ) + } +} + +function parseInputs(raw: readonly string[]): Record { + const out: Record = {} + for (const item of raw) { + const eq = item.indexOf('=') + if (eq <= 0) { + throw new BaseError({ + code: ErrorCode.UsageInvalidFlag, + message: `--input expects key=value, got ${JSON.stringify(item)}`, + }) + } + out[item.slice(0, eq)] = item.slice(eq + 1) + } + return out +} diff --git a/cli/src/commands/run/app/print-flags.ts b/cli/src/commands/run/app/print-flags.ts new file mode 100644 index 0000000000..fb84105040 --- /dev/null +++ b/cli/src/commands/run/app/print-flags.ts @@ -0,0 +1,27 @@ +import type { PrintFlags } from '../../../printers/printer.js' +import type { StreamPrinter } from '../../../printers/stream-printer.js' +import { JsonYamlPrintFlags } from 
'../../../printers/format-json-yaml.js' +import { TextPrintFlags } from '../../../printers/format-text.js' +import { CompositePrintFlags } from '../../../printers/printer.js' +import { chatTextHandler, completionTextHandler, RUN_MODES, workflowTextHandler } from './handlers.js' +import { streamPrinterFor } from './stream-handlers.js' + +export class AppRunPrintFlags extends CompositePrintFlags { + private readonly jsonYaml = new JsonYamlPrintFlags() + private readonly text = new TextPrintFlags() + + constructor() { + super() + this.text.register(chatTextHandler, RUN_MODES.Chat, RUN_MODES.AgentChat, RUN_MODES.AdvancedChat) + this.text.register(completionTextHandler, RUN_MODES.Completion) + this.text.register(workflowTextHandler, RUN_MODES.Workflow) + } + + protected families(): readonly PrintFlags[] { + return [this.jsonYaml, this.text] + } + + toStreamPrinter(mode: string): StreamPrinter { + return streamPrinterFor(mode) + } +} diff --git a/cli/src/commands/run/app/run.test.ts b/cli/src/commands/run/app/run.test.ts new file mode 100644 index 0000000000..dbb457bc15 --- /dev/null +++ b/cli/src/commands/run/app/run.test.ts @@ -0,0 +1,172 @@ +import type { DifyMock } from '../../../../test/fixtures/dify-mock/server.js' +import type { HostsBundle } from '../../../auth/hosts.js' +import { mkdtemp, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { startMock } from '../../../../test/fixtures/dify-mock/server.js' +import { loadAppInfoCache } from '../../../cache/app-info.js' +import { createClient } from '../../../http/client.js' +import { bufferStreams } from '../../../io/streams.js' +import { runApp } from './run.js' + +function bundle(): HostsBundle { + return { + current_host: 'http://localhost', + token_storage: 'file', + tokens: { bearer: 'dfoa_test' }, + account: { id: 'acct-1', email: 't@d.ai', name: 'T' }, + workspace: { id: 'ws-1', name: 
'Default', role: 'owner' }, + available_workspaces: [ + { id: 'ws-1', name: 'Default', role: 'owner' }, + { id: 'ws-2', name: 'Other', role: 'normal' }, + ], + } +} + +describe('runApp', () => { + let mock: DifyMock + let dir: string + beforeEach(async () => { + mock = await startMock({ scenario: 'happy' }) + dir = await mkdtemp(join(tmpdir(), 'difyctl-runapp-')) + }) + afterEach(async () => { + await mock.stop() + await rm(dir, { recursive: true, force: true }) + }) + + it('chat: prints answer + conversation hint to stderr', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-1', message: 'hi' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + expect(io.outBuf()).toBe('echo: hi\n') + expect(io.errBuf()).toContain('--conversation conv-1') + }) + + it('workflow: rejects positional message with usage error', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await expect(runApp( + { appId: 'app-2', message: 'hi' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + )).rejects.toMatchObject({ code: 'usage_invalid_flag' }) + }) + + it('workflow: prints outputs JSON', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-2', inputs: { x: '1' } }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + const out = JSON.parse(io.outBuf().trim()) as { result: string } + expect(out.result).toBe('echo: ') + }) + + it('json: passes through full envelope', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-1', message: 'hi', format: 'json' }, + { bundle: bundle(), http: createClient({ host: 
mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + const parsed = JSON.parse(io.outBuf()) as { mode: string, answer: string } + expect(parsed.mode).toBe('chat') + expect(parsed.answer).toBe('echo: hi') + }) + + it('rejects unknown format', async () => { + const io = bufferStreams() + await expect(runApp( + { appId: 'app-1', format: 'bogus' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io }, + )).rejects.toThrow(/not supported/) + }) + + it('unknown app id surfaces as error', async () => { + const io = bufferStreams() + await expect(runApp( + { appId: 'nope', message: 'hi' }, + { + bundle: bundle(), + http: createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 }), + host: mock.url, + io, + }, + )).rejects.toThrow() + }) + + it('--stream chat: streams answer to stdout and hint to stderr', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-1', message: 'hi', stream: true, streamSetExplicitly: true }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + expect(io.outBuf()).toContain('echo: ') + expect(io.outBuf()).toContain('hi') + expect(io.errBuf()).toContain('--conversation conv-1') + }) + + it('--stream -o json chat: aggregates into blocking-shape envelope', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-1', message: 'hi', stream: true, streamSetExplicitly: true, format: 'json' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + const parsed = JSON.parse(io.outBuf()) as { mode: string, answer: string, conversation_id: string } + expect(parsed.mode).toBe('chat') + expect(parsed.answer).toBe('echo: hi') + expect(parsed.conversation_id).toBe('conv-1') + }) + + it('agent-chat forces 
streaming without --stream', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-4', workspace: 'ws-2', message: 'do research' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + expect(io.outBuf()).toContain('do research') + expect(io.errBuf()).toContain('thought:') + }) + + it('agent-chat with --stream=false explicitly: warns then streams', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-4', workspace: 'ws-2', message: 'go', stream: false, streamSetExplicitly: true }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + expect(io.errBuf()).toContain('agent apps require streaming') + expect(io.outBuf()).toContain('go') + expect(io.errBuf()).toContain('thought:') + }) + + it('--stream workflow -o json: aggregates from workflow_finished', async () => { + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await runApp( + { appId: 'app-2', inputs: { x: '1' }, stream: true, streamSetExplicitly: true, format: 'json' }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test' }), host: mock.url, io, cache }, + ) + const parsed = JSON.parse(io.outBuf()) as { mode: string, data: { status: string } } + expect(parsed.mode).toBe('workflow') + expect(parsed.data.status).toBe('succeeded') + }) + + it('stream-error scenario: error event surfaces typed BaseError', async () => { + mock.setScenario('stream-error') + const io = bufferStreams() + const cache = await loadAppInfoCache({ configDir: dir }) + await expect(runApp( + { appId: 'app-1', message: 'hi', stream: true, streamSetExplicitly: true }, + { bundle: bundle(), http: createClient({ host: mock.url, bearer: 'dfoa_test', retryAttempts: 0 }), host: mock.url, io, cache }, 
+ )).rejects.toMatchObject({ code: 'server_5xx' }) + }) +}) diff --git a/cli/src/commands/run/app/run.ts b/cli/src/commands/run/app/run.ts new file mode 100644 index 0000000000..e4a3b08ceb --- /dev/null +++ b/cli/src/commands/run/app/run.ts @@ -0,0 +1,68 @@ +import type { KyInstance } from 'ky' +import type { HostsBundle } from '../../../auth/hosts.js' +import type { AppInfoCache } from '../../../cache/app-info.js' +import type { IOStreams } from '../../../io/streams.js' +import { AppMetaClient } from '../../../api/app-meta.js' +import { AppRunClient } from '../../../api/app-run.js' +import { AppsClient } from '../../../api/apps.js' +import { BaseError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { FieldInfo } from '../../../types/app.js' +import { resolveWorkspaceId } from '../../../workspace/resolver.js' +import { pickStrategy } from './_strategies/index.js' +import { RUN_MODES } from './handlers.js' +import { AppRunPrintFlags } from './print-flags.js' + +export type RunAppOptions = { + readonly appId: string + readonly message?: string + readonly inputs?: Readonly> + readonly conversationId?: string + readonly workspace?: string + readonly format?: string + readonly stream?: boolean + readonly streamSetExplicitly?: boolean +} + +export type RunAppDeps = { + readonly bundle: HostsBundle + readonly http: KyInstance + readonly host: string + readonly io: IOStreams + readonly cache?: AppInfoCache + readonly envLookup?: (k: string) => string | undefined +} + +const TEXT_FORMATS = new Set(['', 'text']) + +export async function runApp(opts: RunAppOptions, deps: RunAppDeps): Promise { + const env = deps.envLookup ?? 
((k: string) => process.env[k]) + const wsId = resolveWorkspaceId({ flag: opts.workspace, env: env('DIFY_WORKSPACE_ID'), bundle: deps.bundle }) + const apps = new AppsClient(deps.http) + const meta = new AppMetaClient({ apps, host: deps.host, cache: deps.cache }) + const m = await meta.get(opts.appId, wsId, [FieldInfo]) + const mode = m.info?.mode ?? '' + if (mode === '') + throw new Error(`app ${opts.appId}: mode missing from /describe`) + + if (mode === RUN_MODES.Workflow && opts.message !== undefined && opts.message !== '') { + throw new BaseError({ + code: ErrorCode.UsageInvalidFlag, + message: 'workflow apps do not accept a positional message', + hint: 'pass workflow inputs via --input key=value (repeatable)', + }) + } + + const isAgent = m.info?.is_agent === true || mode === RUN_MODES.AgentChat + const useStream = opts.stream === true || isAgent + if (isAgent && opts.streamSetExplicitly === true && opts.stream === false) + deps.io.err.write('note: agent apps require streaming; output is collected before printing\n') + + const format = opts.format ?? 
'' + const isText = TEXT_FORMATS.has(format) + const runClient = new AppRunClient(deps.http) + const printFlags = new AppRunPrintFlags() + + const ctx = { opts, deps, mode, isAgent, format, isText, runClient, printFlags } + await pickStrategy(useStream, isText).execute(ctx) +} diff --git a/cli/src/commands/run/app/sse-collector.test.ts b/cli/src/commands/run/app/sse-collector.test.ts new file mode 100644 index 0000000000..20da843b2e --- /dev/null +++ b/cli/src/commands/run/app/sse-collector.test.ts @@ -0,0 +1,122 @@ +import type { SseEvent } from '../../../http/sse.js' +import { describe, expect, it } from 'vitest' +import { collect, collectorFor, decodeStreamError } from './sse-collector.js' + +const enc = new TextEncoder() +function ev(name: string, data: object): SseEvent { + return { name, data: enc.encode(JSON.stringify(data)) } +} + +async function* iterOf(...evs: SseEvent[]): AsyncIterable { + for (const e of evs) yield e +} + +describe('collectorFor', () => { + it('throws for unknown mode', () => { + expect(() => collectorFor('whatever')).toThrow() + }) + + it.each(['chat', 'advanced-chat', 'agent-chat', 'completion', 'workflow'])( + 'returns collector for %s', + (m) => { + expect(collectorFor(m)).toBeDefined() + }, + ) +}) + +describe('collect — chat', () => { + it('aggregates message + message_end into blocking shape', async () => { + const got = await collect(iterOf( + ev('message', { conversation_id: 'c1', message_id: 'm1', mode: 'chat', answer: 'hello ' }), + ev('message', { answer: 'world' }), + ev('message_end', { metadata: { usage: { tokens: 5 } } }), + ), 'chat') + expect(got).toMatchObject({ + mode: 'chat', + answer: 'hello world', + conversation_id: 'c1', + message_id: 'm1', + metadata: { usage: { tokens: 5 } }, + }) + }) + + it('drops ping events', async () => { + const got = await collect(iterOf( + ev('ping', {}), + ev('message', { answer: 'x' }), + ev('ping', {}), + ), 'chat') + expect(got.answer).toBe('x') + }) + + it('ignores unknown event 
names', async () => { + const got = await collect(iterOf( + ev('weird_future_event', { whatever: true }), + ev('message', { answer: 'x' }), + ), 'chat') + expect(got.answer).toBe('x') + }) +}) + +describe('collect — agent-chat', () => { + it('captures agent_thoughts', async () => { + const got = await collect(iterOf( + ev('agent_thought', { thought: 'first' }), + ev('agent_message', { answer: 'a' }), + ev('agent_thought', { thought: 'second' }), + ev('agent_message', { answer: 'b' }), + ), 'agent-chat') + expect(got.answer).toBe('ab') + expect(Array.isArray(got.agent_thoughts)).toBe(true) + expect((got.agent_thoughts as unknown[]).length).toBe(2) + }) +}) + +describe('collect — completion', () => { + it('aggregates message events into answer', async () => { + const got = await collect(iterOf( + ev('message', { mode: 'completion', message_id: 'm1', answer: 'foo' }), + ev('message', { answer: 'bar' }), + ev('message_end', { metadata: {} }), + ), 'completion') + expect(got).toMatchObject({ mode: 'completion', answer: 'foobar', message_id: 'm1' }) + }) +}) + +describe('collect — workflow', () => { + it('captures only workflow_finished payload', async () => { + const got = await collect(iterOf( + ev('workflow_started', { id: 'wf' }), + ev('node_started', { id: 'n1' }), + ev('node_finished', { id: 'n1', status: 'succeeded' }), + ev('workflow_finished', { data: { status: 'succeeded', outputs: { x: 1 } } }), + ), 'workflow') + expect(got.mode).toBe('workflow') + expect((got.data as { outputs: { x: number } }).outputs.x).toBe(1) + }) +}) + +describe('collect — error event', () => { + it('throws BaseError when error event arrives', async () => { + await expect(collect(iterOf( + ev('error', { message: 'boom', status: 503 }), + ), 'chat')).rejects.toMatchObject({ code: 'server_5xx', message: 'boom' }) + }) +}) + +describe('decodeStreamError', () => { + it('maps status >= 500 to Server5xx', () => { + const err = decodeStreamError(enc.encode(JSON.stringify({ message: 'x', 
status: 500 }))) + expect(err.code).toBe('server_5xx') + }) + + it('maps status < 500 to Server4xxOther', () => { + const err = decodeStreamError(enc.encode(JSON.stringify({ message: 'x', status: 400 }))) + expect(err.code).toBe('server_4xx_other') + }) + + it('falls back to default message on empty data', () => { + const err = decodeStreamError(new Uint8Array()) + expect(err.message).toMatch(/error event/i) + }) +}) diff --git a/cli/src/commands/run/app/sse-collector.ts b/cli/src/commands/run/app/sse-collector.ts new file mode 100644 index 0000000000..ce48826d21 --- /dev/null +++ b/cli/src/commands/run/app/sse-collector.ts @@ -0,0 +1,165 @@ +import type { BaseError } from '../../../errors/base.js' +import type { SseEvent } from '../../../http/sse.js' +import { newError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { RUN_MODES } from './handlers.js' + +export type Collector = { + consume: (ev: SseEvent) => void + finalize: () => Record +} + +const dec = new TextDecoder() + +function parseJson(data: Uint8Array): Record { + if (data.byteLength === 0) + return {} + try { + return JSON.parse(dec.decode(data)) as Record + } + catch (e) { + throw newError(ErrorCode.Unknown, `decode SSE event: ${(e as Error).message}`) + } +} + +function copyScalar(dst: Record, src: Record, keys: readonly string[]): void { + for (const k of keys) { + if (k in dst) + continue + if (k in src) + dst[k] = src[k] + } +} + +class ChatCollector implements Collector { + private answer = '' + private base: Record = {} + private metadata: Record | undefined + private thoughts: unknown[] = [] + private readonly mode: string + private readonly isAgent: boolean + constructor(mode: string, isAgent: boolean) { + this.mode = mode + this.isAgent = isAgent + } + + consume(ev: SseEvent): void { + const c = parseJson(ev.data) + switch (ev.name) { + case 'message': + case 'agent_message': { + if (typeof c.answer === 'string') + this.answer += c.answer + 
copyScalar(this.base, c, ['id', 'conversation_id', 'message_id', 'task_id', 'created_at']) + return + } + case 'agent_thought': + this.thoughts.push(c) + return + case 'message_end': + if (c.metadata !== undefined && typeof c.metadata === 'object' && c.metadata !== null) + this.metadata = c.metadata as Record + copyScalar(this.base, c, ['id', 'conversation_id', 'message_id', 'task_id', 'created_at']) + } + } + + finalize(): Record { + const out: Record = { mode: this.mode, answer: this.answer, ...this.base } + if (this.metadata !== undefined) + out.metadata = this.metadata + if (this.isAgent || this.thoughts.length > 0) + out.agent_thoughts = this.thoughts + return out + } +} + +class CompletionCollector implements Collector { + private answer = '' + private base: Record = {} + private metadata: Record | undefined + consume(ev: SseEvent): void { + const c = parseJson(ev.data) + switch (ev.name) { + case 'message': + if (typeof c.answer === 'string') + this.answer += c.answer + copyScalar(this.base, c, ['id', 'message_id', 'task_id', 'created_at']) + return + case 'message_end': + if (c.metadata !== undefined && typeof c.metadata === 'object' && c.metadata !== null) + this.metadata = c.metadata as Record + copyScalar(this.base, c, ['id', 'message_id', 'task_id', 'created_at']) + } + } + + finalize(): Record { + const out: Record = { mode: RUN_MODES.Completion, answer: this.answer, ...this.base } + if (this.metadata !== undefined) + out.metadata = this.metadata + return out + } +} + +class WorkflowCollector implements Collector { + private final: Record | undefined + consume(ev: SseEvent): void { + if (ev.name !== 'workflow_finished') + return + this.final = parseJson(ev.data) + } + + finalize(): Record { + return { mode: RUN_MODES.Workflow, ...(this.final ?? 
{}) } + } +} + +const FACTORIES: Record Collector> = { + [RUN_MODES.Chat]: () => new ChatCollector(RUN_MODES.Chat, false), + [RUN_MODES.AdvancedChat]: () => new ChatCollector(RUN_MODES.AdvancedChat, false), + [RUN_MODES.AgentChat]: () => new ChatCollector(RUN_MODES.AgentChat, true), + [RUN_MODES.Completion]: () => new CompletionCollector(), + [RUN_MODES.Workflow]: () => new WorkflowCollector(), +} + +export function collectorFor(mode: string): Collector { + const f = FACTORIES[mode] + if (f === undefined) + throw newError(ErrorCode.Unknown, `unsupported streaming mode "${mode}"`) + return f() +} + +export function decodeStreamError(data: Uint8Array): BaseError { + type Env = { message?: string, code?: string, status?: number } + let env: Env = {} + if (data.byteLength > 0) { + try { + env = JSON.parse(dec.decode(data)) as Env + } + catch {} + } + const message = env.message !== undefined && env.message !== '' + ? env.message + : 'stream terminated by error event' + const code = env.status !== undefined && env.status > 0 && env.status < 500 + ? 
ErrorCode.Server4xxOther + : ErrorCode.Server5xx + let err = newError(code, message) + if (env.status !== undefined && env.status > 0) + err = err.withHttpStatus(env.status) + return err +} + +export async function collect( + iter: AsyncIterable, + mode: string, +): Promise> { + const c = collectorFor(mode) + for await (const ev of iter) { + if (ev.name === 'ping') + continue + if (ev.name === 'error') + throw decodeStreamError(ev.data) + c.consume(ev) + } + return c.finalize() +} diff --git a/cli/src/commands/run/app/stream-handlers.test.ts b/cli/src/commands/run/app/stream-handlers.test.ts new file mode 100644 index 0000000000..26b5ade90a --- /dev/null +++ b/cli/src/commands/run/app/stream-handlers.test.ts @@ -0,0 +1,80 @@ +import type { SseEvent } from '../../../http/sse.js' +import { Buffer } from 'node:buffer' +import { PassThrough } from 'node:stream' +import { describe, expect, it } from 'vitest' +import { streamPrinterFor } from './stream-handlers.js' + +const enc = new TextEncoder() +function ev(name: string, data: object): SseEvent { + return { name, data: enc.encode(JSON.stringify(data)) } +} + +function captures(): { out: PassThrough, err: PassThrough, outBuf: () => string, errBuf: () => string } { + const out = new PassThrough() + const err = new PassThrough() + const oc: Buffer[] = [] + out.on('data', d => oc.push(d as Buffer)) + const ec: Buffer[] = [] + err.on('data', d => ec.push(d as Buffer)) + return { + out, + err, + outBuf: () => Buffer.concat(oc).toString('utf-8'), + errBuf: () => Buffer.concat(ec).toString('utf-8'), + } +} + +describe('streamPrinterFor — chat', () => { + it('prints answer chunks live and conversation hint on end', () => { + const sp = streamPrinterFor('chat') + const cap = captures() + sp.onEvent(cap.out, cap.err, ev('message', { conversation_id: 'c1', answer: 'hello ' })) + sp.onEvent(cap.out, cap.err, ev('message', { answer: 'world' })) + sp.onEnd(cap.out, cap.err) + expect(cap.outBuf()).toBe('hello world\n') + 
expect(cap.errBuf()).toContain('--conversation c1') + }) +}) + +describe('streamPrinterFor — agent-chat', () => { + it('writes agent_thought to stderr', () => { + const sp = streamPrinterFor('agent-chat') + const cap = captures() + sp.onEvent(cap.out, cap.err, ev('agent_thought', { thought: 'thinking' })) + sp.onEvent(cap.out, cap.err, ev('agent_message', { answer: 'done' })) + sp.onEnd(cap.out, cap.err) + expect(cap.errBuf()).toContain('thought: thinking') + expect(cap.outBuf()).toContain('done') + }) +}) + +describe('streamPrinterFor — completion', () => { + it('prints answers + trailing newline', () => { + const sp = streamPrinterFor('completion') + const cap = captures() + sp.onEvent(cap.out, cap.err, ev('message', { answer: 'foo' })) + sp.onEvent(cap.out, cap.err, ev('message', { answer: 'bar' })) + sp.onEnd(cap.out, cap.err) + expect(cap.outBuf()).toBe('foobar\n') + }) +}) + +describe('streamPrinterFor — workflow', () => { + it('streams node titles to stderr and outputs JSON on end', () => { + const sp = streamPrinterFor('workflow') + const cap = captures() + sp.onEvent(cap.out, cap.err, ev('node_started', { title: 'A' })) + sp.onEvent(cap.out, cap.err, ev('node_finished', { id: 'a', status: 'succeeded' })) + sp.onEvent(cap.out, cap.err, ev('workflow_finished', { data: { outputs: { x: 1 } } })) + sp.onEnd(cap.out, cap.err) + expect(cap.errBuf()).toContain('→ A') + const parsed = JSON.parse(cap.outBuf().trim()) as { x: number } + expect(parsed.x).toBe(1) + }) +}) + +describe('streamPrinterFor — unknown mode', () => { + it('throws', () => { + expect(() => streamPrinterFor('whatever')).toThrow() + }) +}) diff --git a/cli/src/commands/run/app/stream-handlers.ts b/cli/src/commands/run/app/stream-handlers.ts new file mode 100644 index 0000000000..cf0355ae78 --- /dev/null +++ b/cli/src/commands/run/app/stream-handlers.ts @@ -0,0 +1,115 @@ +import type { SseEvent } from '../../../http/sse.js' +import type { StreamPrinter } from '../../../printers/stream-printer.js' 
+import { newError } from '../../../errors/base.js' +import { ErrorCode } from '../../../errors/codes.js' +import { RUN_MODES } from './handlers.js' + +const dec = new TextDecoder() + +function parseJson(data: Uint8Array): Record { + if (data.byteLength === 0) + return {} + try { + return JSON.parse(dec.decode(data)) as Record + } + catch { + return {} + } +} + +class ChatStreamPrinter implements StreamPrinter { + private convoId = '' + onEvent(out: NodeJS.WritableStream, errOut: NodeJS.WritableStream, ev: SseEvent): void { + const c = parseJson(ev.data) + switch (ev.name) { + case 'message': + case 'agent_message': { + if (typeof c.answer === 'string') + out.write(c.answer) + if (typeof c.conversation_id === 'string' && c.conversation_id !== '') + this.convoId = c.conversation_id + return + } + case 'agent_thought': + if (typeof c.thought === 'string' && c.thought !== '') + errOut.write(`thought: ${c.thought}\n`) + return + case 'message_end': + if (typeof c.conversation_id === 'string' && c.conversation_id !== '') + this.convoId = c.conversation_id + } + } + + onEnd(out: NodeJS.WritableStream, errOut: NodeJS.WritableStream): void { + out.write('\n') + if (this.convoId !== '') + errOut.write(`hint: continue this conversation with --conversation ${this.convoId}\n`) + } +} + +class CompletionStreamPrinter implements StreamPrinter { + onEvent(out: NodeJS.WritableStream, _errOut: NodeJS.WritableStream, ev: SseEvent): void { + if (ev.name !== 'message') + return + const c = parseJson(ev.data) + if (typeof c.answer === 'string') + out.write(c.answer) + } + + onEnd(out: NodeJS.WritableStream): void { + out.write('\n') + } +} + +class WorkflowStreamPrinter implements StreamPrinter { + private final: Record | undefined + onEvent(_out: NodeJS.WritableStream, errOut: NodeJS.WritableStream, ev: SseEvent): void { + const c = parseJson(ev.data) + switch (ev.name) { + case 'node_started': { + const title = (typeof c.title === 'string' && c.title !== '') + ? 
c.title + : (typeof c.id === 'string' ? c.id : '') + if (title !== '') + errOut.write(`→ ${title}\n`) + return + } + case 'node_finished': { + const status = typeof c.status === 'string' ? c.status : '' + if (status !== '' && status !== 'succeeded') { + const id = typeof c.id === 'string' ? c.id : '' + errOut.write(` [${status}] ${id}\n`) + } + return + } + case 'workflow_finished': + this.final = c + } + } + + onEnd(out: NodeJS.WritableStream): void { + if (this.final === undefined) + return + const data = this.final.data + if (data !== null && typeof data === 'object' && 'outputs' in data) { + out.write(`${JSON.stringify((data as { outputs: unknown }).outputs)}\n`) + return + } + out.write(`${JSON.stringify(this.final)}\n`) + } +} + +const FACTORIES: Record StreamPrinter> = { + [RUN_MODES.Chat]: () => new ChatStreamPrinter(), + [RUN_MODES.AdvancedChat]: () => new ChatStreamPrinter(), + [RUN_MODES.AgentChat]: () => new ChatStreamPrinter(), + [RUN_MODES.Completion]: () => new CompletionStreamPrinter(), + [RUN_MODES.Workflow]: () => new WorkflowStreamPrinter(), +} + +export function streamPrinterFor(mode: string): StreamPrinter { + const f = FACTORIES[mode] + if (f === undefined) + throw newError(ErrorCode.Unknown, `unsupported streaming mode "${mode}"`) + return f() +} diff --git a/cli/src/commands/version/index.ts b/cli/src/commands/version/index.ts new file mode 100644 index 0000000000..cbef2b28ab --- /dev/null +++ b/cli/src/commands/version/index.ts @@ -0,0 +1,47 @@ +import { Flags } from '@oclif/core' +import pc from 'picocolors' +import { compatString, difyCompat } from '../../version/compat.js' +import { versionInfo } from '../../version/info.js' +import { DifyCommand } from '../_shared/dify-command.js' + +const RC_WARNING_LINES = [ + 'WARNING: This build is a release candidate. It is in beta test, not stable,', + ' and may have bugs. 
For production use, install the stable channel.', +] as const + +export default class Version extends DifyCommand { + static override description = 'Show difyctl version, channel, and supported dify range' + static override examples = ['<%= config.bin %> version', '<%= config.bin %> version --json'] + + static override flags = { + json: Flags.boolean({ description: 'emit JSON' }), + } + + async run(): Promise { + const { flags } = await this.parse(Version) + const { version, commit, buildDate, channel } = versionInfo + + if (flags.json) { + this.log(JSON.stringify({ + version, + commit, + buildDate, + channel, + compat: { minDify: difyCompat.minDify, maxDify: difyCompat.maxDify }, + })) + return + } + + this.log(`difyctl ${version}`) + this.log(` channel: ${channel}`) + this.log(` built: ${buildDate} (commit ${commit.slice(0, 7)})`) + this.log(` compat: ${compatString()}`) + + if (channel === 'rc') { + this.log('') + const colour = process.stdout.isTTY ? pc.yellow : (s: string) => s + for (const line of RC_WARNING_LINES) + this.log(colour(line)) + } + } +} diff --git a/cli/src/commands/version/version.test.ts b/cli/src/commands/version/version.test.ts new file mode 100644 index 0000000000..7310ce17d7 --- /dev/null +++ b/cli/src/commands/version/version.test.ts @@ -0,0 +1,60 @@ +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import Version from './index.js' + +describe('Version command', () => { + let logs: string[] + + beforeEach(() => { + logs = [] + vi.spyOn(Version.prototype, 'log').mockImplementation((line?: string) => { + logs.push(line ?? 
'') + }) + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + it('prints structured block on stable channel without warning', async () => { + const info = await import('../../version/info.js') + const orig = info.versionInfo.channel + Object.assign(info.versionInfo, { channel: 'stable' }) + try { + await Version.run([]) + const text = logs.join('\n') + expect(text).toMatch(/^difyctl /) + expect(text).toContain('channel: stable') + expect(text).toContain('compat:') + expect(text).not.toContain('WARNING:') + } + finally { + Object.assign(info.versionInfo, { channel: orig }) + } + }) + + it('prints warning on rc channel', async () => { + const info = await import('../../version/info.js') + const orig = info.versionInfo.channel + Object.assign(info.versionInfo, { channel: 'rc' }) + try { + await Version.run([]) + const text = logs.join('\n') + expect(text).toContain('channel: rc') + expect(text).toContain('WARNING: This build is a release candidate') + expect(text).toContain('install the stable channel') + } + finally { + Object.assign(info.versionInfo, { channel: orig }) + } + }) + + it('emits JSON when --json flag passed', async () => { + await Version.run(['--json']) + const payload = JSON.parse(logs.join('')) + expect(payload).toHaveProperty('version') + expect(payload).toHaveProperty('channel') + expect(payload).toHaveProperty('compat') + expect(payload.compat).toHaveProperty('minDify') + expect(payload.compat).toHaveProperty('maxDify') + }) +}) diff --git a/cli/src/config/dir.test.ts b/cli/src/config/dir.test.ts new file mode 100644 index 0000000000..24ecde3986 --- /dev/null +++ b/cli/src/config/dir.test.ts @@ -0,0 +1,71 @@ +import { describe, expect, it } from 'vitest' +import { DIR_PERM, FILE_PERM, resolveConfigDir } from './dir.js' + +function fakeEnv(opts: { + override?: string + xdg?: string + home?: string + appData?: string + platform: NodeJS.Platform +}) { + return { + getEnv: (name: string) => { + if (name === 'DIFY_CONFIG_DIR') + return 
opts.override + if (name === 'XDG_CONFIG_HOME') + return opts.xdg + return undefined + }, + homeDir: () => opts.home ?? '/home/u', + platform: () => opts.platform, + appData: () => opts.appData, + } +} + +describe('config dir', () => { + it('FILE_PERM is 0o600 + DIR_PERM is 0o700 (POSIX defaults)', () => { + expect(FILE_PERM).toBe(0o600) + expect(DIR_PERM).toBe(0o700) + }) + + it('DIFY_CONFIG_DIR override wins on every platform', () => { + for (const platform of ['linux', 'darwin', 'win32'] as const) { + expect(resolveConfigDir(fakeEnv({ override: '/tmp/x', platform }))) + .toBe('/tmp/x') + } + }) + + it('linux uses XDG_CONFIG_HOME when set', () => { + expect(resolveConfigDir(fakeEnv({ xdg: '/x', platform: 'linux' }))) + .toBe('/x/difyctl') + }) + + it('linux falls back to ~/.config when XDG unset', () => { + expect(resolveConfigDir(fakeEnv({ home: '/h', platform: 'linux' }))) + .toBe('/h/.config/difyctl') + }) + + it('linux ignores empty XDG_CONFIG_HOME', () => { + expect(resolveConfigDir(fakeEnv({ xdg: '', home: '/h', platform: 'linux' }))) + .toBe('/h/.config/difyctl') + }) + + it('macos uses ~/.config (not XDG, matches gh/kubectl)', () => { + expect(resolveConfigDir(fakeEnv({ xdg: '/ignored', home: '/h', platform: 'darwin' }))) + .toBe('/h/.config/difyctl') + }) + + it('windows uses APPDATA', () => { + expect(resolveConfigDir(fakeEnv({ appData: 'C:\\Users\\u\\AppData\\Roaming', platform: 'win32' }))) + .toMatch(/difyctl$/) + }) + + it('windows throws if APPDATA unresolvable', () => { + expect(() => resolveConfigDir(fakeEnv({ platform: 'win32' }))).toThrow(/APPDATA/) + }) + + it('unknown platform falls back to ~/.config', () => { + expect(resolveConfigDir(fakeEnv({ home: '/h', platform: 'freebsd' as NodeJS.Platform }))) + .toBe('/h/.config/difyctl') + }) +}) diff --git a/cli/src/config/dir.ts b/cli/src/config/dir.ts new file mode 100644 index 0000000000..6d92953769 --- /dev/null +++ b/cli/src/config/dir.ts @@ -0,0 +1,45 @@ +import { homedir } from 'node:os' 
+import { join } from 'node:path' + +export const ENV_CONFIG_DIR = 'DIFY_CONFIG_DIR' +export const ENV_XDG_CONFIG_HOME = 'XDG_CONFIG_HOME' +export const SUBDIR = 'difyctl' +export const FILE_PERM = 0o600 +export const DIR_PERM = 0o700 + +export type ConfigEnvironment = { + readonly getEnv: (name: string) => string | undefined + readonly homeDir: () => string + readonly platform: () => NodeJS.Platform + readonly appData: () => string | undefined +} + +export const realEnvironment: ConfigEnvironment = { + getEnv: name => process.env[name], + homeDir: () => homedir(), + platform: () => process.platform, + appData: () => process.env.APPDATA ?? process.env.LOCALAPPDATA, +} + +export function resolveConfigDir(env: ConfigEnvironment = realEnvironment): string { + const override = env.getEnv(ENV_CONFIG_DIR) + if (override !== undefined && override !== '') + return override + + const platform = env.platform() + if (platform === 'linux') { + const xdg = env.getEnv(ENV_XDG_CONFIG_HOME) + if (xdg !== undefined && xdg !== '') + return join(xdg, SUBDIR) + return join(env.homeDir(), '.config', SUBDIR) + } + if (platform === 'darwin') + return join(env.homeDir(), '.config', SUBDIR) + if (platform === 'win32') { + const appData = env.appData() + if (appData === undefined || appData === '') + throw new Error('cannot resolve %APPDATA% on Windows') + return join(appData, SUBDIR) + } + return join(env.homeDir(), '.config', SUBDIR) +} diff --git a/cli/src/config/keys.test.ts b/cli/src/config/keys.test.ts new file mode 100644 index 0000000000..15a7aed72d --- /dev/null +++ b/cli/src/config/keys.test.ts @@ -0,0 +1,141 @@ +import { describe, expect, it } from 'vitest' +import { isBaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { + getKey, + knownKeyNames, + knownKeys, + lookupKey, + setKey, + unsetKey, +} from './keys.js' +import { emptyConfig } from './schema.js' + +describe('config keys', () => { + it('exposes the v1.0 key set: defaults.format, 
defaults.limit, state.current_app', () => { + expect([...knownKeyNames()].sort()).toEqual( + ['defaults.format', 'defaults.limit', 'state.current_app'], + ) + }) + + it('knownKeys is alphabetically sorted', () => { + const names = knownKeys().map(k => k.name) + const sorted = [...names].sort() + expect(names).toEqual(sorted) + }) + + it('lookupKey returns the spec by name', () => { + expect(lookupKey('defaults.format')?.description).toMatch(/format/i) + expect(lookupKey('nope')).toBeUndefined() + }) + + describe('getKey', () => { + it('returns empty string for unset values', () => { + const cfg = emptyConfig() + expect(getKey(cfg, 'defaults.format')).toBe('') + expect(getKey(cfg, 'defaults.limit')).toBe('') + expect(getKey(cfg, 'state.current_app')).toBe('') + }) + + it('throws config_invalid_key for unknown keys', () => { + let caught: unknown + try { + getKey(emptyConfig(), 'nope') + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidKey) + }) + }) + + describe('setKey', () => { + it('sets defaults.format when value is in the allowed enum', () => { + const updated = setKey(emptyConfig(), 'defaults.format', 'json') + expect(updated.defaults.format).toBe('json') + }) + + it('throws config_invalid_value for unknown format', () => { + let caught: unknown + try { + setKey(emptyConfig(), 'defaults.format', 'csv') + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) { + expect(caught.code).toBe(ErrorCode.ConfigInvalidValue) + expect(caught.message).toMatch(/csv/) + } + }) + + it('sets defaults.limit when value is 1..200', () => { + const updated = setKey(emptyConfig(), 'defaults.limit', '50') + expect(updated.defaults.limit).toBe(50) + }) + + it('throws config_invalid_value for limit outside 1..200', () => { + let caught: unknown + try { + setKey(emptyConfig(), 'defaults.limit', '999') + } + catch (err) { caught = err } + 
expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidValue) + }) + + it('throws config_invalid_value for non-numeric limit', () => { + let caught: unknown + try { + setKey(emptyConfig(), 'defaults.limit', 'abc') + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidValue) + }) + + it('sets state.current_app to any string', () => { + const updated = setKey(emptyConfig(), 'state.current_app', 'app-123') + expect(updated.state.current_app).toBe('app-123') + }) + + it('returns a new config object (does not mutate the original)', () => { + const original = emptyConfig() + const updated = setKey(original, 'defaults.format', 'yaml') + expect(original.defaults.format).toBeUndefined() + expect(updated.defaults.format).toBe('yaml') + }) + }) + + describe('unsetKey', () => { + it('clears a previously-set defaults.format', () => { + const set = setKey(emptyConfig(), 'defaults.format', 'json') + const unset = unsetKey(set, 'defaults.format') + expect(unset.defaults.format).toBeUndefined() + }) + + it('clears a previously-set defaults.limit', () => { + const set = setKey(emptyConfig(), 'defaults.limit', '99') + const unset = unsetKey(set, 'defaults.limit') + expect(unset.defaults.limit).toBeUndefined() + }) + + it('clears state.current_app', () => { + const set = setKey(emptyConfig(), 'state.current_app', 'app-1') + const unset = unsetKey(set, 'state.current_app') + expect(unset.state.current_app).toBeUndefined() + }) + + it('throws config_invalid_key for unknown keys', () => { + let caught: unknown + try { + unsetKey(emptyConfig(), 'nope') + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigInvalidKey) + }) + }) +}) diff --git a/cli/src/config/keys.ts b/cli/src/config/keys.ts new file mode 100644 index 0000000000..4d719b1c4c 
--- /dev/null +++ b/cli/src/config/keys.ts @@ -0,0 +1,96 @@ +import type { AllowedFormat, ConfigFile } from './schema.js' +import { newError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { parseLimit } from '../limit/limit.js' +import { ALLOWED_FORMATS } from './schema.js' + +export type KeySpec = { + readonly name: string + readonly description: string + get: (config: ConfigFile) => string + set: (config: ConfigFile, value: string) => ConfigFile + unset: (config: ConfigFile) => ConfigFile +} + +const KEYS: readonly KeySpec[] = [ + { + name: 'defaults.format', + description: `Default output format used when -o is not passed (${ALLOWED_FORMATS.join('|')}).`, + get: c => c.defaults.format ?? '', + set: (c, v) => { + if (!(ALLOWED_FORMATS as readonly string[]).includes(v)) { + throw newError( + ErrorCode.ConfigInvalidValue, + `defaults.format: ${JSON.stringify(v)} is not one of ${ALLOWED_FORMATS.join('|')}`, + ) + } + return { ...c, defaults: { ...c.defaults, format: v as AllowedFormat } } + }, + unset: c => ({ ...c, defaults: { ...c.defaults, format: undefined } }), + }, + { + name: 'defaults.limit', + description: 'Default page size for list commands (1..200).', + get: c => (c.defaults.limit === undefined ? '' : String(c.defaults.limit)), + set: (c, v) => { + try { + const n = parseLimit(v, 'defaults.limit') + return { ...c, defaults: { ...c.defaults, limit: n } } + } + catch (err) { + throw newError(ErrorCode.ConfigInvalidValue, (err as Error).message).wrap(err) + } + }, + unset: c => ({ ...c, defaults: { ...c.defaults, limit: undefined } }), + }, + { + name: 'state.current_app', + description: 'App ID used when commands need an app context but no positional argument is given.', + get: c => c.state.current_app ?? 
'', + set: (c, v) => ({ ...c, state: { ...c.state, current_app: v } }), + unset: c => ({ ...c, state: { ...c.state, current_app: undefined } }), + }, +] + +const SORTED: readonly KeySpec[] = [...KEYS].sort((a, b) => a.name.localeCompare(b.name)) +const BY_NAME = new Map(SORTED.map(k => [k.name, k])) + +export function knownKeys(): readonly KeySpec[] { + return SORTED +} + +export function knownKeyNames(): readonly string[] { + return SORTED.map(k => k.name) +} + +export function lookupKey(name: string): KeySpec | undefined { + return BY_NAME.get(name) +} + +export function getKey(config: ConfigFile, name: string): string { + const spec = lookupKey(name) + if (spec === undefined) + throw unknownKey(name) + return spec.get(config) +} + +export function setKey(config: ConfigFile, name: string, value: string): ConfigFile { + const spec = lookupKey(name) + if (spec === undefined) + throw unknownKey(name) + return spec.set(config, value) +} + +export function unsetKey(config: ConfigFile, name: string): ConfigFile { + const spec = lookupKey(name) + if (spec === undefined) + throw unknownKey(name) + return spec.unset(config) +} + +function unknownKey(name: string): Error { + return newError( + ErrorCode.ConfigInvalidKey, + `unknown config key ${JSON.stringify(name)} (known: ${knownKeyNames().join(', ')})`, + ) +} diff --git a/cli/src/config/loader.test.ts b/cli/src/config/loader.test.ts new file mode 100644 index 0000000000..da7bac2c1f --- /dev/null +++ b/cli/src/config/loader.test.ts @@ -0,0 +1,87 @@ +import { mkdir, mkdtemp, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { isBaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { loadConfig } from './loader.js' +import { FILE_NAME } from './schema.js' + +describe('loadConfig', () => { + let dir: string + + beforeEach(async () => { + dir = await 
mkdtemp(join(tmpdir(), 'difyctl-cfg-')) + }) + + afterEach(async () => { + await mkdir(dir, { recursive: true }).catch(() => {}) + }) + + it('returns found:false when config.yml is missing', async () => { + const r = await loadConfig(dir) + expect(r.found).toBe(false) + }) + + it('parses a minimal valid config.yml', async () => { + await writeFile(join(dir, FILE_NAME), 'schema_version: 1\n', 'utf8') + const r = await loadConfig(dir) + expect(r.found).toBe(true) + if (r.found) + expect(r.config.schema_version).toBe(1) + }) + + it('parses defaults + state', async () => { + await writeFile( + join(dir, FILE_NAME), + 'schema_version: 1\ndefaults:\n format: json\n limit: 100\nstate:\n current_app: app-1\n', + 'utf8', + ) + const r = await loadConfig(dir) + expect(r.found).toBe(true) + if (r.found) { + expect(r.config.defaults.format).toBe('json') + expect(r.config.defaults.limit).toBe(100) + expect(r.config.state.current_app).toBe('app-1') + } + }) + + it('throws BaseError(config_schema_unsupported) when YAML is malformed', async () => { + await writeFile(join(dir, FILE_NAME), '::not yaml::: {{[', 'utf8') + let caught: unknown + try { + await loadConfig(dir) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigSchemaUnsupported) + }) + + it('throws BaseError(config_schema_unsupported) when zod validation fails', async () => { + await writeFile(join(dir, FILE_NAME), 'defaults:\n limit: 9999\n', 'utf8') + let caught: unknown + try { + await loadConfig(dir) + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.code).toBe(ErrorCode.ConfigSchemaUnsupported) + }) + + it('throws BaseError(config_schema_unsupported) when schema_version > 1 (forward-refuse)', async () => { + await writeFile(join(dir, FILE_NAME), 'schema_version: 2\n', 'utf8') + let caught: unknown + try { + await loadConfig(dir) + } + catch (err) { caught = 
err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) { + expect(caught.code).toBe(ErrorCode.ConfigSchemaUnsupported) + expect(caught.message).toMatch(/schema_version=2/) + expect(caught.hint).toMatch(/upgrade difyctl/) + } + }) +}) diff --git a/cli/src/config/loader.ts b/cli/src/config/loader.ts new file mode 100644 index 0000000000..8ff00b3631 --- /dev/null +++ b/cli/src/config/loader.ts @@ -0,0 +1,58 @@ +import type { ConfigFile } from './schema.js' +import { readFile } from 'node:fs/promises' +import { join } from 'node:path' +import { load as parseYaml } from 'js-yaml' +import { newError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { + + ConfigFileSchema, + CURRENT_SCHEMA_VERSION, + FILE_NAME, +} from './schema.js' + +export type LoadResult + = | { found: false } + | { found: true, config: ConfigFile } + +export async function loadConfig(dir: string): Promise { + const path = join(dir, FILE_NAME) + let raw: string + try { + raw = await readFile(path, 'utf8') + } + catch (err) { + if ((err as NodeJS.ErrnoException).code === 'ENOENT') + return { found: false } + throw newError(ErrorCode.Unknown, `read ${path}: ${(err as Error).message}`) + .wrap(err) + } + + let parsed: unknown + try { + parsed = parseYaml(raw) + } + catch (err) { + throw newError( + ErrorCode.ConfigSchemaUnsupported, + `parse ${path}: ${(err as Error).message}`, + ).wrap(err).withHint('config.yml is not valid YAML') + } + + const result = ConfigFileSchema.safeParse(parsed ?? 
{})
  if (!result.success) {
    throw newError(
      ErrorCode.ConfigSchemaUnsupported,
      `validate ${path}: ${result.error.issues.map(i => i.message).join('; ')}`,
    ).withHint('config.yml does not match the v1 schema')
  }

  // Forward-refuse: never reinterpret a config written by a newer difyctl.
  if (result.data.schema_version > CURRENT_SCHEMA_VERSION) {
    throw newError(
      ErrorCode.ConfigSchemaUnsupported,
      `config.yml schema_version=${result.data.schema_version} is newer than this binary supports (max=${CURRENT_SCHEMA_VERSION})`,
    ).withHint('upgrade difyctl, or remove config.yml')
  }

  return { found: true, config: result.data }
}
diff --git a/cli/src/config/schema.test.ts b/cli/src/config/schema.test.ts
new file mode 100644
index 0000000000..8fec34dec5
--- /dev/null
+++ b/cli/src/config/schema.test.ts
@@ -0,0 +1,63 @@
import { describe, expect, it } from 'vitest'
import {
  ALLOWED_FORMATS,
  ConfigFileSchema,
  CURRENT_SCHEMA_VERSION,
  emptyConfig,
  FILE_NAME,
} from './schema.js'

describe('config schema', () => {
  it('CURRENT_SCHEMA_VERSION is 1', () => {
    expect(CURRENT_SCHEMA_VERSION).toBe(1)
  })

  it('FILE_NAME is config.yml', () => {
    expect(FILE_NAME).toBe('config.yml')
  })

  it('ALLOWED_FORMATS matches Go set (json/yaml/table/wide/name/text)', () => {
    expect([...ALLOWED_FORMATS].sort()).toEqual(
      ['json', 'name', 'table', 'text', 'wide', 'yaml'],
    )
  })

  it('emptyConfig fills defaults + state with empty objects', () => {
    const cfg = emptyConfig()
    expect(cfg.schema_version).toBe(0)
    expect(cfg.defaults).toEqual({})
    expect(cfg.state).toEqual({})
  })

  it('rejects defaults.limit out of bounds', () => {
    expect(ConfigFileSchema.safeParse({ defaults: { limit: 0 } }).success).toBe(false)
    expect(ConfigFileSchema.safeParse({ defaults: { limit: 201 } }).success).toBe(false)
    expect(ConfigFileSchema.safeParse({ defaults: { limit: 50 } }).success).toBe(true)
  })

  it('rejects defaults.format outside the enum', () => {
    expect(ConfigFileSchema.safeParse({ defaults: { format: 'csv' } }).success).toBe(false)
    expect(ConfigFileSchema.safeParse({ defaults: { format: 'json' } }).success).toBe(true)
  })

  it('accepts the full v1 shape', () => {
    const r = ConfigFileSchema.safeParse({
      schema_version: 1,
      defaults: { format: 'yaml', limit: 100 },
      state: { current_app: 'app-123' },
    })
    expect(r.success).toBe(true)
    if (r.success) {
      expect(r.data.defaults.format).toBe('yaml')
      expect(r.data.defaults.limit).toBe(100)
      expect(r.data.state.current_app).toBe('app-123')
    }
  })

  it('parses an empty object into emptyConfig() shape', () => {
    const r = ConfigFileSchema.safeParse({})
    expect(r.success).toBe(true)
    if (r.success)
      expect(r.data).toEqual(emptyConfig())
  })
})
diff --git a/cli/src/config/schema.ts b/cli/src/config/schema.ts
new file mode 100644
index 0000000000..f5946bfc31
--- /dev/null
+++ b/cli/src/config/schema.ts
@@ -0,0 +1,32 @@
import { z } from 'zod'

export const CURRENT_SCHEMA_VERSION = 1
export const FILE_NAME = 'config.yml'

// Output formats accepted by -o / defaults.format — presumably kept in sync
// with the Go CLI's set (see the matching test); confirm before extending.
export const ALLOWED_FORMATS = ['json', 'yaml', 'table', 'wide', 'name', 'text'] as const
export type AllowedFormat = (typeof ALLOWED_FORMATS)[number]

export const DefaultsSchema = z
  .object({
    format: z.enum(ALLOWED_FORMATS).optional(),
    limit: z.number().int().min(1).max(200).optional(),
  })
  .default({})

export const StateSchema = z
  .object({
    current_app: z.string().optional(),
  })
  .default({})

// schema_version defaults to 0 so a file without the field is distinguishable
// from one explicitly stamped at version 1.
export const ConfigFileSchema = z.object({
  schema_version: z.number().int().nonnegative().default(0),
  defaults: DefaultsSchema,
  state: StateSchema,
})

// RESTORED: the z.infer type argument was lost in transit.
export type ConfigFile = z.infer<typeof ConfigFileSchema>

export function emptyConfig(): ConfigFile {
  return ConfigFileSchema.parse({})
}
diff --git a/cli/src/config/writer.test.ts b/cli/src/config/writer.test.ts
new file mode 100644
index 0000000000..0fb08f70de
--- /dev/null
+++ b/cli/src/config/writer.test.ts
@@ -0,0 +1,80 @@
import { mkdtemp, readdir, readFile, stat } from 'node:fs/promises'
import { tmpdir } from
'node:os'
import { join } from 'node:path'
import { beforeEach, describe, expect, it } from 'vitest'
import { loadConfig } from './loader.js'
import { emptyConfig, FILE_NAME } from './schema.js'
import { saveConfig } from './writer.js'

describe('saveConfig', () => {
  let dir: string

  beforeEach(async () => {
    dir = await mkdtemp(join(tmpdir(), 'difyctl-w-'))
  })

  it('writes config.yml in the target dir', async () => {
    await saveConfig(dir, { ...emptyConfig(), schema_version: 1 })
    const stats = await stat(join(dir, FILE_NAME))
    expect(stats.isFile()).toBe(true)
  })

  it('stamps schema_version=1 even if caller passed 0', async () => {
    await saveConfig(dir, { ...emptyConfig() })
    const r = await loadConfig(dir)
    expect(r.found).toBe(true)
    if (r.found)
      expect(r.config.schema_version).toBe(1)
  })

  it('round-trips defaults + state through YAML', async () => {
    await saveConfig(dir, {
      schema_version: 1,
      defaults: { format: 'wide', limit: 75 },
      state: { current_app: 'app-xyz' },
    })
    const r = await loadConfig(dir)
    expect(r.found).toBe(true)
    if (r.found) {
      expect(r.config.defaults.format).toBe('wide')
      expect(r.config.defaults.limit).toBe(75)
      expect(r.config.state.current_app).toBe('app-xyz')
    }
  })

  it('writes file with mode 0o600 (POSIX)', async () => {
    if (process.platform === 'win32')
      return
    await saveConfig(dir, emptyConfig())
    const s = await stat(join(dir, FILE_NAME))
    expect(s.mode & 0o777).toBe(0o600)
  })

  it('does not leave a tmp file on success', async () => {
    await saveConfig(dir, emptyConfig())
    const entries = await readdir(dir)
    expect(entries.filter(f => f.endsWith('.tmp'))).toHaveLength(0)
    expect(entries.filter(f => f.includes('.tmp.'))).toHaveLength(0)
  })

  it('creates parent dir at 0o700 if absent', async () => {
    if (process.platform === 'win32')
      return
    const nested = join(dir, 'nested', 'sub')
    await saveConfig(nested, emptyConfig())
    const s = await stat(nested)
    expect(s.isDirectory()).toBe(true)
    expect(s.mode & 0o777).toBe(0o700)
  })

  it('emits parseable YAML (round-trip via fs.readFile + js-yaml)', async () => {
    await saveConfig(dir, {
      schema_version: 1,
      defaults: { format: 'json' },
      state: {},
    })
    const raw = await readFile(join(dir, FILE_NAME), 'utf8')
    expect(raw).toMatch(/^schema_version:/m)
    expect(raw).toMatch(/format: json/)
  })
})
diff --git a/cli/src/config/writer.ts b/cli/src/config/writer.ts
new file mode 100644
index 0000000000..8362ebf884
--- /dev/null
+++ b/cli/src/config/writer.ts
@@ -0,0 +1,39 @@
import type { ConfigFile } from './schema.js'
import { mkdir, rename, unlink, writeFile } from 'node:fs/promises'
import { join } from 'node:path'
import { dump as dumpYaml } from 'js-yaml'
import { newError } from '../errors/base.js'
import { ErrorCode } from '../errors/codes.js'
import { DIR_PERM, FILE_PERM } from './dir.js'
import {
  CURRENT_SCHEMA_VERSION,
  FILE_NAME,
} from './schema.js'

// Atomically writes <dir>/config.yml: write a unique tmp file, then rename()
// it over the target, so concurrent readers never observe a partial file.
// RESTORED: the Promise type parameter (void) was lost in transit.
export async function saveConfig(dir: string, config: ConfigFile): Promise<void> {
  await mkdir(dir, { recursive: true, mode: DIR_PERM })

  // Always stamp the current schema version, whatever the caller passed.
  const stamped: ConfigFile = { ...config, schema_version: CURRENT_SCHEMA_VERSION }
  const yaml = dumpYaml(stamped, { lineWidth: -1, noRefs: true })

  const target = join(dir, FILE_NAME)
  // pid + timestamp makes the tmp name unique per writer process.
  const tmp = `${target}.tmp.${process.pid}.${Date.now()}`

  try {
    await writeFile(tmp, yaml, { mode: FILE_PERM })
    await rename(tmp, target)
  }
  catch (err) {
    try {
      await unlink(tmp)
    }
    catch {
      // tmp may not exist if writeFile failed before creating it
    }
    throw newError(
      ErrorCode.Unknown,
      `save ${target}: ${(err as Error).message}`,
    ).wrap(err)
  }
}
diff --git a/cli/src/env/registry.test.ts b/cli/src/env/registry.test.ts
new file mode 100644
index 0000000000..5ea4726d6f
--- /dev/null
+++ b/cli/src/env/registry.test.ts
@@ -0,0 +1,95 @@
import { afterEach, beforeEach, describe, expect, it } from 'vitest'
import {
  ENV_REGISTRY,
  getEnv,
lookupEnv, + resolveEnv, +} from './registry.js' + +describe('env registry', () => { + it('contains every DIFY_* var from the v1.0 spec', () => { + const names = ENV_REGISTRY.map(e => e.name) + expect(names).toContain('DIFY_TOKEN') + expect(names).toContain('DIFY_HOST') + expect(names).toContain('DIFY_WORKSPACE_ID') + expect(names).toContain('DIFY_CONFIG_DIR') + expect(names).toContain('DIFY_LIMIT') + expect(names).toContain('DIFY_FORMAT') + expect(names).toContain('DIFY_NO_PROGRESS') + expect(names).toContain('DIFY_PLAIN') + }) + + it('is sorted alphabetically (matches Go init() ordering)', () => { + const names = ENV_REGISTRY.map(e => e.name) + const sorted = [...names].sort() + expect(names).toEqual(sorted) + }) + + it('marks DIFY_TOKEN as sensitive', () => { + expect(lookupEnv('DIFY_TOKEN')?.sensitive).toBe(true) + }) + + it('does not mark non-secret vars as sensitive', () => { + expect(lookupEnv('DIFY_HOST')?.sensitive).toBeFalsy() + expect(lookupEnv('DIFY_LIMIT')?.sensitive).toBeFalsy() + }) + + it('lookupEnv returns undefined for unknown name', () => { + expect(lookupEnv('DIFY_NOPE')).toBeUndefined() + }) + + it('lookupEnv finds the registry entry by name', () => { + expect(lookupEnv('DIFY_HOST')?.description).toMatch(/host/i) + }) + + describe('process.env reads', () => { + const originals: Record = {} + beforeEach(() => { + originals.DIFY_LIMIT = process.env.DIFY_LIMIT + originals.DIFY_HOST = process.env.DIFY_HOST + originals.DIFY_TEST_NONEXISTENT = process.env.DIFY_TEST_NONEXISTENT + delete process.env.DIFY_LIMIT + delete process.env.DIFY_HOST + delete process.env.DIFY_TEST_NONEXISTENT + }) + afterEach(() => { + for (const [k, v] of Object.entries(originals)) { + if (v === undefined) + delete process.env[k] + else process.env[k] = v + } + }) + + it('getEnv returns undefined for unset var', () => { + expect(getEnv('DIFY_TEST_NONEXISTENT')).toBeUndefined() + }) + + it('getEnv returns the literal string for a set var', () => { + process.env.DIFY_HOST = 
'https://cloud.dify.ai' + expect(getEnv('DIFY_HOST')).toBe('https://cloud.dify.ai') + }) + + it('resolveEnv returns parsed value for DIFY_LIMIT (uses parseLimit)', () => { + process.env.DIFY_LIMIT = '42' + expect(resolveEnv('DIFY_LIMIT')).toBe(42) + }) + + it('resolveEnv returns the raw string for vars with no parser', () => { + process.env.DIFY_HOST = 'https://example.dify.ai' + expect(resolveEnv('DIFY_HOST')).toBe('https://example.dify.ai') + }) + + it('resolveEnv returns undefined when var is unset and no default', () => { + expect(resolveEnv('DIFY_HOST')).toBeUndefined() + }) + + it('resolveEnv propagates parser errors', () => { + process.env.DIFY_LIMIT = '999' + expect(() => resolveEnv('DIFY_LIMIT')).toThrow(/out of range/) + }) + + it('resolveEnv accepts unknown var name and returns undefined (no throw)', () => { + expect(resolveEnv('DIFY_NOPE')).toBeUndefined() + }) + }) +}) diff --git a/cli/src/env/registry.ts b/cli/src/env/registry.ts new file mode 100644 index 0000000000..5a7938e01b --- /dev/null +++ b/cli/src/env/registry.ts @@ -0,0 +1,68 @@ +import { parseLimit } from '../limit/limit.js' + +export type EnvVar = { + readonly name: string + readonly description: string + readonly default?: string + readonly sensitive?: boolean + readonly parse?: (raw: string) => unknown +} + +const REGISTRY_UNSORTED: readonly EnvVar[] = [ + { + name: 'DIFY_CONFIG_DIR', + description: 'Override the config-dir resolution (precedes XDG_CONFIG_HOME on Linux).', + }, + { + name: 'DIFY_FORMAT', + description: 'Default output format for list commands (table | json | yaml | wide | name).', + }, + { + name: 'DIFY_HOST', + description: 'Default Dify host (overridden by --host).', + }, + { + name: 'DIFY_LIMIT', + description: 'Default page size for list commands (1..200).', + parse: (raw: string) => parseLimit(raw, 'DIFY_LIMIT'), + }, + { + name: 'DIFY_NO_PROGRESS', + description: 'Suppress progress spinners. 
Truthy values: 1, true, yes.', + }, + { + name: 'DIFY_PLAIN', + description: 'Disable ANSI colors and decorative output. Truthy values: 1, true, yes.', + }, + { + name: 'DIFY_TOKEN', + description: 'Bearer token for non-interactive auth.', + sensitive: true, + }, + { + name: 'DIFY_WORKSPACE_ID', + description: 'Workspace ID used when no --workspace flag is set.', + }, +] + +export const ENV_REGISTRY: readonly EnvVar[] = [...REGISTRY_UNSORTED].sort((a, b) => + a.name.localeCompare(b.name), +) + +const BY_NAME = new Map(ENV_REGISTRY.map(e => [e.name, e])) + +export function lookupEnv(name: string): EnvVar | undefined { + return BY_NAME.get(name) +} + +export function getEnv(name: string): string | undefined { + return process.env[name] +} + +export function resolveEnv(name: string): unknown { + const entry = lookupEnv(name) + const raw = getEnv(name) ?? entry?.default + if (raw === undefined) + return undefined + return entry?.parse ? entry.parse(raw) : raw +} diff --git a/cli/src/errors/base.test.ts b/cli/src/errors/base.test.ts new file mode 100644 index 0000000000..196646fae4 --- /dev/null +++ b/cli/src/errors/base.test.ts @@ -0,0 +1,88 @@ +import { describe, expect, it } from 'vitest' +import { BaseError, isBaseError, newError, unknownError } from './base.js' +import { ErrorCode, ExitCode } from './codes.js' + +describe('BaseError', () => { + it('captures code, message, optional fields', () => { + const err = new BaseError({ + code: ErrorCode.AuthExpired, + message: 'session expired', + hint: 'run difyctl auth login', + httpStatus: 401, + method: 'GET', + url: 'https://x/y', + }) + expect(err.code).toBe(ErrorCode.AuthExpired) + expect(err.message).toBe('session expired') + expect(err.hint).toBe('run difyctl auth login') + expect(err.httpStatus).toBe(401) + expect(err.method).toBe('GET') + expect(err.url).toBe('https://x/y') + }) + + it('is an Error instance and instanceof BaseError', () => { + const err = newError(ErrorCode.Unknown, 'x') + 
expect(err).toBeInstanceOf(Error) + expect(err).toBeInstanceOf(BaseError) + }) + + it('exit() routes via code map', () => { + expect(newError(ErrorCode.AuthExpired, 'x').exit()).toBe(ExitCode.Auth) + expect(newError(ErrorCode.UsageInvalidFlag, 'x').exit()).toBe(ExitCode.Usage) + expect(newError(ErrorCode.VersionSkew, 'x').exit()).toBe(ExitCode.VersionCompat) + expect(newError(ErrorCode.NetworkDns, 'x').exit()).toBe(ExitCode.Generic) + }) + + it('toString without hint formats ": "', () => { + const err = newError(ErrorCode.AuthExpired, 'session expired') + expect(err.toString()).toBe('auth_expired: session expired') + }) + + it('toString with hint formats ": (hint: )"', () => { + const err = newError(ErrorCode.AuthExpired, 'session expired') + .withHint('run \'difyctl auth login\'') + expect(err.toString()).toBe( + 'auth_expired: session expired (hint: run \'difyctl auth login\')', + ) + }) + + it('builder methods return new instances; original unchanged', () => { + const original = newError(ErrorCode.Unknown, 'boom') + const hinted = original.withHint('try again') + expect(original.hint).toBeUndefined() + expect(hinted.hint).toBe('try again') + expect(hinted).not.toBe(original) + }) + + it('withHttpStatus + withRequest + wrap chain immutably', () => { + const cause = new Error('underlying') + const built = newError(ErrorCode.NetworkTimeout, 'timed out') + .withHttpStatus(504) + .withRequest('POST', 'https://x/y') + .wrap(cause) + expect(built.httpStatus).toBe(504) + expect(built.method).toBe('POST') + expect(built.url).toBe('https://x/y') + expect(built.cause).toBe(cause) + }) + + it('wrap exposes cause via standard Error.cause property', () => { + const cause = new Error('underlying failure') + const wrapped = newError(ErrorCode.NetworkTimeout, 'timed out').wrap(cause) + expect(wrapped.cause).toBe(cause) + }) + + it('isBaseError narrows unknown values', () => { + expect(isBaseError(newError(ErrorCode.Unknown, 'x'))).toBe(true) + expect(isBaseError(new 
Error('plain'))).toBe(false) + expect(isBaseError({ code: 'unknown' })).toBe(false) + expect(isBaseError(undefined)).toBe(false) + }) + + it('unknownError factory wraps cause and uses ErrorCode.Unknown', () => { + const cause = new Error('boom') + const err = unknownError('something failed', cause) + expect(err.code).toBe(ErrorCode.Unknown) + expect(err.cause).toBe(cause) + }) +}) diff --git a/cli/src/errors/base.ts b/cli/src/errors/base.ts new file mode 100644 index 0000000000..3ec8b6e44f --- /dev/null +++ b/cli/src/errors/base.ts @@ -0,0 +1,81 @@ +import type { ErrorCodeValue, ExitCodeValue } from './codes.js' +import { ErrorCode, exitFor } from './codes.js' + +export type BaseErrorOptions = { + readonly code: ErrorCodeValue + readonly message: string + readonly hint?: string + readonly httpStatus?: number + readonly method?: string + readonly url?: string + readonly cause?: unknown +} + +export class BaseError extends Error { + readonly code: ErrorCodeValue + readonly hint?: string + readonly httpStatus?: number + readonly method?: string + readonly url?: string + + constructor(opts: BaseErrorOptions) { + super(opts.message, opts.cause === undefined ? undefined : { cause: opts.cause }) + this.name = 'BaseError' + this.code = opts.code + this.hint = opts.hint + this.httpStatus = opts.httpStatus + this.method = opts.method + this.url = opts.url + Object.setPrototypeOf(this, new.target.prototype) + } + + exit(): ExitCodeValue { + return exitFor(this.code) + } + + override toString(): string { + return this.hint + ? 
`${this.code}: ${this.message} (hint: ${this.hint})` + : `${this.code}: ${this.message}` + } + + withHint(hint: string): BaseError { + return new BaseError({ ...this.snapshot(), hint }) + } + + withHttpStatus(httpStatus: number): BaseError { + return new BaseError({ ...this.snapshot(), httpStatus }) + } + + withRequest(method: string, url: string): BaseError { + return new BaseError({ ...this.snapshot(), method, url }) + } + + wrap(cause: unknown): BaseError { + return new BaseError({ ...this.snapshot(), cause }) + } + + private snapshot(): BaseErrorOptions { + return { + code: this.code, + message: this.message, + hint: this.hint, + httpStatus: this.httpStatus, + method: this.method, + url: this.url, + cause: this.cause, + } + } +} + +export function newError(code: ErrorCodeValue, message: string): BaseError { + return new BaseError({ code, message }) +} + +export function isBaseError(value: unknown): value is BaseError { + return value instanceof BaseError +} + +export function unknownError(message: string, cause?: unknown): BaseError { + return new BaseError({ code: ErrorCode.Unknown, message, cause }) +} diff --git a/cli/src/errors/codes.test.ts b/cli/src/errors/codes.test.ts new file mode 100644 index 0000000000..c89aa41d50 --- /dev/null +++ b/cli/src/errors/codes.test.ts @@ -0,0 +1,57 @@ +import { describe, expect, it } from 'vitest' +import { + ALL_ERROR_CODES, + CODE_TO_EXIT_MAP, + ErrorCode, + ExitCode, + exitFor, +} from './codes.js' + +describe('error codes', () => { + it('has 17 codes (parity with internal/api/errors)', () => { + expect(ALL_ERROR_CODES).toHaveLength(17) + }) + + it('has the expected ExitCode buckets', () => { + expect(ExitCode.Success).toBe(0) + expect(ExitCode.Generic).toBe(1) + expect(ExitCode.Usage).toBe(2) + expect(ExitCode.Auth).toBe(4) + expect(ExitCode.VersionCompat).toBe(6) + }) + + it('every code maps to an exit', () => { + for (const code of ALL_ERROR_CODES) + expect(CODE_TO_EXIT_MAP[code]).toBeDefined() + }) + + 
it('CODE_TO_EXIT_MAP entry count == ALL_ERROR_CODES length (drift guard)', () => { + expect(Object.keys(CODE_TO_EXIT_MAP)).toHaveLength(ALL_ERROR_CODES.length) + }) + + it.each([ + [ErrorCode.NotLoggedIn, ExitCode.Auth], + [ErrorCode.AuthExpired, ExitCode.Auth], + [ErrorCode.TokenExpired, ExitCode.Auth], + [ErrorCode.AccessDenied, ExitCode.Auth], + [ErrorCode.ExpiredToken, ExitCode.Auth], + [ErrorCode.VersionSkew, ExitCode.VersionCompat], + [ErrorCode.UnsupportedEndpoint, ExitCode.VersionCompat], + [ErrorCode.ConfigSchemaUnsupported, ExitCode.VersionCompat], + [ErrorCode.UsageInvalidFlag, ExitCode.Usage], + [ErrorCode.UsageMissingArg, ExitCode.Usage], + [ErrorCode.ConfigInvalidKey, ExitCode.Usage], + [ErrorCode.ConfigInvalidValue, ExitCode.Usage], + [ErrorCode.NetworkTimeout, ExitCode.Generic], + [ErrorCode.NetworkDns, ExitCode.Generic], + [ErrorCode.Server5xx, ExitCode.Generic], + [ErrorCode.Server4xxOther, ExitCode.Generic], + [ErrorCode.Unknown, ExitCode.Generic], + ])('exitFor(%s) -> %d', (code, want) => { + expect(exitFor(code)).toBe(want) + }) + + it('exitFor returns ExitCode.Generic for unknown code (conservative default)', () => { + expect(exitFor('no_such_code')).toBe(ExitCode.Generic) + }) +}) diff --git a/cli/src/errors/codes.ts b/cli/src/errors/codes.ts new file mode 100644 index 0000000000..ad2a1089ce --- /dev/null +++ b/cli/src/errors/codes.ts @@ -0,0 +1,58 @@ +export const ErrorCode = { + NotLoggedIn: 'not_logged_in', + AuthExpired: 'auth_expired', + TokenExpired: 'token_expired', + AccessDenied: 'access_denied', + ExpiredToken: 'expired_token', + VersionSkew: 'version_skew', + UnsupportedEndpoint: 'unsupported_endpoint', + ConfigSchemaUnsupported: 'config_schema_unsupported', + UsageInvalidFlag: 'usage_invalid_flag', + UsageMissingArg: 'usage_missing_arg', + ConfigInvalidKey: 'config_invalid_key', + ConfigInvalidValue: 'config_invalid_value', + NetworkTimeout: 'network_timeout', + NetworkDns: 'network_dns', + Server5xx: 'server_5xx', + 
Server4xxOther: 'server_4xx_other', + Unknown: 'unknown', +} as const + +export type ErrorCodeValue = (typeof ErrorCode)[keyof typeof ErrorCode] + +export const ExitCode = { + Success: 0, + Generic: 1, + Usage: 2, + Auth: 4, + VersionCompat: 6, +} as const + +export type ExitCodeValue = (typeof ExitCode)[keyof typeof ExitCode] + +const CODE_TO_EXIT: Readonly> = { + not_logged_in: ExitCode.Auth, + auth_expired: ExitCode.Auth, + token_expired: ExitCode.Auth, + access_denied: ExitCode.Auth, + expired_token: ExitCode.Auth, + version_skew: ExitCode.VersionCompat, + unsupported_endpoint: ExitCode.VersionCompat, + config_schema_unsupported: ExitCode.VersionCompat, + usage_invalid_flag: ExitCode.Usage, + usage_missing_arg: ExitCode.Usage, + config_invalid_key: ExitCode.Usage, + config_invalid_value: ExitCode.Usage, + network_timeout: ExitCode.Generic, + network_dns: ExitCode.Generic, + server_5xx: ExitCode.Generic, + server_4xx_other: ExitCode.Generic, + unknown: ExitCode.Generic, +} + +export function exitFor(code: string): ExitCodeValue { + return (CODE_TO_EXIT as Record)[code] ?? 
ExitCode.Generic +} + +export const ALL_ERROR_CODES: readonly ErrorCodeValue[] = Object.values(ErrorCode) +export const CODE_TO_EXIT_MAP: Readonly> = CODE_TO_EXIT diff --git a/cli/src/errors/envelope.test.ts b/cli/src/errors/envelope.test.ts new file mode 100644 index 0000000000..8f736faa46 --- /dev/null +++ b/cli/src/errors/envelope.test.ts @@ -0,0 +1,57 @@ +import { describe, expect, it } from 'vitest' +import { newError } from './base.js' +import { ErrorCode } from './codes.js' +import { renderEnvelope, toEnvelope } from './envelope.js' + +describe('error envelope', () => { + it('emits required fields only when minimal', () => { + const err = newError(ErrorCode.Unknown, 'boom') + expect(toEnvelope(err)).toEqual({ + error: { code: 'unknown', message: 'boom' }, + }) + }) + + it('includes hint / http_status / method / url when present', () => { + const err = newError(ErrorCode.NetworkTimeout, 'timed out') + .withHint('check your network') + .withHttpStatus(504) + .withRequest('POST', 'https://api.dify.ai/v1/x') + expect(toEnvelope(err)).toEqual({ + error: { + code: 'network_timeout', + message: 'timed out', + hint: 'check your network', + http_status: 504, + method: 'POST', + url: 'https://api.dify.ai/v1/x', + }, + }) + }) + + it('renderEnvelope returns a single-line JSON string', () => { + const err = newError(ErrorCode.AuthExpired, 'session expired') + .withHint('run difyctl auth login') + const out = renderEnvelope(err) + expect(out).toBe( + '{"error":{"code":"auth_expired","message":"session expired","hint":"run difyctl auth login"}}', + ) + expect(out).not.toContain('\n') + }) + + it('renderEnvelope output round-trips through JSON.parse to an ErrorEnvelope shape', () => { + const err = newError(ErrorCode.UsageInvalidFlag, 'bad flag').withHint('see --help') + const parsed = JSON.parse(renderEnvelope(err)) + expect(parsed).toEqual({ + error: { code: 'usage_invalid_flag', message: 'bad flag', hint: 'see --help' }, + }) + }) + + it('omits undefined optional fields 
entirely (no `hint: null`)', () => { + const err = newError(ErrorCode.Server5xx, 'upstream broke') + const envelope = toEnvelope(err) + expect(envelope.error).not.toHaveProperty('hint') + expect(envelope.error).not.toHaveProperty('http_status') + expect(envelope.error).not.toHaveProperty('method') + expect(envelope.error).not.toHaveProperty('url') + }) +}) diff --git a/cli/src/errors/envelope.ts b/cli/src/errors/envelope.ts new file mode 100644 index 0000000000..e817890606 --- /dev/null +++ b/cli/src/errors/envelope.ts @@ -0,0 +1,32 @@ +import type { BaseError } from './base.js' + +export type ErrorEnvelope = { + error: { + code: string + message: string + hint?: string + http_status?: number + method?: string + url?: string + } +} + +export function toEnvelope(err: BaseError): ErrorEnvelope { + const payload: ErrorEnvelope['error'] = { + code: err.code, + message: err.message, + } + if (err.hint !== undefined) + payload.hint = err.hint + if (err.httpStatus !== undefined) + payload.http_status = err.httpStatus + if (err.method !== undefined) + payload.method = err.method + if (err.url !== undefined) + payload.url = err.url + return { error: payload } +} + +export function renderEnvelope(err: BaseError): string { + return JSON.stringify(toEnvelope(err)) +} diff --git a/cli/src/errors/format.ts b/cli/src/errors/format.ts new file mode 100644 index 0000000000..b20a6e8cdd --- /dev/null +++ b/cli/src/errors/format.ts @@ -0,0 +1,23 @@ +import type { BaseError } from './base.js' +import { renderEnvelope } from './envelope.js' + +export type FormatErrorOptions = { + readonly format?: string +} + +export function formatErrorForCli(err: BaseError, opts: FormatErrorOptions = {}): string { + if (opts.format === 'json') + return renderEnvelope(err) + return humanError(err) +} + +function humanError(err: BaseError): string { + const lines: string[] = [`${err.code}: ${err.message}`] + if (err.hint !== undefined) + lines.push(`hint: ${err.hint}`) + if (err.method !== undefined && 
err.url !== undefined) + lines.push(`request: ${err.method} ${err.url}`) + if (err.httpStatus !== undefined) + lines.push(`http_status: ${err.httpStatus}`) + return lines.join('\n') +} diff --git a/cli/src/help-class.test.ts b/cli/src/help-class.test.ts new file mode 100644 index 0000000000..3724be9a5f --- /dev/null +++ b/cli/src/help-class.test.ts @@ -0,0 +1,59 @@ +import type { Command } from '@oclif/core' +import { fileURLToPath } from 'node:url' +import { Config } from '@oclif/core' +import { afterEach, beforeAll, describe, expect, it, vi } from 'vitest' +import DifyHelp from './help-class.js' + +describe('DifyHelp', () => { + let config: Config + + beforeAll(async () => { + const root = fileURLToPath(new URL('..', import.meta.url)) + config = await Config.load({ root }) + }) + + afterEach(() => { + vi.restoreAllMocks() + }) + + it('appends agentGuide string after standard help when present', async () => { + const fakeCommand = { + id: 'run:app', + agentGuide: 'WORKFLOW\n 1. do this\n', + description: 'test', + flags: {}, + args: {}, + examples: [], + aliases: [], + } + + const help = new DifyHelp(config, { stripAnsi: true }) + const logSpy = vi.spyOn(help, 'log').mockImplementation(() => {}) + vi.spyOn(Object.getPrototypeOf(Object.getPrototypeOf(help)), 'showCommandHelp') + .mockResolvedValue(undefined) + + await help.showCommandHelp(fakeCommand as unknown as Command.Loadable) + + expect(logSpy).toHaveBeenCalledWith('WORKFLOW\n 1. 
do this\n') + }) + + it('does not call log for agentGuide when command has none', async () => { + const fakeCommand = { + id: 'auth:login', + description: 'test', + flags: {}, + args: {}, + examples: [], + aliases: [], + } + + const help = new DifyHelp(config, { stripAnsi: true }) + const logSpy = vi.spyOn(help, 'log').mockImplementation(() => {}) + vi.spyOn(Object.getPrototypeOf(Object.getPrototypeOf(help)), 'showCommandHelp') + .mockResolvedValue(undefined) + + await help.showCommandHelp(fakeCommand as unknown as Command.Loadable) + + expect(logSpy).not.toHaveBeenCalled() + }) +}) diff --git a/cli/src/help-class.ts b/cli/src/help-class.ts new file mode 100644 index 0000000000..551eb6c97e --- /dev/null +++ b/cli/src/help-class.ts @@ -0,0 +1,11 @@ +import type { Command } from '@oclif/core' +import { Help } from '@oclif/core' + +export default class DifyHelp extends Help { + override async showCommandHelp(command: Command.Loadable): Promise { + await super.showCommandHelp(command) + const guide = (command as Record).agentGuide + if (typeof guide === 'string' && guide.length > 0) + this.log(guide) + } +} diff --git a/cli/src/http/client.test.ts b/cli/src/http/client.test.ts new file mode 100644 index 0000000000..580db2bf6d --- /dev/null +++ b/cli/src/http/client.test.ts @@ -0,0 +1,270 @@ +import type { DifyMock } from '../../test/fixtures/dify-mock/server.js' +import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' +import { startMock } from '../../test/fixtures/dify-mock/server.js' +import { isBaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' +import { createClient } from './client.js' + +describe('http client', () => { + let mock: DifyMock + + beforeEach(async () => { + mock = await startMock() + }) + + afterEach(async () => { + await mock.stop() + }) + + it('GET /workspaces returns parsed JSON when bearer is valid', async () => { + const client = createClient({ host: mock.url, bearer: 'dfoa_test' }) + const body 
= await client.get('workspaces').json<{ workspaces: unknown[] }>() + expect(body.workspaces).toHaveLength(2) + }) + + it('omits Authorization header when bearer is undefined', async () => { + let captured: string | null = null + const client = createClient({ + host: mock.url, + logger: () => undefined, + bearer: undefined, + }) + try { + await client.get('workspaces', { + hooks: { + beforeRequest: [ + ({ request }) => { captured = request.headers.get('authorization') }, + ], + }, + }).json() + } + catch { + // 401 expected because no bearer + } + expect(captured).toBeNull() + }) + + it('sets Authorization header when bearer is provided', async () => { + let captured: string | null = null + const client = createClient({ host: mock.url, bearer: 'dfoa_test' }) + await client.get('workspaces', { + hooks: { + beforeRequest: [ + ({ request }) => { captured = request.headers.get('authorization') }, + ], + }, + }).json() + expect(captured).toBe('Bearer dfoa_test') + }) + + it('sets a User-Agent header in the difyctl format', async () => { + let captured: string | null = null + const client = createClient({ + host: mock.url, + bearer: 'dfoa_test', + userAgent: 'difyctl/0.0.0-test (test; arm64; dev)', + }) + await client.get('workspaces', { + hooks: { + beforeRequest: [ + ({ request }) => { captured = request.headers.get('user-agent') }, + ], + }, + }).json() + expect(captured).toBe('difyctl/0.0.0-test (test; arm64; dev)') + }) + + it('maps 401 to BaseError(auth_expired)', async () => { + mock.setScenario('auth-expired') + const client = createClient({ host: mock.url, bearer: 'dfoa_test' }) + let caught: unknown + try { + await client.get('workspaces').json() + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) { + expect(caught.code).toBe(ErrorCode.AuthExpired) + expect(caught.httpStatus).toBe(401) + expect(caught.method).toBe('GET') + expect(caught.url).toMatch(/workspaces$/) + } + }) + + it('maps 5xx to 
BaseError(server_5xx) after retries', async () => { + mock.setScenario('server-5xx') + const client = createClient({ + host: mock.url, + bearer: 'dfoa_test', + retryAttempts: 1, + timeoutMs: 5_000, + }) + let caught: unknown + try { + await client.get('workspaces').json() + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) { + expect(caught.code).toBe(ErrorCode.Server5xx) + expect(caught.httpStatus).toBe(503) + } + }) + + it('maps DNS failure to BaseError(network_dns)', async () => { + const client = createClient({ + host: 'http://nonexistent-host-12345.invalid', + bearer: 'dfoa_test', + retryAttempts: 0, + timeoutMs: 3_000, + }) + let caught: unknown + try { + await client.get('workspaces').json() + } + catch (err) { caught = err } + expect(isBaseError(caught) || caught instanceof Error).toBe(true) + }) + + it('logger fires twice per successful request (request + response phases)', async () => { + const events: { phase: string, status?: number }[] = [] + const client = createClient({ + host: mock.url, + bearer: 'dfoa_test', + logger: e => events.push({ phase: e.phase, status: e.status }), + }) + await client.get('workspaces').json() + expect(events).toHaveLength(2) + expect(events[0]?.phase).toBe('request') + expect(events[1]?.phase).toBe('response') + expect(events[1]?.status).toBe(200) + }) + + it('respects insecure URL trim (no trailing slash collapses correctly)', async () => { + const client = createClient({ host: `${mock.url}/`, bearer: 'dfoa_test' }) + const body = await client.get('workspaces').json<{ workspaces: unknown[] }>() + expect(body.workspaces).toHaveLength(2) + }) + + it('preserves error envelope hint when server returns one', async () => { + const client = createClient({ host: mock.url, bearer: 'dfoa_test' }) + let caught: unknown + try { + await client.get('apps/nope/describe').json() + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + 
expect(caught.code).toBe(ErrorCode.Server4xxOther) + }) + + it('handles 429 via retry status code list (eventual server-error class)', async () => { + mock.setScenario('rate-limited') + const client = createClient({ + host: mock.url, + bearer: 'dfoa_test', + retryAttempts: 0, + timeoutMs: 5_000, + }) + let caught: unknown + try { + await client.get('workspaces').json() + } + catch (err) { caught = err } + expect(isBaseError(caught)).toBe(true) + if (isBaseError(caught)) + expect(caught.httpStatus).toBe(429) + }) + + it('does not retry POST on 503', async () => { + mock.setScenario('server-5xx') + let attempts = 0 + const client = createClient({ + host: mock.url, + bearer: 'dfoa_test', + retryAttempts: 3, + timeoutMs: 5_000, + logger: (e) => { + if (e.phase === 'request' || e.phase === 'retry') + attempts++ + }, + }) + await expect(client.post('apps/app-1/run', { json: { inputs: {}, response_mode: 'blocking' } }).json()) + .rejects + .toBeDefined() + expect(attempts).toBe(1) + }) + + it('does not retry POST on network error (method allowlist gates retry)', async () => { + let attempts = 0 + const client = createClient({ + host: 'http://nonexistent-host-12345.invalid', + bearer: 'dfoa_test', + retryAttempts: 3, + timeoutMs: 3_000, + logger: (e) => { + if (e.phase === 'request' || e.phase === 'retry') + attempts++ + }, + }) + await expect( + client.post('apps/app-1/run', { json: { inputs: {}, response_mode: 'blocking' } }).json(), + ).rejects.toBeDefined() + expect(attempts).toBe(1) + }) + + it('retries GET on network error up to retryAttempts', async () => { + let attempts = 0 + const client = createClient({ + host: 'http://nonexistent-host-12345.invalid', + bearer: 'dfoa_test', + retryAttempts: 2, + timeoutMs: 3_000, + logger: (e) => { + if (e.phase === 'request' || e.phase === 'retry') + attempts++ + }, + }) + await expect(client.get('workspaces').json()).rejects.toBeDefined() + expect(attempts).toBe(3) + }, 30_000) + + it('does not retry PATCH on network error 
(method allowlist gates retry)', async () => { + let attempts = 0 + const client = createClient({ + host: 'http://nonexistent-host-12345.invalid', + bearer: 'dfoa_test', + retryAttempts: 3, + timeoutMs: 3_000, + logger: (e) => { + if (e.phase === 'request' || e.phase === 'retry') + attempts++ + }, + }) + await expect( + client.patch('workspaces', { json: {} }).json(), + ).rejects.toBeDefined() + expect(attempts).toBe(1) + }) +}) + +describe('classifyResponse internals', () => { + it('strips Bearer from logged URLs (sanity check via vi.fn logger)', async () => { + const mock = await startMock() + try { + const logger = vi.fn() + const client = createClient({ + host: mock.url, + bearer: 'dfoa_should_not_log', + logger, + }) + await client.get('workspaces').json() + const calls = logger.mock.calls.map(c => c[0]) + for (const event of calls) + expect(JSON.stringify(event)).not.toContain('dfoa_should_not_log') + } + finally { + await mock.stop() + } + }) +}) diff --git a/cli/src/http/client.ts b/cli/src/http/client.ts new file mode 100644 index 0000000000..447b9d4649 --- /dev/null +++ b/cli/src/http/client.ts @@ -0,0 +1,63 @@ +import type { AfterResponseHook, BeforeErrorHook, KyInstance } from 'ky' +import type { HttpFactoryOptions, HttpLogger } from './types.js' +import ky from 'ky' +import { BaseError } from '../errors/base.js' +import { userAgent as defaultUserAgent } from '../version/info.js' +import { classifyResponse, classifyTransportError } from './error-mapper.js' +import { applyBearer } from './middleware/auth.js' +import { logBeforeRequest, logBeforeRetry } from './middleware/request-logger.js' +import { applyUserAgent } from './middleware/user-agent.js' +import { redactBearer } from './sanitize.js' + +export const DEFAULT_TIMEOUT_MS = 30_000 +export const DEFAULT_RETRY_ATTEMPTS = 3 + +function trimSlash(s: string): string { + return s.endsWith('/') ? 
s.slice(0, -1) : s
+}
+
+function logAndClassify(logger: HttpLogger | undefined): AfterResponseHook {
+  return async ({ request, response, options }) => {
+    if (logger !== undefined) {
+      logger({
+        phase: 'response',
+        method: request.method,
+        url: redactBearer(request.url),
+        status: response.status,
+      })
+    }
+    if (!response.ok && options.context?.skipClassify !== true)
+      throw await classifyResponse(request, response)
+    return response
+  }
+}
+
+const mapTransportError: BeforeErrorHook = (error) => {
+  if (error instanceof BaseError)
+    return error
+  return classifyTransportError(error)
+}
+
+export function createClient(opts: HttpFactoryOptions): KyInstance {
+  const ua = opts.userAgent ?? defaultUserAgent()
+  return ky.create({
+    prefix: `${trimSlash(opts.host)}/openapi/v1/`,
+    timeout: opts.timeoutMs ?? DEFAULT_TIMEOUT_MS,
+    retry: {
+      limit: opts.retryAttempts ?? DEFAULT_RETRY_ATTEMPTS,
+      methods: ['get', 'put', 'delete'],
+      statusCodes: [408, 413, 429, 500, 502, 503, 504],
+    },
+    throwHttpErrors: false,
+    hooks: {
+      beforeRequest: [
+        applyUserAgent(ua),
+        applyBearer(opts.bearer),
+        logBeforeRequest(opts.logger),
+      ],
+      afterResponse: [logAndClassify(opts.logger)],
+      beforeRetry: [logBeforeRetry(opts.logger)],
+      beforeError: [mapTransportError],
+    },
+  })
+}
diff --git a/cli/src/http/error-mapper.ts b/cli/src/http/error-mapper.ts
new file mode 100644
index 0000000000..4d687ccbb5
--- /dev/null
+++ b/cli/src/http/error-mapper.ts
@@ -0,0 +1,85 @@
+import type { BaseError } from '../errors/base.js'
+import { newError } from '../errors/base.js'
+import { ErrorCode } from '../errors/codes.js'
+import { redactBearer } from './sanitize.js'
+
+type WireFields = {
+  code?: string
+  message?: string
+  hint?: string
+}
+
+type WireEnvelope = WireFields & {
+  error?: WireFields
+}
+
+async function readBody(response: Response): Promise<{ raw: string, parsed?: WireEnvelope }> {
+  let raw = ''
+  try {
+    raw = await response.text()
+  }
+  catch {
+    return { raw: '' }
+  }
+  if (raw === '')
+    return { raw }
+  try {
+    return { raw, parsed: JSON.parse(raw) as WireEnvelope }
+  }
+  catch {
+    return { raw }
+  }
+}
+
+export async function classifyResponse(request: Request, response: Response): Promise<BaseError> {
+  const { parsed } = await readBody(response.clone())
+  const wire: WireFields = parsed?.error ?? parsed ?? {}
+  const status = response.status
+  const url = redactBearer(response.url || request.url)
+  const method = request.method
+
+  if (status === 401) {
+    return newError(
+      ErrorCode.AuthExpired,
+      wire.message ?? 'session expired or revoked',
+    )
+      .withHint(wire.hint ?? 'run \'difyctl auth login\' to sign in again')
+      .withHttpStatus(status)
+      .withRequest(method, url)
+  }
+
+  if (status >= 500) {
+    return newError(
+      ErrorCode.Server5xx,
+      wire.message ?? `server error (HTTP ${status})`,
+    )
+      .withHttpStatus(status)
+      .withRequest(method, url)
+  }
+
+  const err = newError(
+    ErrorCode.Server4xxOther,
+    wire.message ?? `request failed (HTTP ${status})`,
+  )
+    .withHttpStatus(status)
+    .withRequest(method, url)
+  return wire.hint !== undefined ? err.withHint(wire.hint) : err
+}
+
+export function classifyTransportError(err: unknown): BaseError {
+  const message = err instanceof Error ? 
err.message : String(err) + const sanitized = redactBearer(message) + + if (err instanceof Error && err.name === 'TimeoutError') + return newError(ErrorCode.NetworkTimeout, 'request timed out').wrap(err) + if (err instanceof Error && err.name === 'AbortError') + return newError(ErrorCode.NetworkTimeout, 'request aborted').wrap(err) + if (sanitized.toLowerCase().includes('econnrefused')) + return newError(ErrorCode.NetworkDns, 'connection refused').wrap(err) + if (sanitized.toLowerCase().includes('enotfound')) + return newError(ErrorCode.NetworkDns, 'host lookup failed').wrap(err) + if (sanitized.toLowerCase().includes('etimedout')) + return newError(ErrorCode.NetworkTimeout, 'connection timed out').wrap(err) + + return newError(ErrorCode.Unknown, sanitized).wrap(err) +} diff --git a/cli/src/http/middleware/auth.ts b/cli/src/http/middleware/auth.ts new file mode 100644 index 0000000000..c9abace468 --- /dev/null +++ b/cli/src/http/middleware/auth.ts @@ -0,0 +1,10 @@ +import type { BeforeRequestHook } from 'ky' + +export function applyBearer(token: string | undefined): BeforeRequestHook { + return ({ request }) => { + if (token === undefined || token === '') + return + if (!request.headers.has('authorization')) + request.headers.set('authorization', `Bearer ${token}`) + } +} diff --git a/cli/src/http/middleware/request-logger.ts b/cli/src/http/middleware/request-logger.ts new file mode 100644 index 0000000000..8fd31d3d81 --- /dev/null +++ b/cli/src/http/middleware/request-logger.ts @@ -0,0 +1,30 @@ +import type { BeforeRequestHook, BeforeRetryHook } from 'ky' +import type { HttpLogger } from '../types.js' +import { redactBearer } from '../sanitize.js' + +const START_TIME = Symbol('difyctl-http-start') + +type Timed = { [START_TIME]?: number } + +export function logBeforeRequest(logger: HttpLogger | undefined): BeforeRequestHook { + if (logger === undefined) + return () => undefined + return ({ request }) => { + const safeUrl = redactBearer(request.url) + ;(request as 
unknown as Timed)[START_TIME] = performance.now()
+    logger({ phase: 'request', method: request.method, url: safeUrl })
+  }
+}
+
+export function logBeforeRetry(logger: HttpLogger | undefined): BeforeRetryHook {
+  if (logger === undefined)
+    return () => undefined
+  return ({ request, retryCount }) => {
+    logger({
+      phase: 'retry',
+      method: request.method,
+      url: redactBearer(request.url),
+      attempt: retryCount,
+    })
+  }
+}
diff --git a/cli/src/http/middleware/user-agent.ts b/cli/src/http/middleware/user-agent.ts
new file mode 100644
index 0000000000..a6ab540924
--- /dev/null
+++ b/cli/src/http/middleware/user-agent.ts
@@ -0,0 +1,8 @@
+import type { BeforeRequestHook } from 'ky'
+
+export function applyUserAgent(value: string): BeforeRequestHook {
+  return ({ request }) => {
+    if (!request.headers.has('user-agent'))
+      request.headers.set('user-agent', value)
+  }
+}
diff --git a/cli/src/http/sanitize.ts b/cli/src/http/sanitize.ts
new file mode 100644
index 0000000000..791cc7627b
--- /dev/null
+++ b/cli/src/http/sanitize.ts
@@ -0,0 +1,5 @@
+const BEARER_PATTERN = /Bearer\s+([\w.~+/=-]+)/g
+
+export function redactBearer(input: string): string {
+  return input.replace(BEARER_PATTERN, 'Bearer [redacted]')
+}
diff --git a/cli/src/http/sse-dify.test.ts b/cli/src/http/sse-dify.test.ts
new file mode 100644
index 0000000000..b0823ff4bb
--- /dev/null
+++ b/cli/src/http/sse-dify.test.ts
@@ -0,0 +1,95 @@
+import type { SseEvent } from './sse.js'
+import { describe, expect, it } from 'vitest'
+import { eventNameFromDifyData, normalizeDifyStream } from './sse-dify.js'
+
+const enc = new TextEncoder()
+
+function bytes(s: string): Uint8Array {
+  return enc.encode(s)
+}
+
+async function* fromArray(events: SseEvent[]): AsyncGenerator<SseEvent> {
+  for (const ev of events)
+    yield ev
+}
+
+async function collect(iter: AsyncIterable<SseEvent>): Promise<SseEvent[]> {
+  const out: SseEvent[] = []
+  for await (const ev of iter)
+    out.push(ev)
+  return out
+}
+
+describe('eventNameFromDifyData', () => {
+  it('returns 
empty string for zero-byte data', () => { + expect(eventNameFromDifyData(new Uint8Array())).toBe('') + }) + + it('returns embedded event name for object payload', () => { + expect(eventNameFromDifyData(bytes('{"event":"message","answer":"hi"}'))).toBe('message') + }) + + it('returns empty string for malformed JSON', () => { + expect(eventNameFromDifyData(bytes('not-json'))).toBe('') + }) + + it('returns empty string for non-string event field', () => { + expect(eventNameFromDifyData(bytes('{"event":42}'))).toBe('') + }) + + it('returns empty string for null payload', () => { + expect(eventNameFromDifyData(bytes('null'))).toBe('') + }) + + it('returns empty string for non-object JSON values', () => { + expect(eventNameFromDifyData(bytes('"just a string"'))).toBe('') + expect(eventNameFromDifyData(bytes('123'))).toBe('') + expect(eventNameFromDifyData(bytes('true'))).toBe('') + }) + + it('returns empty string for object missing event key', () => { + expect(eventNameFromDifyData(bytes('{"answer":"hi"}'))).toBe('') + }) +}) + +describe('normalizeDifyStream', () => { + it('promotes JSON event field into ev.name when transport name absent', async () => { + const out = await collect(normalizeDifyStream(fromArray([ + { name: '', data: bytes('{"event":"workflow_started","id":"wf-1"}') }, + { name: '', data: bytes('{"event":"workflow_finished","status":"succeeded"}') }, + ]))) + expect(out.map(e => e.name)).toEqual(['workflow_started', 'workflow_finished']) + }) + + it('preserves transport-level event name over JSON event field', async () => { + const out = await collect(normalizeDifyStream(fromArray([ + { name: 'ping', data: bytes('') }, + { name: 'foo', data: bytes('{"event":"bar"}') }, + ]))) + expect(out.map(e => e.name)).toEqual(['ping', 'foo']) + }) + + it('forwards unchanged when ev.name absent and data has no JSON event field', async () => { + const ev: SseEvent = { name: '', data: bytes('{"answer":"hi"}') } + const out = await 
collect(normalizeDifyStream(fromArray([ev]))) + expect(out).toHaveLength(1) + expect(out[0].name).toBe('') + expect(out[0].data).toBe(ev.data) + }) + + it('forwards unchanged when data is malformed JSON', async () => { + const out = await collect(normalizeDifyStream(fromArray([ + { name: '', data: bytes('not-json') }, + ]))) + expect(out).toHaveLength(1) + expect(out[0].name).toBe('') + }) + + it('forwards empty-data events with empty name', async () => { + const out = await collect(normalizeDifyStream(fromArray([ + { name: '', data: bytes('') }, + ]))) + expect(out).toHaveLength(1) + expect(out[0].name).toBe('') + }) +}) diff --git a/cli/src/http/sse-dify.ts b/cli/src/http/sse-dify.ts new file mode 100644 index 0000000000..a26499fa1a --- /dev/null +++ b/cli/src/http/sse-dify.ts @@ -0,0 +1,36 @@ +import type { SseEvent } from './sse.js' + +const dec = new TextDecoder() + +export function eventNameFromDifyData(data: Uint8Array): string { + if (data.byteLength === 0) + return '' + try { + const obj = JSON.parse(dec.decode(data)) as unknown + if (obj === null || typeof obj !== 'object') + return '' + const evt = (obj as { event?: unknown }).event + return typeof evt === 'string' ? evt : '' + } + catch { + return '' + } +} + +// Dify always sends JSON-encoded SSE data. Most endpoints embed the event +// name in the JSON `event` field rather than emitting a transport-level +// `event:` line. This adapter promotes the embedded name into `ev.name` +// so consumers can dispatch uniformly. Transport-level `event:` lines win +// when both are present, preserving compatibility with `event: ping`. +export async function* normalizeDifyStream( + iter: AsyncIterable, +): AsyncGenerator { + for await (const ev of iter) { + if (ev.name !== '') { + yield ev + continue + } + const name = eventNameFromDifyData(ev.data) + yield name === '' ? 
ev : { ...ev, name } + } +} diff --git a/cli/src/http/sse.test.ts b/cli/src/http/sse.test.ts new file mode 100644 index 0000000000..03d5879e13 --- /dev/null +++ b/cli/src/http/sse.test.ts @@ -0,0 +1,90 @@ +import { describe, expect, it } from 'vitest' +import { parseSSE } from './sse.js' + +function streamOf(...chunks: string[]): ReadableStream { + const enc = new TextEncoder() + return new ReadableStream({ + start(c) { + for (const ch of chunks) c.enqueue(enc.encode(ch)) + c.close() + }, + }) +} + +async function collect(s: ReadableStream): Promise<{ name: string, data: string, id?: string }[]> { + const out: { name: string, data: string, id?: string }[] = [] + const dec = new TextDecoder() + for await (const ev of parseSSE(s)) + out.push({ name: ev.name, data: dec.decode(ev.data), id: ev.id }) + return out +} + +describe('parseSSE', () => { + it('emits one event per blank-line-terminated record', async () => { + const s = streamOf('event: message\ndata: hello\n\nevent: ping\ndata: \n\n') + const got = await collect(s) + expect(got).toEqual([ + { name: 'message', data: 'hello', id: undefined }, + { name: 'ping', data: '', id: undefined }, + ]) + }) + + it('joins multi-line data with newlines', async () => { + const s = streamOf('event: message\ndata: line1\ndata: line2\n\n') + const got = await collect(s) + expect(got[0]?.data).toBe('line1\nline2') + }) + + it('propagates id field', async () => { + const s = streamOf('id: 42\nevent: m\ndata: x\n\n') + expect((await collect(s))[0]?.id).toBe('42') + }) + + it('skips comment lines', async () => { + const s = streamOf(': comment\nevent: m\ndata: x\n\n') + expect(await collect(s)).toEqual([{ name: 'm', data: 'x', id: undefined }]) + }) + + it('survives chunk boundaries inside a field', async () => { + const s = streamOf('event: mes', 'sage\nda', 'ta: hel', 'lo\n\n') + expect((await collect(s))[0]).toEqual({ name: 'message', data: 'hello', id: undefined }) + }) + + it('handles multi-byte utf-8 split across chunks', 
async () => {
+    const enc = new TextEncoder().encode('event: m\ndata: 😀\n\n')
+    const a = enc.slice(0, 14)
+    const b = enc.slice(14)
+    const s = new ReadableStream({
+      start(c) {
+        c.enqueue(a)
+        c.enqueue(b)
+        c.close()
+      },
+    })
+    expect((await collect(s))[0]?.data).toBe('😀')
+  })
+
+  it('aborts when signal fires', async () => {
+    const ctrl = new AbortController()
+    const slow = new ReadableStream({
+      pull(c) {
+        c.enqueue(new TextEncoder().encode('event: m\ndata: x\n\n'))
+      },
+    })
+    let seen = 0
+    let caught: unknown
+    try {
+      for await (const _ of parseSSE(slow, ctrl.signal)) {
+        seen++
+        if (seen === 1)
+          ctrl.abort()
+      }
+    }
+    catch (e) {
+      caught = e
+    }
+    expect(seen).toBeGreaterThanOrEqual(1)
+    expect(seen).toBeLessThan(50)
+    expect((caught as Error).name).toBe('AbortError')
+  })
+})
diff --git a/cli/src/http/sse.ts b/cli/src/http/sse.ts
new file mode 100644
index 0000000000..5af7692e42
--- /dev/null
+++ b/cli/src/http/sse.ts
@@ -0,0 +1,107 @@
+import { createParser } from 'eventsource-parser'
+
+export type SseEvent = {
+  name: string
+  data: Uint8Array
+  id?: string
+}
+
+export async function* parseSSE(
+  body: ReadableStream<Uint8Array>,
+  signal?: AbortSignal,
+): AsyncGenerator<SseEvent> {
+  const queue: SseEvent[] = []
+  let resolveNext: (() => void) | undefined
+  let pendingWake = false
+  let done = false
+
+  const wake = (): void => {
+    pendingWake = true
+    if (resolveNext !== undefined) {
+      const r = resolveNext
+      resolveNext = undefined
+      r()
+    }
+  }
+
+  const enc = new TextEncoder()
+  const parser = createParser({
+    onEvent(ev) {
+      queue.push({
+        name: ev.event ?? '',
+        data: enc.encode(ev.data),
+        id: ev.id,
+      })
+      wake()
+    },
+  })
+
+  const reader = body.getReader()
+  const dec = new TextDecoder('utf-8')
+
+  const onAbort = (): void => {
+    reader.cancel().catch(() => {})
+  }
+  if (signal !== undefined) {
+    if (signal.aborted)
+      onAbort()
+    else
+      signal.addEventListener('abort', onAbort, { once: true })
+  }
+
+  const pump = (async () => {
+    try {
+      while (true) {
+        if (signal?.aborted) {
+          const e = new Error('aborted')
+          e.name = 'AbortError'
+          throw e
+        }
+        const { value, done: rDone } = await reader.read()
+        if (rDone)
+          break
+        parser.feed(dec.decode(value, { stream: true }))
+      }
+    }
+    finally {
+      done = true
+      wake()
+      try {
+        reader.releaseLock()
+      }
+      catch {}
+    }
+  })()
+
+  try {
+    while (true) {
+      while (queue.length > 0) {
+        const ev = queue.shift()
+        if (ev !== undefined)
+          yield ev
+      }
+      if (done) {
+        await pump
+        return
+      }
+      if (pendingWake) {
+        pendingWake = false
+        continue
+      }
+      await new Promise<void>((res) => {
+        resolveNext = res
+      })
+      pendingWake = false
+    }
+  }
+  finally {
+    if (signal !== undefined)
+      signal.removeEventListener('abort', onAbort)
+    if (!done) {
+      try {
+        await reader.cancel()
+      }
+      catch {}
+    }
+  }
+}
diff --git a/cli/src/http/types.ts b/cli/src/http/types.ts
new file mode 100644
index 0000000000..c83749acb0
--- /dev/null
+++ b/cli/src/http/types.ts
@@ -0,0 +1,21 @@
+export type HttpLogPhase = 'request' | 'response' | 'retry'
+
+export type HttpLogEvent = {
+  readonly phase: HttpLogPhase
+  readonly method: string
+  readonly url: string
+  readonly status?: number
+  readonly attempt?: number
+  readonly durationMs?: number
+}
+
+export type HttpLogger = (event: HttpLogEvent) => void
+
+export type HttpFactoryOptions = {
+  readonly host: string
+  readonly bearer?: string
+  readonly timeoutMs?: number
+  readonly retryAttempts?: number
+  readonly userAgent?: string
+  readonly logger?: HttpLogger
+}
diff --git a/cli/src/index.ts b/cli/src/index.ts
new file mode 100644
index 0000000000..ae7b924248 
--- /dev/null +++ b/cli/src/index.ts @@ -0,0 +1,3 @@ +export { longVersion, shortVersion, userAgent, versionInfo } from './version/info.js' +export type { Channel, VersionInfo } from './version/info.js' +export { run } from '@oclif/core' diff --git a/cli/src/io/color.ts b/cli/src/io/color.ts new file mode 100644 index 0000000000..89a4186a52 --- /dev/null +++ b/cli/src/io/color.ts @@ -0,0 +1,33 @@ +import pc from 'picocolors' + +export type ColorScheme = { + bold: (s: string) => string + successIcon: () => string + warningIcon: () => string + failureIcon: () => string +} + +export function colorScheme(enabled: boolean): ColorScheme { + if (!enabled) { + return { + bold: s => s, + successIcon: () => '✓', + warningIcon: () => '!', + failureIcon: () => '✗', + } + } + return { + bold: s => pc.bold(s), + successIcon: () => pc.green('✓'), + warningIcon: () => pc.yellow('!'), + failureIcon: () => pc.red('✗'), + } +} + +export function colorEnabled(isTTY: boolean): boolean { + if (process.env.NO_COLOR !== undefined && process.env.NO_COLOR !== '') + return false + if (process.env.DIFYCTL_NO_COLOR !== undefined && process.env.DIFYCTL_NO_COLOR !== '') + return false + return isTTY +} diff --git a/cli/src/io/spinner.ts b/cli/src/io/spinner.ts new file mode 100644 index 0000000000..0b74f9df0d --- /dev/null +++ b/cli/src/io/spinner.ts @@ -0,0 +1,79 @@ +import type { IOStreams } from './streams.js' +import oraImport from 'ora' + +const DIFY_FRAMES = ['Dify', 'dIfy', 'diFy', 'difY', 'diFy', 'dIfy'] +const DIFY_BLUE_RGB = '\x1B[38;2;0;51;255m' +const DIFY_BLUE_256 = '\x1B[38;5;27m' +const DIM = '\x1B[2m' +const ANSI_RESET = '\x1B[0m' + +export type SpinnerStyle = 'dify' | 'dify-dim' + +function colorize(s: string, style: SpinnerStyle, truecolor: boolean): string { + if (style === 'dify-dim') + return `${DIM}${s}${ANSI_RESET}` + return `${truecolor ? 
DIFY_BLUE_RGB : DIFY_BLUE_256}${s}${ANSI_RESET}`
+}
+
+function detectTruecolor(env: NodeJS.ProcessEnv): boolean {
+  const v = env.COLORTERM ?? ''
+  return v === 'truecolor' || v === '24bit'
+}
+
+const STRUCTURED_FORMATS = new Set(['json', 'yaml', 'name'])
+
+export type SpinnerOptions = {
+  readonly io: IOStreams
+  readonly label: string
+  readonly enabled?: boolean
+  readonly style?: SpinnerStyle
+  readonly minDisplayMs?: number
+  readonly env?: NodeJS.ProcessEnv
+}
+
+const DEFAULT_MIN_DISPLAY_MS = 600
+
+function sleep(ms: number): Promise<void> {
+  return new Promise<void>(resolve => setTimeout(resolve, ms))
+}
+
+export async function runWithSpinner<T>(
+  opts: SpinnerOptions,
+  fn: () => Promise<T>,
+): Promise<T> {
+  const env = opts.env ?? process.env
+  const spinnerEnabled = opts.enabled ?? !STRUCTURED_FORMATS.has(opts.io.outputFormat)
+  const active = spinnerEnabled && opts.io.isErrTTY
+  if (!active)
+    return fn()
+
+  const truecolor = detectTruecolor(env)
+  const style = opts.style ?? 'dify'
+  const frames = DIFY_FRAMES.map(f => colorize(f, style, truecolor))
+  const minMs = opts.minDisplayMs ?? 
DEFAULT_MIN_DISPLAY_MS + const start = Date.now() + const spinner = oraImport({ + text: opts.label, + stream: opts.io.err as NodeJS.WriteStream, + spinner: { frames, interval: 140 }, + discardStdin: false, + }).start() + + const enforceMin = async () => { + const remaining = minMs - (Date.now() - start) + if (remaining > 0) + await sleep(remaining) + } + + try { + const result = await fn() + await enforceMin() + spinner.succeed(opts.label) + return result + } + catch (err) { + await enforceMin() + spinner.fail(opts.label) + throw err + } +} diff --git a/cli/src/io/streams.ts b/cli/src/io/streams.ts new file mode 100644 index 0000000000..a51f630f62 --- /dev/null +++ b/cli/src/io/streams.ts @@ -0,0 +1,61 @@ +import { Buffer } from 'node:buffer' +import { PassThrough, Readable, Writable } from 'node:stream' + +export type IOStreams = { + out: NodeJS.WritableStream + err: NodeJS.WritableStream + in: NodeJS.ReadableStream + isOutTTY: boolean + isErrTTY: boolean + outputFormat: string +} + +export function nullStreams(): IOStreams { + return bufferStreams() +} + +export function realStreams(outputFormat = ''): IOStreams { + return { + out: process.stdout, + err: process.stderr, + in: process.stdin, + isOutTTY: Boolean(process.stdout.isTTY), + isErrTTY: Boolean(process.stderr.isTTY), + outputFormat, + } +} + +export type BufferStreams = IOStreams & { + outBuf: () => string + errBuf: () => string +} + +export function bufferStreams(stdin = ''): BufferStreams { + const outChunks: Buffer[] = [] + const errChunks: Buffer[] = [] + const out = new Writable({ + write(chunk, _enc, cb) { + outChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk))) + cb() + }, + }) as unknown as NodeJS.WritableStream + const err = new Writable({ + write(chunk, _enc, cb) { + errChunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk))) + cb() + }, + }) as unknown as NodeJS.WritableStream + const inStream: NodeJS.ReadableStream = stdin === '' + ? 
new PassThrough() + : Readable.from([stdin]) + return { + out, + err, + in: inStream, + isOutTTY: false, + isErrTTY: false, + outputFormat: '', + outBuf: () => Buffer.concat(outChunks).toString('utf8'), + errBuf: () => Buffer.concat(errChunks).toString('utf8'), + } +} diff --git a/cli/src/limit/limit.test.ts b/cli/src/limit/limit.test.ts new file mode 100644 index 0000000000..b8f377be25 --- /dev/null +++ b/cli/src/limit/limit.test.ts @@ -0,0 +1,64 @@ +import { describe, expect, it } from 'vitest' +import { isBaseError } from '../errors/base.js' +import { ExitCode } from '../errors/codes.js' +import { LIMIT_DEFAULT, LIMIT_MAX, LIMIT_MIN, parseLimit } from './limit.js' + +describe('limit', () => { + it('constants match Go original', () => { + expect(LIMIT_MIN).toBe(1) + expect(LIMIT_MAX).toBe(200) + expect(LIMIT_DEFAULT).toBe(20) + }) + + it.each([1, 20, 50, 200])('accepts %d', (n) => { + expect(parseLimit(String(n), '--limit')).toBe(n) + }) + + it.each([0, -1, 201, 1000])('rejects %d as out of range', (n) => { + let err: unknown + try { + parseLimit(String(n), '--limit') + } + catch (e) { + err = e + } + expect(isBaseError(err)).toBe(true) + expect((err as { code: string }).code).toBe('usage_invalid_flag') + expect((err as { exit: () => number }).exit()).toBe(ExitCode.Usage) + expect((err as Error).message).toMatch(/out of range/) + }) + + it('rejects non-numeric with typed UsageInvalidFlag', () => { + let err: unknown + try { + parseLimit('abc', '--limit') + } + catch (e) { + err = e + } + expect(isBaseError(err)).toBe(true) + expect((err as { code: string }).code).toBe('usage_invalid_flag') + expect((err as Error).message).toMatch(/not a number/) + }) + + it('rejects empty string with typed UsageInvalidFlag', () => { + let err: unknown + try { + parseLimit('', '--limit') + } + catch (e) { + err = e + } + expect(isBaseError(err)).toBe(true) + expect((err as { code: string }).code).toBe('usage_invalid_flag') + }) + + it('rejects floats (mirroring Go strconv.Atoi 
behaviour)', () => { + expect(() => parseLimit('1.5', '--limit')).toThrow(/not a number/) + }) + + it('error message names the source knob', () => { + expect(() => parseLimit('999', 'DIFY_LIMIT')).toThrow(/DIFY_LIMIT/) + expect(() => parseLimit('999', 'defaults.limit')).toThrow(/defaults\.limit/) + }) +}) diff --git a/cli/src/limit/limit.ts b/cli/src/limit/limit.ts new file mode 100644 index 0000000000..91bdd204bf --- /dev/null +++ b/cli/src/limit/limit.ts @@ -0,0 +1,25 @@ +import { newError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' + +export const LIMIT_MIN = 1 +export const LIMIT_MAX = 200 +export const LIMIT_DEFAULT = 20 + +const INTEGER_PATTERN = /^-?\d+$/ + +export function parseLimit(raw: string, source: string): number { + if (!INTEGER_PATTERN.test(raw)) { + throw newError( + ErrorCode.UsageInvalidFlag, + `${source}: ${JSON.stringify(raw)} is not a number`, + ) + } + const n = Number(raw) + if (n < LIMIT_MIN || n > LIMIT_MAX) { + throw newError( + ErrorCode.UsageInvalidFlag, + `${source}: ${n} out of range [${LIMIT_MIN}..${LIMIT_MAX}]`, + ) + } + return n +} diff --git a/cli/src/printers/format-json-yaml.test.ts b/cli/src/printers/format-json-yaml.test.ts new file mode 100644 index 0000000000..d37fd39652 --- /dev/null +++ b/cli/src/printers/format-json-yaml.test.ts @@ -0,0 +1,59 @@ +import { describe, expect, it } from 'vitest' +import { JsonYamlPrintFlags } from './format-json-yaml.js' +import { isNoCompatiblePrinter } from './printer.js' + +describe('JsonYamlPrintFlags.allowedFormats', () => { + it('returns json + yaml', () => { + expect(new JsonYamlPrintFlags().allowedFormats()).toEqual(['json', 'yaml']) + }) +}) + +describe('JsonYamlPrintFlags.toPrinter', () => { + it('throws NoCompatiblePrinterError for unsupported formats', () => { + const pf = new JsonYamlPrintFlags() + for (const f of ['', 'text', 'wide', 'name', 'xml']) { + let caught: unknown + try { + pf.toPrinter(f) + } + catch (e) { + caught = e + } + 
expect(isNoCompatiblePrinter(caught)).toBe(true) + } + }) + + it('returns a json printer that encodes raw payload with 2-space indent', () => { + const p = new JsonYamlPrintFlags().toPrinter('json') + const out = p.print({ raw: () => ({ answer: 'hi' }) }) + expect(out).toContain('"answer"') + expect(out).toContain('"hi"') + expect(out).toContain(' "answer"') + expect(out.endsWith('\n')).toBe(true) + }) + + it('json printer round-trips a plain object with no Raw()', () => { + const p = new JsonYamlPrintFlags().toPrinter('json') + const out = p.print({ k: 'v', n: 1 }) + expect(JSON.parse(out)).toEqual({ k: 'v', n: 1 }) + }) + + it('json printer is lossless for nested arrays', () => { + const data = { items: [{ id: 'a' }, { id: 'b' }] } + const out = new JsonYamlPrintFlags().toPrinter('json').print(data) + expect(JSON.parse(out)).toEqual(data) + }) + + it('returns a yaml printer that emits scalar pairs', () => { + const p = new JsonYamlPrintFlags().toPrinter('yaml') + const out = p.print({ raw: () => ({ answer: 'hi' }) }) + expect(out).toMatch(/answer:\s*['"]?hi['"]?\n?/) + }) + + it('yaml printer round-trips structured data', async () => { + const yaml = await import('js-yaml') + const data = { items: [{ id: 'a', mode: 'chat' }, { id: 'b', mode: 'workflow' }] } + const out = new JsonYamlPrintFlags().toPrinter('yaml').print(data) + expect(yaml.load(out)).toEqual(data) + }) +}) diff --git a/cli/src/printers/format-json-yaml.ts b/cli/src/printers/format-json-yaml.ts new file mode 100644 index 0000000000..b6d895b753 --- /dev/null +++ b/cli/src/printers/format-json-yaml.ts @@ -0,0 +1,31 @@ +import type { Printer, PrintFlags } from './printer.js' +import yaml from 'js-yaml' +import { NoCompatiblePrinterError, payload } from './printer.js' + +const ALLOWED = ['json', 'yaml'] as const + +const jsonPrinter: Printer = { + print(obj) { + return `${JSON.stringify(payload(obj), null, 2)}\n` + }, +} + +const yamlPrinter: Printer = { + print(obj) { + return yaml.dump(payload(obj), 
{ indent: 2, lineWidth: -1 }) + }, +} + +export class JsonYamlPrintFlags implements PrintFlags { + allowedFormats(): readonly string[] { + return ALLOWED + } + + toPrinter(format: string): Printer { + switch (format) { + case 'json': return jsonPrinter + case 'yaml': return yamlPrinter + default: throw new NoCompatiblePrinterError(format, ALLOWED) + } + } +} diff --git a/cli/src/printers/format-name.test.ts b/cli/src/printers/format-name.test.ts new file mode 100644 index 0000000000..c4a6ff0df0 --- /dev/null +++ b/cli/src/printers/format-name.test.ts @@ -0,0 +1,76 @@ +import { describe, expect, it } from 'vitest' +import { NamePrintFlags } from './format-name.js' +import { isNoCompatiblePrinter } from './printer.js' + +const fakeMode = (m: string) => ({ mode: () => m }) + +describe('NamePrintFlags.allowedFormats', () => { + it('returns ["name"]', () => { + expect(new NamePrintFlags().allowedFormats()).toEqual(['name']) + }) +}) + +describe('NamePrintFlags.toPrinter', () => { + it('throws NoCompatiblePrinterError for non-name formats', () => { + const pf = new NamePrintFlags() + let caught: unknown + try { + pf.toPrinter('json') + } + catch (e) { + caught = e + } + expect(isNoCompatiblePrinter(caught)).toBe(true) + }) + + it('prints id + newline for the registered mode', () => { + const pf = new NamePrintFlags() + pf.register({ id: () => 'abc-123' }, 'thing') + expect(pf.toPrinter('name').print(fakeMode('thing'))).toBe('abc-123\n') + }) + + it('appends operation suffix when set', () => { + const pf = new NamePrintFlags() + pf.operation = 'created' + pf.register({ id: () => 'abc' }, 'thing') + expect(pf.toPrinter('name').print(fakeMode('thing'))).toBe('abc created\n') + }) + + it('throws when payload mode has no registered handler', () => { + const pf = new NamePrintFlags() + pf.register({ id: () => 'abc' }, 'thing') + const printer = pf.toPrinter('name') + expect(() => printer.print(fakeMode('other'))).toThrow(/no handler for mode/) + }) + + it('throws when payload 
does not implement Moder', () => { + const pf = new NamePrintFlags() + pf.register({ id: () => 'abc' }, 'thing') + const printer = pf.toPrinter('name') + expect(() => printer.print({ no: 'mode' })).toThrow(/does not implement Moder/i) + }) + + it('register accepts multiple keys for the same handler', () => { + const pf = new NamePrintFlags() + pf.register({ id: () => 'shared' }, 'a', 'b') + const printer = pf.toPrinter('name') + expect(printer.print(fakeMode('a'))).toBe('shared\n') + expect(printer.print(fakeMode('b'))).toBe('shared\n') + }) + + it('unwraps RawObject before passing payload to handler', () => { + const pf = new NamePrintFlags() + let received: unknown + pf.register({ + id: (p) => { + received = p + return 'ok' + }, + }, 'thing') + pf.toPrinter('name').print({ + mode: () => 'thing', + raw: () => ({ id: 'unwrapped' }), + }) + expect(received).toEqual({ id: 'unwrapped' }) + }) +}) diff --git a/cli/src/printers/format-name.ts b/cli/src/printers/format-name.ts new file mode 100644 index 0000000000..b6ba448ced --- /dev/null +++ b/cli/src/printers/format-name.ts @@ -0,0 +1,42 @@ +import type { Printer, PrintFlags } from './printer.js' +import { isModer, NoCompatiblePrinterError, payload } from './printer.js' + +const ALLOWED = ['name'] as const + +export type NameHandler = { + id: (raw: unknown) => string +} + +export class NamePrintFlags implements PrintFlags { + operation = '' + private readonly handlers = new Map() + + register(handler: NameHandler, ...keys: string[]): void { + for (const k of keys) this.handlers.set(k, handler) + } + + allowedFormats(): readonly string[] { + return ALLOWED + } + + toPrinter(format: string): Printer { + if (format !== 'name') + throw new NoCompatiblePrinterError(format, ALLOWED) + const handlers = this.handlers + const operation = this.operation + return { + print(obj) { + if (!isModer(obj)) + throw new Error(`name printer: payload does not implement Moder`) + const mode = obj.mode() + const h = handlers.get(mode) + if 
(h === undefined) { + const known = [...handlers.keys()].sort().join(', ') + throw new Error(`name printer: no handler for mode "${mode}" (registered: ${known})`) + } + const id = h.id(payload(obj)) + return operation === '' ? `${id}\n` : `${id} ${operation}\n` + }, + } + } +} diff --git a/cli/src/printers/format-table.test.ts b/cli/src/printers/format-table.test.ts new file mode 100644 index 0000000000..2652f4b0f8 --- /dev/null +++ b/cli/src/printers/format-table.test.ts @@ -0,0 +1,136 @@ +import type { TableColumn, TableHandler } from './format-table.js' +import { describe, expect, it } from 'vitest' +import { TablePrintFlags } from './format-table.js' +import { isNoCompatiblePrinter } from './printer.js' + +const fakeMode = (m: string) => ({ mode: () => m }) + +const handler: TableHandler = { + columns(): readonly TableColumn[] { + return [ + { name: 'NAME', priority: 0 }, + { name: 'AGE', priority: 0 }, + { name: 'DETAILS', priority: 1 }, + ] + }, + rows() { + return [['alpha', '1d', 'extra']] + }, +} + +describe('TablePrintFlags.allowedFormats', () => { + it('returns ["", "wide"]', () => { + expect(new TablePrintFlags().allowedFormats()).toEqual(['', 'wide']) + }) +}) + +describe('TablePrintFlags default format', () => { + it('hides priority>0 columns and their cells', () => { + const pf = new TablePrintFlags() + pf.register(handler, 'thing') + const out = pf.toPrinter('').print(fakeMode('thing')) + expect(out).toContain('NAME') + expect(out).toContain('AGE') + expect(out).not.toContain('DETAILS') + expect(out).not.toContain('extra') + expect(out).toContain('alpha') + }) + + it('column-aligns cells with two-space padding', () => { + const pf = new TablePrintFlags() + pf.register({ + columns: () => [ + { name: 'NAME', priority: 0 }, + { name: 'AGE', priority: 0 }, + ], + rows: () => [ + ['alpha', '1d'], + ['beta-long', '999d'], + ], + }, 'thing') + const out = pf.toPrinter('').print(fakeMode('thing')) + const lines = out.trimEnd().split('\n') + 
expect(lines).toHaveLength(3) + expect(lines[0]).toBe('NAME AGE') + expect(lines[1]).toBe('alpha 1d') + expect(lines[2]).toBe('beta-long 999d') + }) +}) + +describe('TablePrintFlags wide format', () => { + it('shows all columns including priority>0', () => { + const pf = new TablePrintFlags() + pf.register(handler, 'thing') + const out = pf.toPrinter('wide').print(fakeMode('thing')) + expect(out).toContain('DETAILS') + expect(out).toContain('extra') + }) +}) + +describe('TablePrintFlags noHeaders', () => { + it('omits header row when noHeaders=true', () => { + const pf = new TablePrintFlags({ noHeaders: true }) + pf.register(handler, 'thing') + const out = pf.toPrinter('').print(fakeMode('thing')) + expect(out).not.toContain('NAME') + expect(out).toContain('alpha') + }) +}) + +describe('TablePrintFlags errors', () => { + it('throws NoCompatiblePrinterError for unsupported formats', () => { + let caught: unknown + try { + new TablePrintFlags().toPrinter('json') + } + catch (e) { + caught = e + } + expect(isNoCompatiblePrinter(caught)).toBe(true) + }) + + it('throws on unregistered mode', () => { + const pf = new TablePrintFlags() + pf.register(handler, 'thing') + const printer = pf.toPrinter('') + expect(() => printer.print(fakeMode('other'))).toThrow(/other/) + }) + + it('throws when payload does not implement Moder', () => { + const pf = new TablePrintFlags() + pf.register(handler, 'thing') + expect(() => pf.toPrinter('').print({})).toThrow(/Moder/i) + }) + + it('handler rows() can return null/undefined cells safely (rendered empty)', () => { + const pf = new TablePrintFlags() + pf.register({ + columns: () => [{ name: 'A', priority: 0 }, { name: 'B', priority: 0 }], + rows: () => [['x', undefined], [null, 'y']], + }, 'thing') + const out = pf.toPrinter('').print(fakeMode('thing')) + const lines = out.trimEnd().split('\n') + expect(lines[0]).toBe('A B') + expect(lines[1]).toBe('x ') + expect(lines[2]).toBe(' y') + }) +}) + +describe('TablePrintFlags raw unwrap', () 
=> { + it('passes unwrapped payload to handler.rows()', () => { + let received: unknown + const pf = new TablePrintFlags() + pf.register({ + columns: () => [{ name: 'X', priority: 0 }], + rows: (p) => { + received = p + return [['ok']] + }, + }, 'thing') + pf.toPrinter('').print({ + mode: () => 'thing', + raw: () => ({ items: [{ id: 'x' }] }), + }) + expect(received).toEqual({ items: [{ id: 'x' }] }) + }) +}) diff --git a/cli/src/printers/format-table.ts b/cli/src/printers/format-table.ts new file mode 100644 index 0000000000..f37b92da1b --- /dev/null +++ b/cli/src/printers/format-table.ts @@ -0,0 +1,108 @@ +import type { Printer, PrintFlags } from './printer.js' +import { isModer, NoCompatiblePrinterError, payload } from './printer.js' + +const ALLOWED = ['', 'wide'] as const +const COLUMN_PADDING = 2 + +export type TableColumn = { + name: string + priority: number +} + +export type TableCell = string | null | undefined + +export type TableRow = readonly TableCell[] + +export type TableHandler = { + columns: () => readonly TableColumn[] + rows: (raw: unknown) => readonly TableRow[] +} + +export type TablePrintFlagsOptions = { + noHeaders?: boolean +} + +export class TablePrintFlags implements PrintFlags { + private readonly handlers = new Map() + private readonly noHeaders: boolean + + constructor(opts: TablePrintFlagsOptions = {}) { + this.noHeaders = opts.noHeaders ?? 
false + } + + register(handler: TableHandler, ...keys: string[]): void { + for (const k of keys) this.handlers.set(k, handler) + } + + allowedFormats(): readonly string[] { + return ALLOWED + } + + toPrinter(format: string): Printer { + if (format !== '' && format !== 'wide') + throw new NoCompatiblePrinterError(format, ALLOWED) + const wide = format === 'wide' + const handlers = this.handlers + const noHeaders = this.noHeaders + return { + print(obj) { + if (!isModer(obj)) + throw new Error('table printer: payload does not implement Moder') + const mode = obj.mode() + const handler = handlers.get(mode) + if (handler === undefined) { + const known = [...handlers.keys()].sort().join(', ') + throw new Error(`table printer: no handler for mode "${mode}" (registered: ${known})`) + } + const cols = handler.columns() + const keep: number[] = [] + for (let i = 0; i < cols.length; i++) { + const col = cols[i] + if (col !== undefined && (col.priority === 0 || wide)) + keep.push(i) + } + const rows = handler.rows(payload(obj)) + const stringRows: string[][] = rows.map(row => + keep.map((idx) => { + const cell = row[idx] + return cell === null || cell === undefined ? '' : String(cell) + }), + ) + const allRows: string[][] = noHeaders + ? stringRows + : [keep.map(i => cols[i]?.name ?? ''), ...stringRows] + return formatTable(allRows) + }, + } + } +} + +function formatTable(rows: readonly string[][]): string { + if (rows.length === 0) + return '' + const colCount = rows[0]?.length ?? 0 + const widths: number[] = Array.from({ length: colCount }, () => 0) + for (const row of rows) { + for (let i = 0; i < colCount; i++) { + const cell = row[i] ?? '' + if (cell.length > (widths[i] ?? 0)) + widths[i] = cell.length + } + } + const lines = rows.map((row) => { + const cells: string[] = [] + for (let i = 0; i < colCount; i++) { + const cell = row[i] ?? '' + const isLast = i === colCount - 1 + if (isLast) { + cells.push(cell) + } + else { + const pad = (widths[i] ?? 
0) - cell.length + COLUMN_PADDING + cells.push(cell + ' '.repeat(pad)) + } + } + return cells.join('') + }) + return `${lines.join('\n')}\n` +} diff --git a/cli/src/printers/format-text.test.ts b/cli/src/printers/format-text.test.ts new file mode 100644 index 0000000000..4563555b86 --- /dev/null +++ b/cli/src/printers/format-text.test.ts @@ -0,0 +1,21 @@ +import { describe, expect, it } from 'vitest' +import { TextPrintFlags } from './format-text.js' + +describe('TextPrintFlags', () => { + it('routes to handler by mode', () => { + const f = new TextPrintFlags() + f.register({ render: v => `chat:${(v as { x: string }).x}\n` }, 'chat') + f.register({ render: v => `wf:${(v as { y: string }).y}\n` }, 'workflow') + expect(f.toPrinter('').print({ mode: () => 'chat', raw: () => ({ x: '1' }) })).toBe('chat:1\n') + expect(f.toPrinter('text').print({ mode: () => 'workflow', raw: () => ({ y: '2' }) })).toBe('wf:2\n') + }) + + it('rejects unknown formats', () => { + expect(() => new TextPrintFlags().toPrinter('json')).toThrow(/not supported/) + }) + + it('errors on unregistered mode', () => { + const f = new TextPrintFlags() + expect(() => f.toPrinter('').print({ mode: () => 'agent', raw: () => ({}) })).toThrow(/no handler for mode/) + }) +}) diff --git a/cli/src/printers/format-text.ts b/cli/src/printers/format-text.ts new file mode 100644 index 0000000000..61aa1fd12d --- /dev/null +++ b/cli/src/printers/format-text.ts @@ -0,0 +1,39 @@ +import type { Printer, PrintFlags } from './printer.js' +import { isModer, NoCompatiblePrinterError, payload } from './printer.js' + +const ALLOWED = ['', 'text'] as const + +export type TextHandler = { + render: (raw: unknown) => string +} + +export class TextPrintFlags implements PrintFlags { + private readonly handlers = new Map() + + register(handler: TextHandler, ...keys: string[]): void { + for (const k of keys) this.handlers.set(k, handler) + } + + allowedFormats(): readonly string[] { + return ALLOWED + } + + toPrinter(format: string): 
Printer { + if (format !== '' && format !== 'text') + throw new NoCompatiblePrinterError(format, ALLOWED) + const handlers = this.handlers + return { + print(obj) { + if (!isModer(obj)) + throw new Error('text printer: payload does not implement Moder') + const mode = obj.mode() + const h = handlers.get(mode) + if (h === undefined) { + const known = [...handlers.keys()].sort().join(', ') + throw new Error(`text printer: no handler for mode "${mode}" (registered: ${known})`) + } + return h.render(payload(obj)) + }, + } + } +} diff --git a/cli/src/printers/printer.test.ts b/cli/src/printers/printer.test.ts new file mode 100644 index 0000000000..7a1cf346fb --- /dev/null +++ b/cli/src/printers/printer.test.ts @@ -0,0 +1,101 @@ +import { describe, expect, it } from 'vitest' +import { + isModer, + isNoCompatiblePrinter, + isRawObject, + NoCompatiblePrinterError, + payload, +} from './printer.js' + +describe('NoCompatiblePrinterError', () => { + it('mentions format and allowed list when allowed is non-empty', () => { + const err = new NoCompatiblePrinterError('xml', ['json', 'yaml']) + expect(err.message).toContain('xml') + expect(err.message).toContain('json') + expect(err.message).toContain('yaml') + }) + + it('mentions only format when allowed list is empty', () => { + const err = new NoCompatiblePrinterError('xml', []) + expect(err.message).toContain('xml') + expect(err.message).toContain('not supported') + expect(err.message).not.toContain('allowed') + }) + + it('exposes format and allowed publicly for callers that branch on them', () => { + const err = new NoCompatiblePrinterError('xml', ['json']) + expect(err.format).toBe('xml') + expect(err.allowed).toEqual(['json']) + }) + + it('has a stable name for serialization', () => { + const err = new NoCompatiblePrinterError('xml', []) + expect(err.name).toBe('NoCompatiblePrinterError') + }) +}) + +describe('isNoCompatiblePrinter', () => { + it('matches NoCompatiblePrinterError instances', () => { + 
expect(isNoCompatiblePrinter(new NoCompatiblePrinterError('xml', ['json']))).toBe(true) + }) + + it('does not match plain Error', () => { + expect(isNoCompatiblePrinter(new Error('other'))).toBe(false) + }) + + it('does not match a wrapped error message', () => { + expect(isNoCompatiblePrinter(new Error('wrapped: output format "xml" not supported'))).toBe(false) + }) + + it('does not match null/undefined/primitives', () => { + expect(isNoCompatiblePrinter(null)).toBe(false) + expect(isNoCompatiblePrinter(undefined)).toBe(false) + expect(isNoCompatiblePrinter('string')).toBe(false) + expect(isNoCompatiblePrinter(42)).toBe(false) + }) +}) + +describe('isRawObject', () => { + it('detects objects exposing raw()', () => { + expect(isRawObject({ raw: () => 42 })).toBe(true) + }) + + it('rejects values without raw()', () => { + expect(isRawObject({})).toBe(false) + expect(isRawObject(null)).toBe(false) + expect(isRawObject(undefined)).toBe(false) + expect(isRawObject(42)).toBe(false) + }) + + it('rejects objects where raw is not callable', () => { + expect(isRawObject({ raw: 42 })).toBe(false) + }) +}) + +describe('isModer', () => { + it('detects objects exposing mode()', () => { + expect(isModer({ mode: () => 'chat' })).toBe(true) + }) + + it('rejects values without mode()', () => { + expect(isModer({})).toBe(false) + expect(isModer(null)).toBe(false) + expect(isModer({ mode: 'chat' })).toBe(false) + }) +}) + +describe('payload', () => { + it('unwraps RawObject via raw()', () => { + expect(payload({ raw: () => ({ id: 'a' }) })).toEqual({ id: 'a' }) + }) + + it('returns the value as-is when it is not a RawObject', () => { + const obj = { id: 'a' } + expect(payload(obj)).toBe(obj) + }) + + it('returns primitives untouched', () => { + expect(payload(42)).toBe(42) + expect(payload(null)).toBeNull() + }) +}) diff --git a/cli/src/printers/printer.ts b/cli/src/printers/printer.ts new file mode 100644 index 0000000000..02b47db7fa --- /dev/null +++ b/cli/src/printers/printer.ts 
@@ -0,0 +1,82 @@ +export type Format = '' | 'wide' | 'json' | 'yaml' | 'name' + +export type Printer = { + print: (obj: unknown) => string +} + +export type RawObject = { + raw: () => unknown +} + +export type Moder = { + mode: () => string +} + +export type PrintFlags = { + allowedFormats: () => readonly string[] + toPrinter: (format: string) => Printer +} + +export class NoCompatiblePrinterError extends Error { + override readonly name = 'NoCompatiblePrinterError' + readonly format: string + readonly allowed: readonly string[] + + constructor(format: string, allowed: readonly string[]) { + super( + allowed.length === 0 + ? `output format ${JSON.stringify(format)} not supported` + : `output format ${JSON.stringify(format)} not supported, allowed: ${allowed.join(', ')}`, + ) + this.format = format + this.allowed = allowed + } +} + +export function isNoCompatiblePrinter(err: unknown): err is NoCompatiblePrinterError { + return err instanceof NoCompatiblePrinterError +} + +export abstract class CompositePrintFlags implements PrintFlags { + protected abstract families(): readonly PrintFlags[] + + allowedFormats(): readonly string[] { + const seen = new Set() + for (const fam of this.families()) { + for (const f of fam.allowedFormats()) { + if (f !== '') + seen.add(f) + } + } + return [...seen].sort() + } + + toPrinter(format: string): Printer { + for (const fam of this.families()) { + try { + return fam.toPrinter(format) + } + catch (err) { + if (!isNoCompatiblePrinter(err)) + throw err + } + } + throw new NoCompatiblePrinterError(format, this.allowedFormats()) + } +} + +export function isRawObject(v: unknown): v is RawObject { + return typeof v === 'object' + && v !== null + && typeof (v as { raw?: unknown }).raw === 'function' +} + +export function isModer(v: unknown): v is Moder { + return typeof v === 'object' + && v !== null + && typeof (v as { mode?: unknown }).mode === 'function' +} + +export function payload(obj: unknown): unknown { + return isRawObject(obj) ? 
obj.raw() : obj +} diff --git a/cli/src/printers/stream-printer.ts b/cli/src/printers/stream-printer.ts new file mode 100644 index 0000000000..fb19f1fc36 --- /dev/null +++ b/cli/src/printers/stream-printer.ts @@ -0,0 +1,6 @@ +import type { SseEvent } from '../http/sse.js' + +export type StreamPrinter = { + onEvent: (out: NodeJS.WritableStream, errOut: NodeJS.WritableStream, ev: SseEvent) => void + onEnd: (out: NodeJS.WritableStream, errOut: NodeJS.WritableStream) => void +} diff --git a/cli/src/printers/width.test.ts b/cli/src/printers/width.test.ts new file mode 100644 index 0000000000..a57e452488 --- /dev/null +++ b/cli/src/printers/width.test.ts @@ -0,0 +1,76 @@ +import { afterEach, beforeEach, describe, expect, it } from 'vitest' +import { TERMINAL_WIDTH_FALLBACK, terminalWidth, truncate } from './width.js' + +describe('truncate', () => { + it('returns the input unchanged when shorter than max', () => { + expect(truncate('hi', 5)).toBe('hi') + }) + + it('returns the input unchanged when exactly at max', () => { + expect(truncate('hello', 5)).toBe('hello') + }) + + it('truncates to max with single ellipsis char when longer', () => { + expect(truncate('hello world', 5)).toBe('hell…') + }) + + it('returns empty for empty input regardless of max', () => { + expect(truncate('', 5)).toBe('') + }) + + it('returns just the ellipsis when max is 1', () => { + expect(truncate('hello', 1)).toBe('…') + }) + + it('returns empty when max is 0', () => { + expect(truncate('hello', 0)).toBe('') + }) + + it('handles negative max gracefully', () => { + expect(truncate('hello', -3)).toBe('') + }) +}) + +describe('terminalWidth', () => { + let originalColumns: number | undefined + + beforeEach(() => { + originalColumns = process.stdout.columns + }) + + afterEach(() => { + Object.defineProperty(process.stdout, 'columns', { + value: originalColumns, + configurable: true, + writable: true, + }) + }) + + it('returns process.stdout.columns when present', () => { + 
Object.defineProperty(process.stdout, 'columns', { + value: 120, + configurable: true, + writable: true, + }) + expect(terminalWidth()).toBe(120) + }) + + it('falls back to 80 when columns is undefined', () => { + Object.defineProperty(process.stdout, 'columns', { + value: undefined, + configurable: true, + writable: true, + }) + expect(terminalWidth()).toBe(TERMINAL_WIDTH_FALLBACK) + expect(TERMINAL_WIDTH_FALLBACK).toBe(80) + }) + + it('falls back to 80 when columns is 0', () => { + Object.defineProperty(process.stdout, 'columns', { + value: 0, + configurable: true, + writable: true, + }) + expect(terminalWidth()).toBe(TERMINAL_WIDTH_FALLBACK) + }) +}) diff --git a/cli/src/printers/width.ts b/cli/src/printers/width.ts new file mode 100644 index 0000000000..e48af55a58 --- /dev/null +++ b/cli/src/printers/width.ts @@ -0,0 +1,17 @@ +export const TERMINAL_WIDTH_FALLBACK = 80 +const ELLIPSIS = '…' + +export function terminalWidth(): number { + const cols = process.stdout.columns + return typeof cols === 'number' && cols > 0 ? 
cols : TERMINAL_WIDTH_FALLBACK +} + +export function truncate(s: string, max: number): string { + if (s === '' || max <= 0) + return '' + if (s.length <= max) + return s + if (max === 1) + return ELLIPSIS + return s.slice(0, max - 1) + ELLIPSIS +} diff --git a/cli/src/types/account-session.ts b/cli/src/types/account-session.ts new file mode 100644 index 0000000000..3d421d4f28 --- /dev/null +++ b/cli/src/types/account-session.ts @@ -0,0 +1,21 @@ +import { z } from 'zod' + +export const SessionRowSchema = z.object({ + id: z.string(), + prefix: z.string().default(''), + client_id: z.string().default(''), + device_label: z.string().default(''), + created_at: z.string().nullable().default(''), + last_used_at: z.string().nullable().default(''), + expires_at: z.string().nullable().default(''), +}) +export type SessionRow = z.infer + +export const SessionListResponseSchema = z.object({ + page: z.number(), + limit: z.number(), + total: z.number(), + has_more: z.boolean(), + data: z.array(SessionRowSchema), +}) +export type SessionListResponse = z.infer diff --git a/cli/src/types/account.ts b/cli/src/types/account.ts new file mode 100644 index 0000000000..2a685dac97 --- /dev/null +++ b/cli/src/types/account.ts @@ -0,0 +1,25 @@ +import { z } from 'zod' + +export const AccountInfoSchema = z.object({ + id: z.string(), + email: z.string(), + name: z.string(), +}) +export type AccountInfo = z.infer + +export const AccountWorkspaceSchema = z.object({ + id: z.string(), + name: z.string(), + role: z.string().default(''), +}) +export type AccountWorkspace = z.infer + +export const AccountResponseSchema = z.object({ + subject_type: z.enum(['account', 'external_sso']), + subject_email: z.string().nullable().default(null), + subject_issuer: z.string().nullable().optional(), + account: AccountInfoSchema.nullable(), + workspaces: z.array(AccountWorkspaceSchema).default([]), + default_workspace_id: z.string().nullable().default(null), +}) +export type AccountResponse = z.infer diff --git 
a/cli/src/types/app-meta.test.ts b/cli/src/types/app-meta.test.ts new file mode 100644 index 0000000000..eb583b5f92 --- /dev/null +++ b/cli/src/types/app-meta.test.ts @@ -0,0 +1,57 @@ +import type { DescribeResponse } from './app.js' +import { describe, expect, it } from 'vitest' +import { covers, fromDescribe, mergeMeta } from './app-meta.js' +import { FieldInfo, FieldInputSchema, FieldParameters } from './app.js' + +function describeResp(): DescribeResponse { + return { + info: { + id: 'app-1', + name: 'Greeter', + description: '', + mode: 'chat', + author: 'tester', + tags: [], + updated_at: null, + service_api_enabled: false, + is_agent: false, + }, + parameters: { opening_statement: 'hi' }, + input_schema: null, + } +} + +describe('app-meta', () => { + it('fromDescribe with requested=[info] only marks info covered', () => { + const m = fromDescribe(describeResp(), [FieldInfo]) + expect(m.coveredFields.has(FieldInfo)).toBe(true) + expect(m.coveredFields.has(FieldParameters)).toBe(false) + expect(covers(m, [FieldInfo])).toBe(true) + expect(covers(m, [FieldParameters])).toBe(false) + }) + + it('fromDescribe with no fields marks all covered', () => { + const m = fromDescribe(describeResp(), []) + expect(m.coveredFields.has(FieldInfo)).toBe(true) + expect(m.coveredFields.has(FieldParameters)).toBe(true) + expect(m.coveredFields.has(FieldInputSchema)).toBe(true) + expect(covers(m, [])).toBe(true) + }) + + it('mergeMeta unions covered fields and prefers next for covered keys', () => { + const slim = fromDescribe(describeResp(), [FieldInfo]) + const full = fromDescribe(describeResp(), [FieldInfo, FieldParameters, FieldInputSchema]) + const merged = mergeMeta(slim, full) + expect(covers(merged, [FieldInfo, FieldParameters, FieldInputSchema])).toBe(true) + }) + + it('mergeMeta with prev=undefined returns next', () => { + const next = fromDescribe(describeResp(), [FieldInfo]) + expect(mergeMeta(undefined, next)).toBe(next) + }) + + it('covers([]) requires all three slots 
populated', () => { + const partial = fromDescribe(describeResp(), [FieldInfo, FieldParameters]) + expect(covers(partial, [])).toBe(false) + }) +}) diff --git a/cli/src/types/app-meta.ts b/cli/src/types/app-meta.ts new file mode 100644 index 0000000000..b884e6773e --- /dev/null +++ b/cli/src/types/app-meta.ts @@ -0,0 +1,60 @@ +import type { DescribeInfo, DescribeResponse } from './app.js' +import { FieldInfo, FieldInputSchema, FieldParameters } from './app.js' + +export type AppMetaFieldKey = typeof FieldInfo | typeof FieldParameters | typeof FieldInputSchema + +export type AppMeta = { + info: DescribeInfo | null + parameters: unknown + inputSchema: unknown + coveredFields: ReadonlySet +} + +export type AppMetaCacheRecord = { + meta: AppMeta + fetchedAt: string +} + +export function fromDescribe(resp: DescribeResponse, requested: readonly AppMetaFieldKey[]): AppMeta { + const covered = new Set() + if (requested.length === 0) { + covered.add(FieldInfo) + covered.add(FieldParameters) + covered.add(FieldInputSchema) + } + else { + for (const f of requested) covered.add(f) + } + return { + info: resp.info, + parameters: resp.parameters, + inputSchema: resp.input_schema, + coveredFields: covered, + } +} + +export function mergeMeta(prev: AppMeta | undefined, next: AppMeta): AppMeta { + if (prev === undefined) + return next + const merged = new Set(prev.coveredFields) + for (const f of next.coveredFields) merged.add(f) + return { + info: next.coveredFields.has(FieldInfo) ? next.info : prev.info, + parameters: next.coveredFields.has(FieldParameters) ? next.parameters : prev.parameters, + inputSchema: next.coveredFields.has(FieldInputSchema) ? 
next.inputSchema : prev.inputSchema, + coveredFields: merged, + } +} + +export function covers(meta: AppMeta, fields: readonly AppMetaFieldKey[]): boolean { + if (fields.length === 0) { + return meta.coveredFields.has(FieldInfo) + && meta.coveredFields.has(FieldParameters) + && meta.coveredFields.has(FieldInputSchema) + } + for (const f of fields) { + if (!meta.coveredFields.has(f)) + return false + } + return true +} diff --git a/cli/src/types/app.ts b/cli/src/types/app.ts new file mode 100644 index 0000000000..cb40ef247a --- /dev/null +++ b/cli/src/types/app.ts @@ -0,0 +1,52 @@ +import { z } from 'zod' + +export const TagSchema = z.object({ + name: z.string(), +}) +export type Tag = z.infer + +export const ListRowSchema = z.object({ + id: z.string(), + name: z.string(), + description: z.string().default(''), + mode: z.string(), + tags: z.array(TagSchema).default([]), + updated_at: z.string().nullable().default(null), + created_by_name: z.string().nullable().default(null), + workspace_id: z.string().default(''), + workspace_name: z.string().nullable().default(null), +}) +export type ListRow = z.infer + +export const ListResponseSchema = z.object({ + page: z.number().int().default(1), + limit: z.number().int().default(20), + total: z.number().int().default(0), + has_more: z.boolean().default(false), + data: z.array(ListRowSchema).default([]), +}) +export type ListResponse = z.infer + +export const DescribeInfoSchema = z.object({ + id: z.string(), + name: z.string(), + description: z.string().default(''), + mode: z.string(), + author: z.string().default(''), + tags: z.array(TagSchema).default([]), + updated_at: z.string().nullable().default(null), + service_api_enabled: z.boolean().default(false), + is_agent: z.boolean().default(false), +}) +export type DescribeInfo = z.infer + +export const DescribeResponseSchema = z.object({ + info: DescribeInfoSchema.nullable(), + parameters: z.unknown().nullable().default(null), + input_schema: 
z.unknown().nullable().default(null), +}) +export type DescribeResponse = z.infer + +export const FieldInfo = 'info' +export const FieldParameters = 'parameters' +export const FieldInputSchema = 'input_schema' diff --git a/cli/src/types/workspace.ts b/cli/src/types/workspace.ts new file mode 100644 index 0000000000..81b47d79bd --- /dev/null +++ b/cli/src/types/workspace.ts @@ -0,0 +1,15 @@ +import { z } from 'zod' + +export const WorkspaceSummarySchema = z.object({ + id: z.string(), + name: z.string(), + role: z.string(), + status: z.string().default(''), + current: z.boolean().default(false), +}) +export type WorkspaceSummary = z.infer + +export const WorkspaceListResponseSchema = z.object({ + workspaces: z.array(WorkspaceSummarySchema).default([]), +}) +export type WorkspaceListResponse = z.infer diff --git a/cli/src/util/browser.ts b/cli/src/util/browser.ts new file mode 100644 index 0000000000..3a272cc77a --- /dev/null +++ b/cli/src/util/browser.ts @@ -0,0 +1,51 @@ +import openModule from 'open' + +export const OpenDecision = { + Auto: 'auto-open', + SkipSSH: 'Detected SSH session', + SkipHeadlessLinux: 'Headless Linux (no DISPLAY / WAYLAND_DISPLAY)', + SkipNoTTY: 'Non-interactive TTY', + SkipUserOptOut: '--no-browser requested', +} as const +export type OpenDecision = typeof OpenDecision[keyof typeof OpenDecision] + +export type BrowserEnv = { + getEnv: (key: string) => string | undefined + platform: NodeJS.Platform + isOutTTY: boolean + isErrTTY: boolean +} + +export function realEnv(): BrowserEnv { + return { + getEnv: k => process.env[k], + platform: process.platform, + isOutTTY: Boolean(process.stdout.isTTY), + isErrTTY: Boolean(process.stderr.isTTY), + } +} + +export function decideOpen(env: BrowserEnv, userOptOut: boolean): OpenDecision { + if (userOptOut) + return OpenDecision.SkipUserOptOut + if (truthy(env.getEnv('SSH_CONNECTION')) || truthy(env.getEnv('SSH_TTY'))) + return OpenDecision.SkipSSH + if (env.platform === 'linux' + && 
!truthy(env.getEnv('DISPLAY')) + && !truthy(env.getEnv('WAYLAND_DISPLAY'))) { + return OpenDecision.SkipHeadlessLinux + } + if (!env.isOutTTY || !env.isErrTTY) + return OpenDecision.SkipNoTTY + return OpenDecision.Auto +} + +export type BrowserOpener = (url: string) => Promise + +export const openUrl: BrowserOpener = async (url) => { + await openModule(url) +} + +function truthy(v: string | undefined): boolean { + return v !== undefined && v !== '' +} diff --git a/cli/src/util/host.ts b/cli/src/util/host.ts new file mode 100644 index 0000000000..dcd6f5f3af --- /dev/null +++ b/cli/src/util/host.ts @@ -0,0 +1,70 @@ +import { BaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' + +export const DEFAULT_HOST = 'https://cloud.dify.ai' + +export type ResolveHostOptions = { + raw: string + insecure: boolean +} + +export function resolveHost(opts: ResolveHostOptions): string { + let raw = opts.raw.trim() + if (raw === '') + raw = DEFAULT_HOST + if (!raw.includes('://')) + raw = `https://${raw}` + let url: URL + try { + url = new URL(raw) + } + catch (err) { + throw new BaseError({ code: ErrorCode.UsageInvalidFlag, message: `host parse: ${(err as Error).message}` }) + } + url.pathname = url.pathname.replace(/\/+$/, '') + if (url.protocol !== 'https:' && !(opts.insecure && url.protocol === 'http:')) { + throw new BaseError({ + code: ErrorCode.UsageInvalidFlag, + message: 'only https:// hosts are accepted', + hint: 'add --insecure to allow http:// (local-dev only; user_code/device_code travel plaintext)', + }) + } + const out = url.toString() + return out.endsWith('/') ? out.slice(0, -1) : out +} + +export function hostWithScheme(host: string, scheme: string | undefined): string { + if (host.includes('://')) + return host + const proto = scheme === undefined || scheme === '' ? 'https' : scheme + return `${proto}://${host}` +} + +export function bareHost(raw: string): string { + try { + const u = new URL(raw) + return u.host !== '' ? 
u.host : raw + } + catch { + return raw + } +} + +export function validateVerificationURI(raw: string, insecure: boolean): void { + let url: URL + try { + url = new URL(raw.trim()) + } + catch { + throw new BaseError({ code: ErrorCode.Unknown, message: `server returned invalid verification_uri "${raw}"` }) + } + if (url.protocol !== 'https:' && !(insecure && url.protocol === 'http:')) { + throw new BaseError({ + code: ErrorCode.Unknown, + message: `server returned verification_uri with unsupported scheme "${url.protocol.replace(':', '')}"`, + hint: 'expected https:// (use --insecure to allow http:// on local-dev hosts)', + }) + } + if (url.host === '') + throw new BaseError({ code: ErrorCode.Unknown, message: `server returned verification_uri without host: "${raw}"` }) +} diff --git a/cli/src/version/compat.test.ts b/cli/src/version/compat.test.ts new file mode 100644 index 0000000000..ffe89029bb --- /dev/null +++ b/cli/src/version/compat.test.ts @@ -0,0 +1,15 @@ +import { describe, expect, it } from 'vitest' +import { compatString, difyCompat } from './compat.js' + +describe('difyCompat', () => { + it('exposes minDify and maxDify as readonly strings', () => { + expect(typeof difyCompat.minDify).toBe('string') + expect(typeof difyCompat.maxDify).toBe('string') + }) +}) + +describe('compatString', () => { + it('formats as "dify >=min, <=max"', () => { + expect(compatString()).toMatch(/^dify >=\d+\.\d+\.\d+(-[\w.]+)?, <=\d+\.\d+\.\d+(-[\w.]+)?$/) + }) +}) diff --git a/cli/src/version/compat.ts b/cli/src/version/compat.ts new file mode 100644 index 0000000000..0c482970d9 --- /dev/null +++ b/cli/src/version/compat.ts @@ -0,0 +1,16 @@ +declare const __DIFYCTL_MIN_DIFY__: string +declare const __DIFYCTL_MAX_DIFY__: string + +export type DifyCompat = { + readonly minDify: string + readonly maxDify: string +} + +export const difyCompat: DifyCompat = { + minDify: __DIFYCTL_MIN_DIFY__, + maxDify: __DIFYCTL_MAX_DIFY__, +} + +export function compatString(): string { + return 
// Returns the bare semver string injected at build time via
// __DIFYCTL_VERSION__ (e.g. "1.4.0" or "1.4.0-rc.1"); used for --version
// output and release tooling.
export function shortVersion(): string {
  return versionInfo.version
}
${buildDate}, channel ${channel})\n` + + `compat: ${compatString()}` +} + +export function userAgent(): string { + return `difyctl/${versionInfo.version} (${process.platform}; ${process.arch}; ${versionInfo.channel})` +} diff --git a/cli/src/workspace/resolver.ts b/cli/src/workspace/resolver.ts new file mode 100644 index 0000000000..225e65f666 --- /dev/null +++ b/cli/src/workspace/resolver.ts @@ -0,0 +1,34 @@ +import type { HostsBundle } from '../auth/hosts.js' +import { BaseError } from '../errors/base.js' +import { ErrorCode } from '../errors/codes.js' + +export type WorkspaceResolveInputs = { + readonly flag?: string + readonly env?: string + readonly bundle?: HostsBundle +} + +export function resolveWorkspaceId(inputs: WorkspaceResolveInputs): string { + if (truthy(inputs.flag)) + return inputs.flag + if (truthy(inputs.env)) + return inputs.env + const b = inputs.bundle + if (b !== undefined) { + if (truthy(b.workspace?.id)) + return b.workspace.id + if (b.available_workspaces !== undefined && b.available_workspaces.length > 0 + && truthy(b.available_workspaces[0]?.id)) { + return b.available_workspaces[0].id + } + } + throw new BaseError({ + code: ErrorCode.UsageMissingArg, + message: 'no workspace selected', + hint: 'pass --workspace, set DIFY_WORKSPACE_ID, or run \'difyctl auth use\'', + }) +} + +function truthy(v: string | undefined): v is string { + return v !== undefined && v !== '' +} diff --git a/cli/test/fixtures/dify-mock/scenarios.ts b/cli/test/fixtures/dify-mock/scenarios.ts new file mode 100644 index 0000000000..a1e3e07368 --- /dev/null +++ b/cli/test/fixtures/dify-mock/scenarios.ts @@ -0,0 +1,166 @@ +export type Scenario + = | 'happy' + | 'sso' + | 'denied' + | 'expired' + | 'auth-expired' + | 'rate-limited' + | 'server-5xx' + | 'slow-down' + | 'stream-error' + +export type AccountFixture = { + id: string + email: string + name: string + is_external: boolean + current_workspace_id: string | null +} + +export type WorkspaceFixture = { + id: string 
// Canonical signed-in account used by the happy-path scenarios; its
// current_workspace_id matches the first entry of WORKSPACES below.
export const ACCOUNT: AccountFixture = {
  id: 'acct-1',
  email: 'tester@dify.ai',
  name: 'Test Tester',
  is_external: false,
  current_workspace_id: 'ws-1',
}

// Two seeded workspaces: ws-1 is the caller's current workspace (owner role);
// ws-2 exists to exercise cross-workspace filtering in the app endpoints.
export const WORKSPACES: WorkspaceFixture[] = [
  { id: 'ws-1', name: 'Default', role: 'owner', status: 'normal', is_current: true },
  { id: 'ws-2', name: 'Other', role: 'normal', status: 'normal', is_current: false },
]
// Seeded API sessions: tok-1 and tok-2 belong to the difyctl client (and are
// what the CLI's session-revocation paths operate on); tok-3 is a foreign
// cloud-console session that difyctl-scoped operations should leave alone.
export const SESSIONS: SessionFixture[] = [
  {
    id: 'tok-1',
    prefix: 'dfoa',
    client_id: 'difyctl',
    device_label: 'difyctl on laptop',
    created_at: '2026-05-01T00:00:00Z',
    last_used_at: '2026-05-08T00:00:00Z',
    expires_at: '2026-08-01T00:00:00Z',
  },
  {
    id: 'tok-2',
    prefix: 'dfoa',
    client_id: 'difyctl',
    device_label: 'difyctl on desktop',
    created_at: '2026-04-15T00:00:00Z',
    last_used_at: '2026-05-07T00:00:00Z',
    expires_at: '2026-07-15T00:00:00Z',
  },
  {
    id: 'tok-3',
    prefix: 'dfoa',
    client_id: 'cloud-console',
    device_label: 'web ui',
    created_at: '2026-05-05T00:00:00Z',
    last_used_at: '2026-05-08T00:00:00Z',
    expires_at: '2026-08-05T00:00:00Z',
  },
]
expect(mock.url).toMatch(/^http:\/\/127\.0\.0\.1:\d+$/) + }) + + it('GET /healthz returns 200 without auth', async () => { + const r = await fetch(`${mock.url}/healthz`) + expect(r.status).toBe(200) + expect(await r.json()).toEqual({ ok: true }) + }) + + it('rejects /openapi/v1/* without Authorization header', async () => { + const r = await fetch(`${mock.url}/openapi/v1/workspaces`) + expect(r.status).toBe(401) + }) + + it('rejects malformed Bearer tokens', async () => { + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer wrongprefix_abc' }, + }) + expect(r.status).toBe(401) + }) + + it('accepts dfoa_ tokens (community/account)', async () => { + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + }) + + it('accepts dfoe_ tokens (enterprise/external-subject)', async () => { + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoe_test' }, + }) + expect(r.status).toBe(200) + }) + + it('GET /openapi/v1/workspaces returns the seeded list with status + current', async () => { + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + const body = await r.json() as { + workspaces: Array<{ id: string, name: string, role: string, status: string, current: boolean }> + } + expect(body.workspaces).toHaveLength(2) + expect(body.workspaces[0]?.id).toBe('ws-1') + expect(body.workspaces[0]?.status).toBe('normal') + expect(body.workspaces[0]?.current).toBe(true) + expect(body.workspaces[1]?.current).toBe(false) + }) + + it('GET /openapi/v1/workspaces returns empty list under sso scenario', async () => { + mock.setScenario('sso') + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + const body = await r.json() as 
{ workspaces: unknown[] } + expect(body.workspaces).toHaveLength(0) + }) + + it('GET /openapi/v1/account returns the seeded account envelope', async () => { + const r = await fetch(`${mock.url}/openapi/v1/account`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + const body = await r.json() as { + subject_type: string + account: { email: string } | null + workspaces: Array<{ id: string }> + default_workspace_id: string + } + expect(body.subject_type).toBe('account') + expect(body.account?.email).toBe('tester@dify.ai') + expect(body.workspaces).toHaveLength(2) + expect(body.default_workspace_id).toBe('ws-1') + }) + + it('GET /openapi/v1/apps respects ?mode filter', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps?workspace_id=ws-1&mode=workflow`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + const body = await r.json() as { data: Array<{ mode: string }>, total: number } + expect(body.data).toHaveLength(1) + expect(body.data[0]?.mode).toBe('workflow') + expect(body.total).toBe(1) + }) + + it('GET /openapi/v1/apps scopes by workspace_id', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps?workspace_id=ws-2`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + const body = await r.json() as { data: Array<{ id: string }> } + expect(body.data).toHaveLength(2) + expect(body.data.map(r => r.id).sort()).toEqual(['app-3', 'app-4']) + }) + + it('GET /openapi/v1/apps/:id/describe returns 404 for unknown id', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/nope/describe?workspace_id=ws-1`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(404) + }) + + it('GET /openapi/v1/apps/:id/describe returns the app for known id', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/app-1/describe?workspace_id=ws-1`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + const body = await r.json() as 
{ info: { id: string } } + expect(body.info.id).toBe('app-1') + }) + + it('POST /openapi/v1/apps/:id/run blocking returns chat-shaped envelope', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/app-1/run`, { + method: 'POST', + headers: { + 'Authorization': 'Bearer dfoa_test', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ query: 'hi', inputs: {}, response_mode: 'blocking' }), + }) + expect(r.status).toBe(200) + const body = await r.json() as { mode: string, answer: string, conversation_id: string } + expect(body.mode).toBe('chat') + expect(body.answer).toBe('echo: hi') + expect(body.conversation_id).toBe('conv-1') + }) + + it('POST /openapi/v1/apps/:id/run blocking returns workflow-shaped envelope', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/app-2/run`, { + method: 'POST', + headers: { + 'Authorization': 'Bearer dfoa_test', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ inputs: { x: 1 }, response_mode: 'blocking' }), + }) + expect(r.status).toBe(200) + const body = await r.json() as { data: { status: string, outputs: { result: string } } } + expect(body.data.status).toBe('succeeded') + expect(body.data.outputs.result).toBe('echo: ') + }) + + it('GET /openapi/v1/apps/:id/describe?fields=info returns slim payload', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/app-1/describe?workspace_id=ws-1&fields=info`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(200) + const body = await r.json() as { info: { id: string }, parameters: unknown, input_schema: unknown } + expect(body.info.id).toBe('app-1') + expect(body.parameters).toBeNull() + expect(body.input_schema).toBeNull() + }) + + it('GET /openapi/v1/apps/:id/describe full returns parameters when present', async () => { + const r = await fetch(`${mock.url}/openapi/v1/apps/app-1/describe?workspace_id=ws-1`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + 
expect(r.status).toBe(200) + const body = await r.json() as { parameters: { opening_statement: string } | null } + expect(body.parameters?.opening_statement).toBe('Hi, I am Greeter.') + }) + + it('POST /openapi/v1/oauth/device/code returns RFC 8628 fields', async () => { + const r = await fetch(`${mock.url}/openapi/v1/oauth/device/code`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ client_id: 'difyctl', device_label: 'difyctl on host' }), + }) + expect(r.status).toBe(200) + const body = await r.json() as Record + expect(body.device_code).toBeDefined() + expect(body.user_code).toBeDefined() + expect(body.interval).toBeDefined() + }) + + it('POST /openapi/v1/oauth/device/token returns Dify token envelope', async () => { + const r = await fetch(`${mock.url}/openapi/v1/oauth/device/token`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ client_id: 'difyctl', device_code: 'devcode-1' }), + }) + expect(r.status).toBe(200) + const body = await r.json() as { token: string, subject_type: string, account?: { email: string } } + expect(body.token).toMatch(/^dfoa_/) + expect(body.subject_type).toBe('account') + expect(body.account?.email).toBe('tester@dify.ai') + }) + + it('scenario:sso returns external_sso envelope with dfoe_ token', async () => { + mock.setScenario('sso') + const r = await fetch(`${mock.url}/openapi/v1/oauth/device/token`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ device_code: 'devcode-1' }), + }) + expect(r.status).toBe(200) + const body = await r.json() as { token: string, subject_type: string, subject_email: string } + expect(body.token).toMatch(/^dfoe_/) + expect(body.subject_type).toBe('external_sso') + expect(body.subject_email).toBe('sso@dify.ai') + }) + + it('scenario:denied returns access_denied on token poll', async () => { + mock.setScenario('denied') + const r = await 
fetch(`${mock.url}/openapi/v1/oauth/device/token`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ device_code: 'devcode-1' }), + }) + expect(r.status).toBe(400) + const body = await r.json() as { error: string } + expect(body.error).toBe('access_denied') + }) + + it('scenario:expired returns expired_token on token poll', async () => { + mock.setScenario('expired') + const r = await fetch(`${mock.url}/openapi/v1/oauth/device/token`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ device_code: 'devcode-1' }), + }) + expect(r.status).toBe(400) + const body = await r.json() as { error: string } + expect(body.error).toBe('expired_token') + }) + + it('scenario:slow-down returns slow_down on token poll', async () => { + mock.setScenario('slow-down') + const r = await fetch(`${mock.url}/openapi/v1/oauth/device/token`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ device_code: 'devcode-1' }), + }) + expect(r.status).toBe(400) + const body = await r.json() as { error: string } + expect(body.error).toBe('slow_down') + }) + + it('scenario:auth-expired returns 401 on bearer-protected endpoint', async () => { + mock.setScenario('auth-expired') + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(401) + }) + + it('scenario:rate-limited returns 429 with retry-after', async () => { + mock.setScenario('rate-limited') + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(429) + expect(r.headers.get('retry-after')).toBe('1') + }) + + it('scenario:server-5xx returns 503', async () => { + mock.setScenario('server-5xx') + const r = await fetch(`${mock.url}/openapi/v1/workspaces`, { + headers: { Authorization: 'Bearer dfoa_test' }, + }) + expect(r.status).toBe(503) + }) 
+}) diff --git a/cli/test/fixtures/dify-mock/server.ts b/cli/test/fixtures/dify-mock/server.ts new file mode 100644 index 0000000000..7a71278f49 --- /dev/null +++ b/cli/test/fixtures/dify-mock/server.ts @@ -0,0 +1,340 @@ +import type { AddressInfo } from 'node:net' +import type { Scenario } from './scenarios.js' +import { serve } from '@hono/node-server' +import { Hono } from 'hono' +import { ACCOUNT, APPS, SESSIONS, WORKSPACES } from './scenarios.js' + +export type DifyMockOptions = { + scenario?: Scenario + port?: number +} + +export type DifyMock = { + url: string + port: number + scenario: Scenario + setScenario: (s: Scenario) => void + stop: () => Promise +} + +const TOKEN_RE = /^Bearer\s+dfo[ae]_[\w-]+$/ + +function unauthorized() { + return Response.json( + { error: { code: 'auth_expired', message: 'invalid or expired token' } }, + { status: 401 }, + ) +} + +function sseChunks(events: { event: string, data: Record }[]): string { + return events.map(e => `data: ${JSON.stringify({ ...e.data, event: e.event })}\n\n`).join('') +} + +function streamingRunResponse(mode: string, query: string, isAgent: boolean): string { + if (mode === 'workflow') { + return sseChunks([ + { event: 'workflow_started', data: { id: 'wf-run-1', workflow_id: 'wf-1' } }, + { event: 'node_started', data: { id: 'n1', title: 'first' } }, + { event: 'node_finished', data: { id: 'n1', status: 'succeeded' } }, + { event: 'workflow_finished', data: { id: 'wf-run-1', workflow_id: 'wf-1', data: { id: 'wf-run-1', status: 'succeeded', outputs: { result: `echo: ${query}` } } } }, + ]) + } + if (mode === 'completion') { + return sseChunks([ + { event: 'message', data: { message_id: 'msg-1', mode, answer: 'echo: ' } }, + { event: 'message', data: { answer: query } }, + { event: 'message_end', data: { message_id: 'msg-1', task_id: 'task-1', metadata: {} } }, + ]) + } + const evt = isAgent ? 
'agent_message' : 'message' + const events: { event: string, data: Record }[] = [ + { event: evt, data: { message_id: 'msg-1', conversation_id: 'conv-1', mode, answer: 'echo: ' } }, + { event: evt, data: { answer: query } }, + ] + if (isAgent) + events.push({ event: 'agent_thought', data: { thought: 'thinking…' } }) + events.push({ event: 'message_end', data: { message_id: 'msg-1', conversation_id: 'conv-1', metadata: {} } }) + return sseChunks(events) +} + +function blockingRunResponse(mode: string, query: string, inputs: unknown): Record { + const echo = `echo: ${query}` + if (mode === 'workflow') { + return { + task_id: 'task-1', + workflow_run_id: 'wf-run-1', + data: { + id: 'wf-run-1', + workflow_id: 'wf-1', + status: 'succeeded', + outputs: { result: echo, inputs }, + elapsed_time: 0.5, + total_tokens: 12, + }, + } + } + if (mode === 'completion') { + return { + message_id: 'msg-1', + mode, + answer: echo, + created_at: 1714000000, + } + } + return { + event: 'message', + message_id: 'msg-1', + conversation_id: 'conv-1', + mode, + answer: echo, + created_at: 1714000000, + metadata: {}, + } +} + +export function buildApp(getScenario: () => Scenario): Hono { + const app = new Hono() + + app.get('/healthz', c => c.json({ ok: true })) + + app.use('*', async (c, next) => { + if (c.req.path === '/healthz') { + await next() + return + } + if (c.req.path.startsWith('/openapi/v1/oauth/')) { + await next() + return + } + const auth = c.req.header('Authorization') ?? 
'' + if (!TOKEN_RE.test(auth)) + return unauthorized() + const scenario = getScenario() + if (scenario === 'auth-expired') + return unauthorized() + await next() + }) + + app.use('*', async (c, next) => { + const scenario = getScenario() + if (scenario === 'rate-limited') { + return c.json( + { error: { code: 'rate_limited', message: 'too many requests' } }, + { status: 429, headers: { 'retry-after': '1' } }, + ) + } + if (scenario === 'server-5xx') { + return c.json( + { error: { code: 'server_5xx', message: 'upstream broken' } }, + { status: 503 }, + ) + } + await next() + }) + + app.get('/openapi/v1/account', (c) => { + const scenario = getScenario() + if (scenario === 'sso') { + return c.json({ + subject_type: 'external_sso', + subject_email: 'sso@dify.ai', + subject_issuer: 'https://issuer.example', + account: null, + workspaces: [], + default_workspace_id: null, + }) + } + return c.json({ + subject_type: 'account', + subject_email: ACCOUNT.email, + account: { id: ACCOUNT.id, email: ACCOUNT.email, name: ACCOUNT.name }, + workspaces: WORKSPACES.map(w => ({ id: w.id, name: w.name, role: w.role })), + default_workspace_id: 'ws-1', + }) + }) + + app.get('/openapi/v1/account/sessions', (c) => { + const page = Number(c.req.query('page') ?? '1') + const limit = Number(c.req.query('limit') ?? 
'100') + const total = SESSIONS.length + const start = (page - 1) * limit + const slice = SESSIONS.slice(start, start + limit) + return c.json({ + page, + limit, + total, + has_more: page * limit < total, + data: slice, + }) + }) + + app.delete('/openapi/v1/account/sessions/self', () => + Response.json({ status: 'revoked' }, { status: 200 })) + + app.delete('/openapi/v1/account/sessions/:id', (c) => { + const id = c.req.param('id') + if (!SESSIONS.some(s => s.id === id)) + return c.json({ error: { code: 'not_found', message: 'session not found' } }, { status: 404 }) + return Response.json({ status: 'revoked' }, { status: 200 }) + }) + + app.get('/openapi/v1/workspaces', (c) => { + if (getScenario() === 'sso') + return c.json({ workspaces: [] }) + return c.json({ + workspaces: WORKSPACES.map(w => ({ + id: w.id, + name: w.name, + role: w.role, + status: w.status, + current: w.is_current, + })), + }) + }) + + app.get('/openapi/v1/apps', (c) => { + const page = Number(c.req.query('page') ?? '1') + const limit = Number(c.req.query('limit') ?? '20') + const mode = c.req.query('mode') + const tag = c.req.query('tag') + const name = c.req.query('name') + const workspaceId = c.req.query('workspace_id') ?? 'ws-1' + let filtered = APPS.filter(a => a.workspace_id === workspaceId) + if (mode !== undefined && mode !== '') + filtered = filtered.filter(a => a.mode === mode) + if (tag !== undefined && tag !== '') + filtered = filtered.filter(a => a.tags.some(t => t.name === tag)) + if (name !== undefined && name !== '') + filtered = filtered.filter(a => a.name.includes(name)) + const total = filtered.length + const start = (page - 1) * limit + const slice = filtered.slice(start, start + limit) + return c.json({ + page, + limit, + total, + has_more: page * limit < total, + data: slice, + }) + }) + + app.get('/openapi/v1/apps/:id/describe', (c) => { + const id = c.req.param('id') + const wsId = c.req.query('workspace_id') + const fieldsRaw = c.req.query('fields') ?? 
'' + const fields = fieldsRaw === '' ? [] : fieldsRaw.split(',').map(s => s.trim()).filter(s => s !== '') + const app = APPS.find(a => a.id === id && (wsId === undefined || wsId === '' || a.workspace_id === wsId)) + if (app === undefined) + return c.json({ error: { code: 'not_found', message: 'app not found' } }, { status: 404 }) + const wantInfo = fields.length === 0 || fields.includes('info') + const wantParams = fields.length === 0 || fields.includes('parameters') + const wantInputSchema = fields.length === 0 || fields.includes('input_schema') + return c.json({ + info: wantInfo + ? { + id: app.id, + name: app.name, + description: app.description, + mode: app.mode, + author: app.author ?? '', + tags: app.tags, + updated_at: app.updated_at, + service_api_enabled: app.service_api_enabled ?? false, + is_agent: app.is_agent ?? false, + } + : null, + parameters: wantParams ? (app.parameters ?? null) : null, + input_schema: wantInputSchema ? (app.input_schema ?? null) : null, + }) + }) + + app.post('/openapi/v1/apps/:id/run', async (c) => { + const id = c.req.param('id') + const body = await c.req.json() as { query?: string, inputs?: unknown, response_mode?: string } + const app = APPS.find(a => a.id === id) + if (app === undefined) + return c.json({ error: { code: 'not_found', message: 'app not found' } }, { status: 404 }) + const isStreaming = body.response_mode === 'streaming' + const isAgent = app.is_agent === true || app.mode === 'agent-chat' + const query = body.query ?? 
'' + if (!isStreaming) { + if (isAgent) + return c.json({ error: { code: 'streaming_required', message: 'agent apps must use streaming' } }, { status: 400 }) + return c.json(blockingRunResponse(app.mode, query, body.inputs)) + } + const scenario = getScenario() + if (scenario === 'stream-error') { + const errSse = sseChunks([{ event: 'error', data: { message: 'boom', status: 503 } }]) + return new Response(errSse, { status: 200, headers: { 'content-type': 'text/event-stream' } }) + } + const sse = streamingRunResponse(app.mode, query, isAgent) + return new Response(sse, { status: 200, headers: { 'content-type': 'text/event-stream' } }) + }) + + app.post('/openapi/v1/oauth/device/code', c => + c.json({ + device_code: 'devcode-1', + user_code: 'ABCD-1234', + verification_uri: `${new URL(c.req.url).origin}/device`, + verification_uri_complete: `${new URL(c.req.url).origin}/device?user_code=ABCD-1234`, + expires_in: 600, + interval: 1, + })) + + app.post('/openapi/v1/oauth/device/token', async (c) => { + const scenario = getScenario() + if (scenario === 'denied') + return c.json({ error: 'access_denied', error_description: 'user rejected' }, { status: 400 }) + if (scenario === 'expired') + return c.json({ error: 'expired_token', error_description: 'device_code expired' }, { status: 400 }) + if (scenario === 'slow-down') + return c.json({ error: 'slow_down', error_description: 'increase interval' }, { status: 400 }) + if (scenario === 'sso') { + return c.json({ + token: 'dfoe_test', + subject_type: 'external_sso', + subject_email: 'sso@dify.ai', + subject_issuer: 'https://issuer.example', + token_id: 'tok-sso-1', + }) + } + return c.json({ + token: 'dfoa_test', + subject_type: 'account', + account: ACCOUNT, + workspaces: WORKSPACES.map(w => ({ id: w.id, name: w.name, role: w.role })), + default_workspace_id: 'ws-1', + token_id: 'tok-1', + }) + }) + + return app +} + +export function startMock(opts: DifyMockOptions = {}): Promise { + let scenario: Scenario = 
opts.scenario ?? 'happy' + const app = buildApp(() => scenario) + return new Promise((resolve, reject) => { + const server = serve({ + fetch: app.fetch, + port: opts.port ?? 0, + hostname: '127.0.0.1', + overrideGlobalObjects: false, + }) + server.on('listening', () => { + const addr = server.address() as AddressInfo + resolve({ + url: `http://127.0.0.1:${addr.port}`, + port: addr.port, + scenario, + setScenario(s) { scenario = s }, + stop() { + return new Promise((res, rej) => { + server.close(err => err ? rej(err) : res()) + }) + }, + }) + }) + server.on('error', reject) + }) +} diff --git a/cli/test/scripts/resolve-buildinfo.test.ts b/cli/test/scripts/resolve-buildinfo.test.ts new file mode 100644 index 0000000000..23c3c872b2 --- /dev/null +++ b/cli/test/scripts/resolve-buildinfo.test.ts @@ -0,0 +1,127 @@ +import { describe, expect, it } from 'vitest' +import { resolveBuildInfo } from '../../scripts/lib/resolve-buildinfo.js' + +const FIXED_DATE = new Date('2026-05-09T12:00:00.000Z') +const fixedNow = () => FIXED_DATE +const noGit = () => null + +describe('resolveBuildInfo', () => { + it('uses env values when fully populated', () => { + const info = resolveBuildInfo({ + env: { + DIFYCTL_VERSION: '1.2.3', + DIFYCTL_COMMIT: 'abcdef0123456789', + DIFYCTL_BUILD_DATE: '2026-01-01T00:00:00.000Z', + DIFYCTL_CHANNEL: 'stable', + }, + git: () => 'should-not-be-called', + now: fixedNow, + }) + expect(info).toStrictEqual({ + version: '1.2.3', + commit: 'abcdef0123456789', + buildDate: '2026-01-01T00:00:00.000Z', + channel: 'stable', + minDify: '0.0.0', + maxDify: '0.0.0', + }) + }) + + it('falls back to git probes when env unset', () => { + const calls: string[] = [] + const git = (cmd: string) => { + calls.push(cmd) + if (cmd.startsWith('git describe')) + return 'v1.0.0-5-gabc1234-dirty' + if (cmd.startsWith('git rev-parse')) + return '1234567890abcdef' + return null + } + const info = resolveBuildInfo({ env: {}, git, now: fixedNow }) + expect(info).toStrictEqual({ + 
version: 'v1.0.0-5-gabc1234-dirty', + commit: '1234567890abcdef', + buildDate: '2026-05-09T12:00:00.000Z', + channel: 'dev', + minDify: '0.0.0', + maxDify: '0.0.0', + }) + expect(calls).toStrictEqual([ + 'git describe --tags --dirty --always', + 'git rev-parse HEAD', + ]) + }) + + it('uses string defaults when env unset and git unavailable', () => { + const info = resolveBuildInfo({ env: {}, git: noGit, now: fixedNow }) + expect(info).toStrictEqual({ + version: '0.0.0-dev', + commit: 'none', + buildDate: '2026-05-09T12:00:00.000Z', + channel: 'dev', + minDify: '0.0.0', + maxDify: '0.0.0', + }) + }) + + it('throws on invalid channel', () => { + expect(() => + resolveBuildInfo({ env: { DIFYCTL_CHANNEL: 'beta' }, git: noGit, now: fixedNow }), + ).toThrow(/invalid DIFYCTL_CHANNEL: beta/) + }) + + it('throws on removed nightly channel', () => { + expect(() => + resolveBuildInfo({ env: { DIFYCTL_CHANNEL: 'nightly' }, git: noGit, now: fixedNow }), + ).toThrow(/invalid DIFYCTL_CHANNEL: nightly/) + }) + + it('accepts rc channel', () => { + const info = resolveBuildInfo({ + env: { + DIFYCTL_VERSION: '0.1.0-rc.1', + DIFYCTL_CHANNEL: 'rc', + DIFYCTL_COMMIT: 'abc', + DIFYCTL_BUILD_DATE: '2026-01-01T00:00:00.000Z', + }, + git: noGit, + now: fixedNow, + }) + expect(info.channel).toBe('rc') + }) + + it('mixes env and git fallbacks per field', () => { + const git = (cmd: string) => (cmd.startsWith('git describe') ? 
'v9.9.9' : null) + const info = resolveBuildInfo({ + env: { DIFYCTL_COMMIT: 'pinned-sha' }, + git, + now: fixedNow, + }) + expect(info.version).toBe('v9.9.9') + expect(info.commit).toBe('pinned-sha') + expect(info.channel).toBe('dev') + }) + + it('reads minDify and maxDify from env', () => { + const info = resolveBuildInfo({ + env: { + DIFYCTL_VERSION: '0.1.0-rc.1', + DIFYCTL_CHANNEL: 'rc', + DIFYCTL_COMMIT: 'abc', + DIFYCTL_BUILD_DATE: '2026-01-01T00:00:00.000Z', + DIFYCTL_MIN_DIFY: '1.6.0', + DIFYCTL_MAX_DIFY: '1.7.0', + }, + git: noGit, + now: fixedNow, + }) + expect(info.minDify).toBe('1.6.0') + expect(info.maxDify).toBe('1.7.0') + }) + + it('defaults minDify and maxDify to 0.0.0 when env unset', () => { + const info = resolveBuildInfo({ env: {}, git: noGit, now: fixedNow }) + expect(info.minDify).toBe('0.0.0') + expect(info.maxDify).toBe('0.0.0') + }) +}) diff --git a/cli/test/setup.ts b/cli/test/setup.ts new file mode 100644 index 0000000000..0c3b720358 --- /dev/null +++ b/cli/test/setup.ts @@ -0,0 +1,6 @@ +(globalThis as unknown as Record).__DIFYCTL_VERSION__ = '0.0.0-test'; +(globalThis as unknown as Record).__DIFYCTL_COMMIT__ = '0000000'; +(globalThis as unknown as Record).__DIFYCTL_BUILD_DATE__ = '1970-01-01T00:00:00.000Z'; +(globalThis as unknown as Record).__DIFYCTL_CHANNEL__ = 'dev'; +(globalThis as unknown as Record).__DIFYCTL_MIN_DIFY__ = '0.0.0'; +(globalThis as unknown as Record).__DIFYCTL_MAX_DIFY__ = '0.0.0' diff --git a/cli/tsconfig.json b/cli/tsconfig.json new file mode 100644 index 0000000000..dc04c33f30 --- /dev/null +++ b/cli/tsconfig.json @@ -0,0 +1,14 @@ +{ + "extends": "@dify/tsconfig/node.json", + "compilerOptions": { + "rootDir": "src", + "types": ["node"], + "declaration": true, + "declarationMap": true, + "noEmit": false, + "outDir": "dist", + "sourceMap": true + }, + "include": ["src/**/*.ts"], + "exclude": ["dist", "test", "node_modules", "**/*.test.ts"] +} diff --git a/cli/vite.config.ts b/cli/vite.config.ts new file mode 100644 
index 0000000000..7116527368 --- /dev/null +++ b/cli/vite.config.ts @@ -0,0 +1,36 @@ +import { defineConfig } from 'vite-plus' +import { resolveBuildInfo } from './scripts/lib/resolve-buildinfo.js' + +const buildInfo = resolveBuildInfo() + +export default defineConfig({ + pack: { + entry: ['src/index.ts', 'src/help-class.ts', 'src/commands/**/*.ts', 'src/hooks/*.ts'], + format: ['esm'], + dts: true, + clean: true, + sourcemap: true, + treeshake: false, + outDir: 'dist', + target: 'node22', + define: { + __DIFYCTL_VERSION__: JSON.stringify(buildInfo.version), + __DIFYCTL_COMMIT__: JSON.stringify(buildInfo.commit), + __DIFYCTL_BUILD_DATE__: JSON.stringify(buildInfo.buildDate), + __DIFYCTL_CHANNEL__: JSON.stringify(buildInfo.channel), + __DIFYCTL_MIN_DIFY__: JSON.stringify(buildInfo.minDify), + __DIFYCTL_MAX_DIFY__: JSON.stringify(buildInfo.maxDify), + }, + }, + test: { + environment: 'node', + setupFiles: ['./test/setup.ts'], + include: ['test/**/*.test.ts', 'src/**/*.test.ts'], + coverage: { + provider: 'v8', + reporter: ['text', 'text-summary', 'json'], + include: ['src/**/*.ts'], + exclude: ['src/**/*.test.ts', 'src/types/**'], + }, + }, +}) diff --git a/docker/nginx/conf.d/default.conf.template b/docker/nginx/conf.d/default.conf.template index 94a748290f..8c55712f09 100644 --- a/docker/nginx/conf.d/default.conf.template +++ b/docker/nginx/conf.d/default.conf.template @@ -27,6 +27,11 @@ server { include proxy.conf; } + location /openapi { + proxy_pass http://api:5001; + include proxy.conf; + } + location /files { proxy_pass http://api:5001; include proxy.conf; diff --git a/eslint.config.mjs b/eslint.config.mjs index ae9fdaff01..78864b6c52 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -11,6 +11,9 @@ export default antfu( '!packages/**', '!web/**', '!e2e/**', + '!cli/**', + 'cli/docs/specs/**', + 'cli/oclif.manifest.json', '!eslint.config.mjs', '!package.json', '!pnpm-workspace.yaml', diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 
65f2e05bf4..42e99d2534 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -87,12 +87,33 @@ catalogs: '@monaco-editor/react': specifier: 4.7.0 version: 4.7.0 + '@napi-rs/keyring': + specifier: 1.1.6 + version: 1.1.6 '@next/eslint-plugin-next': specifier: 16.2.3 version: 16.2.3 '@next/mdx': specifier: 16.2.3 version: 16.2.3 + '@oclif/core': + specifier: 4.11.1 + version: 4.11.1 + '@oclif/plugin-autocomplete': + specifier: 3.2.6 + version: 3.2.6 + '@oclif/plugin-help': + specifier: 6.2.10 + version: 6.2.10 + '@oclif/plugin-not-found': + specifier: 3.2.18 + version: 3.2.18 + '@oclif/plugin-version': + specifier: 2.2.16 + version: 2.2.16 + '@oclif/plugin-warn-if-update-available': + specifier: 3.1.13 + version: 3.1.13 '@orpc/client': specifier: 1.13.14 version: 1.13.14 @@ -252,6 +273,9 @@ catalogs: class-variance-authority: specifier: 0.7.1 version: 0.7.1 + cli-table3: + specifier: 0.6.5 + version: 0.6.5 client-only: specifier: 0.0.1 version: 0.0.1 @@ -327,6 +351,9 @@ catalogs: eslint-plugin-storybook: specifier: 10.3.5 version: 10.3.5 + eventsource-parser: + specifier: 3.0.5 + version: 3.0.5 fast-deep-equal: specifier: 3.1.3 version: 3.1.3 @@ -411,6 +438,18 @@ catalogs: nuqs: specifier: 2.8.9 version: 2.8.9 + oclif: + specifier: 4.15.5 + version: 4.15.5 + open: + specifier: 10.1.0 + version: 10.1.0 + ora: + specifier: 8.1.0 + version: 8.1.0 + picocolors: + specifier: 1.1.0 + version: 1.1.0 pinyin-pro: specifier: 3.28.1 version: 3.28.1 @@ -570,7 +609,7 @@ overrides: side-channel: npm:@nolyfill/side-channel@^1.0.44 smol-toml@<1.6.1: 1.6.1 solid-js: 1.9.11 - string-width: ~8.2.0 + string-width@>=5: ~8.2.0 svgo@>=3.0.0 <3.3.3: 3.3.3 tar@<=7.5.10: 7.5.11 undici@>=7.0.0 <7.24.0: 7.24.0 @@ -585,7 +624,7 @@ importers: devDependencies: '@antfu/eslint-config': specifier: 'catalog:' - version: 
8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) eslint: specifier: 'catalog:' version: 10.2.0(jiti@2.6.1) @@ -603,7 +642,98 @@ importers: version: '@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vite-plus: specifier: 'catalog:' - version: 
0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + + cli: + dependencies: + '@napi-rs/keyring': + specifier: 'catalog:' + version: 1.1.6 + '@oclif/core': + specifier: 'catalog:' + version: 4.11.1 + '@oclif/plugin-autocomplete': + specifier: 'catalog:' + version: 3.2.6 + '@oclif/plugin-help': + specifier: 'catalog:' + version: 6.2.10 + '@oclif/plugin-not-found': + specifier: 'catalog:' + version: 3.2.18 + '@oclif/plugin-version': + specifier: 'catalog:' + version: 2.2.16 + '@oclif/plugin-warn-if-update-available': + specifier: 'catalog:' + version: 3.1.13 + cli-table3: + specifier: 'catalog:' + version: 0.6.5 + eventsource-parser: + specifier: 'catalog:' + version: 3.0.5 + js-yaml: + specifier: 'catalog:' + version: 4.1.1 + ky: + specifier: 'catalog:' + version: 2.0.0 + open: + specifier: 'catalog:' + version: 10.1.0 + ora: + specifier: 'catalog:' + version: 8.1.0 + picocolors: + specifier: 'catalog:' + version: 1.1.0 + std-semver: + specifier: 'catalog:' + version: 1.0.8 + zod: + specifier: 'catalog:' + version: 4.3.6 + devDependencies: + '@dify/tsconfig': + specifier: workspace:* + version: link:../packages/tsconfig + '@hono/node-server': + specifier: 'catalog:' + version: 1.19.14(hono@4.12.14) + '@types/js-yaml': + specifier: 'catalog:' + 
version: 4.0.9 + '@types/node': + specifier: 'catalog:' + version: 25.6.0 + '@vitest/coverage-v8': + specifier: 'catalog:' + version: 4.1.4(@voidzero-dev/vite-plus-test@0.1.18) + eslint: + specifier: 'catalog:' + version: 10.2.0(jiti@2.6.1) + hono: + specifier: 'catalog:' + version: 4.12.14 + oclif: + specifier: 'catalog:' + version: 4.15.5(@types/node@25.6.0) + tsx: + specifier: 'catalog:' + version: 4.21.0 + typescript: + specifier: 'catalog:' + version: 6.0.2 + vite: + specifier: npm:@voidzero-dev/vite-plus-core@0.1.18 + version: '@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + vite-plus: + specifier: 'catalog:' + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + vitest: + specifier: npm:@voidzero-dev/vite-plus-test@0.1.18 + version: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' e2e: devDependencies: @@ -630,7 +760,7 @@ importers: version: '@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vite-plus: specifier: 'catalog:' - version: 
0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) packages/dify-ui: dependencies: @@ -682,7 +812,7 @@ importers: version: 6.0.1(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)) '@vitest/coverage-v8': specifier: 'catalog:' - version: 4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 4.1.4(@voidzero-dev/vite-plus-test@0.1.18) class-variance-authority: specifier: 'catalog:' version: 0.7.1 @@ -709,10 +839,10 @@ importers: version: '@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vite-plus: specifier: 'catalog:' - version: 
0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) vitest-browser-react: specifier: 'catalog:' - version: 2.2.0(@types/node@25.6.0)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 2.2.0(@types/node@25.6.0)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) packages/iconify-collections: devDependencies: @@ -737,7 +867,7 @@ importers: version: 
'@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vite-plus: specifier: 'catalog:' - version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) packages/tsconfig: {} @@ -760,7 +890,7 @@ importers: version: 8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2) '@vitest/coverage-v8': specifier: 'catalog:' - version: 4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 4.1.4(@voidzero-dev/vite-plus-test@0.1.18) eslint: specifier: 'catalog:' version: 10.2.0(jiti@2.6.1) @@ -772,10 +902,10 @@ importers: version: '@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vite-plus: specifier: 'catalog:' - version: 
0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) vitest: specifier: npm:@voidzero-dev/vite-plus-test@0.1.18 - version: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + version: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' web: dependencies: @@ -1088,7 +1218,7 @@ importers: devDependencies: '@antfu/eslint-config': specifier: 'catalog:' - version: 
8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) '@chromatic-com/storybook': specifier: 'catalog:' version: 5.1.2(storybook@10.3.5(@testing-library/dom@10.4.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)) @@ -1229,7 +1359,7 @@ importers: version: 
0.5.24(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(react-dom@19.2.5(react@19.2.5))(react-server-dom-webpack@19.2.5(react-dom@19.2.5(react@19.2.5))(react@19.2.5))(react@19.2.5) '@vitest/coverage-v8': specifier: 'catalog:' - version: 4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 4.1.4(@voidzero-dev/vite-plus-test@0.1.18) agentation: specifier: 'catalog:' version: 3.0.2(react-dom@19.2.5(react@19.2.5))(react@19.2.5) @@ -1304,13 +1434,13 @@ importers: version: 12.0.0-beta.1(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(typescript@6.0.2)(ws@8.20.0) vite-plus: specifier: 'catalog:' - version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + version: 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) vitest: specifier: npm:@voidzero-dev/vite-plus-test@0.1.18 - version: 
'@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + version: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' vitest-canvas-mock: specifier: 'catalog:' - version: 1.1.4(@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)) + version: 1.1.4(@voidzero-dev/vite-plus-test@0.1.18) packages: @@ -1465,6 +1595,169 @@ packages: '@antfu/utils@8.1.1': resolution: {integrity: sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==} + '@aws-crypto/crc32@5.2.0': + resolution: {integrity: sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/crc32c@5.2.0': + resolution: {integrity: 
sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag==} + + '@aws-crypto/sha1-browser@5.2.0': + resolution: {integrity: sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg==} + + '@aws-crypto/sha256-browser@5.2.0': + resolution: {integrity: sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==} + + '@aws-crypto/sha256-js@5.2.0': + resolution: {integrity: sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==} + engines: {node: '>=16.0.0'} + + '@aws-crypto/supports-web-crypto@5.2.0': + resolution: {integrity: sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==} + + '@aws-crypto/util@5.2.0': + resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} + + '@aws-sdk/client-cloudfront@3.1045.0': + resolution: {integrity: sha512-84RIiLrMXcinBK1JXnP1bOavvQ+jxTxN4xsB20e39MfOZMyr7wIxMNn35kTYTg8UTJgN7zwEgzGJo64sU3mtPw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/client-s3@3.1045.0': + resolution: {integrity: sha512-fsuO3Y6t+3Ro9Bsg41DKj4Sfy53CGSrhnMldNplWmG8Tx0UbYk+YDa4RD1hVlJpERw4JBmPkl0+J9qlxMh1pcA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/core@3.974.8': + resolution: {integrity: sha512-njR2qoG6ZuB0kvAS2FyICsFZJ6gmCcf2X/7JcD14sUvGDm26wiZ5BrA6LOiUxKFEF+IVe7kdroxyE00YlkiYsw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/crc64-nvme@3.972.7': + resolution: {integrity: sha512-QUagVVBbC8gODCF6e1aV0mE2TXWB9Opz4k8EJFdNrujUVQm5R4AjJa1mpOqzwOuROBzqJU9zawzig7M96L8Ejg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-env@3.972.34': + resolution: {integrity: sha512-XT0jtf8Fw9JE6ppsQeoNnZRiG+jqRixMT1v1ZR17G60UvVdsQmTG8nbEyHuEPfMxDXEhfdARaM/XiEhca4lGHQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-http@3.972.36': + resolution: {integrity: 
sha512-DPoGWfy7J7RKxvbf5kOKIGQkD2ek3dbKgzKIGrnLuvZBz5myU+Im/H6pmc14QcnFbqHMqxvtWSgRDSJW3qXLQg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-ini@3.972.38': + resolution: {integrity: sha512-oDzUBu2MGJFgoar05sPMCwSrhw44ASyccrHzj66vO69OZqi7I6hZZxXfuPLC8OCzW7C+sU+bI73XHij41yekgQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-login@3.972.38': + resolution: {integrity: sha512-g1NosS8qe4OF++G2UFCM5ovSkgipC7YYor5KCWatG0UoMSO5YFj9C8muePlyVmOBV/WTI16Jo3/s1NUo/o1Bww==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-node@3.972.39': + resolution: {integrity: sha512-HEswDQyxUtadoZ/bJsPPENHg7R0Lzym5LuMksJeHvqhCOpP+rtkDLKI4/ZChH4w3cf5kG8n6bZuI8PzajoiqMg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-process@3.972.34': + resolution: {integrity: sha512-T3IFs4EVmVi1dVN5RciFnklCANSzvrQd/VuHY9ThHSQmYkTogjcGkoJEr+oNUPQZnso52183088NqysMPji1/Q==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-sso@3.972.38': + resolution: {integrity: sha512-5ZxG+t0+3Q3QPh8KEjX6syskhgNf7I0MN7oGioTf6Lm1NTjfP7sIcYGNsthXC2qR8vcD3edNZwCr2ovfSSWuRA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/credential-provider-web-identity@3.972.38': + resolution: {integrity: sha512-lYHFF30DGI20jZcYX8cm6Ns0V7f1dDN6g/MBDLTyD/5iw+bXs3yBr2iAiHDkx4RFU5JgsnZvCHYKiRVPRdmOgw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-bucket-endpoint@3.972.10': + resolution: {integrity: sha512-Vbc2frZH7wXlMNd+ZZSXUEs/l1Sv8Jj4zUnIfwrYF5lwaLdXHZ9xx4U3rjUcaye3HRhFVc+E5DbBxpRAbB16BA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-expect-continue@3.972.10': + resolution: {integrity: sha512-2Yn0f1Qiq/DjxYR3wfI3LokXnjOhFM7Ssn4LTdFDIxRMCE6I32MAsVnhPX1cUZsuVA9tiZtwwhlSLAtFGxAZlQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-flexible-checksums@3.974.16': + resolution: {integrity: sha512-6ru8doI0/XzszqLIPXf0E/V7HhAw1Pu94010XCKYtBUfD0LxF0BuOzrUf8OQGR6j2o6wgKTHUniOmndQycHwCA==} + engines: {node: '>=20.0.0'} + + 
'@aws-sdk/middleware-host-header@3.972.10': + resolution: {integrity: sha512-IJSsIMeVQ8MMCPbuh1AbltkFhLBLXn7aejzfX5YKT/VLDHn++Dcz8886tXckE+wQssyPUhaXrJhdakO2VilRhg==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-location-constraint@3.972.10': + resolution: {integrity: sha512-rI3NZvJcEvjoD0+0PI0iUAwlPw2IlSlhyvgBK/3WkKJQE/YiKFedd9dMN2lVacdNxPNhxL/jzQaKQdrGtQagjQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-logger@3.972.10': + resolution: {integrity: sha512-OOuGvvz1Dm20SjZo5oEBePFqxt5nf8AwkNDSyUHvD9/bfNASmstcYxFAHUowy4n6Io7mWUZ04JURZwSBvyQanQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-recursion-detection@3.972.11': + resolution: {integrity: sha512-+zz6f79Kj9V5qFK2P+D8Ehjnw4AhphAlCAsPjUqEcInA9umtSSKMrHbSagEeOIsDNuvVrH98bjRHcyQukTrhaQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-sdk-s3@3.972.37': + resolution: {integrity: sha512-Km7M+i8DrLArVzrid1gfxeGhYHBd3uxvE77g0s5a52zPSVosxzQBnJ0gwWb6NIp/DOk8gsBMhi7V+cpJG0ndTA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-ssec@3.972.10': + resolution: {integrity: sha512-Gli9A0u8EVVb+5bFDGS/QbSVg28w/wpEidg1ggVcSj65BDTdGR6punsOcVjqdiu1i42WHWo51MCvARPIIz9juw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/middleware-user-agent@3.972.38': + resolution: {integrity: sha512-iz+B29TXcAZsJpwB+AwG/TTGA5l/VnmMZ2UxtiySOZjI6gCdmviXPwdgzcmuazMy16rXoPY4mYCGe7zdNKfx5A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/nested-clients@3.997.6': + resolution: {integrity: sha512-WBDnqatJl+kGObpfmfSxqnXeYTu3Me8wx8WCtvoxX3pfWrrTv8I4WTMSSs7PZqcRcVh8WeUKMgGFjMG+52SR1w==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/region-config-resolver@3.972.13': + resolution: {integrity: sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/signature-v4-multi-region@3.996.25': + resolution: {integrity: sha512-+CMIt3e1VzlklAECmG+DtP1sV8iKq25FuA0OKpnJ4KA0kxUtd7CgClY7/RU6VzJBQwbN4EJ9Ue6plvqx1qGadw==} + engines: {node: '>=20.0.0'} + 
+ '@aws-sdk/token-providers@3.1041.0': + resolution: {integrity: sha512-Th7kPI6YPtvJUcdznooXJMy+9rQWjmEF81LxaJssngBzuysK4a/x+l8kjm1zb7nYsUPbndnBdUnwng/3PLvtGw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/types@3.973.8': + resolution: {integrity: sha512-gjlAdtHMbtR9X5iIhVUvbVcy55KnznpC6bkDUWW9z915bi0ckdUr5cjf16Kp6xq0bP5HBD2xzgbL9F9Quv5vUw==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-arn-parser@3.972.3': + resolution: {integrity: sha512-HzSD8PMFrvgi2Kserxuff5VitNq2sgf3w9qxmskKDiDTThWfVteJxuCS9JXiPIPtmCrp+7N9asfIaVhBFORllA==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-endpoints@3.996.8': + resolution: {integrity: sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-locate-window@3.965.5': + resolution: {integrity: sha512-WhlJNNINQB+9qtLtZJcpQdgZw3SCDCpXdUJP7cToGwHbCWCnRckGlc6Bx/OhWwIYFNAn+FIydY8SZ0QmVu3xTQ==} + engines: {node: '>=20.0.0'} + + '@aws-sdk/util-user-agent-browser@3.972.10': + resolution: {integrity: sha512-FAzqXvfEssGdSIz8ejatan0bOdx1qefBWKF/gWmVBXIP1HkS7v/wjjaqrAGGKvyihrXTXW00/2/1nTJtxpXz7g==} + + '@aws-sdk/util-user-agent-node@3.973.24': + resolution: {integrity: sha512-ZWwlkjcIp7cEL8ZfTpTAPNkwx25p7xol0xlKoWVVf22+nsjwmLcHYtTPjIV1cSpmB/b6DaK4cb1fSkvCXHgRdw==} + engines: {node: '>=20.0.0'} + peerDependencies: + aws-crt: '>=1.0.0' + peerDependenciesMeta: + aws-crt: + optional: true + + '@aws-sdk/xml-builder@3.972.22': + resolution: {integrity: sha512-PMYKKtJd70IsSG0yHrdAbxBr+ZWBKLvzFZfD3/urxgf6hXVMzuU5M+3MJ5G67RpOmLBu1fAUN65SbWuKUCOlAA==} + engines: {node: '>=20.0.0'} + + '@aws/lambda-invoke-store@0.2.4': + resolution: {integrity: sha512-iY8yvjE0y651BixKNPgmv1WrQc+GZ142sb0z4gYnChDDY2YqI4P/jsSopBWrKfAt7LOJAkOXt7rC/hms+WclQQ==} + engines: {node: '>=18.0.0'} + '@babel/code-frame@7.29.0': resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} engines: {node: '>=6.9.0'} @@ -2252,6 +2545,164 
@@ packages: cpu: [x64] os: [win32] + '@inquirer/ansi@1.0.2': + resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} + engines: {node: '>=18'} + + '@inquirer/checkbox@4.3.2': + resolution: {integrity: sha512-VXukHf0RR1doGe6Sm4F0Em7SWYLTHSsbGfJdS9Ja2bX5/D5uwVOEjr07cncLROdBvmnvCATYEWlHqYmXv2IlQA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/confirm@3.2.0': + resolution: {integrity: sha512-oOIwPs0Dvq5220Z8lGL/6LHRTEr9TgLHmiI99Rj1PJ1p1czTys+olrgBqZk4E2qC0YTzeHprxSQmoHioVdJ7Lw==} + engines: {node: '>=18'} + + '@inquirer/confirm@5.1.21': + resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/core@10.3.2': + resolution: {integrity: sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/core@9.2.1': + resolution: {integrity: sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg==} + engines: {node: '>=18'} + + '@inquirer/editor@4.2.23': + resolution: {integrity: sha512-aLSROkEwirotxZ1pBaP8tugXRFCxW94gwrQLxXfrZsKkfjOYC1aRvAZuhpJOb5cu4IBTJdsCigUlf2iCOu4ZDQ==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/expand@4.0.23': + resolution: {integrity: sha512-nRzdOyFYnpeYTTR2qFwEVmIWypzdAx/sIkCMeTNTcflFOovfqUk+HcFhQQVBftAh9gmGrpFj6QcGEqrDMDOiew==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + 
'@inquirer/external-editor@1.0.3': + resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/figures@1.0.15': + resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} + engines: {node: '>=18'} + + '@inquirer/input@2.3.0': + resolution: {integrity: sha512-XfnpCStx2xgh1LIRqPXrTNEEByqQWoxsWYzNRSEUxJ5c6EQlhMogJ3vHKu8aXuTacebtaZzMAHwEL0kAflKOBw==} + engines: {node: '>=18'} + + '@inquirer/input@4.3.1': + resolution: {integrity: sha512-kN0pAM4yPrLjJ1XJBjDxyfDduXOuQHrBB8aLDMueuwUGn+vNpF7Gq7TvyVxx8u4SHlFFj4trmj+a2cbpG4Jn1g==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/number@3.0.23': + resolution: {integrity: sha512-5Smv0OK7K0KUzUfYUXDXQc9jrf8OHo4ktlEayFlelCjwMXz0299Y8OrI+lj7i4gCBY15UObk76q0QtxjzFcFcg==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/password@4.0.23': + resolution: {integrity: sha512-zREJHjhT5vJBMZX/IUbyI9zVtVfOLiTO66MrF/3GFZYZ7T4YILW5MSkEYHceSii/KtRk+4i3RE7E1CUXA2jHcA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/prompts@7.10.1': + resolution: {integrity: sha512-Dx/y9bCQcXLI5ooQ5KyvA4FTgeo2jYj/7plWfV5Ak5wDPKQZgudKez2ixyfz7tKXzcJciTxqLeK7R9HItwiByg==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/rawlist@4.1.11': + resolution: {integrity: sha512-+LLQB8XGr3I5LZN/GuAHo+GpDJegQwuPARLChlMICNdwW7OwV2izlCSCxN6cqpL0sMXmbKbFcItJgdQq5EBXTw==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' 
+ peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/search@3.2.2': + resolution: {integrity: sha512-p2bvRfENXCZdWF/U2BXvnSI9h+tuA8iNqtUKb9UWbmLYCRQxd8WkvwWvYn+3NgYaNwdUkHytJMGG4MMLucI1kA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/select@2.5.0': + resolution: {integrity: sha512-YmDobTItPP3WcEI86GvPo+T2sRHkxxOq/kXmsBjHS5BVXUgvgZ5AfJjkvQvZr03T81NnI3KrrRuMzeuYUQRFOA==} + engines: {node: '>=18'} + + '@inquirer/select@4.4.2': + resolution: {integrity: sha512-l4xMuJo55MAe+N7Qr4rX90vypFwCajSakx59qe/tMaC1aEHWLyw68wF4o0A4SLAY4E0nd+Vt+EyskeDIqu1M6w==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@inquirer/type@1.5.5': + resolution: {integrity: sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA==} + engines: {node: '>=18'} + + '@inquirer/type@2.0.0': + resolution: {integrity: sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag==} + engines: {node: '>=18'} + + '@inquirer/type@3.0.10': + resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + '@isaacs/fs-minipass@4.0.1': resolution: {integrity: sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==} engines: {node: '>=18.0.0'} @@ -2390,6 +2841,87 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + '@napi-rs/keyring-darwin-arm64@1.1.6': + resolution: {integrity: sha512-8N+qvM+O6OSU59BTgDP/PvqYhoqfOcD2HGy1NgRFo1B0DRmkTp4U/DGZrV4Pk/nOP6Uf0PLqznfx3a/M8O5sjQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + + 
'@napi-rs/keyring-darwin-x64@1.1.6': + resolution: {integrity: sha512-r3Jgc5/ubfaao6Lmk/USA13IwU/GEVLP8NDfg5gYXjPVllU6bWnAaEDHVg7q4vl51kViwj9ELo6XTmOeJFut6A==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + + '@napi-rs/keyring-freebsd-x64@1.1.6': + resolution: {integrity: sha512-ayG396jZAt7j820gsEyW/LJKn+rf9KtgSPq1NKpvu84Y5GXopoFLyjMIP7wYZ1RLBL6SGKy27/f8S4f6YZ4DuA==} + engines: {node: '>= 10'} + cpu: [x64] + os: [freebsd] + + '@napi-rs/keyring-linux-arm-gnueabihf@1.1.6': + resolution: {integrity: sha512-8nXavgxcaUTUxyFHR+PEQF7eC8rITlYZNUmlf5amTb36y5bkNKrc3QLvCxjtbFSR/+KYzMi3vydoqNmFpF616w==} + engines: {node: '>= 10'} + cpu: [arm] + os: [linux] + + '@napi-rs/keyring-linux-arm64-gnu@1.1.6': + resolution: {integrity: sha512-qsI2NTAxGD3mBhZvdyYGL+N0n1D/NAjV0zCpTsFKKSzdpIrQJ0nM5Y0HxlLi6TsHm61dMyXHkdHb0ut8AzTcGA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [glibc] + + '@napi-rs/keyring-linux-arm64-musl@1.1.6': + resolution: {integrity: sha512-SB/2A4LtL+SrS2aZXl3rWBtyCVB2aG2zAU56kOGFDGwRZM2tqaITuQoM1QLOAMwu0eksN/Xedy95Yn2rkRH0nQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + libc: [musl] + + '@napi-rs/keyring-linux-riscv64-gnu@1.1.6': + resolution: {integrity: sha512-BcjXf33T2CoVgS87SvZ62Y6xxkbenNIeldy0r8O5nz6zFgN+wYB0scz5ulvowEYBQnhi4fmbxfneeqM/0HUOeA==} + engines: {node: '>= 10'} + cpu: [riscv64] + os: [linux] + libc: [glibc] + + '@napi-rs/keyring-linux-x64-gnu@1.1.6': + resolution: {integrity: sha512-eK0OxCBI6Wl8rFHYynrtEID6pxOwhPfnpIIpul7UPeqCCMJSyZpFN4lFP3oZ4vqX/6FnWjwMrR7IGbPgivdMjA==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [glibc] + + '@napi-rs/keyring-linux-x64-musl@1.1.6': + resolution: {integrity: sha512-Qb3NP98KFq4jXmk9PUQlcYrHjbzsBTtG+OOxX4YxUNKTGuUaIOGP79lB0w7jhns2oHdq8DwkW2ugzlmGSUaRSw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + libc: [musl] + + '@napi-rs/keyring-win32-arm64-msvc@1.1.6': + resolution: {integrity: 
sha512-e794gO2CLD0P7JN2DVPT5CC60k3WmNWTWU5BVoQM8Hj0NYebx7j6LyxMIpdb2cztOHHiv7iltEHekgutf0TMlA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + + '@napi-rs/keyring-win32-ia32-msvc@1.1.6': + resolution: {integrity: sha512-SUPafl6vKRMQBKZoSwIeBFZ+c7AGEKUy6mpAD9fVHDKHOBWP3VpHKda4YIlgGtQd3SxH0bjfqJ078Z5SYsDYZQ==} + engines: {node: '>= 10'} + cpu: [ia32] + os: [win32] + + '@napi-rs/keyring-win32-x64-msvc@1.1.6': + resolution: {integrity: sha512-FkNhM1x5ijFzGSrRcshRxUxQSrrjxl4wCmvRcXnimWreOHyzNotT+/1EZtSfM/k8yhdK0HEkkVIMQl0UqfioRw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + + '@napi-rs/keyring@1.1.6': + resolution: {integrity: sha512-e6xoYELSMyaxcXv4MmEHhf0oOGsMnfWMmeu84CD91ICMgMH1I1vrLSMFpiPEQz03xD+pNQgAkQ7DwwBDozCuvw==} + engines: {node: '>= 10'} + '@napi-rs/wasm-runtime@1.1.2': resolution: {integrity: sha512-sNXv5oLJ7ob93xkZ1XnxisYhGYXfaG9f65/ZgYuAu3qt7b3NadcOEhLvx28hv31PgX8SZJRYrAIPQilQmFpLVw==} peerDependencies: @@ -2471,6 +3003,9 @@ packages: cpu: [x64] os: [win32] + '@nodable/entities@2.1.0': + resolution: {integrity: sha512-nyT7T3nbMyBI/lvr6L5TyWbFJAI9FTgVRakNoBqCD+PmID8DzFrrNdLLtHMwMszOtqZa8PAOV24ZqDnQrhQINA==} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -2495,6 +3030,38 @@ packages: resolution: {integrity: sha512-y3SvzjuY1ygnzWA4Krwx/WaJAsTMP11DN+e21A8Fa8PW1oDtVB5NSRW7LWurAiS2oKRkuCgcjTYMkBuBkcPCRg==} engines: {node: '>=12.4.0'} + '@oclif/core@4.11.1': + resolution: {integrity: sha512-+N5yqeoOKPnT0p+ZJiNutMILsZukZrEpsVup24XERla594EkGSWS9tiCqRfvzr1xfvf/AhM9pb0yPaf8L3Y9Uw==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-autocomplete@3.2.6': + resolution: {integrity: sha512-PLBvRFt4DRRGcLFTOPcqCOi79Jp03bxXGlbF3kAMzZx9jX2fjiRT54nYnC8qJzL+tcLLA7Oz9b9S07uHTuGu5A==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-help@6.2.10': + resolution: {integrity: 
sha512-Gm5/l/upTtj34StLIjZzhmO3AngqGx20rsbfOqDQ3SrsEnjfujtKgUm+MxXTjl4XfkkWREUN0CwuqLcuftnsOw==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-help@6.2.46': + resolution: {integrity: sha512-KmuMFt/fURCVxor0rrRjEqs2nLN0Y3ixcixo/M5VjKcN920gbuw5T+AF23FBeyUDuW/Dg79YPcTWy/Rtz0Dg/A==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-not-found@3.2.18': + resolution: {integrity: sha512-595+i3eG+K4jM+BjWLAuWzBb2wqrXpKqF9IianroSCxeZEC4Ujg6HWvLSYT//afJzeXWpsNyqGCqeoGF7kXE/w==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-not-found@3.2.82': + resolution: {integrity: sha512-6heNFE2gadcDYijWy4XJc6ZLzPd1qKe0i8sb8uyrR3mX0o5IFA+5KSAx/BFBkGS8j/tKOsCYvvmMKVdReeb1Gg==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-version@2.2.16': + resolution: {integrity: sha512-VRaKold1dUXq+cCD+vgE0QIT1HYU9duEYZjA0tB4krYuSyz6xS778JMF5EiqbbWEPH5Fwjj/MoHemkLuDgaGYw==} + engines: {node: '>=18.0.0'} + + '@oclif/plugin-warn-if-update-available@3.1.13': + resolution: {integrity: sha512-yAdb7kBPfLPxgC20vtFsttEKSHKP4ZL+iA01mEyzxn7IwzIpEHWlcOKjf4fapmMzAG4BHqcZq/g9yi/SZlRqpQ==} + engines: {node: '>=18.0.0'} + '@orpc/client@1.13.14': resolution: {integrity: sha512-JQf3lO//UGHmmkd8+9fuWuh1gga1lhWuKnsT19cui7F6WizBy0NdFSVQerOsSy2c1kxOthlD7GnicGgSY2rhQA==} @@ -3438,6 +4005,222 @@ packages: resolution: {integrity: sha512-TeheYy0ILzBEI/CO55CP6zJCSdSWeRtGnHy8U8dWSUH4I68iqTsy7HkMktR4xakThc9jotkPQUXT4ITdbV7cHA==} engines: {node: '>=18'} + '@sindresorhus/is@5.6.0': + resolution: {integrity: sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==} + engines: {node: '>=14.16'} + + '@smithy/chunked-blob-reader-native@4.2.3': + resolution: {integrity: sha512-jA5k5Udn7Y5717L86h4EIv06wIr3xn8GM1qHRi/Nf31annXcXHJjBKvgztnbn2TxH3xWrPBfgwHsOwZf0UmQWw==} + engines: {node: '>=18.0.0'} + + '@smithy/chunked-blob-reader@5.2.2': + resolution: {integrity: sha512-St+kVicSyayWQca+I1rGitaOEH6uKgE8IUWoYnnEX26SWdWQcL6LvMSD19Lg+vYHKdT9B2Zuu7rd3i6Wnyb/iw==} + engines: {node: '>=18.0.0'} + + 
'@smithy/config-resolver@4.4.17': + resolution: {integrity: sha512-TzDZcAnhTyAHbXVxWZo7/tEcrIeFq20IBk8So3OLOetWpR8EwY/yEqBMBFaJMeyEiREDq4NfEl+qO3OAUD+vbQ==} + engines: {node: '>=18.0.0'} + + '@smithy/core@3.23.17': + resolution: {integrity: sha512-x7BlLbUFL8NWCGjMF9C+1N5cVCxcPa7g6Tv9B4A2luWx3be3oU8hQ96wIwxe/s7OhIzvoJH73HAUSg5JXVlEtQ==} + engines: {node: '>=18.0.0'} + + '@smithy/credential-provider-imds@4.2.14': + resolution: {integrity: sha512-Au28zBN48ZAoXdooGUHemuVBrkE+Ie6RPmGNIAJsFqj33Vhb6xAgRifUydZ2aY+M+KaMAETAlKk5NC5h1G7wpg==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-codec@4.2.14': + resolution: {integrity: sha512-erZq0nOIpzfeZdCyzZjdJb4nVSKLUmSkaQUVkRGQTXs30gyUGeKnrYEg+Xe1W5gE3aReS7IgsvANwVPxSzY6Pw==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-browser@4.2.14': + resolution: {integrity: sha512-8IelTCtTctWRbb+0Dcy+C0aICh1qa0qWXqgjcXDmMuCvPJRnv26hiDZoAau2ILOniki65mCPKqOQs/BaWvO4CQ==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-config-resolver@4.3.14': + resolution: {integrity: sha512-sqHiHpYRYo3FJlaIxD1J8PhbcmJAm7IuM16mVnwSkCToD7g00IBZzKuiLNMGmftULmEUX6/UAz8/NN5uMP8bVA==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-node@4.2.14': + resolution: {integrity: sha512-Ht/8BuGlKfFTy0H3+8eEu0vdpwGztCnaLLXtpXNdQqiR7Hj4vFScU3T436vRAjATglOIPjJXronY+1WxxNLSiw==} + engines: {node: '>=18.0.0'} + + '@smithy/eventstream-serde-universal@4.2.14': + resolution: {integrity: sha512-lWyt4T2XQZUZgK3tQ3Wn0w3XBvZsK/vjTuJl6bXbnGZBHH0ZUSONTYiK9TgjTTzU54xQr3DRFwpjmhp0oLm3gg==} + engines: {node: '>=18.0.0'} + + '@smithy/fetch-http-handler@5.3.17': + resolution: {integrity: sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-blob-browser@4.2.15': + resolution: {integrity: sha512-0PJ4Al3fg2nM4qKrAIxyNcApgqHAXcBkN8FeizOz69z0rb26uZ6lMESYtxegaTlXB5Hj84JfwMPavMrwDMjucA==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-node@4.2.14': + 
resolution: {integrity: sha512-8ZBDY2DD4wr+GGjTpPtiglEsqr0lUP+KHqgZcWczFf6qeZ/YRjMIOoQWVQlmwu7EtxKTd8YXD8lblmYcpBIA1g==} + engines: {node: '>=18.0.0'} + + '@smithy/hash-stream-node@4.2.14': + resolution: {integrity: sha512-tw4GANWkZPb6+BdD4Fgucqzey2+r73Z/GRo9zklsCdwrnxxumUV83ZIaBDdudV4Ylazw3EPTiJZhpX42105ruQ==} + engines: {node: '>=18.0.0'} + + '@smithy/invalid-dependency@4.2.14': + resolution: {integrity: sha512-c21qJiTSb25xvvOp+H2TNZzPCngrvl5vIPqPB8zQ/DmJF4QWXO19x1dWfMJZ6wZuuWUPPm0gV8C0cU3+ifcWuw==} + engines: {node: '>=18.0.0'} + + '@smithy/is-array-buffer@2.2.0': + resolution: {integrity: sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==} + engines: {node: '>=14.0.0'} + + '@smithy/is-array-buffer@4.2.2': + resolution: {integrity: sha512-n6rQ4N8Jj4YTQO3YFrlgZuwKodf4zUFs7EJIWH86pSCWBaAtAGBFfCM7Wx6D2bBJ2xqFNxGBSrUWswT3M0VJow==} + engines: {node: '>=18.0.0'} + + '@smithy/md5-js@4.2.14': + resolution: {integrity: sha512-V2v0vx+h0iUSNG1Alt+GNBMSLGCrl9iVsdd+Ap67HPM9PN479x12V8LkuMoKImNZxn3MXeuyUjls+/7ZACZghA==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-content-length@4.2.14': + resolution: {integrity: sha512-xhHq7fX4/3lv5NHxLUk3OeEvl0xZ+Ek3qIbWaCL4f9JwgDZEclPBElljaZCAItdGPQl/kSM4LPMOpy1MYgprpw==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-endpoint@4.4.32': + resolution: {integrity: sha512-ZZkgyjnJppiZbIm6Qbx92pbXYi1uzenIvGhBSCDlc7NwuAkiqSgS75j1czAD25ZLs2FjMjYy1q7gyRVWG6JA0Q==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-retry@4.5.7': + resolution: {integrity: sha512-bRt6ZImqVSeTk39Nm81K20ObIiAZ3WefY7G6+iz/0tZjs4dgRRjvRX2sgsH+zi6iDCRR/aQvQofLKxxz4rPBZg==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-serde@4.2.20': + resolution: {integrity: sha512-Lx9JMO9vArPtiChE3wbEZ5akMIDQpWQtlu90lhACQmNOXcGXRbaDywMHDzuDZ2OkZzP+9wQfZi3YJT9F67zTQQ==} + engines: {node: '>=18.0.0'} + + '@smithy/middleware-stack@4.2.14': + resolution: {integrity: 
sha512-2dvkUKLuFdKsCRmOE4Mn63co0Djtsm+JMh0bYZQupN1pJwMeE8FmQmRLLzzEMN0dnNi7CDCYYH8F0EVwWiPBeA==} + engines: {node: '>=18.0.0'} + + '@smithy/node-config-provider@4.3.14': + resolution: {integrity: sha512-S+gFjyo/weSVL0P1b9Ts8C/CwIfNCgUPikk3sl6QVsfE/uUuO+QsF+NsE/JkpvWqqyz1wg7HFdiaZuj5CoBMRg==} + engines: {node: '>=18.0.0'} + + '@smithy/node-http-handler@4.6.1': + resolution: {integrity: sha512-iB+orM4x3xrr57X3YaXazfKnntl0LHlZB1kcXSGzMV1Tt0+YwEjGlbjk/44qEGtBzXAz6yFDzkYTKSV6Pj2HUg==} + engines: {node: '>=18.0.0'} + + '@smithy/property-provider@4.2.14': + resolution: {integrity: sha512-WuM31CgfsnQ/10i7NYr0PyxqknD72Y5uMfUMVSniPjbEPceiTErb4eIqJQ+pdxNEAUEWrewrGjIRjVbVHsxZiQ==} + engines: {node: '>=18.0.0'} + + '@smithy/protocol-http@5.3.14': + resolution: {integrity: sha512-dN5F8kHx8RNU0r+pCwNmFZyz6ChjMkzShy/zup6MtkRmmix4vZzJdW+di7x//b1LiynIev88FM18ie+wwPcQtQ==} + engines: {node: '>=18.0.0'} + + '@smithy/querystring-builder@4.2.14': + resolution: {integrity: sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A==} + engines: {node: '>=18.0.0'} + + '@smithy/querystring-parser@4.2.14': + resolution: {integrity: sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw==} + engines: {node: '>=18.0.0'} + + '@smithy/service-error-classification@4.3.1': + resolution: {integrity: sha512-aUQuDGh760ts/8MU+APjIZhlLPKhIIfqyzZaJikLEIMrdxFvxuLYD0WxWzaYWpmLbQlXDe9p7EWM3HsBe0K6Gw==} + engines: {node: '>=18.0.0'} + + '@smithy/shared-ini-file-loader@4.4.9': + resolution: {integrity: sha512-495/V2I15SHgedSJoDPD23JuSfKAp726ZI1V0wtjB07Wh7q/0tri/0e0DLefZCHgxZonrGKt/OCTpAtP1wE1kQ==} + engines: {node: '>=18.0.0'} + + '@smithy/signature-v4@5.3.14': + resolution: {integrity: sha512-1D9Y/nmlVjCeSivCbhZ7hgEpmHyY1h0GvpSZt3l0xcD9JjmjVC1CHOozS6+Gh+/ldMH8JuJ6cujObQqfayAVFA==} + engines: {node: '>=18.0.0'} + + '@smithy/smithy-client@4.12.13': + resolution: {integrity: 
sha512-y/Pcj1V9+qG98gyu1gvftHB7rDpdh+7kIBIggs55yGm3JdtBV8GT8IFF3a1qxZ79QnaJHX9GXzvBG6tAd+czJA==} + engines: {node: '>=18.0.0'} + + '@smithy/types@4.14.1': + resolution: {integrity: sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg==} + engines: {node: '>=18.0.0'} + + '@smithy/url-parser@4.2.14': + resolution: {integrity: sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-base64@4.3.2': + resolution: {integrity: sha512-XRH6b0H/5A3SgblmMa5ErXQ2XKhfbQB+Fm/oyLZ2O2kCUrwgg55bU0RekmzAhuwOjA9qdN5VU2BprOvGGUkOOQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-browser@4.2.2': + resolution: {integrity: sha512-JKCrLNOup3OOgmzeaKQwi4ZCTWlYR5H4Gm1r2uTMVBXoemo1UEghk5vtMi1xSu2ymgKVGW631e2fp9/R610ZjQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-body-length-node@4.2.3': + resolution: {integrity: sha512-ZkJGvqBzMHVHE7r/hcuCxlTY8pQr1kMtdsVPs7ex4mMU+EAbcXppfo5NmyxMYi2XU49eqaz56j2gsk4dHHPG/g==} + engines: {node: '>=18.0.0'} + + '@smithy/util-buffer-from@2.2.0': + resolution: {integrity: sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==} + engines: {node: '>=14.0.0'} + + '@smithy/util-buffer-from@4.2.2': + resolution: {integrity: sha512-FDXD7cvUoFWwN6vtQfEta540Y/YBe5JneK3SoZg9bThSoOAC/eGeYEua6RkBgKjGa/sz6Y+DuBZj3+YEY21y4Q==} + engines: {node: '>=18.0.0'} + + '@smithy/util-config-provider@4.2.2': + resolution: {integrity: sha512-dWU03V3XUprJwaUIFVv4iOnS1FC9HnMHDfUrlNDSh4315v0cWyaIErP8KiqGVbf5z+JupoVpNM7ZB3jFiTejvQ==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-browser@4.3.49': + resolution: {integrity: sha512-a5bNrdiONYB/qE2BuKegvUMd/+ZDwdg4vsNuuSzYE8qs2EYAdK9CynL+Rzn29PbPiUqoz/cbpRbcLzD5lEevHw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-defaults-mode-node@4.2.54': + resolution: {integrity: 
sha512-g1cvrJvOnzeJgEdf7AE4luI7gp6L8weE0y9a9wQUSGtjb8QRHDbCJYuE4Sy0SD9N8RrnNPFsPltAz/OSoBR9Zw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-endpoints@3.4.2': + resolution: {integrity: sha512-a55Tr+3OKld4TTtnT+RhKOQHyPxm3j/xL4OR83WBUhLJaKDS9dnJ7arRMOp3t31dcLhApwG9bgvrRXBHlLdIkg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-hex-encoding@4.2.2': + resolution: {integrity: sha512-Qcz3W5vuHK4sLQdyT93k/rfrUwdJ8/HZ+nMUOyGdpeGA1Wxt65zYwi3oEl9kOM+RswvYq90fzkNDahPS8K0OIg==} + engines: {node: '>=18.0.0'} + + '@smithy/util-middleware@4.2.14': + resolution: {integrity: sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-retry@4.3.8': + resolution: {integrity: sha512-LUIxbTBi+OpvXpg91poGA6BdyoleMDLnfXjVDqyi2RvZmTveY5loE/FgYUBCR5LU2BThW2SoZRh8dTIIy38IPw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-stream@4.5.25': + resolution: {integrity: sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==} + engines: {node: '>=18.0.0'} + + '@smithy/util-uri-escape@4.2.2': + resolution: {integrity: sha512-2kAStBlvq+lTXHyAZYfJRb/DfS3rsinLiwb+69SstC9Vb0s9vNWkRwpnj918Pfi85mzi42sOqdV72OLxWAISnw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-utf8@2.3.0': + resolution: {integrity: sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==} + engines: {node: '>=14.0.0'} + + '@smithy/util-utf8@4.2.2': + resolution: {integrity: sha512-75MeYpjdWRe8M5E3AW0O4Cx3UadweS+cwdXjwYGBW5h/gxxnbeZ877sLPX/ZJA9GVTlL/qG0dXP29JWFCD1Ayw==} + engines: {node: '>=18.0.0'} + + '@smithy/util-waiter@4.3.0': + resolution: {integrity: sha512-JyjYmLAfS+pdxF92o4yLgEoy0zhayKTw73FU1aofLWwLcJw7iSqIY2exGmMTrl/lmZugP5p/zxdFSippJDfKWA==} + engines: {node: '>=18.0.0'} + + '@smithy/uuid@1.1.2': + resolution: {integrity: sha512-O/IEdcCUKkubz60tFbGA7ceITTAJsty+lBjNoorP4Z6XRqaFb/OjQjZODophEcuq68nKm6/0r+6/lLQ+XVpk8g==} + engines: {node: '>=18.0.0'} + 
'@socket.io/component-emitter@3.1.2': resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==} @@ -3593,6 +4376,10 @@ packages: '@swc/helpers@0.5.20': resolution: {integrity: sha512-2egEBHUMasdypIzrprsu8g+OEVd7Vp2MM3a2eVlM/cyFYto0nGz5BX5BTgh/ShZZI9ed+ozEq+Ngt+rgmUs8tw==} + '@szmarczak/http-timer@5.0.1': + resolution: {integrity: sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==} + engines: {node: '>=14.16'} + '@t3-oss/env-core@0.13.11': resolution: {integrity: sha512-sM7GYY+KL7H/Hl0BE0inWfk3nRHZOLhmVn7sHGxaZt9FAR6KqREXAE+6TqKfiavfXmpRxO/OZ2QgKRd+oiBYRQ==} peerDependencies: @@ -4056,6 +4843,9 @@ packages: '@types/hast@3.0.4': resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + '@types/http-cache-semantics@4.2.0': + resolution: {integrity: sha512-L3LgimLHXtGkWikKnsPg0/VFx9OGZaC+eN1u4r+OB1XRqH3meBIAVC2zr1WdMH+RHmnRkqliQAOHNJ/E0j/e0Q==} + '@types/js-cookie@3.0.6': resolution: {integrity: sha512-wkw9yd1kEXOPnvEeEV1Go1MmxtBJL0RR79aOTAApecWFVu7w0NNXNqhcWgvw2YgZDYadliXkl14pa3WXw5jlCQ==} @@ -4077,9 +4867,15 @@ packages: '@types/ms@2.1.0': resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} + '@types/mute-stream@0.0.4': + resolution: {integrity: sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==} + '@types/negotiator@0.6.4': resolution: {integrity: sha512-elf6BsTq+AkyNsb2h5cGNst2Mc7dPliVoAPm1fXglC/BM3f2pFA40BaSSv3E5lyHteEawVKLP+8TwiY1DMNb3A==} + '@types/node@22.19.18': + resolution: {integrity: sha512-9v00a+dn2yWVsYDEunWC4g/TcRKVq3r8N5FuZp7u0SGrPvdN9c2yXI9bBuf5Fl0hNCb+QTIePTn5pJs2pwBOQQ==} + '@types/node@25.6.0': resolution: {integrity: sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ==} @@ -4118,6 +4914,9 @@ packages: 
'@types/whatwg-mimetype@3.0.2': resolution: {integrity: sha512-c2AKvDT8ToxLIOUlN51gTiHXflsfIFisS4pO7pDPoKouJCESkhZnEy623gwP9laCy5lnLDAw1vAzu2vM2YLOrA==} + '@types/wrap-ansi@3.0.0': + resolution: {integrity: sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==} + '@types/ws@8.18.1': resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} @@ -4294,6 +5093,7 @@ packages: resolution: {integrity: sha512-x7FptB5oDruxNPDNY2+S8tCh0pcq7ymCe1gTHcsp733jYjrJl8V1gMUlVysuCD9Kz46Xz9t1akkv08dPcYDs1w==} peerDependencies: '@vitest/browser': 4.1.4 + vitest: 4.1.4 peerDependenciesMeta: '@vitest/browser': optional: true @@ -4536,6 +5336,10 @@ packages: ajv@6.14.0: resolution: {integrity: sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==} + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + ansi-regex@4.1.1: resolution: {integrity: sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==} engines: {node: '>=6'} @@ -4556,6 +5360,10 @@ packages: resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} engines: {node: '>=10'} + ansis@3.17.0: + resolution: {integrity: sha512-0qWUglt9JEqLFr3w1I1pbrChn1grhaiAR2ocX1PP/flRmxgtwTzPFFFnfIlD6aMOLQZgSuCRlidD70lvx8yhzg==} + engines: {node: '>=14'} + ansis@4.2.0: resolution: {integrity: sha512-HqZ5rWlFjGiV0tDm3UxxgNRqsOTniqoKZu0pIAfh7TZQMGuZK+hH0drySty0si0QXj1ieop4+SkSfPZBPPkHig==} engines: {node: '>=14'} @@ -4599,6 +5407,9 @@ packages: resolution: {integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==} hasBin: true + async-retry@1.3.3: + resolution: {integrity: 
sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==} + async@3.2.6: resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} @@ -4640,9 +5451,15 @@ packages: boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + bowser@2.14.1: + resolution: {integrity: sha512-tzPjzCxygAKWFOJP011oxFHs57HzIhOEracIgAePE4pqB3LikALKnSzUyU4MGs9/iCEUuHlAJTjTc5M+u7YEGg==} + brace-expansion@1.1.13: resolution: {integrity: sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==} + brace-expansion@2.0.3: + resolution: {integrity: sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==} + brace-expansion@5.0.5: resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==} engines: {node: 18 || 20 || >=22} @@ -4685,10 +5502,21 @@ packages: resolution: {integrity: sha512-tixWYgm5ZoOD+3g6UTea91eow5z6AAHaho3g0V9CNSNb45gM8SmflpAc+GRd1InC4AqN/07Unrgp56Y94N9hJQ==} engines: {node: '>=20.19.0'} + cacheable-lookup@7.0.0: + resolution: {integrity: sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==} + engines: {node: '>=14.16'} + + cacheable-request@10.2.14: + resolution: {integrity: sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==} + engines: {node: '>=14.16'} + callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} + camel-case@4.1.2: + resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} + camelize@1.0.1: resolution: {integrity: 
sha512-dU+Tx2fsypxTgtLoE36npi3UqcjSSMNYfkqgmoEhtZrraP5VWq0K7FkWVTYa8eMPtnU/G2txVsfdCJTn9uzpuQ==} @@ -4717,6 +5545,13 @@ packages: resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} + chalk@5.6.2: + resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + change-case@4.1.2: + resolution: {integrity: sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==} + change-case@5.4.4: resolution: {integrity: sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==} @@ -4732,6 +5567,9 @@ packages: character-reference-invalid@2.0.1: resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + chardet@2.1.1: + resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} + check-error@2.1.3: resolution: {integrity: sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==} engines: {node: '>= 16'} @@ -4790,10 +5628,26 @@ packages: resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==} engines: {node: '>=4'} + clean-stack@3.0.1: + resolution: {integrity: sha512-lR9wNiMRcVQjSB3a7xXGLuz4cr4wJuuXlaAEbRutGowQTmlp7R72/DOgN21e8jdwblMWl9UOJMJXarX94pzKdg==} + engines: {node: '>=10'} + + cli-cursor@5.0.0: + resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} + engines: {node: '>=18'} + + cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + cli-table3@0.6.5: resolution: {integrity: 
sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} engines: {node: 10.* || >= 12.*} + cli-width@4.1.0: + resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} + engines: {node: '>= 12'} + client-only@0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} @@ -4863,6 +5717,13 @@ packages: confbox@0.2.4: resolution: {integrity: sha512-ysOGlgTFbN2/Y6Cg3Iye8YKulHw+R2fNXHrgSmXISQdMnomY6eNDprVdW9R5xBguEqI954+S6709UyiO7B+6OQ==} + constant-case@3.0.4: + resolution: {integrity: sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} @@ -5133,6 +5994,10 @@ packages: resolution: {integrity: sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==} engines: {node: '>=18'} + defer-to-connect@2.0.1: + resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} + engines: {node: '>=10'} + define-lazy-prop@3.0.0: resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} engines: {node: '>=12'} @@ -5144,10 +6009,18 @@ packages: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} + detect-indent@7.0.2: + resolution: {integrity: sha512-y+8xyqdGLL+6sh0tVeHcfP/QDd8gUgbasolJJpY7NgeQGSZ739bDtSiaiDgtoicy+mtYB81dKLxO9xRhCyIB3A==} + engines: {node: '>=12.20'} + detect-libc@2.1.2: resolution: 
{integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} + detect-newline@4.0.1: + resolution: {integrity: sha512-qE3Veg1YXzGHQhlA6jzebZN2qVf6NX+A7m7qlhCGG30dJixrAQhYOsJjsnBjJkCSmuOPpCk30145fr8FV0bzog==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + detect-node-es@1.1.0: resolution: {integrity: sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==} @@ -5192,6 +6065,9 @@ packages: domutils@3.2.2: resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==} + dot-case@3.0.4: + resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} + dotenv@16.6.1: resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==} engines: {node: '>=12'} @@ -5205,6 +6081,11 @@ packages: echarts@6.0.0: resolution: {integrity: sha512-Tte/grDQRiETQP4xz3iZWSvoHrkCQtwqd6hs+mifXcjrCuo2iKWbajFObuLJVBlDIJlOzgQPd1hsaKt/3+OMkQ==} + ejs@3.1.10: + resolution: {integrity: sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==} + engines: {node: '>=0.10.0'} + hasBin: true + electron-to-chromium@1.5.328: resolution: {integrity: sha512-QNQ5l45DzYytThO21403XN3FvK0hOkWDG8viNf6jqS42msJ8I4tGDSpBCgvDRRPnkffafiwAym2X2eHeGD2V0w==} @@ -5236,6 +6117,9 @@ packages: resolution: {integrity: sha512-1QFuh8l7LqUcKe24LsPUNzjrzJQ7pgRwp1QMcZ5MX6mFplk2zQ08NVCM84++1cveaUUYtcCYHmeFEuNg16sU4g==} engines: {node: '>=10.0.0'} + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + empathic@2.0.0: resolution: {integrity: sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==} engines: {node: '>=14'} @@ -5269,6 +6153,9 @@ packages: resolution: {integrity: 
sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==} engines: {node: '>=0.12'} + error-ex@1.3.4: + resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} + error-stack-parser-es@1.0.5: resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==} @@ -5643,6 +6530,10 @@ packages: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} + eventsource-parser@3.0.5: + resolution: {integrity: sha512-bSRG85ZrMdmWtm7qkF9He9TNRzc/Bm99gEJMaQoHJ9E6Kv9QBbsldh2oMj7iXmYNEAVvNgvv5vPorG6W+XtBhQ==} + engines: {node: '>=20.0.0'} + expand-template@2.0.3: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} @@ -5675,6 +6566,9 @@ packages: fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + fast-levenshtein@3.0.0: + resolution: {integrity: sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ==} + fast-string-truncated-width@1.2.1: resolution: {integrity: sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==} @@ -5684,6 +6578,17 @@ packages: fast-wrap-ansi@0.1.6: resolution: {integrity: sha512-HlUwET7a5gqjURj70D5jl7aC3Zmy4weA1SHUfM0JFI0Ptq987NH2TwbBFLoERhfwk+E+eaq4EK3jXoT+R3yp3w==} + fast-xml-builder@1.2.0: + resolution: {integrity: sha512-00aAWieqff+ZJhsXA4g1g7M8k+7AYoMUUHF+/zFb5U6Uv/P0Vl4QZo84/IcufzYalLuEj9928bXN9PbbFzMF0Q==} + + fast-xml-parser@5.7.2: + resolution: {integrity: sha512-P7oW7tLbYnhOLQk/Gv7cZgzgMPP/XN03K02/Jy6Y/NHzyIAIpxuZIM/YqAkfiXFPxA2CTm7NtCijK9EDu09u2w==} + hasBin: true + + fastest-levenshtein@1.0.16: + resolution: {integrity: 
sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==} + engines: {node: '>= 4.9.1'} + fastq@1.20.1: resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} @@ -5716,6 +6621,9 @@ packages: resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} engines: {node: '>=16.0.0'} + filelist@1.0.6: + resolution: {integrity: sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA==} + filesize@10.1.6: resolution: {integrity: sha512-sJslQKU2uM33qH5nqewAwVB2QgR6w1aMNsYUp3aN5rMRyXEwJGmZvaWzeJFNTOXWlHQyBFCWrdj3fV/fsTOX8w==} engines: {node: '>= 10.4.0'} @@ -5732,6 +6640,9 @@ packages: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} engines: {node: '>=10'} + find-yarn-workspace-root@2.0.0: + resolution: {integrity: sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ==} + flat-cache@4.0.1: resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} engines: {node: '>=16'} @@ -5739,6 +6650,10 @@ packages: flatted@3.4.2: resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==} + form-data-encoder@2.1.4: + resolution: {integrity: sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==} + engines: {node: '>= 14.17'} + format@0.2.2: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} @@ -5751,6 +6666,10 @@ packages: fs-constants@1.0.0: resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + fs-extra@8.1.0: + resolution: {integrity: 
sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} + engines: {node: '>=6 <7 || >=8'} + fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} @@ -5776,13 +6695,28 @@ packages: resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} engines: {node: '>=6'} + get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + + get-stdin@9.0.0: + resolution: {integrity: sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==} + engines: {node: '>=12'} + get-stream@5.2.0: resolution: {integrity: sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==} engines: {node: '>=8'} + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + get-tsconfig@4.13.7: resolution: {integrity: sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==} + git-hooks-list@3.2.0: + resolution: {integrity: sha512-ZHG9a1gEhUMX1TvGrLdyWb9kDopCBbTnI8z4JgRMYxsijWipgjSEYoPWqBuIB0DnRnvqlQSEeVmzpeuPm7NdFQ==} + github-from-package@0.0.0: resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} @@ -5825,6 +6759,10 @@ packages: peerDependencies: csstype: ^3.0.10 + got@13.0.0: + resolution: {integrity: sha512-XfBk1CxOOScDcMr9O1yKkNaQyy865NbYs+F7dr4H0LZMVgCj2Le59k6PqbNHoL5ToeaEQUYh6c6yMfVcc6SJxA==} + engines: {node: '>=16'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ 
-5888,6 +6826,9 @@ packages: hastscript@9.0.1: resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==} + header-case@2.0.4: + resolution: {integrity: sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==} + hex-rgb@4.3.0: resolution: {integrity: sha512-Ox1pJVrDCyGHMG9CFg1tmrRUMRPRsAWYc/PinY0XzJU4K7y7vjNoLKIQ7BR5UJMCxNN8EM1MNDmHWA/B3aZUuw==} engines: {node: '>=6'} @@ -5896,6 +6837,10 @@ packages: resolution: {integrity: sha512-am5zfg3yu6sqn5yjKBNqhnTX7Cv+m00ox+7jbaKkrLMRJ4rAdldd1xPd/JzbBWspqaQv6RSTrgFN95EsfhC+7w==} engines: {node: '>=16.9.0'} + hosted-git-info@7.0.2: + resolution: {integrity: sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==} + engines: {node: ^16.14.0 || >=18.0.0} + hosted-git-info@9.0.2: resolution: {integrity: sha512-M422h7o/BR3rmCQ8UHi7cyyMqKltdP9Uo+J2fXK+RSAY+wTcKOIRyhTuKv4qn+DJf3g+PL890AzId5KZpX+CBg==} engines: {node: ^20.17.0 || >=22.9.0} @@ -5921,6 +6866,17 @@ packages: htmlparser2@10.1.0: resolution: {integrity: sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==} + http-cache-semantics@4.2.0: + resolution: {integrity: sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==} + + http-call@5.3.0: + resolution: {integrity: sha512-ahwimsC23ICE4kPl9xTBjKB4inbRaeLyZeRunC/1Jy/Z6X8tv22MEAjK+KBOMSVLaqXPTTmd8638waVIKLGx2w==} + engines: {node: '>=8.0.0'} + + http2-wrapper@2.2.1: + resolution: {integrity: sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==} + engines: {node: '>=10.19.0'} + i18next-resources-to-backend@1.2.1: resolution: {integrity: sha512-okHbVA+HZ7n1/76MsfhPqDou0fptl2dAlhRDu2ideXloRRduzHsqDOznJBef+R3DFZnbvWoBW+KxJ7fnFjd6Yw==} @@ -5939,6 +6895,10 @@ packages: resolution: {integrity: 
sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} + iconv-lite@0.7.2: + resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + idb-keyval@6.2.2: resolution: {integrity: sha512-yjD9nARJ/jb1g+CvD0tlhUHOrJ9Sy0P8T9MF3YaLlHnSRpwPfpTX0XIvpmw3gAJUmEu3FiICLBDPXVwyEvrleg==} @@ -6014,6 +6974,9 @@ packages: is-alphanumerical@2.0.1: resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + is-builtin-module@5.0.0: resolution: {integrity: sha512-f4RqJKBUe5rQkJ2eJEJBXSticB3hGbN9j0yxxMQFqIW89Jp9WYFtzfTcRlstDKVUTRzSOTLKRfO9vIztenwtxA==} engines: {node: '>=18.20'} @@ -6021,6 +6984,11 @@ packages: is-decimal@2.0.1: resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + is-docker@3.0.0: resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -6030,6 +6998,10 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: 
'>=0.10.0'} @@ -6050,6 +7022,10 @@ packages: resolution: {integrity: sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==} engines: {node: '>=10'} + is-interactive@2.0.0: + resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} + engines: {node: '>=12'} + is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} @@ -6062,10 +7038,26 @@ packages: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} + is-retry-allowed@1.2.0: + resolution: {integrity: sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==} + engines: {node: '>=0.10.0'} + is-stream@2.0.1: resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} engines: {node: '>=8'} + is-unicode-supported@1.3.0: + resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} + engines: {node: '>=12'} + + is-unicode-supported@2.1.0: + resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} + engines: {node: '>=18'} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + is-wsl@3.1.1: resolution: {integrity: sha512-e6rvdUCiQCAuumZslxRJWR/Doq4VpPR82kqclvcS0efgt430SlGIk05vdCN58+VrzgtIcfNODjozVielycD4Sw==} engines: {node: '>=16'} @@ -6085,6 +7077,11 @@ packages: resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} engines: {node: '>=8'} + jake@10.9.4: + resolution: {integrity: 
sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==} + engines: {node: '>=10'} + hasBin: true + jiti@2.6.1: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true @@ -6146,6 +7143,9 @@ packages: json-buffer@3.0.1: resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + json-parse-better-errors@1.0.2: + resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} + json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} @@ -6161,6 +7161,9 @@ packages: resolution: {integrity: sha512-75EA7EWZExL/j+MDKQrRbdzcRI2HOkRlmUw8fZJc1ioqFEOvBsq7Rt+A6yCxOt9w/TYNpkt52gC6nm/g5tFIng==} engines: {node: ^20.19.0 || ^22.13.0 || >=24} + jsonfile@4.0.0: + resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} + jsonfile@6.2.0: resolution: {integrity: sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} @@ -6299,6 +7302,10 @@ packages: resolution: {integrity: sha512-NXYBzinNrblfraPGyrbPoD19C1h9lfI/1mzgWYvXUTe414Gz/X1FD2XBZSZM7rRTrMA8JL3OtAaGifrIKhQ5yQ==} engines: {node: '>= 12.0.0'} + lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} + engines: {node: '>=14'} + linebreak@1.1.0: resolution: {integrity: sha512-MHp03UImeVhB7XZtjd0E4n6+3xr5Dq/9xI/5FptGk5FrbDR3zagPa2DS6U8ks/3HjbKWG9Q1M2ufOzxV2qLYSQ==} @@ -6327,6 +7334,10 @@ packages: resolution: {integrity: sha512-l1mfj2atMqndAHI3ls7XqPxEjV2J9ZkcNyHpoZA3r2T1LLwDB69jgkMWh71YKwhBbK0G2f4WSn05ahmQXVxupA==} deprecated: Bad release. Please use lodash@4.17.21 instead. 
+ log-symbols@6.0.0: + resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} + engines: {node: '>=18'} + longest-streak@3.1.0: resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} @@ -6343,6 +7354,13 @@ packages: lower-case@2.0.2: resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} + lowercase-keys@3.0.0: + resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@11.2.7: resolution: {integrity: sha512-aY/R+aEsRelme17KGQa/1ZSIpLpNYYrhcrepKTZgE+W3WM16YMCaPwOHLHsmopZHELU0Ojin1lPVxKR0MihncA==} engines: {node: 20 || >=22} @@ -6589,10 +7607,18 @@ packages: engines: {node: '>=16'} hasBin: true + mimic-function@5.0.1: + resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} + engines: {node: '>=18'} + mimic-response@3.1.0: resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} engines: {node: '>=10'} + mimic-response@4.0.0: + resolution: {integrity: sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} @@ -6601,9 +7627,17 @@ packages: resolution: {integrity: sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==} engines: {node: 18 || 20 || >=22} + minimatch@10.2.5: + resolution: 
{integrity: sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==} + engines: {node: 18 || 20 || >=22} + minimatch@3.1.5: resolution: {integrity: sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==} + minimatch@5.1.9: + resolution: {integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==} + engines: {node: '>=10'} + minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} @@ -6645,6 +7679,14 @@ packages: ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mute-stream@1.0.0: + resolution: {integrity: sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + mute-stream@2.0.0: + resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} + engines: {node: ^18.17.0 || >=20.5.0} + mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} @@ -6710,10 +7752,18 @@ packages: node-releases@2.0.36: resolution: {integrity: sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==} + normalize-package-data@6.0.2: + resolution: {integrity: sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==} + engines: {node: ^16.14.0 || >=18.0.0} + normalize-package-data@8.0.0: resolution: {integrity: sha512-RWk+PI433eESQ7ounYxIp67CYuVsS1uYSonX3kA6ps/3LWfjVQa/ptEg6Y3T6uAMq1mWpX9PQ+qx+QaHpsc7gQ==} engines: {node: ^20.17.0 || >=22.9.0} + normalize-url@8.1.1: + resolution: {integrity: sha512-JYc0DPlpGWB40kH5g07gGTrYuMqV653k3uBKY6uITPWds3M0ov3GaWGp9lbE3Bzngx8+XkfzgvASb9vk9JDFXQ==} + engines: {node: 
'>=14.16'} + normalize-wheel@1.0.1: resolution: {integrity: sha512-1OnlAPZ3zgrk8B91HyRj+eVv+kS5u+Z0SCsak6Xil/kmgEia50ga7zfkumayonZrImffAxPU/5WcyGhzetHNPA==} @@ -6751,18 +7801,31 @@ packages: obug@2.1.1: resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + oclif@4.15.5: + resolution: {integrity: sha512-CX/C0VDGfLHQTk5Y4dEJdxC/aq81qcS4UmXI1EHaKJpFxSRoBb7INe/+S6TaIdKVau4TybCxI1CQi8rSERCkmw==} + engines: {node: '>=18.0.0'} + hasBin: true + ohash@2.0.11: resolution: {integrity: sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==} once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + onetime@7.0.0: + resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} + engines: {node: '>=18'} + oniguruma-parser@0.12.1: resolution: {integrity: sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==} oniguruma-to-es@4.3.5: resolution: {integrity: sha512-Zjygswjpsewa0NLTsiizVuMQZbp0MDyM6lIt66OxsF21npUDlzpHi1Mgb/qhQdkb+dWFTzJmFbEWdvZgRho8eQ==} + open@10.1.0: + resolution: {integrity: sha512-mnkeQ1qP5Ue2wd+aivTD3NHd/lZ96Lu0jgf0pwktLPtx6cTZiH7tyeGRRHs0zX0rbrahXPnXlUnbeXyaBBuIaw==} + engines: {node: '>=18'} + open@10.2.0: resolution: {integrity: sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==} engines: {node: '>=18'} @@ -6778,6 +7841,10 @@ packages: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} + ora@8.1.0: + resolution: {integrity: sha512-GQEkNkH/GHOhPFXcqZs3IDahXEQcQxsSjEkK4KvEEST4t7eNzoMjxTzef+EZ+JluDEV+Raoi3WQ2CflnRdSVnQ==} + engines: {node: '>=18'} + oxc-parser@0.121.0: resolution: {integrity: 
sha512-ek9o58+SCv6AV7nchiAcUJy1DNE2CC5WRdBcO0mF+W4oRjNQfPO7b3pLjTHSFECpHkKGOZSQxx3hk8viIL5YCg==} engines: {node: ^20.19.0 || >=22.12.0} @@ -6804,6 +7871,10 @@ packages: oxlint-tsgolint: optional: true + p-cancelable@3.0.0: + resolution: {integrity: sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==} + engines: {node: '>=12.20'} + p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} @@ -6829,6 +7900,9 @@ packages: papaparse@5.5.3: resolution: {integrity: sha512-5QvjGxYVjxO59MGU2lHVYpRWBBtKHnlIAcSe1uNFCkkptUh63NFRj0FJQm7nR67puEruUci/ZkjmEFrjCAyP4A==} + param-case@3.0.4: + resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} + parent-module@1.0.1: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} @@ -6846,6 +7920,10 @@ packages: parse-imports-exports@0.2.4: resolution: {integrity: sha512-4s6vd6dx1AotCx/RCI2m7t7GCh5bDRUtGNvRfHSP2wbBQdMi67pPe7mtzmgwcaQ8VKK/6IB7Glfyu3qdZJPybQ==} + parse-json@4.0.0: + resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} + engines: {node: '>=4'} + parse-json@8.3.0: resolution: {integrity: sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==} engines: {node: '>=18'} @@ -6865,9 +7943,15 @@ packages: parse5@8.0.0: resolution: {integrity: sha512-9m4m5GSgXjL4AjumKzq1Fgfp3Z8rsvjRNbnkVwfu2ImRqE5D0LnY2QfDen18FSY9C573YU5XxSapdHZTZ2WolA==} + pascal-case@3.1.2: + resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} + path-browserify@1.0.1: resolution: {integrity: sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==} + 
path-case@3.0.4: + resolution: {integrity: sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==} + path-data-parser@0.1.0: resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==} @@ -6875,6 +7959,10 @@ packages: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} + path-expression-matcher@1.5.0: + resolution: {integrity: sha512-cbrerZV+6rvdQrrD+iGMcZFEiiSrbv9Tfdkvnusy6y0x0GKBXREFg/Y65GhIfm0tnLntThhzCnfKwp1WRjeCyQ==} + engines: {node: '>=14.0.0'} + path-key@3.1.1: resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} engines: {node: '>=8'} @@ -6907,6 +7995,9 @@ packages: perfect-debounce@2.1.0: resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==} + picocolors@1.1.0: + resolution: {integrity: sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -7034,6 +8125,10 @@ packages: queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + quick-lru@5.1.1: + resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} + engines: {node: '>=10'} + radash@12.1.1: resolution: {integrity: sha512-h36JMxKRqrAxVD8201FrCpyeNuUY9Y5zZwujr20fFO77tpUtGa6EZzfKw/3WaiBX95fq7+MpsuMLNdSnORAwSA==} engines: {node: '>=14.18.0'} @@ -7320,6 +8415,9 @@ packages: resize-observer-polyfill@1.5.1: resolution: {integrity: sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==} + resolve-alpn@1.2.1: + 
resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} + resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -7332,6 +8430,18 @@ packages: engines: {node: '>= 0.4'} hasBin: true + responselike@3.0.0: + resolution: {integrity: sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==} + engines: {node: '>=14.16'} + + restore-cursor@5.1.0: + resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} + engines: {node: '>=18'} + + retry@0.13.1: + resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==} + engines: {node: '>= 4'} + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -7389,6 +8499,9 @@ packages: engines: {node: '>=10'} hasBin: true + sentence-case@3.0.4: + resolution: {integrity: sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==} + seroval-plugins@1.5.1: resolution: {integrity: sha512-4FbuZ/TMl02sqv0RTFexu0SP6V+ywaIe5bAWCCEik0fk17BhALgwvUDVF7e3Uvf9pxmwCEJsRPmlkUE6HdzLAw==} engines: {node: '>=10'} @@ -7415,6 +8528,10 @@ packages: resolution: {integrity: sha512-eAVKTMedR5ckPo4xne/PjYQYrU3qx78gtJZ+sHlXEg5IHhhoQhMfZVzetTYuaJS0L2Ef3AcCRzCHV8T0WI6nIQ==} engines: {node: '>=20'} + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + simple-concat@1.0.1: resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==} @@ -7435,6 +8552,9 @@ packages: resolution: {integrity: 
sha512-dWUG8F5sIIARXih1DTaQAX4SsiTXhInKf1buxdY9DIg4ZYPZK5nGM1VRIYmEbDbsHt7USo99xSLFu5Q1IqTmsg==} engines: {node: '>= 18'} + snake-case@3.0.4: + resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} + socket.io-client@4.8.3: resolution: {integrity: sha512-uP0bpjWrjQmUt5DTHq9RuoCBdFJF10cdX9X+a368j/Ft0wmaVgxlrjvK3kjvgCODOMMOz9lcaRzxmso0bTWZ/g==} engines: {node: '>=10.0.0'} @@ -7446,6 +8566,13 @@ packages: solid-js@1.9.11: resolution: {integrity: sha512-WEJtcc5mkh/BnHA6Yrg4whlF8g6QwpmXXRg4P2ztPmcKeHHlH4+djYecBLhSpecZY2RRECXYUwIc/C2r3yzQ4Q==} + sort-object-keys@1.1.3: + resolution: {integrity: sha512-855pvK+VkU7PaKYPc+Jjnmt4EzejQHyhhF33q31qG8x7maDzkeFhAAThdCYay11CISO+qAMwjOBP+fPZe0IPyg==} + + sort-package-json@2.15.1: + resolution: {integrity: sha512-9x9+o8krTT2saA9liI4BljNjwAbvUnWf11Wq+i/iZt8nl2UGYnf3TH5uBydE7VALmP7AGwlfszuEeL8BDyb0YA==} + hasBin: true + sortablejs@1.15.7: resolution: {integrity: sha512-Kk8wLQPlS+yi1ZEf48a4+fzHa4yxjC30M/Sr2AnQu+f/MPwvvX9XjZ6OWejiz8crBsLwSq8GHqaxaET7u6ux0A==} @@ -7500,6 +8627,10 @@ packages: resolution: {integrity: sha512-9SN0XIjBBXCT6ZXXVnScJN4KP2RyFg6B8sEoFlugVHMANysfaEni4LTWlvUQQ/R0wgZl1Ovt9KBQbzn21kHoZA==} engines: {node: '>=20.19.0'} + stdin-discarder@0.2.2: + resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==} + engines: {node: '>=18'} + storybook@10.3.5: resolution: {integrity: sha512-uBSZu/GZa9aEIW3QMGvdQPMZWhGxSe4dyRWU8B3/Vd47Gy/XLC7tsBxRr13txmmPOEDHZR94uLuq0H50fvuqBw==} hasBin: true @@ -7522,6 +8653,10 @@ packages: string-ts@2.3.1: resolution: {integrity: sha512-xSJq+BS52SaFFAVxuStmx6n5aYZU571uYUnUrPXkPFCfdHyZMMlbP2v2Wx5sNBnAVzq/2+0+mcBLBa3Xa5ubYw==} + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + string-width@8.2.0: resolution: {integrity: 
sha512-6hJPQ8N0V0P3SNmP6h2J99RLuzrWz2gvT7VnK5tKvrNqJoyS9W4/Fb8mo31UiPvy00z7DQXkP2hnKBVav76thw==} engines: {node: '>=20'} @@ -7535,6 +8670,10 @@ packages: stringify-entities@4.0.4: resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + strip-ansi@7.2.0: resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} engines: {node: '>=12'} @@ -7566,6 +8705,9 @@ packages: strip-literal@3.1.0: resolution: {integrity: sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==} + strnum@2.3.0: + resolution: {integrity: sha512-ums3KNd42PGyx5xaoVTO1mjU1bH3NpY4vsrVlnv9PNGqQj8wd7rJ6nEypLrJ7z5vxK5RP0yMLo6J/Gsm62DI5Q==} + structured-clone-es@2.0.0: resolution: {integrity: sha512-5UuAHmBLXYPCl22xWJrFuGmIhBKQzxISPVz6E7nmTmTcAOpUzlbjKJsRrCE4vADmMQ0dzeCnlWn9XufnAGf76Q==} @@ -7668,6 +8810,9 @@ packages: tiny-invariant@1.3.3: resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + tiny-jsonc@1.0.2: + resolution: {integrity: sha512-f5QDAfLq6zIVSyCZQZhhyl0QS6MvAyTxgz4X4x3+EoCktNWEYJ6PeoEA97fyb98njpBNNi88ybpD7m+BDFXaCw==} + tinybench@2.9.0: resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} @@ -7797,6 +8942,10 @@ packages: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + type-fest@2.19.0: resolution: {integrity: 
sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} engines: {node: '>=12.20'} @@ -7826,6 +8975,9 @@ packages: resolution: {integrity: sha512-X2wH19RAPZE3+ldGicOkoj/SIA83OIxcJ6Cuaw23hf8Xc6fQpvZXY0SftE2JgS0QhYLUG4uwodSI3R53keyh7w==} engines: {node: '>=14'} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + undici-types@7.19.2: resolution: {integrity: sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg==} @@ -7867,6 +9019,10 @@ packages: unist-util-visit@5.1.0: resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + universalify@0.1.2: + resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} + engines: {node: '>= 4.0.0'} + universalify@2.0.1: resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} engines: {node: '>= 10.0.0'} @@ -7891,6 +9047,9 @@ packages: upper-case-first@2.0.2: resolution: {integrity: sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==} + upper-case@2.0.2: + resolution: {integrity: sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==} + uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} @@ -7980,6 +9139,10 @@ packages: validate-npm-package-license@3.0.4: resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + validate-npm-package-name@5.0.1: + resolution: {integrity: sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + vfile-location@5.0.3: 
resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} @@ -8141,10 +9304,25 @@ packages: engines: {node: '>= 8'} hasBin: true + widest-line@3.1.0: + resolution: {integrity: sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==} + engines: {node: '>=8'} + word-wrap@1.2.5: resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} engines: {node: '>=0.10.0'} + wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + + wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} @@ -8184,6 +9362,10 @@ packages: resolution: {integrity: sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==} engines: {node: '>=12'} + xml-naming@0.1.0: + resolution: {integrity: sha512-k8KO9hrMyNk6tUWqUfkTEZbezRRpONVOzUTnc97VnCvyj6Tf9lyUR9EDAIeiVLv56jsMcoXEwjW8Kv5yPY52lw==} + engines: {node: '>=16.0.0'} + xmlbuilder@15.1.1: resolution: {integrity: sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==} engines: {node: '>=8.0'} @@ -8220,6 +9402,10 @@ packages: resolution: {integrity: sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==} engines: {node: '>=12.20'} + yoctocolors-cjs@2.1.3: + resolution: {integrity: sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw==} + 
engines: {node: '>=18'} + yoga-layout@3.2.1: resolution: {integrity: sha512-0LPOt3AxKqMdFBZA3HBAt/t/8vIKq7VaQYbuA8WxCgung+p9TVyKRYdpvCb80HcdTN2NkbIKbhNwKUfm3tQywQ==} @@ -8430,7 +9616,7 @@ snapshots: idb: 8.0.0 tslib: 2.8.1 - '@antfu/eslint-config@8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': + '@antfu/eslint-config@8.2.0(@eslint-react/eslint-plugin@3.0.0(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@next/eslint-plugin-next@16.2.3)(@types/node@25.6.0)(@typescript-eslint/typescript-estree@8.58.2(typescript@6.0.2))(@typescript-eslint/utils@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint-plugin-react-refresh@0.5.2(eslint@10.2.0(jiti@2.6.1)))(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(oxlint@1.60.0(oxlint-tsgolint@0.20.0))(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': dependencies: '@antfu/install-pkg': 1.1.0 '@clack/prompts': 1.2.0 @@ -8440,7 +9626,7 @@ snapshots: '@stylistic/eslint-plugin': 5.10.0(eslint@10.2.0(jiti@2.6.1)) 
'@typescript-eslint/eslint-plugin': 8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2) '@typescript-eslint/parser': 8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2) - '@vitest/eslint-plugin': 1.6.15(@types/node@25.6.0)(@typescript-eslint/eslint-plugin@8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + '@vitest/eslint-plugin': 1.6.15(@types/node@25.6.0)(@typescript-eslint/eslint-plugin@8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) ansis: 4.2.0 cac: 7.0.0 eslint: 10.2.0(jiti@2.6.1) @@ -8517,6 +9703,499 @@ snapshots: '@antfu/utils@8.1.1': {} + '@aws-crypto/crc32@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/crc32c@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/sha1-browser@5.2.0': + dependencies: + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-locate-window': 3.965.5 
+ '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-crypto/sha256-browser@5.2.0': + dependencies: + '@aws-crypto/sha256-js': 5.2.0 + '@aws-crypto/supports-web-crypto': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-locate-window': 3.965.5 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-crypto/sha256-js@5.2.0': + dependencies: + '@aws-crypto/util': 5.2.0 + '@aws-sdk/types': 3.973.8 + tslib: 2.8.1 + + '@aws-crypto/supports-web-crypto@5.2.0': + dependencies: + tslib: 2.8.1 + + '@aws-crypto/util@5.2.0': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/util-utf8': 2.3.0 + tslib: 2.8.1 + + '@aws-sdk/client-cloudfront@3.1045.0': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/credential-provider-node': 3.972.39 + '@aws-sdk/middleware-host-header': 3.972.10 + '@aws-sdk/middleware-logger': 3.972.10 + '@aws-sdk/middleware-recursion-detection': 3.972.11 + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/region-config-resolver': 3.972.13 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@aws-sdk/util-user-agent-browser': 3.972.10 + '@aws-sdk/util-user-agent-node': 3.973.24 + '@smithy/config-resolver': 4.4.17 + '@smithy/core': 3.23.17 + '@smithy/fetch-http-handler': 5.3.17 + '@smithy/hash-node': 4.2.14 + '@smithy/invalid-dependency': 4.2.14 + '@smithy/middleware-content-length': 4.2.14 + '@smithy/middleware-endpoint': 4.4.32 + '@smithy/middleware-retry': 4.5.7 + '@smithy/middleware-serde': 4.2.20 + '@smithy/middleware-stack': 4.2.14 + '@smithy/node-config-provider': 4.3.14 + '@smithy/node-http-handler': 4.6.1 + '@smithy/protocol-http': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + '@smithy/util-base64': 4.3.2 + '@smithy/util-body-length-browser': 4.2.2 + '@smithy/util-body-length-node': 4.2.3 + '@smithy/util-defaults-mode-browser': 4.3.49 + '@smithy/util-defaults-mode-node': 
4.2.54 + '@smithy/util-endpoints': 3.4.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-retry': 4.3.8 + '@smithy/util-stream': 4.5.25 + '@smithy/util-utf8': 4.2.2 + '@smithy/util-waiter': 4.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/client-s3@3.1045.0': + dependencies: + '@aws-crypto/sha1-browser': 5.2.0 + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/credential-provider-node': 3.972.39 + '@aws-sdk/middleware-bucket-endpoint': 3.972.10 + '@aws-sdk/middleware-expect-continue': 3.972.10 + '@aws-sdk/middleware-flexible-checksums': 3.974.16 + '@aws-sdk/middleware-host-header': 3.972.10 + '@aws-sdk/middleware-location-constraint': 3.972.10 + '@aws-sdk/middleware-logger': 3.972.10 + '@aws-sdk/middleware-recursion-detection': 3.972.11 + '@aws-sdk/middleware-sdk-s3': 3.972.37 + '@aws-sdk/middleware-ssec': 3.972.10 + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/region-config-resolver': 3.972.13 + '@aws-sdk/signature-v4-multi-region': 3.996.25 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@aws-sdk/util-user-agent-browser': 3.972.10 + '@aws-sdk/util-user-agent-node': 3.973.24 + '@smithy/config-resolver': 4.4.17 + '@smithy/core': 3.23.17 + '@smithy/eventstream-serde-browser': 4.2.14 + '@smithy/eventstream-serde-config-resolver': 4.3.14 + '@smithy/eventstream-serde-node': 4.2.14 + '@smithy/fetch-http-handler': 5.3.17 + '@smithy/hash-blob-browser': 4.2.15 + '@smithy/hash-node': 4.2.14 + '@smithy/hash-stream-node': 4.2.14 + '@smithy/invalid-dependency': 4.2.14 + '@smithy/md5-js': 4.2.14 + '@smithy/middleware-content-length': 4.2.14 + '@smithy/middleware-endpoint': 4.4.32 + '@smithy/middleware-retry': 4.5.7 + '@smithy/middleware-serde': 4.2.20 + '@smithy/middleware-stack': 4.2.14 + '@smithy/node-config-provider': 4.3.14 + '@smithy/node-http-handler': 4.6.1 + '@smithy/protocol-http': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + 
'@smithy/url-parser': 4.2.14 + '@smithy/util-base64': 4.3.2 + '@smithy/util-body-length-browser': 4.2.2 + '@smithy/util-body-length-node': 4.2.3 + '@smithy/util-defaults-mode-browser': 4.3.49 + '@smithy/util-defaults-mode-node': 4.2.54 + '@smithy/util-endpoints': 3.4.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-retry': 4.3.8 + '@smithy/util-stream': 4.5.25 + '@smithy/util-utf8': 4.2.2 + '@smithy/util-waiter': 4.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/core@3.974.8': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws-sdk/xml-builder': 3.972.22 + '@smithy/core': 3.23.17 + '@smithy/node-config-provider': 4.3.14 + '@smithy/property-provider': 4.2.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/signature-v4': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/util-base64': 4.3.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-retry': 4.3.8 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@aws-sdk/crc64-nvme@3.972.7': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-env@3.972.34': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-http@3.972.36': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/fetch-http-handler': 5.3.17 + '@smithy/node-http-handler': 4.6.1 + '@smithy/property-provider': 4.2.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/util-stream': 4.5.25 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-ini@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/credential-provider-env': 3.972.34 + '@aws-sdk/credential-provider-http': 3.972.36 + '@aws-sdk/credential-provider-login': 3.972.38 + '@aws-sdk/credential-provider-process': 3.972.34 + '@aws-sdk/credential-provider-sso': 3.972.38 + 
'@aws-sdk/credential-provider-web-identity': 3.972.38 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/credential-provider-imds': 4.2.14 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-login@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-node@3.972.39': + dependencies: + '@aws-sdk/credential-provider-env': 3.972.34 + '@aws-sdk/credential-provider-http': 3.972.36 + '@aws-sdk/credential-provider-ini': 3.972.38 + '@aws-sdk/credential-provider-process': 3.972.34 + '@aws-sdk/credential-provider-sso': 3.972.38 + '@aws-sdk/credential-provider-web-identity': 3.972.38 + '@aws-sdk/types': 3.973.8 + '@smithy/credential-provider-imds': 4.2.14 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-process@3.972.34': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/credential-provider-sso@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/token-providers': 3.1041.0 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/credential-provider-web-identity@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + 
'@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/middleware-bucket-endpoint@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-arn-parser': 3.972.3 + '@smithy/node-config-provider': 4.3.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.2.2 + tslib: 2.8.1 + + '@aws-sdk/middleware-expect-continue@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-flexible-checksums@3.974.16': + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@aws-crypto/crc32c': 5.2.0 + '@aws-crypto/util': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/crc64-nvme': 3.972.7 + '@aws-sdk/types': 3.973.8 + '@smithy/is-array-buffer': 4.2.2 + '@smithy/node-config-provider': 4.3.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-stream': 4.5.25 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@aws-sdk/middleware-host-header@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-location-constraint@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-logger@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-recursion-detection@3.972.11': + dependencies: + '@aws-sdk/types': 3.973.8 + '@aws/lambda-invoke-store': 0.2.4 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-sdk-s3@3.972.37': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-arn-parser': 3.972.3 + '@smithy/core': 
3.23.17 + '@smithy/node-config-provider': 4.3.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/signature-v4': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.2.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-stream': 4.5.25 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@aws-sdk/middleware-ssec@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/middleware-user-agent@3.972.38': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@smithy/core': 3.23.17 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-retry': 4.3.8 + tslib: 2.8.1 + + '@aws-sdk/nested-clients@3.997.6': + dependencies: + '@aws-crypto/sha256-browser': 5.2.0 + '@aws-crypto/sha256-js': 5.2.0 + '@aws-sdk/core': 3.974.8 + '@aws-sdk/middleware-host-header': 3.972.10 + '@aws-sdk/middleware-logger': 3.972.10 + '@aws-sdk/middleware-recursion-detection': 3.972.11 + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/region-config-resolver': 3.972.13 + '@aws-sdk/signature-v4-multi-region': 3.996.25 + '@aws-sdk/types': 3.973.8 + '@aws-sdk/util-endpoints': 3.996.8 + '@aws-sdk/util-user-agent-browser': 3.972.10 + '@aws-sdk/util-user-agent-node': 3.973.24 + '@smithy/config-resolver': 4.4.17 + '@smithy/core': 3.23.17 + '@smithy/fetch-http-handler': 5.3.17 + '@smithy/hash-node': 4.2.14 + '@smithy/invalid-dependency': 4.2.14 + '@smithy/middleware-content-length': 4.2.14 + '@smithy/middleware-endpoint': 4.4.32 + '@smithy/middleware-retry': 4.5.7 + '@smithy/middleware-serde': 4.2.20 + '@smithy/middleware-stack': 4.2.14 + '@smithy/node-config-provider': 4.3.14 + '@smithy/node-http-handler': 4.6.1 + '@smithy/protocol-http': 5.3.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + '@smithy/util-base64': 4.3.2 + '@smithy/util-body-length-browser': 4.2.2 + 
'@smithy/util-body-length-node': 4.2.3 + '@smithy/util-defaults-mode-browser': 4.3.49 + '@smithy/util-defaults-mode-node': 4.2.54 + '@smithy/util-endpoints': 3.4.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-retry': 4.3.8 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/region-config-resolver@3.972.13': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/config-resolver': 4.4.17 + '@smithy/node-config-provider': 4.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/signature-v4-multi-region@3.996.25': + dependencies: + '@aws-sdk/middleware-sdk-s3': 3.972.37 + '@aws-sdk/types': 3.973.8 + '@smithy/protocol-http': 5.3.14 + '@smithy/signature-v4': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/token-providers@3.1041.0': + dependencies: + '@aws-sdk/core': 3.974.8 + '@aws-sdk/nested-clients': 3.997.6 + '@aws-sdk/types': 3.973.8 + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + transitivePeerDependencies: + - aws-crt + + '@aws-sdk/types@3.973.8': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@aws-sdk/util-arn-parser@3.972.3': + dependencies: + tslib: 2.8.1 + + '@aws-sdk/util-endpoints@3.996.8': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + '@smithy/util-endpoints': 3.4.2 + tslib: 2.8.1 + + '@aws-sdk/util-locate-window@3.965.5': + dependencies: + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-browser@3.972.10': + dependencies: + '@aws-sdk/types': 3.973.8 + '@smithy/types': 4.14.1 + bowser: 2.14.1 + tslib: 2.8.1 + + '@aws-sdk/util-user-agent-node@3.973.24': + dependencies: + '@aws-sdk/middleware-user-agent': 3.972.38 + '@aws-sdk/types': 3.973.8 + '@smithy/node-config-provider': 4.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.2.2 + tslib: 2.8.1 + + '@aws-sdk/xml-builder@3.972.22': + dependencies: + '@nodable/entities': 2.1.0 + 
'@smithy/types': 4.14.1 + fast-xml-parser: 5.7.2 + tslib: 2.8.1 + + '@aws/lambda-invoke-store@0.2.4': {} + '@babel/code-frame@7.29.0': dependencies: '@babel/helper-validator-identifier': 7.28.5 @@ -9379,6 +11058,172 @@ snapshots: '@img/sharp-win32-x64@0.34.5': optional: true + '@inquirer/ansi@1.0.2': {} + + '@inquirer/checkbox@4.3.2(@types/node@25.6.0)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@25.6.0) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/confirm@3.2.0': + dependencies: + '@inquirer/core': 9.2.1 + '@inquirer/type': 1.5.5 + + '@inquirer/confirm@5.1.21(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/core@10.3.2(@types/node@25.6.0)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@25.6.0) + cli-width: 4.1.0 + mute-stream: 2.0.0 + signal-exit: 4.1.0 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/core@9.2.1': + dependencies: + '@inquirer/figures': 1.0.15 + '@inquirer/type': 2.0.0 + '@types/mute-stream': 0.0.4 + '@types/node': 22.19.18 + '@types/wrap-ansi': 3.0.0 + ansi-escapes: 4.3.2 + cli-width: 4.1.0 + mute-stream: 1.0.0 + signal-exit: 4.1.0 + strip-ansi: 6.0.1 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.3 + + '@inquirer/editor@4.2.23(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/external-editor': 1.0.3(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/expand@4.0.23(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + 
yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/external-editor@1.0.3(@types/node@25.6.0)': + dependencies: + chardet: 2.1.1 + iconv-lite: 0.7.2 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/figures@1.0.15': {} + + '@inquirer/input@2.3.0': + dependencies: + '@inquirer/core': 9.2.1 + '@inquirer/type': 1.5.5 + + '@inquirer/input@4.3.1(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/number@3.0.23(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/password@4.0.23(@types/node@25.6.0)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/prompts@7.10.1(@types/node@25.6.0)': + dependencies: + '@inquirer/checkbox': 4.3.2(@types/node@25.6.0) + '@inquirer/confirm': 5.1.21(@types/node@25.6.0) + '@inquirer/editor': 4.2.23(@types/node@25.6.0) + '@inquirer/expand': 4.0.23(@types/node@25.6.0) + '@inquirer/input': 4.3.1(@types/node@25.6.0) + '@inquirer/number': 3.0.23(@types/node@25.6.0) + '@inquirer/password': 4.0.23(@types/node@25.6.0) + '@inquirer/rawlist': 4.1.11(@types/node@25.6.0) + '@inquirer/search': 3.2.2(@types/node@25.6.0) + '@inquirer/select': 4.4.2(@types/node@25.6.0) + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/rawlist@4.1.11(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/type': 3.0.10(@types/node@25.6.0) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/search@3.2.2(@types/node@25.6.0)': + dependencies: + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/figures': 
1.0.15 + '@inquirer/type': 3.0.10(@types/node@25.6.0) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/select@2.5.0': + dependencies: + '@inquirer/core': 9.2.1 + '@inquirer/figures': 1.0.15 + '@inquirer/type': 1.5.5 + ansi-escapes: 4.3.2 + yoctocolors-cjs: 2.1.3 + + '@inquirer/select@4.4.2(@types/node@25.6.0)': + dependencies: + '@inquirer/ansi': 1.0.2 + '@inquirer/core': 10.3.2(@types/node@25.6.0) + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@25.6.0) + yoctocolors-cjs: 2.1.3 + optionalDependencies: + '@types/node': 25.6.0 + + '@inquirer/type@1.5.5': + dependencies: + mute-stream: 1.0.0 + + '@inquirer/type@2.0.0': + dependencies: + mute-stream: 1.0.0 + + '@inquirer/type@3.0.10(@types/node@25.6.0)': + optionalDependencies: + '@types/node': 25.6.0 + '@isaacs/fs-minipass@4.0.1': dependencies: minipass: 7.1.3 @@ -9632,6 +11477,57 @@ snapshots: react: 19.2.5 react-dom: 19.2.5(react@19.2.5) + '@napi-rs/keyring-darwin-arm64@1.1.6': + optional: true + + '@napi-rs/keyring-darwin-x64@1.1.6': + optional: true + + '@napi-rs/keyring-freebsd-x64@1.1.6': + optional: true + + '@napi-rs/keyring-linux-arm-gnueabihf@1.1.6': + optional: true + + '@napi-rs/keyring-linux-arm64-gnu@1.1.6': + optional: true + + '@napi-rs/keyring-linux-arm64-musl@1.1.6': + optional: true + + '@napi-rs/keyring-linux-riscv64-gnu@1.1.6': + optional: true + + '@napi-rs/keyring-linux-x64-gnu@1.1.6': + optional: true + + '@napi-rs/keyring-linux-x64-musl@1.1.6': + optional: true + + '@napi-rs/keyring-win32-arm64-msvc@1.1.6': + optional: true + + '@napi-rs/keyring-win32-ia32-msvc@1.1.6': + optional: true + + '@napi-rs/keyring-win32-x64-msvc@1.1.6': + optional: true + + '@napi-rs/keyring@1.1.6': + optionalDependencies: + '@napi-rs/keyring-darwin-arm64': 1.1.6 + '@napi-rs/keyring-darwin-x64': 1.1.6 + '@napi-rs/keyring-freebsd-x64': 1.1.6 + '@napi-rs/keyring-linux-arm-gnueabihf': 1.1.6 + '@napi-rs/keyring-linux-arm64-gnu': 1.1.6 + 
'@napi-rs/keyring-linux-arm64-musl': 1.1.6 + '@napi-rs/keyring-linux-riscv64-gnu': 1.1.6 + '@napi-rs/keyring-linux-x64-gnu': 1.1.6 + '@napi-rs/keyring-linux-x64-musl': 1.1.6 + '@napi-rs/keyring-win32-arm64-msvc': 1.1.6 + '@napi-rs/keyring-win32-ia32-msvc': 1.1.6 + '@napi-rs/keyring-win32-x64-msvc': 1.1.6 + '@napi-rs/wasm-runtime@1.1.2(@emnapi/runtime@1.9.1)': dependencies: '@emnapi/runtime': 1.9.1 @@ -9679,6 +11575,8 @@ snapshots: '@next/swc-win32-x64-msvc@16.2.3': optional: true + '@nodable/entities@2.1.0': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -9697,6 +11595,75 @@ snapshots: '@nolyfill/side-channel@1.0.44': {} + '@oclif/core@4.11.1': + dependencies: + ansi-escapes: 4.3.2 + ansis: 3.17.0 + clean-stack: 3.0.1 + cli-spinners: 2.9.2 + debug: 4.4.3(supports-color@8.1.1) + ejs: 3.1.10 + get-package-type: 0.1.0 + indent-string: 4.0.0 + is-wsl: 2.2.0 + lilconfig: 3.1.3 + minimatch: 10.2.5 + semver: 7.7.4 + string-width: 4.2.3 + supports-color: 8.1.1 + tinyglobby: 0.2.16 + widest-line: 3.1.0 + wordwrap: 1.0.0 + wrap-ansi: 7.0.0 + + '@oclif/plugin-autocomplete@3.2.6': + dependencies: + '@oclif/core': 4.11.1 + ansis: 3.17.0 + debug: 4.4.3(supports-color@8.1.1) + ejs: 3.1.10 + transitivePeerDependencies: + - supports-color + + '@oclif/plugin-help@6.2.10': + dependencies: + '@oclif/core': 4.11.1 + + '@oclif/plugin-help@6.2.46': + dependencies: + '@oclif/core': 4.11.1 + + '@oclif/plugin-not-found@3.2.18': + dependencies: + '@inquirer/confirm': 3.2.0 + '@oclif/core': 4.11.1 + ansis: 3.17.0 + fast-levenshtein: 3.0.0 + + '@oclif/plugin-not-found@3.2.82(@types/node@25.6.0)': + dependencies: + '@inquirer/prompts': 7.10.1(@types/node@25.6.0) + '@oclif/core': 4.11.1 + ansis: 3.17.0 + fast-levenshtein: 3.0.0 + transitivePeerDependencies: + - '@types/node' + + '@oclif/plugin-version@2.2.16': + dependencies: + '@oclif/core': 4.11.1 + ansis: 3.17.0 + + '@oclif/plugin-warn-if-update-available@3.1.13': + dependencies: + '@oclif/core': 4.11.1 + ansis: 
3.17.0 + debug: 4.4.3(supports-color@8.1.1) + http-call: 5.3.0 + lodash: 4.18.0 + transitivePeerDependencies: + - supports-color + '@orpc/client@1.13.14': dependencies: '@orpc/shared': 1.13.14 @@ -10427,6 +12394,341 @@ snapshots: '@sindresorhus/base62@1.0.0': {} + '@sindresorhus/is@5.6.0': {} + + '@smithy/chunked-blob-reader-native@4.2.3': + dependencies: + '@smithy/util-base64': 4.3.2 + tslib: 2.8.1 + + '@smithy/chunked-blob-reader@5.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/config-resolver@4.4.17': + dependencies: + '@smithy/node-config-provider': 4.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-config-provider': 4.2.2 + '@smithy/util-endpoints': 3.4.2 + '@smithy/util-middleware': 4.2.14 + tslib: 2.8.1 + + '@smithy/core@3.23.17': + dependencies: + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + '@smithy/util-base64': 4.3.2 + '@smithy/util-body-length-browser': 4.2.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-stream': 4.5.25 + '@smithy/util-utf8': 4.2.2 + '@smithy/uuid': 1.1.2 + tslib: 2.8.1 + + '@smithy/credential-provider-imds@4.2.14': + dependencies: + '@smithy/node-config-provider': 4.3.14 + '@smithy/property-provider': 4.2.14 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + tslib: 2.8.1 + + '@smithy/eventstream-codec@4.2.14': + dependencies: + '@aws-crypto/crc32': 5.2.0 + '@smithy/types': 4.14.1 + '@smithy/util-hex-encoding': 4.2.2 + tslib: 2.8.1 + + '@smithy/eventstream-serde-browser@4.2.14': + dependencies: + '@smithy/eventstream-serde-universal': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-config-resolver@4.3.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-node@4.2.14': + dependencies: + '@smithy/eventstream-serde-universal': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/eventstream-serde-universal@4.2.14': + dependencies: + '@smithy/eventstream-codec': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 
2.8.1 + + '@smithy/fetch-http-handler@5.3.17': + dependencies: + '@smithy/protocol-http': 5.3.14 + '@smithy/querystring-builder': 4.2.14 + '@smithy/types': 4.14.1 + '@smithy/util-base64': 4.3.2 + tslib: 2.8.1 + + '@smithy/hash-blob-browser@4.2.15': + dependencies: + '@smithy/chunked-blob-reader': 5.2.2 + '@smithy/chunked-blob-reader-native': 4.2.3 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/hash-node@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + '@smithy/util-buffer-from': 4.2.2 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/hash-stream-node@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/invalid-dependency@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/is-array-buffer@2.2.0': + dependencies: + tslib: 2.8.1 + + '@smithy/is-array-buffer@4.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/md5-js@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/middleware-content-length@4.2.14': + dependencies: + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/middleware-endpoint@4.4.32': + dependencies: + '@smithy/core': 3.23.17 + '@smithy/middleware-serde': 4.2.20 + '@smithy/node-config-provider': 4.3.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + '@smithy/url-parser': 4.2.14 + '@smithy/util-middleware': 4.2.14 + tslib: 2.8.1 + + '@smithy/middleware-retry@4.5.7': + dependencies: + '@smithy/core': 3.23.17 + '@smithy/node-config-provider': 4.3.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/service-error-classification': 4.3.1 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-retry': 4.3.8 + '@smithy/uuid': 1.1.2 + tslib: 2.8.1 + + '@smithy/middleware-serde@4.2.20': + dependencies: + '@smithy/core': 3.23.17 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + 
'@smithy/middleware-stack@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/node-config-provider@4.3.14': + dependencies: + '@smithy/property-provider': 4.2.14 + '@smithy/shared-ini-file-loader': 4.4.9 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/node-http-handler@4.6.1': + dependencies: + '@smithy/protocol-http': 5.3.14 + '@smithy/querystring-builder': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/property-provider@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/protocol-http@5.3.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/querystring-builder@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + '@smithy/util-uri-escape': 4.2.2 + tslib: 2.8.1 + + '@smithy/querystring-parser@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/service-error-classification@4.3.1': + dependencies: + '@smithy/types': 4.14.1 + + '@smithy/shared-ini-file-loader@4.4.9': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/signature-v4@5.3.14': + dependencies: + '@smithy/is-array-buffer': 4.2.2 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-hex-encoding': 4.2.2 + '@smithy/util-middleware': 4.2.14 + '@smithy/util-uri-escape': 4.2.2 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/smithy-client@4.12.13': + dependencies: + '@smithy/core': 3.23.17 + '@smithy/middleware-endpoint': 4.4.32 + '@smithy/middleware-stack': 4.2.14 + '@smithy/protocol-http': 5.3.14 + '@smithy/types': 4.14.1 + '@smithy/util-stream': 4.5.25 + tslib: 2.8.1 + + '@smithy/types@4.14.1': + dependencies: + tslib: 2.8.1 + + '@smithy/url-parser@4.2.14': + dependencies: + '@smithy/querystring-parser': 4.2.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-base64@4.3.2': + dependencies: + '@smithy/util-buffer-from': 4.2.2 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/util-body-length-browser@4.2.2': + dependencies: + 
tslib: 2.8.1 + + '@smithy/util-body-length-node@4.2.3': + dependencies: + tslib: 2.8.1 + + '@smithy/util-buffer-from@2.2.0': + dependencies: + '@smithy/is-array-buffer': 2.2.0 + tslib: 2.8.1 + + '@smithy/util-buffer-from@4.2.2': + dependencies: + '@smithy/is-array-buffer': 4.2.2 + tslib: 2.8.1 + + '@smithy/util-config-provider@4.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/util-defaults-mode-browser@4.3.49': + dependencies: + '@smithy/property-provider': 4.2.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-defaults-mode-node@4.2.54': + dependencies: + '@smithy/config-resolver': 4.4.17 + '@smithy/credential-provider-imds': 4.2.14 + '@smithy/node-config-provider': 4.3.14 + '@smithy/property-provider': 4.2.14 + '@smithy/smithy-client': 4.12.13 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-endpoints@3.4.2': + dependencies: + '@smithy/node-config-provider': 4.3.14 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-hex-encoding@4.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/util-middleware@4.2.14': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-retry@4.3.8': + dependencies: + '@smithy/service-error-classification': 4.3.1 + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/util-stream@4.5.25': + dependencies: + '@smithy/fetch-http-handler': 5.3.17 + '@smithy/node-http-handler': 4.6.1 + '@smithy/types': 4.14.1 + '@smithy/util-base64': 4.3.2 + '@smithy/util-buffer-from': 4.2.2 + '@smithy/util-hex-encoding': 4.2.2 + '@smithy/util-utf8': 4.2.2 + tslib: 2.8.1 + + '@smithy/util-uri-escape@4.2.2': + dependencies: + tslib: 2.8.1 + + '@smithy/util-utf8@2.3.0': + dependencies: + '@smithy/util-buffer-from': 2.2.0 + tslib: 2.8.1 + + '@smithy/util-utf8@4.2.2': + dependencies: + '@smithy/util-buffer-from': 4.2.2 + tslib: 2.8.1 + + '@smithy/util-waiter@4.3.0': + dependencies: + '@smithy/types': 4.14.1 + tslib: 2.8.1 + + '@smithy/uuid@1.1.2': + dependencies: + tslib: 2.8.1 + 
'@socket.io/component-emitter@3.1.2': {} '@solid-primitives/event-listener@2.4.5(solid-js@1.9.11)': @@ -10619,6 +12921,10 @@ snapshots: dependencies: tslib: 2.8.1 + '@szmarczak/http-timer@5.0.1': + dependencies: + defer-to-connect: 2.0.1 + '@t3-oss/env-core@0.13.11(typescript@6.0.2)(valibot@1.3.1(typescript@6.0.2))(zod@4.3.6)': optionalDependencies: typescript: 6.0.2 @@ -11106,6 +13412,8 @@ snapshots: dependencies: '@types/unist': 3.0.3 + '@types/http-cache-semantics@4.2.0': {} + '@types/js-cookie@3.0.6': {} '@types/js-yaml@4.0.9': {} @@ -11122,8 +13430,16 @@ snapshots: '@types/ms@2.1.0': {} + '@types/mute-stream@0.0.4': + dependencies: + '@types/node': 25.6.0 + '@types/negotiator@0.6.4': {} + '@types/node@22.19.18': + dependencies: + undici-types: 6.21.0 + '@types/node@25.6.0': dependencies: undici-types: 7.19.2 @@ -11157,6 +13473,8 @@ snapshots: '@types/whatwg-mimetype@3.0.2': {} + '@types/wrap-ansi@3.0.0': {} + '@types/ws@8.18.1': dependencies: '@types/node': 25.6.0 @@ -11373,7 +13691,7 @@ snapshots: optionalDependencies: react-server-dom-webpack: 19.2.5(react-dom@19.2.5(react@19.2.5))(react@19.2.5) - '@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': + '@vitest/coverage-v8@4.1.4(@voidzero-dev/vite-plus-test@0.1.18)': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.1.4 @@ -11385,42 +13703,14 @@ snapshots: obug: 2.1.1 std-env: 4.0.0 tinyrainbow: 3.1.0 - vitest: 
'@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' - transitivePeerDependencies: - - '@arethetypeswrong/core' - - '@edge-runtime/vm' - - '@opentelemetry/api' - - '@tsdown/css' - - '@tsdown/exe' - - '@types/node' - - '@vitejs/devtools' - - '@vitest/coverage-istanbul' - - '@vitest/ui' - - bufferutil - - esbuild - - happy-dom - - jiti - - jsdom - - less - - publint - - sass - - sass-embedded - - stylus - - sugarss - - terser - - tsx - - typescript - - unplugin-unused - - utf-8-validate - - vite - - yaml + vitest: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' - 
'@vitest/eslint-plugin@1.6.15(@types/node@25.6.0)(@typescript-eslint/eslint-plugin@8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': + '@vitest/eslint-plugin@1.6.15(@types/node@25.6.0)(@typescript-eslint/eslint-plugin@8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(eslint@10.2.0(jiti@2.6.1))(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': dependencies: '@typescript-eslint/scope-manager': 8.58.2 '@typescript-eslint/utils': 8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2) eslint: 10.2.0(jiti@2.6.1) - vitest: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + vitest: 
'@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' optionalDependencies: '@typescript-eslint/eslint-plugin': 8.58.2(@typescript-eslint/parser@8.58.2(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2))(eslint@10.2.0(jiti@2.6.1))(typescript@6.0.2) typescript: 6.0.2 @@ -11519,7 +13809,7 @@ snapshots: '@voidzero-dev/vite-plus-linux-x64-musl@0.1.18': optional: true - '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': + '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)': dependencies: '@standard-schema/spec': 1.1.0 '@types/chai': 5.2.3 @@ -11537,7 +13827,7 @@ snapshots: ws: 8.20.0 optionalDependencies: '@types/node': 25.6.0 - '@vitest/coverage-v8': 4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + '@vitest/coverage-v8': 4.1.4(@voidzero-dev/vite-plus-test@0.1.18) happy-dom: 20.9.0 transitivePeerDependencies: - 
'@arethetypeswrong/core' @@ -11638,6 +13928,10 @@ snapshots: json-schema-traverse: 0.4.1 uri-js: 4.4.1 + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + ansi-regex@4.1.1: {} ansi-regex@5.0.1: {} @@ -11650,6 +13944,8 @@ snapshots: ansi-styles@5.2.0: {} + ansis@3.17.0: {} + ansis@4.2.0: {} any-promise@1.3.0: {} @@ -11688,6 +13984,10 @@ snapshots: astring@1.9.0: {} + async-retry@1.3.3: + dependencies: + retry: 0.13.1 + async@3.2.6: {} bail@2.0.2: {} @@ -11718,11 +14018,17 @@ snapshots: boolbase@1.0.0: {} + bowser@2.14.1: {} + brace-expansion@1.1.13: dependencies: balanced-match: 1.0.2 concat-map: 0.0.1 + brace-expansion@2.0.3: + dependencies: + balanced-match: 1.0.2 + brace-expansion@5.0.5: dependencies: balanced-match: 4.0.4 @@ -11761,8 +14067,25 @@ snapshots: cac@7.0.0: {} + cacheable-lookup@7.0.0: {} + + cacheable-request@10.2.14: + dependencies: + '@types/http-cache-semantics': 4.2.0 + get-stream: 6.0.1 + http-cache-semantics: 4.2.0 + keyv: 4.5.4 + mimic-response: 4.0.0 + normalize-url: 8.1.1 + responselike: 3.0.0 + callsites@3.1.0: {} + camel-case@4.1.2: + dependencies: + pascal-case: 3.1.2 + tslib: 2.8.1 + camelize@1.0.1: {} caniuse-lite@1.0.30001781: {} @@ -11799,6 +14122,23 @@ snapshots: ansi-styles: 4.3.0 supports-color: 7.2.0 + chalk@5.6.2: {} + + change-case@4.1.2: + dependencies: + camel-case: 4.1.2 + capital-case: 1.0.4 + constant-case: 3.0.4 + dot-case: 3.0.4 + header-case: 2.0.4 + no-case: 3.0.4 + param-case: 3.0.4 + pascal-case: 3.1.2 + path-case: 3.0.4 + sentence-case: 3.0.4 + snake-case: 3.0.4 + tslib: 2.8.1 + change-case@5.4.4: {} character-entities-html4@2.1.0: {} @@ -11809,6 +14149,8 @@ snapshots: character-reference-invalid@2.0.1: {} + chardet@2.1.1: {} + check-error@2.1.3: {} cheerio-select@2.1.0: @@ -11871,12 +14213,24 @@ snapshots: dependencies: escape-string-regexp: 1.0.5 + clean-stack@3.0.1: + dependencies: + escape-string-regexp: 4.0.0 + + cli-cursor@5.0.0: + dependencies: + restore-cursor: 5.1.0 + + cli-spinners@2.9.2: {} + 
cli-table3@0.6.5: dependencies: - string-width: 8.2.0 + string-width: 4.2.3 optionalDependencies: '@colors/colors': 1.5.0 + cli-width@4.1.0: {} + client-only@0.0.1: {} clsx@2.1.1: {} @@ -11937,6 +14291,14 @@ snapshots: confbox@0.2.4: {} + constant-case@3.0.4: + dependencies: + no-case: 3.0.4 + tslib: 2.8.1 + upper-case: 2.0.2 + + content-type@1.0.5: {} + convert-source-map@2.0.0: {} copy-to-clipboard@3.3.3: @@ -12212,7 +14574,6 @@ snapshots: decompress-response@6.0.0: dependencies: mimic-response: 3.1.0 - optional: true deep-eql@5.0.2: {} @@ -12228,6 +14589,8 @@ snapshots: bundle-name: 4.1.0 default-browser-id: 5.0.1 + defer-to-connect@2.0.1: {} + define-lazy-prop@3.0.0: {} delaunator@5.1.0: @@ -12236,8 +14599,12 @@ snapshots: dequal@2.0.3: {} + detect-indent@7.0.2: {} + detect-libc@2.1.2: {} + detect-newline@4.0.1: {} + detect-node-es@1.1.0: {} devlop@1.1.0: @@ -12282,6 +14649,11 @@ snapshots: domelementtype: 2.3.0 domhandler: 5.0.3 + dot-case@3.0.4: + dependencies: + no-case: 3.0.4 + tslib: 2.8.1 + dotenv@16.6.1: {} echarts-for-react@3.0.6(echarts@6.0.0)(react@19.2.5): @@ -12296,6 +14668,10 @@ snapshots: tslib: 2.3.0 zrender: 6.0.0 + ejs@3.1.10: + dependencies: + jake: 10.9.4 + electron-to-chromium@1.5.328: {} elkjs@0.11.1: {} @@ -12320,6 +14696,8 @@ snapshots: emoji-regex-xs@2.0.1: {} + emoji-regex@8.0.0: {} + empathic@2.0.0: {} encoding-sniffer@0.2.1: @@ -12356,6 +14734,10 @@ snapshots: entities@7.0.1: {} + error-ex@1.3.4: + dependencies: + is-arrayish: 0.2.1 + error-stack-parser-es@1.0.5: {} error-stack-parser@2.1.4: @@ -12958,6 +15340,8 @@ snapshots: esutils@2.0.3: {} + eventsource-parser@3.0.5: {} + expand-template@2.0.3: optional: true @@ -12997,6 +15381,10 @@ snapshots: fast-levenshtein@2.0.6: {} + fast-levenshtein@3.0.0: + dependencies: + fastest-levenshtein: 1.0.16 + fast-string-truncated-width@1.2.1: {} fast-string-width@1.1.0: @@ -13007,6 +15395,20 @@ snapshots: dependencies: fast-string-width: 1.1.0 + fast-xml-builder@1.2.0: + dependencies: + 
path-expression-matcher: 1.5.0 + xml-naming: 0.1.0 + + fast-xml-parser@5.7.2: + dependencies: + '@nodable/entities': 2.1.0 + fast-xml-builder: 1.2.0 + path-expression-matcher: 1.5.0 + strnum: 2.3.0 + + fastest-levenshtein@1.0.16: {} + fastq@1.20.1: dependencies: reusify: 1.1.0 @@ -13035,6 +15437,10 @@ snapshots: dependencies: flat-cache: 4.0.1 + filelist@1.0.6: + dependencies: + minimatch: 5.1.9 + filesize@10.1.6: {} fill-range@7.1.1: @@ -13048,6 +15454,10 @@ snapshots: locate-path: 6.0.0 path-exists: 4.0.0 + find-yarn-workspace-root@2.0.0: + dependencies: + micromatch: 4.0.8 + flat-cache@4.0.1: dependencies: flatted: 3.4.2 @@ -13055,6 +15465,8 @@ snapshots: flatted@3.4.2: {} + form-data-encoder@2.1.4: {} + format@0.2.2: {} formatly@0.3.0: @@ -13064,6 +15476,12 @@ snapshots: fs-constants@1.0.0: optional: true + fs-extra@8.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 4.0.0 + universalify: 0.1.2 + fsevents@2.3.2: optional: true @@ -13078,14 +15496,22 @@ snapshots: get-nonce@1.0.1: {} + get-package-type@0.1.0: {} + + get-stdin@9.0.0: {} + get-stream@5.2.0: dependencies: pump: 3.0.4 + get-stream@6.0.1: {} + get-tsconfig@4.13.7: dependencies: resolve-pkg-maps: 1.0.0 + git-hooks-list@3.2.0: {} + github-from-package@0.0.0: optional: true @@ -13121,6 +15547,20 @@ snapshots: dependencies: csstype: 3.2.3 + got@13.0.0: + dependencies: + '@sindresorhus/is': 5.6.0 + '@szmarczak/http-timer': 5.0.1 + cacheable-lookup: 7.0.0 + cacheable-request: 10.2.14 + decompress-response: 6.0.0 + form-data-encoder: 2.1.4 + get-stream: 6.0.1 + http2-wrapper: 2.2.1 + lowercase-keys: 3.0.0 + p-cancelable: 3.0.0 + responselike: 3.0.0 + graceful-fs@4.2.11: {} hachure-fill@0.5.2: {} @@ -13290,10 +15730,19 @@ snapshots: property-information: 7.1.0 space-separated-tokens: 2.0.2 + header-case@2.0.4: + dependencies: + capital-case: 1.0.4 + tslib: 2.8.1 + hex-rgb@4.3.0: {} hono@4.12.14: {} + hosted-git-info@7.0.2: + dependencies: + lru-cache: 10.4.3 + hosted-git-info@9.0.2: dependencies: 
lru-cache: 11.2.7 @@ -13319,6 +15768,24 @@ snapshots: domutils: 3.2.2 entities: 7.0.1 + http-cache-semantics@4.2.0: {} + + http-call@5.3.0: + dependencies: + content-type: 1.0.5 + debug: 4.4.3(supports-color@8.1.1) + is-retry-allowed: 1.2.0 + is-stream: 2.0.1 + parse-json: 4.0.0 + tunnel-agent: 0.6.0 + transitivePeerDependencies: + - supports-color + + http2-wrapper@2.2.1: + dependencies: + quick-lru: 5.1.1 + resolve-alpn: 1.2.1 + i18next-resources-to-backend@1.2.1: dependencies: '@babel/runtime': 7.29.2 @@ -13341,6 +15808,10 @@ snapshots: dependencies: safer-buffer: '@nolyfill/safer-buffer@1.0.44' + iconv-lite@0.7.2: + dependencies: + safer-buffer: '@nolyfill/safer-buffer@1.0.44' + idb-keyval@6.2.2: {} idb@8.0.0: {} @@ -13392,16 +15863,22 @@ snapshots: is-alphabetical: 2.0.1 is-decimal: 2.0.1 + is-arrayish@0.2.1: {} + is-builtin-module@5.0.0: dependencies: builtin-modules: 5.0.0 is-decimal@2.0.1: {} + is-docker@2.2.1: {} + is-docker@3.0.0: {} is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -13419,14 +15896,26 @@ snapshots: global-dirs: 3.0.1 is-path-inside: 3.0.3 + is-interactive@2.0.0: {} + is-number@7.0.0: {} is-path-inside@3.0.3: {} is-plain-obj@4.1.0: {} + is-retry-allowed@1.2.0: {} + is-stream@2.0.1: {} + is-unicode-supported@1.3.0: {} + + is-unicode-supported@2.1.0: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + is-wsl@3.1.1: dependencies: is-inside-container: 1.0.0 @@ -13446,6 +15935,12 @@ snapshots: html-escaper: 2.0.2 istanbul-lib-report: 3.0.1 + jake@10.9.4: + dependencies: + async: 3.2.6 + filelist: 1.0.6 + picocolors: 1.1.1 + jiti@2.6.1: {} jotai@2.19.1(@babel/core@7.29.0)(@babel/template@7.28.6)(@types/react@19.2.14)(react@19.2.5): @@ -13479,6 +15974,8 @@ snapshots: json-buffer@3.0.1: {} + json-parse-better-errors@1.0.2: {} + json-schema-traverse@0.4.1: {} json-stable-stringify-without-jsonify@1.0.1: {} @@ -13491,6 +15988,10 @@ snapshots: eslint-visitor-keys: 5.0.1 semver: 7.7.4 + 
jsonfile@4.0.0: + optionalDependencies: + graceful-fs: 4.2.11 + jsonfile@6.2.0: dependencies: universalify: 2.0.1 @@ -13622,6 +16123,8 @@ snapshots: lightningcss-win32-arm64-msvc: 1.32.0 lightningcss-win32-x64-msvc: 1.32.0 + lilconfig@3.1.3: {} + linebreak@1.1.0: dependencies: base64-js: 0.0.8 @@ -13647,6 +16150,11 @@ snapshots: lodash@4.18.0: {} + log-symbols@6.0.0: + dependencies: + chalk: 5.6.2 + is-unicode-supported: 1.3.0 + longest-streak@3.1.0: {} loose-envify@1.4.0: @@ -13661,6 +16169,10 @@ snapshots: dependencies: tslib: 2.8.1 + lowercase-keys@3.0.0: {} + + lru-cache@10.4.3: {} + lru-cache@11.2.7: {} lru-cache@5.1.1: @@ -14230,8 +16742,11 @@ snapshots: mime@4.1.0: {} - mimic-response@3.1.0: - optional: true + mimic-function@5.0.1: {} + + mimic-response@3.1.0: {} + + mimic-response@4.0.0: {} min-indent@1.0.1: {} @@ -14239,10 +16754,18 @@ snapshots: dependencies: brace-expansion: 5.0.5 + minimatch@10.2.5: + dependencies: + brace-expansion: 5.0.5 + minimatch@3.1.5: dependencies: brace-expansion: 1.1.13 + minimatch@5.1.9: + dependencies: + brace-expansion: 2.0.3 + minimist@1.2.8: {} minipass@7.1.3: {} @@ -14277,6 +16800,10 @@ snapshots: ms@2.1.3: {} + mute-stream@1.0.0: {} + + mute-stream@2.0.0: {} + mz@2.7.0: dependencies: any-promise: 1.3.0 @@ -14341,12 +16868,20 @@ snapshots: node-releases@2.0.36: {} + normalize-package-data@6.0.2: + dependencies: + hosted-git-info: 7.0.2 + semver: 7.7.4 + validate-npm-package-license: 3.0.4 + normalize-package-data@8.0.0: dependencies: hosted-git-info: 9.0.2 semver: 7.7.4 validate-npm-package-license: 3.0.4 + normalize-url@8.1.1: {} + normalize-wheel@1.0.1: {} nth-check@2.1.1: @@ -14366,12 +16901,47 @@ snapshots: obug@2.1.1: {} + oclif@4.15.5(@types/node@25.6.0): + dependencies: + '@aws-sdk/client-cloudfront': 3.1045.0 + '@aws-sdk/client-s3': 3.1045.0 + '@inquirer/confirm': 3.2.0 + '@inquirer/input': 2.3.0 + '@inquirer/select': 2.5.0 + '@oclif/core': 4.11.1 + '@oclif/plugin-help': 6.2.46 + '@oclif/plugin-not-found': 
3.2.82(@types/node@25.6.0) + '@oclif/plugin-warn-if-update-available': 3.1.13 + async-retry: 1.3.3 + chalk: 4.1.2 + change-case: 4.1.2 + debug: 4.4.3(supports-color@8.1.1) + ejs: 3.1.10 + find-yarn-workspace-root: 2.0.0 + fs-extra: 8.1.0 + github-slugger: 2.0.0 + got: 13.0.0 + lodash: 4.18.0 + normalize-package-data: 6.0.2 + semver: 7.7.4 + sort-package-json: 2.15.1 + tiny-jsonc: 1.0.2 + validate-npm-package-name: 5.0.1 + transitivePeerDependencies: + - '@types/node' + - aws-crt + - supports-color + ohash@2.0.11: {} once@1.4.0: dependencies: wrappy: 1.0.2 + onetime@7.0.0: + dependencies: + mimic-function: 5.0.1 + oniguruma-parser@0.12.1: {} oniguruma-to-es@4.3.5: @@ -14380,6 +16950,13 @@ snapshots: regex: 6.1.0 regex-recursion: 6.0.2 + open@10.1.0: + dependencies: + default-browser: 5.5.0 + define-lazy-prop: 3.0.0 + is-inside-container: 1.0.0 + is-wsl: 3.1.1 + open@10.2.0: dependencies: default-browser: 5.5.0 @@ -14407,6 +16984,18 @@ snapshots: type-check: 0.4.0 word-wrap: 1.2.5 + ora@8.1.0: + dependencies: + chalk: 5.6.2 + cli-cursor: 5.0.0 + cli-spinners: 2.9.2 + is-interactive: 2.0.0 + is-unicode-supported: 2.1.0 + log-symbols: 6.0.0 + stdin-discarder: 0.2.2 + string-width: 8.2.0 + strip-ansi: 7.2.0 + oxc-parser@0.121.0(@emnapi/runtime@1.9.1): dependencies: '@oxc-project/types': 0.121.0 @@ -14517,6 +17106,8 @@ snapshots: '@oxlint/binding-win32-x64-msvc': 1.60.0 oxlint-tsgolint: 0.20.0 + p-cancelable@3.0.0: {} + p-limit@3.1.0: dependencies: yocto-queue: 0.1.0 @@ -14539,6 +17130,11 @@ snapshots: papaparse@5.5.3: {} + param-case@3.0.4: + dependencies: + dot-case: 3.0.4 + tslib: 2.8.1 + parent-module@1.0.1: dependencies: callsites: 3.1.0 @@ -14564,6 +17160,11 @@ snapshots: dependencies: parse-statements: 1.0.11 + parse-json@4.0.0: + dependencies: + error-ex: 1.3.4 + json-parse-better-errors: 1.0.2 + parse-json@8.3.0: dependencies: '@babel/code-frame': 7.29.0 @@ -14589,12 +17190,24 @@ snapshots: dependencies: entities: 6.0.1 + pascal-case@3.1.2: + dependencies: + 
no-case: 3.0.4 + tslib: 2.8.1 + path-browserify@1.0.1: {} + path-case@3.0.4: + dependencies: + dot-case: 3.0.4 + tslib: 2.8.1 + path-data-parser@0.1.0: {} path-exists@4.0.0: {} + path-expression-matcher@1.5.0: {} + path-key@3.1.1: {} path-parse@1.0.7: {} @@ -14620,6 +17233,8 @@ snapshots: perfect-debounce@2.1.0: {} + picocolors@1.1.0: {} + picocolors@1.1.1: {} picomatch@2.3.2: {} @@ -14755,6 +17370,8 @@ snapshots: queue-microtask@1.2.3: {} + quick-lru@5.1.1: {} + radash@12.1.1: {} rc@1.2.8: @@ -15136,6 +17753,8 @@ snapshots: resize-observer-polyfill@1.5.1: {} + resolve-alpn@1.2.1: {} + resolve-from@4.0.0: {} resolve-pkg-maps@1.0.0: {} @@ -15146,6 +17765,17 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 + responselike@3.0.0: + dependencies: + lowercase-keys: 3.0.0 + + restore-cursor@5.1.0: + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + + retry@0.13.1: {} + reusify@1.1.0: {} robust-predicates@3.0.3: {} @@ -15165,8 +17795,7 @@ snapshots: rw@1.3.3: {} - safe-buffer@5.2.1: - optional: true + safe-buffer@5.2.1: {} safe-json-stringify@1.2.0: {} @@ -15202,6 +17831,12 @@ snapshots: semver@7.7.4: {} + sentence-case@3.0.4: + dependencies: + no-case: 3.0.4 + tslib: 2.8.1 + upper-case-first: 2.0.2 + seroval-plugins@1.5.1(seroval@1.5.1): dependencies: seroval: 1.5.1 @@ -15256,6 +17891,8 @@ snapshots: '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 + signal-exit@4.1.0: {} + simple-concat@1.0.1: optional: true @@ -15278,6 +17915,11 @@ snapshots: smol-toml@1.6.1: {} + snake-case@3.0.4: + dependencies: + dot-case: 3.0.4 + tslib: 2.8.1 + socket.io-client@4.8.3: dependencies: '@socket.io/component-emitter': 3.1.2 @@ -15302,6 +17944,19 @@ snapshots: seroval: 1.5.1 seroval-plugins: 1.5.1(seroval@1.5.1) + sort-object-keys@1.1.3: {} + + sort-package-json@2.15.1: + dependencies: + detect-indent: 7.0.2 + detect-newline: 4.0.1 + get-stdin: 9.0.0 + git-hooks-list: 3.2.0 + is-plain-obj: 4.1.0 + semver: 7.7.4 + sort-object-keys: 1.1.3 + tinyglobby: 
0.2.16 + sortablejs@1.15.7: {} source-map-js@1.2.1: {} @@ -15346,6 +18001,8 @@ snapshots: std-semver@1.0.8: {} + stdin-discarder@0.2.2: {} + storybook@10.3.5(@testing-library/dom@10.4.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5): dependencies: '@storybook/global': 5.0.0 @@ -15395,6 +18052,12 @@ snapshots: string-ts@2.3.1: {} + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + string-width@8.2.0: dependencies: get-east-asian-width: 1.5.0 @@ -15412,6 +18075,10 @@ snapshots: character-entities-html4: 2.1.0 character-entities-legacy: 3.0.0 + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + strip-ansi@7.2.0: dependencies: ansi-regex: 6.2.2 @@ -15435,6 +18102,8 @@ snapshots: dependencies: js-tokens: 9.0.1 + strnum@2.3.0: {} + structured-clone-es@2.0.0: {} style-to-js@1.1.21: @@ -15531,6 +18200,8 @@ snapshots: tiny-invariant@1.3.3: {} + tiny-jsonc@1.0.2: {} + tinybench@2.9.0: {} tinyexec@1.0.4: {} @@ -15630,7 +18301,6 @@ snapshots: tunnel-agent@0.6.0: dependencies: safe-buffer: 5.2.1 - optional: true turbo-stream@3.2.0: {} @@ -15638,6 +18308,8 @@ snapshots: dependencies: prelude-ls: 1.2.1 + type-fest@0.21.3: {} + type-fest@2.19.0: {} type-fest@4.41.0: {} @@ -15654,6 +18326,8 @@ snapshots: unbash@2.2.0: {} + undici-types@6.21.0: {} + undici-types@7.19.2: {} undici@7.24.0: {} @@ -15712,6 +18386,8 @@ snapshots: unist-util-is: 6.0.1 unist-util-visit-parents: 6.0.2 + universalify@0.1.2: {} + universalify@2.0.1: {} unpic@4.2.2: {} @@ -15738,6 +18414,10 @@ snapshots: dependencies: tslib: 2.8.1 + upper-case@2.0.2: + dependencies: + tslib: 2.8.1 + uri-js@4.4.1: dependencies: punycode: 2.3.1 @@ -15804,6 +18484,8 @@ snapshots: spdx-correct: 3.2.0 spdx-expression-parse: 3.0.1 + validate-npm-package-name@5.0.1: {} + vfile-location@5.0.3: dependencies: '@types/unist': 3.0.3 @@ -15883,11 +18565,11 @@ snapshots: - supports-color - typescript - 
vite-plus@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3): + vite-plus@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3): dependencies: '@oxc-project/types': 0.124.0 '@voidzero-dev/vite-plus-core': 0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) - '@voidzero-dev/vite-plus-test': 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) + '@voidzero-dev/vite-plus-test': 0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3) oxfmt: 0.45.0 oxlint: 1.60.0(oxlint-tsgolint@0.20.0) oxlint-tsgolint: 0.20.0 @@ -15955,11 +18637,11 @@ snapshots: optionalDependencies: vite: 
'@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' - vitest-browser-react@2.2.0(@types/node@25.6.0)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3): + vitest-browser-react@2.2.0(@types/node@25.6.0)(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(react-dom@19.2.5(react@19.2.5))(react@19.2.5)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3): dependencies: react: 19.2.5 react-dom: 19.2.5(react@19.2.5) - vitest: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + vitest: 
'@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' optionalDependencies: '@types/react': 19.2.14 '@types/react-dom': 19.2.3(@types/react@19.2.14) @@ -15993,11 +18675,11 @@ snapshots: - vite - yaml - vitest-canvas-mock@1.1.4(@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)): + vitest-canvas-mock@1.1.4(@voidzero-dev/vite-plus-test@0.1.18): dependencies: cssfontparser: 1.2.1 moo-color: 1.0.3 - vitest: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4(@types/node@25.6.0)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' + vitest: '@voidzero-dev/vite-plus-test@0.1.18(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(@voidzero-dev/vite-plus-core@0.1.18(@types/node@25.6.0)(esbuild@0.27.2)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3))(esbuild@0.27.2)(happy-dom@20.9.0)(jiti@2.6.1)(tsx@4.21.0)(typescript@6.0.2)(yaml@2.8.3)' 
void-elements@3.1.0: {} @@ -16052,8 +18734,26 @@ snapshots: dependencies: isexe: 2.0.0 + widest-line@3.1.0: + dependencies: + string-width: 4.2.3 + word-wrap@1.2.5: {} + wordwrap@1.0.0: {} + + wrap-ansi@6.2.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrappy@1.0.2: {} ws@8.18.3: {} @@ -16071,6 +18771,8 @@ snapshots: xml-name-validator@4.0.0: {} + xml-naming@0.1.0: {} + xmlbuilder@15.1.1: {} xmlhttprequest-ssl@2.1.2: {} @@ -16095,6 +18797,8 @@ snapshots: yocto-queue@1.2.2: {} + yoctocolors-cjs@2.1.3: {} + yoga-layout@3.2.1: {} yup@1.7.1: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 3dd5b403a3..aedfae270c 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -13,11 +13,13 @@ trustPolicyExclude: - chokidar@4.0.3 - reselect@5.1.1 - semver@6.3.1 + - undici-types@6.21.0 packages: - web - e2e - sdks/nodejs-client - packages/* + - cli overrides: '@lexical/code': npm:lexical-code-no-prism@0.41.0 '@monaco-editor/loader': 1.7.0 @@ -38,7 +40,7 @@ overrides: side-channel: npm:@nolyfill/side-channel@^1.0.44 smol-toml@<1.6.1: 1.6.1 solid-js: 1.9.11 - string-width: ~8.2.0 + string-width@>=5: ~8.2.0 svgo@>=3.0.0 <3.3.3: 3.3.3 tar@<=7.5.10: 7.5.11 undici@>=7.0.0 <7.24.0: 7.24.0 @@ -75,8 +77,15 @@ catalog: '@mdx-js/react': 3.1.1 '@mdx-js/rollup': 3.1.1 '@monaco-editor/react': 4.7.0 + '@napi-rs/keyring': 1.1.6 '@next/eslint-plugin-next': 16.2.3 '@next/mdx': 16.2.3 + '@oclif/core': 4.11.1 + '@oclif/plugin-autocomplete': 3.2.6 + '@oclif/plugin-help': 6.2.10 + '@oclif/plugin-not-found': 3.2.18 + '@oclif/plugin-version': 2.2.16 + '@oclif/plugin-warn-if-update-available': 3.1.13 '@orpc/client': 1.13.14 '@orpc/contract': 1.13.14 '@orpc/openapi-client': 1.13.14 @@ -130,6 +139,7 @@ catalog: agentation: 3.0.2 ahooks: 3.9.7 class-variance-authority: 0.7.1 + cli-table3: 0.6.5 client-only: 0.0.1 clsx: 2.1.1 cmdk: 1.1.1 @@ -155,6 +165,7 
@@ catalog: eslint-plugin-react-refresh: 0.5.2 eslint-plugin-sonarjs: 4.0.2 eslint-plugin-storybook: 10.3.5 + eventsource-parser: 3.0.5 fast-deep-equal: 3.1.3 happy-dom: 20.9.0 hast-util-to-jsx-runtime: 2.3.6 @@ -183,6 +194,10 @@ catalog: next: 16.2.3 next-themes: 0.4.6 nuqs: 2.8.9 + oclif: 4.15.5 + open: 10.1.0 + ora: 8.1.0 + picocolors: 1.1.0 pinyin-pro: 3.28.1 playwright: 1.59.1 postcss: 8.5.9 diff --git a/web/app/device/components/authorize-account.tsx b/web/app/device/components/authorize-account.tsx new file mode 100644 index 0000000000..8bdc6ce03c --- /dev/null +++ b/web/app/device/components/authorize-account.tsx @@ -0,0 +1,97 @@ +'use client' + +import type { FC } from 'react' +import { useState } from 'react' +import { deviceApproveAccount, deviceDenyAccount } from '@/service/device-flow' +import { approveErrorCopy } from '../utils/error-copy' + +type Props = { + userCode: string + accountEmail?: string + defaultWorkspace?: string + onApproved: () => void + onDenied: () => void + onError: (message: string) => void +} + +/** + * AuthorizeAccount is the account-branch authorize screen. Called with a + * live console session already established (user bounced through /signin). + * Posts to /openapi/v1/oauth/device/{approve,deny}; these endpoints mint + * the dfoa_ token server-side. + */ +const AuthorizeAccount: FC = ({ + userCode, accountEmail, defaultWorkspace, onApproved, onDenied, onError, +}) => { + const [busy, setBusy] = useState(false) + + const approve = async () => { + setBusy(true) + try { + await deviceApproveAccount(userCode) + onApproved() + } + catch (e) { + onError(approveErrorCopy(e)) + } + finally { + setBusy(false) + } + } + + const deny = async () => { + setBusy(true) + try { + await deviceDenyAccount(userCode) + onDenied() + } + catch (e) { + onError(approveErrorCopy(e)) + } + finally { + setBusy(false) + } + } + + return ( +
+
+

Authorize Dify CLI

+

+ Dify CLI (difyctl) is requesting access to your account. + {' '}If you did not start this from your terminal, click Cancel. +

+
+
+ {accountEmail && ( +

+ Signed in as {accountEmail} +

+ )} + {defaultWorkspace && ( +

+ Default workspace: {defaultWorkspace} +

+ )} +
+
+ + +
+
+ ) +} + +export default AuthorizeAccount diff --git a/web/app/device/components/authorize-sso.tsx b/web/app/device/components/authorize-sso.tsx new file mode 100644 index 0000000000..722052498d --- /dev/null +++ b/web/app/device/components/authorize-sso.tsx @@ -0,0 +1,98 @@ +'use client' + +import type { FC } from 'react' +import { useEffect, useState } from 'react' +import type { ApprovalContext } from '@/service/device-flow' +import { approveExternal, fetchApprovalContext } from '@/service/device-flow' +import { approveErrorCopy } from '../utils/error-copy' + +type Props = { + onApproved: () => void + onError: (message: string) => void +} + +/** + * AuthorizeSSO is the external-SSO branch authorize screen. On mount it + * fetches /openapi/v1/oauth/device/approval-context to learn subject_email, + * issuer, user_code, and csrf_token from the device_approval_grant cookie. + * On Approve click, posts /openapi/v1/oauth/device/approve-external with + * the CSRF header. + * + * The user_code in state is bound to the cookie by server; we do not accept + * one from the URL because the SSO branch deliberately detaches from the + * pre-SSO ?user_code=... query param. + */ +const AuthorizeSSO: FC = ({ onApproved, onError }) => { + const [ctx, setCtx] = useState(null) + const [busy, setBusy] = useState(false) + const [loadErr, setLoadErr] = useState(null) + + useEffect(() => { + let cancelled = false + fetchApprovalContext() + .then((c) => { if (!cancelled) setCtx(c) }) + .catch((e) => { + if (!cancelled) + setLoadErr(approveErrorCopy(e)) + }) + return () => { cancelled = true } + }, []) + + const approve = async () => { + if (!ctx) return + setBusy(true) + try { + await approveExternal(ctx, ctx.user_code) + onApproved() + } + catch (e) { + onError(approveErrorCopy(e)) + } + finally { + setBusy(false) + } + } + + if (loadErr) { + return ( +
+

This session is no longer valid

+

+ Run difyctl auth login again to start a new sign-in. +

+
+ ) + } + if (!ctx) { + return
Loading session…
+ } + + return ( +
+
+

Authorize Dify CLI

+

+ Dify CLI (difyctl) is requesting access via SSO. If you did not start + this from your terminal, close this tab. +

+
+
+

+ Signed in as {ctx.subject_email} +

+

+ Issuer: {ctx.subject_issuer} +

+
+ +
+ ) +} + +export default AuthorizeSSO diff --git a/web/app/device/components/chooser.tsx b/web/app/device/components/chooser.tsx new file mode 100644 index 0000000000..026de2921c --- /dev/null +++ b/web/app/device/components/chooser.tsx @@ -0,0 +1,60 @@ +'use client' + +import type { FC } from 'react' +import { useRouter } from '@/next/navigation' +import { setPostLoginRedirect } from '@/app/signin/utils/post-login-redirect' + +type Props = { + userCode: string + ssoAvailable: boolean +} + +/** + * Chooser renders the two-button device-auth login selector. Account button + * seeds postLoginRedirect + navigates to /signin so every existing account + * login method (password / email-code / social OAuth / account-SSO) flows + * through its usual plumbing. SSO button hits /openapi/v1/oauth/device/sso-initiate + * directly — the SSO branch skips /signin entirely. + * + * v1.0 scope: only account-SSO honours postLoginRedirect (via sso-auth's + * return_to plumbing). Password / email-code / social-OAuth users land on + * /signin's default post-login target and manually return to the /device + * URL printed by the CLI. That's not great UX; a follow-up milestone + * generalises post-signin redirect to all methods. + */ +const Chooser: FC = ({ userCode, ssoAvailable }) => { + const router = useRouter() + + const onAccount = () => { + setPostLoginRedirect(`/device?user_code=${encodeURIComponent(userCode)}`) + router.push('/signin') + } + + const onSSO = () => { + // Full-page navigation, not router.push — /openapi/v1/oauth/device/sso-initiate + // issues a 302 to the IdP. Next's client router can't follow cross- + // origin redirects; a plain window.location assignment handles it. + window.location.href = `/openapi/v1/oauth/device/sso-initiate?user_code=${encodeURIComponent(userCode)}` + } + + return ( +
+ + {ssoAvailable && ( + + )} +
+ ) +} + +export default Chooser diff --git a/web/app/device/components/code-input.tsx b/web/app/device/components/code-input.tsx new file mode 100644 index 0000000000..1d358f782b --- /dev/null +++ b/web/app/device/components/code-input.tsx @@ -0,0 +1,45 @@ +'use client' + +import type { FC } from 'react' +import { useCallback } from 'react' +import { normaliseUserCodeInput } from '../utils/user-code' + +type Props = { + value: string + onChange: (normalised: string) => void + disabled?: boolean + autoFocus?: boolean +} + +/** + * CodeInput renders the user_code text field with live normalisation + * (uppercase, reduced alphabet, XXXX-XXXX hyphenation). + * + * The onChange callback receives the normalised value only — the parent does + * not need to run validation itself. + */ +const CodeInput: FC = ({ value, onChange, disabled, autoFocus }) => { + const handle = useCallback((raw: string) => { + onChange(normaliseUserCodeInput(raw)) + }, [onChange]) + + return ( + handle(e.target.value)} + /> + ) +} + +export default CodeInput diff --git a/web/app/device/page.tsx b/web/app/device/page.tsx new file mode 100644 index 0000000000..c7ac95e1c9 --- /dev/null +++ b/web/app/device/page.tsx @@ -0,0 +1,215 @@ +'use client' + +import { useEffect, useState } from 'react' +import { usePathname, useRouter, useSearchParams } from '@/next/navigation' +import { useQuery } from '@tanstack/react-query' +import { systemFeaturesQueryOptions } from '@/service/system-features' +import { commonQueryKeys, userProfileQueryOptions } from '@/service/use-common' +import { post } from '@/service/base' +import type { ICurrentWorkspace } from '@/models/common' +import { deviceLookup } from '@/service/device-flow' +import CodeInput from './components/code-input' +import Chooser from './components/chooser' +import AuthorizeAccount from './components/authorize-account' +import AuthorizeSSO from './components/authorize-sso' +import { isValidUserCode } from './utils/user-code' +import { 
classifyLookupError } from './utils/error-copy' + +type View = + | { kind: 'code_entry' } + | { kind: 'chooser'; userCode: string } + | { kind: 'authorize_account'; userCode: string } + | { kind: 'authorize_sso' } + | { kind: 'success' } + | { kind: 'error_expired' } + | { kind: 'error_rate_limited' } + | { kind: 'error_lookup_failed' } + +export default function DevicePage() { + const searchParams = useSearchParams() + const router = useRouter() + const pathname = usePathname() + const urlUserCode = (searchParams.get('user_code') || '').trim().toUpperCase() + const ssoVerified = searchParams.get('sso_verified') === '1' + + const [typed, setTyped] = useState('') + const [view, setView] = useState({ kind: 'code_entry' }) + const [errMsg, setErrMsg] = useState(null) + + // Account subject + workspace identity (for the authorize-account screen). + // Logged-out is a valid landing state on /device — disable refetch storms + // and skip workspace probe until profile resolves (avoids /current + chained + // /refresh-token 401 loops while the user is still entering the code). + const { data: userResp, isError: profileErr } = useQuery({ + ...userProfileQueryOptions(), + throwOnError: false, + retry: false, + refetchOnWindowFocus: false, + refetchOnMount: false, + }) + const account = userResp?.profile + const { data: currentWorkspace } = useQuery({ + queryKey: commonQueryKeys.currentWorkspace, + queryFn: () => post('/workspaces/current'), + enabled: !!account && !profileErr, + retry: false, + refetchOnWindowFocus: false, + }) + const { data: sys } = useQuery(systemFeaturesQueryOptions()) + // Device-flow SSO branch uses external-user (webapp) SSO, not console SSO — + // backend mints EXTERNAL_SSO tokens via Enterprise's external ACS. Gate on + // webapp_auth.{enabled, allow_sso} + a configured webapp SSO protocol. 
+ const ssoAvailable = !!sys?.webapp_auth?.enabled + && !!sys?.webapp_auth?.allow_sso + && (sys?.webapp_auth?.sso_config?.protocol || '') !== '' + + // URL-driven view transitions. Only advances while the user is still on + // the entry/chooser screens — never clobbers terminal views (success / + // error_expired / authorize_*) when userProfile refetches. + // After consuming the params, scrub them from the URL so they don't + // leak via history / Referer / server logs (RFC 8628 §5.4). + useEffect(() => { + if (view.kind !== 'code_entry' && view.kind !== 'chooser') return + // Post-login bounce: chooser holds the typed code, account just loaded. + // The URL was already scrubbed on the first effect run, so urlUserCode + // is empty here — advance using the userCode stashed in view state. + if (view.kind === 'chooser' && account) { + setView({ kind: 'authorize_account', userCode: view.userCode }) + return + } + let consumed = false + if (ssoVerified) { + setView({ kind: 'authorize_sso' }) + consumed = true + } + else if (urlUserCode && isValidUserCode(urlUserCode)) { + if (account) + setView({ kind: 'authorize_account', userCode: urlUserCode }) + else + setView({ kind: 'chooser', userCode: urlUserCode }) + consumed = true + } + if (consumed && (urlUserCode || ssoVerified)) + router.replace(pathname) + }, [urlUserCode, ssoVerified, account, view, router, pathname]) + + const onContinue = async () => { + if (!isValidUserCode(typed)) return + try { + const reply = await deviceLookup(typed) + if (!reply.valid) { + setView({ kind: 'error_expired' }) + return + } + } + catch (e) { + const outcome = classifyLookupError(e) + if (outcome === 'rate_limited') + setView({ kind: 'error_rate_limited' }) + else if (outcome === 'failed') + setView({ kind: 'error_lookup_failed' }) + else + setView({ kind: 'error_expired' }) + return + } + if (account) setView({ kind: 'authorize_account', userCode: typed }) + else setView({ kind: 'chooser', userCode: typed }) + } + + return ( +
+
+ {view.kind === 'code_entry' && ( +
+
+

Authorize Dify CLI

+

+ Enter the code shown in your terminal. +

+
+ + +
+ )} + + {view.kind === 'chooser' && ( +
+
+

Sign in to authorize

+

+ Code {view.userCode} is valid. Choose how to sign in. +

+
+ +
+ )} + + {view.kind === 'authorize_account' && ( + setView({ kind: 'success' })} + onDenied={() => setView({ kind: 'error_expired' })} + onError={e => setErrMsg(e)} + /> + )} + + {view.kind === 'authorize_sso' && ( + setView({ kind: 'success' })} + onError={e => setErrMsg(e)} + /> + )} + + {view.kind === 'success' && ( +
+

You're signed in

+

Return to your terminal to continue.

+
+ )} + + {view.kind === 'error_expired' && ( +
+

This code is no longer valid

+

+ The code may have expired or already been used. Run + {' '} + difyctl auth login + {' '} + again to get a new one. +

+
+ )} + + {view.kind === 'error_rate_limited' && ( +
+

Too many attempts

+

+ We've received too many requests for this code. Wait a moment and try again. +

+
+ )} + + {view.kind === 'error_lookup_failed' && ( +
+

Could not verify the code

+

+ Something went wrong on our side. Try again in a moment. +

+
+ )} + + {errMsg && ( +

{errMsg}

+ )} +
+
+ ) +} diff --git a/web/app/device/utils/error-copy.ts b/web/app/device/utils/error-copy.ts new file mode 100644 index 0000000000..cfdf2af252 --- /dev/null +++ b/web/app/device/utils/error-copy.ts @@ -0,0 +1,41 @@ +// Translate a DeviceFlowError (or any thrown value) into user-facing copy. +// Centralised so account/SSO branches surface the same words for the same +// failure mode and so a new server error code can be wired up here once. + +import { DeviceFlowError } from '@/service/device-flow' + +const APPROVE_COPY: Record = { + rate_limited: 'Too many attempts. Wait a moment and try again.', + no_session: 'Your session has expired. Run difyctl auth login again to start over.', + invalid_session: 'Your session has expired. Run difyctl auth login again to start over.', + session_already_consumed: 'This session was already used. Run difyctl auth login again.', + csrf_mismatch: 'Could not verify the request. Refresh the page and try again.', + forbidden: 'Could not verify the request. Refresh the page and try again.', + expired_or_unknown: 'This code is no longer valid.', + not_found: 'This code is no longer valid.', + user_code_mismatch: 'This code does not match the active session. Run difyctl auth login again.', + user_code_not_pending: 'This code was already approved or denied.', + already_resolved: 'This code was already approved or denied.', + state_lost: 'The flow expired before approval completed. Run difyctl auth login again.', + approve_in_progress: 'An approval is already in progress for this code.', + conflict: 'This code is no longer in a state we can approve.', + server_error: 'Something went wrong on our side. Try again in a moment.', +} + +const DEFAULT_MESSAGE = 'Could not complete the request. Please try again.' + +export function approveErrorCopy(err: unknown): string { + if (err instanceof DeviceFlowError) + return APPROVE_COPY[err.code] ?? 
DEFAULT_MESSAGE + return DEFAULT_MESSAGE +} + +export type LookupOutcome = 'expired' | 'rate_limited' | 'failed' + +export function classifyLookupError(err: unknown): LookupOutcome { + if (err instanceof DeviceFlowError) { + if (err.code === 'rate_limited' || err.status === 429) return 'rate_limited' + if (err.code === 'server_error' || err.status >= 500) return 'failed' + } + return 'expired' +} diff --git a/web/app/device/utils/user-code.ts b/web/app/device/utils/user-code.ts new file mode 100644 index 0000000000..15f3efc94f --- /dev/null +++ b/web/app/device/utils/user-code.ts @@ -0,0 +1,37 @@ +// user-code.ts — input normalisation + validation for the RFC 8628 +// 8-character user_code format the CLI prints to stderr. +// +// Format: XXXX-XXXX, uppercase, reduced alphabet (no 0/O, 1/I/l, 2/Z). Low +// entropy by design — humans type it — so the server-side rate-limit + TTL + +// single-use properties are what defend it, not the alphabet. + +export const USER_CODE_ALPHABET = 'ABCDEFGHJKLMNPQRSTUVWXY3456789' // excludes 0 O 1 I L 2 Z + +/** + * normaliseUserCodeInput prepares raw input for display in the code field: + * strips non-alphanumerics, uppercases, drops disallowed characters, and + * inserts the hyphen after the fourth accepted char. + * + * Returns at most 9 chars ("XXXX-XXXX"); longer input is truncated. + */ +export function normaliseUserCodeInput(raw: string): string { + const cleaned: string[] = [] + for (const ch of raw.toUpperCase()) { + if (USER_CODE_ALPHABET.includes(ch)) + cleaned.push(ch) + if (cleaned.length === 8) + break + } + if (cleaned.length <= 4) + return cleaned.join('') + return `${cleaned.slice(0, 4).join('')}-${cleaned.slice(4).join('')}` +} + +/** + * isValidUserCode tests whether the normalised form is a complete XXXX-XXXX + * token suitable for submission to /openapi/v1/oauth/device/lookup. 
+ */ +export function isValidUserCode(normalised: string): boolean { + return /^[A-Z0-9]{4}-[A-Z0-9]{4}$/.test(normalised) + && [...normalised.replace('-', '')].every(c => USER_CODE_ALPHABET.includes(c)) +} diff --git a/web/app/signin/utils/post-login-redirect.ts b/web/app/signin/utils/post-login-redirect.ts index a94fb2ad79..291661a87c 100644 --- a/web/app/signin/utils/post-login-redirect.ts +++ b/web/app/signin/utils/post-login-redirect.ts @@ -1,15 +1,68 @@ -let postLoginRedirect: string | null = null +// Persists target across full-page redirects within the same tab (social +// OAuth, SSO IdP bounce). sessionStorage is tab-scoped so concurrent +// /device tabs don't clobber each other. 15-min TTL drops stale values. +// Same-origin + exact-path whitelist prevents open-redirect. +// +// Signup-via-email-link opening in a new tab is out of scope — that tab +// starts with an empty sessionStorage and falls to /apps default. + +const KEY = 'dify_post_login_redirect' +const TTL_MS = 15 * 60 * 1000 + +const ALLOWED: Record> = { + '/device': new Set(['user_code', 'sso_verified']), + '/account/oauth/authorize': new Set(['client_id', 'scope', 'state', 'redirect_uri']), +} + +function validate(target: string): string | null { + if (typeof window === 'undefined') return null + try { + const url = new URL(target, window.location.origin) + if (url.origin !== window.location.origin) return null + const allowedKeys = ALLOWED[url.pathname] + if (!allowedKeys) return null + for (const key of url.searchParams.keys()) { + if (!allowedKeys.has(key)) return null + } + return url.pathname + (url.search || '') + } + catch { + return null + } +} export const setPostLoginRedirect = (value: string | null) => { - postLoginRedirect = value -} - -export const resolvePostLoginRedirect = () => { - if (postLoginRedirect) { - const redirectUrl = postLoginRedirect - postLoginRedirect = null - return redirectUrl + if (typeof window === 'undefined') return + if (value === null) { + try { 
sessionStorage.removeItem(KEY) } catch {} + return + } + const safe = validate(value) + if (!safe) return + try { + sessionStorage.setItem(KEY, JSON.stringify({ target: safe, ts: Date.now() })) + } + catch {} +} + +export const resolvePostLoginRedirect = (): string | null => { + if (typeof window === 'undefined') return null + let raw: string | null = null + try { + raw = sessionStorage.getItem(KEY) + sessionStorage.removeItem(KEY) + } + catch { + return null + } + if (!raw) return null + try { + const parsed = JSON.parse(raw) + if (typeof parsed?.target !== 'string' || typeof parsed?.ts !== 'number') return null + if (Date.now() - parsed.ts > TTL_MS) return null + return validate(parsed.target) + } + catch { + return null } - - return null } diff --git a/web/next.config.ts b/web/next.config.ts index db44f5b9ed..741cd0afc1 100644 --- a/web/next.config.ts +++ b/web/next.config.ts @@ -30,6 +30,20 @@ const nextConfig: NextConfig = { }, ] }, + // Anti-framing for device-flow surfaces. A framed /device page could UI-trick + // a victim with a valid device_approval_grant cookie into approving a + // device_code — functionally CSRF, bypasses the double-submit token. Deny + // framing outright on every device-flow route; no trusted embedder exists. + async headers() { + const antiFrame = [ + { key: 'X-Frame-Options', value: 'DENY' }, + { key: 'Content-Security-Policy', value: "frame-ancestors 'none'" }, + ] + return [ + { source: '/device', headers: antiFrame }, + { source: '/device/:path*', headers: antiFrame }, + ] + }, output: 'standalone', compiler: { removeConsole: isDev ? 
false : { exclude: ['warn', 'error'] }, diff --git a/web/service/base.ts b/web/service/base.ts index 64d13ef59a..e278771db5 100644 --- a/web/service/base.ts +++ b/web/service/base.ts @@ -794,6 +794,11 @@ export const request = async(url: string, options = {}, otherOptions?: IOther const [refreshErr] = await asyncRunSafe(refreshAccessTokenOrReLogin(TIME_OUT)) if (refreshErr === null) return baseFetch(url, options, otherOptionsForBaseFetch) + // /device is the device-flow chooser; logged-out is a valid state + // there. Redirecting to /signin loses the user_code context and + // the post-login flow lands on /apps instead of returning here. + if (location.pathname === `${basePath}/device`) + return Promise.reject(err) if (location.pathname !== `${basePath}/signin` || !IS_CE_EDITION) { jumpTo(loginUrl) return Promise.reject(err) diff --git a/web/service/device-flow.ts b/web/service/device-flow.ts new file mode 100644 index 0000000000..d936a48fb2 --- /dev/null +++ b/web/service/device-flow.ts @@ -0,0 +1,134 @@ +// Web-side calls into the Dify device-flow endpoints. All routes now sit +// under /openapi/v1/oauth/device/* (Phase G of the openapi migration). The +// approve/deny endpoints still require the console session cookie + CSRF +// token; lookup is unauthenticated; the SSO branch uses cookie + per-flow +// CSRF baked into the approval-context response. +// +// /openapi/v1/oauth/device/lookup (public — GET) +// /openapi/v1/oauth/device/approve (cookie + CSRF — POST) +// /openapi/v1/oauth/device/deny (cookie + CSRF — POST) +// /openapi/v1/oauth/device/approval-context (cookie — GET) +// /openapi/v1/oauth/device/approve-external (cookie + per-flow CSRF — POST) +// +// /openapi/v1/* is its own URL prefix, so we bypass service/base's +// API_PREFIX (which targets /console/api) and call fetch directly. 
+ +import Cookies from 'js-cookie' +import { CSRF_COOKIE_NAME, CSRF_HEADER_NAME } from '@/config' + +const DEVICE_BASE = '/openapi/v1/oauth/device' + +// Typed error thrown by every wrapper here. The page/component layer +// switches on `code` to choose user-facing copy / view; never render +// `status` or raw body to the user. +export class DeviceFlowError extends Error { + constructor(public code: string, public status: number) { + super(code) + this.name = 'DeviceFlowError' + } +} + +// Translate a non-2xx fetch Response into a DeviceFlowError. Honours the +// server contract `{"error": ""}` and falls back to a status-class +// code so callers can still dispatch (rate_limited / server_error / ...). +async function failFromResponse(res: Response): Promise { + let serverCode = '' + try { + const body = await res.clone().json() + if (body && typeof body.error === 'string') serverCode = body.error + } + catch { /* non-JSON body — fall through to status mapping */ } + + const code = serverCode || statusFallbackCode(res.status) + throw new DeviceFlowError(code, res.status) +} + +function statusFallbackCode(status: number): string { + if (status === 429) return 'rate_limited' + if (status === 401) return 'no_session' + if (status === 403) return 'forbidden' + if (status === 404) return 'not_found' + if (status === 409) return 'conflict' + if (status >= 500) return 'server_error' + return 'unknown' +} + +function consoleCsrfHeader(): Record { + return { [CSRF_HEADER_NAME]: Cookies.get(CSRF_COOKIE_NAME()) || '' } +} + +// ----- Account branch -------------------------------------------------------- + +export type DeviceLookupReply = { + valid: boolean + expires_in_remaining: number + client_id: string +} + +export async function deviceLookup(user_code: string): Promise { + const res = await fetch(`${DEVICE_BASE}/lookup?user_code=${encodeURIComponent(user_code)}`, { + method: 'GET', + }) + if (!res.ok) await failFromResponse(res) + return res.json() +} + +export async 
function deviceApproveAccount(user_code: string): Promise<{ status: 'approved' }> { + const res = await fetch(`${DEVICE_BASE}/approve`, { + method: 'POST', + credentials: 'include', + headers: { + 'Content-Type': 'application/json', + ...consoleCsrfHeader(), + }, + body: JSON.stringify({ user_code }), + }) + if (!res.ok) await failFromResponse(res) + return res.json() +} + +export async function deviceDenyAccount(user_code: string): Promise<{ status: 'denied' }> { + const res = await fetch(`${DEVICE_BASE}/deny`, { + method: 'POST', + credentials: 'include', + headers: { + 'Content-Type': 'application/json', + ...consoleCsrfHeader(), + }, + body: JSON.stringify({ user_code }), + }) + if (!res.ok) await failFromResponse(res) + return res.json() +} + +// ----- SSO branch (cookie-authed via /openapi/v1/oauth/device/*) ----------- + +export type ApprovalContext = { + subject_email: string + subject_issuer: string + user_code: string + csrf_token: string + expires_at: string +} + +export async function fetchApprovalContext(): Promise { + const res = await fetch(`${DEVICE_BASE}/approval-context`, { + method: 'GET', + credentials: 'include', + }) + if (!res.ok) await failFromResponse(res) + return res.json() +} + +export async function approveExternal(ctx: ApprovalContext, user_code: string): Promise { + const res = await fetch(`${DEVICE_BASE}/approve-external`, { + method: 'POST', + credentials: 'include', + headers: { + 'Content-Type': 'application/json', + 'X-CSRF-Token': ctx.csrf_token, + }, + body: JSON.stringify({ user_code }), + }) + if (!res.ok) await failFromResponse(res) +}