mirror of https://github.com/langgenius/dify.git

commit 4ea43f93ae
Merge branch 'main' into chore/ssrf-config
@@ -0,0 +1,12 @@
+version: 2
+updates:
+  - package-ecosystem: "npm"
+    directory: "/web"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 2
+  - package-ecosystem: "uv"
+    directory: "/api"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 2
@@ -22,13 +22,33 @@ jobs:
           # Fix lint errors
           uv run ruff check --fix .
           # Format code
-          uv run ruff format .
+          uv run ruff format ..
+
+      - name: ast-grep
+        run: |
+          uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
+          uvx --from ast-grep-cli sg --pattern 'session.query($WHATEVER).filter($HERE)' --rewrite 'session.query($WHATEVER).where($HERE)' -l py --update-all
+
+          # Convert Optional[T] to T | None (ignoring quoted types)
+          cat > /tmp/optional-rule.yml << 'EOF'
+          id: convert-optional-to-union
+          language: python
+          rule:
+            kind: generic_type
+            all:
+              - has:
+                  kind: identifier
+                  pattern: Optional
+              - has:
+                  kind: type_parameter
+                  has:
+                    kind: type
+                    pattern: $T
+          fix: $T | None
+          EOF
+          uvx --from ast-grep-cli sg scan --inline-rules "$(cat /tmp/optional-rule.yml)" --update-all
+          # Fix forward references that were incorrectly converted (Python doesn't support "Type" | None syntax)
+          find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
+          find . -name "*.py.bak" -type f -delete

       - name: mdformat
         run: |
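Taken together, this step mechanizes two rewrites across the backend. A minimal before/after sketch of what it does to application code; the model and function names here are hypothetical, not taken from the Dify codebase:

```python
from typing import Optional

from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Account(Base):  # hypothetical model, for illustration only
    __tablename__ = "accounts"
    id = Column(String, primary_key=True)


# Before the autofix:
def find_account_before(session, account_id: Optional[str]):
    return session.query(Account).filter(Account.id == account_id).first()


# After: .filter() becomes .where() (the SQLAlchemy 2.0-style spelling) and
# Optional[str] becomes str | None. Quoted forward references are restored to
# Optional["..."] by the sed pass above, because evaluating "SomeType" | None
# at runtime raises TypeError (str does not support | with None).
def find_account_after(session, account_id: str | None):
    return session.query(Account).where(Account.id == account_id).first()
```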
@@ -0,0 +1,87 @@
+# AGENTS.md
+
+## Project Overview
+
+Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
+
+The codebase consists of:
+
+- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
+- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
+- **Docker deployment** (`/docker`): Containerized deployment configurations
+
+## Development Commands
+
+### Backend (API)
+
+All Python commands must be prefixed with `uv run --project api`:
+
+```bash
+# Start development servers
+./dev/start-api        # Start API server
+./dev/start-worker     # Start Celery worker
+
+# Run tests
+uv run --project api pytest                            # Run all tests
+uv run --project api pytest tests/unit_tests/          # Unit tests only
+uv run --project api pytest tests/integration_tests/   # Integration tests
+
+# Code quality
+./dev/reformat                             # Run all formatters and linters
+uv run --project api ruff check --fix ./   # Fix linting issues
+uv run --project api ruff format ./        # Format code
+uv run --directory api basedpyright        # Type checking
+```
+
+### Frontend (Web)
+
+```bash
+cd web
+pnpm lint          # Run ESLint
+pnpm eslint-fix    # Fix ESLint issues
+pnpm test          # Run Jest tests
+```
+
+## Testing Guidelines
+
+### Backend Testing
+
+- Use `pytest` for all backend tests
+- Write tests first (TDD approach)
+- Test structure: Arrange-Act-Assert
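A minimal sketch of the Arrange-Act-Assert layout referenced above; the service function is a local stub so the test runs standalone, not a real Dify module:

```python
def create_tenant_stub(email: str, language: str | None = None) -> dict[str, str]:
    """Hypothetical stand-in for a service-layer function."""
    return {"email": email, "language": language or "en-US"}


def test_create_tenant_defaults_language() -> None:
    # Arrange: build the inputs and any collaborators the unit needs.
    email = "owner@example.com"

    # Act: invoke exactly one behavior under test.
    tenant = create_tenant_stub(email=email, language=None)

    # Assert: verify the observable outcome, not implementation details.
    assert tenant["email"] == email
    assert tenant["language"] == "en-US"
```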
+
+## Code Style Requirements
+
+### Python
+
+- Use type hints for all functions and class attributes
+- No `Any` types unless absolutely necessary
+- Implement special methods (`__repr__`, `__str__`) appropriately
+
+### TypeScript/JavaScript
+
+- Strict TypeScript configuration
+- ESLint with Prettier integration
+- Avoid `any` type
+
+## Important Notes
+
+- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
+- **Comments**: Only write meaningful comments that explain "why", not "what"
+- **File Creation**: Always prefer editing existing files over creating new ones
+- **Documentation**: Don't create documentation files unless explicitly requested
+- **Code Quality**: Always run `./dev/reformat` before committing backend changes
+
+## Common Development Tasks
+
+### Adding a New API Endpoint
+
+1. Create controller in `/api/controllers/`
+1. Add service logic in `/api/services/`
+1. Update routes in controller's `__init__.py`
+1. Write tests in `/api/tests/`
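For orientation, a hedged sketch of those four steps collapsed into one file, using a bare Flask blueprint and hypothetical names; the real controllers register their routes through the controller package `__init__.py` as step 3 describes:

```python
from flask import Blueprint, jsonify

# Hypothetical controller module (would live under /api/controllers/).
example_bp = Blueprint("example", __name__, url_prefix="/example")


def get_example_status() -> dict[str, str]:
    """Stand-in for service logic that would live under /api/services/."""
    return {"status": "ok"}


@example_bp.get("/status")
def example_status():
    # Controller stays thin: delegate to the service layer and serialize.
    return jsonify(get_example_status())


# In a real app factory this blueprint would be registered with:
# app.register_blueprint(example_bp)
```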
+
+## Project-Specific Conventions
+
+- All async tasks use Celery with Redis as broker
+- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.

CLAUDE.md
@@ -1,89 +0,0 @@
-# CLAUDE.md
-
-This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
-
-## Project Overview
-
-Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
-
-The codebase consists of:
-
-- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
-- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
-- **Docker deployment** (`/docker`): Containerized deployment configurations
-
-## Development Commands
-
-### Backend (API)
-
-All Python commands must be prefixed with `uv run --project api`:
-
-```bash
-# Start development servers
-./dev/start-api        # Start API server
-./dev/start-worker     # Start Celery worker
-
-# Run tests
-uv run --project api pytest                            # Run all tests
-uv run --project api pytest tests/unit_tests/          # Unit tests only
-uv run --project api pytest tests/integration_tests/   # Integration tests
-
-# Code quality
-./dev/reformat                             # Run all formatters and linters
-uv run --project api ruff check --fix ./   # Fix linting issues
-uv run --project api ruff format ./        # Format code
-uv run --directory api basedpyright        # Type checking
-```
-
-### Frontend (Web)
-
-```bash
-cd web
-pnpm lint          # Run ESLint
-pnpm eslint-fix    # Fix ESLint issues
-pnpm test          # Run Jest tests
-```
-
-## Testing Guidelines
-
-### Backend Testing
-
-- Use `pytest` for all backend tests
-- Write tests first (TDD approach)
-- Test structure: Arrange-Act-Assert
-
-## Code Style Requirements
-
-### Python
-
-- Use type hints for all functions and class attributes
-- No `Any` types unless absolutely necessary
-- Implement special methods (`__repr__`, `__str__`) appropriately
-
-### TypeScript/JavaScript
-
-- Strict TypeScript configuration
-- ESLint with Prettier integration
-- Avoid `any` type
-
-## Important Notes
-
-- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
-- **Comments**: Only write meaningful comments that explain "why", not "what"
-- **File Creation**: Always prefer editing existing files over creating new ones
-- **Documentation**: Don't create documentation files unless explicitly requested
-- **Code Quality**: Always run `./dev/reformat` before committing backend changes
-
-## Common Development Tasks
-
-### Adding a New API Endpoint
-
-1. Create controller in `/api/controllers/`
-1. Add service logic in `/api/services/`
-1. Update routes in controller's `__init__.py`
-1. Write tests in `/api/tests/`
-
-## Project-Specific Conventions
-
-- All async tasks use Celery with Redis as broker
-- **Internationalization**: Frontend supports multiple languages with English (`web/i18n/en-US/`) as the source. All user-facing text must use i18n keys, no hardcoded strings. Edit corresponding module files in `en-US/` directory for translations.

Makefile
@@ -4,10 +4,13 @@ WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
 API_IMAGE=$(DOCKER_REGISTRY)/dify-api
 VERSION=latest
 
+# Default target - show help
+.DEFAULT_GOAL := help
+
 # Backend Development Environment Setup
 .PHONY: dev-setup prepare-docker prepare-web prepare-api
 
-# Default dev setup target
+# Dev setup target
 dev-setup: prepare-docker prepare-web prepare-api
 	@echo "✅ Backend development environment setup complete!"
@@ -46,6 +49,27 @@ dev-clean:
 	@rm -rf api/storage
 	@echo "✅ Cleanup complete"
 
+# Backend Code Quality Commands
+format:
+	@echo "🎨 Running ruff format..."
+	@uv run --project api --dev ruff format ./api
+	@echo "✅ Code formatting complete"
+
+check:
+	@echo "🔍 Running ruff check..."
+	@uv run --project api --dev ruff check ./api
+	@echo "✅ Code check complete"
+
+lint:
+	@echo "🔧 Running ruff format and check with fixes..."
+	@uv run --directory api --dev sh -c 'ruff format ./api && ruff check --fix ./api'
+	@echo "✅ Linting complete"
+
+type-check:
+	@echo "📝 Running type check with basedpyright..."
+	@uv run --directory api --dev basedpyright
+	@echo "✅ Type check complete"
+
 # Build Docker images
 build-web:
 	@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@@ -90,6 +114,12 @@ help:
 	@echo "  make prepare-api - Set up API environment"
 	@echo "  make dev-clean - Stop Docker middleware containers"
 	@echo ""
+	@echo "Backend Code Quality:"
+	@echo "  make format - Format code with ruff"
+	@echo "  make check - Check code with ruff"
+	@echo "  make lint - Format and fix code with ruff"
+	@echo "  make type-check - Run type checking with basedpyright"
+	@echo ""
 	@echo "Docker Build Targets:"
 	@echo "  make build-web - Build web Docker image"
 	@echo "  make build-api - Build API Docker image"
@@ -98,4 +128,4 @@ help:
 	@echo "  make build-push-all - Build and push all Docker images"
 
 # Phony targets
-.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help
+.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check

@@ -328,7 +328,7 @@ MATRIXONE_DATABASE=dify
 LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
 LINDORM_USERNAME=admin
 LINDORM_PASSWORD=admin
-USING_UGC_INDEX=False
+LINDORM_USING_UGC=True
 LINDORM_QUERY_TIMEOUT=1
 
 # OceanBase Vector configuration

@@ -5,7 +5,7 @@ line-length = 120
 quote-style = "double"
 
 [lint]
-preview = false
+preview = true
 select = [
     "B", # flake8-bugbear rules
     "C4", # flake8-comprehensions
@@ -65,6 +65,7 @@ ignore = [
     "B006", # mutable-argument-default
     "B007", # unused-loop-control-variable
     "B026", # star-arg-unpacking-after-keyword-arg
+    "B901", # allow return in yield
     "B903", # class-as-data-structure
     "B904", # raise-without-from-inside-except
     "B905", # zip-without-explicit-strict
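The newly ignored B901 targets `return <value>` inside a generator. That pattern is legal Python: the value rides on `StopIteration` and is what `yield from` evaluates to in a delegating generator. A small self-contained illustration of what the ignore now permits:

```python
from collections.abc import Generator


def read_chunks(data: bytes, size: int = 3) -> Generator[bytes, None, int]:
    """Yield fixed-size chunks, then return how many were produced."""
    count = 0
    for i in range(0, len(data), size):
        yield data[i : i + size]
        count += 1
    return count  # B901 flags this "return with value in generator" pattern


def count_chunks(data: bytes) -> int:
    # The returned value travels on StopIteration; `yield from` would capture
    # it directly in a delegating generator.
    gen = read_chunks(data)
    while True:
        try:
            next(gen)
        except StopIteration as stop:
            return stop.value
```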

@@ -1,8 +1,9 @@
 import base64
 import json
 import logging
+import operator
 import secrets
-from typing import Any, Optional
+from typing import Any
 
 import click
 import sqlalchemy as sa
@@ -639,7 +640,7 @@ def old_metadata_migration():
 @click.option("--email", prompt=True, help="Tenant account email.")
 @click.option("--name", prompt=True, help="Workspace name.")
 @click.option("--language", prompt=True, help="Account language, default: en-US.")
-def create_tenant(email: str, language: Optional[str] = None, name: Optional[str] = None):
+def create_tenant(email: str, language: str | None = None, name: str | None = None):
     """
     Create tenant account
     """
@@ -953,7 +954,7 @@ def clear_orphaned_file_records(force: bool):
         click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
         query = "DELETE FROM message_files WHERE id IN :ids"
         with db.engine.begin() as conn:
-            conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
+            conn.execute(sa.text(query), {"ids": tuple(record["id"] for record in orphaned_message_files)})
         click.echo(
             click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
         )
@@ -1307,7 +1308,7 @@ def cleanup_orphaned_draft_variables(
 
     if dry_run:
         logger.info("DRY RUN: Would delete the following:")
-        for app_id, count in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1], reverse=True)[
+        for app_id, count in sorted(stats["orphaned_by_app"].items(), key=operator.itemgetter(1), reverse=True)[
             :10
         ]:  # Show top 10
             logger.info("  App %s: %s variables", app_id, count)
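Both commands.py changes above are behavior-preserving micro-refactors of the kind ruff's preview rules push for. A compact illustration with made-up data:

```python
import operator

stats = {"app-a": 5, "app-b": 12, "app-c": 7}

# operator.itemgetter(1) replaces `lambda x: x[1]`: same behavior, but a named,
# picklable callable that linters and readers recognize at a glance.
top = sorted(stats.items(), key=operator.itemgetter(1), reverse=True)[:10]
assert top[0] == ("app-b", 12)

# tuple(genexpr) replaces tuple([listcomp]): it skips building the throwaway
# intermediate list while producing an identical tuple.
ids = tuple(name for name, _ in top)
assert ids == ("app-b", "app-c", "app-a")
```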

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,28 +7,28 @@ class NotionConfig(BaseSettings):
     Configuration settings for Notion integration
     """
 
-    NOTION_CLIENT_ID: Optional[str] = Field(
+    NOTION_CLIENT_ID: str | None = Field(
         description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
         default=None,
     )
 
-    NOTION_CLIENT_SECRET: Optional[str] = Field(
+    NOTION_CLIENT_SECRET: str | None = Field(
         description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
         default=None,
     )
 
-    NOTION_INTEGRATION_TYPE: Optional[str] = Field(
+    NOTION_INTEGRATION_TYPE: str | None = Field(
         description="Type of Notion integration."
         " Set to 'internal' for internal integrations, or None for public integrations.",
         default=None,
     )
 
-    NOTION_INTERNAL_SECRET: Optional[str] = Field(
+    NOTION_INTERNAL_SECRET: str | None = Field(
         description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
         default=None,
     )
 
-    NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
+    NOTION_INTEGRATION_TOKEN: str | None = Field(
         description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
         default=None,
     )
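The mechanical pattern repeated across all of the config hunks below: pydantic v2 accepts PEP 604 unions in field annotations, so `str | None` is a drop-in replacement for `Optional[str]` with identical validation behavior. A minimal self-contained sketch; the setting name is hypothetical:

```python
from pydantic import Field
from pydantic_settings import BaseSettings


class DemoConfig(BaseSettings):
    """Hypothetical settings class showing the rewritten field style."""

    DEMO_API_KEY: str | None = Field(
        description="Illustrative optional secret; unset means the feature is off.",
        default=None,
    )


config = DemoConfig()  # reads DEMO_API_KEY from the environment if present
assert config.DEMO_API_KEY is None or isinstance(config.DEMO_API_KEY, str)
```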

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeFloat
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class SentryConfig(BaseSettings):
     Configuration settings for Sentry error tracking and performance monitoring
     """
 
-    SENTRY_DSN: Optional[str] = Field(
+    SENTRY_DSN: str | None = Field(
         description="Sentry Data Source Name (DSN)."
         " This is the unique identifier of your Sentry project, used to send events to the correct project.",
         default=None,

@@ -1,4 +1,4 @@
-from typing import Literal, Optional
+from typing import Literal
 
 from pydantic import (
     AliasChoices,
@@ -57,7 +57,7 @@ class SecurityConfig(BaseSettings):
         default=False,
     )
 
-    ADMIN_API_KEY: Optional[str] = Field(
+    ADMIN_API_KEY: str | None = Field(
         description="admin api key for authentication",
         default=None,
     )
@@ -97,17 +97,17 @@ class CodeExecutionSandboxConfig(BaseSettings):
         default="dify-sandbox",
     )
 
-    CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
+    CODE_EXECUTION_CONNECT_TIMEOUT: float | None = Field(
         description="Connection timeout in seconds for code execution requests",
         default=10.0,
     )
 
-    CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
+    CODE_EXECUTION_READ_TIMEOUT: float | None = Field(
         description="Read timeout in seconds for code execution requests",
         default=60.0,
     )
 
-    CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
+    CODE_EXECUTION_WRITE_TIMEOUT: float | None = Field(
         description="Write timeout in seconds for code execution request",
         default=10.0,
     )
@@ -368,17 +368,17 @@ class HttpConfig(BaseSettings):
         default=3,
     )
 
-    SSRF_PROXY_ALL_URL: Optional[str] = Field(
+    SSRF_PROXY_ALL_URL: str | None = Field(
         description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
         default=None,
     )
 
-    SSRF_PROXY_HTTP_URL: Optional[str] = Field(
+    SSRF_PROXY_HTTP_URL: str | None = Field(
         description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
         default=None,
    )
 
-    SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
+    SSRF_PROXY_HTTPS_URL: str | None = Field(
         description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
         default=None,
     )
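Since this is the chore/ssrf-config branch, the intent behind these settings is worth sketching: outbound fetches of user-controlled URLs are routed through a forward proxy, so the proxy rather than the application host decides which networks are reachable. A hedged sketch of how such per-scheme proxy URLs are typically consumed with httpx; the values are placeholders and this is not the actual Dify ssrf_proxy module:

```python
import httpx

# Hypothetical values; in Dify these come from the HttpConfig settings above.
SSRF_PROXY_HTTP_URL = "http://localhost:3128"
SSRF_PROXY_HTTPS_URL = "http://localhost:3128"

# Mount one proxied transport per URL scheme; requests to unmatched schemes
# would fail rather than bypass the proxy.
client = httpx.Client(
    mounts={
        "http://": httpx.HTTPTransport(proxy=SSRF_PROXY_HTTP_URL),
        "https://": httpx.HTTPTransport(proxy=SSRF_PROXY_HTTPS_URL),
    },
    timeout=httpx.Timeout(10.0),
)
```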
@@ -420,7 +420,7 @@ class InnerAPIConfig(BaseSettings):
         default=False,
     )
 
-    INNER_API_KEY: Optional[str] = Field(
+    INNER_API_KEY: str | None = Field(
         description="API key for accessing the internal API",
         default=None,
     )
@@ -436,7 +436,7 @@ class LoggingConfig(BaseSettings):
         default="INFO",
     )
 
-    LOG_FILE: Optional[str] = Field(
+    LOG_FILE: str | None = Field(
         description="File path for log output.",
         default=None,
     )
@@ -456,12 +456,12 @@ class LoggingConfig(BaseSettings):
         default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
     )
 
-    LOG_DATEFORMAT: Optional[str] = Field(
+    LOG_DATEFORMAT: str | None = Field(
         description="Date format string for log timestamps",
         default=None,
     )
 
-    LOG_TZ: Optional[str] = Field(
+    LOG_TZ: str | None = Field(
         description="Timezone for log timestamps (e.g., 'America/New_York')",
         default="UTC",
     )
@@ -595,22 +595,22 @@ class AuthConfig(BaseSettings):
         default="/console/api/oauth/authorize",
     )
 
-    GITHUB_CLIENT_ID: Optional[str] = Field(
+    GITHUB_CLIENT_ID: str | None = Field(
         description="GitHub OAuth client ID",
         default=None,
     )
 
-    GITHUB_CLIENT_SECRET: Optional[str] = Field(
+    GITHUB_CLIENT_SECRET: str | None = Field(
         description="GitHub OAuth client secret",
         default=None,
     )
 
-    GOOGLE_CLIENT_ID: Optional[str] = Field(
+    GOOGLE_CLIENT_ID: str | None = Field(
         description="Google OAuth client ID",
         default=None,
     )
 
-    GOOGLE_CLIENT_SECRET: Optional[str] = Field(
+    GOOGLE_CLIENT_SECRET: str | None = Field(
         description="Google OAuth client secret",
         default=None,
     )
@@ -678,42 +678,42 @@ class MailConfig(BaseSettings):
     Configuration for email services
     """
 
-    MAIL_TYPE: Optional[str] = Field(
+    MAIL_TYPE: str | None = Field(
         description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
         default=None,
     )
 
-    MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
+    MAIL_DEFAULT_SEND_FROM: str | None = Field(
         description="Default email address to use as the sender",
         default=None,
     )
 
-    RESEND_API_KEY: Optional[str] = Field(
+    RESEND_API_KEY: str | None = Field(
         description="API key for Resend email service",
         default=None,
     )
 
-    RESEND_API_URL: Optional[str] = Field(
+    RESEND_API_URL: str | None = Field(
         description="API URL for Resend email service",
         default=None,
     )
 
-    SMTP_SERVER: Optional[str] = Field(
+    SMTP_SERVER: str | None = Field(
         description="SMTP server hostname",
         default=None,
     )
 
-    SMTP_PORT: Optional[int] = Field(
+    SMTP_PORT: int | None = Field(
         description="SMTP server port number",
         default=465,
     )
 
-    SMTP_USERNAME: Optional[str] = Field(
+    SMTP_USERNAME: str | None = Field(
         description="Username for SMTP authentication",
         default=None,
     )
 
-    SMTP_PASSWORD: Optional[str] = Field(
+    SMTP_PASSWORD: str | None = Field(
         description="Password for SMTP authentication",
         default=None,
     )
@@ -733,7 +733,7 @@ class MailConfig(BaseSettings):
         default=50,
     )
 
-    SENDGRID_API_KEY: Optional[str] = Field(
+    SENDGRID_API_KEY: str | None = Field(
         description="API key for SendGrid service",
         default=None,
     )
@@ -756,17 +756,17 @@ class RagEtlConfig(BaseSettings):
         default="database",
     )
 
-    UNSTRUCTURED_API_URL: Optional[str] = Field(
+    UNSTRUCTURED_API_URL: str | None = Field(
         description="API URL for Unstructured.io service",
         default=None,
     )
 
-    UNSTRUCTURED_API_KEY: Optional[str] = Field(
+    UNSTRUCTURED_API_KEY: str | None = Field(
         description="API key for Unstructured.io service",
         default="",
     )
 
-    SCARF_NO_ANALYTICS: Optional[str] = Field(
+    SCARF_NO_ANALYTICS: str | None = Field(
         description="This is about whether to disable Scarf analytics in Unstructured library.",
         default="false",
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt
 from pydantic_settings import BaseSettings
 
@@ -40,17 +38,17 @@ class HostedOpenAiConfig(BaseSettings):
     Configuration for hosted OpenAI service
     """
 
-    HOSTED_OPENAI_API_KEY: Optional[str] = Field(
+    HOSTED_OPENAI_API_KEY: str | None = Field(
         description="API key for hosted OpenAI service",
         default=None,
     )
 
-    HOSTED_OPENAI_API_BASE: Optional[str] = Field(
+    HOSTED_OPENAI_API_BASE: str | None = Field(
         description="Base URL for hosted OpenAI API",
         default=None,
     )
 
-    HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
+    HOSTED_OPENAI_API_ORGANIZATION: str | None = Field(
         description="Organization ID for hosted OpenAI service",
         default=None,
     )
@@ -110,12 +108,12 @@ class HostedAzureOpenAiConfig(BaseSettings):
         default=False,
     )
 
-    HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
+    HOSTED_AZURE_OPENAI_API_KEY: str | None = Field(
         description="API key for hosted Azure OpenAI service",
         default=None,
     )
 
-    HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
+    HOSTED_AZURE_OPENAI_API_BASE: str | None = Field(
         description="Base URL for hosted Azure OpenAI API",
         default=None,
     )
@@ -131,12 +129,12 @@ class HostedAnthropicConfig(BaseSettings):
     Configuration for hosted Anthropic service
     """
 
-    HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
+    HOSTED_ANTHROPIC_API_BASE: str | None = Field(
         description="Base URL for hosted Anthropic API",
         default=None,
     )
 
-    HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
+    HOSTED_ANTHROPIC_API_KEY: str | None = Field(
         description="API key for hosted Anthropic service",
         default=None,
     )

@@ -1,5 +1,5 @@
 import os
-from typing import Any, Literal, Optional
+from typing import Any, Literal
 from urllib.parse import parse_qsl, quote_plus
 
 from pydantic import Field, NonNegativeFloat, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
@@ -78,18 +78,18 @@ class StorageConfig(BaseSettings):
 
 
 class VectorStoreConfig(BaseSettings):
-    VECTOR_STORE: Optional[str] = Field(
+    VECTOR_STORE: str | None = Field(
         description="Type of vector store to use for efficient similarity search."
         " Set to None if not using a vector store.",
         default=None,
     )
 
-    VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
+    VECTOR_STORE_WHITELIST_ENABLE: bool | None = Field(
         description="Enable whitelist for vector store.",
         default=False,
     )
 
-    VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
+    VECTOR_INDEX_NAME_PREFIX: str | None = Field(
         description="Prefix used to create collection name in vector database",
         default="Vector_index",
     )
@@ -225,26 +225,26 @@ class CeleryConfig(DatabaseConfig):
         default="redis",
     )
 
-    CELERY_BROKER_URL: Optional[str] = Field(
+    CELERY_BROKER_URL: str | None = Field(
         description="URL of the message broker for Celery tasks.",
         default=None,
     )
 
-    CELERY_USE_SENTINEL: Optional[bool] = Field(
+    CELERY_USE_SENTINEL: bool | None = Field(
         description="Whether to use Redis Sentinel for high availability.",
         default=False,
     )
 
-    CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
+    CELERY_SENTINEL_MASTER_NAME: str | None = Field(
         description="Name of the Redis Sentinel master.",
         default=None,
     )
 
-    CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
+    CELERY_SENTINEL_PASSWORD: str | None = Field(
         description="Password of the Redis Sentinel master.",
         default=None,
     )
-    CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
+    CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
         description="Timeout for Redis Sentinel socket operations in seconds.",
         default=0.1,
     )
@@ -268,12 +268,12 @@ class InternalTestConfig(BaseSettings):
     Configuration settings for Internal Test
     """
 
-    AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
+    AWS_SECRET_ACCESS_KEY: str | None = Field(
         description="Internal test AWS secret access key",
         default=None,
     )
 
-    AWS_ACCESS_KEY_ID: Optional[str] = Field(
+    AWS_ACCESS_KEY_ID: str | None = Field(
         description="Internal test AWS access key ID",
         default=None,
     )
@@ -284,15 +284,15 @@ class DatasetQueueMonitorConfig(BaseSettings):
     Configuration settings for Dataset Queue Monitor
     """
 
-    QUEUE_MONITOR_THRESHOLD: Optional[NonNegativeInt] = Field(
+    QUEUE_MONITOR_THRESHOLD: NonNegativeInt | None = Field(
         description="Threshold for dataset queue monitor",
         default=200,
     )
-    QUEUE_MONITOR_ALERT_EMAILS: Optional[str] = Field(
+    QUEUE_MONITOR_ALERT_EMAILS: str | None = Field(
         description="Emails for dataset queue monitor alert, separated by commas",
         default=None,
     )
-    QUEUE_MONITOR_INTERVAL: Optional[NonNegativeFloat] = Field(
+    QUEUE_MONITOR_INTERVAL: NonNegativeFloat | None = Field(
         description="Interval for dataset queue monitor in minutes",
         default=30,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -19,12 +17,12 @@ class RedisConfig(BaseSettings):
         default=6379,
     )
 
-    REDIS_USERNAME: Optional[str] = Field(
+    REDIS_USERNAME: str | None = Field(
         description="Username for Redis authentication (if required)",
         default=None,
     )
 
-    REDIS_PASSWORD: Optional[str] = Field(
+    REDIS_PASSWORD: str | None = Field(
         description="Password for Redis authentication (if required)",
         default=None,
     )
@@ -44,47 +42,47 @@ class RedisConfig(BaseSettings):
         default="CERT_NONE",
     )
 
-    REDIS_SSL_CA_CERTS: Optional[str] = Field(
+    REDIS_SSL_CA_CERTS: str | None = Field(
         description="Path to the CA certificate file for SSL verification",
         default=None,
     )
 
-    REDIS_SSL_CERTFILE: Optional[str] = Field(
+    REDIS_SSL_CERTFILE: str | None = Field(
         description="Path to the client certificate file for SSL authentication",
         default=None,
     )
 
-    REDIS_SSL_KEYFILE: Optional[str] = Field(
+    REDIS_SSL_KEYFILE: str | None = Field(
         description="Path to the client private key file for SSL authentication",
         default=None,
     )
 
-    REDIS_USE_SENTINEL: Optional[bool] = Field(
+    REDIS_USE_SENTINEL: bool | None = Field(
         description="Enable Redis Sentinel mode for high availability",
         default=False,
     )
 
-    REDIS_SENTINELS: Optional[str] = Field(
+    REDIS_SENTINELS: str | None = Field(
         description="Comma-separated list of Redis Sentinel nodes (host:port)",
         default=None,
     )
 
-    REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
+    REDIS_SENTINEL_SERVICE_NAME: str | None = Field(
         description="Name of the Redis Sentinel service to monitor",
         default=None,
     )
 
-    REDIS_SENTINEL_USERNAME: Optional[str] = Field(
+    REDIS_SENTINEL_USERNAME: str | None = Field(
         description="Username for Redis Sentinel authentication (if required)",
         default=None,
     )
 
-    REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
+    REDIS_SENTINEL_PASSWORD: str | None = Field(
         description="Password for Redis Sentinel authentication (if required)",
         default=None,
     )
 
-    REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
+    REDIS_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
         description="Socket timeout in seconds for Redis Sentinel connections",
         default=0.1,
     )
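For context on the sentinel fields: a Redis Sentinel deployment is asked "who is the master right now?" instead of pinning one Redis host, so clients survive failovers. A hedged sketch with redis-py and hypothetical endpoints (running it requires live sentinels):

```python
from redis.sentinel import Sentinel

# Hypothetical endpoints; in Dify these would be parsed out of REDIS_SENTINELS
# ("host1:26379,host2:26379") and the related settings above.
sentinel = Sentinel(
    [("sentinel-1", 26379), ("sentinel-2", 26379)],
    socket_timeout=0.1,  # mirrors REDIS_SENTINEL_SOCKET_TIMEOUT's default
)

# master_for() resolves the current master for the named service and returns a
# regular Redis client that re-resolves after a failover.
master = sentinel.master_for("mymaster", socket_timeout=0.1)
master.set("health-check", "ok")
```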
@@ -94,12 +92,12 @@ class RedisConfig(BaseSettings):
         default=False,
     )
 
-    REDIS_CLUSTERS: Optional[str] = Field(
+    REDIS_CLUSTERS: str | None = Field(
         description="Comma-separated list of Redis Clusters nodes (host:port)",
         default=None,
     )
 
-    REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
+    REDIS_CLUSTERS_PASSWORD: str | None = Field(
         description="Password for Redis Clusters authentication (if required)",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,37 +7,37 @@ class AliyunOSSStorageConfig(BaseSettings):
     Configuration settings for Aliyun Object Storage Service (OSS)
     """
 
-    ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
+    ALIYUN_OSS_BUCKET_NAME: str | None = Field(
         description="Name of the Aliyun OSS bucket to store and retrieve objects",
         default=None,
     )
 
-    ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
+    ALIYUN_OSS_ACCESS_KEY: str | None = Field(
         description="Access key ID for authenticating with Aliyun OSS",
         default=None,
     )
 
-    ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
+    ALIYUN_OSS_SECRET_KEY: str | None = Field(
         description="Secret access key for authenticating with Aliyun OSS",
         default=None,
     )
 
-    ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
+    ALIYUN_OSS_ENDPOINT: str | None = Field(
         description="URL of the Aliyun OSS endpoint for your chosen region",
         default=None,
     )
 
-    ALIYUN_OSS_REGION: Optional[str] = Field(
+    ALIYUN_OSS_REGION: str | None = Field(
         description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
         default=None,
     )
 
-    ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
+    ALIYUN_OSS_AUTH_VERSION: str | None = Field(
         description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
         default=None,
     )
 
-    ALIYUN_OSS_PATH: Optional[str] = Field(
+    ALIYUN_OSS_PATH: str | None = Field(
         description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
         default=None,
     )

@@ -1,4 +1,4 @@
-from typing import Literal, Optional
+from typing import Literal
 
 from pydantic import Field
 from pydantic_settings import BaseSettings
@@ -9,27 +9,27 @@ class S3StorageConfig(BaseSettings):
     Configuration settings for S3-compatible object storage
     """
 
-    S3_ENDPOINT: Optional[str] = Field(
+    S3_ENDPOINT: str | None = Field(
         description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
         default=None,
     )
 
-    S3_REGION: Optional[str] = Field(
+    S3_REGION: str | None = Field(
         description="Region where the S3 bucket is located (e.g., 'us-east-1')",
         default=None,
     )
 
-    S3_BUCKET_NAME: Optional[str] = Field(
+    S3_BUCKET_NAME: str | None = Field(
         description="Name of the S3 bucket to store and retrieve objects",
         default=None,
     )
 
-    S3_ACCESS_KEY: Optional[str] = Field(
+    S3_ACCESS_KEY: str | None = Field(
         description="Access key ID for authenticating with the S3 service",
         default=None,
     )
 
-    S3_SECRET_KEY: Optional[str] = Field(
+    S3_SECRET_KEY: str | None = Field(
         description="Secret access key for authenticating with the S3 service",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,22 +7,22 @@ class AzureBlobStorageConfig(BaseSettings):
     Configuration settings for Azure Blob Storage
     """
 
-    AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
+    AZURE_BLOB_ACCOUNT_NAME: str | None = Field(
         description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
         default=None,
     )
 
-    AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
+    AZURE_BLOB_ACCOUNT_KEY: str | None = Field(
         description="Access key for authenticating with the Azure Storage account",
         default=None,
     )
 
-    AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
+    AZURE_BLOB_CONTAINER_NAME: str | None = Field(
         description="Name of the Azure Blob container to store and retrieve objects",
         default=None,
     )
 
-    AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
+    AZURE_BLOB_ACCOUNT_URL: str | None = Field(
         description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,22 +7,22 @@ class BaiduOBSStorageConfig(BaseSettings):
     Configuration settings for Baidu Object Storage Service (OBS)
     """
 
-    BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
+    BAIDU_OBS_BUCKET_NAME: str | None = Field(
         description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
         default=None,
     )
 
-    BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
+    BAIDU_OBS_ACCESS_KEY: str | None = Field(
         description="Access Key ID for authenticating with Baidu OBS",
         default=None,
     )
 
-    BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
+    BAIDU_OBS_SECRET_KEY: str | None = Field(
         description="Secret Access Key for authenticating with Baidu OBS",
         default=None,
     )
 
-    BAIDU_OBS_ENDPOINT: Optional[str] = Field(
+    BAIDU_OBS_ENDPOINT: str | None = Field(
         description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
         default=None,
     )

@@ -1,7 +1,5 @@
 """ClickZetta Volume Storage Configuration"""
 
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,17 +7,17 @@ from pydantic_settings import BaseSettings
 class ClickZettaVolumeStorageConfig(BaseSettings):
     """Configuration for ClickZetta Volume storage."""
 
-    CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
+    CLICKZETTA_VOLUME_USERNAME: str | None = Field(
         description="Username for ClickZetta Volume authentication",
         default=None,
     )
 
-    CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
+    CLICKZETTA_VOLUME_PASSWORD: str | None = Field(
         description="Password for ClickZetta Volume authentication",
         default=None,
     )
 
-    CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
+    CLICKZETTA_VOLUME_INSTANCE: str | None = Field(
         description="ClickZetta instance identifier",
         default=None,
     )
@@ -49,7 +47,7 @@ class ClickZettaVolumeStorageConfig(BaseSettings):
         default="user",
     )
 
-    CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
+    CLICKZETTA_VOLUME_NAME: str | None = Field(
         description="ClickZetta volume name for external volumes",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,12 +7,12 @@ class GoogleCloudStorageConfig(BaseSettings):
     Configuration settings for Google Cloud Storage
     """
 
-    GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
+    GOOGLE_STORAGE_BUCKET_NAME: str | None = Field(
         description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
         default=None,
     )
 
-    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
+    GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: str | None = Field(
         description="Base64-encoded JSON key file for Google Cloud service account authentication",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,22 +7,22 @@ class HuaweiCloudOBSStorageConfig(BaseSettings):
     Configuration settings for Huawei Cloud Object Storage Service (OBS)
     """
 
-    HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
+    HUAWEI_OBS_BUCKET_NAME: str | None = Field(
         description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
         default=None,
     )
 
-    HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
+    HUAWEI_OBS_ACCESS_KEY: str | None = Field(
         description="Access Key ID for authenticating with Huawei Cloud OBS",
         default=None,
     )
 
-    HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
+    HUAWEI_OBS_SECRET_KEY: str | None = Field(
         description="Secret Access Key for authenticating with Huawei Cloud OBS",
         default=None,
     )
 
-    HUAWEI_OBS_SERVER: Optional[str] = Field(
+    HUAWEI_OBS_SERVER: str | None = Field(
         description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class OCIStorageConfig(BaseSettings):
     Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
     """
 
-    OCI_ENDPOINT: Optional[str] = Field(
+    OCI_ENDPOINT: str | None = Field(
         description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
         default=None,
     )
 
-    OCI_REGION: Optional[str] = Field(
+    OCI_REGION: str | None = Field(
         description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
         default=None,
     )
 
-    OCI_BUCKET_NAME: Optional[str] = Field(
+    OCI_BUCKET_NAME: str | None = Field(
         description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
         default=None,
     )
 
-    OCI_ACCESS_KEY: Optional[str] = Field(
+    OCI_ACCESS_KEY: str | None = Field(
         description="Access key (also known as API key) for authenticating with OCI Object Storage",
         default=None,
     )
 
-    OCI_SECRET_KEY: Optional[str] = Field(
+    OCI_SECRET_KEY: str | None = Field(
         description="Secret key associated with the access key for authenticating with OCI Object Storage",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,17 +7,17 @@ class SupabaseStorageConfig(BaseSettings):
     Configuration settings for Supabase Object Storage Service
     """
 
-    SUPABASE_BUCKET_NAME: Optional[str] = Field(
+    SUPABASE_BUCKET_NAME: str | None = Field(
         description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
         default=None,
     )
 
-    SUPABASE_API_KEY: Optional[str] = Field(
+    SUPABASE_API_KEY: str | None = Field(
         description="API KEY for authenticating with Supabase",
         default=None,
     )
 
-    SUPABASE_URL: Optional[str] = Field(
+    SUPABASE_URL: str | None = Field(
         description="URL of the Supabase",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class TencentCloudCOSStorageConfig(BaseSettings):
     Configuration settings for Tencent Cloud Object Storage (COS)
     """
 
-    TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
+    TENCENT_COS_BUCKET_NAME: str | None = Field(
         description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
         default=None,
     )
 
-    TENCENT_COS_REGION: Optional[str] = Field(
+    TENCENT_COS_REGION: str | None = Field(
         description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
         default=None,
     )
 
-    TENCENT_COS_SECRET_ID: Optional[str] = Field(
+    TENCENT_COS_SECRET_ID: str | None = Field(
         description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
         default=None,
     )
 
-    TENCENT_COS_SECRET_KEY: Optional[str] = Field(
+    TENCENT_COS_SECRET_KEY: str | None = Field(
         description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
         default=None,
     )
 
-    TENCENT_COS_SCHEME: Optional[str] = Field(
+    TENCENT_COS_SCHEME: str | None = Field(
         description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class VolcengineTOSStorageConfig(BaseSettings):
     Configuration settings for Volcengine Tinder Object Storage (TOS)
     """
 
-    VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
+    VOLCENGINE_TOS_BUCKET_NAME: str | None = Field(
         description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
         default=None,
     )
 
-    VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
+    VOLCENGINE_TOS_ACCESS_KEY: str | None = Field(
         description="Access Key ID for authenticating with Volcengine TOS",
         default=None,
     )
 
-    VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
+    VOLCENGINE_TOS_SECRET_KEY: str | None = Field(
         description="Secret Access Key for authenticating with Volcengine TOS",
         default=None,
     )
 
-    VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
+    VOLCENGINE_TOS_ENDPOINT: str | None = Field(
         description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
         default=None,
     )
 
-    VOLCENGINE_TOS_REGION: Optional[str] = Field(
+    VOLCENGINE_TOS_REGION: str | None = Field(
         description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -11,37 +9,37 @@ class AnalyticdbConfig(BaseSettings):
     https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
     """
 
-    ANALYTICDB_KEY_ID: Optional[str] = Field(
+    ANALYTICDB_KEY_ID: str | None = Field(
         default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
     )
-    ANALYTICDB_KEY_SECRET: Optional[str] = Field(
+    ANALYTICDB_KEY_SECRET: str | None = Field(
         default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
     )
-    ANALYTICDB_REGION_ID: Optional[str] = Field(
+    ANALYTICDB_REGION_ID: str | None = Field(
         default=None,
         description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
     )
-    ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
+    ANALYTICDB_INSTANCE_ID: str | None = Field(
         default=None,
         description="The unique identifier of the AnalyticDB instance you want to connect to.",
     )
-    ANALYTICDB_ACCOUNT: Optional[str] = Field(
+    ANALYTICDB_ACCOUNT: str | None = Field(
         default=None,
         description="The account name used to log in to the AnalyticDB instance"
         " (usually the initial account created with the instance).",
     )
-    ANALYTICDB_PASSWORD: Optional[str] = Field(
+    ANALYTICDB_PASSWORD: str | None = Field(
         default=None, description="The password associated with the AnalyticDB account for database authentication."
     )
-    ANALYTICDB_NAMESPACE: Optional[str] = Field(
+    ANALYTICDB_NAMESPACE: str | None = Field(
         default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
     )
-    ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
+    ANALYTICDB_NAMESPACE_PASSWORD: str | None = Field(
         default=None,
         description="The password for accessing the specified namespace within the AnalyticDB instance"
         " (if namespace feature is enabled).",
     )
-    ANALYTICDB_HOST: Optional[str] = Field(
+    ANALYTICDB_HOST: str | None = Field(
         default=None, description="The host of the AnalyticDB instance you want to connect to."
     )
     ANALYTICDB_PORT: PositiveInt = Field(

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class BaiduVectorDBConfig(BaseSettings):
     Configuration settings for Baidu Vector Database
     """
 
-    BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
+    BAIDU_VECTOR_DB_ENDPOINT: str | None = Field(
         description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
         default=None,
     )
@@ -19,17 +17,17 @@ class BaiduVectorDBConfig(BaseSettings):
         default=30000,
     )
 
-    BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
+    BAIDU_VECTOR_DB_ACCOUNT: str | None = Field(
         description="Account for authenticating with the Baidu Vector Database",
         default=None,
     )
 
-    BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
+    BAIDU_VECTOR_DB_API_KEY: str | None = Field(
         description="API key for authenticating with the Baidu Vector Database service",
         default=None,
     )
 
-    BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
+    BAIDU_VECTOR_DB_DATABASE: str | None = Field(
         description="Name of the specific Baidu Vector Database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings
 
@@ -9,7 +7,7 @@ class ChromaConfig(BaseSettings):
     Configuration settings for Chroma vector database
     """
 
-    CHROMA_HOST: Optional[str] = Field(
+    CHROMA_HOST: str | None = Field(
         description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
         default=None,
     )
@@ -19,22 +17,22 @@ class ChromaConfig(BaseSettings):
         default=8000,
     )
 
-    CHROMA_TENANT: Optional[str] = Field(
+    CHROMA_TENANT: str | None = Field(
         description="Tenant identifier for multi-tenancy support in Chroma",
         default=None,
     )
 
-    CHROMA_DATABASE: Optional[str] = Field(
+    CHROMA_DATABASE: str | None = Field(
         description="Name of the Chroma database to connect to",
         default=None,
     )
 
-    CHROMA_AUTH_PROVIDER: Optional[str] = Field(
+    CHROMA_AUTH_PROVIDER: str | None = Field(
         description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
         default=None,
     )
 
-    CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
+    CHROMA_AUTH_CREDENTIALS: str | None = Field(
         description="Authentication credentials for Chroma (format depends on the auth provider)",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,62 +7,62 @@ class ClickzettaConfig(BaseSettings):
     Clickzetta Lakehouse vector database configuration
     """
 
-    CLICKZETTA_USERNAME: Optional[str] = Field(
+    CLICKZETTA_USERNAME: str | None = Field(
         description="Username for authenticating with Clickzetta Lakehouse",
         default=None,
     )
 
-    CLICKZETTA_PASSWORD: Optional[str] = Field(
+    CLICKZETTA_PASSWORD: str | None = Field(
         description="Password for authenticating with Clickzetta Lakehouse",
         default=None,
     )
 
-    CLICKZETTA_INSTANCE: Optional[str] = Field(
+    CLICKZETTA_INSTANCE: str | None = Field(
         description="Clickzetta Lakehouse instance ID",
         default=None,
     )
 
-    CLICKZETTA_SERVICE: Optional[str] = Field(
+    CLICKZETTA_SERVICE: str | None = Field(
         description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
         default="api.clickzetta.com",
     )
 
-    CLICKZETTA_WORKSPACE: Optional[str] = Field(
+    CLICKZETTA_WORKSPACE: str | None = Field(
         description="Clickzetta workspace name",
         default="default",
     )
 
-    CLICKZETTA_VCLUSTER: Optional[str] = Field(
+    CLICKZETTA_VCLUSTER: str | None = Field(
         description="Clickzetta virtual cluster name",
         default="default_ap",
     )
 
-    CLICKZETTA_SCHEMA: Optional[str] = Field(
+    CLICKZETTA_SCHEMA: str | None = Field(
         description="Database schema name in Clickzetta",
         default="public",
     )
 
-    CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
+    CLICKZETTA_BATCH_SIZE: int | None = Field(
         description="Batch size for bulk insert operations",
         default=100,
     )
 
-    CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
+    CLICKZETTA_ENABLE_INVERTED_INDEX: bool | None = Field(
         description="Enable inverted index for full-text search capabilities",
         default=True,
     )
 
-    CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
+    CLICKZETTA_ANALYZER_TYPE: str | None = Field(
         description="Analyzer type for full-text search: keyword, english, chinese, unicode",
         default="chinese",
     )
 
-    CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
+    CLICKZETTA_ANALYZER_MODE: str | None = Field(
         description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
         default="smart",
     )
 
-    CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
+    CLICKZETTA_VECTOR_DISTANCE_FUNCTION: str | None = Field(
         description="Distance function for vector similarity: l2_distance or cosine_distance",
         default="cosine_distance",
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings
 
@@ -9,27 +7,27 @@ class CouchbaseConfig(BaseSettings):
     Couchbase configs
     """
 
-    COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
+    COUCHBASE_CONNECTION_STRING: str | None = Field(
         description="COUCHBASE connection string",
         default=None,
     )
 
-    COUCHBASE_USER: Optional[str] = Field(
+    COUCHBASE_USER: str | None = Field(
         description="COUCHBASE user",
         default=None,
     )
 
-    COUCHBASE_PASSWORD: Optional[str] = Field(
+    COUCHBASE_PASSWORD: str | None = Field(
         description="COUCHBASE password",
         default=None,
     )
 
-    COUCHBASE_BUCKET_NAME: Optional[str] = Field(
+    COUCHBASE_BUCKET_NAME: str | None = Field(
         description="COUCHBASE bucket name",
         default=None,
     )
 
-    COUCHBASE_SCOPE_NAME: Optional[str] = Field(
+    COUCHBASE_SCOPE_NAME: str | None = Field(
         description="COUCHBASE scope name",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt, model_validator
 from pydantic_settings import BaseSettings
 
@@ -10,7 +8,7 @@ class ElasticsearchConfig(BaseSettings):
     Can load from environment variables or .env files.
     """
 
-    ELASTICSEARCH_HOST: Optional[str] = Field(
+    ELASTICSEARCH_HOST: str | None = Field(
         description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
         default="127.0.0.1",
     )
@@ -20,30 +18,28 @@ class ElasticsearchConfig(BaseSettings):
         default=9200,
     )
 
-    ELASTICSEARCH_USERNAME: Optional[str] = Field(
+    ELASTICSEARCH_USERNAME: str | None = Field(
         description="Username for authenticating with Elasticsearch (default is 'elastic')",
         default="elastic",
     )
 
-    ELASTICSEARCH_PASSWORD: Optional[str] = Field(
+    ELASTICSEARCH_PASSWORD: str | None = Field(
         description="Password for authenticating with Elasticsearch (default is 'elastic')",
         default="elastic",
     )
 
     # Elastic Cloud (optional)
-    ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
+    ELASTICSEARCH_USE_CLOUD: bool | None = Field(
         description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
     )
-    ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
+    ELASTICSEARCH_CLOUD_URL: str | None = Field(
         description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
         default=None,
     )
-    ELASTICSEARCH_API_KEY: Optional[str] = Field(
-        description="API key for authenticating with Elastic Cloud", default=None
-    )
+    ELASTICSEARCH_API_KEY: str | None = Field(description="API key for authenticating with Elastic Cloud", default=None)
 
     # Common options
-    ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
+    ELASTICSEARCH_CA_CERTS: str | None = Field(
         description="Path to CA certificate file for SSL verification", default=None
     )
     ELASTICSEARCH_VERIFY_CERTS: bool = Field(
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,17 +7,17 @@ class HuaweiCloudConfig(BaseSettings):
     Configuration settings for Huawei cloud search service
     """

-    HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
+    HUAWEI_CLOUD_HOSTS: str | None = Field(
         description="Hostname or IP address of the Huawei cloud search service instance",
         default=None,
     )

-    HUAWEI_CLOUD_USER: Optional[str] = Field(
+    HUAWEI_CLOUD_USER: str | None = Field(
         description="Username for authenticating with Huawei cloud search service",
         default=None,
     )

-    HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
+    HUAWEI_CLOUD_PASSWORD: str | None = Field(
         description="Password for authenticating with Huawei cloud search service",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class LindormConfig(BaseSettings):
     Lindorm configs
     """

-    LINDORM_URL: Optional[str] = Field(
+    LINDORM_URL: str | None = Field(
         description="Lindorm url",
         default=None,
     )
-    LINDORM_USERNAME: Optional[str] = Field(
+    LINDORM_USERNAME: str | None = Field(
         description="Lindorm user",
         default=None,
     )
-    LINDORM_PASSWORD: Optional[str] = Field(
+    LINDORM_PASSWORD: str | None = Field(
         description="Lindorm password",
         default=None,
     )
-    DEFAULT_INDEX_TYPE: Optional[str] = Field(
+    LINDORM_INDEX_TYPE: str | None = Field(
         description="Lindorm Vector Index Type, hnsw or flat is available in dify",
         default="hnsw",
     )
-    DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
+    LINDORM_DISTANCE_TYPE: str | None = Field(
         description="Vector Distance Type, support l2, cosinesimil, innerproduct", default="l2"
     )
-    USING_UGC_INDEX: Optional[bool] = Field(
-        description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
-        default=False,
+    LINDORM_USING_UGC: bool | None = Field(
+        description="Using UGC index will store indexes with the same IndexType/Dimension in a single big index.",
+        default=True,
     )
-    LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)
+    LINDORM_QUERY_TIMEOUT: float | None = Field(description="The lindorm search request timeout (s)", default=2.0)

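Unlike the purely mechanical `Optional` rewrites elsewhere in this change, the Lindorm hunk also renames fields (`DEFAULT_INDEX_TYPE` to `LINDORM_INDEX_TYPE`, `DEFAULT_DISTANCE_TYPE` to `LINDORM_DISTANCE_TYPE`, `USING_UGC_INDEX` to `LINDORM_USING_UGC`), rewords the UGC description, and flips its default from `False` to `True`. Because pydantic-settings resolves each field from the environment variable of the same name, deployments that still export the old names would silently fall back to the new defaults. A minimal sketch of that lookup behavior (the demo class is hypothetical, not from this diff):

```python
import os

from pydantic_settings import BaseSettings

# Sketch: pydantic-settings reads the env var matching the field name,
# so a renamed field no longer sees values exported under the old name.
os.environ["USING_UGC_INDEX"] = "false"  # old name, now ignored


class LindormDemo(BaseSettings):
    LINDORM_USING_UGC: bool | None = True


print(LindormDemo().LINDORM_USING_UGC)  # True: falls back to the new default
```
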
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,22 +7,22 @@ class MilvusConfig(BaseSettings):
     Configuration settings for Milvus vector database
     """

-    MILVUS_URI: Optional[str] = Field(
+    MILVUS_URI: str | None = Field(
         description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
         default="http://127.0.0.1:19530",
     )

-    MILVUS_TOKEN: Optional[str] = Field(
+    MILVUS_TOKEN: str | None = Field(
         description="Authentication token for Milvus, if token-based authentication is enabled",
         default=None,
     )

-    MILVUS_USER: Optional[str] = Field(
+    MILVUS_USER: str | None = Field(
         description="Username for authenticating with Milvus, if username/password authentication is enabled",
         default=None,
     )

-    MILVUS_PASSWORD: Optional[str] = Field(
+    MILVUS_PASSWORD: str | None = Field(
         description="Password for authenticating with Milvus, if username/password authentication is enabled",
         default=None,
     )

@@ -40,7 +38,7 @@ class MilvusConfig(BaseSettings):
         default=True,
     )

-    MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
+    MILVUS_ANALYZER_PARAMS: str | None = Field(
         description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class OceanBaseVectorConfig(BaseSettings):
     Configuration settings for OceanBase Vector database
     """

-    OCEANBASE_VECTOR_HOST: Optional[str] = Field(
+    OCEANBASE_VECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
         default=None,
     )

-    OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
+    OCEANBASE_VECTOR_PORT: PositiveInt | None = Field(
         description="Port number on which the OceanBase Vector server is listening (default is 2881)",
         default=2881,
     )

-    OCEANBASE_VECTOR_USER: Optional[str] = Field(
+    OCEANBASE_VECTOR_USER: str | None = Field(
         description="Username for authenticating with the OceanBase Vector database",
         default=None,
     )

-    OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
+    OCEANBASE_VECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the OceanBase Vector database",
         default=None,
     )

-    OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
+    OCEANBASE_VECTOR_DATABASE: str | None = Field(
         description="Name of the OceanBase Vector database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class OpenGaussConfig(BaseSettings):
     Configuration settings for OpenGauss
     """

-    OPENGAUSS_HOST: Optional[str] = Field(
+    OPENGAUSS_HOST: str | None = Field(
         description="Hostname or IP address of the OpenGauss server(e.g., 'localhost')",
         default=None,
     )

@@ -19,17 +17,17 @@ class OpenGaussConfig(BaseSettings):
         default=6600,
     )

-    OPENGAUSS_USER: Optional[str] = Field(
+    OPENGAUSS_USER: str | None = Field(
         description="Username for authenticating with the OpenGauss database",
         default=None,
     )

-    OPENGAUSS_PASSWORD: Optional[str] = Field(
+    OPENGAUSS_PASSWORD: str | None = Field(
         description="Password for authenticating with the OpenGauss database",
         default=None,
     )

-    OPENGAUSS_DATABASE: Optional[str] = Field(
+    OPENGAUSS_DATABASE: str | None = Field(
         description="Name of the OpenGauss database to connect to",
         default=None,
     )

@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Literal, Optional
+from typing import Literal

 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -18,7 +18,7 @@ class OpenSearchConfig(BaseSettings):
         BASIC = "basic"
         AWS_MANAGED_IAM = "aws_managed_iam"

-    OPENSEARCH_HOST: Optional[str] = Field(
+    OPENSEARCH_HOST: str | None = Field(
         description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
         default=None,
     )

@@ -43,21 +43,21 @@ class OpenSearchConfig(BaseSettings):
         default=AuthMethod.BASIC,
     )

-    OPENSEARCH_USER: Optional[str] = Field(
+    OPENSEARCH_USER: str | None = Field(
         description="Username for authenticating with OpenSearch",
         default=None,
     )

-    OPENSEARCH_PASSWORD: Optional[str] = Field(
+    OPENSEARCH_PASSWORD: str | None = Field(
         description="Password for authenticating with OpenSearch",
         default=None,
     )

-    OPENSEARCH_AWS_REGION: Optional[str] = Field(
+    OPENSEARCH_AWS_REGION: str | None = Field(
         description="AWS region for OpenSearch (e.g. 'us-west-2')",
         default=None,
     )

-    OPENSEARCH_AWS_SERVICE: Optional[Literal["es", "aoss"]] = Field(
+    OPENSEARCH_AWS_SERVICE: Literal["es", "aoss"] | None = Field(
         description="AWS service for OpenSearch (e.g. 'aoss' for OpenSearch Serverless)", default=None
     )

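The OpenSearch hunk shows the same rewrite applied to a subscripted generic: `Optional[Literal["es", "aoss"]]` becomes `Literal["es", "aoss"] | None`, which works because on Python 3.10+ the `|` operator composes with the whole subscripted form. A quick sketch (hypothetical alias and function):

```python
from typing import Literal

# Union syntax composes directly with subscripted generics (Python 3.10+).
AwsService = Literal["es", "aoss"] | None


def describe(service: AwsService) -> str:
    return service if service is not None else "self-hosted"


print(describe("aoss"))  # aoss
print(describe(None))    # self-hosted
```
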
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,33 +7,33 @@ class OracleConfig(BaseSettings):
     Configuration settings for Oracle database
     """

-    ORACLE_USER: Optional[str] = Field(
+    ORACLE_USER: str | None = Field(
         description="Username for authenticating with the Oracle database",
         default=None,
     )

-    ORACLE_PASSWORD: Optional[str] = Field(
+    ORACLE_PASSWORD: str | None = Field(
         description="Password for authenticating with the Oracle database",
         default=None,
     )

-    ORACLE_DSN: Optional[str] = Field(
+    ORACLE_DSN: str | None = Field(
         description="Oracle database connection string. For traditional database, use format 'host:port/service_name'. "
         "For autonomous database, use the service name from tnsnames.ora in the wallet",
         default=None,
     )

-    ORACLE_CONFIG_DIR: Optional[str] = Field(
+    ORACLE_CONFIG_DIR: str | None = Field(
         description="Directory containing the tnsnames.ora configuration file. Only used in thin mode connection",
         default=None,
     )

-    ORACLE_WALLET_LOCATION: Optional[str] = Field(
+    ORACLE_WALLET_LOCATION: str | None = Field(
         description="Oracle wallet directory path containing the wallet files for secure connection",
         default=None,
     )

-    ORACLE_WALLET_PASSWORD: Optional[str] = Field(
+    ORACLE_WALLET_PASSWORD: str | None = Field(
         description="Password to decrypt the Oracle wallet, if it is encrypted",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class PGVectorConfig(BaseSettings):
     Configuration settings for PGVector (PostgreSQL with vector extension)
     """

-    PGVECTOR_HOST: Optional[str] = Field(
+    PGVECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
         default=None,
     )

@@ -19,17 +17,17 @@ class PGVectorConfig(BaseSettings):
         default=5433,
     )

-    PGVECTOR_USER: Optional[str] = Field(
+    PGVECTOR_USER: str | None = Field(
         description="Username for authenticating with the PostgreSQL database",
         default=None,
     )

-    PGVECTOR_PASSWORD: Optional[str] = Field(
+    PGVECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the PostgreSQL database",
         default=None,
     )

-    PGVECTOR_DATABASE: Optional[str] = Field(
+    PGVECTOR_DATABASE: str | None = Field(
         description="Name of the PostgreSQL database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class PGVectoRSConfig(BaseSettings):
     Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
     """

-    PGVECTO_RS_HOST: Optional[str] = Field(
+    PGVECTO_RS_HOST: str | None = Field(
         description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
         default=None,
     )

@@ -19,17 +17,17 @@ class PGVectoRSConfig(BaseSettings):
         default=5431,
     )

-    PGVECTO_RS_USER: Optional[str] = Field(
+    PGVECTO_RS_USER: str | None = Field(
         description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )

-    PGVECTO_RS_PASSWORD: Optional[str] = Field(
+    PGVECTO_RS_PASSWORD: str | None = Field(
         description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
         default=None,
     )

-    PGVECTO_RS_DATABASE: Optional[str] = Field(
+    PGVECTO_RS_DATABASE: str | None = Field(
         description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class QdrantConfig(BaseSettings):
     Configuration settings for Qdrant vector database
     """

-    QDRANT_URL: Optional[str] = Field(
+    QDRANT_URL: str | None = Field(
         description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
         default=None,
     )

-    QDRANT_API_KEY: Optional[str] = Field(
+    QDRANT_API_KEY: str | None = Field(
         description="API key for authenticating with the Qdrant server",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class RelytConfig(BaseSettings):
     Configuration settings for Relyt database
     """

-    RELYT_HOST: Optional[str] = Field(
+    RELYT_HOST: str | None = Field(
         description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
         default=None,
     )

@@ -19,17 +17,17 @@ class RelytConfig(BaseSettings):
         default=9200,
     )

-    RELYT_USER: Optional[str] = Field(
+    RELYT_USER: str | None = Field(
         description="Username for authenticating with the Relyt database",
         default=None,
     )

-    RELYT_PASSWORD: Optional[str] = Field(
+    RELYT_PASSWORD: str | None = Field(
         description="Password for authenticating with the Relyt database",
         default=None,
     )

-    RELYT_DATABASE: Optional[str] = Field(
+    RELYT_DATABASE: str | None = Field(
         description="Name of the Relyt database to connect to (default is 'default')",
         default="default",
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,22 +7,22 @@ class TableStoreConfig(BaseSettings):
     Configuration settings for TableStore.
     """

-    TABLESTORE_ENDPOINT: Optional[str] = Field(
+    TABLESTORE_ENDPOINT: str | None = Field(
         description="Endpoint address of the TableStore server (e.g. 'https://instance-name.cn-hangzhou.ots.aliyuncs.com')",
         default=None,
     )

-    TABLESTORE_INSTANCE_NAME: Optional[str] = Field(
+    TABLESTORE_INSTANCE_NAME: str | None = Field(
         description="Instance name to access TableStore server (eg. 'instance-name')",
         default=None,
     )

-    TABLESTORE_ACCESS_KEY_ID: Optional[str] = Field(
+    TABLESTORE_ACCESS_KEY_ID: str | None = Field(
         description="AccessKey id for the instance name",
         default=None,
     )

-    TABLESTORE_ACCESS_KEY_SECRET: Optional[str] = Field(
+    TABLESTORE_ACCESS_KEY_SECRET: str | None = Field(
         description="AccessKey secret for the instance name",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class TencentVectorDBConfig(BaseSettings):
     Configuration settings for Tencent Vector Database
     """

-    TENCENT_VECTOR_DB_URL: Optional[str] = Field(
+    TENCENT_VECTOR_DB_URL: str | None = Field(
         description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
         default=None,
     )

-    TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
+    TENCENT_VECTOR_DB_API_KEY: str | None = Field(
         description="API key for authenticating with the Tencent Vector Database service",
         default=None,
     )

@@ -24,12 +22,12 @@ class TencentVectorDBConfig(BaseSettings):
         default=30,
     )

-    TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
+    TENCENT_VECTOR_DB_USERNAME: str | None = Field(
         description="Username for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )

-    TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
+    TENCENT_VECTOR_DB_PASSWORD: str | None = Field(
         description="Password for authenticating with the Tencent Vector Database (if required)",
         default=None,
     )

@@ -44,7 +42,7 @@ class TencentVectorDBConfig(BaseSettings):
         default=2,
     )

-    TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
+    TENCENT_VECTOR_DB_DATABASE: str | None = Field(
         description="Name of the specific Tencent Vector Database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, NonNegativeInt, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class TidbOnQdrantConfig(BaseSettings):
     Tidb on Qdrant configs
     """

-    TIDB_ON_QDRANT_URL: Optional[str] = Field(
+    TIDB_ON_QDRANT_URL: str | None = Field(
         description="Tidb on Qdrant url",
         default=None,
     )

-    TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
+    TIDB_ON_QDRANT_API_KEY: str | None = Field(
         description="Tidb on Qdrant api key",
         default=None,
     )

@@ -34,37 +32,37 @@ class TidbOnQdrantConfig(BaseSettings):
         default=6334,
     )

-    TIDB_PUBLIC_KEY: Optional[str] = Field(
+    TIDB_PUBLIC_KEY: str | None = Field(
         description="Tidb account public key",
         default=None,
     )

-    TIDB_PRIVATE_KEY: Optional[str] = Field(
+    TIDB_PRIVATE_KEY: str | None = Field(
         description="Tidb account private key",
         default=None,
     )

-    TIDB_API_URL: Optional[str] = Field(
+    TIDB_API_URL: str | None = Field(
         description="Tidb API url",
         default=None,
     )

-    TIDB_IAM_API_URL: Optional[str] = Field(
+    TIDB_IAM_API_URL: str | None = Field(
         description="Tidb IAM API url",
         default=None,
     )

-    TIDB_REGION: Optional[str] = Field(
+    TIDB_REGION: str | None = Field(
         description="Tidb serverless region",
         default="regions/aws-us-east-1",
     )

-    TIDB_PROJECT_ID: Optional[str] = Field(
+    TIDB_PROJECT_ID: str | None = Field(
         description="Tidb project id",
         default=None,
     )

-    TIDB_SPEND_LIMIT: Optional[int] = Field(
+    TIDB_SPEND_LIMIT: int | None = Field(
         description="Tidb spend limit",
         default=100,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,27 +7,27 @@ class TiDBVectorConfig(BaseSettings):
     Configuration settings for TiDB Vector database
     """

-    TIDB_VECTOR_HOST: Optional[str] = Field(
+    TIDB_VECTOR_HOST: str | None = Field(
         description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
         default=None,
     )

-    TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
+    TIDB_VECTOR_PORT: PositiveInt | None = Field(
         description="Port number on which the TiDB Vector server is listening (default is 4000)",
         default=4000,
     )

-    TIDB_VECTOR_USER: Optional[str] = Field(
+    TIDB_VECTOR_USER: str | None = Field(
         description="Username for authenticating with the TiDB Vector database",
         default=None,
     )

-    TIDB_VECTOR_PASSWORD: Optional[str] = Field(
+    TIDB_VECTOR_PASSWORD: str | None = Field(
         description="Password for authenticating with the TiDB Vector database",
         default=None,
     )

-    TIDB_VECTOR_DATABASE: Optional[str] = Field(
+    TIDB_VECTOR_DATABASE: str | None = Field(
         description="Name of the TiDB Vector database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class UpstashConfig(BaseSettings):
     Configuration settings for Upstash vector database
     """

-    UPSTASH_VECTOR_URL: Optional[str] = Field(
+    UPSTASH_VECTOR_URL: str | None = Field(
         description="URL of the upstash server (e.g., 'https://vector.upstash.io')",
         default=None,
     )

-    UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
+    UPSTASH_VECTOR_TOKEN: str | None = Field(
         description="Token for authenticating with the upstash server",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,7 +7,7 @@ class VastbaseVectorConfig(BaseSettings):
     Configuration settings for Vector (Vastbase with vector extension)
     """

-    VASTBASE_HOST: Optional[str] = Field(
+    VASTBASE_HOST: str | None = Field(
         description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
         default=None,
     )

@@ -19,17 +17,17 @@ class VastbaseVectorConfig(BaseSettings):
         default=5432,
     )

-    VASTBASE_USER: Optional[str] = Field(
+    VASTBASE_USER: str | None = Field(
         description="Username for authenticating with the Vastbase database",
         default=None,
     )

-    VASTBASE_PASSWORD: Optional[str] = Field(
+    VASTBASE_PASSWORD: str | None = Field(
         description="Password for authenticating with the Vastbase database",
         default=None,
     )

-    VASTBASE_DATABASE: Optional[str] = Field(
+    VASTBASE_DATABASE: str | None = Field(
         description="Name of the Vastbase database to connect to",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field
 from pydantic_settings import BaseSettings

@@ -11,14 +9,14 @@ class VikingDBConfig(BaseSettings):
     https://www.volcengine.com/docs/6291/65568
     """

-    VIKINGDB_ACCESS_KEY: Optional[str] = Field(
+    VIKINGDB_ACCESS_KEY: str | None = Field(
         description="The Access Key provided by Volcengine VikingDB for API authentication."
         "Refer to the following documentation for details on obtaining credentials:"
         "https://www.volcengine.com/docs/6291/65568",
         default=None,
     )

-    VIKINGDB_SECRET_KEY: Optional[str] = Field(
+    VIKINGDB_SECRET_KEY: str | None = Field(
         description="The Secret Key provided by Volcengine VikingDB for API authentication.",
         default=None,
     )

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import Field, PositiveInt
 from pydantic_settings import BaseSettings

@@ -9,12 +7,12 @@ class WeaviateConfig(BaseSettings):
     Configuration settings for Weaviate vector database
     """

-    WEAVIATE_ENDPOINT: Optional[str] = Field(
+    WEAVIATE_ENDPOINT: str | None = Field(
         description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
         default=None,
     )

-    WEAVIATE_API_KEY: Optional[str] = Field(
+    WEAVIATE_API_KEY: str | None = Field(
         description="API key for authenticating with the Weaviate server",
         default=None,
     )

@@ -1,5 +1,5 @@
 from collections.abc import Mapping
-from typing import Any, Optional
+from typing import Any

 from pydantic import Field
 from pydantic.fields import FieldInfo

@@ -15,22 +15,22 @@ class ApolloSettingsSourceInfo(BaseSettings):
     Packaging build information
     """

-    APOLLO_APP_ID: Optional[str] = Field(
+    APOLLO_APP_ID: str | None = Field(
         description="apollo app_id",
         default=None,
     )

-    APOLLO_CLUSTER: Optional[str] = Field(
+    APOLLO_CLUSTER: str | None = Field(
         description="apollo cluster",
         default=None,
     )

-    APOLLO_CONFIG_URL: Optional[str] = Field(
+    APOLLO_CONFIG_URL: str | None = Field(
         description="apollo config url",
         default=None,
     )

-    APOLLO_NAMESPACE: Optional[str] = Field(
+    APOLLO_NAMESPACE: str | None = Field(
         description="apollo namespace",
         default=None,
     )

@@ -54,90 +54,90 @@ api.add_resource(AppImportCheckDependenciesApi, "/apps/imports/<string:app_id>/c

 # Import other controllers
 from . import (
-    admin,  # pyright: ignore[reportUnusedImport]
-    apikey,  # pyright: ignore[reportUnusedImport]
-    extension,  # pyright: ignore[reportUnusedImport]
-    feature,  # pyright: ignore[reportUnusedImport]
-    init_validate,  # pyright: ignore[reportUnusedImport]
-    ping,  # pyright: ignore[reportUnusedImport]
-    setup,  # pyright: ignore[reportUnusedImport]
-    version,  # pyright: ignore[reportUnusedImport]
+    admin,
+    apikey,
+    extension,
+    feature,
+    init_validate,
+    ping,
+    setup,
+    version,
 )

 # Import app controllers
 from .app import (
-    advanced_prompt_template,  # pyright: ignore[reportUnusedImport]
-    agent,  # pyright: ignore[reportUnusedImport]
-    annotation,  # pyright: ignore[reportUnusedImport]
-    app,  # pyright: ignore[reportUnusedImport]
-    audio,  # pyright: ignore[reportUnusedImport]
-    completion,  # pyright: ignore[reportUnusedImport]
-    conversation,  # pyright: ignore[reportUnusedImport]
-    conversation_variables,  # pyright: ignore[reportUnusedImport]
-    generator,  # pyright: ignore[reportUnusedImport]
-    mcp_server,  # pyright: ignore[reportUnusedImport]
-    message,  # pyright: ignore[reportUnusedImport]
-    model_config,  # pyright: ignore[reportUnusedImport]
-    ops_trace,  # pyright: ignore[reportUnusedImport]
-    site,  # pyright: ignore[reportUnusedImport]
-    statistic,  # pyright: ignore[reportUnusedImport]
-    workflow,  # pyright: ignore[reportUnusedImport]
-    workflow_app_log,  # pyright: ignore[reportUnusedImport]
-    workflow_draft_variable,  # pyright: ignore[reportUnusedImport]
-    workflow_run,  # pyright: ignore[reportUnusedImport]
-    workflow_statistic,  # pyright: ignore[reportUnusedImport]
+    advanced_prompt_template,
+    agent,
+    annotation,
+    app,
+    audio,
+    completion,
+    conversation,
+    conversation_variables,
+    generator,
+    mcp_server,
+    message,
+    model_config,
+    ops_trace,
+    site,
+    statistic,
+    workflow,
+    workflow_app_log,
+    workflow_draft_variable,
+    workflow_run,
+    workflow_statistic,
 )

 # Import auth controllers
 from .auth import (
-    activate,  # pyright: ignore[reportUnusedImport]
-    data_source_bearer_auth,  # pyright: ignore[reportUnusedImport]
-    data_source_oauth,  # pyright: ignore[reportUnusedImport]
-    email_register,  # pyright: ignore[reportUnusedImport]
-    forgot_password,  # pyright: ignore[reportUnusedImport]
-    login,  # pyright: ignore[reportUnusedImport]
-    oauth,  # pyright: ignore[reportUnusedImport]
-    oauth_server,  # pyright: ignore[reportUnusedImport]
+    activate,
+    data_source_bearer_auth,
+    data_source_oauth,
+    email_register,
+    forgot_password,
+    login,
+    oauth,
+    oauth_server,
 )

 # Import billing controllers
-from .billing import billing, compliance  # pyright: ignore[reportUnusedImport]
+from .billing import billing, compliance

 # Import datasets controllers
 from .datasets import (
-    data_source,  # pyright: ignore[reportUnusedImport]
-    datasets,  # pyright: ignore[reportUnusedImport]
-    datasets_document,  # pyright: ignore[reportUnusedImport]
-    datasets_segments,  # pyright: ignore[reportUnusedImport]
-    external,  # pyright: ignore[reportUnusedImport]
-    hit_testing,  # pyright: ignore[reportUnusedImport]
-    metadata,  # pyright: ignore[reportUnusedImport]
-    website,  # pyright: ignore[reportUnusedImport]
+    data_source,
+    datasets,
+    datasets_document,
+    datasets_segments,
+    external,
+    hit_testing,
+    metadata,
+    website,
 )

 # Import explore controllers
 from .explore import (
-    installed_app,  # pyright: ignore[reportUnusedImport]
-    parameter,  # pyright: ignore[reportUnusedImport]
-    recommended_app,  # pyright: ignore[reportUnusedImport]
-    saved_message,  # pyright: ignore[reportUnusedImport]
+    installed_app,
+    parameter,
+    recommended_app,
+    saved_message,
 )

 # Import tag controllers
-from .tag import tags  # pyright: ignore[reportUnusedImport]
+from .tag import tags

 # Import workspace controllers
 from .workspace import (
-    account,  # pyright: ignore[reportUnusedImport]
-    agent_providers,  # pyright: ignore[reportUnusedImport]
-    endpoint,  # pyright: ignore[reportUnusedImport]
-    load_balancing_config,  # pyright: ignore[reportUnusedImport]
-    members,  # pyright: ignore[reportUnusedImport]
-    model_providers,  # pyright: ignore[reportUnusedImport]
-    models,  # pyright: ignore[reportUnusedImport]
-    plugin,  # pyright: ignore[reportUnusedImport]
-    tool_providers,  # pyright: ignore[reportUnusedImport]
-    workspace,  # pyright: ignore[reportUnusedImport]
+    account,
+    agent_providers,
+    endpoint,
+    load_balancing_config,
+    members,
+    model_providers,
+    models,
+    plugin,
+    tool_providers,
+    workspace,
 )

 # Explore Audio

@@ -212,3 +212,70 @@ api.add_resource(
 )

 api.add_namespace(console_ns)
+
+__all__ = [
+    "account",
+    "activate",
+    "admin",
+    "advanced_prompt_template",
+    "agent",
+    "agent_providers",
+    "annotation",
+    "api",
+    "apikey",
+    "app",
+    "audio",
+    "billing",
+    "bp",
+    "completion",
+    "compliance",
+    "console_ns",
+    "conversation",
+    "conversation_variables",
+    "data_source",
+    "data_source_bearer_auth",
+    "data_source_oauth",
+    "datasets",
+    "datasets_document",
+    "datasets_segments",
+    "email_register",
+    "endpoint",
+    "extension",
+    "external",
+    "feature",
+    "forgot_password",
+    "generator",
+    "hit_testing",
+    "init_validate",
+    "installed_app",
+    "load_balancing_config",
+    "login",
+    "mcp_server",
+    "members",
+    "message",
+    "metadata",
+    "model_config",
+    "model_providers",
+    "models",
+    "oauth",
+    "oauth_server",
+    "ops_trace",
+    "parameter",
+    "ping",
+    "plugin",
+    "recommended_app",
+    "saved_message",
+    "setup",
+    "site",
+    "statistic",
+    "tags",
+    "tool_providers",
+    "version",
+    "website",
+    "workflow",
+    "workflow_app_log",
+    "workflow_draft_variable",
+    "workflow_run",
+    "workflow_statistic",
+    "workspace",
+]

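This hunk trades per-line `# pyright: ignore[reportUnusedImport]` suppressions for a single `__all__` declaration at the end of the module. Listing the submodules in `__all__` marks the imports, which exist only for their route-registration side effects, as intentional re-exports, so pyright's `reportUnusedImport` check no longer fires on them. A minimal sketch of the pattern (hypothetical package and submodule names):

```python
# pkg/__init__.py -- sketch with hypothetical submodules alpha and beta
from . import alpha, beta  # imported for their registration side effects

# Declaring them in __all__ marks the imports as deliberate re-exports,
# which also defines the surface of `from pkg import *`.
__all__ = [
    "alpha",
    "beta",
]
```
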
@@ -1,5 +1,3 @@
-from typing import Optional
-
 import flask_restx
 from flask_login import current_user
 from flask_restx import Resource, fields, marshal_with

@@ -50,7 +48,7 @@ class BaseApiKeyListResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]

     resource_type: str | None = None
-    resource_model: Optional[type] = None
+    resource_model: type | None = None
     resource_id_field: str | None = None
     token_prefix: str | None = None
     max_keys = 10

@@ -103,7 +101,7 @@ class BaseApiKeyResource(Resource):
     method_decorators = [account_initialization_required, login_required, setup_required]

     resource_type: str | None = None
-    resource_model: Optional[type] = None
+    resource_model: type | None = None
     resource_id_field: str | None = None

     def delete(self, resource_id, api_key_id):

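As the surrounding context lines show, `resource_type`, `resource_id_field`, and `token_prefix` had already been migrated, so these hunks only convert the remaining `Optional[type]` annotations. Class-level attributes use the same union spelling as the pydantic fields above; a quick sketch (hypothetical class, not from this diff):

```python
# Sketch: the same union syntax on plain class attributes.
class BaseKeyedResource:
    resource_type: str | None = None
    resource_model: type | None = None  # was Optional[type]
    resource_id_field: str | None = None
    max_keys = 10
```
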
@@ -4,13 +4,13 @@ from collections.abc import Sequence
 from typing import cast

 from flask import abort, request
-from flask_restx import Resource, inputs, marshal_with, reqparse
+from flask_restx import Resource, fields, inputs, marshal_with, reqparse
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import Forbidden, InternalServerError, NotFound

 import services
 from configs import dify_config
-from controllers.console import api
+from controllers.console import api, console_ns
 from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
 from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import account_initialization_required, setup_required

@@ -57,7 +57,13 @@ def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequenc
     return file_objs


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft")
 class DraftWorkflowApi(Resource):
+    @api.doc("get_draft_workflow")
+    @api.doc(description="Get draft workflow for an application")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Draft workflow retrieved successfully", workflow_fields)
+    @api.response(404, "Draft workflow not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -86,6 +92,23 @@ class DraftWorkflowApi(Resource):
     @login_required
     @account_initialization_required
     @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
+    @api.doc("sync_draft_workflow")
+    @api.doc(description="Sync draft workflow configuration")
+    @api.expect(
+        api.model(
+            "SyncDraftWorkflowRequest",
+            {
+                "graph": fields.Raw(required=True, description="Workflow graph configuration"),
+                "features": fields.Raw(required=True, description="Workflow features configuration"),
+                "hash": fields.String(description="Workflow hash for validation"),
+                "environment_variables": fields.List(fields.Raw, required=True, description="Environment variables"),
+                "conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
+            },
+        )
+    )
+    @api.response(200, "Draft workflow synced successfully", workflow_fields)
+    @api.response(400, "Invalid workflow configuration")
+    @api.response(403, "Permission denied")
     def post(self, app_model: App):
         """
         Sync draft workflow

@@ -159,7 +182,25 @@ class DraftWorkflowApi(Resource):
         }


+@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/run")
 class AdvancedChatDraftWorkflowRunApi(Resource):
+    @api.doc("run_advanced_chat_draft_workflow")
+    @api.doc(description="Run draft workflow for advanced chat application")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.expect(
+        api.model(
+            "AdvancedChatWorkflowRunRequest",
+            {
+                "query": fields.String(required=True, description="User query"),
+                "inputs": fields.Raw(description="Input variables"),
+                "files": fields.List(fields.Raw, description="File uploads"),
+                "conversation_id": fields.String(description="Conversation ID"),
+            },
+        )
+    )
+    @api.response(200, "Workflow run started successfully")
+    @api.response(400, "Invalid request parameters")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -208,7 +249,23 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
         raise InternalServerError()


+@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run")
 class AdvancedChatDraftRunIterationNodeApi(Resource):
+    @api.doc("run_advanced_chat_draft_iteration_node")
+    @api.doc(description="Run draft workflow iteration node for advanced chat")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.expect(
+        api.model(
+            "IterationNodeRunRequest",
+            {
+                "task_id": fields.String(required=True, description="Task ID"),
+                "inputs": fields.Raw(description="Input variables"),
+            },
+        )
+    )
+    @api.response(200, "Iteration node run started successfully")
+    @api.response(403, "Permission denied")
+    @api.response(404, "Node not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -244,7 +301,23 @@ class AdvancedChatDraftRunIterationNodeApi(Resource):
         raise InternalServerError()


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
 class WorkflowDraftRunIterationNodeApi(Resource):
+    @api.doc("run_workflow_draft_iteration_node")
+    @api.doc(description="Run draft workflow iteration node")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.expect(
+        api.model(
+            "WorkflowIterationNodeRunRequest",
+            {
+                "task_id": fields.String(required=True, description="Task ID"),
+                "inputs": fields.Raw(description="Input variables"),
+            },
+        )
+    )
+    @api.response(200, "Workflow iteration node run started successfully")
+    @api.response(403, "Permission denied")
+    @api.response(404, "Node not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -280,7 +353,23 @@ class WorkflowDraftRunIterationNodeApi(Resource):
         raise InternalServerError()


+@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run")
 class AdvancedChatDraftRunLoopNodeApi(Resource):
+    @api.doc("run_advanced_chat_draft_loop_node")
+    @api.doc(description="Run draft workflow loop node for advanced chat")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.expect(
+        api.model(
+            "LoopNodeRunRequest",
+            {
+                "task_id": fields.String(required=True, description="Task ID"),
+                "inputs": fields.Raw(description="Input variables"),
+            },
+        )
+    )
+    @api.response(200, "Loop node run started successfully")
+    @api.response(403, "Permission denied")
+    @api.response(404, "Node not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -317,7 +406,23 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
         raise InternalServerError()


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run")
 class WorkflowDraftRunLoopNodeApi(Resource):
+    @api.doc("run_workflow_draft_loop_node")
+    @api.doc(description="Run draft workflow loop node")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.expect(
+        api.model(
+            "WorkflowLoopNodeRunRequest",
+            {
+                "task_id": fields.String(required=True, description="Task ID"),
+                "inputs": fields.Raw(description="Input variables"),
+            },
+        )
+    )
+    @api.response(200, "Workflow loop node run started successfully")
+    @api.response(403, "Permission denied")
+    @api.response(404, "Node not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -354,7 +459,22 @@ class WorkflowDraftRunLoopNodeApi(Resource):
         raise InternalServerError()


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/run")
 class DraftWorkflowRunApi(Resource):
+    @api.doc("run_draft_workflow")
+    @api.doc(description="Run draft workflow")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.expect(
+        api.model(
+            "DraftWorkflowRunRequest",
+            {
+                "inputs": fields.Raw(required=True, description="Input variables"),
+                "files": fields.List(fields.Raw, description="File uploads"),
+            },
+        )
+    )
+    @api.response(200, "Draft workflow run started successfully")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -393,7 +513,14 @@ class DraftWorkflowRunApi(Resource):
         raise InvokeRateLimitHttpError(ex.description)


+@console_ns.route("/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop")
 class WorkflowTaskStopApi(Resource):
+    @api.doc("stop_workflow_task")
+    @api.doc(description="Stop running workflow task")
+    @api.doc(params={"app_id": "Application ID", "task_id": "Task ID"})
+    @api.response(200, "Task stopped successfully")
+    @api.response(404, "Task not found")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -414,7 +541,22 @@ class WorkflowTaskStopApi(Resource):
         return {"result": "success"}


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run")
 class DraftWorkflowNodeRunApi(Resource):
+    @api.doc("run_draft_workflow_node")
+    @api.doc(description="Run draft workflow node")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.expect(
+        api.model(
+            "DraftWorkflowNodeRunRequest",
+            {
+                "inputs": fields.Raw(description="Input variables"),
+            },
+        )
+    )
+    @api.response(200, "Node run started successfully", workflow_run_node_execution_fields)
+    @api.response(403, "Permission denied")
+    @api.response(404, "Node not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -462,7 +604,13 @@ class DraftWorkflowNodeRunApi(Resource):
         return workflow_node_execution


+@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
 class PublishedWorkflowApi(Resource):
+    @api.doc("get_published_workflow")
+    @api.doc(description="Get published workflow for an application")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Published workflow retrieved successfully", workflow_fields)
+    @api.response(404, "Published workflow not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -534,7 +682,12 @@ class PublishedWorkflowApi(Resource):
         }


+@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs")
 class DefaultBlockConfigsApi(Resource):
+    @api.doc("get_default_block_configs")
+    @api.doc(description="Get default block configurations for workflow")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Default block configurations retrieved successfully")
     @setup_required
     @login_required
     @account_initialization_required

@@ -555,7 +708,13 @@ class DefaultBlockConfigsApi(Resource):
         return workflow_service.get_default_block_configs()


+@console_ns.route("/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>")
 class DefaultBlockConfigApi(Resource):
+    @api.doc("get_default_block_config")
+    @api.doc(description="Get default block configuration by type")
+    @api.doc(params={"app_id": "Application ID", "block_type": "Block type"})
+    @api.response(200, "Default block configuration retrieved successfully")
+    @api.response(404, "Block type not found")
     @setup_required
     @login_required
     @account_initialization_required

@@ -588,7 +747,14 @@ class DefaultBlockConfigApi(Resource):
         return workflow_service.get_default_block_config(node_type=block_type, filters=filters)


+@console_ns.route("/apps/<uuid:app_id>/convert-to-workflow")
 class ConvertToWorkflowApi(Resource):
+    @api.doc("convert_to_workflow")
+    @api.doc(description="Convert application to workflow mode")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Application converted to workflow successfully")
+    @api.response(400, "Application cannot be converted")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -625,9 +791,14 @@ class ConvertToWorkflowApi(Resource):
         }


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/config")
 class WorkflowConfigApi(Resource):
     """Resource for workflow configuration."""

+    @api.doc("get_workflow_config")
+    @api.doc(description="Get workflow configuration")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Workflow configuration retrieved successfully")
     @setup_required
     @login_required
     @account_initialization_required

@@ -638,7 +809,12 @@ class WorkflowConfigApi(Resource):
         }


+@console_ns.route("/apps/<uuid:app_id>/workflows")
 class PublishedAllWorkflowApi(Resource):
+    @api.doc("get_all_published_workflows")
+    @api.doc(description="Get all published workflows for an application")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.response(200, "Published workflows retrieved successfully", workflow_pagination_fields)
     @setup_required
     @login_required
     @account_initialization_required

@@ -689,7 +865,23 @@ class PublishedAllWorkflowApi(Resource):
         }


+@console_ns.route("/apps/<uuid:app_id>/workflows/<string:workflow_id>")
 class WorkflowByIdApi(Resource):
+    @api.doc("update_workflow_by_id")
+    @api.doc(description="Update workflow by ID")
+    @api.doc(params={"app_id": "Application ID", "workflow_id": "Workflow ID"})
+    @api.expect(
+        api.model(
+            "UpdateWorkflowRequest",
+            {
+                "environment_variables": fields.List(fields.Raw, description="Environment variables"),
+                "conversation_variables": fields.List(fields.Raw, description="Conversation variables"),
+            },
+        )
+    )
+    @api.response(200, "Workflow updated successfully", workflow_fields)
+    @api.response(404, "Workflow not found")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -780,7 +972,14 @@ class WorkflowByIdApi(Resource):
         return None, 204


+@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run")
 class DraftWorkflowNodeLastRunApi(Resource):
+    @api.doc("get_draft_workflow_node_last_run")
+    @api.doc(description="Get last run result for draft workflow node")
+    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
+    @api.response(200, "Node last run retrieved successfully", workflow_run_node_execution_fields)
+    @api.response(404, "Node last run not found")
+    @api.response(403, "Permission denied")
     @setup_required
     @login_required
     @account_initialization_required

@@ -799,73 +998,3 @@ class DraftWorkflowNodeLastRunApi(Resource):
         if node_exec is None:
             raise NotFound("last run not found")
         return node_exec
-
-
-api.add_resource(
-    DraftWorkflowApi,
-    "/apps/<uuid:app_id>/workflows/draft",
-)
-api.add_resource(
-    WorkflowConfigApi,
-    "/apps/<uuid:app_id>/workflows/draft/config",
-)
-api.add_resource(
-    AdvancedChatDraftWorkflowRunApi,
-    "/apps/<uuid:app_id>/advanced-chat/workflows/draft/run",
-)
-api.add_resource(
-    DraftWorkflowRunApi,
-    "/apps/<uuid:app_id>/workflows/draft/run",
-)
-api.add_resource(
-    WorkflowTaskStopApi,
-    "/apps/<uuid:app_id>/workflow-runs/tasks/<string:task_id>/stop",
-)
-api.add_resource(
-    DraftWorkflowNodeRunApi,
-    "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/run",
-)
-api.add_resource(
-    AdvancedChatDraftRunIterationNodeApi,
-    "/apps/<uuid:app_id>/advanced-chat/workflows/draft/iteration/nodes/<string:node_id>/run",
-)
-api.add_resource(
-    WorkflowDraftRunIterationNodeApi,
-    "/apps/<uuid:app_id>/workflows/draft/iteration/nodes/<string:node_id>/run",
-)
-api.add_resource(
-    AdvancedChatDraftRunLoopNodeApi,
-    "/apps/<uuid:app_id>/advanced-chat/workflows/draft/loop/nodes/<string:node_id>/run",
-)
-api.add_resource(
-    WorkflowDraftRunLoopNodeApi,
-    "/apps/<uuid:app_id>/workflows/draft/loop/nodes/<string:node_id>/run",
-)
-api.add_resource(
-    PublishedWorkflowApi,
-    "/apps/<uuid:app_id>/workflows/publish",
-)
-api.add_resource(
-    PublishedAllWorkflowApi,
-    "/apps/<uuid:app_id>/workflows",
-)
-api.add_resource(
-    DefaultBlockConfigsApi,
-    "/apps/<uuid:app_id>/workflows/default-workflow-block-configs",
-)
-api.add_resource(
-    DefaultBlockConfigApi,
-    "/apps/<uuid:app_id>/workflows/default-workflow-block-configs/<string:block_type>",
-)
-api.add_resource(
-    ConvertToWorkflowApi,
-    "/apps/<uuid:app_id>/convert-to-workflow",
-)
-api.add_resource(
-    WorkflowByIdApi,
-    "/apps/<uuid:app_id>/workflows/<string:workflow_id>",
-)
-api.add_resource(
-    DraftWorkflowNodeLastRunApi,
-    "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run",
-)

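The pattern repeated throughout this file: each resource class gains a `@console_ns.route(...)` decorator plus OpenAPI metadata (`@api.doc`, `@api.expect`, `@api.response`), and the block of `api.add_resource(...)` calls at the bottom of the module is deleted, so the URL now lives next to the class it serves and the swagger document picks up the annotations. A minimal flask-restx sketch contrasting the two registration styles (the `PingApi` resource is hypothetical):

```python
from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", path="/console")


@console_ns.route("/ping")  # new style: route declared on the class
class PingApi(Resource):
    @console_ns.doc("ping")
    @console_ns.response(200, "pong returned successfully")
    def get(self):
        return {"result": "pong"}


api.add_namespace(console_ns)
# old style, removed by this diff:
# api.add_resource(PingApi, "/console/ping")
```
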
@@ -3,7 +3,7 @@ from flask_restx import Resource, marshal_with, reqparse
 from flask_restx.inputs import int_range
 from sqlalchemy.orm import Session

-from controllers.console import api
+from controllers.console import api, console_ns
 from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.workflow.entities.workflow_execution import WorkflowExecutionStatus

@@ -15,7 +15,24 @@ from models.model import AppMode
 from services.workflow_app_service import WorkflowAppService


+@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
 class WorkflowAppLogApi(Resource):
+    @api.doc("get_workflow_app_logs")
+    @api.doc(description="Get workflow application execution logs")
+    @api.doc(params={"app_id": "Application ID"})
+    @api.doc(
+        params={
+            "keyword": "Search keyword for filtering logs",
+            "status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)",
+            "created_at__before": "Filter logs created before this timestamp",
+            "created_at__after": "Filter logs created after this timestamp",
+            "created_by_end_user_session_id": "Filter by end user session ID",
+            "created_by_account": "Filter by account",
+            "page": "Page number (1-99999)",
+            "limit": "Number of items per page (1-100)",
+        }
+    )
+    @api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields)
     @setup_required
     @login_required
     @account_initialization_required

@@ -78,6 +95,3 @@ class WorkflowAppLogApi(Resource):
         )

         return workflow_app_log_pagination
-
-
-api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")

@@ -6,7 +6,7 @@ from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqpars
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden

from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.error import (
    DraftWorkflowNotExist,
)

@@ -144,7 +144,13 @@ def _api_prerequisite(f):
    return wrapper


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class WorkflowVariableCollectionApi(Resource):
    @api.doc("get_workflow_variables")
    @api.doc(description="Get draft workflow variables")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
    @api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
    def get(self, app_model: App):

@@ -173,6 +179,9 @@ class WorkflowVariableCollectionApi(Resource):

        return workflow_vars

    @api.doc("delete_workflow_variables")
    @api.doc(description="Delete all draft workflow variables")
    @api.response(204, "Workflow variables deleted successfully")
    @_api_prerequisite
    def delete(self, app_model: App):
        draft_var_srv = WorkflowDraftVariableService(

@@ -201,7 +210,12 @@ def validate_node_id(node_id: str) -> NoReturn | None:
    return None


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
class NodeVariableCollectionApi(Resource):
    @api.doc("get_node_variables")
    @api.doc(description="Get variables for a specific node")
    @api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
    @api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    def get(self, app_model: App, node_id: str):

@@ -214,6 +228,9 @@ class NodeVariableCollectionApi(Resource):

        return node_vars

    @api.doc("delete_node_variables")
    @api.doc(description="Delete all variables for a specific node")
    @api.response(204, "Node variables deleted successfully")
    @_api_prerequisite
    def delete(self, app_model: App, node_id: str):
        validate_node_id(node_id)

@@ -223,10 +240,16 @@ class NodeVariableCollectionApi(Resource):
        return Response("", 204)


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
class VariableApi(Resource):
    _PATCH_NAME_FIELD = "name"
    _PATCH_VALUE_FIELD = "value"

    @api.doc("get_variable")
    @api.doc(description="Get a specific workflow variable")
    @api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
    @api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
    @api.response(404, "Variable not found")
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
    def get(self, app_model: App, variable_id: str):

@@ -240,6 +263,19 @@ class VariableApi(Resource):
            raise NotFoundError(description=f"variable not found, id={variable_id}")
        return variable

    @api.doc("update_variable")
    @api.doc(description="Update a workflow variable")
    @api.expect(
        api.model(
            "UpdateVariableRequest",
            {
                "name": fields.String(description="Variable name"),
                "value": fields.Raw(description="Variable value"),
            },
        )
    )
    @api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
    @api.response(404, "Variable not found")
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
    def patch(self, app_model: App, variable_id: str):

@@ -302,6 +338,10 @@ class VariableApi(Resource):
        db.session.commit()
        return variable

    @api.doc("delete_variable")
    @api.doc(description="Delete a workflow variable")
    @api.response(204, "Variable deleted successfully")
    @api.response(404, "Variable not found")
    @_api_prerequisite
    def delete(self, app_model: App, variable_id: str):
        draft_var_srv = WorkflowDraftVariableService(

@@ -317,7 +357,14 @@ class VariableApi(Resource):
        return Response("", 204)


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class VariableResetApi(Resource):
    @api.doc("reset_variable")
    @api.doc(description="Reset a workflow variable to its default value")
    @api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
    @api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
    @api.response(204, "Variable reset (no content)")
    @api.response(404, "Variable not found")
    @_api_prerequisite
    def put(self, app_model: App, variable_id: str):
        draft_var_srv = WorkflowDraftVariableService(

@@ -358,7 +405,13 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
    return draft_vars


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/conversation-variables")
class ConversationVariableCollectionApi(Resource):
    @api.doc("get_conversation_variables")
    @api.doc(description="Get conversation variables for workflow")
    @api.doc(params={"app_id": "Application ID"})
    @api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    @api.response(404, "Draft workflow not found")
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    def get(self, app_model: App):

@@ -374,14 +427,25 @@ class ConversationVariableCollectionApi(Resource):
        return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
    @api.doc("get_system_variables")
    @api.doc(description="Get system variables for workflow")
    @api.doc(params={"app_id": "Application ID"})
    @api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    @_api_prerequisite
    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
    def get(self, app_model: App):
        return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/environment-variables")
class EnvironmentVariableCollectionApi(Resource):
    @api.doc("get_environment_variables")
    @api.doc(description="Get environment variables for workflow")
    @api.doc(params={"app_id": "Application ID"})
    @api.response(200, "Environment variables retrieved successfully")
    @api.response(404, "Draft workflow not found")
    @_api_prerequisite
    def get(self, app_model: App):
        """

@@ -413,16 +477,3 @@ class EnvironmentVariableCollectionApi(Resource):
        )

        return {"items": env_vars_list}


api.add_resource(
    WorkflowVariableCollectionApi,
    "/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")

api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")
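Throughout this file the documentation decorators are layered over the existing serialization: `@api.response` only describes the schema in Swagger, while `@marshal_with` is what actually filters the returned object. A hedged sketch of how the two interact (field names invented for illustration):

```python
from flask_restx import Api, Namespace, Resource, fields, marshal_with

api = Api()
ns = Namespace("vars", path="/")

variable_fields = api.model(
    "Variable",
    {
        "id": fields.String,
        "name": fields.String,
    },
)


@ns.route("/variables/<string:variable_id>")
class VariableApi(Resource):
    @ns.doc("get_variable")
    @ns.response(200, "Variable retrieved successfully", variable_fields)
    @ns.response(404, "Variable not found")
    @marshal_with(variable_fields)  # filters the dict down to the declared fields
    def get(self, variable_id: str):
        # "extra" is dropped by marshal_with; only id and name are returned.
        return {"id": variable_id, "name": "example", "extra": "dropped"}


api.add_namespace(ns)
```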
@@ -4,7 +4,7 @@ from flask_login import current_user
from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range

from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.workflow_run_fields import (

@@ -19,7 +19,13 @@ from models import Account, App, AppMode, EndUser
from services.workflow_run_service import WorkflowRunService


@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
    @api.doc("get_advanced_chat_workflow_runs")
    @api.doc(description="Get advanced chat workflow run list")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
    @api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
    @setup_required
    @login_required
    @account_initialization_required

@@ -40,7 +46,13 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
        return result


@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
class WorkflowRunListApi(Resource):
    @api.doc("get_workflow_runs")
    @api.doc(description="Get workflow run list")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
    @api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
    @setup_required
    @login_required
    @account_initialization_required

@@ -61,7 +73,13 @@ class WorkflowRunListApi(Resource):
        return result


@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
class WorkflowRunDetailApi(Resource):
    @api.doc("get_workflow_run_detail")
    @api.doc(description="Get workflow run detail")
    @api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
    @api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields)
    @api.response(404, "Workflow run not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -79,7 +97,13 @@ class WorkflowRunDetailApi(Resource):
        return workflow_run


@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
class WorkflowRunNodeExecutionListApi(Resource):
    @api.doc("get_workflow_run_node_executions")
    @api.doc(description="Get workflow run node execution list")
    @api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
    @api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields)
    @api.response(404, "Workflow run not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -100,9 +124,3 @@ class WorkflowRunNodeExecutionListApi(Resource):
        )

        return {"data": node_executions}


api.add_resource(AdvancedChatAppWorkflowRunListApi, "/apps/<uuid:app_id>/advanced-chat/workflow-runs")
api.add_resource(WorkflowRunListApi, "/apps/<uuid:app_id>/workflow-runs")
api.add_resource(WorkflowRunDetailApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
api.add_resource(WorkflowRunNodeExecutionListApi, "/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
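These run-list endpoints document `last_id`/`limit` pagination, and the file imports `int_range` from `flask_restx.inputs` for exactly this kind of bounds check. A small sketch of how such parsing is typically wired up (the argument names mirror the documented params; this is an assumption about the handler, not a verbatim copy):

```python
from flask_restx import reqparse
from flask_restx.inputs import int_range

parser = reqparse.RequestParser()
# Cursor-style pagination: the client passes the last run ID it has seen.
parser.add_argument("last_id", type=str, location="args")
# int_range(1, 100) rejects out-of-range values with a 400 before the view runs.
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")

# Inside a Resource method this would be used as: args = parser.parse_args()
```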
@@ -7,7 +7,7 @@ from flask import jsonify
from flask_login import current_user
from flask_restx import Resource, reqparse

from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db

@@ -17,7 +17,13 @@ from models.enums import WorkflowRunTriggeredFrom
from models.model import AppMode


@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
class WorkflowDailyRunsStatistic(Resource):
    @api.doc("get_workflow_daily_runs_statistic")
    @api.doc(description="Get workflow daily runs statistics")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
    @api.response(200, "Daily runs statistics retrieved successfully")
    @get_app_model
    @setup_required
    @login_required

@@ -79,7 +85,13 @@ WHERE
        return jsonify({"data": response_data})


@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
class WorkflowDailyTerminalsStatistic(Resource):
    @api.doc("get_workflow_daily_terminals_statistic")
    @api.doc(description="Get workflow daily terminals statistics")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
    @api.response(200, "Daily terminals statistics retrieved successfully")
    @get_app_model
    @setup_required
    @login_required

@@ -141,7 +153,13 @@ WHERE
        return jsonify({"data": response_data})


@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/token-costs")
class WorkflowDailyTokenCostStatistic(Resource):
    @api.doc("get_workflow_daily_token_cost_statistic")
    @api.doc(description="Get workflow daily token cost statistics")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
    @api.response(200, "Daily token cost statistics retrieved successfully")
    @get_app_model
    @setup_required
    @login_required

@@ -208,7 +226,13 @@ WHERE
        return jsonify({"data": response_data})


@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/average-app-interactions")
class WorkflowAverageAppInteractionStatistic(Resource):
    @api.doc("get_workflow_average_app_interaction_statistic")
    @api.doc(description="Get workflow average app interaction statistics")
    @api.doc(params={"app_id": "Application ID"})
    @api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
    @api.response(200, "Average app interaction statistics retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -285,11 +309,3 @@ GROUP BY
        )

        return jsonify({"data": response_data})


api.add_resource(WorkflowDailyRunsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
api.add_resource(WorkflowDailyTerminalsStatistic, "/apps/<uuid:app_id>/workflow/statistics/daily-terminals")
api.add_resource(WorkflowDailyTokenCostStatistic, "/apps/<uuid:app_id>/workflow/statistics/token-costs")
api.add_resource(
    WorkflowAverageAppInteractionStatistic, "/apps/<uuid:app_id>/workflow/statistics/average-app-interactions"
)
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import Optional, ParamSpec, TypeVar, Union
from typing import ParamSpec, TypeVar, Union

from controllers.console.app.error import AppNotFoundError
from extensions.ext_database import db

@@ -12,7 +12,7 @@ P = ParamSpec("P")
R = TypeVar("R")


def _load_app_model(app_id: str) -> Optional[App]:
def _load_app_model(app_id: str) -> App | None:
    assert isinstance(current_user, Account)
    app_model = (
        db.session.query(App)

@@ -22,7 +22,7 @@ def _load_app_model(app_id: str) -> Optional[App]:
    return app_model


def get_app_model(view: Optional[Callable[P, R]] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
    def decorator(view_func: Callable[P, R]):
        @wraps(view_func)
        def decorated_view(*args: P.args, **kwargs: P.kwargs):
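The typing hunks here are purely syntactic: `Optional[X]` and `X | None` denote the same type, so call sites are unaffected. A quick check of the equivalence on Python 3.10+ (example functions are invented for illustration):

```python
from typing import Optional

# PEP 604 unions compare equal to their typing-module spellings.
assert (int | None) == Optional[int]


def load_old(key: str) -> Optional[str]:  # before
    return key


def load_new(key: str) -> str | None:  # after: same runtime behavior, same type
    return key
```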
@@ -1,5 +1,4 @@
import logging
from typing import Optional

import requests
from flask import current_app, redirect, request

@@ -157,8 +156,8 @@ class OAuthCallback(Resource):
    )


def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Optional[Account]:
    account: Optional[Account] = Account.get_by_openid(provider, user_info.id)
def _get_account_by_openid_or_email(provider: str, user_info: OAuthUserInfo) -> Account | None:
    account: Account | None = Account.get_by_openid(provider, user_info.id)

    if not account:
        with Session(db.engine) as session:
@@ -1,13 +1,13 @@
import flask_restx
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, marshal_with, reqparse
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import select
from werkzeug.exceptions import Forbidden, NotFound

import services
from configs import dify_config
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError

@@ -48,7 +48,21 @@ def _validate_description_length(description):
    return description


@console_ns.route("/datasets")
class DatasetListApi(Resource):
    @api.doc("get_datasets")
    @api.doc(description="Get list of datasets")
    @api.doc(
        params={
            "page": "Page number (default: 1)",
            "limit": "Number of items per page (default: 20)",
            "ids": "Filter by dataset IDs (list)",
            "keyword": "Search keyword",
            "tag_ids": "Filter by tag IDs (list)",
            "include_all": "Include all datasets (default: false)",
        }
    )
    @api.response(200, "Datasets retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -100,6 +114,24 @@ class DatasetListApi(Resource):
        response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
        return response, 200

    @api.doc("create_dataset")
    @api.doc(description="Create a new dataset")
    @api.expect(
        api.model(
            "CreateDatasetRequest",
            {
                "name": fields.String(required=True, description="Dataset name (1-40 characters)"),
                "description": fields.String(description="Dataset description (max 400 characters)"),
                "indexing_technique": fields.String(description="Indexing technique"),
                "permission": fields.String(description="Dataset permission"),
                "provider": fields.String(description="Provider"),
                "external_knowledge_api_id": fields.String(description="External knowledge API ID"),
                "external_knowledge_id": fields.String(description="External knowledge ID"),
            },
        )
    )
    @api.response(201, "Dataset created successfully")
    @api.response(400, "Invalid request parameters")
    @setup_required
    @login_required
    @account_initialization_required

@@ -172,7 +204,14 @@ class DatasetListApi(Resource):
        return marshal(dataset, dataset_detail_fields), 201


@console_ns.route("/datasets/<uuid:dataset_id>")
class DatasetApi(Resource):
    @api.doc("get_dataset")
    @api.doc(description="Get dataset details")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Dataset retrieved successfully", dataset_detail_fields)
    @api.response(404, "Dataset not found")
    @api.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required

@@ -215,6 +254,23 @@ class DatasetApi(Resource):

        return data, 200

    @api.doc("update_dataset")
    @api.doc(description="Update dataset details")
    @api.expect(
        api.model(
            "UpdateDatasetRequest",
            {
                "name": fields.String(description="Dataset name"),
                "description": fields.String(description="Dataset description"),
                "permission": fields.String(description="Dataset permission"),
                "indexing_technique": fields.String(description="Indexing technique"),
                "external_retrieval_model": fields.Raw(description="External retrieval model settings"),
            },
        )
    )
    @api.response(200, "Dataset updated successfully", dataset_detail_fields)
    @api.response(404, "Dataset not found")
    @api.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required

@@ -344,7 +400,12 @@ class DatasetApi(Resource):
            raise DatasetInUseError()


@console_ns.route("/datasets/<uuid:dataset_id>/use-check")
class DatasetUseCheckApi(Resource):
    @api.doc("check_dataset_use")
    @api.doc(description="Check if dataset is in use")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Dataset use status retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -355,7 +416,12 @@ class DatasetUseCheckApi(Resource):
        return {"is_using": dataset_is_using}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/queries")
class DatasetQueryApi(Resource):
    @api.doc("get_dataset_queries")
    @api.doc(description="Get dataset query history")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Query history retrieved successfully", dataset_query_detail_fields)
    @setup_required
    @login_required
    @account_initialization_required

@@ -385,7 +451,11 @@ class DatasetQueryApi(Resource):
        return response, 200


@console_ns.route("/datasets/indexing-estimate")
class DatasetIndexingEstimateApi(Resource):
    @api.doc("estimate_dataset_indexing")
    @api.doc(description="Estimate dataset indexing cost")
    @api.response(200, "Indexing estimate calculated successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -486,7 +556,12 @@ class DatasetIndexingEstimateApi(Resource):
        return response.model_dump(), 200


@console_ns.route("/datasets/<uuid:dataset_id>/related-apps")
class DatasetRelatedAppListApi(Resource):
    @api.doc("get_dataset_related_apps")
    @api.doc(description="Get applications related to dataset")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Related apps retrieved successfully", related_app_list)
    @setup_required
    @login_required
    @account_initialization_required

@@ -513,7 +588,12 @@ class DatasetRelatedAppListApi(Resource):
        return {"data": related_apps, "total": len(related_apps)}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/indexing-status")
class DatasetIndexingStatusApi(Resource):
    @api.doc("get_dataset_indexing_status")
    @api.doc(description="Get dataset indexing status")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Indexing status retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -560,11 +640,15 @@ class DatasetIndexingStatusApi(Resource):
        return data, 200


@console_ns.route("/datasets/api-keys")
class DatasetApiKeyApi(Resource):
    max_keys = 10
    token_prefix = "dataset-"
    resource_type = "dataset"

    @api.doc("get_dataset_api_keys")
    @api.doc(description="Get dataset API keys")
    @api.response(200, "API keys retrieved successfully", api_key_list)
    @setup_required
    @login_required
    @account_initialization_required

@@ -609,9 +693,14 @@ class DatasetApiKeyApi(Resource):
        return api_token, 200


@console_ns.route("/datasets/api-keys/<uuid:api_key_id>")
class DatasetApiDeleteApi(Resource):
    resource_type = "dataset"

    @api.doc("delete_dataset_api_key")
    @api.doc(description="Delete dataset API key")
    @api.doc(params={"api_key_id": "API key ID"})
    @api.response(204, "API key deleted successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -641,7 +730,11 @@ class DatasetApiDeleteApi(Resource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/api-base-info")
class DatasetApiBaseUrlApi(Resource):
    @api.doc("get_dataset_api_base_info")
    @api.doc(description="Get dataset API base information")
    @api.response(200, "API base info retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -649,7 +742,11 @@ class DatasetApiBaseUrlApi(Resource):
        return {"api_base_url": (dify_config.SERVICE_API_URL or request.host_url.rstrip("/")) + "/v1"}


@console_ns.route("/datasets/retrieval-setting")
class DatasetRetrievalSettingApi(Resource):
    @api.doc("get_dataset_retrieval_setting")
    @api.doc(description="Get dataset retrieval settings")
    @api.response(200, "Retrieval settings retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -700,7 +797,12 @@ class DatasetRetrievalSettingApi(Resource):
                raise ValueError(f"Unsupported vector db type {vector_type}.")


@console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
class DatasetRetrievalSettingMockApi(Resource):
    @api.doc("get_dataset_retrieval_setting_mock")
    @api.doc(description="Get mock dataset retrieval settings by vector type")
    @api.doc(params={"vector_type": "Vector store type"})
    @api.response(200, "Mock retrieval settings retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -749,7 +851,13 @@ class DatasetRetrievalSettingMockApi(Resource):
                raise ValueError(f"Unsupported vector db type {vector_type}.")


@console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
class DatasetErrorDocs(Resource):
    @api.doc("get_dataset_error_docs")
    @api.doc(description="Get dataset error documents")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Error documents retrieved successfully")
    @api.response(404, "Dataset not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -763,7 +871,14 @@ class DatasetErrorDocs(Resource):
        return {"data": [marshal(item, document_status_fields) for item in results], "total": len(results)}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/permission-part-users")
class DatasetPermissionUserListApi(Resource):
    @api.doc("get_dataset_permission_users")
    @api.doc(description="Get dataset permission user list")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Permission users retrieved successfully")
    @api.response(404, "Dataset not found")
    @api.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required

@@ -784,7 +899,13 @@ class DatasetPermissionUserListApi(Resource):
        }, 200


@console_ns.route("/datasets/<uuid:dataset_id>/auto-disable-logs")
class DatasetAutoDisableLogApi(Resource):
    @api.doc("get_dataset_auto_disable_logs")
    @api.doc(description="Get dataset auto disable logs")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.response(200, "Auto disable logs retrieved successfully")
    @api.response(404, "Dataset not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -794,20 +915,3 @@ class DatasetAutoDisableLogApi(Resource):
        if dataset is None:
            raise NotFound("Dataset not found.")
        return DatasetService.get_dataset_auto_disable_logs(dataset_id_str), 200


api.add_resource(DatasetListApi, "/datasets")
api.add_resource(DatasetApi, "/datasets/<uuid:dataset_id>")
api.add_resource(DatasetUseCheckApi, "/datasets/<uuid:dataset_id>/use-check")
api.add_resource(DatasetQueryApi, "/datasets/<uuid:dataset_id>/queries")
api.add_resource(DatasetErrorDocs, "/datasets/<uuid:dataset_id>/error-docs")
api.add_resource(DatasetIndexingEstimateApi, "/datasets/indexing-estimate")
api.add_resource(DatasetRelatedAppListApi, "/datasets/<uuid:dataset_id>/related-apps")
api.add_resource(DatasetIndexingStatusApi, "/datasets/<uuid:dataset_id>/indexing-status")
api.add_resource(DatasetApiKeyApi, "/datasets/api-keys")
api.add_resource(DatasetApiDeleteApi, "/datasets/api-keys/<uuid:api_key_id>")
api.add_resource(DatasetApiBaseUrlApi, "/datasets/api-base-info")
api.add_resource(DatasetRetrievalSettingApi, "/datasets/retrieval-setting")
api.add_resource(DatasetRetrievalSettingMockApi, "/datasets/retrieval-setting/<string:vector_type>")
api.add_resource(DatasetPermissionUserListApi, "/datasets/<uuid:dataset_id>/permission-part-users")
api.add_resource(DatasetAutoDisableLogApi, "/datasets/<uuid:dataset_id>/auto-disable-logs")
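The dataset endpoints above gain `@api.expect(api.model(...))` blocks, which declare a named request schema once and attach it to the handler so Swagger can render the expected JSON body. A self-contained sketch of the pattern (a simplified two-field model, not the full CreateDatasetRequest):

```python
from flask_restx import Api, Namespace, Resource, fields

api = Api()
ns = Namespace("datasets", path="/")

create_request = api.model(
    "CreateDatasetRequest",
    {
        "name": fields.String(required=True, description="Dataset name (1-40 characters)"),
        "description": fields.String(description="Dataset description"),
    },
)


@ns.route("/datasets")
class DatasetListApi(Resource):
    @ns.expect(create_request)  # documents (and can validate) the request body
    @ns.response(201, "Dataset created successfully")
    def post(self):
        return {"result": "success"}, 201


api.add_namespace(ns)
```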
@@ -5,12 +5,12 @@ from typing import Literal, cast

from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, marshal_with, reqparse
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound

import services
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.app.error import (
    ProviderModelCurrentlyNotSupportError,
    ProviderNotInitializeError,

@@ -98,7 +98,12 @@ class DocumentResource(Resource):
        return documents


@console_ns.route("/datasets/process-rule")
class GetProcessRuleApi(Resource):
    @api.doc("get_process_rule")
    @api.doc(description="Get dataset document processing rules")
    @api.doc(params={"document_id": "Document ID (optional)"})
    @api.response(200, "Process rules retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -140,7 +145,21 @@ class GetProcessRuleApi(Resource):
        return {"mode": mode, "rules": rules, "limits": limits}


@console_ns.route("/datasets/<uuid:dataset_id>/documents")
class DatasetDocumentListApi(Resource):
    @api.doc("get_dataset_documents")
    @api.doc(description="Get documents in a dataset")
    @api.doc(
        params={
            "dataset_id": "Dataset ID",
            "page": "Page number (default: 1)",
            "limit": "Number of items per page (default: 20)",
            "keyword": "Search keyword",
            "sort": "Sort order (default: -created_at)",
            "fetch": "Fetch full details (default: false)",
        }
    )
    @api.response(200, "Documents retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -324,7 +343,23 @@ class DatasetDocumentListApi(Resource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/init")
class DatasetInitApi(Resource):
    @api.doc("init_dataset")
    @api.doc(description="Initialize dataset with documents")
    @api.expect(
        api.model(
            "DatasetInitRequest",
            {
                "upload_file_id": fields.String(required=True, description="Upload file ID"),
                "indexing_technique": fields.String(description="Indexing technique"),
                "process_rule": fields.Raw(description="Processing rules"),
                "data_source": fields.Raw(description="Data source configuration"),
            },
        )
    )
    @api.response(201, "Dataset initialized successfully", dataset_and_document_fields)
    @api.response(400, "Invalid request parameters")
    @setup_required
    @login_required
    @account_initialization_required

@@ -394,7 +429,14 @@ class DatasetInitApi(Resource):
        return response


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate")
class DocumentIndexingEstimateApi(DocumentResource):
    @api.doc("estimate_document_indexing")
    @api.doc(description="Estimate document indexing cost")
    @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
    @api.response(200, "Indexing estimate calculated successfully")
    @api.response(404, "Document not found")
    @api.response(400, "Document already finished")
    @setup_required
    @login_required
    @account_initialization_required

@@ -457,6 +499,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
        return response, 200


@console_ns.route("/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
class DocumentBatchIndexingEstimateApi(DocumentResource):
    @setup_required
    @login_required

@@ -549,6 +592,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
            raise IndexingEstimateError(str(e))


@console_ns.route("/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
class DocumentBatchIndexingStatusApi(DocumentResource):
    @setup_required
    @login_required

@@ -593,7 +637,13 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
        return data


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
class DocumentIndexingStatusApi(DocumentResource):
    @api.doc("get_document_indexing_status")
    @api.doc(description="Get document indexing status")
    @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
    @api.response(200, "Indexing status retrieved successfully")
    @api.response(404, "Document not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -635,9 +685,21 @@ class DocumentIndexingStatusApi(DocumentResource):
        return marshal(document_dict, document_status_fields)


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
class DocumentApi(DocumentResource):
    METADATA_CHOICES = {"all", "only", "without"}

    @api.doc("get_document")
    @api.doc(description="Get document details")
    @api.doc(
        params={
            "dataset_id": "Dataset ID",
            "document_id": "Document ID",
            "metadata": "Metadata inclusion (all/only/without)",
        }
    )
    @api.response(200, "Document retrieved successfully")
    @api.response(404, "Document not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -746,7 +808,16 @@ class DocumentApi(DocumentResource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>")
class DocumentProcessingApi(DocumentResource):
    @api.doc("update_document_processing")
    @api.doc(description="Update document processing status (pause/resume)")
    @api.doc(
        params={"dataset_id": "Dataset ID", "document_id": "Document ID", "action": "Action to perform (pause/resume)"}
    )
    @api.response(200, "Processing status updated successfully")
    @api.response(404, "Document not found")
    @api.response(400, "Invalid action")
    @setup_required
    @login_required
    @account_initialization_required

@@ -781,7 +852,23 @@ class DocumentProcessingApi(DocumentResource):
        return {"result": "success"}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
class DocumentMetadataApi(DocumentResource):
    @api.doc("update_document_metadata")
    @api.doc(description="Update document metadata")
    @api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
    @api.expect(
        api.model(
            "UpdateDocumentMetadataRequest",
            {
                "doc_type": fields.String(description="Document type"),
                "doc_metadata": fields.Raw(description="Document metadata"),
            },
        )
    )
    @api.response(200, "Document metadata updated successfully")
    @api.response(404, "Document not found")
    @api.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required

@@ -825,6 +912,7 @@ class DocumentMetadataApi(DocumentResource):
        return {"result": "success", "message": "Document metadata updated."}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
class DocumentStatusApi(DocumentResource):
    @setup_required
    @login_required

@@ -861,6 +949,7 @@ class DocumentStatusApi(DocumentResource):
        return {"result": "success"}, 200


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
class DocumentPauseApi(DocumentResource):
    @setup_required
    @login_required

@@ -894,6 +983,7 @@ class DocumentPauseApi(DocumentResource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
class DocumentRecoverApi(DocumentResource):
    @setup_required
    @login_required

@@ -924,6 +1014,7 @@ class DocumentRecoverApi(DocumentResource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/<uuid:dataset_id>/retry")
class DocumentRetryApi(DocumentResource):
    @setup_required
    @login_required

@@ -967,6 +1058,7 @@ class DocumentRetryApi(DocumentResource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")
class DocumentRenameApi(DocumentResource):
    @setup_required
    @login_required

@@ -990,6 +1082,7 @@ class DocumentRenameApi(DocumentResource):
        return document


@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
class WebsiteDocumentSyncApi(DocumentResource):
    @setup_required
    @login_required

@@ -1015,26 +1108,3 @@ class WebsiteDocumentSyncApi(DocumentResource):
        DocumentService.sync_website_document(dataset_id, document)

        return {"result": "success"}, 200


api.add_resource(GetProcessRuleApi, "/datasets/process-rule")
api.add_resource(DatasetDocumentListApi, "/datasets/<uuid:dataset_id>/documents")
api.add_resource(DatasetInitApi, "/datasets/init")
api.add_resource(
    DocumentIndexingEstimateApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate"
)
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(
    DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
)
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
api.add_resource(DocumentRecoverApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/resume")
api.add_resource(DocumentRetryApi, "/datasets/<uuid:dataset_id>/retry")
api.add_resource(DocumentRenameApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/rename")

api.add_resource(WebsiteDocumentSyncApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/website-sync")
@@ -1,10 +1,10 @@
from flask import request
from flask_login import current_user
from flask_restx import Resource, marshal, reqparse
from flask_restx import Resource, fields, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound

import services
from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields

@@ -21,7 +21,18 @@ def _validate_name(name):
    return name


@console_ns.route("/datasets/external-knowledge-api")
class ExternalApiTemplateListApi(Resource):
    @api.doc("get_external_api_templates")
    @api.doc(description="Get external knowledge API templates")
    @api.doc(
        params={
            "page": "Page number (default: 1)",
            "limit": "Number of items per page (default: 20)",
            "keyword": "Search keyword",
        }
    )
    @api.response(200, "External API templates retrieved successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -79,7 +90,13 @@ class ExternalApiTemplateListApi(Resource):
        return external_knowledge_api.to_dict(), 201


@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
class ExternalApiTemplateApi(Resource):
    @api.doc("get_external_api_template")
    @api.doc(description="Get external knowledge API template details")
    @api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
    @api.response(200, "External API template retrieved successfully")
    @api.response(404, "Template not found")
    @setup_required
    @login_required
    @account_initialization_required

@@ -138,7 +155,12 @@ class ExternalApiTemplateApi(Resource):
        return {"result": "success"}, 204


@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
class ExternalApiUseCheckApi(Resource):
    @api.doc("check_external_api_usage")
    @api.doc(description="Check if external knowledge API is being used")
    @api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
    @api.response(200, "Usage check completed successfully")
    @setup_required
    @login_required
    @account_initialization_required

@@ -151,7 +173,24 @@ class ExternalApiUseCheckApi(Resource):
        return {"is_using": external_knowledge_api_is_using, "count": count}, 200


@console_ns.route("/datasets/external")
class ExternalDatasetCreateApi(Resource):
    @api.doc("create_external_dataset")
    @api.doc(description="Create external knowledge dataset")
    @api.expect(
        api.model(
            "CreateExternalDatasetRequest",
            {
                "external_knowledge_api_id": fields.String(required=True, description="External knowledge API ID"),
                "external_knowledge_id": fields.String(required=True, description="External knowledge ID"),
                "name": fields.String(required=True, description="Dataset name"),
                "description": fields.String(description="Dataset description"),
            },
        )
    )
    @api.response(201, "External dataset created successfully", dataset_detail_fields)
    @api.response(400, "Invalid parameters")
    @api.response(403, "Permission denied")
    @setup_required
    @login_required
    @account_initialization_required

@@ -191,7 +230,24 @@ class ExternalDatasetCreateApi(Resource):
        return marshal(dataset, dataset_detail_fields), 201


@console_ns.route("/datasets/<uuid:dataset_id>/external-hit-testing")
class ExternalKnowledgeHitTestingApi(Resource):
    @api.doc("test_external_knowledge_retrieval")
    @api.doc(description="Test external knowledge retrieval for dataset")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.expect(
        api.model(
            "ExternalHitTestingRequest",
            {
                "query": fields.String(required=True, description="Query text for testing"),
                "retrieval_model": fields.Raw(description="Retrieval model configuration"),
                "external_retrieval_model": fields.Raw(description="External retrieval model configuration"),
            },
        )
    )
    @api.response(200, "External hit testing completed successfully")
    @api.response(404, "Dataset not found")
    @api.response(400, "Invalid parameters")
    @setup_required
    @login_required
    @account_initialization_required

@@ -228,8 +284,22 @@ class ExternalKnowledgeHitTestingApi(Resource):
            raise InternalServerError(str(e))


@console_ns.route("/test/retrieval")
class BedrockRetrievalApi(Resource):
    # this api is only for internal testing
    @api.doc("bedrock_retrieval_test")
    @api.doc(description="Bedrock retrieval test (internal use only)")
    @api.expect(
        api.model(
            "BedrockRetrievalTestRequest",
            {
                "retrieval_setting": fields.Raw(required=True, description="Retrieval settings"),
                "query": fields.String(required=True, description="Query text"),
                "knowledge_id": fields.String(required=True, description="Knowledge ID"),
            },
        )
    )
    @api.response(200, "Bedrock retrieval test completed")
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")

@@ -247,12 +317,3 @@ class BedrockRetrievalApi(Resource):
            args["retrieval_setting"], args["query"], args["knowledge_id"]
        )
        return result, 200


api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing")
api.add_resource(ExternalDatasetCreateApi, "/datasets/external")
api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api")
api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
# this api is only for internal test
api.add_resource(BedrockRetrievalApi, "/test/retrieval")
@@ -1,6 +1,6 @@
from flask_restx import Resource
from flask_restx import Resource, fields

from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.wraps import (
    account_initialization_required,

@@ -10,7 +10,25 @@ from controllers.console.wraps import (
from libs.login import login_required


@console_ns.route("/datasets/<uuid:dataset_id>/hit-testing")
class HitTestingApi(Resource, DatasetsHitTestingBase):
    @api.doc("test_dataset_retrieval")
    @api.doc(description="Test dataset knowledge retrieval")
    @api.doc(params={"dataset_id": "Dataset ID"})
    @api.expect(
        api.model(
            "HitTestingRequest",
            {
                "query": fields.String(required=True, description="Query text for testing"),
                "retrieval_model": fields.Raw(description="Retrieval model configuration"),
                "top_k": fields.Integer(description="Number of top results to return"),
                "score_threshold": fields.Float(description="Score threshold for filtering results"),
            },
        )
    )
    @api.response(200, "Hit testing completed successfully")
    @api.response(404, "Dataset not found")
    @api.response(400, "Invalid parameters")
    @setup_required
    @login_required
    @account_initialization_required

@@ -23,6 +41,3 @@ class HitTestingApi(Resource, DatasetsHitTestingBase):
        self.hit_testing_args_check(args)

        return self.perform_hit_testing(dataset, args)


api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")
@@ -1,13 +1,32 @@
from flask_restx import Resource, reqparse
from flask_restx import Resource, fields, reqparse

from controllers.console import api
from controllers.console import api, console_ns
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusApiRequest, WebsiteService


@console_ns.route("/website/crawl")
class WebsiteCrawlApi(Resource):
    @api.doc("crawl_website")
    @api.doc(description="Crawl website content")
    @api.expect(
        api.model(
            "WebsiteCrawlRequest",
            {
                "provider": fields.String(
                    required=True,
                    description="Crawl provider (firecrawl/watercrawl/jinareader)",
                    enum=["firecrawl", "watercrawl", "jinareader"],
                ),
                "url": fields.String(required=True, description="URL to crawl"),
                "options": fields.Raw(required=True, description="Crawl options"),
            },
        )
    )
    @api.response(200, "Website crawl initiated successfully")
    @api.response(400, "Invalid crawl parameters")
    @setup_required
    @login_required
    @account_initialization_required

@@ -39,7 +58,14 @@ class WebsiteCrawlApi(Resource):
        return result, 200


@console_ns.route("/website/crawl/status/<string:job_id>")
class WebsiteCrawlStatusApi(Resource):
    @api.doc("get_crawl_status")
    @api.doc(description="Get website crawl status")
    @api.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"})
    @api.response(200, "Crawl status retrieved successfully")
    @api.response(404, "Crawl job not found")
    @api.response(400, "Invalid provider")
    @setup_required
    @login_required
    @account_initialization_required

@@ -62,7 +88,3 @@ class WebsiteCrawlStatusApi(Resource):
        except Exception as e:
            raise WebsiteCrawlError(str(e))
        return result, 200


api.add_resource(WebsiteCrawlApi, "/website/crawl")
api.add_resource(WebsiteCrawlStatusApi, "/website/crawl/status/<string:job_id>")
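The crawl request model constrains `provider` with an `enum`, which only affects the generated docs; on the parsing side the same constraint is usually enforced with `choices` in `reqparse`. The sketch below is an assumption about how such a handler is wired, not a copy of it:

```python
from flask_restx import reqparse

crawl_parser = reqparse.RequestParser()
crawl_parser.add_argument(
    "provider",
    type=str,
    required=True,
    choices=("firecrawl", "watercrawl", "jinareader"),  # anything else -> 400
    location="json",
)
crawl_parser.add_argument("url", type=str, required=True, location="json")
crawl_parser.add_argument("options", type=dict, required=True, location="json")
```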
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, Optional, ParamSpec, TypeVar
from typing import Concatenate, ParamSpec, TypeVar

from flask_login import current_user
from flask_restx import Resource

@@ -20,7 +20,7 @@ R = TypeVar("R")
T = TypeVar("T")


def installed_app_required(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None):
def installed_app_required(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
    def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
        @wraps(view)
        def decorated(installed_app_id: str, *args: P.args, **kwargs: P.kwargs):

@@ -50,7 +50,7 @@ def installed_app_required(view: Optional[Callable[Concatenate[InstalledApp, P],
    return decorator


def user_allowed_to_access_app(view: Optional[Callable[Concatenate[InstalledApp, P], R]] = None):
def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] | None = None):
    def decorator(view: Callable[Concatenate[InstalledApp, P], R]):
        @wraps(view)
        def decorated(installed_app: InstalledApp, *args: P.args, **kwargs: P.kwargs):
@@ -14,6 +14,15 @@ api = ExternalApi(

files_ns = Namespace("files", description="File operations", path="/")

from . import image_preview, tool_files, upload  # pyright: ignore[reportUnusedImport]
from . import image_preview, tool_files, upload

api.add_namespace(files_ns)

__all__ = [
    "api",
    "bp",
    "files_ns",
    "image_preview",
    "tool_files",
    "upload",
]
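This hunk swaps per-line `# pyright: ignore[reportUnusedImport]` suppressions for an explicit `__all__`: listing a name in `__all__` marks it as an intentional re-export, which both pyright and ruff treat as a use of the import. The shape, with placeholder module names inside a hypothetical package `__init__.py`:

```python
# pkg/__init__.py -- submodules are imported for their registration side effects
from . import image_api, upload_api

__all__ = [
    "image_api",
    "upload_api",
]
```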
@@ -1,5 +1,4 @@
from mimetypes import guess_extension
from typing import Optional

from flask_restx import Resource, reqparse
from flask_restx.api import HTTPStatus

@@ -73,11 +72,11 @@ class PluginUploadFileApi(Resource):
        nonce: str = args["nonce"]
        sign: str = args["sign"]
        tenant_id: str = args["tenant_id"]
        user_id: Optional[str] = args.get("user_id")
        user_id: str | None = args.get("user_id")
        user = get_user(tenant_id, user_id)

        filename: Optional[str] = file.filename
        mimetype: Optional[str] = file.mimetype
        filename: str | None = file.filename
        mimetype: str | None = file.mimetype

        if not filename or not mimetype:
            raise Forbidden("Invalid request.")
@@ -15,8 +15,17 @@ api = ExternalApi(
# Create namespace
inner_api_ns = Namespace("inner_api", description="Internal API operations", path="/")

from . import mail as _mail  # pyright: ignore[reportUnusedImport]
from .plugin import plugin as _plugin  # pyright: ignore[reportUnusedImport]
from .workspace import workspace as _workspace  # pyright: ignore[reportUnusedImport]
from . import mail as _mail
from .plugin import plugin as _plugin
from .workspace import workspace as _workspace

api.add_namespace(inner_api_ns)

__all__ = [
    "_mail",
    "_plugin",
    "_workspace",
    "api",
    "bp",
    "inner_api_ns",
]
@@ -1,6 +1,6 @@
from collections.abc import Callable
from functools import wraps
from typing import Optional, ParamSpec, TypeVar, cast
from typing import ParamSpec, TypeVar, cast

from flask import current_app, request
from flask_login import user_logged_in

@@ -54,7 +54,7 @@ def get_user(tenant_id: str, user_id: str | None) -> EndUser:
    return user_model


def get_user_tenant(view: Optional[Callable[P, R]] = None):
def get_user_tenant(view: Callable[P, R] | None = None):
    def decorator(view_func: Callable[P, R]):
        @wraps(view_func)
        def decorated_view(*args: P.args, **kwargs: P.kwargs):

@@ -106,7 +106,7 @@ def get_user_tenant(view: Optional[Callable[P, R]] = None):
    return decorator(view)


def plugin_data(view: Optional[Callable[P, R]] = None, *, payload_type: type[BaseModel]):
def plugin_data(view: Callable[P, R] | None = None, *, payload_type: type[BaseModel]):
    def decorator(view_func: Callable[P, R]):
        def decorated_view(*args: P.args, **kwargs: P.kwargs):
            try:
@@ -14,6 +14,13 @@ api = ExternalApi(

mcp_ns = Namespace("mcp", description="MCP operations", path="/")

from . import mcp  # pyright: ignore[reportUnusedImport]
from . import mcp

api.add_namespace(mcp_ns)

__all__ = [
    "api",
    "bp",
    "mcp",
    "mcp_ns",
]
@@ -1,4 +1,4 @@
-from typing import Optional, Union
+from typing import Union

 from flask import Response
 from flask_restx import Resource, reqparse
@@ -73,7 +73,7 @@ class MCPAppApi(Resource):
             ValidationError: Invalid request format or parameters
         """
         args = mcp_request_parser.parse_args()
-        request_id: Optional[Union[int, str]] = args.get("id")
+        request_id: Union[int, str] | None = args.get("id")
         mcp_request = self._parse_mcp_request(args)

         with Session(db.engine, expire_on_commit=False) as session:
@@ -107,7 +107,7 @@ class MCPAppApi(Resource):
     def _process_mcp_message(
         self,
         mcp_request: mcp_types.ClientRequest | mcp_types.ClientNotification,
-        request_id: Optional[Union[int, str]],
+        request_id: Union[int, str] | None,
         app: App,
         mcp_server: AppMCPServer,
         user_input_form: list[VariableEntity],
@@ -130,7 +130,7 @@ class MCPAppApi(Resource):
     def _handle_request(
         self,
         mcp_request: mcp_types.ClientRequest,
-        request_id: Optional[Union[int, str]],
+        request_id: Union[int, str] | None,
         app: App,
         mcp_server: AppMCPServer,
         user_input_form: list[VariableEntity],
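Note: the half-migrated annotation `Union[int, str] | None` is valid: the `|` operator is defined on `typing.Union` objects, and the result is the same union as the fully modern `int | str | None`. A tiny standard-library check of that equivalence:

```python
from typing import Union, get_args

# Both spellings flatten to the same argument tuple.
assert get_args(Union[int, str] | None) == (int, str, type(None))
assert get_args(int | str | None) == (int, str, type(None))
```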
@@ -14,26 +14,46 @@ api = ExternalApi(

 service_api_ns = Namespace("service_api", description="Service operations", path="/")

-from . import index  # pyright: ignore[reportUnusedImport]
+from . import index
 from .app import (
-    annotation,  # pyright: ignore[reportUnusedImport]
-    app,  # pyright: ignore[reportUnusedImport]
-    audio,  # pyright: ignore[reportUnusedImport]
-    completion,  # pyright: ignore[reportUnusedImport]
-    conversation,  # pyright: ignore[reportUnusedImport]
-    file,  # pyright: ignore[reportUnusedImport]
-    file_preview,  # pyright: ignore[reportUnusedImport]
-    message,  # pyright: ignore[reportUnusedImport]
-    site,  # pyright: ignore[reportUnusedImport]
-    workflow,  # pyright: ignore[reportUnusedImport]
+    annotation,
+    app,
+    audio,
+    completion,
+    conversation,
+    file,
+    file_preview,
+    message,
+    site,
+    workflow,
 )
 from .dataset import (
-    dataset,  # pyright: ignore[reportUnusedImport]
-    document,  # pyright: ignore[reportUnusedImport]
-    hit_testing,  # pyright: ignore[reportUnusedImport]
-    metadata,  # pyright: ignore[reportUnusedImport]
-    segment,  # pyright: ignore[reportUnusedImport]
+    dataset,
+    document,
+    hit_testing,
+    metadata,
+    segment,
 )
-from .workspace import models  # pyright: ignore[reportUnusedImport]
+from .workspace import models
+
+__all__ = [
+    "annotation",
+    "app",
+    "audio",
+    "completion",
+    "conversation",
+    "dataset",
+    "document",
+    "file",
+    "file_preview",
+    "hit_testing",
+    "index",
+    "message",
+    "metadata",
+    "models",
+    "segment",
+    "site",
+    "workflow",
+]

 api.add_namespace(service_api_ns)
@@ -3,7 +3,7 @@ from collections.abc import Callable
 from datetime import timedelta
 from enum import StrEnum, auto
 from functools import wraps
-from typing import Concatenate, Optional, ParamSpec, TypeVar
+from typing import Concatenate, ParamSpec, TypeVar

 from flask import current_app, request
 from flask_login import user_logged_in
@@ -42,7 +42,7 @@ class FetchUserArg(BaseModel):
     required: bool = False


-def validate_app_token(view: Optional[Callable[P, R]] = None, *, fetch_user_arg: Optional[FetchUserArg] = None):
+def validate_app_token(view: Callable[P, R] | None = None, *, fetch_user_arg: FetchUserArg | None = None):
     def decorator(view_func: Callable[P, R]):
         @wraps(view_func)
         def decorated_view(*args: P.args, **kwargs: P.kwargs):
@@ -189,7 +189,7 @@ def cloud_edition_billing_rate_limit_check(resource: str, api_token_type: str):
     return interceptor


-def validate_dataset_token(view: Optional[Callable[Concatenate[T, P], R]] = None):
+def validate_dataset_token(view: Callable[Concatenate[T, P], R] | None = None):
     def decorator(view: Callable[Concatenate[T, P], R]):
         @wraps(view)
         def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -267,7 +267,7 @@ def validate_and_get_api_token(scope: str | None = None):
     return api_token


-def create_or_update_end_user_for_user_id(app_model: App, user_id: Optional[str] = None) -> EndUser:
+def create_or_update_end_user_for_user_id(app_model: App, user_id: str | None = None) -> EndUser:
     """
     Create or update session terminal based on user ID.
     """
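Note: `validate_dataset_token` types the wrapped view as `Callable[Concatenate[T, P], R]`: `Concatenate` pins the leading positional parameter that the decorator injects, while `ParamSpec` forwards the remaining parameters untouched. A standalone sketch with invented names:

```python
from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

def with_number(view: Callable[Concatenate[int, P], R]) -> Callable[P, R]:
    """Inject a leading int argument; callers supply only the rest."""
    @wraps(view)
    def decorated(*args: P.args, **kwargs: P.kwargs) -> R:
        return view(42, *args, **kwargs)
    return decorated

@with_number
def describe(n: int, label: str) -> str:
    return f"{label}={n}"

print(describe("answer"))  # answer=42
```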
@@ -16,20 +16,40 @@ api = ExternalApi(
 web_ns = Namespace("web", description="Web application API operations", path="/")

 from . import (
-    app,  # pyright: ignore[reportUnusedImport]
-    audio,  # pyright: ignore[reportUnusedImport]
-    completion,  # pyright: ignore[reportUnusedImport]
-    conversation,  # pyright: ignore[reportUnusedImport]
-    feature,  # pyright: ignore[reportUnusedImport]
-    files,  # pyright: ignore[reportUnusedImport]
-    forgot_password,  # pyright: ignore[reportUnusedImport]
-    login,  # pyright: ignore[reportUnusedImport]
-    message,  # pyright: ignore[reportUnusedImport]
-    passport,  # pyright: ignore[reportUnusedImport]
-    remote_files,  # pyright: ignore[reportUnusedImport]
-    saved_message,  # pyright: ignore[reportUnusedImport]
-    site,  # pyright: ignore[reportUnusedImport]
-    workflow,  # pyright: ignore[reportUnusedImport]
+    app,
+    audio,
+    completion,
+    conversation,
+    feature,
+    files,
+    forgot_password,
+    login,
+    message,
+    passport,
+    remote_files,
+    saved_message,
+    site,
+    workflow,
 )

 api.add_namespace(web_ns)
+
+__all__ = [
+    "api",
+    "app",
+    "audio",
+    "bp",
+    "completion",
+    "conversation",
+    "feature",
+    "files",
+    "forgot_password",
+    "login",
+    "message",
+    "passport",
+    "remote_files",
+    "saved_message",
+    "site",
+    "web_ns",
+    "workflow",
+]
@@ -1,7 +1,7 @@
 from collections.abc import Callable
 from datetime import UTC, datetime
 from functools import wraps
-from typing import Concatenate, Optional, ParamSpec, TypeVar
+from typing import Concatenate, ParamSpec, TypeVar

 from flask import request
 from flask_restx import Resource
@@ -21,7 +21,7 @@ P = ParamSpec("P")
 R = TypeVar("R")


-def validate_jwt_token(view: Optional[Callable[Concatenate[App, EndUser, P], R]] = None):
+def validate_jwt_token(view: Callable[Concatenate[App, EndUser, P], R] | None = None):
     def decorator(view: Callable[Concatenate[App, EndUser, P], R]):
         @wraps(view)
         def decorated(*args: P.args, **kwargs: P.kwargs):
@@ -1,7 +1,7 @@
 import json
 import logging
 import uuid
-from typing import Optional, Union, cast
+from typing import Union, cast

 from sqlalchemy import select

@@ -60,8 +60,8 @@ class BaseAgentRunner(AppRunner):
         message: Message,
         user_id: str,
         model_instance: ModelInstance,
-        memory: Optional[TokenBufferMemory] = None,
-        prompt_messages: Optional[list[PromptMessage]] = None,
+        memory: TokenBufferMemory | None = None,
+        prompt_messages: list[PromptMessage] | None = None,
     ):
         self.tenant_id = tenant_id
         self.application_generate_entity = application_generate_entity
@@ -112,7 +112,7 @@
         features = model_schema.features if model_schema and model_schema.features else []
         self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features
         self.files = application_generate_entity.files if ModelFeature.VISION in features else []
-        self.query: Optional[str] = ""
+        self.query: str | None = ""
         self._current_thoughts: list[PromptMessage] = []

     def _repack_app_generate_entity(
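Note: `prompt_messages: list[PromptMessage] | None = None` is the standard way to give a mutable parameter a safe default: defaulting to `[]` directly would share one list across every call. A minimal sketch of the idiom:

```python
def collect(item: str, bucket: list[str] | None = None) -> list[str]:
    # A fresh list per call; a literal [] default would be shared
    # between all calls that omit `bucket`.
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

assert collect("a") == ["a"]
assert collect("b") == ["b"]  # not ["a", "b"]
```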
@@ -1,7 +1,7 @@
 import json
 from abc import ABC, abstractmethod
 from collections.abc import Generator, Mapping, Sequence
-from typing import Any, Optional
+from typing import Any

 from core.agent.base_agent_runner import BaseAgentRunner
 from core.agent.entities import AgentScratchpadUnit
@@ -70,12 +70,12 @@ class CotAgentRunner(BaseAgentRunner, ABC):
         self._prompt_messages_tools = prompt_messages_tools

         function_call_state = True
-        llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
+        llm_usage: dict[str, LLMUsage | None] = {"usage": None}
         final_answer = ""
         prompt_messages: list = []  # Initialize prompt_messages
         agent_thought_id = ""  # Initialize agent_thought_id

-        def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage):
+        def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage):
             if not final_llm_usage_dict["usage"]:
                 final_llm_usage_dict["usage"] = usage
             else:
@@ -122,7 +122,7 @@
             callbacks=[],
         )

-        usage_dict: dict[str, Optional[LLMUsage]] = {}
+        usage_dict: dict[str, LLMUsage | None] = {}
         react_chunks = CotAgentOutputParser.handle_react_stream_output(chunks, usage_dict)
         scratchpad = AgentScratchpadUnit(
             agent_response="",
@@ -274,7 +274,7 @@
         action: AgentScratchpadUnit.Action,
         tool_instances: Mapping[str, Tool],
         message_file_ids: list[str],
-        trace_manager: Optional[TraceQueueManager] = None,
+        trace_manager: TraceQueueManager | None = None,
     ) -> tuple[str, ToolInvokeMeta]:
         """
         handle invoke action
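Note: `llm_usage: dict[str, LLMUsage | None] = {"usage": None}` is a mutable cell. The nested `increase_usage` closure cannot rebind an outer local without `nonlocal`, but it can mutate the dict in place, so the running total stays visible to the enclosing scope. A simplified sketch with a stand-in usage type:

```python
from dataclasses import dataclass

@dataclass
class Usage:  # stand-in for the real LLMUsage entity
    tokens: int

def run() -> Usage | None:
    llm_usage: dict[str, Usage | None] = {"usage": None}

    def increase_usage(acc: dict[str, Usage | None], usage: Usage) -> None:
        # Mutating the dict updates state shared with the outer scope.
        if acc["usage"] is None:
            acc["usage"] = usage
        else:
            acc["usage"].tokens += usage.tokens

    for step_tokens in (10, 25):
        increase_usage(llm_usage, Usage(step_tokens))
    return llm_usage["usage"]

assert run() == Usage(35)
```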
@@ -1,5 +1,4 @@
 import json
-from typing import Optional

 from core.agent.cot_agent_runner import CotAgentRunner
 from core.model_runtime.entities.message_entities import (
@@ -31,7+30,7 @@ class CotCompletionAgentRunner(CotAgentRunner):

         return system_prompt

-    def _organize_historic_prompt(self, current_session_messages: Optional[list[PromptMessage]] = None) -> str:
+    def _organize_historic_prompt(self, current_session_messages: list[PromptMessage] | None = None) -> str:
         """
         Organize historic prompt
         """
@@ -1,5 +1,5 @@
 from enum import StrEnum
-from typing import Any, Optional, Union
+from typing import Any, Union

 from pydantic import BaseModel, Field

@@ -50,11 +50,11 @@ class AgentScratchpadUnit(BaseModel):
             "action_input": self.action_input,
         }

-    agent_response: Optional[str] = None
-    thought: Optional[str] = None
-    action_str: Optional[str] = None
-    observation: Optional[str] = None
-    action: Optional[Action] = None
+    agent_response: str | None = None
+    thought: str | None = None
+    action_str: str | None = None
+    observation: str | None = None
+    action: Action | None = None

     def is_final(self) -> bool:
         """
@@ -81,8 +81,8 @@ class AgentEntity(BaseModel):
     provider: str
     model: str
     strategy: Strategy
-    prompt: Optional[AgentPromptEntity] = None
-    tools: Optional[list[AgentToolEntity]] = None
+    prompt: AgentPromptEntity | None = None
+    tools: list[AgentToolEntity] | None = None
     max_iteration: int = 10

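Note: pydantic v2 treats `str | None = None` exactly like `Optional[str] = None`: the field accepts the value or `None`, and the `= None` default is what makes it optional at construction time. A minimal sketch (requires pydantic v2; the model name is invented):

```python
from pydantic import BaseModel

class ScratchpadUnit(BaseModel):  # illustrative stand-in
    thought: str | None = None
    observation: str | None = None

unit = ScratchpadUnit(thought="look up the weather")
assert unit.observation is None
# Validation still applies: ScratchpadUnit(thought=123) raises a
# ValidationError, since pydantic v2 does not coerce int to str.
```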
@@ -2,7 +2,7 @@ import json
 import logging
 from collections.abc import Generator
 from copy import deepcopy
-from typing import Any, Optional, Union
+from typing import Any, Union

 from core.agent.base_agent_runner import BaseAgentRunner
 from core.app.apps.base_app_queue_manager import PublishFrom
@@ -52,14 +52,14 @@ class FunctionCallAgentRunner(BaseAgentRunner):

         # continue to run until there is not any tool call
         function_call_state = True
-        llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
+        llm_usage: dict[str, LLMUsage | None] = {"usage": None}
         final_answer = ""
         prompt_messages: list = []  # Initialize prompt_messages

         # get tracing instance
         trace_manager = app_generate_entity.trace_manager

-        def increase_usage(final_llm_usage_dict: dict[str, Optional[LLMUsage]], usage: LLMUsage):
+        def increase_usage(final_llm_usage_dict: dict[str, LLMUsage | None], usage: LLMUsage):
             if not final_llm_usage_dict["usage"]:
                 final_llm_usage_dict["usage"] = usage
             else:
@@ -1,5 +1,5 @@
 from enum import StrEnum
-from typing import Any, Optional
+from typing import Any

 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator

@@ -53,7 +53,7 @@ class AgentStrategyParameter(PluginParameter):
         return cast_parameter_value(self, value)

     type: AgentStrategyParameterType = Field(..., description="The type of the parameter")
-    help: Optional[I18nObject] = None
+    help: I18nObject | None = None

     def init_frontend_parameter(self, value: Any):
         return init_frontend_parameter(self, self.type, value)
@@ -61,7 +61,7 @@

 class AgentStrategyProviderEntity(BaseModel):
     identity: AgentStrategyProviderIdentity
-    plugin_id: Optional[str] = Field(None, description="The id of the plugin")
+    plugin_id: str | None = Field(None, description="The id of the plugin")


 class AgentStrategyIdentity(ToolIdentity):
@@ -84,9 +84,9 @@ class AgentStrategyEntity(BaseModel):
     identity: AgentStrategyIdentity
     parameters: list[AgentStrategyParameter] = Field(default_factory=list)
     description: I18nObject = Field(..., description="The description of the agent strategy")
-    output_schema: Optional[dict] = None
-    features: Optional[list[AgentFeature]] = None
-    meta_version: Optional[str] = None
+    output_schema: dict | None = None
+    features: list[AgentFeature] | None = None
+    meta_version: str | None = None
     # pydantic configs
     model_config = ConfigDict(protected_namespaces=())

@@ -1,6 +1,6 @@
 from abc import ABC, abstractmethod
 from collections.abc import Generator, Sequence
-from typing import Any, Optional
+from typing import Any

 from core.agent.entities import AgentInvokeMessage
 from core.agent.plugin_entities import AgentStrategyParameter
@@ -16,10 +16,10 @@ class BaseAgentStrategy(ABC):
         self,
         params: dict[str, Any],
         user_id: str,
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
-        credentials: Optional[InvokeCredentials] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
+        credentials: InvokeCredentials | None = None,
     ) -> Generator[AgentInvokeMessage, None, None]:
         """
         Invoke the agent strategy.
@@ -37,9 +37,9 @@
         self,
         params: dict[str, Any],
         user_id: str,
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
-        credentials: Optional[InvokeCredentials] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
+        credentials: InvokeCredentials | None = None,
     ) -> Generator[AgentInvokeMessage, None, None]:
         pass
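Note: `BaseAgentStrategy` pairs a public `invoke` returning `Generator[AgentInvokeMessage, None, None]` with an abstract `_invoke` of the same shape, so messages stream out lazily as the strategy produces them. A stripped-down sketch of that template-method shape, with entity types replaced by `str` and the real context parameters omitted:

```python
from abc import ABC, abstractmethod
from collections.abc import Generator
from typing import Any

class BaseStrategy(ABC):
    def invoke(self, params: dict[str, Any], user_id: str) -> Generator[str, None, None]:
        # Shared pre/post logic would live here; messages stream through.
        yield from self._invoke(params, user_id)

    @abstractmethod
    def _invoke(self, params: dict[str, Any], user_id: str) -> Generator[str, None, None]: ...

class EchoStrategy(BaseStrategy):
    def _invoke(self, params: dict[str, Any], user_id: str) -> Generator[str, None, None]:
        yield f"{user_id}: {params.get('query', '')}"

print(list(EchoStrategy().invoke({"query": "hi"}, "u1")))  # ['u1: hi']
```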
@@ -1,5 +1,5 @@
 from collections.abc import Generator, Sequence
-from typing import Any, Optional
+from typing import Any

 from core.agent.entities import AgentInvokeMessage
 from core.agent.plugin_entities import AgentStrategyEntity, AgentStrategyParameter
@@ -38,10 +38,10 @@ class PluginAgentStrategy(BaseAgentStrategy):
         self,
         params: dict[str, Any],
         user_id: str,
-        conversation_id: Optional[str] = None,
-        app_id: Optional[str] = None,
-        message_id: Optional[str] = None,
-        credentials: Optional[InvokeCredentials] = None,
+        conversation_id: str | None = None,
+        app_id: str | None = None,
+        message_id: str | None = None,
+        credentials: InvokeCredentials | None = None,
     ) -> Generator[AgentInvokeMessage, None, None]:
         """
         Invoke the agent strategy.
@@ -1,12 +1,10 @@
-from typing import Optional
-
 from core.app.app_config.entities import SensitiveWordAvoidanceEntity
 from core.moderation.factory import ModerationFactory


 class SensitiveWordAvoidanceConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> Optional[SensitiveWordAvoidanceEntity]:
+    def convert(cls, config: dict) -> SensitiveWordAvoidanceEntity | None:
         sensitive_word_avoidance_dict = config.get("sensitive_word_avoidance")
         if not sensitive_word_avoidance_dict:
             return None
@@ -1,12 +1,10 @@
-from typing import Optional
-
 from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity
 from core.agent.prompt.template import REACT_PROMPT_TEMPLATES


 class AgentConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> Optional[AgentEntity]:
+    def convert(cls, config: dict) -> AgentEntity | None:
         """
         Convert model config to model config

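Note: these `convert` classmethods share one shape: probe the raw config dict, return `None` when the feature is absent, otherwise build the entity; the `-> Entity | None` return type makes the absent case explicit to callers. A minimal sketch with an invented entity (requires pydantic v2):

```python
from pydantic import BaseModel

class FeatureEntity(BaseModel):  # hypothetical stand-in entity
    enabled: bool

class FeatureConfigManager:
    @classmethod
    def convert(cls, config: dict) -> FeatureEntity | None:
        feature_dict = config.get("feature")
        if not feature_dict:
            return None  # feature not configured
        return FeatureEntity(enabled=bool(feature_dict.get("enabled")))

assert FeatureConfigManager.convert({}) is None
entity = FeatureConfigManager.convert({"feature": {"enabled": True}})
assert entity is not None and entity.enabled
```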
@@ -1,5 +1,4 @@
 import uuid
-from typing import Optional

 from core.app.app_config.entities import (
     DatasetEntity,
@@ -14,7 +13,7 @@ from services.dataset_service import DatasetService

 class DatasetConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> Optional[DatasetEntity]:
+    def convert(cls, config: dict) -> DatasetEntity | None:
         """
         Convert model config to model config

@@ -1,6 +1,6 @@
 from collections.abc import Sequence
 from enum import StrEnum, auto
-from typing import Any, Literal, Optional
+from typing import Any, Literal

 from pydantic import BaseModel, Field, field_validator

@@ -17,7 +17,7 @@ class ModelConfigEntity(BaseModel):

     provider: str
     model: str
-    mode: Optional[str] = None
+    mode: str | None = None
     parameters: dict[str, Any] = Field(default_factory=dict)
     stop: list[str] = Field(default_factory=list)

@@ -53,7 +53,7 @@ class AdvancedCompletionPromptTemplateEntity(BaseModel):
         assistant: str

     prompt: str
-    role_prefix: Optional[RolePrefixEntity] = None
+    role_prefix: RolePrefixEntity | None = None


 class PromptTemplateEntity(BaseModel):
@@ -84,9 +84,9 @@ class PromptTemplateEntity(BaseModel):
         raise ValueError(f"invalid prompt type value {value}")

     prompt_type: PromptType
-    simple_prompt_template: Optional[str] = None
-    advanced_chat_prompt_template: Optional[AdvancedChatPromptTemplateEntity] = None
-    advanced_completion_prompt_template: Optional[AdvancedCompletionPromptTemplateEntity] = None
+    simple_prompt_template: str | None = None
+    advanced_chat_prompt_template: AdvancedChatPromptTemplateEntity | None = None
+    advanced_completion_prompt_template: AdvancedCompletionPromptTemplateEntity | None = None


 class VariableEntityType(StrEnum):
@@ -112,7 +112,7 @@ class VariableEntity(BaseModel):
     type: VariableEntityType
     required: bool = False
     hide: bool = False
-    max_length: Optional[int] = None
+    max_length: int | None = None
     options: Sequence[str] = Field(default_factory=list)
     allowed_file_types: Sequence[FileType] = Field(default_factory=list)
     allowed_file_extensions: Sequence[str] = Field(default_factory=list)
@@ -173,7 +173,7 @@ class ModelConfig(BaseModel):

 class Condition(BaseModel):
     """
-    Conditon detail
+    Condition detail
     """

     name: str
@@ -186,8 +186,8 @@ class MetadataFilteringCondition(BaseModel):
     Metadata Filtering Condition.
     """

-    logical_operator: Optional[Literal["and", "or"]] = "and"
-    conditions: Optional[list[Condition]] = Field(default=None, deprecated=True)
+    logical_operator: Literal["and", "or"] | None = "and"
+    conditions: list[Condition] | None = Field(default=None, deprecated=True)


 class DatasetRetrieveConfigEntity(BaseModel):
@@ -217,18 +217,18 @@ class DatasetRetrieveConfigEntity(BaseModel):
             return mode
         raise ValueError(f"invalid retrieve strategy value {value}")

-    query_variable: Optional[str] = None  # Only when app mode is completion
+    query_variable: str | None = None  # Only when app mode is completion

     retrieve_strategy: RetrieveStrategy
-    top_k: Optional[int] = None
-    score_threshold: Optional[float] = 0.0
-    rerank_mode: Optional[str] = "reranking_model"
-    reranking_model: Optional[dict] = None
-    weights: Optional[dict] = None
-    reranking_enabled: Optional[bool] = True
-    metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled"
-    metadata_model_config: Optional[ModelConfig] = None
-    metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None
+    top_k: int | None = None
+    score_threshold: float | None = 0.0
+    rerank_mode: str | None = "reranking_model"
+    reranking_model: dict | None = None
+    weights: dict | None = None
+    reranking_enabled: bool | None = True
+    metadata_filtering_mode: Literal["disabled", "automatic", "manual"] | None = "disabled"
+    metadata_model_config: ModelConfig | None = None
+    metadata_filtering_conditions: MetadataFilteringCondition | None = None


 class DatasetEntity(BaseModel):
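Note: `Field(default=None, deprecated=True)` is pydantic's built-in way (added in pydantic 2.7, to the best of my knowledge) to keep a legacy field accepted while steering consumers away from it: accessing the field emits a DeprecationWarning. A hedged sketch with an invented model:

```python
from pydantic import BaseModel, Field

class FilterConfig(BaseModel):  # illustrative model
    # Legacy flat list, kept only for backwards compatibility.
    conditions: list[str] | None = Field(default=None, deprecated=True)
    logical_operator: str | None = "and"

cfg = FilterConfig(conditions=["a"])
_ = cfg.conditions  # emits a DeprecationWarning on attribute access
```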
@@ -255,8 +255,8 @@ class TextToSpeechEntity(BaseModel):
     """

     enabled: bool
-    voice: Optional[str] = None
-    language: Optional[str] = None
+    voice: str | None = None
+    language: str | None = None


 class TracingConfigEntity(BaseModel):
@@ -269,15 +269,15 @@


 class AppAdditionalFeatures(BaseModel):
-    file_upload: Optional[FileUploadConfig] = None
-    opening_statement: Optional[str] = None
+    file_upload: FileUploadConfig | None = None
+    opening_statement: str | None = None
     suggested_questions: list[str] = []
     suggested_questions_after_answer: bool = False
     show_retrieve_source: bool = False
     more_like_this: bool = False
     speech_to_text: bool = False
-    text_to_speech: Optional[TextToSpeechEntity] = None
-    trace_config: Optional[TracingConfigEntity] = None
+    text_to_speech: TextToSpeechEntity | None = None
+    trace_config: TracingConfigEntity | None = None


 class AppConfig(BaseModel):
@@ -290,7 +290,7 @@ class AppConfig(BaseModel):
     app_mode: AppMode
     additional_features: AppAdditionalFeatures
     variables: list[VariableEntity] = []
-    sensitive_word_avoidance: Optional[SensitiveWordAvoidanceEntity] = None
+    sensitive_word_avoidance: SensitiveWordAvoidanceEntity | None = None


 class EasyUIBasedAppModelConfigFrom(StrEnum):
@@ -313,7 +313,7 @@ class EasyUIBasedAppConfig(AppConfig):
     app_model_config_dict: dict
     model: ModelConfigEntity
     prompt_template: PromptTemplateEntity
-    dataset: Optional[DatasetEntity] = None
+    dataset: DatasetEntity | None = None
     external_data_variables: list[ExternalDataVariableEntity] = []

@@ -3,7 +3,7 @@ import logging
 import threading
 import uuid
 from collections.abc import Generator, Mapping
-from typing import Any, Literal, Optional, Union, overload
+from typing import Any, Literal, Union, overload

 from flask import Flask, current_app
 from pydantic import ValidationError
@@ -390,7 +390,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         application_generate_entity: AdvancedChatAppGenerateEntity,
         workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
-        conversation: Optional[Conversation] = None,
+        conversation: Conversation | None = None,
         stream: bool = True,
         variable_loader: VariableLoader = DUMMY_VARIABLE_LOADER,
     ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Mapping
-from typing import Any, Optional, cast
+from typing import Any, cast

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -231,7 +231,7 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):

     def query_app_annotations_to_reply(
         self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
-    ) -> Optional[MessageAnnotation]:
+    ) -> MessageAnnotation | None:
         """
         Query app annotations to reply
         :param app_record: app record
@@ -4,7 +4,7 @@ import time
 from collections.abc import Callable, Generator, Mapping
 from contextlib import contextmanager
 from threading import Thread
-from typing import Any, Optional, Union
+from typing import Any, Union

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -233,7 +233,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         return None

     def _wrapper_process_stream_response(
-        self, trace_manager: Optional[TraceQueueManager] = None
+        self, trace_manager: TraceQueueManager | None = None
     ) -> Generator[StreamResponse, None, None]:
         tts_publisher = None
         task_id = self._application_generate_entity.task_id
@@ -294,7 +294,7 @@
         if not self._workflow_run_id:
             raise ValueError("workflow run not initialized.")

-    def _ensure_graph_runtime_initialized(self, graph_runtime_state: Optional[GraphRuntimeState]) -> GraphRuntimeState:
+    def _ensure_graph_runtime_initialized(self, graph_runtime_state: GraphRuntimeState | None) -> GraphRuntimeState:
         """Fluent validation for graph runtime state."""
         if not graph_runtime_state:
             raise ValueError("graph runtime state not initialized.")
@@ -411,8 +411,8 @@
         self,
         event: QueueTextChunkEvent,
         *,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle text chunk events."""
@@ -538,8 +538,8 @@
         self,
         event: QueueWorkflowSucceededEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow succeeded events."""
@@ -569,8 +569,8 @@
         self,
         event: QueueWorkflowPartialSuccessEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow partial success events."""
@@ -601,8 +601,8 @@
         self,
         event: QueueWorkflowFailedEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle workflow failed events."""
@@ -636,8 +636,8 @@
         self,
         event: QueueStopEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        trace_manager: TraceQueueManager | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle stop events."""
@@ -677,7 +677,7 @@
         self,
         event: QueueAdvancedChatMessageEndEvent,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
         **kwargs,
     ) -> Generator[StreamResponse, None, None]:
         """Handle advanced chat message end events."""
@@ -775,10 +775,10 @@
         self,
         event: Any,
         *,
-        graph_runtime_state: Optional[GraphRuntimeState] = None,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
-        queue_message: Optional[Union[WorkflowQueueMessage, MessageQueueMessage]] = None,
+        graph_runtime_state: GraphRuntimeState | None = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        trace_manager: TraceQueueManager | None = None,
+        queue_message: Union[WorkflowQueueMessage, MessageQueueMessage] | None = None,
     ) -> Generator[StreamResponse, None, None]:
         """Dispatch events using elegant pattern matching."""
         handlers = self._get_event_handlers()
@@ -830,15 +830,15 @@

     def _process_stream_response(
         self,
-        tts_publisher: Optional[AppGeneratorTTSPublisher] = None,
-        trace_manager: Optional[TraceQueueManager] = None,
+        tts_publisher: AppGeneratorTTSPublisher | None = None,
+        trace_manager: TraceQueueManager | None = None,
     ) -> Generator[StreamResponse, None, None]:
         """
         Process stream response using elegant Fluent Python patterns.
         Maintains exact same functionality as original 57-if-statement version.
         """
         # Initialize graph runtime state
-        graph_runtime_state: Optional[GraphRuntimeState] = None
+        graph_runtime_state: GraphRuntimeState | None = None

         for queue_message in self._base_task_pipeline.queue_manager.listen():
             event = queue_message.event
@@ -888,7 +888,7 @@
         if self._conversation_name_generate_thread:
             self._conversation_name_generate_thread.join()

-    def _save_message(self, *, session: Session, graph_runtime_state: Optional[GraphRuntimeState] = None):
+    def _save_message(self, *, session: Session, graph_runtime_state: GraphRuntimeState | None = None):
         message = self._get_message(session=session)

         # If there are assistant files, remove markdown image links from answer
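Note: the pipeline's dispatcher looks up a handler per event type (`handlers = self._get_event_handlers()`) and forwards a uniform set of keyword-only context objects; because every handler accepts `**kwargs`, no handler has to declare arguments it does not use. A stripped-down sketch of that dispatch shape, with invented event classes:

```python
from collections.abc import Callable, Generator

class TextChunkEvent:  # invented stand-in event types
    text = "hello"

class StopEvent:
    pass

Handler = Callable[..., Generator[str, None, None]]

class Pipeline:
    def _handle_text_chunk(self, event: TextChunkEvent, **kwargs) -> Generator[str, None, None]:
        yield f"chunk: {event.text}"

    def _handle_stop(self, event: StopEvent, **kwargs) -> Generator[str, None, None]:
        yield "stopped"

    def _get_event_handlers(self) -> dict[type, Handler]:
        return {TextChunkEvent: self._handle_text_chunk, StopEvent: self._handle_stop}

    def dispatch(self, event: object, **context) -> Generator[str, None, None]:
        handler = self._get_event_handlers().get(type(event))
        if handler is None:
            return  # unknown event types fall through
        # Every handler takes **kwargs, so one uniform context dict
        # can be forwarded without inspecting each signature.
        yield from handler(event, **context)

p = Pipeline()
print(list(p.dispatch(TextChunkEvent(), trace_manager=None)))  # ['chunk: hello']
```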
@@ -1,6 +1,6 @@
 import uuid
 from collections.abc import Mapping
-from typing import Any, Optional, cast
+from typing import Any, cast

 from core.agent.entities import AgentEntity
 from core.app.app_config.base_app_config_manager import BaseAppConfigManager
@@ -30,7 +30,7 @@ class AgentChatAppConfig(EasyUIBasedAppConfig):
     Agent Chatbot App Config Entity.
     """

-    agent: Optional[AgentEntity] = None
+    agent: AgentEntity | None = None


 class AgentChatAppConfigManager(BaseAppConfigManager):
@@ -39,8 +39,8 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
         cls,
         app_model: App,
         app_model_config: AppModelConfig,
-        conversation: Optional[Conversation] = None,
-        override_config_dict: Optional[dict] = None,
+        conversation: Conversation | None = None,
+        override_config_dict: dict | None = None,
     ) -> AgentChatAppConfig:
         """
         Convert app model config to agent chat app config
@@ -1,5 +1,5 @@
 from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Optional, Union, final
+from typing import TYPE_CHECKING, Any, Union, final

 from sqlalchemy.orm import Session

@@ -24,7 +24,7 @@ class BaseAppGenerator:
     def _prepare_user_inputs(
         self,
         *,
-        user_inputs: Optional[Mapping[str, Any]],
+        user_inputs: Mapping[str, Any] | None,
         variables: Sequence["VariableEntity"],
         tenant_id: str,
         strict_type_validation: bool = False,
@@ -2,7 +2,7 @@ import queue
 import time
 from abc import abstractmethod
 from enum import IntEnum, auto
-from typing import Any, Optional
+from typing import Any

 from sqlalchemy.orm import DeclarativeMeta

@@ -116,7 +116,7 @@ class AppQueueManager:
         Set task stop flag
         :return:
         """
-        result: Optional[Any] = redis_client.get(cls._generate_task_belong_cache_key(task_id))
+        result: Any | None = redis_client.get(cls._generate_task_belong_cache_key(task_id))
         if result is None:
             return

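Note: one conversion worth flagging: `Any | None` is a faithful mechanical rewrite of `Optional[Any]`, but to a type checker both effectively collapse to plain `Any`, since `Any` already admits `None`. The annotation documents intent ("may be absent") more than it constrains anything:

```python
from typing import Any

# To a type checker these annotations are effectively equivalent;
# only the human reader sees a difference.
a: Any = None
b: Any | None = None  # Optional[Any], spelled the PEP 604 way
```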
@@ -1,7 +1,7 @@
 import logging
 import time
 from collections.abc import Generator, Mapping, Sequence
-from typing import TYPE_CHECKING, Any, Optional, Union
+from typing import TYPE_CHECKING, Any, Union

 from core.app.app_config.entities import ExternalDataVariableEntity, PromptTemplateEntity
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
@@ -82,11 +82,11 @@ class AppRunner:
         prompt_template_entity: PromptTemplateEntity,
         inputs: Mapping[str, str],
         files: Sequence["File"],
-        query: Optional[str] = None,
-        context: Optional[str] = None,
-        memory: Optional[TokenBufferMemory] = None,
-        image_detail_config: Optional[ImagePromptMessageContent.DETAIL] = None,
-    ) -> tuple[list[PromptMessage], Optional[list[str]]]:
+        query: str | None = None,
+        context: str | None = None,
+        memory: TokenBufferMemory | None = None,
+        image_detail_config: ImagePromptMessageContent.DETAIL | None = None,
+    ) -> tuple[list[PromptMessage], list[str] | None]:
         """
         Organize prompt messages
         :param context:
@@ -161,7 +161,7 @@
         prompt_messages: list,
         text: str,
         stream: bool,
-        usage: Optional[LLMUsage] = None,
+        usage: LLMUsage | None = None,
     ):
         """
         Direct output
@@ -375,7 +375,7 @@

     def query_app_annotations_to_reply(
         self, app_record: App, message: Message, query: str, user_id: str, invoke_from: InvokeFrom
-    ) -> Optional[MessageAnnotation]:
+    ) -> MessageAnnotation | None:
         """
         Query app annotations to reply
         :param app_record: app record
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from core.app.app_config.base_app_config_manager import BaseAppConfigManager
 from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
 from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -32,8 +30,8 @@ class ChatAppConfigManager(BaseAppConfigManager):
         cls,
         app_model: App,
         app_model_config: AppModelConfig,
-        conversation: Optional[Conversation] = None,
-        override_config_dict: Optional[dict] = None,
+        conversation: Conversation | None = None,
+        override_config_dict: dict | None = None,
     ) -> ChatAppConfig:
         """
         Convert app model config to chat app config
@@ -1,7 +1,7 @@
 import time
 from collections.abc import Mapping, Sequence
 from datetime import UTC, datetime
-from typing import Any, Optional, Union, cast
+from typing import Any, Union, cast

 from sqlalchemy.orm import Session

@@ -140,7 +140,7 @@ class WorkflowResponseConverter:
         event: QueueNodeStartedEvent,
         task_id: str,
         workflow_node_execution: WorkflowNodeExecution,
-    ) -> Optional[NodeStartStreamResponse]:
+    ) -> NodeStartStreamResponse | None:
         if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
             return None
         if not workflow_node_execution.workflow_execution_id:
@@ -190,7 +190,7 @@
         | QueueNodeExceptionEvent,
         task_id: str,
         workflow_node_execution: WorkflowNodeExecution,
-    ) -> Optional[NodeFinishStreamResponse]:
+    ) -> NodeFinishStreamResponse | None:
         if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
             return None
         if not workflow_node_execution.workflow_execution_id:
@@ -235,7 +235,7 @@
         event: QueueNodeRetryEvent,
         task_id: str,
         workflow_node_execution: WorkflowNodeExecution,
-    ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
+    ) -> Union[NodeRetryStreamResponse, NodeFinishStreamResponse] | None:
         if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
             return None
         if not workflow_node_execution.workflow_execution_id:
@@ -1,5 +1,3 @@
-from typing import Optional
-
 from core.app.app_config.base_app_config_manager import BaseAppConfigManager
 from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
 from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -24,7 +22,7 @@ class CompletionAppConfig(EasyUIBasedAppConfig):
 class CompletionAppConfigManager(BaseAppConfigManager):
     @classmethod
     def get_app_config(
-        cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: Optional[dict] = None
+        cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: dict | None = None
     ) -> CompletionAppConfig:
         """
         Convert app model config to completion app config
Some files were not shown because too many files have changed in this diff.