Merge branch 'main' into jzh

This commit is contained in:
JzoNg 2026-03-16 15:43:20 +08:00
commit dd0dee739d
69 changed files with 5358 additions and 1316 deletions

View File

@@ -187,53 +187,12 @@ const Template = useMemo(() => {
**When**: Component directly handles API calls, data transformation, or complex async operations.
**Dify Convention**: Use `@tanstack/react-query` hooks from `web/service/use-*.ts` or create custom data hooks.
```typescript
// ❌ Before: API logic in component
const MCPServiceCard = () => {
const [basicAppConfig, setBasicAppConfig] = useState({})
useEffect(() => {
if (isBasicApp && appId) {
(async () => {
const res = await fetchAppDetail({ url: '/apps', id: appId })
setBasicAppConfig(res?.model_config || {})
})()
}
}, [appId, isBasicApp])
// More API-related logic...
}
// ✅ After: Extract to data hook using React Query
// use-app-config.ts
import { useQuery } from '@tanstack/react-query'
import { get } from '@/service/base'
const NAME_SPACE = 'appConfig'
export const useAppConfig = (appId: string, isBasicApp: boolean) => {
return useQuery({
enabled: isBasicApp && !!appId,
queryKey: [NAME_SPACE, 'detail', appId],
queryFn: () => get<AppDetailResponse>(`/apps/${appId}`),
select: data => data?.model_config || {},
})
}
// Component becomes cleaner
const MCPServiceCard = () => {
const { data: config, isLoading } = useAppConfig(appId, isBasicApp)
// UI only
}
```
**React Query Best Practices in Dify**:
- Define `NAME_SPACE` for query key organization
- Use `enabled` option for conditional fetching
- Use `select` for data transformation
- Export invalidation hooks: `useInvalidXxx`
**Dify Convention**:
- This skill is for component decomposition, not query/mutation design.
- When refactoring data fetching, follow `web/AGENTS.md`.
- Use `frontend-query-mutation` for contracts, query shape, data-fetching wrappers, query/mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
- Do not introduce deprecated `useInvalid` / `useReset`.
- Do not add thin passthrough `useQuery` wrappers during refactoring; only extract a custom hook when it truly orchestrates multiple queries/mutations or shared derived state.
**Dify Examples**:
- `web/service/use-workflow.ts`

View File

@@ -155,48 +155,14 @@ const Configuration: FC = () => {
## Common Hook Patterns in Dify
### 1. Data Fetching Hook (React Query)
### 1. Data Fetching / Mutation Hooks
```typescript
// Pattern: Use @tanstack/react-query for data fetching
import { useQuery, useQueryClient } from '@tanstack/react-query'
import { get } from '@/service/base'
import { useInvalid } from '@/service/use-base'
When hook extraction touches query or mutation code, do not use this reference as the source of truth for data-layer patterns.
const NAME_SPACE = 'appConfig'
// Query keys for cache management
export const appConfigQueryKeys = {
detail: (appId: string) => [NAME_SPACE, 'detail', appId] as const,
}
// Main data hook
export const useAppConfig = (appId: string) => {
return useQuery({
enabled: !!appId,
queryKey: appConfigQueryKeys.detail(appId),
queryFn: () => get<AppDetailResponse>(`/apps/${appId}`),
select: data => data?.model_config || null,
})
}
// Invalidation hook for refreshing data
export const useInvalidAppConfig = () => {
return useInvalid([NAME_SPACE])
}
// Usage in component
const Component = () => {
const { data: config, isLoading, error, refetch } = useAppConfig(appId)
const invalidAppConfig = useInvalidAppConfig()
const handleRefresh = () => {
invalidAppConfig() // Invalidates cache and triggers refetch
}
return <div>...</div>
}
```
- Follow `web/AGENTS.md` first.
- Use `frontend-query-mutation` for contracts, query shape, data-fetching wrappers, query/mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
- Do not introduce deprecated `useInvalid` / `useReset`.
- Do not extract thin passthrough `useQuery` hooks; only extract orchestration hooks.
### 2. Form State Hook

View File

@@ -0,0 +1,44 @@
---
name: frontend-query-mutation
description: Guide for implementing Dify frontend query and mutation patterns with TanStack Query and oRPC. Trigger when creating or updating contracts in web/contract, wiring router composition, consuming consoleQuery or marketplaceQuery in components or services, deciding whether to call queryOptions() directly or extract a helper or use-* hook, handling conditional queries, cache invalidation, mutation error handling, or migrating legacy service calls to contract-first query and mutation helpers.
---
# Frontend Query & Mutation
## Intent
- Keep contract as the single source of truth in `web/contract/*`.
- Prefer contract-shaped `queryOptions()` and `mutationOptions()`.
- Keep invalidation and mutation flow knowledge in the service layer.
- Keep abstractions minimal to preserve TypeScript inference.
## Workflow
1. Identify the change surface.
- Read `references/contract-patterns.md` for contract files, router composition, client helpers, and query or mutation call-site shape.
- Read `references/runtime-rules.md` for conditional queries, invalidation, error handling, and legacy migrations.
- Read both references when a task spans contract shape and runtime behavior.
2. Implement the smallest abstraction that fits the task.
- Default to direct `useQuery(...)` or `useMutation(...)` calls with oRPC helpers at the call site.
- Extract a small shared query helper only when multiple call sites share the same extra options.
- Create `web/service/use-{domain}.ts` only for orchestration or shared domain behavior.
3. Preserve Dify conventions.
- Keep contract inputs in `{ params, query?, body? }` shape.
- Bind invalidation in the service-layer mutation definition.
- Prefer `mutate(...)`; use `mutateAsync(...)` only when Promise semantics are required.
## Files Commonly Touched
- `web/contract/console/*.ts`
- `web/contract/marketplace.ts`
- `web/contract/router.ts`
- `web/service/client.ts`
- `web/service/use-*.ts`
- component and hook call sites using `consoleQuery` or `marketplaceQuery`
## References
- Use `references/contract-patterns.md` for contract shape, router registration, query and mutation helpers, and anti-patterns that degrade inference.
- Use `references/runtime-rules.md` for conditional queries, invalidation, `mutate` versus `mutateAsync`, and legacy migration rules.
Treat this skill as the single query and mutation entry point for Dify frontend work. Keep detailed rules in the reference files instead of duplicating them in project docs.

View File

@@ -0,0 +1,4 @@
interface:
display_name: "Frontend Query & Mutation"
short_description: "Dify TanStack Query and oRPC patterns"
default_prompt: "Use this skill when implementing or reviewing Dify frontend contracts, query and mutation call sites, conditional queries, invalidation, or legacy query/mutation migrations."

View File

@@ -0,0 +1,98 @@
# Contract Patterns
## Table of Contents
- Intent
- Minimal structure
- Core workflow
- Query usage decision rule
- Mutation usage decision rule
- Anti-patterns
- Contract rules
- Type export
## Intent
- Keep contract as the single source of truth in `web/contract/*`.
- Default query usage to call-site `useQuery(consoleQuery|marketplaceQuery.xxx.queryOptions(...))` when endpoint behavior maps 1:1 to the contract.
- Keep abstractions minimal and preserve TypeScript inference.
## Minimal Structure
```text
web/contract/
├── base.ts
├── router.ts
├── marketplace.ts
└── console/
├── billing.ts
└── ...other domains
web/service/client.ts
```
## Core Workflow
1. Define contract in `web/contract/console/{domain}.ts` or `web/contract/marketplace.ts`.
- Use `base.route({...}).output(type<...>())` as the baseline.
- Add `.input(type<...>())` only when the request has `params`, `query`, or `body`.
- For `GET` without input, omit `.input(...)`; do not use `.input(type<unknown>())`.
2. Register contract in `web/contract/router.ts`.
- Import directly from domain files and nest by API prefix.
3. Consume from UI call sites via oRPC query utilities.
```typescript
import { useQuery } from '@tanstack/react-query'
import { consoleQuery } from '@/service/client'
const invoiceQuery = useQuery(consoleQuery.billing.invoices.queryOptions({
staleTime: 5 * 60 * 1000,
throwOnError: true,
select: invoice => invoice.url,
}))
```
## Query Usage Decision Rule
1. Default to direct `*.queryOptions(...)` usage at the call site.
2. If 3 or more call sites share the same extra options, extract a small query helper, not a `use-*` passthrough hook.
3. Create `web/service/use-{domain}.ts` only for orchestration.
- Combine multiple queries or mutations.
- Share domain-level derived state or invalidation helpers.
```typescript
const invoicesBaseQueryOptions = () =>
consoleQuery.billing.invoices.queryOptions({ retry: false })
const invoiceQuery = useQuery({
...invoicesBaseQueryOptions(),
throwOnError: true,
})
```
## Mutation Usage Decision Rule
1. Default to mutation helpers from `consoleQuery` or `marketplaceQuery`, for example `useMutation(consoleQuery.billing.bindPartnerStack.mutationOptions(...))`.
2. If the mutation flow is heavily custom, use oRPC clients as `mutationFn`, for example `consoleClient.xxx` or `marketplaceClient.xxx`, instead of handwritten non-oRPC mutation logic.
## Anti-Patterns
- Do not wrap `useQuery` with `options?: Partial<UseQueryOptions>`.
- Do not split local `queryKey` and `queryFn` when oRPC `queryOptions` already exists and fits the use case.
- Do not create thin `use-*` passthrough hooks for a single endpoint.
- These patterns can degrade inference, especially around `throwOnError` and `select`, and add unnecessary indirection.
## Contract Rules
- Input structure: always use `{ params, query?, body? }`.
- No-input `GET`: omit `.input(...)`; do not use `.input(type<unknown>())`.
- Path params: use `{paramName}` in the path and match it in the `params` object.
- Router nesting: group by API prefix, for example `/billing/*` becomes `billing: {}`.
- No barrel files: import directly from specific files.
- Types: import from `@/types/` and use the `type<T>()` helper.
- Mutations: prefer `mutationOptions`; use explicit `mutationKey` mainly for defaults, filtering, and devtools.
## Type Export
```typescript
export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
```

View File

@@ -0,0 +1,133 @@
# Runtime Rules
## Table of Contents
- Conditional queries
- Cache invalidation
- Key API guide
- `mutate` vs `mutateAsync`
- Legacy migration
## Conditional Queries
Prefer contract-shaped `queryOptions(...)`.
When required input is missing, prefer `input: skipToken` instead of placeholder params or non-null assertions.
Use `enabled` only for extra business gating after the input itself is already valid.
```typescript
import { skipToken, useQuery } from '@tanstack/react-query'
// Disable the query by skipping input construction.
function useAccessMode(appId: string | undefined) {
return useQuery(consoleQuery.accessControl.appAccessMode.queryOptions({
input: appId
? { params: { appId } }
: skipToken,
}))
}
// Avoid runtime-only guards that bypass type checking.
function useBadAccessMode(appId: string | undefined) {
return useQuery(consoleQuery.accessControl.appAccessMode.queryOptions({
input: { params: { appId: appId! } },
enabled: !!appId,
}))
}
```
## Cache Invalidation
Bind invalidation in the service-layer mutation definition.
Components may add UI feedback in call-site callbacks, but they should not decide which queries to invalidate.
Use:
- `.key()` for namespace or prefix invalidation
- `.queryKey(...)` only for exact cache reads or writes such as `getQueryData` and `setQueryData`
- `queryClient.invalidateQueries(...)` in mutation `onSuccess`
Do not use deprecated `useInvalid` from `use-base.ts`.
```typescript
// Service layer owns cache invalidation.
export const useUpdateAccessMode = () => {
const queryClient = useQueryClient()
return useMutation(consoleQuery.accessControl.updateAccessMode.mutationOptions({
onSuccess: () => {
queryClient.invalidateQueries({
queryKey: consoleQuery.accessControl.appWhitelistSubjects.key(),
})
},
}))
}
// Component only adds UI behavior.
updateAccessMode({ appId, mode }, {
onSuccess: () => Toast.notify({ type: 'success', message: '...' }),
})
// Avoid putting invalidation knowledge in the component.
mutate({ appId, mode }, {
onSuccess: () => {
queryClient.invalidateQueries({
queryKey: consoleQuery.accessControl.appWhitelistSubjects.key(),
})
},
})
```
## Key API Guide
- `.key(...)`
- Use for partial matching operations.
- Prefer it for invalidation, refetch, and cancel patterns.
- Example: `queryClient.invalidateQueries({ queryKey: consoleQuery.billing.key() })`
- `.queryKey(...)`
- Use for a specific query's full key.
- Prefer it for exact cache addressing and direct reads or writes.
- `.mutationKey(...)`
- Use for a specific mutation's full key.
- Prefer it for mutation defaults registration, mutation-status filtering, and devtools grouping.
## `mutate` vs `mutateAsync`
Prefer `mutate` by default.
Use `mutateAsync` only when Promise semantics are truly required, such as parallel mutations or sequential steps with result dependencies.
Rules:
- Event handlers should usually call `mutate(...)` with `onSuccess` or `onError`.
- Every `await mutateAsync(...)` must be wrapped in `try/catch`.
- Do not use `mutateAsync` when callbacks already express the flow clearly.
```typescript
// Default case.
mutation.mutate(data, {
onSuccess: result => router.push(result.url),
})
// Promise semantics are required.
try {
const order = await createOrder.mutateAsync(orderData)
await confirmPayment.mutateAsync({ orderId: order.id, token })
router.push(`/orders/${order.id}`)
}
catch (error) {
Toast.notify({
type: 'error',
message: error instanceof Error ? error.message : 'Unknown error',
})
}
```
## Legacy Migration
When touching old code, migrate it toward these rules:
| Old pattern | New pattern |
|---|---|
| `useInvalid(key)` in service layer | `queryClient.invalidateQueries(...)` inside mutation `onSuccess` |
| component-triggered invalidation after mutation | move invalidation into the service-layer mutation definition |
| imperative fetch plus manual invalidation | wrap it in `useMutation(...mutationOptions(...))` |
| `await mutateAsync()` without `try/catch` | switch to `mutate(...)` or add `try/catch` |

View File

@@ -1,103 +0,0 @@
---
name: orpc-contract-first
description: Guide for implementing oRPC contract-first API patterns in Dify frontend. Trigger when creating or updating contracts in web/contract, wiring router composition, integrating TanStack Query with typed contracts, migrating legacy service calls to oRPC, or deciding whether to call queryOptions directly vs extracting a helper or use-* hook in web/service.
---
# oRPC Contract-First Development
## Intent
- Keep contract as single source of truth in `web/contract/*`.
- Default query usage: call-site `useQuery(consoleQuery|marketplaceQuery.xxx.queryOptions(...))` when endpoint behavior maps 1:1 to the contract.
- Keep abstractions minimal and preserve TypeScript inference.
## Minimal Structure
```text
web/contract/
├── base.ts
├── router.ts
├── marketplace.ts
└── console/
├── billing.ts
└── ...other domains
web/service/client.ts
```
## Core Workflow
1. Define contract in `web/contract/console/{domain}.ts` or `web/contract/marketplace.ts`
- Use `base.route({...}).output(type<...>())` as baseline.
- Add `.input(type<...>())` only when request has `params/query/body`.
- For `GET` without input, omit `.input(...)` (do not use `.input(type<unknown>())`).
2. Register contract in `web/contract/router.ts`
- Import directly from domain files and nest by API prefix.
3. Consume from UI call sites via oRPC query utils.
```typescript
import { useQuery } from '@tanstack/react-query'
import { consoleQuery } from '@/service/client'
const invoiceQuery = useQuery(consoleQuery.billing.invoices.queryOptions({
staleTime: 5 * 60 * 1000,
throwOnError: true,
select: invoice => invoice.url,
}))
```
## Query Usage Decision Rule
1. Default: call site directly uses `*.queryOptions(...)`.
2. If 3+ call sites share the same extra options (for example `retry: false`), extract a small queryOptions helper, not a `use-*` passthrough hook.
3. Create `web/service/use-{domain}.ts` only for orchestration:
- Combine multiple queries/mutations.
- Share domain-level derived state or invalidation helpers.
```typescript
const invoicesBaseQueryOptions = () =>
consoleQuery.billing.invoices.queryOptions({ retry: false })
const invoiceQuery = useQuery({
...invoicesBaseQueryOptions(),
throwOnError: true,
})
```
## Mutation Usage Decision Rule
1. Default: call mutation helpers from `consoleQuery` / `marketplaceQuery`, for example `useMutation(consoleQuery.billing.bindPartnerStack.mutationOptions(...))`.
2. If mutation flow is heavily custom, use oRPC clients as `mutationFn` (for example `consoleClient.xxx` / `marketplaceClient.xxx`), instead of generic handwritten non-oRPC mutation logic.
## Key API Guide (`.key` vs `.queryKey` vs `.mutationKey`)
- `.key(...)`:
- Use for partial matching operations (recommended for invalidation/refetch/cancel patterns).
- Example: `queryClient.invalidateQueries({ queryKey: consoleQuery.billing.key() })`
- `.queryKey(...)`:
- Use for a specific query's full key (exact query identity / direct cache addressing).
- `.mutationKey(...)`:
- Use for a specific mutation's full key.
- Typical use cases: mutation defaults registration, mutation-status filtering (`useIsMutating`, `queryClient.isMutating`), or explicit devtools grouping.
## Anti-Patterns
- Do not wrap `useQuery` with `options?: Partial<UseQueryOptions>`.
- Do not split local `queryKey/queryFn` when oRPC `queryOptions` already exists and fits the use case.
- Do not create thin `use-*` passthrough hooks for a single endpoint.
- Reason: these patterns can degrade inference (`data` may become `unknown`, especially around `throwOnError`/`select`) and add unnecessary indirection.
## Contract Rules
- **Input structure**: Always use `{ params, query?, body? }` format
- **No-input GET**: Omit `.input(...)`; do not use `.input(type<unknown>())`
- **Path params**: Use `{paramName}` in path, match in `params` object
- **Router nesting**: Group by API prefix (e.g., `/billing/*` -> `billing: {}`)
- **No barrel files**: Import directly from specific files
- **Types**: Import from `@/types/`, use `type<T>()` helper
- **Mutations**: Prefer `mutationOptions`; use explicit `mutationKey` mainly for defaults/filtering/devtools
## Type Export
```typescript
export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
```

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-query-mutation

View File

@@ -1 +0,0 @@
../../.agents/skills/orpc-contract-first

View File

@@ -97,7 +97,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Download nltk data
RUN mkdir -p /usr/local/share/nltk_data \
&& NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; from unstructured.nlp.tokenize import download_nltk_packages; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords'); download_nltk_packages()" \
&& NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \
&& chmod -R 755 /usr/local/share/nltk_data
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache

View File

@@ -1,16 +1,45 @@
import logging
import time
from flask import request
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from controllers.console.error import UnauthorizedAndForceLogout
from core.logging.context import init_request_context
from dify_app import DifyApp
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import LicenseStatus
logger = logging.getLogger(__name__)
# Console bootstrap APIs exempt from license check.
# Defined at module level to avoid per-request tuple construction.
# - system-features: license status for expiry UI (GlobalPublicStoreProvider)
# - setup: install/setup status check (AppInitializer)
# - init: init password validation for fresh install (InitPasswordPopup)
# - login: auto-login after setup completion (InstallForm)
# - features: billing/plan features (ProviderContextProvider)
# - account/profile: login check + user profile (AppContextProvider, useIsLogin)
# - workspaces/current: workspace + model providers (AppContextProvider)
# - version: version check (AppContextProvider)
# - activate/check: invitation link validation (signin page)
# Without these exemptions, the signin page triggers location.reload()
# on unauthorized_and_force_logout, causing an infinite loop.
_CONSOLE_EXEMPT_PREFIXES = (
"/console/api/system-features",
"/console/api/setup",
"/console/api/init",
"/console/api/login",
"/console/api/features",
"/console/api/account/profile",
"/console/api/workspaces/current",
"/console/api/version",
"/console/api/activate/check",
)
# ----------------------------
# Application Factory Function
@@ -31,6 +60,39 @@ def create_flask_app_with_configs() -> DifyApp:
init_request_context()
RecyclableContextVar.increment_thread_recycles()
# Enterprise license validation for API endpoints (both console and webapp)
# When license expires, block all API access except bootstrap endpoints needed
# for the frontend to load the license expiration page without infinite reloads.
if dify_config.ENTERPRISE_ENABLED:
is_console_api = request.path.startswith("/console/api/")
is_webapp_api = request.path.startswith("/api/")
if is_console_api or is_webapp_api:
if is_console_api:
is_exempt = any(request.path.startswith(p) for p in _CONSOLE_EXEMPT_PREFIXES)
else: # webapp API
is_exempt = request.path.startswith("/api/system-features")
if not is_exempt:
try:
# Check license status (cached — see EnterpriseService for TTL details)
license_status = EnterpriseService.get_cached_license_status()
if license_status in (LicenseStatus.INACTIVE, LicenseStatus.EXPIRED, LicenseStatus.LOST):
raise UnauthorizedAndForceLogout(
f"Enterprise license is {license_status}. Please contact your administrator."
)
if license_status is None:
raise UnauthorizedAndForceLogout(
"Unable to verify enterprise license. Please contact your administrator."
)
except UnauthorizedAndForceLogout:
raise
except Exception:
logger.exception("Failed to check enterprise license status")
raise UnauthorizedAndForceLogout(
"Unable to verify enterprise license. Please contact your administrator."
)
# add after request hook for injecting trace headers from OpenTelemetry span context
# Only adds headers when OTEL is enabled and has valid context
@dify_app.after_request

View File

@@ -23,7 +23,7 @@ from dify_graph.variables.types import SegmentType
from extensions.ext_database import db
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import login_required
from libs.login import current_user, login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@@ -100,6 +100,18 @@ def _serialize_full_content(variable: WorkflowDraftVariable) -> dict | None:
}
def _ensure_variable_access(
variable: WorkflowDraftVariable | None,
app_id: str,
variable_id: str,
) -> WorkflowDraftVariable:
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_id or variable.user_id != current_user.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"id": fields.String,
"type": fields.String(attribute=lambda model: model.get_variable_type()),
@@ -238,6 +250,7 @@ class WorkflowVariableCollectionApi(Resource):
app_id=app_model.id,
page=args.page,
limit=args.limit,
user_id=current_user.id,
)
return workflow_vars
@@ -250,7 +263,7 @@
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
draft_var_srv.delete_workflow_variables(app_model.id)
draft_var_srv.delete_user_workflow_variables(app_model.id, user_id=current_user.id)
db.session.commit()
return Response("", 204)
@@ -287,7 +300,7 @@ class NodeVariableCollectionApi(Resource):
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
node_vars = draft_var_srv.list_node_variables(app_model.id, node_id)
node_vars = draft_var_srv.list_node_variables(app_model.id, node_id, user_id=current_user.id)
return node_vars
@@ -298,7 +311,7 @@
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
srv = WorkflowDraftVariableService(db.session())
srv.delete_node_variables(app_model.id, node_id)
srv.delete_node_variables(app_model.id, node_id, user_id=current_user.id)
db.session.commit()
return Response("", 204)
@@ -319,11 +332,11 @@ class VariableApi(Resource):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
variable = _ensure_variable_access(
variable=draft_var_srv.get_variable(variable_id=variable_id),
app_id=app_model.id,
variable_id=variable_id,
)
return variable
@console_ns.doc("update_variable")
@@ -360,11 +373,11 @@ class VariableApi(Resource):
)
args_model = WorkflowDraftVariableUpdatePayload.model_validate(console_ns.payload or {})
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
variable = _ensure_variable_access(
variable=draft_var_srv.get_variable(variable_id=variable_id),
app_id=app_model.id,
variable_id=variable_id,
)
new_name = args_model.name
raw_value = args_model.value
@@ -397,11 +410,11 @@ class VariableApi(Resource):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
variable = _ensure_variable_access(
variable=draft_var_srv.get_variable(variable_id=variable_id),
app_id=app_model.id,
variable_id=variable_id,
)
draft_var_srv.delete_variable(variable)
db.session.commit()
return Response("", 204)
@@ -427,11 +440,11 @@ class VariableResetApi(Resource):
raise NotFoundError(
f"Draft workflow not found, app_id={app_model.id}",
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
variable = _ensure_variable_access(
variable=draft_var_srv.get_variable(variable_id=variable_id),
app_id=app_model.id,
variable_id=variable_id,
)
resetted = draft_var_srv.reset_variable(draft_workflow, variable)
db.session.commit()
@@ -447,11 +460,15 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
session=session,
)
if node_id == CONVERSATION_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_conversation_variables(app_model.id)
draft_vars = draft_var_srv.list_conversation_variables(app_model.id, user_id=current_user.id)
elif node_id == SYSTEM_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_system_variables(app_model.id)
draft_vars = draft_var_srv.list_system_variables(app_model.id, user_id=current_user.id)
else:
draft_vars = draft_var_srv.list_node_variables(app_id=app_model.id, node_id=node_id)
draft_vars = draft_var_srv.list_node_variables(
app_id=app_model.id,
node_id=node_id,
user_id=current_user.id,
)
return draft_vars
@@ -472,7 +489,7 @@ class ConversationVariableCollectionApi(Resource):
if draft_workflow is None:
raise NotFoundError(description=f"draft workflow not found, id={app_model.id}")
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)
draft_var_srv.prefill_conversation_variable_default_values(draft_workflow, user_id=current_user.id)
db.session.commit()
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)

View File

@@ -102,6 +102,7 @@ class RagPipelineVariableCollectionApi(Resource):
app_id=pipeline.id,
page=query.page,
limit=query.limit,
user_id=current_user.id,
)
return workflow_vars
@@ -111,7 +112,7 @@ class RagPipelineVariableCollectionApi(Resource):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
draft_var_srv.delete_workflow_variables(pipeline.id)
draft_var_srv.delete_user_workflow_variables(pipeline.id, user_id=current_user.id)
db.session.commit()
return Response("", 204)
@@ -144,7 +145,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
node_vars = draft_var_srv.list_node_variables(pipeline.id, node_id)
node_vars = draft_var_srv.list_node_variables(pipeline.id, node_id, user_id=current_user.id)
return node_vars
@ -152,7 +153,7 @@ class RagPipelineNodeVariableCollectionApi(Resource):
def delete(self, pipeline: Pipeline, node_id: str):
validate_node_id(node_id)
srv = WorkflowDraftVariableService(db.session())
srv.delete_node_variables(pipeline.id, node_id)
srv.delete_node_variables(pipeline.id, node_id, user_id=current_user.id)
db.session.commit()
return Response("", 204)
@@ -283,11 +284,11 @@ def _get_variable_list(pipeline: Pipeline, node_id) -> WorkflowDraftVariableList
session=session,
)
if node_id == CONVERSATION_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_conversation_variables(pipeline.id)
draft_vars = draft_var_srv.list_conversation_variables(pipeline.id, user_id=current_user.id)
elif node_id == SYSTEM_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_system_variables(pipeline.id)
draft_vars = draft_var_srv.list_system_variables(pipeline.id, user_id=current_user.id)
else:
draft_vars = draft_var_srv.list_node_variables(app_id=pipeline.id, node_id=node_id)
draft_vars = draft_var_srv.list_node_variables(app_id=pipeline.id, node_id=node_id, user_id=current_user.id)
return draft_vars

View File

@@ -330,9 +330,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
return self._generate(
workflow=workflow,
@@ -413,9 +414,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
return self._generate(
workflow=workflow,

View File

@ -419,11 +419,12 @@ class PipelineGenerator(BaseAppGenerator):
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
return self._generate(
@ -514,11 +515,12 @@ class PipelineGenerator(BaseAppGenerator):
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
return self._generate(

View File

@ -414,11 +414,12 @@ class WorkflowAppGenerator(BaseAppGenerator):
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
return self._generate(
@ -497,11 +498,12 @@ class WorkflowAppGenerator(BaseAppGenerator):
triggered_from=WorkflowNodeExecutionTriggeredFrom.SINGLE_STEP,
)
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user.id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=application_generate_entity.app_config.app_id,
tenant_id=application_generate_entity.app_config.tenant_id,
user_id=user.id,
)
return self._generate(
app_model=app_model,

View File

@ -8,6 +8,7 @@ document embeddings used in retrieval-augmented generation workflows.
import datetime
import json
import logging
import threading
import uuid as _uuid
from typing import Any
from urllib.parse import urlparse
@ -32,6 +33,9 @@ from models.dataset import Dataset
logger = logging.getLogger(__name__)
_weaviate_client: weaviate.WeaviateClient | None = None
_weaviate_client_lock = threading.Lock()
class WeaviateConfig(BaseModel):
"""
@ -99,43 +103,52 @@ class WeaviateVector(BaseVector):
Configures both HTTP and gRPC connections with proper authentication.
"""
p = urlparse(config.endpoint)
host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "")
http_secure = p.scheme == "https"
http_port = p.port or (443 if http_secure else 80)
global _weaviate_client
if _weaviate_client and _weaviate_client.is_ready():
return _weaviate_client
# Parse gRPC configuration
if config.grpc_endpoint:
# Urls without scheme won't be parsed correctly in some python versions,
# see https://bugs.python.org/issue27657
grpc_endpoint_with_scheme = (
config.grpc_endpoint if "://" in config.grpc_endpoint else f"grpc://{config.grpc_endpoint}"
with _weaviate_client_lock:
if _weaviate_client and _weaviate_client.is_ready():
return _weaviate_client
p = urlparse(config.endpoint)
host = p.hostname or config.endpoint.replace("https://", "").replace("http://", "")
http_secure = p.scheme == "https"
http_port = p.port or (443 if http_secure else 80)
# Parse gRPC configuration
if config.grpc_endpoint:
# Urls without scheme won't be parsed correctly in some python versions,
# see https://bugs.python.org/issue27657
grpc_endpoint_with_scheme = (
config.grpc_endpoint if "://" in config.grpc_endpoint else f"grpc://{config.grpc_endpoint}"
)
grpc_p = urlparse(grpc_endpoint_with_scheme)
grpc_host = grpc_p.hostname or "localhost"
grpc_port = grpc_p.port or (443 if grpc_p.scheme == "grpcs" else 50051)
grpc_secure = grpc_p.scheme == "grpcs"
else:
# Infer from HTTP endpoint as fallback
grpc_host = host
grpc_secure = http_secure
grpc_port = 443 if grpc_secure else 50051
client = weaviate.connect_to_custom(
http_host=host,
http_port=http_port,
http_secure=http_secure,
grpc_host=grpc_host,
grpc_port=grpc_port,
grpc_secure=grpc_secure,
auth_credentials=Auth.api_key(config.api_key) if config.api_key else None,
skip_init_checks=True, # Skip PyPI version check to avoid unnecessary HTTP requests
)
grpc_p = urlparse(grpc_endpoint_with_scheme)
grpc_host = grpc_p.hostname or "localhost"
grpc_port = grpc_p.port or (443 if grpc_p.scheme == "grpcs" else 50051)
grpc_secure = grpc_p.scheme == "grpcs"
else:
# Infer from HTTP endpoint as fallback
grpc_host = host
grpc_secure = http_secure
grpc_port = 443 if grpc_secure else 50051
client = weaviate.connect_to_custom(
http_host=host,
http_port=http_port,
http_secure=http_secure,
grpc_host=grpc_host,
grpc_port=grpc_port,
grpc_secure=grpc_secure,
auth_credentials=Auth.api_key(config.api_key) if config.api_key else None,
skip_init_checks=True, # Skip PyPI version check to avoid unnecessary HTTP requests
)
if not client.is_ready():
raise ConnectionError("Vector database is not ready")
if not client.is_ready():
raise ConnectionError("Vector database is not ready")
return client
_weaviate_client = client
return client
def get_type(self) -> str:
"""Returns the vector database type identifier."""

View File

@ -0,0 +1,69 @@
"""add user_id and switch workflow_draft_variables unique key to user scope
Revision ID: 6b5f9f8b1a2c
Revises: 0ec65df55790
Create Date: 2026-03-04 16:00:00.000000
"""
import sqlalchemy as sa
from alembic import op
import models as models
# revision identifiers, used by Alembic.
revision = "6b5f9f8b1a2c"
down_revision = "0ec65df55790"
branch_labels = None
depends_on = None
def _is_pg(conn) -> bool:
return conn.dialect.name == "postgresql"
def upgrade():
    """Add a nullable ``user_id`` column to ``workflow_draft_variables`` and
    replace the app-scoped unique key with a user-scoped unique index on
    (app_id, user_id, node_id, name).
    """
    conn = op.get_bind()
    table_name = "workflow_draft_variables"

    # Column is added nullable: existing rows have no owner yet (the model
    # documents that it will be tightened to NOT NULL in a follow-up release).
    with op.batch_alter_table(table_name, schema=None) as batch_op:
        batch_op.add_column(sa.Column("user_id", models.types.StringUUID(), nullable=True))

    if _is_pg(conn):
        # CREATE INDEX CONCURRENTLY cannot run inside a transaction block on
        # PostgreSQL, hence the autocommit context; it avoids taking a long
        # write lock on a potentially large table.
        with op.get_context().autocommit_block():
            op.create_index(
                "workflow_draft_variables_app_id_user_id_key",
                "workflow_draft_variables",
                ["app_id", "user_id", "node_id", "name"],
                unique=True,
                postgresql_concurrently=True,
            )
    else:
        # Other dialects: plain (transactional) index creation.
        op.create_index(
            "workflow_draft_variables_app_id_user_id_key",
            "workflow_draft_variables",
            ["app_id", "user_id", "node_id", "name"],
            unique=True,
        )

    # Drop the old (app_id, node_id, name) unique constraint only after the
    # replacement index exists, so uniqueness is enforced at all times.
    with op.batch_alter_table(table_name, schema=None) as batch_op:
        batch_op.drop_constraint(op.f("workflow_draft_variables_app_id_key"), type_="unique")
def downgrade():
    """Restore the app-scoped unique key and drop the ``user_id`` column."""
    conn = op.get_bind()

    # NOTE(review): re-creating the (app_id, node_id, name) unique constraint
    # can fail if multiple users hold drafts of the same variable after the
    # upgrade — confirm the downgrade path handles or documents this.
    with op.batch_alter_table("workflow_draft_variables", schema=None) as batch_op:
        batch_op.create_unique_constraint(
            op.f("workflow_draft_variables_app_id_key"),
            ["app_id", "node_id", "name"],
        )

    if _is_pg(conn):
        # DROP INDEX CONCURRENTLY must also run outside a transaction,
        # mirroring the autocommit block used in upgrade().
        with op.get_context().autocommit_block():
            op.drop_index("workflow_draft_variables_app_id_user_id_key", postgresql_concurrently=True)
    else:
        op.drop_index("workflow_draft_variables_app_id_user_id_key", table_name="workflow_draft_variables")

    with op.batch_alter_table("workflow_draft_variables", schema=None) as batch_op:
        batch_op.drop_column("user_id")

View File

@ -1286,16 +1286,17 @@ class WorkflowDraftVariable(Base):
"""
@staticmethod
def unique_app_id_node_id_name() -> list[str]:
def unique_app_id_user_id_node_id_name() -> list[str]:
return [
"app_id",
"user_id",
"node_id",
"name",
]
__tablename__ = "workflow_draft_variables"
__table_args__ = (
UniqueConstraint(*unique_app_id_node_id_name()),
UniqueConstraint(*unique_app_id_user_id_node_id_name()),
Index("workflow_draft_variable_file_id_idx", "file_id"),
)
# Required for instance variable annotation.
@ -1321,6 +1322,11 @@ class WorkflowDraftVariable(Base):
# "`app_id` maps to the `id` field in the `model.App` model."
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# Owner of this draft variable.
#
# This field is nullable during migration and will be migrated to NOT NULL
# in a follow-up release.
user_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True, default=None)
# `last_edited_at` records when the value of a given draft variable
# is edited.
@ -1573,6 +1579,7 @@ class WorkflowDraftVariable(Base):
cls,
*,
app_id: str,
user_id: str | None,
node_id: str,
name: str,
value: Segment,
@ -1586,6 +1593,7 @@ class WorkflowDraftVariable(Base):
variable.updated_at = naive_utc_now()
variable.description = description
variable.app_id = app_id
variable.user_id = user_id
variable.node_id = node_id
variable.name = name
variable.set_value(value)
@ -1599,12 +1607,14 @@ class WorkflowDraftVariable(Base):
cls,
*,
app_id: str,
user_id: str | None = None,
name: str,
value: Segment,
description: str = "",
) -> "WorkflowDraftVariable":
variable = cls._new(
app_id=app_id,
user_id=user_id,
node_id=CONVERSATION_VARIABLE_NODE_ID,
name=name,
value=value,
@ -1619,6 +1629,7 @@ class WorkflowDraftVariable(Base):
cls,
*,
app_id: str,
user_id: str | None = None,
name: str,
value: Segment,
node_execution_id: str,
@ -1626,6 +1637,7 @@ class WorkflowDraftVariable(Base):
) -> "WorkflowDraftVariable":
variable = cls._new(
app_id=app_id,
user_id=user_id,
node_id=SYSTEM_VARIABLE_NODE_ID,
name=name,
node_execution_id=node_execution_id,
@ -1639,6 +1651,7 @@ class WorkflowDraftVariable(Base):
cls,
*,
app_id: str,
user_id: str | None = None,
node_id: str,
name: str,
value: Segment,
@ -1649,6 +1662,7 @@ class WorkflowDraftVariable(Base):
) -> "WorkflowDraftVariable":
variable = cls._new(
app_id=app_id,
user_id=user_id,
node_id=node_id,
name=name,
node_execution_id=node_execution_id,

View File

@ -87,7 +87,7 @@ dependencies = [
"flask-restx~=1.3.2",
"packaging~=23.2",
"croniter>=6.0.0",
"weaviate-client==4.17.0",
"weaviate-client==4.20.4",
"apscheduler>=3.11.0",
"weave>=0.52.16",
"fastopenapi[flask]>=0.7.0",
@ -202,28 +202,28 @@ tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"]
############################################################
vdb = [
"alibabacloud_gpdb20160503~=3.8.0",
"alibabacloud_tea_openapi~=0.3.9",
"alibabacloud_tea_openapi~=0.4.3",
"chromadb==0.5.20",
"clickhouse-connect~=0.10.0",
"clickhouse-connect~=0.14.1",
"clickzetta-connector-python>=0.8.102",
"couchbase~=4.3.0",
"couchbase~=4.5.0",
"elasticsearch==8.14.0",
"opensearch-py==3.1.0",
"oracledb==3.3.0",
"oracledb==3.4.2",
"pgvecto-rs[sqlalchemy]~=0.2.1",
"pgvector==0.2.5",
"pymilvus~=2.5.0",
"pymochow==2.2.9",
"pgvector==0.4.2",
"pymilvus~=2.6.10",
"pymochow==2.3.6",
"pyobvector~=0.2.17",
"qdrant-client==1.9.0",
"intersystems-irispython>=5.1.0",
"tablestore==6.3.7",
"tcvectordb~=1.6.4",
"tidb-vector==0.0.9",
"upstash-vector==0.6.0",
"tablestore==6.4.1",
"tcvectordb~=2.0.0",
"tidb-vector==0.0.15",
"upstash-vector==0.8.0",
"volcengine-compat~=1.0.0",
"weaviate-client==4.17.0",
"xinference-client~=1.2.2",
"weaviate-client==4.20.4",
"xinference-client~=2.3.1",
"mo-vector~=0.1.13",
"mysql-connector-python>=9.3.0",
"holo-search-sdk>=0.4.1",

View File

@ -304,7 +304,7 @@ class AppDslService:
)
draft_var_srv = WorkflowDraftVariableService(session=self._session)
draft_var_srv.delete_workflow_variables(app_id=app.id)
draft_var_srv.delete_app_workflow_variables(app_id=app.id)
return Import(
id=import_id,
status=status,

View File

@ -6,6 +6,13 @@ from typing import Any
import httpx
from core.helper.trace_id_helper import generate_traceparent_header
from services.errors.enterprise import (
EnterpriseAPIBadRequestError,
EnterpriseAPIError,
EnterpriseAPIForbiddenError,
EnterpriseAPINotFoundError,
EnterpriseAPIUnauthorizedError,
)
logger = logging.getLogger(__name__)
@ -64,10 +71,51 @@ class BaseRequest:
request_kwargs["timeout"] = timeout
response = client.request(method, url, **request_kwargs)
if raise_for_status:
response.raise_for_status()
# Validate HTTP status and raise domain-specific errors
if not response.is_success:
cls._handle_error_response(response)
return response.json()
@classmethod
def _handle_error_response(cls, response: httpx.Response) -> None:
"""
Handle non-2xx HTTP responses by raising appropriate domain errors.
Attempts to extract error message from JSON response body,
falls back to status text if parsing fails.
"""
error_message = f"Enterprise API request failed: {response.status_code} {response.reason_phrase}"
# Try to extract error message from JSON response
try:
error_data = response.json()
if isinstance(error_data, dict):
# Common error response formats:
# {"error": "...", "message": "..."}
# {"message": "..."}
# {"detail": "..."}
error_message = (
error_data.get("message") or error_data.get("error") or error_data.get("detail") or error_message
)
except Exception:
# If JSON parsing fails, use the default message
logger.debug(
"Failed to parse error response from enterprise API (status=%s)", response.status_code, exc_info=True
)
# Raise specific error based on status code
if response.status_code == 400:
raise EnterpriseAPIBadRequestError(error_message)
elif response.status_code == 401:
raise EnterpriseAPIUnauthorizedError(error_message)
elif response.status_code == 403:
raise EnterpriseAPIForbiddenError(error_message)
elif response.status_code == 404:
raise EnterpriseAPINotFoundError(error_message)
else:
raise EnterpriseAPIError(error_message, status_code=response.status_code)
class EnterpriseRequest(BaseRequest):
base_url = os.environ.get("ENTERPRISE_API_URL", "ENTERPRISE_API_URL")

View File

@ -1,15 +1,26 @@
from __future__ import annotations
import logging
import uuid
from datetime import datetime
from typing import TYPE_CHECKING
from pydantic import BaseModel, ConfigDict, Field, model_validator
from configs import dify_config
from extensions.ext_redis import redis_client
from services.enterprise.base import EnterpriseRequest
if TYPE_CHECKING:
from services.feature_service import LicenseStatus
logger = logging.getLogger(__name__)
DEFAULT_WORKSPACE_JOIN_TIMEOUT_SECONDS = 1.0
# License status cache configuration
LICENSE_STATUS_CACHE_KEY = "enterprise:license:status"
VALID_LICENSE_CACHE_TTL = 600 # 10 minutes — valid licenses are stable
INVALID_LICENSE_CACHE_TTL = 30 # 30 seconds — short so admin fixes are picked up quickly
class WebAppSettings(BaseModel):
@ -52,7 +63,7 @@ class DefaultWorkspaceJoinResult(BaseModel):
model_config = ConfigDict(extra="forbid", populate_by_name=True)
@model_validator(mode="after")
def _check_workspace_id_when_joined(self) -> "DefaultWorkspaceJoinResult":
def _check_workspace_id_when_joined(self) -> DefaultWorkspaceJoinResult:
if self.joined and not self.workspace_id:
raise ValueError("workspace_id must be non-empty when joined is True")
return self
@ -115,7 +126,6 @@ class EnterpriseService:
"/default-workspace/members",
json={"account_id": account_id},
timeout=DEFAULT_WORKSPACE_JOIN_TIMEOUT_SECONDS,
raise_for_status=True,
)
if not isinstance(data, dict):
raise ValueError("Invalid response format from enterprise default workspace API")
@ -223,3 +233,64 @@ class EnterpriseService:
params = {"appId": app_id}
EnterpriseRequest.send_request("DELETE", "/webapp/clean", params=params)
@classmethod
def get_cached_license_status(cls) -> LicenseStatus | None:
"""Get enterprise license status with Redis caching to reduce HTTP calls.
Caches valid statuses (active/expiring) for 10 minutes and invalid statuses
(inactive/expired/lost) for 30 seconds. The shorter TTL for invalid statuses
balances prompt license-fix detection against DoS mitigation without
caching, every request on an expired license would hit the enterprise API.
Returns:
LicenseStatus enum value, or None if enterprise is disabled / unreachable.
"""
if not dify_config.ENTERPRISE_ENABLED:
return None
cached = cls._read_cached_license_status()
if cached is not None:
return cached
return cls._fetch_and_cache_license_status()
@classmethod
def _read_cached_license_status(cls) -> LicenseStatus | None:
"""Read license status from Redis cache, returning None on miss or failure."""
from services.feature_service import LicenseStatus
try:
raw = redis_client.get(LICENSE_STATUS_CACHE_KEY)
if raw:
value = raw.decode("utf-8") if isinstance(raw, bytes) else raw
return LicenseStatus(value)
except Exception:
logger.debug("Failed to read license status from cache", exc_info=True)
return None
@classmethod
def _fetch_and_cache_license_status(cls) -> LicenseStatus | None:
"""Fetch license status from enterprise API and cache the result."""
from services.feature_service import LicenseStatus
try:
info = cls.get_info()
license_info = info.get("License")
if not license_info:
return None
status = LicenseStatus(license_info.get("status", LicenseStatus.INACTIVE))
ttl = (
VALID_LICENSE_CACHE_TTL
if status in (LicenseStatus.ACTIVE, LicenseStatus.EXPIRING)
else INVALID_LICENSE_CACHE_TTL
)
try:
redis_client.setex(LICENSE_STATUS_CACHE_KEY, ttl, status)
except Exception:
logger.debug("Failed to cache license status", exc_info=True)
return status
except Exception:
logger.debug("Failed to fetch enterprise license status", exc_info=True)
return None

View File

@ -70,7 +70,6 @@ class PluginManagerService:
"POST",
"/pre-uninstall-plugin",
json=body.model_dump(),
raise_for_status=True,
timeout=dify_config.ENTERPRISE_REQUEST_TIMEOUT,
)
except Exception:

View File

@ -7,6 +7,7 @@ from . import (
conversation,
dataset,
document,
enterprise,
file,
index,
message,
@ -21,6 +22,7 @@ __all__ = [
"conversation",
"dataset",
"document",
"enterprise",
"file",
"index",
"message",

View File

@ -0,0 +1,45 @@
"""Enterprise service errors."""
from services.errors.base import BaseServiceError
class EnterpriseServiceError(BaseServiceError):
    """Base exception for enterprise service errors.

    Carries an optional HTTP ``status_code`` so callers can branch on the
    upstream response class without parsing the message text.
    """

    def __init__(self, description: str | None = None, status_code: int | None = None):
        super().__init__(description)
        # HTTP status of the failed enterprise API call, when known.
        self.status_code = status_code
class EnterpriseAPIError(EnterpriseServiceError):
    """Generic enterprise API error (non-2xx response with no dedicated subclass)."""

    pass
class EnterpriseAPINotFoundError(EnterpriseServiceError):
    """Raised when the enterprise API responds with 404 Not Found."""

    def __init__(self, description: str | None = None):
        super().__init__(description=description, status_code=404)
class EnterpriseAPIForbiddenError(EnterpriseServiceError):
    """Raised when the enterprise API responds with 403 Forbidden."""

    def __init__(self, description: str | None = None):
        super().__init__(description=description, status_code=403)
class EnterpriseAPIUnauthorizedError(EnterpriseServiceError):
    """Raised when the enterprise API responds with 401 Unauthorized."""

    def __init__(self, description: str | None = None):
        super().__init__(description=description, status_code=401)
class EnterpriseAPIBadRequestError(EnterpriseServiceError):
    """Raised when the enterprise API responds with 400 Bad Request."""

    def __init__(self, description: str | None = None):
        super().__init__(description=description, status_code=400)

View File

@ -379,14 +379,19 @@ class FeatureService:
)
features.webapp_auth.sso_config.protocol = enterprise_info.get("SSOEnforcedForWebProtocol", "")
if is_authenticated and (license_info := enterprise_info.get("License")):
# SECURITY NOTE: Only license *status* is exposed to unauthenticated callers
# so the login page can detect an expired/inactive license after force-logout.
# All other license details (expiry date, workspace usage) remain auth-gated.
# This behavior reflects prior internal review of information-leakage risks.
if license_info := enterprise_info.get("License"):
features.license.status = LicenseStatus(license_info.get("status", LicenseStatus.INACTIVE))
features.license.expired_at = license_info.get("expiredAt", "")
if workspaces_info := license_info.get("workspaces"):
features.license.workspaces.enabled = workspaces_info.get("enabled", False)
features.license.workspaces.limit = workspaces_info.get("limit", 0)
features.license.workspaces.size = workspaces_info.get("used", 0)
if is_authenticated:
features.license.expired_at = license_info.get("expiredAt", "")
if workspaces_info := license_info.get("workspaces"):
features.license.workspaces.enabled = workspaces_info.get("enabled", False)
features.license.workspaces.limit = workspaces_info.get("limit", 0)
features.license.workspaces.size = workspaces_info.get("used", 0)
if "PluginInstallationPermission" in enterprise_info:
plugin_installation_info = enterprise_info["PluginInstallationPermission"]

View File

@ -472,6 +472,7 @@ class RagPipelineService:
engine=db.engine,
app_id=pipeline.id,
tenant_id=pipeline.tenant_id,
user_id=account.id,
),
),
start_at=start_at,
@ -1237,6 +1238,7 @@ class RagPipelineService:
engine=db.engine,
app_id=pipeline.id,
tenant_id=pipeline.tenant_id,
user_id=current_user.id,
),
),
start_at=start_at,

View File

@ -77,6 +77,7 @@ class DraftVarLoader(VariableLoader):
_engine: Engine
# Application ID for which variables are being loaded.
_app_id: str
_user_id: str
_tenant_id: str
_fallback_variables: Sequence[VariableBase]
@ -85,10 +86,12 @@ class DraftVarLoader(VariableLoader):
engine: Engine,
app_id: str,
tenant_id: str,
user_id: str,
fallback_variables: Sequence[VariableBase] | None = None,
):
self._engine = engine
self._app_id = app_id
self._user_id = user_id
self._tenant_id = tenant_id
self._fallback_variables = fallback_variables or []
@ -104,7 +107,7 @@ class DraftVarLoader(VariableLoader):
with Session(bind=self._engine, expire_on_commit=False) as session:
srv = WorkflowDraftVariableService(session)
draft_vars = srv.get_draft_variables_by_selectors(self._app_id, selectors)
draft_vars = srv.get_draft_variables_by_selectors(self._app_id, selectors, user_id=self._user_id)
# Important:
files: list[File] = []
@ -218,6 +221,7 @@ class WorkflowDraftVariableService:
self,
app_id: str,
selectors: Sequence[list[str]],
user_id: str,
) -> list[WorkflowDraftVariable]:
"""
Retrieve WorkflowDraftVariable instances based on app_id and selectors.
@ -238,22 +242,30 @@ class WorkflowDraftVariableService:
# Alternatively, a `SELECT` statement could be constructed for each selector and
# combined using `UNION` to fetch all rows.
# Benchmarking indicates that both approaches yield comparable performance.
variables = (
query = (
self._session.query(WorkflowDraftVariable)
.options(
orm.selectinload(WorkflowDraftVariable.variable_file).selectinload(
WorkflowDraftVariableFile.upload_file
)
)
.where(WorkflowDraftVariable.app_id == app_id, or_(*ors))
.all()
.where(
WorkflowDraftVariable.app_id == app_id,
WorkflowDraftVariable.user_id == user_id,
or_(*ors),
)
)
return variables
return query.all()
def list_variables_without_values(self, app_id: str, page: int, limit: int) -> WorkflowDraftVariableList:
criteria = WorkflowDraftVariable.app_id == app_id
def list_variables_without_values(
self, app_id: str, page: int, limit: int, user_id: str
) -> WorkflowDraftVariableList:
criteria = [
WorkflowDraftVariable.app_id == app_id,
WorkflowDraftVariable.user_id == user_id,
]
total = None
query = self._session.query(WorkflowDraftVariable).where(criteria)
query = self._session.query(WorkflowDraftVariable).where(*criteria)
if page == 1:
total = query.count()
variables = (
@ -269,11 +281,12 @@ class WorkflowDraftVariableService:
return WorkflowDraftVariableList(variables=variables, total=total)
def _list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList:
criteria = (
def _list_node_variables(self, app_id: str, node_id: str, user_id: str) -> WorkflowDraftVariableList:
criteria = [
WorkflowDraftVariable.app_id == app_id,
WorkflowDraftVariable.node_id == node_id,
)
WorkflowDraftVariable.user_id == user_id,
]
query = self._session.query(WorkflowDraftVariable).where(*criteria)
variables = (
query.options(orm.selectinload(WorkflowDraftVariable.variable_file))
@ -282,36 +295,36 @@ class WorkflowDraftVariableService:
)
return WorkflowDraftVariableList(variables=variables)
def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, node_id)
def list_node_variables(self, app_id: str, node_id: str, user_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, node_id, user_id=user_id)
def list_conversation_variables(self, app_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, CONVERSATION_VARIABLE_NODE_ID)
def list_conversation_variables(self, app_id: str, user_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, CONVERSATION_VARIABLE_NODE_ID, user_id=user_id)
def list_system_variables(self, app_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, SYSTEM_VARIABLE_NODE_ID)
def list_system_variables(self, app_id: str, user_id: str) -> WorkflowDraftVariableList:
return self._list_node_variables(app_id, SYSTEM_VARIABLE_NODE_ID, user_id=user_id)
def get_conversation_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, name=name)
def get_conversation_variable(self, app_id: str, name: str, user_id: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, name=name, user_id=user_id)
def get_system_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, name=name)
def get_system_variable(self, app_id: str, name: str, user_id: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, name=name, user_id=user_id)
def get_node_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id, node_id, name)
def get_node_variable(self, app_id: str, node_id: str, name: str, user_id: str) -> WorkflowDraftVariable | None:
return self._get_variable(app_id, node_id, name, user_id=user_id)
def _get_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None:
variable = (
def _get_variable(self, app_id: str, node_id: str, name: str, user_id: str) -> WorkflowDraftVariable | None:
return (
self._session.query(WorkflowDraftVariable)
.options(orm.selectinload(WorkflowDraftVariable.variable_file))
.where(
WorkflowDraftVariable.app_id == app_id,
WorkflowDraftVariable.node_id == node_id,
WorkflowDraftVariable.name == name,
WorkflowDraftVariable.user_id == user_id,
)
.first()
)
return variable
def update_variable(
self,
@ -462,7 +475,17 @@ class WorkflowDraftVariableService:
self._session.delete(upload_file)
self._session.delete(variable)
def delete_workflow_variables(self, app_id: str):
    def delete_user_workflow_variables(self, app_id: str, user_id: str):
        # Bulk-delete every draft variable owned by `user_id` for the given app.
        # synchronize_session=False skips syncing in-session state for speed;
        # callers must not rely on already-loaded WorkflowDraftVariable
        # instances after this call.
        (
            self._session.query(WorkflowDraftVariable)
            .where(
                WorkflowDraftVariable.app_id == app_id,
                WorkflowDraftVariable.user_id == user_id,
            )
            .delete(synchronize_session=False)
        )
def delete_app_workflow_variables(self, app_id: str):
(
self._session.query(WorkflowDraftVariable)
.where(WorkflowDraftVariable.app_id == app_id)
@ -501,28 +524,35 @@ class WorkflowDraftVariableService:
self._session.delete(upload_file)
self._session.delete(variable_file)
def delete_node_variables(self, app_id: str, node_id: str):
return self._delete_node_variables(app_id, node_id)
    def delete_node_variables(self, app_id: str, node_id: str, user_id: str):
        # Public wrapper: deletes the given user's draft variables for a single node.
        return self._delete_node_variables(app_id, node_id, user_id=user_id)
def _delete_node_variables(self, app_id: str, node_id: str):
self._session.query(WorkflowDraftVariable).where(
WorkflowDraftVariable.app_id == app_id,
WorkflowDraftVariable.node_id == node_id,
).delete()
    def _delete_node_variables(self, app_id: str, node_id: str, user_id: str):
        # Bulk-delete draft variables matching one (app, node, user) triple.
        # synchronize_session=False avoids syncing in-memory session state;
        # safe here because callers do not reuse loaded instances afterwards.
        (
            self._session.query(WorkflowDraftVariable)
            .where(
                WorkflowDraftVariable.app_id == app_id,
                WorkflowDraftVariable.node_id == node_id,
                WorkflowDraftVariable.user_id == user_id,
            )
            .delete(synchronize_session=False)
        )
def _get_conversation_id_from_draft_variable(self, app_id: str) -> str | None:
def _get_conversation_id_from_draft_variable(self, app_id: str, user_id: str) -> str | None:
draft_var = self._get_variable(
app_id=app_id,
node_id=SYSTEM_VARIABLE_NODE_ID,
name=str(SystemVariableKey.CONVERSATION_ID),
user_id=user_id,
)
if draft_var is None:
return None
segment = draft_var.get_value()
if not isinstance(segment, StringSegment):
logger.warning(
"sys.conversation_id variable is not a string: app_id=%s, id=%s",
"sys.conversation_id variable is not a string: app_id=%s, user_id=%s, id=%s",
app_id,
user_id,
draft_var.id,
)
return None
@ -543,7 +573,7 @@ class WorkflowDraftVariableService:
If no such conversation exists, a new conversation is created and its ID is returned.
"""
conv_id = self._get_conversation_id_from_draft_variable(workflow.app_id)
conv_id = self._get_conversation_id_from_draft_variable(workflow.app_id, account_id)
if conv_id is not None:
conversation = (
@ -580,12 +610,13 @@ class WorkflowDraftVariableService:
self._session.flush()
return conversation.id
def prefill_conversation_variable_default_values(self, workflow: Workflow):
def prefill_conversation_variable_default_values(self, workflow: Workflow, user_id: str):
""""""
draft_conv_vars: list[WorkflowDraftVariable] = []
for conv_var in workflow.conversation_variables:
draft_var = WorkflowDraftVariable.new_conversation_variable(
app_id=workflow.app_id,
user_id=user_id,
name=conv_var.name,
value=conv_var,
description=conv_var.description,
@ -635,7 +666,7 @@ def _batch_upsert_draft_variable(
stmt = pg_insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars])
if policy == _UpsertPolicy.OVERWRITE:
stmt = stmt.on_conflict_do_update(
index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(),
index_elements=WorkflowDraftVariable.unique_app_id_user_id_node_id_name(),
set_={
# Refresh creation timestamp to ensure updated variables
# appear first in chronologically sorted result sets.
@ -652,7 +683,9 @@ def _batch_upsert_draft_variable(
},
)
elif policy == _UpsertPolicy.IGNORE:
stmt = stmt.on_conflict_do_nothing(index_elements=WorkflowDraftVariable.unique_app_id_node_id_name())
stmt = stmt.on_conflict_do_nothing(
index_elements=WorkflowDraftVariable.unique_app_id_user_id_node_id_name()
)
else:
stmt = mysql_insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_vars]) # type: ignore[assignment]
if policy == _UpsertPolicy.OVERWRITE:
@ -682,6 +715,7 @@ def _model_to_insertion_dict(model: WorkflowDraftVariable) -> dict[str, Any]:
d: dict[str, Any] = {
"id": model.id,
"app_id": model.app_id,
"user_id": model.user_id,
"last_edited_at": None,
"node_id": model.node_id,
"name": model.name,
@ -807,6 +841,7 @@ class DraftVariableSaver:
def _create_dummy_output_variable(self):
return WorkflowDraftVariable.new_node_variable(
app_id=self._app_id,
user_id=self._user.id,
node_id=self._node_id,
name=self._DUMMY_OUTPUT_IDENTITY,
node_execution_id=self._node_execution_id,
@ -842,6 +877,7 @@ class DraftVariableSaver:
draft_vars.append(
WorkflowDraftVariable.new_conversation_variable(
app_id=self._app_id,
user_id=self._user.id,
name=item.name,
value=segment,
)
@ -862,6 +898,7 @@ class DraftVariableSaver:
draft_vars.append(
WorkflowDraftVariable.new_node_variable(
app_id=self._app_id,
user_id=self._user.id,
node_id=self._node_id,
name=name,
node_execution_id=self._node_execution_id,
@ -884,6 +921,7 @@ class DraftVariableSaver:
draft_vars.append(
WorkflowDraftVariable.new_sys_variable(
app_id=self._app_id,
user_id=self._user.id,
name=name,
node_execution_id=self._node_execution_id,
value=value_seg,
@ -1019,6 +1057,7 @@ class DraftVariableSaver:
# Create the draft variable
draft_var = WorkflowDraftVariable.new_node_variable(
app_id=self._app_id,
user_id=self._user.id,
node_id=self._node_id,
name=name,
node_execution_id=self._node_execution_id,
@ -1032,6 +1071,7 @@ class DraftVariableSaver:
# Create the draft variable
draft_var = WorkflowDraftVariable.new_node_variable(
app_id=self._app_id,
user_id=self._user.id,
node_id=self._node_id,
name=name,
node_execution_id=self._node_execution_id,

View File

@ -697,7 +697,7 @@ class WorkflowService:
with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
draft_var_srv = WorkflowDraftVariableService(session)
draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)
draft_var_srv.prefill_conversation_variable_default_values(draft_workflow, user_id=account.id)
node_config = draft_workflow.get_node_config_by_id(node_id)
node_type = Workflow.get_node_type_from_node_config(node_config)
@ -740,6 +740,7 @@ class WorkflowService:
engine=db.engine,
app_id=app_model.id,
tenant_id=app_model.tenant_id,
user_id=account.id,
)
enclosing_node_type_and_id = draft_workflow.get_enclosing_node_type_and_id(node_config)
@ -831,6 +832,7 @@ class WorkflowService:
workflow=draft_workflow,
node_config=node_config,
manual_inputs=inputs or {},
user_id=account.id,
)
node = self._build_human_input_node(
workflow=draft_workflow,
@ -891,6 +893,7 @@ class WorkflowService:
workflow=draft_workflow,
node_config=node_config,
manual_inputs=inputs or {},
user_id=account.id,
)
node = self._build_human_input_node(
workflow=draft_workflow,
@ -967,6 +970,7 @@ class WorkflowService:
workflow=draft_workflow,
node_config=node_config,
manual_inputs=inputs or {},
user_id=account.id,
)
node = self._build_human_input_node(
workflow=draft_workflow,
@ -1102,10 +1106,11 @@ class WorkflowService:
workflow: Workflow,
node_config: NodeConfigDict,
manual_inputs: Mapping[str, Any],
user_id: str,
) -> VariablePool:
with Session(bind=db.engine, expire_on_commit=False) as session, session.begin():
draft_var_srv = WorkflowDraftVariableService(session)
draft_var_srv.prefill_conversation_variable_default_values(workflow)
draft_var_srv.prefill_conversation_variable_default_values(workflow, user_id=user_id)
variable_pool = VariablePool(
system_variables=SystemVariable.default(),
@ -1118,6 +1123,7 @@ class WorkflowService:
engine=db.engine,
app_id=app_model.id,
tenant_id=app_model.tenant_id,
user_id=user_id,
)
variable_mapping = HumanInputNode.extract_variable_selector_to_variable_mapping(
graph_config=workflow.graph_dict,

View File

@ -30,6 +30,7 @@ from services.workflow_draft_variable_service import (
class TestWorkflowDraftVariableService(unittest.TestCase):
_test_app_id: str
_session: Session
_test_user_id: str
_node1_id = "test_node_1"
_node2_id = "test_node_2"
_node_exec_id = str(uuid.uuid4())
@ -99,13 +100,13 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test_list_variables(self):
srv = self._get_test_srv()
var_list = srv.list_variables_without_values(self._test_app_id, page=1, limit=2)
var_list = srv.list_variables_without_values(self._test_app_id, page=1, limit=2, user_id=self._test_user_id)
assert var_list.total == 5
assert len(var_list.variables) == 2
page1_var_ids = {v.id for v in var_list.variables}
assert page1_var_ids.issubset(self._variable_ids)
var_list_2 = srv.list_variables_without_values(self._test_app_id, page=2, limit=2)
var_list_2 = srv.list_variables_without_values(self._test_app_id, page=2, limit=2, user_id=self._test_user_id)
assert var_list_2.total is None
assert len(var_list_2.variables) == 2
page2_var_ids = {v.id for v in var_list_2.variables}
@ -114,7 +115,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test_get_node_variable(self):
srv = self._get_test_srv()
node_var = srv.get_node_variable(self._test_app_id, self._node1_id, "str_var")
node_var = srv.get_node_variable(self._test_app_id, self._node1_id, "str_var", user_id=self._test_user_id)
assert node_var is not None
assert node_var.id == self._node1_str_var_id
assert node_var.name == "str_var"
@ -122,7 +123,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test_get_system_variable(self):
srv = self._get_test_srv()
sys_var = srv.get_system_variable(self._test_app_id, "sys_var")
sys_var = srv.get_system_variable(self._test_app_id, "sys_var", user_id=self._test_user_id)
assert sys_var is not None
assert sys_var.id == self._sys_var_id
assert sys_var.name == "sys_var"
@ -130,7 +131,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test_get_conversation_variable(self):
srv = self._get_test_srv()
conv_var = srv.get_conversation_variable(self._test_app_id, "conv_var")
conv_var = srv.get_conversation_variable(self._test_app_id, "conv_var", user_id=self._test_user_id)
assert conv_var is not None
assert conv_var.id == self._conv_var_id
assert conv_var.name == "conv_var"
@ -138,7 +139,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test_delete_node_variables(self):
srv = self._get_test_srv()
srv.delete_node_variables(self._test_app_id, self._node2_id)
srv.delete_node_variables(self._test_app_id, self._node2_id, user_id=self._test_user_id)
node2_var_count = (
self._session.query(WorkflowDraftVariable)
.where(
@ -162,7 +163,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
def test__list_node_variables(self):
srv = self._get_test_srv()
node_vars = srv._list_node_variables(self._test_app_id, self._node2_id)
node_vars = srv._list_node_variables(self._test_app_id, self._node2_id, user_id=self._test_user_id)
assert len(node_vars.variables) == 2
assert {v.id for v in node_vars.variables} == set(self._node2_var_ids)
@ -173,7 +174,7 @@ class TestWorkflowDraftVariableService(unittest.TestCase):
[self._node2_id, "str_var"],
[self._node2_id, "int_var"],
]
variables = srv.get_draft_variables_by_selectors(self._test_app_id, selectors)
variables = srv.get_draft_variables_by_selectors(self._test_app_id, selectors, user_id=self._test_user_id)
assert len(variables) == 3
assert {v.id for v in variables} == {self._node1_str_var_id} | set(self._node2_var_ids)
@ -206,19 +207,23 @@ class TestDraftVariableLoader(unittest.TestCase):
def setUp(self):
self._test_app_id = str(uuid.uuid4())
self._test_tenant_id = str(uuid.uuid4())
self._test_user_id = str(uuid.uuid4())
sys_var = WorkflowDraftVariable.new_sys_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
name="sys_var",
value=build_segment("sys_value"),
node_execution_id=self._node_exec_id,
)
conv_var = WorkflowDraftVariable.new_conversation_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
name="conv_var",
value=build_segment("conv_value"),
)
node_var = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id=self._node1_id,
name="str_var",
value=build_segment("str_value"),
@ -248,12 +253,22 @@ class TestDraftVariableLoader(unittest.TestCase):
session.commit()
def test_variable_loader_with_empty_selector(self):
var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=self._test_app_id,
tenant_id=self._test_tenant_id,
user_id=self._test_user_id,
)
variables = var_loader.load_variables([])
assert len(variables) == 0
def test_variable_loader_with_non_empty_selector(self):
var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=self._test_app_id,
tenant_id=self._test_tenant_id,
user_id=self._test_user_id,
)
variables = var_loader.load_variables(
[
[SYSTEM_VARIABLE_NODE_ID, "sys_var"],
@ -296,7 +311,12 @@ class TestDraftVariableLoader(unittest.TestCase):
session.commit()
# Now test loading using DraftVarLoader
var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=self._test_app_id,
tenant_id=self._test_tenant_id,
user_id=setup_account.id,
)
# Load the variable using the standard workflow
variables = var_loader.load_variables([["test_offload_node", "offloaded_string_var"]])
@ -313,7 +333,7 @@ class TestDraftVariableLoader(unittest.TestCase):
# Clean up - delete all draft variables for this app
with Session(bind=db.engine) as session:
service = WorkflowDraftVariableService(session)
service.delete_workflow_variables(self._test_app_id)
service.delete_app_workflow_variables(self._test_app_id)
session.commit()
def test_load_offloaded_variable_object_type_integration(self):
@ -364,6 +384,7 @@ class TestDraftVariableLoader(unittest.TestCase):
# Now create the offloaded draft variable with the correct file_id
offloaded_var = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id="test_offload_node",
name="offloaded_object_var",
value=build_segment({"truncated": True}),
@ -379,7 +400,9 @@ class TestDraftVariableLoader(unittest.TestCase):
# Use the service method that properly preloads relationships
service = WorkflowDraftVariableService(session)
draft_vars = service.get_draft_variables_by_selectors(
self._test_app_id, [["test_offload_node", "offloaded_object_var"]]
self._test_app_id,
[["test_offload_node", "offloaded_object_var"]],
user_id=self._test_user_id,
)
assert len(draft_vars) == 1
@ -387,7 +410,12 @@ class TestDraftVariableLoader(unittest.TestCase):
assert loaded_var.is_truncated()
# Create DraftVarLoader and test loading
var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=self._test_app_id,
tenant_id=self._test_tenant_id,
user_id=self._test_user_id,
)
# Test the _load_offloaded_variable method
selector_tuple, variable = var_loader._load_offloaded_variable(loaded_var)
@ -459,6 +487,7 @@ class TestDraftVariableLoader(unittest.TestCase):
# Now create the offloaded draft variable with the correct file_id
offloaded_var = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id="test_integration_node",
name="offloaded_integration_var",
value=build_segment("truncated"),
@ -473,7 +502,12 @@ class TestDraftVariableLoader(unittest.TestCase):
# Test load_variables with both regular and offloaded variables
# This method should handle the relationship preloading internally
var_loader = DraftVarLoader(engine=db.engine, app_id=self._test_app_id, tenant_id=self._test_tenant_id)
var_loader = DraftVarLoader(
engine=db.engine,
app_id=self._test_app_id,
tenant_id=self._test_tenant_id,
user_id=self._test_user_id,
)
variables = var_loader.load_variables(
[
@ -572,6 +606,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase):
# Create test variables
self._node_var_with_exec = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id=self._node_id,
name="test_var",
value=build_segment("old_value"),
@ -581,6 +616,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase):
self._node_var_without_exec = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id=self._node_id,
name="no_exec_var",
value=build_segment("some_value"),
@ -591,6 +627,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase):
self._node_var_missing_exec = WorkflowDraftVariable.new_node_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
node_id=self._node_id,
name="missing_exec_var",
value=build_segment("some_value"),
@ -599,6 +636,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase):
self._conv_var = WorkflowDraftVariable.new_conversation_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
name="conv_var_1",
value=build_segment("old_conv_value"),
)
@ -764,6 +802,7 @@ class TestWorkflowDraftVariableServiceResetVariable(unittest.TestCase):
# Create a system variable
sys_var = WorkflowDraftVariable.new_sys_variable(
app_id=self._test_app_id,
user_id=self._test_user_id,
name="sys_var",
value=build_segment("sys_value"),
node_execution_id=self._node_exec_id,

View File

@ -358,10 +358,9 @@ class TestFeatureService:
assert result is not None
assert isinstance(result, SystemFeatureModel)
# --- 1. Verify Response Payload Optimization (Data Minimization) ---
# Ensure only essential UI flags are returned to unauthenticated clients
# to keep the payload lightweight and adhere to architectural boundaries.
assert result.license.status == LicenseStatus.NONE
# --- 1. Verify only license *status* is exposed to unauthenticated clients ---
# Detailed license info (expiry, workspaces) remains auth-gated.
assert result.license.status == LicenseStatus.ACTIVE
assert result.license.expired_at == ""
assert result.license.workspaces.enabled is False
assert result.license.workspaces.limit == 0

View File

@ -122,6 +122,7 @@ class TestWorkflowDraftVariableService:
name,
value,
variable_type: DraftVariableType = DraftVariableType.CONVERSATION,
user_id: str | None = None,
fake=None,
):
"""
@ -144,10 +145,15 @@ class TestWorkflowDraftVariableService:
WorkflowDraftVariable: Created test variable instance with proper type configuration
"""
fake = fake or Faker()
if user_id is None:
app = db_session_with_containers.query(App).filter_by(id=app_id).first()
assert app is not None
user_id = app.created_by
if variable_type == "conversation":
# Create conversation variable using the appropriate factory method
variable = WorkflowDraftVariable.new_conversation_variable(
app_id=app_id,
user_id=user_id,
name=name,
value=value,
description=fake.text(max_nb_chars=20),
@ -156,6 +162,7 @@ class TestWorkflowDraftVariableService:
# Create system variable with editable flag and execution context
variable = WorkflowDraftVariable.new_sys_variable(
app_id=app_id,
user_id=user_id,
name=name,
value=value,
node_execution_id=fake.uuid4(),
@ -165,6 +172,7 @@ class TestWorkflowDraftVariableService:
# Create node variable with visibility and editability settings
variable = WorkflowDraftVariable.new_node_variable(
app_id=app_id,
user_id=user_id,
node_id=node_id,
name=name,
value=value,
@ -189,7 +197,13 @@ class TestWorkflowDraftVariableService:
app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake)
test_value = StringSegment(value=fake.word())
variable = self._create_test_variable(
db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "test_var", test_value, fake=fake
db_session_with_containers,
app.id,
CONVERSATION_VARIABLE_NODE_ID,
"test_var",
test_value,
user_id=app.created_by,
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_variable = service.get_variable(variable.id)
@ -250,7 +264,7 @@ class TestWorkflowDraftVariableService:
["test_node_1", "var3"],
]
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_variables = service.get_draft_variables_by_selectors(app.id, selectors)
retrieved_variables = service.get_draft_variables_by_selectors(app.id, selectors, user_id=app.created_by)
assert len(retrieved_variables) == 3
var_names = [var.name for var in retrieved_variables]
assert "var1" in var_names
@ -288,7 +302,7 @@ class TestWorkflowDraftVariableService:
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
result = service.list_variables_without_values(app.id, page=1, limit=3)
result = service.list_variables_without_values(app.id, page=1, limit=3, user_id=app.created_by)
assert result.total == 5
assert len(result.variables) == 3
assert result.variables[0].created_at >= result.variables[1].created_at
@ -339,7 +353,7 @@ class TestWorkflowDraftVariableService:
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
result = service.list_node_variables(app.id, node_id)
result = service.list_node_variables(app.id, node_id, user_id=app.created_by)
assert len(result.variables) == 2
for var in result.variables:
assert var.node_id == node_id
@ -381,7 +395,7 @@ class TestWorkflowDraftVariableService:
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
result = service.list_conversation_variables(app.id)
result = service.list_conversation_variables(app.id, user_id=app.created_by)
assert len(result.variables) == 2
for var in result.variables:
assert var.node_id == CONVERSATION_VARIABLE_NODE_ID
@ -559,7 +573,7 @@ class TestWorkflowDraftVariableService:
assert len(app_variables) == 3
assert len(other_app_variables) == 1
service = WorkflowDraftVariableService(db_session_with_containers)
service.delete_workflow_variables(app.id)
service.delete_user_workflow_variables(app.id, user_id=app.created_by)
app_variables_after = db_session_with_containers.query(WorkflowDraftVariable).filter_by(app_id=app.id).all()
other_app_variables_after = (
db_session_with_containers.query(WorkflowDraftVariable).filter_by(app_id=other_app.id).all()
@ -567,6 +581,69 @@ class TestWorkflowDraftVariableService:
assert len(app_variables_after) == 0
assert len(other_app_variables_after) == 1
def test_draft_variables_are_isolated_between_users(
    self, db_session_with_containers: Session, mock_external_service_dependencies
):
    """Draft variables must be partitioned per user within a single app.

    Verifies two guarantees:
    1. Listing APIs only return the variables owned by the requested user.
    2. User-scoped deletion removes that user's variables only, leaving
       other users' variables in the same app intact.
    """
    fake = Faker()
    app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake)
    first_user = app.created_by
    second_user = fake.uuid4()

    # Deliberately reuse the same variable name for both users so the test
    # also proves the uniqueness constraint is scoped by user_id.
    for var_name, var_value, owner in (
        ("shared_name", "value_a", first_user),
        ("shared_name", "value_b", second_user),
        ("only_a", "only_a", first_user),
    ):
        self._create_test_variable(
            db_session_with_containers,
            app.id,
            CONVERSATION_VARIABLE_NODE_ID,
            var_name,
            StringSegment(value=var_value),
            user_id=owner,
            fake=fake,
        )

    service = WorkflowDraftVariableService(db_session_with_containers)

    # Each user sees exactly their own variables and nothing else.
    first_user_vars = service.list_conversation_variables(app.id, user_id=first_user)
    second_user_vars = service.list_conversation_variables(app.id, user_id=second_user)
    assert {v.name for v in first_user_vars.variables} == {"shared_name", "only_a"}
    assert {v.name for v in second_user_vars.variables} == {"shared_name"}

    # Deleting the first user's variables must not touch the second user's.
    service.delete_user_workflow_variables(app.id, user_id=first_user)
    remaining_first = (
        db_session_with_containers.query(WorkflowDraftVariable).filter_by(app_id=app.id, user_id=first_user).count()
    )
    remaining_second = (
        db_session_with_containers.query(WorkflowDraftVariable).filter_by(app_id=app.id, user_id=second_user).count()
    )
    assert remaining_first == 0
    assert remaining_second == 1
def test_delete_node_variables_success(
self, db_session_with_containers: Session, mock_external_service_dependencies
):
@ -627,7 +704,7 @@ class TestWorkflowDraftVariableService:
assert len(other_node_variables) == 1
assert len(conv_variables) == 1
service = WorkflowDraftVariableService(db_session_with_containers)
service.delete_node_variables(app.id, node_id)
service.delete_node_variables(app.id, node_id, user_id=app.created_by)
target_node_variables_after = (
db_session_with_containers.query(WorkflowDraftVariable).filter_by(app_id=app.id, node_id=node_id).all()
)
@ -675,7 +752,7 @@ class TestWorkflowDraftVariableService:
db_session_with_containers.commit()
service = WorkflowDraftVariableService(db_session_with_containers)
service.prefill_conversation_variable_default_values(workflow)
service.prefill_conversation_variable_default_values(workflow, user_id="00000000-0000-0000-0000-000000000001")
draft_variables = (
db_session_with_containers.query(WorkflowDraftVariable)
.filter_by(app_id=app.id, node_id=CONVERSATION_VARIABLE_NODE_ID)
@ -715,7 +792,7 @@ class TestWorkflowDraftVariableService:
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id)
retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id, app.created_by)
assert retrieved_conv_id == conversation_id
def test_get_conversation_id_from_draft_variable_not_found(
@ -731,7 +808,7 @@ class TestWorkflowDraftVariableService:
fake = Faker()
app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake)
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id)
retrieved_conv_id = service._get_conversation_id_from_draft_variable(app.id, app.created_by)
assert retrieved_conv_id is None
def test_list_system_variables_success(
@ -772,7 +849,7 @@ class TestWorkflowDraftVariableService:
db_session_with_containers, app.id, CONVERSATION_VARIABLE_NODE_ID, "conv_var", conv_var_value, fake=fake
)
service = WorkflowDraftVariableService(db_session_with_containers)
result = service.list_system_variables(app.id)
result = service.list_system_variables(app.id, user_id=app.created_by)
assert len(result.variables) == 2
for var in result.variables:
assert var.node_id == SYSTEM_VARIABLE_NODE_ID
@ -819,15 +896,15 @@ class TestWorkflowDraftVariableService:
fake=fake,
)
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_conv_var = service.get_conversation_variable(app.id, "test_conv_var")
retrieved_conv_var = service.get_conversation_variable(app.id, "test_conv_var", user_id=app.created_by)
assert retrieved_conv_var is not None
assert retrieved_conv_var.name == "test_conv_var"
assert retrieved_conv_var.node_id == CONVERSATION_VARIABLE_NODE_ID
retrieved_sys_var = service.get_system_variable(app.id, "test_sys_var")
retrieved_sys_var = service.get_system_variable(app.id, "test_sys_var", user_id=app.created_by)
assert retrieved_sys_var is not None
assert retrieved_sys_var.name == "test_sys_var"
assert retrieved_sys_var.node_id == SYSTEM_VARIABLE_NODE_ID
retrieved_node_var = service.get_node_variable(app.id, "test_node", "test_node_var")
retrieved_node_var = service.get_node_variable(app.id, "test_node", "test_node_var", user_id=app.created_by)
assert retrieved_node_var is not None
assert retrieved_node_var.name == "test_node_var"
assert retrieved_node_var.node_id == "test_node"
@ -845,9 +922,14 @@ class TestWorkflowDraftVariableService:
fake = Faker()
app = self._create_test_app(db_session_with_containers, mock_external_service_dependencies, fake=fake)
service = WorkflowDraftVariableService(db_session_with_containers)
retrieved_conv_var = service.get_conversation_variable(app.id, "non_existent_conv_var")
retrieved_conv_var = service.get_conversation_variable(app.id, "non_existent_conv_var", user_id=app.created_by)
assert retrieved_conv_var is None
retrieved_sys_var = service.get_system_variable(app.id, "non_existent_sys_var")
retrieved_sys_var = service.get_system_variable(app.id, "non_existent_sys_var", user_id=app.created_by)
assert retrieved_sys_var is None
retrieved_node_var = service.get_node_variable(app.id, "test_node", "non_existent_node_var")
retrieved_node_var = service.get_node_variable(
app.id,
"test_node",
"non_existent_node_var",
user_id=app.created_by,
)
assert retrieved_node_var is None

View File

@ -398,6 +398,7 @@ class TestWorkflowDraftVariableEndpoints:
method = _unwrap(api.get)
monkeypatch.setattr(workflow_draft_variable_module, "db", SimpleNamespace(engine=MagicMock()))
monkeypatch.setattr(workflow_draft_variable_module, "current_user", SimpleNamespace(id="user-1"))
class DummySession:
def __enter__(self):

View File

@ -234,6 +234,7 @@ class TestAdvancedChatAppGeneratorInternals:
captured: dict[str, object] = {}
prefill_calls: list[object] = []
var_loader = SimpleNamespace(loader="draft")
workflow = SimpleNamespace(id="workflow-id")
monkeypatch.setattr(
"core.app.apps.advanced_chat.app_generator.AdvancedChatAppConfigManager.get_app_config",
@ -260,8 +261,8 @@ class TestAdvancedChatAppGeneratorInternals:
def __init__(self, session):
_ = session
def prefill_conversation_variable_default_values(self, workflow):
prefill_calls.append(workflow)
def prefill_conversation_variable_default_values(self, workflow, user_id):
prefill_calls.append((workflow, user_id))
monkeypatch.setattr("core.app.apps.advanced_chat.app_generator.WorkflowDraftVariableService", _DraftVarService)
@ -273,7 +274,7 @@ class TestAdvancedChatAppGeneratorInternals:
result = generator.single_iteration_generate(
app_model=SimpleNamespace(id="app", tenant_id="tenant"),
workflow=SimpleNamespace(id="workflow-id"),
workflow=workflow,
node_id="node-1",
user=SimpleNamespace(id="user-id"),
args={"inputs": {"foo": "bar"}},
@ -281,7 +282,7 @@ class TestAdvancedChatAppGeneratorInternals:
)
assert result == {"ok": True}
assert prefill_calls
assert prefill_calls == [(workflow, "user-id")]
assert captured["variable_loader"] is var_loader
assert captured["application_generate_entity"].single_iteration_run.node_id == "node-1"
@ -291,6 +292,7 @@ class TestAdvancedChatAppGeneratorInternals:
captured: dict[str, object] = {}
prefill_calls: list[object] = []
var_loader = SimpleNamespace(loader="draft")
workflow = SimpleNamespace(id="workflow-id")
monkeypatch.setattr(
"core.app.apps.advanced_chat.app_generator.AdvancedChatAppConfigManager.get_app_config",
@ -317,8 +319,8 @@ class TestAdvancedChatAppGeneratorInternals:
def __init__(self, session):
_ = session
def prefill_conversation_variable_default_values(self, workflow):
prefill_calls.append(workflow)
def prefill_conversation_variable_default_values(self, workflow, user_id):
prefill_calls.append((workflow, user_id))
monkeypatch.setattr("core.app.apps.advanced_chat.app_generator.WorkflowDraftVariableService", _DraftVarService)
@ -330,7 +332,7 @@ class TestAdvancedChatAppGeneratorInternals:
result = generator.single_loop_generate(
app_model=SimpleNamespace(id="app", tenant_id="tenant"),
workflow=SimpleNamespace(id="workflow-id"),
workflow=workflow,
node_id="node-2",
user=SimpleNamespace(id="user-id"),
args=SimpleNamespace(inputs={"foo": "bar"}),
@ -338,7 +340,7 @@ class TestAdvancedChatAppGeneratorInternals:
)
assert result == {"ok": True}
assert prefill_calls
assert prefill_calls == [(workflow, "user-id")]
assert captured["variable_loader"] is var_loader
assert captured["application_generate_entity"].single_loop_run.node_id == "node-2"

View File

@ -0,0 +1,33 @@
from unittest.mock import MagicMock, patch
from core.rag.datasource.vdb.weaviate.weaviate_vector import WeaviateConfig, WeaviateVector
def test_init_client_with_valid_config():
    """A valid WeaviateConfig yields a ready client via weaviate.connect_to_custom.

    The weaviate SDK factory is patched so no real server is contacted; the
    test then checks that the endpoint URL was decomposed into the expected
    connection keyword arguments.
    """
    cfg = WeaviateConfig(
        endpoint="http://localhost:8080",
        api_key="WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih",
    )
    with patch("weaviate.connect_to_custom") as connect_mock:
        fake_client = MagicMock()
        fake_client.is_ready.return_value = True
        connect_mock.return_value = fake_client

        store = WeaviateVector(
            collection_name="test_collection",
            config=cfg,
            attributes=["doc_id"],
        )

        # The wrapper must keep the client handed back by the factory.
        assert store._client == fake_client
        connect_mock.assert_called_once()

        # "http://localhost:8080" should be split into host/port parts, with
        # gRPC defaults filled in and credentials built from the api_key.
        kwargs = connect_mock.call_args[1]
        assert kwargs["http_host"] == "localhost"
        assert kwargs["http_port"] == 8080
        assert kwargs["http_secure"] is False
        assert kwargs["grpc_host"] == "localhost"
        assert kwargs["grpc_port"] == 50051
        assert kwargs["grpc_secure"] is False
        assert kwargs["auth_credentials"] is not None

View File

@ -11,6 +11,7 @@ import unittest
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from core.rag.datasource.vdb.weaviate import weaviate_vector as weaviate_vector_module
from core.rag.datasource.vdb.weaviate.weaviate_vector import WeaviateConfig, WeaviateVector
from core.rag.models.document import Document
@ -19,6 +20,7 @@ class TestWeaviateVector(unittest.TestCase):
"""Tests for WeaviateVector class with focus on doc_type metadata handling."""
def setUp(self):
weaviate_vector_module._weaviate_client = None
self.config = WeaviateConfig(
endpoint="http://localhost:8080",
api_key="test-key",
@ -27,6 +29,9 @@ class TestWeaviateVector(unittest.TestCase):
self.collection_name = "Test_Collection_Node"
self.attributes = ["doc_id", "dataset_id", "document_id", "doc_hash", "doc_type"]
def tearDown(self):
weaviate_vector_module._weaviate_client = None
@patch("core.rag.datasource.vdb.weaviate.weaviate_vector.weaviate")
def _create_weaviate_vector(self, mock_weaviate_module):
"""Helper to create a WeaviateVector instance with mocked client."""

View File

@ -1,13 +1,12 @@
import rsa as pyrsa
from Crypto.PublicKey import RSA
from libs import gmpy2_pkcs10aep_cipher
def test_gmpy2_pkcs10aep_cipher():
rsa_key_pair = pyrsa.newkeys(2048)
public_key = rsa_key_pair[0].save_pkcs1()
private_key = rsa_key_pair[1].save_pkcs1()
rsa_key = RSA.generate(2048)
public_key = rsa_key.publickey().export_key(format="PEM")
private_key = rsa_key.export_key(format="PEM")
public_rsa_key = RSA.import_key(public_key)
public_cipher_rsa2 = gmpy2_pkcs10aep_cipher.new(public_rsa_key)

View File

@ -1,9 +1,8 @@
"""Unit tests for enterprise service integrations.
This module covers the enterprise-only default workspace auto-join behavior:
- Enterprise mode disabled: no external calls
- Successful join / skipped join: no errors
- Failures (network/invalid response/invalid UUID): soft-fail wrapper must not raise
Covers:
- Default workspace auto-join behavior
- License status caching (get_cached_license_status)
"""
from unittest.mock import patch
@ -11,6 +10,9 @@ from unittest.mock import patch
import pytest
from services.enterprise.enterprise_service import (
INVALID_LICENSE_CACHE_TTL,
LICENSE_STATUS_CACHE_KEY,
VALID_LICENSE_CACHE_TTL,
DefaultWorkspaceJoinResult,
EnterpriseService,
try_join_default_workspace,
@ -37,7 +39,6 @@ class TestJoinDefaultWorkspace:
"/default-workspace/members",
json={"account_id": account_id},
timeout=1.0,
raise_for_status=True,
)
def test_join_default_workspace_invalid_response_format_raises(self):
@ -139,3 +140,134 @@ class TestTryJoinDefaultWorkspace:
# Should not raise even though UUID parsing fails inside join_default_workspace
try_join_default_workspace("not-a-uuid")
# ---------------------------------------------------------------------------
# get_cached_license_status
# ---------------------------------------------------------------------------
# Dotted module path of the service under test; used to build patch() targets
# so the tests below do not repeat the full string.
_EE_SVC = "services.enterprise.enterprise_service"
class TestGetCachedLicenseStatus:
"""Tests for EnterpriseService.get_cached_license_status."""
def test_returns_none_when_enterprise_disabled(self):
with patch(f"{_EE_SVC}.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
assert EnterpriseService.get_cached_license_status() is None
def test_cache_hit_returns_license_status_enum(self):
from services.feature_service import LicenseStatus
with (
patch(f"{_EE_SVC}.dify_config") as mock_config,
patch(f"{_EE_SVC}.redis_client") as mock_redis,
patch.object(EnterpriseService, "get_info") as mock_get_info,
):
mock_config.ENTERPRISE_ENABLED = True
mock_redis.get.return_value = b"active"
result = EnterpriseService.get_cached_license_status()
assert result == LicenseStatus.ACTIVE
assert isinstance(result, LicenseStatus)
mock_get_info.assert_not_called()
def test_cache_miss_fetches_api_and_caches_valid_status(self):
from services.feature_service import LicenseStatus
with (
patch(f"{_EE_SVC}.dify_config") as mock_config,
patch(f"{_EE_SVC}.redis_client") as mock_redis,
patch.object(EnterpriseService, "get_info") as mock_get_info,
):
mock_config.ENTERPRISE_ENABLED = True
mock_redis.get.return_value = None
mock_get_info.return_value = {"License": {"status": "active"}}
result = EnterpriseService.get_cached_license_status()
assert result == LicenseStatus.ACTIVE
mock_redis.setex.assert_called_once_with(
LICENSE_STATUS_CACHE_KEY, VALID_LICENSE_CACHE_TTL, LicenseStatus.ACTIVE
)
def test_cache_miss_fetches_api_and_caches_invalid_status_with_short_ttl(self):
from services.feature_service import LicenseStatus
with (
patch(f"{_EE_SVC}.dify_config") as mock_config,
patch(f"{_EE_SVC}.redis_client") as mock_redis,
patch.object(EnterpriseService, "get_info") as mock_get_info,
):
mock_config.ENTERPRISE_ENABLED = True
mock_redis.get.return_value = None
mock_get_info.return_value = {"License": {"status": "expired"}}
result = EnterpriseService.get_cached_license_status()
assert result == LicenseStatus.EXPIRED
mock_redis.setex.assert_called_once_with(
LICENSE_STATUS_CACHE_KEY, INVALID_LICENSE_CACHE_TTL, LicenseStatus.EXPIRED
)
def test_redis_read_failure_falls_through_to_api(self):
    """A redis read error degrades gracefully: the status is fetched from the API."""
    from services.feature_service import LicenseStatus

    with (
        patch(f"{_EE_SVC}.dify_config") as config,
        patch(f"{_EE_SVC}.redis_client") as redis,
        patch.object(EnterpriseService, "get_info") as get_info,
    ):
        config.ENTERPRISE_ENABLED = True
        redis.get.side_effect = ConnectionError("redis down")
        get_info.return_value = {"License": {"status": "active"}}
        status = EnterpriseService.get_cached_license_status()
    assert status == LicenseStatus.ACTIVE
    get_info.assert_called_once()
def test_redis_write_failure_still_returns_status(self):
    """A redis write error must not mask the freshly fetched license status."""
    from services.feature_service import LicenseStatus

    with (
        patch(f"{_EE_SVC}.dify_config") as config,
        patch(f"{_EE_SVC}.redis_client") as redis,
        patch.object(EnterpriseService, "get_info") as get_info,
    ):
        config.ENTERPRISE_ENABLED = True
        redis.get.return_value = None
        redis.setex.side_effect = ConnectionError("redis down")
        get_info.return_value = {"License": {"status": "expiring"}}
        status = EnterpriseService.get_cached_license_status()
    assert status == LicenseStatus.EXPIRING
def test_api_failure_returns_none(self):
    """When both the cache is empty and the API call fails, the helper yields None."""
    with (
        patch(f"{_EE_SVC}.dify_config") as config,
        patch(f"{_EE_SVC}.redis_client") as redis,
        patch.object(EnterpriseService, "get_info") as get_info,
    ):
        config.ENTERPRISE_ENABLED = True
        redis.get.return_value = None
        get_info.side_effect = Exception("network failure")
        result = EnterpriseService.get_cached_license_status()
    assert result is None
def test_api_returns_no_license_info(self):
    """An API payload without a "License" key yields None and caches nothing."""
    with (
        patch(f"{_EE_SVC}.dify_config") as config,
        patch(f"{_EE_SVC}.redis_client") as redis,
        patch.object(EnterpriseService, "get_info") as get_info,
    ):
        config.ENTERPRISE_ENABLED = True
        redis.get.return_value = None
        get_info.return_value = {}  # no "License" key
        result = EnterpriseService.get_cached_license_status()
        assert result is None
        redis.setex.assert_not_called()

View File

@ -34,7 +34,6 @@ class TestTryPreUninstallPlugin:
"POST",
"/pre-uninstall-plugin",
json={"tenant_id": "tenant-123", "plugin_unique_identifier": "com.example.my_plugin"},
raise_for_status=True,
timeout=dify_config.ENTERPRISE_REQUEST_TIMEOUT,
)
@ -62,7 +61,6 @@ class TestTryPreUninstallPlugin:
"POST",
"/pre-uninstall-plugin",
json={"tenant_id": "tenant-456", "plugin_unique_identifier": "com.example.other_plugin"},
raise_for_status=True,
timeout=dify_config.ENTERPRISE_REQUEST_TIMEOUT,
)
mock_logger.exception.assert_called_once()
@ -87,7 +85,6 @@ class TestTryPreUninstallPlugin:
"POST",
"/pre-uninstall-plugin",
json={"tenant_id": "tenant-789", "plugin_unique_identifier": "com.example.failing_plugin"},
raise_for_status=True,
timeout=dify_config.ENTERPRISE_REQUEST_TIMEOUT,
)
mock_logger.exception.assert_called_once()

View File

@ -263,7 +263,7 @@ def test_import_app_completed_uses_declared_dependencies(monkeypatch):
assert result.status == ImportStatus.COMPLETED
assert result.app_id == "app-new"
draft_var_service.delete_workflow_variables.assert_called_once_with(app_id="app-new")
draft_var_service.delete_app_workflow_variables.assert_called_once_with(app_id="app-new")
@pytest.mark.parametrize("has_workflow", [True, False])
@ -305,7 +305,7 @@ def test_import_app_legacy_versions_extract_dependencies(monkeypatch, has_workfl
account=_account_mock(), import_mode=ImportMode.YAML_CONTENT, yaml_content=_yaml_dump(data)
)
assert result.status == ImportStatus.COMPLETED_WITH_WARNINGS
draft_var_service.delete_workflow_variables.assert_called_once_with(app_id="app-legacy")
draft_var_service.delete_app_workflow_variables.assert_called_once_with(app_id="app-legacy")
def test_import_app_yaml_error_returns_failed(monkeypatch):

View File

@ -24,7 +24,11 @@ class TestDraftVarLoaderSimple:
def draft_var_loader(self, mock_engine):
"""Create DraftVarLoader instance for testing."""
return DraftVarLoader(
engine=mock_engine, app_id="test-app-id", tenant_id="test-tenant-id", fallback_variables=[]
engine=mock_engine,
app_id="test-app-id",
tenant_id="test-tenant-id",
user_id="test-user-id",
fallback_variables=[],
)
def test_load_offloaded_variable_string_type_unit(self, draft_var_loader):
@ -323,7 +327,9 @@ class TestDraftVarLoaderSimple:
# Verify service method was called
mock_service.get_draft_variables_by_selectors.assert_called_once_with(
draft_var_loader._app_id, selectors
draft_var_loader._app_id,
selectors,
user_id=draft_var_loader._user_id,
)
# Verify offloaded variable loading was called

View File

@ -8,7 +8,7 @@ from sqlalchemy import Engine
from sqlalchemy.orm import Session
from dify_graph.constants import SYSTEM_VARIABLE_NODE_ID
from dify_graph.enums import BuiltinNodeTypes
from dify_graph.enums import BuiltinNodeTypes, SystemVariableKey
from dify_graph.variables.segments import StringSegment
from dify_graph.variables.types import SegmentType
from libs.uuid_utils import uuidv7
@ -182,6 +182,42 @@ class TestDraftVariableSaver:
draft_vars = mock_batch_upsert.call_args[0][1]
assert len(draft_vars) == 2
@patch("services.workflow_draft_variable_service._batch_upsert_draft_variable", autospec=True)
def test_start_node_save_persists_sys_timestamp_and_workflow_run_id(self, mock_batch_upsert):
    """Start node should persist common `sys.*` variables, not only `sys.files`."""
    session_mock = MagicMock(spec=Session)
    user_mock = MagicMock(spec=Account)
    user_mock.id = "test-user-id"
    user_mock.tenant_id = "test-tenant-id"

    saver = DraftVariableSaver(
        session=session_mock,
        app_id="test-app-id",
        node_id="start-node-id",
        node_type=BuiltinNodeTypes.START,
        node_execution_id="exec-id",
        user=user_mock,
    )
    saver.save(
        outputs={
            f"{SYSTEM_VARIABLE_NODE_ID}.{SystemVariableKey.TIMESTAMP}": 1700000000,
            f"{SYSTEM_VARIABLE_NODE_ID}.{SystemVariableKey.WORKFLOW_EXECUTION_ID}": "run-id-123",
        }
    )

    mock_batch_upsert.assert_called_once()
    upserted = mock_batch_upsert.call_args[0][1]
    # plus one dummy output because there are no non-sys Start inputs
    assert len(upserted) == 3
    sys_vars = [var for var in upserted if var.node_id == SYSTEM_VARIABLE_NODE_ID]
    expected_names = {
        str(SystemVariableKey.TIMESTAMP),
        str(SystemVariableKey.WORKFLOW_EXECUTION_ID),
    }
    assert {var.name for var in sys_vars} == expected_names
class TestWorkflowDraftVariableService:
def _get_test_app_id(self):

View File

@ -245,6 +245,7 @@ class TestWorkflowService:
workflow=workflow,
node_config=node_config,
manual_inputs={"#node-0.result#": "LLM output"},
user_id="account-1",
)
node.render_form_content_with_outputs.assert_called_once()

353
api/uv.lock generated
View File

@ -271,16 +271,19 @@ sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984c
[[package]]
name = "alibabacloud-tea-openapi"
version = "0.3.16"
version = "0.4.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-credentials" },
{ name = "alibabacloud-gateway-spi" },
{ name = "alibabacloud-openapi-util" },
{ name = "alibabacloud-tea-util" },
{ name = "alibabacloud-tea-xml" },
{ name = "cryptography" },
{ name = "darabonba-core" },
]
sdist = { url = "https://files.pythonhosted.org/packages/91/4f/b5288eea8f4d4b032c9a8f2cd1d926d5017977d10b874956f31e5343f299/alibabacloud_tea_openapi-0.4.3.tar.gz", hash = "sha256:12aef036ed993637b6f141abbd1de9d6199d5516f4a901588bb65d6a3768d41b", size = 21864, upload-time = "2026-01-15T07:55:16.744Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a5/37/48ee5468ecad19c6d44cf3b9629d77078e836ee3ec760f0366247f307b7c/alibabacloud_tea_openapi-0.4.3-py3-none-any.whl", hash = "sha256:d0b3a373b760ef6278b25fc128c73284301e07888977bf97519e7636d47bdf0a", size = 26159, upload-time = "2026-01-15T07:55:15.72Z" },
]
sdist = { url = "https://files.pythonhosted.org/packages/09/be/f594e79625e5ccfcfe7f12d7d70709a3c59e920878469c998886211c850d/alibabacloud_tea_openapi-0.3.16.tar.gz", hash = "sha256:6bffed8278597592e67860156f424bde4173a6599d7b6039fb640a3612bae292", size = 13087, upload-time = "2025-07-04T09:30:10.689Z" }
[[package]]
name = "alibabacloud-tea-util"
@ -1123,7 +1126,7 @@ wheels = [
[[package]]
name = "clickhouse-connect"
version = "0.10.0"
version = "0.14.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@ -1132,24 +1135,24 @@ dependencies = [
{ name = "urllib3" },
{ name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7b/fd/f8bea1157d40f117248dcaa9abdbf68c729513fcf2098ab5cb4aa58768b8/clickhouse_connect-0.10.0.tar.gz", hash = "sha256:a0256328802c6e5580513e197cef7f9ba49a99fc98e9ba410922873427569564", size = 104753, upload-time = "2025-11-14T20:31:00.947Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/0e/96958db88b6ce6e9d96dc7a836f12c7644934b3a436b04843f19eb8da2db/clickhouse_connect-0.14.1.tar.gz", hash = "sha256:dc107ae9ab7b86409049ae8abe21817543284b438291796d3dd639ad5496a1ab", size = 120093, upload-time = "2026-03-12T15:51:03.606Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/4e/f90caf963d14865c7a3f0e5d80b77e67e0fe0bf39b3de84110707746fa6b/clickhouse_connect-0.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:195f1824405501b747b572e1365c6265bb1629eeb712ce91eda91da3c5794879", size = 272911, upload-time = "2025-11-14T20:29:57.129Z" },
{ url = "https://files.pythonhosted.org/packages/50/c7/e01bd2dd80ea4fbda8968e5022c60091a872fd9de0a123239e23851da231/clickhouse_connect-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7907624635fe7f28e1b85c7c8b125a72679a63ecdb0b9f4250b704106ef438f8", size = 265938, upload-time = "2025-11-14T20:29:58.443Z" },
{ url = "https://files.pythonhosted.org/packages/f4/07/8b567b949abca296e118331d13380bbdefa4225d7d1d32233c59d4b4b2e1/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60772faa54d56f0fa34650460910752a583f5948f44dddeabfafaecbca21fc54", size = 1113548, upload-time = "2025-11-14T20:29:59.781Z" },
{ url = "https://files.pythonhosted.org/packages/9c/13/11f2d37fc95e74d7e2d80702cde87666ce372486858599a61f5209e35fc5/clickhouse_connect-0.10.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7fe2a6cd98517330c66afe703fb242c0d3aa2c91f2f7dc9fb97c122c5c60c34b", size = 1135061, upload-time = "2025-11-14T20:30:01.244Z" },
{ url = "https://files.pythonhosted.org/packages/a0/d0/517181ea80060f84d84cff4d42d330c80c77bb352b728fb1f9681fbad291/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a2427d312bc3526520a0be8c648479af3f6353da7a33a62db2368d6203b08efd", size = 1105105, upload-time = "2025-11-14T20:30:02.679Z" },
{ url = "https://files.pythonhosted.org/packages/7c/b2/4ad93e898562725b58c537cad83ab2694c9b1c1ef37fa6c3f674bdad366a/clickhouse_connect-0.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:63bbb5721bfece698e155c01b8fa95ce4377c584f4d04b43f383824e8a8fa129", size = 1150791, upload-time = "2025-11-14T20:30:03.824Z" },
{ url = "https://files.pythonhosted.org/packages/45/a4/fdfbfacc1fa67b8b1ce980adcf42f9e3202325586822840f04f068aff395/clickhouse_connect-0.10.0-cp311-cp311-win32.whl", hash = "sha256:48554e836c6b56fe0854d9a9f565569010583d4960094d60b68a53f9f83042f0", size = 244014, upload-time = "2025-11-14T20:30:05.157Z" },
{ url = "https://files.pythonhosted.org/packages/08/50/cf53f33f4546a9ce2ab1b9930db4850aa1ae53bff1e4e4fa97c566cdfa19/clickhouse_connect-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9eb8df083e5fda78ac7249938691c2c369e8578b5df34c709467147e8289f1d9", size = 262356, upload-time = "2025-11-14T20:30:06.478Z" },
{ url = "https://files.pythonhosted.org/packages/9e/59/fadbbf64f4c6496cd003a0a3c9223772409a86d0eea9d4ff45d2aa88aabf/clickhouse_connect-0.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b090c7d8e602dd084b2795265cd30610461752284763d9ad93a5d619a0e0ff21", size = 276401, upload-time = "2025-11-14T20:30:07.469Z" },
{ url = "https://files.pythonhosted.org/packages/1c/e3/781f9970f2ef202410f0d64681e42b2aecd0010097481a91e4df186a36c7/clickhouse_connect-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b8a708d38b81dcc8c13bb85549c904817e304d2b7f461246fed2945524b7a31b", size = 268193, upload-time = "2025-11-14T20:30:08.503Z" },
{ url = "https://files.pythonhosted.org/packages/f0/e0/64ab66b38fce762b77b5203a4fcecc603595f2a2361ce1605fc7bb79c835/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3646fc9184a5469b95cf4a0846e6954e6e9e85666f030a5d2acae58fa8afb37e", size = 1123810, upload-time = "2025-11-14T20:30:09.62Z" },
{ url = "https://files.pythonhosted.org/packages/f5/03/19121aecf11a30feaf19049be96988131798c54ac6ba646a38e5faecaa0a/clickhouse_connect-0.10.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fe7e6be0f40a8a77a90482944f5cc2aa39084c1570899e8d2d1191f62460365b", size = 1153409, upload-time = "2025-11-14T20:30:10.855Z" },
{ url = "https://files.pythonhosted.org/packages/ce/ee/63870fd8b666c6030393950ad4ee76b7b69430f5a49a5d3fa32a70b11942/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:88b4890f13163e163bf6fa61f3a013bb974c95676853b7a4e63061faf33911ac", size = 1104696, upload-time = "2025-11-14T20:30:12.187Z" },
{ url = "https://files.pythonhosted.org/packages/e9/bc/fcd8da1c4d007ebce088783979c495e3d7360867cfa8c91327ed235778f5/clickhouse_connect-0.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6286832cc79affc6fddfbf5563075effa65f80e7cd1481cf2b771ce317c67d08", size = 1156389, upload-time = "2025-11-14T20:30:13.385Z" },
{ url = "https://files.pythonhosted.org/packages/4e/33/7cb99cc3fc503c23fd3a365ec862eb79cd81c8dc3037242782d709280fa9/clickhouse_connect-0.10.0-cp312-cp312-win32.whl", hash = "sha256:92b8b6691a92d2613ee35f5759317bd4be7ba66d39bf81c4deed620feb388ca6", size = 243682, upload-time = "2025-11-14T20:30:14.52Z" },
{ url = "https://files.pythonhosted.org/packages/48/5c/12eee6a1f5ecda2dfc421781fde653c6d6ca6f3080f24547c0af40485a5a/clickhouse_connect-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:1159ee2c33e7eca40b53dda917a8b6a2ed889cb4c54f3d83b303b31ddb4f351d", size = 262790, upload-time = "2025-11-14T20:30:15.555Z" },
{ url = "https://files.pythonhosted.org/packages/66/b0/04bc82ca70d4dcc35987c83e4ef04f6dec3c29d3cce4cda3523ebf4498dc/clickhouse_connect-0.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2b1d1acb8f64c3cd9d922d9e8c0b6328238c4a38e084598c86cc95a0edbd8bd", size = 278797, upload-time = "2026-03-12T15:49:34.728Z" },
{ url = "https://files.pythonhosted.org/packages/97/03/f8434ed43946dcab2d8b4ccf8e90b1c6d69abea0fa8b8aaddb1dc9931657/clickhouse_connect-0.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:573f3e5a6b49135b711c086050f46510d4738cc09e5a354cc18ef26f8de5cd98", size = 271849, upload-time = "2026-03-12T15:49:35.881Z" },
{ url = "https://files.pythonhosted.org/packages/a0/db/b3665f4d855c780be8d00638d874fc0d62613d1f1c06ffcad7c11a333f06/clickhouse_connect-0.14.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:86b28932faab182a312779e5c3cf341abe19d31028a399bda9d8b06b3b9adab4", size = 1090975, upload-time = "2026-03-12T15:49:37.064Z" },
{ url = "https://files.pythonhosted.org/packages/ea/a2/7ba2d9669c5771734573397b034169653cdf3348dc4cc66bd66d8ab18910/clickhouse_connect-0.14.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfc9650906ff96452c2b5676a7e68e8a77a5642504596f8482e0f3c0ccdffbf1", size = 1095899, upload-time = "2026-03-12T15:49:38.36Z" },
{ url = "https://files.pythonhosted.org/packages/e2/f4/0394af37b491ca832610f2ca7a129e85d8d857d40c94a42f2c2e6d3d9481/clickhouse_connect-0.14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b379749a962599f9d6ec81e773a3b907ac58b001f4a977e4ac397f6a76fedff2", size = 1077567, upload-time = "2026-03-12T15:49:40.027Z" },
{ url = "https://files.pythonhosted.org/packages/9a/b8/9279a88afac94c262b55cc75aadc6a3e83f7fa1641e618f9060d9d38415f/clickhouse_connect-0.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:43ccb5debd13d41b97af81940c0cac01e92d39f17131d984591bedee13439a5d", size = 1100264, upload-time = "2026-03-12T15:49:41.414Z" },
{ url = "https://files.pythonhosted.org/packages/19/36/20e19ab392c211b83c967e275eb46f663853e0b8ce4da89056fda8a35fc6/clickhouse_connect-0.14.1-cp311-cp311-win32.whl", hash = "sha256:13cbe46c04be8e49da4f6aed698f2570a5295d15f498dd5511b4f761d1ef0edc", size = 250488, upload-time = "2026-03-12T15:49:42.649Z" },
{ url = "https://files.pythonhosted.org/packages/9d/3b/74a07e692a21cad4692e72595cdefbd709bd74a9f778c7334d57a98ee548/clickhouse_connect-0.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:7038cf547c542a17a465e062cd837659f46f99c991efcb010a9ea08ce70960ab", size = 268730, upload-time = "2026-03-12T15:49:44.225Z" },
{ url = "https://files.pythonhosted.org/packages/58/9e/d84a14241967b3aa1e657bbbee83e2eee02d3d6df1ebe8edd4ed72cd8643/clickhouse_connect-0.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97665169090889a8bc4dbae4a5fc758b91a23e49a8f8ddc1ae993f18f6d71e02", size = 280679, upload-time = "2026-03-12T15:49:45.497Z" },
{ url = "https://files.pythonhosted.org/packages/d8/29/80835a980be6298a7a2ae42d5a14aab0c9c066ecafe1763bc1958a6f6f0f/clickhouse_connect-0.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3ee6b513ca7d83e0f7b46d87bc2e48260316431cb466680e3540400379bcd1db", size = 271570, upload-time = "2026-03-12T15:49:46.721Z" },
{ url = "https://files.pythonhosted.org/packages/8b/bf/25c17cb91d72143742d2b060c6954e8000a7753c1fd21f7bf8b49ef2bd89/clickhouse_connect-0.14.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2a0e8a3f46aba99f1c574927d196e12f1ee689e31c41bf0caec86ad3e181abf3", size = 1115637, upload-time = "2026-03-12T15:49:47.921Z" },
{ url = "https://files.pythonhosted.org/packages/2d/5f/5d5df3585d98889aedc55c9eeb2ea90dba27ec4329eee392101619daf0c0/clickhouse_connect-0.14.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:25698cddcdd6c2e4ea12dc5c56d6035d77fc99c5d75e96a54123826c36fdd8ae", size = 1131995, upload-time = "2026-03-12T15:49:49.791Z" },
{ url = "https://files.pythonhosted.org/packages/ad/50/acc9f4c6a1d712f2ed11626f8451eff222e841cf0809655362f0e90454b6/clickhouse_connect-0.14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:29ab49e5cac44b830b58de73d17a7d895f6c362bf67a50134ff405b428774f44", size = 1095380, upload-time = "2026-03-12T15:49:51.388Z" },
{ url = "https://files.pythonhosted.org/packages/08/18/1ef01beee93d243ec9d9c37f0ce62b3083478a5dd7f59cc13279600cd3a5/clickhouse_connect-0.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cbf7d7a134692bacd68dd5f8661e87f5db94af60db9f3a74bd732596794910a", size = 1127217, upload-time = "2026-03-12T15:49:53.016Z" },
{ url = "https://files.pythonhosted.org/packages/18/e2/b4daee8287dc49eb9918c77b1e57f5644e47008f719b77281bf5fca63f6e/clickhouse_connect-0.14.1-cp312-cp312-win32.whl", hash = "sha256:6f295b66f3e2ed931dd0d3bb80e00ee94c6f4a584b2dc6d998872b2e0ceaa706", size = 250775, upload-time = "2026-03-12T15:49:54.639Z" },
{ url = "https://files.pythonhosted.org/packages/01/c7/7b55d346952fcd8f0f491faca4449f607a04764fd23cada846dc93facb9e/clickhouse_connect-0.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:c6bb2cce37041c90f8a3b1b380665acbaf252f125e401c13ce8f8df105378f69", size = 269353, upload-time = "2026-03-12T15:49:55.854Z" },
]
[[package]]
@ -1255,22 +1258,22 @@ wheels = [
[[package]]
name = "couchbase"
version = "4.3.6"
version = "4.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/70/7cf92b2443330e7a4b626a02fe15fbeb1531337d75e6ae6393294e960d18/couchbase-4.3.6.tar.gz", hash = "sha256:d58c5ccdad5d85fc026f328bf4190c4fc0041fdbe68ad900fb32fc5497c3f061", size = 6517695, upload-time = "2025-05-15T17:21:38.157Z" }
sdist = { url = "https://files.pythonhosted.org/packages/73/2f/8f92e743a91c2f4e2ebad0bcfc31ef386c817c64415d89bf44e64dde227a/couchbase-4.5.0.tar.gz", hash = "sha256:fb74386ea5e807ae12cfa294fa6740fe6be3ecaf3bb9ce4fb9ea73706ed05982", size = 6562752, upload-time = "2025-09-30T01:27:37.423Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/0a/eae21d3a9331f7c93e8483f686e1bcb9e3b48f2ce98193beb0637a620926/couchbase-4.3.6-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:4c10fd26271c5630196b9bcc0dd7e17a45fa9c7e46ed5756e5690d125423160c", size = 4775710, upload-time = "2025-05-15T17:20:29.388Z" },
{ url = "https://files.pythonhosted.org/packages/f6/98/0ca042a42f5807bbf8050f52fff39ebceebc7bea7e5897907758f3e1ad39/couchbase-4.3.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:811eee7a6013cea7b15a718e201ee1188df162c656d27c7882b618ab57a08f3a", size = 4020743, upload-time = "2025-05-15T17:20:31.515Z" },
{ url = "https://files.pythonhosted.org/packages/f8/0f/c91407cb082d2322217e8f7ca4abb8eda016a81a4db5a74b7ac6b737597d/couchbase-4.3.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fc177e0161beb1e6e8c4b9561efcb97c51aed55a77ee11836ca194d33ae22b7", size = 4796091, upload-time = "2025-05-15T17:20:33.818Z" },
{ url = "https://files.pythonhosted.org/packages/8c/02/5567b660543828bdbbc68dcae080e388cb0be391aa8a97cce9d8c8a6c147/couchbase-4.3.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02afb1c1edd6b215f702510412b5177ed609df8135930c23789bbc5901dd1b45", size = 5015684, upload-time = "2025-05-15T17:20:36.364Z" },
{ url = "https://files.pythonhosted.org/packages/dc/d1/767908826d5bdd258addab26d7f1d21bc42bafbf5f30d1b556ace06295af/couchbase-4.3.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:594e9eb17bb76ba8e10eeee17a16aef897dd90d33c6771cf2b5b4091da415b32", size = 5673513, upload-time = "2025-05-15T17:20:38.972Z" },
{ url = "https://files.pythonhosted.org/packages/f2/25/39ecde0a06692abce8bb0df4f15542933f05883647a1a57cdc7bbed9c77c/couchbase-4.3.6-cp311-cp311-win_amd64.whl", hash = "sha256:db22c56e38b8313f65807aa48309c8b8c7c44d5517b9ff1d8b4404d4740ec286", size = 4010728, upload-time = "2025-05-15T17:20:43.286Z" },
{ url = "https://files.pythonhosted.org/packages/b1/55/c12b8f626de71363fbe30578f4a0de1b8bb41afbe7646ff8538c3b38ce2a/couchbase-4.3.6-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:a2ae13432b859f513485d4cee691e1e4fce4af23ed4218b9355874b146343f8c", size = 4693517, upload-time = "2025-05-15T17:20:45.433Z" },
{ url = "https://files.pythonhosted.org/packages/a1/aa/2184934d283d99b34a004f577bf724d918278a2962781ca5690d4fa4b6c6/couchbase-4.3.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4ea5ca7e34b5d023c8bab406211ab5d71e74a976ba25fa693b4f8e6c74f85aa2", size = 4022393, upload-time = "2025-05-15T17:20:47.442Z" },
{ url = "https://files.pythonhosted.org/packages/80/29/ba6d3b205a51c04c270c1b56ea31da678b7edc565b35a34237ec2cfc708d/couchbase-4.3.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6eaca0a71fd8f9af4344b7d6474d7b74d1784ae9a658f6bc3751df5f9a4185ae", size = 4798396, upload-time = "2025-05-15T17:20:49.473Z" },
{ url = "https://files.pythonhosted.org/packages/4a/94/d7d791808bd9064c01f965015ff40ee76e6bac10eaf2c73308023b9bdedf/couchbase-4.3.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0470378b986f69368caed6d668ac6530e635b0c1abaef3d3f524cfac0dacd878", size = 5018099, upload-time = "2025-05-15T17:20:52.541Z" },
{ url = "https://files.pythonhosted.org/packages/a6/04/cec160f9f4b862788e2a0167616472a5695b2f569bd62204938ab674835d/couchbase-4.3.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:374ce392558f1688ac073aa0b15c256b1a441201d965811fd862357ff05d27a9", size = 5672633, upload-time = "2025-05-15T17:20:55.994Z" },
{ url = "https://files.pythonhosted.org/packages/1b/a2/1da2ab45412b9414e2c6a578e0e7a24f29b9261ef7de11707c2fc98045b8/couchbase-4.3.6-cp312-cp312-win_amd64.whl", hash = "sha256:cd734333de34d8594504c163bb6c47aea9cc1f2cefdf8e91875dd9bf14e61e29", size = 4013298, upload-time = "2025-05-15T17:20:59.533Z" },
{ url = "https://files.pythonhosted.org/packages/ca/a7/ba28fcab4f211e570582990d9592d8a57566158a0712fbc9d0d9ac486c2a/couchbase-4.5.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:3d3258802baa87d9ffeccbb2b31dcabe2a4ef27c9be81e0d3d710fd7436da24a", size = 5037084, upload-time = "2025-09-30T01:25:16.748Z" },
{ url = "https://files.pythonhosted.org/packages/85/38/f26912b56a41f22ab9606304014ef1435fc4bef76144382f91c1a4ce1d4c/couchbase-4.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:18b47f1f3a2007f88203f611570d96e62bb1fb9568dec0483a292a5e87f6d1df", size = 4323514, upload-time = "2025-09-30T01:25:22.628Z" },
{ url = "https://files.pythonhosted.org/packages/35/a6/5ef140f8681a2488ed6eb2a2bc9fc918b6f11e9f71bbad75e4de73b8dbf3/couchbase-4.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9c2a16830db9437aae92e31f9ceda6c7b70707e316152fc99552b866b09a1967", size = 5181111, upload-time = "2025-09-30T01:25:30.538Z" },
{ url = "https://files.pythonhosted.org/packages/7b/2e/1f0f06e920dbae07c3d8af6b2af3d5213e43d3825e0931c19564fe4d5c1b/couchbase-4.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a86774680e46488a7955c6eae8fba5200a1fd5f9de9ac0a34acb6c87dc2b513", size = 5442969, upload-time = "2025-09-30T01:25:37.976Z" },
{ url = "https://files.pythonhosted.org/packages/9a/2e/6ece47df4d987dbeaae3fdcf7aa4d6a8154c949c28e925f01074dfd0b8b8/couchbase-4.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b68dae005ab4c157930c76a3116e478df25aa1af00fa10cc1cc755df1831ad59", size = 6108562, upload-time = "2025-09-30T01:25:45.674Z" },
{ url = "https://files.pythonhosted.org/packages/be/a7/2f84a1d117cf70ad30e8b08ae9b1c4a03c65146bab030ed6eb84f454045b/couchbase-4.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbc50956fb68d42929d21d969f4512b38798259ae48c47cbf6d676cc3a01b058", size = 4269303, upload-time = "2025-09-30T01:25:49.341Z" },
{ url = "https://files.pythonhosted.org/packages/2f/bc/3b00403edd8b188a93f48b8231dbf7faf7b40d318d3e73bb0e68c4965bbd/couchbase-4.5.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:be1ac2bf7cbccf28eebd7fa8b1d7199fbe84c96b0f7f2c0d69963b1d6ce53985", size = 5128307, upload-time = "2025-09-30T01:25:53.615Z" },
{ url = "https://files.pythonhosted.org/packages/7f/52/2ccfa8c8650cc341813713a47eeeb8ad13a25e25b0f4747d224106602a24/couchbase-4.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:035c394d38297c484bd57fc92b27f6a571a36ab5675b4ec873fd15bf65e8f28e", size = 4326149, upload-time = "2025-09-30T01:25:57.524Z" },
{ url = "https://files.pythonhosted.org/packages/32/80/fe3f074f321474c824ec67b97c5c4aa99047d45c777bb29353f9397c6604/couchbase-4.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:117685f6827abbc332e151625b0a9890c2fafe0d3c3d9e564b903d5c411abe5d", size = 5184623, upload-time = "2025-09-30T01:26:02.166Z" },
{ url = "https://files.pythonhosted.org/packages/f3/e5/86381f49e4cf1c6db23c397b6a32b532cd4df7b9975b0cd2da3db2ffe269/couchbase-4.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:632a918f81a7373832991b79b6ab429e56ef4ff68dfb3517af03f0e2be7e3e4f", size = 5446579, upload-time = "2025-09-30T01:26:09.39Z" },
{ url = "https://files.pythonhosted.org/packages/c8/85/a68d04233a279e419062ceb1c6866b61852c016d1854cd09cde7f00bc53c/couchbase-4.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:67fc0fd1a4535b5be093f834116a70fb6609085399e6b63539241b919da737b7", size = 6104619, upload-time = "2025-09-30T01:26:15.525Z" },
{ url = "https://files.pythonhosted.org/packages/56/8c/0511bac5dd2d998aeabcfba6a2804ecd9eb3d83f9d21cc3293a56fbc70a8/couchbase-4.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:02199b4528f3106c231c00aaf85b7cc6723accbc654b903bb2027f78a04d12f4", size = 4274424, upload-time = "2025-09-30T01:26:21.484Z" },
]
[[package]]
@ -1369,47 +1372,43 @@ wheels = [
[[package]]
name = "cryptography"
version = "46.0.5"
version = "44.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
sdist = { url = "https://files.pythonhosted.org/packages/53/d6/1411ab4d6108ab167d06254c5be517681f1e331f90edf1379895bcb87020/cryptography-44.0.3.tar.gz", hash = "sha256:fe19d8bc5536a91a24a8133328880a41831b6c5df54599a8417b62fe015d3053", size = 711096, upload-time = "2025-05-02T19:36:04.667Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
{ url = "https://files.pythonhosted.org/packages/08/53/c776d80e9d26441bb3868457909b4e74dd9ccabd182e10b2b0ae7a07e265/cryptography-44.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:962bc30480a08d133e631e8dfd4783ab71cc9e33d5d7c1e192f0b7c06397bb88", size = 6670281, upload-time = "2025-05-02T19:34:50.665Z" },
{ url = "https://files.pythonhosted.org/packages/6a/06/af2cf8d56ef87c77319e9086601bef621bedf40f6f59069e1b6d1ec498c5/cryptography-44.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc61e8f3bf5b60346d89cd3d37231019c17a081208dfbbd6e1605ba03fa137", size = 3959305, upload-time = "2025-05-02T19:34:53.042Z" },
{ url = "https://files.pythonhosted.org/packages/ae/01/80de3bec64627207d030f47bf3536889efee8913cd363e78ca9a09b13c8e/cryptography-44.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58968d331425a6f9eedcee087f77fd3c927c88f55368f43ff7e0a19891f2642c", size = 4171040, upload-time = "2025-05-02T19:34:54.675Z" },
{ url = "https://files.pythonhosted.org/packages/bd/48/bb16b7541d207a19d9ae8b541c70037a05e473ddc72ccb1386524d4f023c/cryptography-44.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e28d62e59a4dbd1d22e747f57d4f00c459af22181f0b2f787ea83f5a876d7c76", size = 3963411, upload-time = "2025-05-02T19:34:56.61Z" },
{ url = "https://files.pythonhosted.org/packages/42/b2/7d31f2af5591d217d71d37d044ef5412945a8a8e98d5a2a8ae4fd9cd4489/cryptography-44.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af653022a0c25ef2e3ffb2c673a50e5a0d02fecc41608f4954176f1933b12359", size = 3689263, upload-time = "2025-05-02T19:34:58.591Z" },
{ url = "https://files.pythonhosted.org/packages/25/50/c0dfb9d87ae88ccc01aad8eb93e23cfbcea6a6a106a9b63a7b14c1f93c75/cryptography-44.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:157f1f3b8d941c2bd8f3ffee0af9b049c9665c39d3da9db2dc338feca5e98a43", size = 4196198, upload-time = "2025-05-02T19:35:00.988Z" },
{ url = "https://files.pythonhosted.org/packages/66/c9/55c6b8794a74da652690c898cb43906310a3e4e4f6ee0b5f8b3b3e70c441/cryptography-44.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:c6cd67722619e4d55fdb42ead64ed8843d64638e9c07f4011163e46bc512cf01", size = 3966502, upload-time = "2025-05-02T19:35:03.091Z" },
{ url = "https://files.pythonhosted.org/packages/b6/f7/7cb5488c682ca59a02a32ec5f975074084db4c983f849d47b7b67cc8697a/cryptography-44.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b424563394c369a804ecbee9b06dfb34997f19d00b3518e39f83a5642618397d", size = 4196173, upload-time = "2025-05-02T19:35:05.018Z" },
{ url = "https://files.pythonhosted.org/packages/d2/0b/2f789a8403ae089b0b121f8f54f4a3e5228df756e2146efdf4a09a3d5083/cryptography-44.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c91fc8e8fd78af553f98bc7f2a1d8db977334e4eea302a4bfd75b9461c2d8904", size = 4087713, upload-time = "2025-05-02T19:35:07.187Z" },
{ url = "https://files.pythonhosted.org/packages/1d/aa/330c13655f1af398fc154089295cf259252f0ba5df93b4bc9d9c7d7f843e/cryptography-44.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:25cd194c39fa5a0aa4169125ee27d1172097857b27109a45fadc59653ec06f44", size = 4299064, upload-time = "2025-05-02T19:35:08.879Z" },
{ url = "https://files.pythonhosted.org/packages/10/a8/8c540a421b44fd267a7d58a1fd5f072a552d72204a3f08194f98889de76d/cryptography-44.0.3-cp37-abi3-win32.whl", hash = "sha256:3be3f649d91cb182c3a6bd336de8b61a0a71965bd13d1a04a0e15b39c3d5809d", size = 2773887, upload-time = "2025-05-02T19:35:10.41Z" },
{ url = "https://files.pythonhosted.org/packages/b9/0d/c4b1657c39ead18d76bbd122da86bd95bdc4095413460d09544000a17d56/cryptography-44.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:3883076d5c4cc56dbef0b898a74eb6992fdac29a7b9013870b34efe4ddb39a0d", size = 3209737, upload-time = "2025-05-02T19:35:12.12Z" },
{ url = "https://files.pythonhosted.org/packages/34/a3/ad08e0bcc34ad436013458d7528e83ac29910943cea42ad7dd4141a27bbb/cryptography-44.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:5639c2b16764c6f76eedf722dbad9a0914960d3489c0cc38694ddf9464f1bb2f", size = 6673501, upload-time = "2025-05-02T19:35:13.775Z" },
{ url = "https://files.pythonhosted.org/packages/b1/f0/7491d44bba8d28b464a5bc8cc709f25a51e3eac54c0a4444cf2473a57c37/cryptography-44.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3ffef566ac88f75967d7abd852ed5f182da252d23fac11b4766da3957766759", size = 3960307, upload-time = "2025-05-02T19:35:15.917Z" },
{ url = "https://files.pythonhosted.org/packages/f7/c8/e5c5d0e1364d3346a5747cdcd7ecbb23ca87e6dea4f942a44e88be349f06/cryptography-44.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:192ed30fac1728f7587c6f4613c29c584abdc565d7417c13904708db10206645", size = 4170876, upload-time = "2025-05-02T19:35:18.138Z" },
{ url = "https://files.pythonhosted.org/packages/73/96/025cb26fc351d8c7d3a1c44e20cf9a01e9f7cf740353c9c7a17072e4b264/cryptography-44.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7d5fe7195c27c32a64955740b949070f21cba664604291c298518d2e255931d2", size = 3964127, upload-time = "2025-05-02T19:35:19.864Z" },
{ url = "https://files.pythonhosted.org/packages/01/44/eb6522db7d9f84e8833ba3bf63313f8e257729cf3a8917379473fcfd6601/cryptography-44.0.3-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3f07943aa4d7dad689e3bb1638ddc4944cc5e0921e3c227486daae0e31a05e54", size = 3689164, upload-time = "2025-05-02T19:35:21.449Z" },
{ url = "https://files.pythonhosted.org/packages/68/fb/d61a4defd0d6cee20b1b8a1ea8f5e25007e26aeb413ca53835f0cae2bcd1/cryptography-44.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cb90f60e03d563ca2445099edf605c16ed1d5b15182d21831f58460c48bffb93", size = 4198081, upload-time = "2025-05-02T19:35:23.187Z" },
{ url = "https://files.pythonhosted.org/packages/1b/50/457f6911d36432a8811c3ab8bd5a6090e8d18ce655c22820994913dd06ea/cryptography-44.0.3-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ab0b005721cc0039e885ac3503825661bd9810b15d4f374e473f8c89b7d5460c", size = 3967716, upload-time = "2025-05-02T19:35:25.426Z" },
{ url = "https://files.pythonhosted.org/packages/35/6e/dca39d553075980ccb631955c47b93d87d27f3596da8d48b1ae81463d915/cryptography-44.0.3-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3bb0847e6363c037df8f6ede57d88eaf3410ca2267fb12275370a76f85786a6f", size = 4197398, upload-time = "2025-05-02T19:35:27.678Z" },
{ url = "https://files.pythonhosted.org/packages/9b/9d/d1f2fe681eabc682067c66a74addd46c887ebacf39038ba01f8860338d3d/cryptography-44.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b0cc66c74c797e1db750aaa842ad5b8b78e14805a9b5d1348dc603612d3e3ff5", size = 4087900, upload-time = "2025-05-02T19:35:29.312Z" },
{ url = "https://files.pythonhosted.org/packages/c4/f5/3599e48c5464580b73b236aafb20973b953cd2e7b44c7c2533de1d888446/cryptography-44.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6866df152b581f9429020320e5eb9794c8780e90f7ccb021940d7f50ee00ae0b", size = 4301067, upload-time = "2025-05-02T19:35:31.547Z" },
{ url = "https://files.pythonhosted.org/packages/a7/6c/d2c48c8137eb39d0c193274db5c04a75dab20d2f7c3f81a7dcc3a8897701/cryptography-44.0.3-cp39-abi3-win32.whl", hash = "sha256:c138abae3a12a94c75c10499f1cbae81294a6f983b3af066390adee73f433028", size = 2775467, upload-time = "2025-05-02T19:35:33.805Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ad/51f212198681ea7b0deaaf8846ee10af99fba4e894f67b353524eab2bbe5/cryptography-44.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:5d186f32e52e66994dce4f766884bcb9c68b8da62d61d9d215bfe5fb56d21334", size = 3210375, upload-time = "2025-05-02T19:35:35.369Z" },
{ url = "https://files.pythonhosted.org/packages/8d/4b/c11ad0b6c061902de5223892d680e89c06c7c4d606305eb8de56c5427ae6/cryptography-44.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:896530bc9107b226f265effa7ef3f21270f18a2026bc09fed1ebd7b66ddf6375", size = 3390230, upload-time = "2025-05-02T19:35:49.062Z" },
{ url = "https://files.pythonhosted.org/packages/58/11/0a6bf45d53b9b2290ea3cec30e78b78e6ca29dc101e2e296872a0ffe1335/cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9b4d4a5dbee05a2c390bf212e78b99434efec37b17a4bff42f50285c5c8c9647", size = 3895216, upload-time = "2025-05-02T19:35:51.351Z" },
{ url = "https://files.pythonhosted.org/packages/0a/27/b28cdeb7270e957f0077a2c2bfad1b38f72f1f6d699679f97b816ca33642/cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02f55fb4f8b79c1221b0961488eaae21015b69b210e18c386b69de182ebb1259", size = 4115044, upload-time = "2025-05-02T19:35:53.044Z" },
{ url = "https://files.pythonhosted.org/packages/35/b0/ec4082d3793f03cb248881fecefc26015813199b88f33e3e990a43f79835/cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:dd3db61b8fe5be220eee484a17233287d0be6932d056cf5738225b9c05ef4fff", size = 3898034, upload-time = "2025-05-02T19:35:54.72Z" },
{ url = "https://files.pythonhosted.org/packages/0b/7f/adf62e0b8e8d04d50c9a91282a57628c00c54d4ae75e2b02a223bd1f2613/cryptography-44.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:978631ec51a6bbc0b7e58f23b68a8ce9e5f09721940933e9c217068388789fe5", size = 4114449, upload-time = "2025-05-02T19:35:57.139Z" },
{ url = "https://files.pythonhosted.org/packages/87/62/d69eb4a8ee231f4bf733a92caf9da13f1c81a44e874b1d4080c25ecbb723/cryptography-44.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:5d20cc348cca3a8aa7312f42ab953a56e15323800ca3ab0706b8cd452a3a056c", size = 3134369, upload-time = "2025-05-02T19:35:58.907Z" },
]
[[package]]
@ -1436,6 +1435,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/94/fb/1b681635bfd5f2274d0caa8f934b58435db6c091b97f5593738065ddb786/cymem-2.0.13-cp312-cp312-win_arm64.whl", hash = "sha256:6bbd701338df7bf408648191dff52472a9b334f71bcd31a21a41d83821050f67", size = 35959, upload-time = "2025-11-14T14:57:41.682Z" },
]
[[package]]
name = "darabonba-core"
version = "1.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "alibabacloud-tea" },
{ name = "requests" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/66/d3/a7daaee544c904548e665829b51a9fa2572acb82c73ad787a8ff90273002/darabonba_core-1.0.5-py3-none-any.whl", hash = "sha256:671ab8dbc4edc2a8f88013da71646839bb8914f1259efc069353243ef52ea27c", size = 24580, upload-time = "2025-12-12T07:53:59.494Z" },
]
[[package]]
name = "databricks-sdk"
version = "0.73.0"
@ -1797,7 +1809,7 @@ requires-dist = [
{ name = "transformers", specifier = "~=5.3.0" },
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.21.5" },
{ name = "weave", specifier = ">=0.52.16" },
{ name = "weaviate-client", specifier = "==4.17.0" },
{ name = "weaviate-client", specifier = "==4.20.4" },
{ name = "webvtt-py", specifier = "~=0.5.1" },
{ name = "yarl", specifier = "~=1.23.0" },
]
@ -1885,31 +1897,31 @@ tools = [
]
vdb = [
{ name = "alibabacloud-gpdb20160503", specifier = "~=3.8.0" },
{ name = "alibabacloud-tea-openapi", specifier = "~=0.3.9" },
{ name = "alibabacloud-tea-openapi", specifier = "~=0.4.3" },
{ name = "chromadb", specifier = "==0.5.20" },
{ name = "clickhouse-connect", specifier = "~=0.10.0" },
{ name = "clickhouse-connect", specifier = "~=0.14.1" },
{ name = "clickzetta-connector-python", specifier = ">=0.8.102" },
{ name = "couchbase", specifier = "~=4.3.0" },
{ name = "couchbase", specifier = "~=4.5.0" },
{ name = "elasticsearch", specifier = "==8.14.0" },
{ name = "holo-search-sdk", specifier = ">=0.4.1" },
{ name = "intersystems-irispython", specifier = ">=5.1.0" },
{ name = "mo-vector", specifier = "~=0.1.13" },
{ name = "mysql-connector-python", specifier = ">=9.3.0" },
{ name = "opensearch-py", specifier = "==3.1.0" },
{ name = "oracledb", specifier = "==3.3.0" },
{ name = "oracledb", specifier = "==3.4.2" },
{ name = "pgvecto-rs", extras = ["sqlalchemy"], specifier = "~=0.2.1" },
{ name = "pgvector", specifier = "==0.2.5" },
{ name = "pymilvus", specifier = "~=2.5.0" },
{ name = "pymochow", specifier = "==2.2.9" },
{ name = "pgvector", specifier = "==0.4.2" },
{ name = "pymilvus", specifier = "~=2.6.10" },
{ name = "pymochow", specifier = "==2.3.6" },
{ name = "pyobvector", specifier = "~=0.2.17" },
{ name = "qdrant-client", specifier = "==1.9.0" },
{ name = "tablestore", specifier = "==6.3.7" },
{ name = "tcvectordb", specifier = "~=1.6.4" },
{ name = "tidb-vector", specifier = "==0.0.9" },
{ name = "upstash-vector", specifier = "==0.6.0" },
{ name = "tablestore", specifier = "==6.4.1" },
{ name = "tcvectordb", specifier = "~=2.0.0" },
{ name = "tidb-vector", specifier = "==0.0.15" },
{ name = "upstash-vector", specifier = "==0.8.0" },
{ name = "volcengine-compat", specifier = "~=1.0.0" },
{ name = "weaviate-client", specifier = "==4.17.0" },
{ name = "xinference-client", specifier = "~=1.2.2" },
{ name = "weaviate-client", specifier = "==4.20.4" },
{ name = "xinference-client", specifier = "~=2.3.1" },
]
[[package]]
@ -1978,6 +1990,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/0d/9feae160378a3553fa9a339b0e9c1a048e147a4127210e286ef18b730f03/durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286", size = 3922, upload-time = "2025-05-17T13:52:36.463Z" },
]
[[package]]
name = "ecdsa"
version = "0.19.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" },
]
[[package]]
name = "elastic-transport"
version = "8.17.1"
@ -2477,16 +2501,15 @@ wheels = [
[[package]]
name = "google-auth"
version = "2.49.0"
version = "2.49.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "pyasn1-modules" },
{ name = "rsa" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7d/59/7371175bfd949abfb1170aa076352131d7281bd9449c0f978604fc4431c3/google_auth-2.49.0.tar.gz", hash = "sha256:9cc2d9259d3700d7a257681f81052db6737495a1a46b610597f4b8bafe5286ae", size = 333444, upload-time = "2026-03-06T21:53:06.07Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/37/45/de64b823b639103de4b63dd193480dce99526bd36be6530c2dba85bf7817/google_auth-2.49.0-py3-none-any.whl", hash = "sha256:f893ef7307f19cf53700b7e2f61b5a6affe3aa0edf9943b13788920ab92d8d87", size = 240676, upload-time = "2026-03-06T21:52:38.304Z" },
{ url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" },
]
[package.optional-dependencies]
@ -3678,20 +3701,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
[[package]]
name = "milvus-lite"
version = "2.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "tqdm", marker = "sys_platform != 'win32'" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/b2/acc5024c8e8b6a0b034670b8e8af306ebd633ede777dcbf557eac4785937/milvus_lite-2.5.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:6b014453200ba977be37ba660cb2d021030375fa6a35bc53c2e1d92980a0c512", size = 27934713, upload-time = "2025-06-30T04:23:37.028Z" },
{ url = "https://files.pythonhosted.org/packages/9b/2e/746f5bb1d6facd1e73eb4af6dd5efda11125b0f29d7908a097485ca6cad9/milvus_lite-2.5.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a2e031088bf308afe5f8567850412d618cfb05a65238ed1a6117f60decccc95a", size = 24421451, upload-time = "2025-06-30T04:23:51.747Z" },
{ url = "https://files.pythonhosted.org/packages/2e/cf/3d1fee5c16c7661cf53977067a34820f7269ed8ba99fe9cf35efc1700866/milvus_lite-2.5.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:a13277e9bacc6933dea172e42231f7e6135bd3bdb073dd2688ee180418abd8d9", size = 45337093, upload-time = "2025-06-30T04:24:06.706Z" },
{ url = "https://files.pythonhosted.org/packages/d3/82/41d9b80f09b82e066894d9b508af07b7b0fa325ce0322980674de49106a0/milvus_lite-2.5.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:25ce13f4b8d46876dd2b7ac8563d7d8306da7ff3999bb0d14b116b30f71d706c", size = 55263911, upload-time = "2025-06-30T04:24:19.434Z" },
]
[[package]]
name = "mlflow-skinny"
version = "3.10.1"
@ -3930,21 +3939,21 @@ wheels = [
[[package]]
name = "mysql-connector-python"
version = "9.5.0"
version = "9.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/39/33/b332b001bc8c5ee09255a0d4b09a254da674450edd6a3e5228b245ca82a0/mysql_connector_python-9.5.0.tar.gz", hash = "sha256:92fb924285a86d8c146ebd63d94f9eaefa548da7813bc46271508fdc6cc1d596", size = 12251077, upload-time = "2025-10-22T09:05:45.423Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6e/c89babc7de3df01467d159854414659c885152579903a8220c8db02a3835/mysql_connector_python-9.6.0.tar.gz", hash = "sha256:c453bb55347174d87504b534246fb10c589daf5d057515bf615627198a3c7ef1", size = 12254999, upload-time = "2026-02-10T12:04:52.63Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/05/03/77347d58b0027ce93a41858477e08422e498c6ebc24348b1f725ed7a67ae/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:653e70cd10cf2d18dd828fae58dff5f0f7a5cf7e48e244f2093314dddf84a4b9", size = 17578984, upload-time = "2025-10-22T09:01:41.213Z" },
{ url = "https://files.pythonhosted.org/packages/a5/bb/0f45c7ee55ebc56d6731a593d85c0e7f25f83af90a094efebfd5be9fe010/mysql_connector_python-9.5.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:5add93f60b3922be71ea31b89bc8a452b876adbb49262561bd559860dae96b3f", size = 18445067, upload-time = "2025-10-22T09:01:43.215Z" },
{ url = "https://files.pythonhosted.org/packages/1c/ec/054de99d4aa50d851a37edca9039280f7194cc1bfd30aab38f5bd6977ebe/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:20950a5e44896c03e3dc93ceb3a5e9b48c9acae18665ca6e13249b3fe5b96811", size = 33668029, upload-time = "2025-10-22T09:01:45.74Z" },
{ url = "https://files.pythonhosted.org/packages/90/a2/e6095dc3a7ad5c959fe4a65681db63af131f572e57cdffcc7816bc84e3ad/mysql_connector_python-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7fdd3205b9242c284019310fa84437f3357b13f598e3f9b5d80d337d4a6406b8", size = 34101687, upload-time = "2025-10-22T09:01:48.462Z" },
{ url = "https://files.pythonhosted.org/packages/9c/88/bc13c33fca11acaf808bd1809d8602d78f5bb84f7b1e7b1a288c383a14fd/mysql_connector_python-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c021d8b0830958b28712c70c53b206b4cf4766948dae201ea7ca588a186605e0", size = 16511749, upload-time = "2025-10-22T09:01:51.032Z" },
{ url = "https://files.pythonhosted.org/packages/02/89/167ebee82f4b01ba7339c241c3cc2518886a2be9f871770a1efa81b940a0/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a72c2ef9d50b84f3c567c31b3bf30901af740686baa2a4abead5f202e0b7ea61", size = 17581904, upload-time = "2025-10-22T09:01:53.21Z" },
{ url = "https://files.pythonhosted.org/packages/67/46/630ca969ce10b30fdc605d65dab4a6157556d8cc3b77c724f56c2d83cb79/mysql_connector_python-9.5.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd9ba5a946cfd3b3b2688a75135357e862834b0321ed936fd968049be290872b", size = 18448195, upload-time = "2025-10-22T09:01:55.378Z" },
{ url = "https://files.pythonhosted.org/packages/f6/87/4c421f41ad169d8c9065ad5c46673c7af889a523e4899c1ac1d6bfd37262/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5ef7accbdf8b5f6ec60d2a1550654b7e27e63bf6f7b04020d5fb4191fb02bc4d", size = 33668638, upload-time = "2025-10-22T09:01:57.896Z" },
{ url = "https://files.pythonhosted.org/packages/a6/01/67cf210d50bfefbb9224b9a5c465857c1767388dade1004c903c8e22a991/mysql_connector_python-9.5.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a6e0a4a0274d15e3d4c892ab93f58f46431222117dba20608178dfb2cc4d5fd8", size = 34102899, upload-time = "2025-10-22T09:02:00.291Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ef/3d1a67d503fff38cc30e11d111cf28f0976987fb175f47b10d44494e1080/mysql_connector_python-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:b6c69cb37600b7e22f476150034e2afbd53342a175e20aea887f8158fc5e3ff6", size = 16512684, upload-time = "2025-10-22T09:02:02.411Z" },
{ url = "https://files.pythonhosted.org/packages/95/e1/45373c06781340c7b74fe9b88b85278ac05321889a307eaa5be079a997d4/mysql_connector_python-9.5.0-py2.py3-none-any.whl", hash = "sha256:ace137b88eb6fdafa1e5b2e03ac76ce1b8b1844b3a4af1192a02ae7c1a45bdee", size = 479047, upload-time = "2025-10-22T09:02:27.809Z" },
{ url = "https://files.pythonhosted.org/packages/2a/08/0e9bce000736454c2b8bb4c40bded79328887483689487dad7df4cf59fb7/mysql_connector_python-9.6.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:011931f7392a1087e10d305b0303f2a20cc1af2c1c8a15cd5691609aa95dfcbd", size = 17582646, upload-time = "2026-01-21T09:04:48.327Z" },
{ url = "https://files.pythonhosted.org/packages/93/aa/3dd4db039fc6a9bcbdbade83be9914ead6786c0be4918170dfaf89327b76/mysql_connector_python-9.6.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b5212372aff6833473d2560ac87d3df9fb2498d0faacb7ebf231d947175fa36a", size = 18449358, upload-time = "2026-01-21T09:04:50.278Z" },
{ url = "https://files.pythonhosted.org/packages/53/38/ecd6d35382b6265ff5f030464d53b45e51ff2c2523ab88771c277fd84c05/mysql_connector_python-9.6.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61deca6e243fafbb3cf08ae27bd0c83d0f8188de8456e46aeba0d3db15bb7230", size = 34169309, upload-time = "2026-01-21T09:04:52.402Z" },
{ url = "https://files.pythonhosted.org/packages/18/1d/fe1133eb76089342854d8fbe88e28598f7e06bc684a763d21fc7b23f1d5e/mysql_connector_python-9.6.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:adabbc5e1475cdf5fb6f1902a25edc3bd1e0726fa45f01ab1b8f479ff43b3337", size = 34541101, upload-time = "2026-01-21T09:04:55.897Z" },
{ url = "https://files.pythonhosted.org/packages/3f/99/da0f55beb970ca049fd7d37a6391d686222af89a8b13e636d8e9bbd06536/mysql_connector_python-9.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:8732ca0b7417b45238bcbfc7e64d9c4d62c759672207c6284f0921c366efddc7", size = 16514767, upload-time = "2026-02-10T12:03:50.584Z" },
{ url = "https://files.pythonhosted.org/packages/8f/d9/2a4b4d90b52f4241f0f71618cd4bd8779dd6d18db8058b0a4dd83ec0541c/mysql_connector_python-9.6.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9664e217c72dd6fb700f4c8512af90261f72d2f5d7c00c4e13e4c1e09bfa3d5e", size = 17585672, upload-time = "2026-02-10T12:03:52.955Z" },
{ url = "https://files.pythonhosted.org/packages/33/91/2495835733a054e716a17dc28404748b33f2dc1da1ae4396fb45574adf40/mysql_connector_python-9.6.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:1ed4b5c4761e5333035293e746683890e4ef2e818e515d14023fd80293bc31fa", size = 18452624, upload-time = "2026-02-10T12:03:56.153Z" },
{ url = "https://files.pythonhosted.org/packages/7a/69/e83abbbbf7f8eed855b5a5ff7285bc0afb1199418ac036c7691edf41e154/mysql_connector_python-9.6.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:5095758dcb89a6bce2379f349da336c268c407129002b595c5dba82ce387e2a5", size = 34169154, upload-time = "2026-02-10T12:03:58.831Z" },
{ url = "https://files.pythonhosted.org/packages/82/44/67bb61c71f398fbc739d07e8dcadad94e2f655874cb32ae851454066bea0/mysql_connector_python-9.6.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ae4e7780fad950a4f267dea5851048d160f5b71314a342cdbf30b154f1c74f7", size = 34542947, upload-time = "2026-02-10T12:04:02.408Z" },
{ url = "https://files.pythonhosted.org/packages/ba/39/994c4f7e9c59d3ca534a831d18442ac4c529865db20aeaa4fd94e2af5efd/mysql_connector_python-9.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c180e0b4100d7402e03993bfac5c97d18e01d7ca9d198d742fffc245077f8ffe", size = 16515709, upload-time = "2026-02-10T12:04:04.924Z" },
{ url = "https://files.pythonhosted.org/packages/15/dd/b3250826c29cee7816de4409a2fe5e469a68b9a89f6bfaa5eed74f05532c/mysql_connector_python-9.6.0-py2.py3-none-any.whl", hash = "sha256:44b0fb57207ebc6ae05b5b21b7968a9ed33b29187fe87b38951bad2a334d75d5", size = 480527, upload-time = "2026-02-10T12:04:36.176Z" },
]
[[package]]
@ -4559,23 +4568,24 @@ numpy = [
[[package]]
name = "oracledb"
version = "3.3.0"
version = "3.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/51/c9/fae18fa5d803712d188486f8e86ad4f4e00316793ca19745d7c11092c360/oracledb-3.3.0.tar.gz", hash = "sha256:e830d3544a1578296bcaa54c6e8c8ae10a58c7db467c528c4b27adbf9c8b4cb0", size = 811776, upload-time = "2025-07-29T22:34:10.489Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f7/02/70a872d1a4a739b4f7371ab8d3d5ed8c6e57e142e2503531aafcb220893c/oracledb-3.4.2.tar.gz", hash = "sha256:46e0f2278ff1fe83fbc33a3b93c72d429323ec7eed47bc9484e217776cd437e5", size = 855467, upload-time = "2026-01-28T17:25:39.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/35/95d9a502fdc48ce1ef3a513ebd027488353441e15aa0448619abb3d09d32/oracledb-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d9adb74f837838e21898d938e3a725cf73099c65f98b0b34d77146b453e945e0", size = 3963945, upload-time = "2025-07-29T22:34:28.633Z" },
{ url = "https://files.pythonhosted.org/packages/16/a7/8f1ef447d995bb51d9fdc36356697afeceb603932f16410c12d52b2df1a4/oracledb-3.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b063d1007882570f170ebde0f364e78d4a70c8f015735cc900663278b9ceef7", size = 2449385, upload-time = "2025-07-29T22:34:30.592Z" },
{ url = "https://files.pythonhosted.org/packages/b3/fa/6a78480450bc7d256808d0f38ade3385735fb5a90dab662167b4257dcf94/oracledb-3.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:187728f0a2d161676b8c581a9d8f15d9631a8fea1e628f6d0e9fa2f01280cd22", size = 2634943, upload-time = "2025-07-29T22:34:33.142Z" },
{ url = "https://files.pythonhosted.org/packages/5b/90/ea32b569a45fb99fac30b96f1ac0fb38b029eeebb78357bc6db4be9dde41/oracledb-3.3.0-cp311-cp311-win32.whl", hash = "sha256:920f14314f3402c5ab98f2efc5932e0547e9c0a4ca9338641357f73844e3e2b1", size = 1483549, upload-time = "2025-07-29T22:34:35.015Z" },
{ url = "https://files.pythonhosted.org/packages/81/55/ae60f72836eb8531b630299f9ed68df3fe7868c6da16f820a108155a21f9/oracledb-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:825edb97976468db1c7e52c78ba38d75ce7e2b71a2e88f8629bcf02be8e68a8a", size = 1834737, upload-time = "2025-07-29T22:34:36.824Z" },
{ url = "https://files.pythonhosted.org/packages/08/a8/f6b7809d70e98e113786d5a6f1294da81c046d2fa901ad656669fc5d7fae/oracledb-3.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9d25e37d640872731ac9b73f83cbc5fc4743cd744766bdb250488caf0d7696a8", size = 3943512, upload-time = "2025-07-29T22:34:39.237Z" },
{ url = "https://files.pythonhosted.org/packages/df/b9/8145ad8991f4864d3de4a911d439e5bc6cdbf14af448f3ab1e846a54210c/oracledb-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0bf7cdc2b668f939aa364f552861bc7a149d7cd3f3794730d43ef07613b2bf9", size = 2276258, upload-time = "2025-07-29T22:34:41.547Z" },
{ url = "https://files.pythonhosted.org/packages/56/bf/f65635ad5df17d6e4a2083182750bb136ac663ff0e9996ce59d77d200f60/oracledb-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fe20540fde64a6987046807ea47af93be918fd70b9766b3eb803c01e6d4202e", size = 2458811, upload-time = "2025-07-29T22:34:44.648Z" },
{ url = "https://files.pythonhosted.org/packages/7d/30/e0c130b6278c10b0e6cd77a3a1a29a785c083c549676cf701c5d180b8e63/oracledb-3.3.0-cp312-cp312-win32.whl", hash = "sha256:db080be9345cbf9506ffdaea3c13d5314605355e76d186ec4edfa49960ffb813", size = 1445525, upload-time = "2025-07-29T22:34:46.603Z" },
{ url = "https://files.pythonhosted.org/packages/1a/5c/7254f5e1a33a5d6b8bf6813d4f4fdcf5c4166ec8a7af932d987879d5595c/oracledb-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:be81e3afe79f6c8ece79a86d6067ad1572d2992ce1c590a086f3755a09535eb4", size = 1789976, upload-time = "2025-07-29T22:34:48.5Z" },
{ url = "https://files.pythonhosted.org/packages/64/80/be263b668ba32b258d07c85f7bfb6967a9677e016c299207b28734f04c4b/oracledb-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e4b8a852251cef09038b75f30fce1227010835f4e19cfbd436027acba2697c", size = 4228552, upload-time = "2026-01-28T17:25:54.844Z" },
{ url = "https://files.pythonhosted.org/packages/91/bc/e832a649529da7c60409a81be41f3213b4c7ffda4fe424222b2145e8d43c/oracledb-3.4.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1617a1db020346883455af005efbefd51be2c4d797e43b1b38455a19f8526b48", size = 2421924, upload-time = "2026-01-28T17:25:56.984Z" },
{ url = "https://files.pythonhosted.org/packages/86/21/d867c37e493a63b5521bd248110ad5b97b18253d64a30703e3e8f3d9631e/oracledb-3.4.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed78d7e7079a778062744ccf42141ce4806818c3f4dd6463e4a7edd561c9f86", size = 2599301, upload-time = "2026-01-28T17:25:58.529Z" },
{ url = "https://files.pythonhosted.org/packages/2a/de/9b1843ea27f7791449652d7f340f042c3053336d2c11caf29e59bab86189/oracledb-3.4.2-cp311-cp311-win32.whl", hash = "sha256:0e16fe3d057e0c41a23ad2ae95bfa002401690773376d476be608f79ac74bf05", size = 1492890, upload-time = "2026-01-28T17:26:00.662Z" },
{ url = "https://files.pythonhosted.org/packages/d6/10/cbc8afa2db0cec80530858d3e4574f9734fae8c0b7f1df261398aa026c5f/oracledb-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:f93cae08e8ed20f2d5b777a8602a71f9418389c661d2c937e84d94863e7e7011", size = 1843355, upload-time = "2026-01-28T17:26:02.637Z" },
{ url = "https://files.pythonhosted.org/packages/8f/81/2e6154f34b71cd93b4946c73ea13b69d54b8d45a5f6bbffe271793240d21/oracledb-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a7396664e592881225ba66385ee83ce339d864f39003d6e4ca31a894a7e7c552", size = 4220806, upload-time = "2026-01-28T17:26:04.322Z" },
{ url = "https://files.pythonhosted.org/packages/ab/a9/a1d59aaac77d8f727156ec6a3b03399917c90b7da4f02d057f92e5601f56/oracledb-3.4.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f04a2d62073407672f114d02529921de0677c6883ed7c64d8d1a3c04caa3238", size = 2233795, upload-time = "2026-01-28T17:26:05.877Z" },
{ url = "https://files.pythonhosted.org/packages/94/ec/8c4a38020cd251572bd406ddcbde98ca052ec94b5684f9aa9ef1ddfcc68c/oracledb-3.4.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d8d75e4f879b908be66cce05ba6c05791a5dbb4a15e39abc01aa25c8a2492bd9", size = 2424756, upload-time = "2026-01-28T17:26:07.35Z" },
{ url = "https://files.pythonhosted.org/packages/fa/7d/c251c2a8567151ccfcfbe3467ea9a60fb5480dc4719342e2e6b7a9679e5d/oracledb-3.4.2-cp312-cp312-win32.whl", hash = "sha256:31b7ee83c23d0439778303de8a675717f805f7e8edb5556d48c4d8343bcf14f5", size = 1453486, upload-time = "2026-01-28T17:26:08.869Z" },
{ url = "https://files.pythonhosted.org/packages/4c/78/c939f3c16fb39400c4734d5a3340db5659ba4e9dce23032d7b33ccfd3fe5/oracledb-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:ac25a0448fc830fb7029ad50cd136cdbfcd06975d53967e269772cc5cb8c203a", size = 1794445, upload-time = "2026-01-28T17:26:10.66Z" },
]
[[package]]
@ -4750,13 +4760,14 @@ sqlalchemy = [
[[package]]
name = "pgvector"
version = "0.2.5"
version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/25/6c/6d8b4b03b958c02fa8687ec6063c49d952a189f8c91ebbe51e877dfab8f7/pgvector-0.4.2.tar.gz", hash = "sha256:322cac0c1dc5d41c9ecf782bd9991b7966685dee3a00bc873631391ed949513a", size = 31354, upload-time = "2025-12-05T01:07:17.87Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638, upload-time = "2024-02-07T19:35:03.8Z" },
{ url = "https://files.pythonhosted.org/packages/5a/26/6cee8a1ce8c43625ec561aff19df07f9776b7525d9002c86bceb3e0ac970/pgvector-0.4.2-py3-none-any.whl", hash = "sha256:549d45f7a18593783d5eec609ea1684a724ba8405c4cb182a0b2b08aeff04e08", size = 27441, upload-time = "2025-12-05T01:07:16.536Z" },
]
[[package]]
@ -5300,34 +5311,35 @@ crypto = [
[[package]]
name = "pymilvus"
version = "2.5.17"
version = "2.6.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachetools" },
{ name = "grpcio" },
{ name = "milvus-lite", marker = "sys_platform != 'win32'" },
{ name = "orjson" },
{ name = "pandas" },
{ name = "protobuf" },
{ name = "python-dotenv" },
{ name = "requests" },
{ name = "setuptools" },
{ name = "ujson" },
]
sdist = { url = "https://files.pythonhosted.org/packages/dc/85/91828a9282bb7f9b210c0a93831979c5829cba5533ac12e87014b6e2208b/pymilvus-2.5.17.tar.gz", hash = "sha256:48ff55db9598e1b4cc25f4fe645b00d64ebcfb03f79f9f741267fc2a35526d43", size = 1281485, upload-time = "2025-11-10T03:24:53.058Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9e/85/90362066ccda5ff6fec693a55693cde659fdcd36d08f1bd7012ae958248d/pymilvus-2.6.10.tar.gz", hash = "sha256:58a44ee0f1dddd7727ae830ef25325872d8946f029d801a37105164e6699f1b8", size = 1561042, upload-time = "2026-03-13T09:54:22.441Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/59/44/ee0c64617f58c123f570293f36b40f7b56fc123a2aa9573aa22e6ff0fb86/pymilvus-2.5.17-py3-none-any.whl", hash = "sha256:a43d36f2e5f793040917d35858d1ed2532307b7dfb03bc3eaf813aac085bc5a4", size = 244036, upload-time = "2025-11-10T03:24:51.496Z" },
{ url = "https://files.pythonhosted.org/packages/88/10/fe7fbb6795aa20038afd55e9c653991e7c69fb24c741ebb39ba3b0aa5c13/pymilvus-2.6.10-py3-none-any.whl", hash = "sha256:a048b6f3ebad93742bca559beabf44fe578f0983555a109c4436b5fb2c1dbd40", size = 312797, upload-time = "2026-03-13T09:54:21.081Z" },
]
[[package]]
name = "pymochow"
version = "2.2.9"
version = "2.3.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "future" },
{ name = "orjson" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b5/29/d9b112684ce490057b90bddede3fb6a69cf2787a3fd7736bdce203e77388/pymochow-2.2.9.tar.gz", hash = "sha256:5a28058edc8861deb67524410e786814571ed9fe0700c8c9fc0bc2ad5835b06c", size = 50079, upload-time = "2025-06-05T08:33:19.59Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5e/04/2edda5447aa7c87a0b2b7c75406cc0fbcceeddd09c76b04edfb84eb47499/pymochow-2.3.6.tar.gz", hash = "sha256:6249a2fa410ef22e9e702710d725e7e052f492af87233ffe911845f931557632", size = 51123, upload-time = "2025-12-12T06:23:24.162Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/9b/be18f9709dfd8187ff233be5acb253a9f4f1b07f1db0e7b09d84197c28e2/pymochow-2.2.9-py3-none-any.whl", hash = "sha256:639192b97f143d4a22fc163872be12aee19523c46f12e22416e8f289f1354d15", size = 77899, upload-time = "2025-06-05T08:33:17.424Z" },
{ url = "https://files.pythonhosted.org/packages/aa/86/588c75acbcc7dd9860252f1ef2233212f36b6751ac0cdec15867fc2fc4d6/pymochow-2.3.6-py3-none-any.whl", hash = "sha256:d46cb3af4d908f0c15d875190b1945c0353b907d7e32f068636ee04433cf06b1", size = 78963, upload-time = "2025-12-12T06:23:21.419Z" },
]
[[package]]
@ -5341,7 +5353,7 @@ wheels = [
[[package]]
name = "pyobvector"
version = "0.2.20"
version = "0.2.25"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiomysql" },
@ -5351,9 +5363,9 @@ dependencies = [
{ name = "sqlalchemy" },
{ name = "sqlglot" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ca/6f/24ae2d4ba811e5e112c89bb91ba7c50eb79658563650c8fc65caa80655f8/pyobvector-0.2.20.tar.gz", hash = "sha256:72a54044632ba3bb27d340fb660c50b22548d34c6a9214b6653bc18eee4287c4", size = 46648, upload-time = "2025-11-20T09:30:16.354Z" }
sdist = { url = "https://files.pythonhosted.org/packages/38/8a/c459f45844f1f90e9edf80c0f434ec3b1a65132efb240cfab8f26b1836c3/pyobvector-0.2.25.tar.gz", hash = "sha256:94d987583255ed8aba701d37a5d7c2727ec5fd7e0288cd9dd87a1f5ee36dd923", size = 78511, upload-time = "2026-03-10T07:18:32.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/21/630c4e9f0d30b7a6eebe0590cd97162e82a2d3ac4ed3a33259d0a67e0861/pyobvector-0.2.20-py3-none-any.whl", hash = "sha256:9a3c1d3eb5268eae64185f8807b10fd182f271acf33323ee731c2ad554d1c076", size = 60131, upload-time = "2025-11-20T09:30:14.88Z" },
{ url = "https://files.pythonhosted.org/packages/d1/7d/037401cecb34728d1c28ea05e196ea3c9d50a1ce0f2172e586e075ff55d8/pyobvector-0.2.25-py3-none-any.whl", hash = "sha256:ae0153f99bd0222783ed7e3951efc31a0d2b462d926b6f86ebd2033409aede8f", size = 64663, upload-time = "2026-03-10T07:18:29.789Z" },
]
[[package]]
@ -6019,18 +6031,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/87/f4/09ffb3ebd0cbb9e2c7c9b84d252557ecf434cd71584ee1e32f66013824df/rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f", size = 564054, upload-time = "2025-11-16T14:50:37.733Z" },
]
[[package]]
name = "rsa"
version = "4.9.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyasn1" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" },
]
[[package]]
name = "ruff"
version = "0.15.5"
@ -6104,16 +6104,16 @@ wheels = [
[[package]]
name = "sendgrid"
version = "6.12.5"
version = "6.12.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "ecdsa" },
{ name = "python-http-client" },
{ name = "werkzeug" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/fa/f718b2b953f99c1f0085811598ac7e31ccbd4229a81ec2a5290be868187a/sendgrid-6.12.5.tar.gz", hash = "sha256:ea9aae30cd55c332e266bccd11185159482edfc07c149b6cd15cf08869fabdb7", size = 50310, upload-time = "2025-09-19T06:23:09.229Z" }
sdist = { url = "https://files.pythonhosted.org/packages/11/31/62e00433878dccf33edf07f8efa417b9030a2464eb3b04bbd797a11b4447/sendgrid-6.12.4.tar.gz", hash = "sha256:9e88b849daf0fa4bdf256c3b5da9f5a3272402c0c2fd6b1928c9de440db0a03d", size = 50271, upload-time = "2025-06-12T10:29:37.213Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/55/b3c3880a77082e8f7374954e0074aafafaa9bc78bdf9c8f5a92c2e7afc6a/sendgrid-6.12.5-py3-none-any.whl", hash = "sha256:96f92cc91634bf552fdb766b904bbb53968018da7ae41fdac4d1090dc0311ca8", size = 102173, upload-time = "2025-09-19T06:23:07.93Z" },
{ url = "https://files.pythonhosted.org/packages/c2/9c/45d068fd831a65e6ed1e2ab3233de58784842afdc62fdcdd0a01bbb6b39d/sendgrid-6.12.4-py3-none-any.whl", hash = "sha256:9a211b96241e63bd5b9ed9afcc8608f4bcac426e4a319b3920ab877c8426e92c", size = 102122, upload-time = "2025-06-12T10:29:35.457Z" },
]
[[package]]
@ -6454,7 +6454,7 @@ wheels = [
[[package]]
name = "tablestore"
version = "6.3.7"
version = "6.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
@ -6467,9 +6467,9 @@ dependencies = [
{ name = "six" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f1/39/47a3ec8e42fe74dd05af1dfed9c3b02b8f8adfdd8656b2c5d4f95f975c9f/tablestore-6.3.7.tar.gz", hash = "sha256:990682dbf6b602f317a2d359b4281dcd054b4326081e7a67b73dbbe95407be51", size = 117440, upload-time = "2025-10-29T02:57:57.415Z" }
sdist = { url = "https://files.pythonhosted.org/packages/62/00/53f8eeb0016e7ad518f92b085de8855891d10581b42f86d15d1df7a56d33/tablestore-6.4.1.tar.gz", hash = "sha256:005c6939832f2ecd403e01220b7045de45f2e53f1ffaf0c2efc435810885fffb", size = 120319, upload-time = "2026-02-13T06:58:37.267Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/55/1b24d8c369204a855ac652712f815e88a4909802094e613fe3742a2d80e3/tablestore-6.3.7-py3-none-any.whl", hash = "sha256:38dcc55085912ab2515e183afd4532a58bb628a763590a99fc1bd2a4aba6855c", size = 139041, upload-time = "2025-10-29T02:57:55.727Z" },
{ url = "https://files.pythonhosted.org/packages/cc/96/a132bdecb753dc9dc34124a53019da29672baaa34485c8c504895897ea96/tablestore-6.4.1-py3-none-any.whl", hash = "sha256:616898d294dfe22f0d427463c241c6788374cdb2ace9aaf85673ce2c2a18d7e0", size = 141556, upload-time = "2026-02-13T06:58:35.579Z" },
]
[[package]]
@ -6495,7 +6495,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/20/81/be13f417065200182
[[package]]
name = "tcvectordb"
version = "1.6.4"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachetools" },
@ -6508,9 +6508,9 @@ dependencies = [
{ name = "ujson" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/19/ec/c80579aff1539257aafcf8dc3f3c13630171f299d65b33b68440e166f27c/tcvectordb-1.6.4.tar.gz", hash = "sha256:6fb18e15ccc6744d5147e9bbd781f84df3d66112de7d9cc615878b3f72d3a29a", size = 75188, upload-time = "2025-03-05T09:14:19.925Z" }
sdist = { url = "https://files.pythonhosted.org/packages/16/21/3bcd466df20ac69408c0228b1c5e793cf3283085238d3ef5d352c556b6ad/tcvectordb-2.0.0.tar.gz", hash = "sha256:38c6ed17931b9bd702138941ca6cfe10b2b60301424ffa36b64a3c2686318941", size = 82209, upload-time = "2025-12-27T07:55:27.376Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/bf/f38d9f629324ecffca8fe934e8df47e1233a9021b0739447e59e9fb248f9/tcvectordb-1.6.4-py3-none-any.whl", hash = "sha256:06ef13e7edb4575b04615065fc90e1a28374e318ada305f3786629aec5c9318a", size = 88917, upload-time = "2025-03-05T09:14:17.494Z" },
{ url = "https://files.pythonhosted.org/packages/af/10/e807b273348edef3b321194bc13b67d2cd4df64e22f0404b9e39082415c7/tcvectordb-2.0.0-py3-none-any.whl", hash = "sha256:1731d9c6c0d17a4199872747ddfb1dd3feb26f14ffe7a657f8a5ac3af4ddcdd1", size = 96256, upload-time = "2025-12-27T07:55:24.362Z" },
]
[[package]]
@ -6578,14 +6578,14 @@ wheels = [
[[package]]
name = "tidb-vector"
version = "0.0.9"
version = "0.0.15"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1a/98/ab324fdfbbf064186ca621e21aa3871ddf886ecb78358a9864509241e802/tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709", size = 16948, upload-time = "2024-05-08T07:54:36.955Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b1/55/6247b3b8dd0c0ec05a7b0dd7d4f016d03337d6f089db9cc221a31de1308c/tidb_vector-0.0.15.tar.gz", hash = "sha256:dfd16b31b06f025737f5c7432a08e04265dde8a7c9c67d037e6e694c8125f6f5", size = 20702, upload-time = "2025-07-15T09:48:07.423Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/bb/0f3b7b4d31537e90f4dd01f50fa58daef48807c789c1c1bdd610204ff103/tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2", size = 17026, upload-time = "2024-05-08T07:54:34.849Z" },
{ url = "https://files.pythonhosted.org/packages/24/27/5a4aeeae058f75c1925646ff82215551903688ec33acc64ca46135eac631/tidb_vector-0.0.15-py3-none-any.whl", hash = "sha256:2bc7d02f5508ba153c8d67d049ab1e661c850e09e3a29286dc8b19945e512ad8", size = 21924, upload-time = "2025-07-15T09:48:05.834Z" },
]
[[package]]
@ -7332,14 +7332,14 @@ wheels = [
[[package]]
name = "upstash-vector"
version = "0.6.0"
version = "0.8.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
]
sdist = { url = "https://files.pythonhosted.org/packages/94/a6/a9178fef247687917701a60eb66542eb5361c58af40c033ba8174ff7366d/upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b", size = 15075, upload-time = "2024-09-27T12:02:13.533Z" }
sdist = { url = "https://files.pythonhosted.org/packages/65/22/1b9161b82ef52addc2b71ffca9498cb745b34b2e43e77ef1c921d96fb3f1/upstash_vector-0.8.0.tar.gz", hash = "sha256:cdeeeeabe08c813f0f525d9b6ceefbf17abb720bd30190cd6df88b9f2c318334", size = 18565, upload-time = "2025-02-27T11:52:38.14Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5d/45/95073b83b7fd7b83f10ea314f197bae3989bfe022e736b90145fe9ea4362/upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c", size = 15061, upload-time = "2024-09-27T12:02:12.041Z" },
{ url = "https://files.pythonhosted.org/packages/ab/ce/1528e6e37d4a1ba7a333ebca7191b638986f4ba9f73ba17458b45c4d36e2/upstash_vector-0.8.0-py3-none-any.whl", hash = "sha256:e8a7560e6e80e22ff2a4d95ff0b08723b22bafaae7dab38eddce51feb30c5785", size = 18480, upload-time = "2025-02-27T11:52:36.189Z" },
]
[[package]]
@ -7614,7 +7614,7 @@ wheels = [
[[package]]
name = "weaviate-client"
version = "4.17.0"
version = "4.20.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "authlib" },
@ -7625,9 +7625,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "validators" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bd/0e/e4582b007427187a9fde55fa575db4b766c81929d2b43a3dd8becce50567/weaviate_client-4.17.0.tar.gz", hash = "sha256:731d58d84b0989df4db399b686357ed285fb95971a492ccca8dec90bb2343c51", size = 769019, upload-time = "2025-09-26T11:20:27.381Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/1c/82b560254f612f95b644849d86e092da6407f17965d61e22b583b30b72cf/weaviate_client-4.20.4.tar.gz", hash = "sha256:08703234b59e4e03739f39e740e9e88cb50cd0aa147d9408b88ea6ce995c37b6", size = 809529, upload-time = "2026-03-10T15:08:13.845Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5b/c5/2da3a45866da7a935dab8ad07be05dcaee48b3ad4955144583b651929be7/weaviate_client-4.17.0-py3-none-any.whl", hash = "sha256:60e4a355b90537ee1e942ab0b76a94750897a13d9cf13c5a6decbd166d0ca8b5", size = 582763, upload-time = "2025-09-26T11:20:25.864Z" },
{ url = "https://files.pythonhosted.org/packages/1d/d7/9461c3e7d8c44080d2307078e33dc7fefefa3171c8f930f2b83a5cbf67f2/weaviate_client-4.20.4-py3-none-any.whl", hash = "sha256:7af3a213bebcb30dcf456b0db8b6225d8926106b835d7b883276de9dc1c301fe", size = 619517, upload-time = "2026-03-10T15:08:12.047Z" },
]
[[package]]
@ -7731,16 +7731,17 @@ wheels = [
[[package]]
name = "xinference-client"
version = "1.2.2"
version = "2.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
{ name = "pydantic" },
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4b/cf/7f825a311b11d1e0f7947a94f88adcf1d31e707c54a6d76d61a5d98604ed/xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0", size = 44824, upload-time = "2025-02-08T09:28:56.692Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bc/7a/33aeef9cffdc331de0046c25412622c5a16226d1b4e0cca9ed512ad00b9a/xinference_client-2.3.1.tar.gz", hash = "sha256:23ae225f47ff9adf4c6f7718c54993d1be8c704d727509f6e5cb670de3e02c4d", size = 58414, upload-time = "2026-03-15T05:53:23.994Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/77/0f/fc58e062cf2f7506a33d2fe5446a1e88eb7f64914addffd7ed8b12749712/xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f", size = 25723, upload-time = "2025-02-08T09:28:54.046Z" },
{ url = "https://files.pythonhosted.org/packages/74/8d/d9ab0a457718050a279b9bb6515b7245d114118dc5e275f190ef2628dd16/xinference_client-2.3.1-py3-none-any.whl", hash = "sha256:f7c4f0b56635b46be9cfd9b2affa8e15275491597ac9b958e14b13da5745133e", size = 40012, upload-time = "2026-03-15T05:53:22.797Z" },
]
[[package]]

View File

@ -8,6 +8,10 @@
- In new or modified code, use only overlay primitives from `@/app/components/base/ui/*`.
- Do not introduce deprecated overlay imports from `@/app/components/base/*`; when touching legacy callers, prefer migrating them and keep the allowlist shrinking (never expanding).
## Query & Mutation (Mandatory)
- `frontend-query-mutation` is the source of truth for Dify frontend contracts, query and mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
## Automated Test Generation
- Use `./docs/test.md` as the canonical instruction set for generating frontend automated tests.

View File

@ -0,0 +1,186 @@
import {
getChangedBranchCoverage,
getChangedStatementCoverage,
getIgnoredChangedLinesFromSource,
normalizeToRepoRelative,
parseChangedLineMap,
} from '../scripts/check-components-diff-coverage-lib.mjs'
// Unit tests for the pure helpers behind the diff-coverage gate script:
// unified-diff parsing, path normalization, changed-line statement/branch
// coverage math, and the `diff-coverage-ignore-line` pragma handling.
describe('check-components-diff-coverage helpers', () => {
  // parseChangedLineMap: added-line numbers come from the `@@ -a,b +c,d @@`
  // hunk headers; files rejected by the predicate are dropped entirely.
  it('should parse changed line maps from unified diffs', () => {
    const diff = [
      'diff --git a/web/app/components/share/a.ts b/web/app/components/share/a.ts',
      '+++ b/web/app/components/share/a.ts',
      '@@ -10,0 +11,2 @@',
      '+const a = 1',
      '+const b = 2',
      'diff --git a/web/app/components/base/b.ts b/web/app/components/base/b.ts',
      '+++ b/web/app/components/base/b.ts',
      '@@ -20 +21 @@',
      '+const c = 3',
      'diff --git a/web/README.md b/web/README.md',
      '+++ b/web/README.md',
      '@@ -1 +1 @@',
      '+ignore me',
    ].join('\n')
    const lineMap = parseChangedLineMap(diff, (filePath: string) => filePath.startsWith('web/app/components/'))
    // web/README.md is filtered out by the predicate above.
    expect([...lineMap.entries()]).toEqual([
      ['web/app/components/share/a.ts', new Set([11, 12])],
      ['web/app/components/base/b.ts', new Set([21])],
    ])
  })
  // normalizeToRepoRelative accepts repo-relative, coverage-prefixed, and
  // absolute paths and maps them all onto the same repo-relative form.
  it('should normalize coverage and absolute paths to repo-relative paths', () => {
    const repoRoot = '/repo'
    const webRoot = '/repo/web'
    // Already repo-relative: returned unchanged.
    expect(normalizeToRepoRelative('web/app/components/share/a.ts', {
      appComponentsCoveragePrefix: 'app/components/',
      appComponentsPrefix: 'web/app/components/',
      repoRoot,
      sharedTestPrefix: 'web/__tests__/',
      webRoot,
    })).toBe('web/app/components/share/a.ts')
    // Coverage-report path (relative to web/): re-prefixed with 'web/'.
    expect(normalizeToRepoRelative('app/components/share/a.ts', {
      appComponentsCoveragePrefix: 'app/components/',
      appComponentsPrefix: 'web/app/components/',
      repoRoot,
      sharedTestPrefix: 'web/__tests__/',
      webRoot,
    })).toBe('web/app/components/share/a.ts')
    // Absolute path: stripped down to repo-relative.
    expect(normalizeToRepoRelative('/repo/web/app/components/share/a.ts', {
      appComponentsCoveragePrefix: 'app/components/',
      appComponentsPrefix: 'web/app/components/',
      repoRoot,
      sharedTestPrefix: 'web/__tests__/',
      webRoot,
    })).toBe('web/app/components/share/a.ts')
  })
  // Statement coverage counts only statements whose span intersects the
  // changed-line set; `s` holds per-statement hit counts.
  it('should calculate changed statement coverage from changed lines', () => {
    const entry = {
      s: { 0: 1, 1: 0 },
      statementMap: {
        0: { start: { line: 10 }, end: { line: 10 } },
        1: { start: { line: 12 }, end: { line: 13 } },
      },
    }
    const coverage = getChangedStatementCoverage(entry, new Set([10, 12]))
    expect(coverage).toEqual({
      covered: 1,
      total: 2,
      uncoveredLines: [12],
    })
  })
  it('should report the first changed line inside a multi-line uncovered statement', () => {
    const entry = {
      s: { 0: 0 },
      statementMap: {
        0: { start: { line: 10 }, end: { line: 14 } },
      },
    }
    // Statement spans 10-14 but only 13/14 changed; 13 is reported.
    const coverage = getChangedStatementCoverage(entry, new Set([13, 14]))
    expect(coverage).toEqual({
      covered: 0,
      total: 1,
      uncoveredLines: [13],
    })
  })
  // A missing coverage entry (file never executed) fails every changed line.
  it('should fail changed lines when a source file has no coverage entry', () => {
    const coverage = getChangedStatementCoverage(undefined, new Set([42, 43]))
    expect(coverage).toEqual({
      covered: 0,
      total: 2,
      uncoveredLines: [42, 43],
    })
  })
  // Branch coverage counts every arm of a branch whose definition touches a
  // changed line; `b` holds per-arm hit counts.
  it('should calculate changed branch coverage using changed branch definitions', () => {
    const entry = {
      b: {
        0: [1, 0],
      },
      branchMap: {
        0: {
          line: 20,
          loc: { start: { line: 20 }, end: { line: 20 } },
          locations: [
            { start: { line: 20 }, end: { line: 20 } },
            { start: { line: 21 }, end: { line: 21 } },
          ],
          type: 'if',
        },
      },
    }
    const coverage = getChangedBranchCoverage(entry, new Set([20]))
    expect(coverage).toEqual({
      covered: 1,
      total: 2,
      uncoveredBranches: [
        { armIndex: 1, line: 21 },
      ],
    })
  })
  it('should report the first changed line inside a multi-line uncovered branch arm', () => {
    const entry = {
      b: {
        0: [0, 0],
      },
      branchMap: {
        0: {
          line: 30,
          loc: { start: { line: 30 }, end: { line: 35 } },
          locations: [
            { start: { line: 31 }, end: { line: 34 } },
            { start: { line: 35 }, end: { line: 38 } },
          ],
          type: 'if',
        },
      },
    }
    // Arm 0 spans 31-34; the changed line 33 is reported for it. Arm 1 falls
    // back to its own start line (35), which was not itself changed.
    const coverage = getChangedBranchCoverage(entry, new Set([33]))
    expect(coverage).toEqual({
      covered: 0,
      total: 2,
      uncoveredBranches: [
        { armIndex: 0, line: 33 },
        { armIndex: 1, line: 35 },
      ],
    })
  })
  // Pragma handling: a pragma with a reason excludes its changed line; a
  // pragma without a reason is flagged invalid; pragmas on unchanged lines
  // (line 4 here) are ignored entirely.
  it('should ignore changed lines with valid pragma reasons and report invalid pragmas', () => {
    const sourceCode = [
      'const a = 1',
      'const b = 2 // diff-coverage-ignore-line: defensive fallback',
      'const c = 3 // diff-coverage-ignore-line:',
      'const d = 4 // diff-coverage-ignore-line: not changed',
    ].join('\n')
    const result = getIgnoredChangedLinesFromSource(sourceCode, new Set([2, 3]))
    expect([...result.effectiveChangedLines]).toEqual([3])
    expect([...result.ignoredLines.entries()]).toEqual([
      [2, 'defensive fallback'],
    ])
    expect(result.invalidPragmas).toEqual([
      { line: 3, reason: 'missing ignore reason' },
    ])
  })
})

View File

@ -0,0 +1,24 @@
/* CSS-module class applying the self-hosted Instrument Serif italic face,
 * with a generic serif fallback while the webfont loads. */
.instrumentSerif {
  font-family: "Instrument Serif", serif;
  font-style: italic;
}

/* Self-hosted Instrument Serif, italic 400 only.
 * - font-display: swap renders fallback text immediately, then swaps in
 *   the webfont once the woff2 arrives.
 * - unicode-range restricts the download to Latin / Latin Extended glyphs
 *   plus a handful of punctuation, currency, and symbol ranges, so the
 *   font is only fetched when those characters appear. */
@font-face {
  font-family: "Instrument Serif";
  font-style: italic;
  font-weight: 400;
  font-display: swap;
  src: url("./InstrumentSerif-Italic-Latin.woff2") format("woff2");
  unicode-range:
    U+0000-00FF,
    U+0100-024F,
    U+0259,
    U+0300-036F,
    U+1E00-1EFF,
    U+2010-205E,
    U+20A0-20CF,
    U+2113,
    U+2212,
    U+2C60-2C7F,
    U+A720-A7FF;
}

View File

@ -1,8 +1,10 @@
import { RiCloseLine } from '@remixicon/react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
import Button from '../../base/button'
import DifyLogo from '../../base/logo/dify-logo'
import styles from './header.module.css'
type HeaderProps = {
onClose: () => void
@ -20,11 +22,16 @@ const Header = ({
<div className="py-[5px]">
<DifyLogo className="h-[27px] w-[60px]" />
</div>
<span className="bg-billing-plan-title-bg bg-clip-text px-1.5 font-instrument text-[37px] italic leading-[1.2] text-transparent">
<span
className={cn(
'bg-billing-plan-title-bg bg-clip-text px-1.5 text-[37px] leading-[1.2] text-transparent',
styles.instrumentSerif,
)}
>
{t('plansCommon.title.plans', { ns: 'billing' })}
</span>
</div>
<p className="system-sm-regular text-text-tertiary">
<p className="text-text-tertiary system-sm-regular">
{t('plansCommon.title.description', { ns: 'billing' })}
</p>
<Button

View File

@ -275,7 +275,7 @@ describe('useTextGenerationBatch', () => {
})
act(() => {
result.current.handleCompleted({ answer: 'failed' } as unknown as string, 1, false)
result.current.handleCompleted('{"answer":"failed"}', 1, false)
})
expect(result.current.allFailedTaskList).toEqual([
@ -291,7 +291,7 @@ describe('useTextGenerationBatch', () => {
{
'Name': 'Alice',
'Score': '',
'generation.completionResult': JSON.stringify({ answer: 'failed' }),
'generation.completionResult': '{"answer":"failed"}',
},
])

View File

@ -241,10 +241,7 @@ export const useTextGenerationBatch = ({
result[variable.name] = String(task.params.inputs[variable.key] ?? '')
})
let completionValue = batchCompletionMap[String(task.id)]
if (typeof completionValue === 'object')
completionValue = JSON.stringify(completionValue)
const completionValue = batchCompletionMap[String(task.id)] ?? ''
result[t('generation.completionResult', { ns: 'share' })] = completionValue
return result
})

View File

@ -0,0 +1,334 @@
import type { PromptConfig } from '@/models/debug'
import type { SiteInfo } from '@/models/share'
import type { IOtherOptions } from '@/service/base'
import type { VisionSettings } from '@/types/app'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { AppSourceType } from '@/service/share'
import { Resolution, TransferMethod } from '@/types/app'
import Result from '../index'
// Spies created via vi.hoisted so the vi.mock factories below (which vitest
// hoists above all imports) can safely close over them.
const {
  notifyMock,
  sendCompletionMessageMock,
  sendWorkflowMessageMock,
  stopChatMessageRespondingMock,
  textGenerationResPropsSpy,
} = vi.hoisted(() => ({
  notifyMock: vi.fn(),
  sendCompletionMessageMock: vi.fn(),
  sendWorkflowMessageMock: vi.fn(),
  stopChatMessageRespondingMock: vi.fn(),
  textGenerationResPropsSpy: vi.fn(),
}))
// i18next.t returns its key verbatim so assertions can match raw i18n keys.
vi.mock('i18next', () => ({
  t: (key: string) => key,
}))
vi.mock('@/app/components/base/toast', () => ({
  default: {
    notify: notifyMock,
  },
}))
// sleep never resolves, so any retry/poll loop in the component parks
// forever instead of advancing during a test.
vi.mock('@/utils', async () => {
  const actual = await vi.importActual<typeof import('@/utils')>('@/utils')
  return {
    ...actual,
    sleep: () => new Promise<void>(() => {}),
  }
})
// Route the three share-service calls through the hoisted spies; everything
// else in the module keeps its real implementation.
vi.mock('@/service/share', async () => {
  const actual = await vi.importActual<typeof import('@/service/share')>('@/service/share')
  return {
    ...actual,
    sendCompletionMessage: (...args: Parameters<typeof actual.sendCompletionMessage>) => sendCompletionMessageMock(...args),
    sendWorkflowMessage: (...args: Parameters<typeof actual.sendWorkflowMessage>) => sendWorkflowMessageMock(...args),
    stopChatMessageResponding: (...args: Parameters<typeof actual.stopChatMessageResponding>) => stopChatMessageRespondingMock(...args),
  }
})
// Stub the result renderer: record every props object it receives and render
// the content as plain text for DOM assertions.
vi.mock('@/app/components/app/text-generate/item', () => ({
  default: (props: Record<string, unknown>) => {
    textGenerationResPropsSpy(props)
    return (
      <div data-testid="text-generation-res">
        {typeof props.content === 'string' ? props.content : JSON.stringify(props.content ?? null)}
      </div>
    )
  },
}))
vi.mock('@/app/components/share/text-generation/no-data', () => ({
  default: () => <div data-testid="no-data">No data</div>,
}))
// Minimal prompt config: one required string variable.
const promptConfig: PromptConfig = {
  prompt_template: 'template',
  prompt_variables: [
    { key: 'name', name: 'Name', type: 'string', required: true },
  ],
}
const siteInfo: SiteInfo = {
  title: 'Share title',
  description: 'Share description',
  icon_type: 'emoji',
  icon: 'robot',
}
const visionConfig: VisionSettings = {
  enabled: false,
  number_limits: 2,
  detail: Resolution.low,
  transfer_methods: [TransferMethod.local_file],
}
// Baseline props for <Result>; individual tests override what they exercise
// (controlSend, isWorkflow, isCallBatchAPI, taskId, ...).
const baseProps = {
  appId: 'app-1',
  appSourceType: AppSourceType.webApp,
  completionFiles: [],
  controlRetry: 0,
  controlSend: 0,
  controlStopResponding: 0,
  handleSaveMessage: vi.fn(),
  inputs: { name: 'Alice' },
  isCallBatchAPI: false,
  isError: false,
  isMobile: false,
  isPC: true,
  isShowTextToSpeech: true,
  isWorkflow: false,
  moreLikeThisEnabled: true,
  onCompleted: vi.fn(),
  onRunControlChange: vi.fn(),
  onRunStart: vi.fn(),
  onShowRes: vi.fn(),
  promptConfig,
  siteInfo,
  visionConfig,
}
// Behavioral tests for the share-page <Result> component: completion
// streaming, stop-responding, workflow runs, and batch-mode rendering.
// Runs are triggered by bumping the `controlSend` prop via rerender.
describe('Result', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    stopChatMessageRespondingMock.mockResolvedValue(undefined)
  })
  it('should render no data before the first execution', () => {
    render(<Result {...baseProps} />)
    expect(screen.getByTestId('no-data')).toBeTruthy()
    expect(screen.queryByTestId('text-generation-res')).toBeNull()
  })
  it('should stream completion results and stop the current task', async () => {
    // Capture the SSE callbacks the component passes to sendCompletionMessage
    // so the test can drive the stream manually.
    let completionHandlers: {
      onCompleted: () => void
      onData: (chunk: string, isFirstMessage: boolean, info: { messageId: string, taskId?: string }) => void
      onError: () => void
      onMessageReplace: (messageReplace: { answer: string }) => void
    } | null = null
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers
    })
    const onCompleted = vi.fn()
    const onRunControlChange = vi.fn()
    const { rerender } = render(
      <Result
        {...baseProps}
        onCompleted={onCompleted}
        onRunControlChange={onRunControlChange}
      />,
    )
    // Bumping controlSend from 0 to 1 triggers a run.
    rerender(
      <Result
        {...baseProps}
        controlSend={1}
        onCompleted={onCompleted}
        onRunControlChange={onRunControlChange}
      />,
    )
    expect(sendCompletionMessageMock).toHaveBeenCalledTimes(1)
    expect(screen.getByRole('status', { name: 'appApi.loading' })).toBeTruthy()
    // Push one streamed chunk; it should appear in the rendered result.
    await act(async () => {
      completionHandlers?.onData('Hello', false, {
        messageId: 'message-1',
        taskId: 'task-1',
      })
    })
    expect(screen.getByTestId('text-generation-res').textContent).toContain('Hello')
    await waitFor(() => {
      expect(onRunControlChange).toHaveBeenLastCalledWith(expect.objectContaining({
        isStopping: false,
      }))
    })
    // Stop button forwards the captured taskId to the stop endpoint.
    fireEvent.click(screen.getByRole('button', { name: 'operation.stopResponding' }))
    await waitFor(() => {
      expect(stopChatMessageRespondingMock).toHaveBeenCalledWith('app-1', 'task-1', AppSourceType.webApp, 'app-1')
    })
    await act(async () => {
      completionHandlers?.onCompleted()
    })
    expect(onCompleted).toHaveBeenCalledWith('Hello', undefined, true)
    expect(textGenerationResPropsSpy).toHaveBeenLastCalledWith(expect.objectContaining({
      messageId: 'message-1',
    }))
  })
  it('should render workflow results after workflow completion', async () => {
    let workflowHandlers: IOtherOptions | null = null
    sendWorkflowMessageMock.mockImplementation(async (_data, handlers) => {
      workflowHandlers = handlers
    })
    const onCompleted = vi.fn()
    const { rerender } = render(
      <Result
        {...baseProps}
        isWorkflow
        onCompleted={onCompleted}
      />,
    )
    rerender(
      <Result
        {...baseProps}
        isWorkflow
        controlSend={1}
        onCompleted={onCompleted}
      />,
    )
    // Replay a minimal started -> text_chunk -> finished event sequence.
    await act(async () => {
      workflowHandlers?.onWorkflowStarted?.({
        workflow_run_id: 'run-1',
        task_id: 'task-1',
        event: 'workflow_started',
        data: {
          id: 'run-1',
          workflow_id: 'wf-1',
          created_at: 0,
        },
      })
      workflowHandlers?.onTextChunk?.({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'text_chunk',
        data: {
          text: 'Hello',
        },
      })
      workflowHandlers?.onWorkflowFinished?.({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'workflow_finished',
        data: {
          id: 'run-1',
          workflow_id: 'wf-1',
          status: 'succeeded',
          outputs: {
            answer: 'Hello',
          },
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    // Workflow outputs are rendered as their JSON-serialized form.
    expect(screen.getByTestId('text-generation-res').textContent).toContain('{"answer":"Hello"}')
    expect(textGenerationResPropsSpy).toHaveBeenLastCalledWith(expect.objectContaining({
      workflowProcessData: expect.objectContaining({
        resultText: 'Hello',
        status: 'succeeded',
      }),
    }))
    expect(onCompleted).toHaveBeenCalledWith('{"answer":"Hello"}', undefined, true)
  })
  // Batch task ids are zero-padded to at least two digits.
  it('should render batch task ids for both short and long indexes', () => {
    const { rerender } = render(
      <Result
        {...baseProps}
        isCallBatchAPI
        taskId={3}
      />,
    )
    expect(textGenerationResPropsSpy).toHaveBeenLastCalledWith(expect.objectContaining({
      taskId: '03',
    }))
    rerender(
      <Result
        {...baseProps}
        isCallBatchAPI
        taskId={12}
      />,
    )
    expect(textGenerationResPropsSpy).toHaveBeenLastCalledWith(expect.objectContaining({
      taskId: '12',
    }))
  })
  it('should render the mobile stop button layout while a batch run is responding', async () => {
    let completionHandlers: {
      onData: (chunk: string, isFirstMessage: boolean, info: { messageId: string, taskId?: string }) => void
    } | null = null
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers
    })
    const { rerender } = render(
      <Result
        {...baseProps}
        isCallBatchAPI
        isMobile
        isPC={false}
        taskId={2}
      />,
    )
    rerender(
      <Result
        {...baseProps}
        controlSend={1}
        isCallBatchAPI
        isMobile
        isPC={false}
        taskId={2}
      />,
    )
    await act(async () => {
      completionHandlers?.onData('Hello', false, {
        messageId: 'message-batch',
        taskId: 'task-batch',
      })
    })
    // On mobile the stop button's wrapper centers it.
    expect(screen.getByRole('button', { name: 'operation.stopResponding' }).parentElement?.className).toContain('justify-center')
  })
})

View File

@ -0,0 +1,293 @@
import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { PromptConfig } from '@/models/debug'
import type { VisionFile, VisionSettings } from '@/types/app'
import { Resolution, TransferMethod } from '@/types/app'
import { buildResultRequestData, validateResultRequest } from '../result-request'
// Translator stub that echoes the i18n key, so assertions match raw keys.
const createTranslator = () => vi.fn((key: string) => key)
// Fully-uploaded document FileEntity; tests override individual fields.
const createFileEntity = (overrides: Partial<FileEntity> = {}): FileEntity => ({
  id: 'file-1',
  name: 'example.txt',
  size: 128,
  type: 'text/plain',
  progress: 100,
  transferMethod: TransferMethod.local_file,
  supportFileType: 'document',
  uploadedId: 'uploaded-1',
  url: 'https://example.com/file.txt',
  ...overrides,
})
// Uploaded local image VisionFile; overriding upload_file_id with '' models
// an upload still in flight.
const createVisionFile = (overrides: Partial<VisionFile> = {}): VisionFile => ({
  type: 'image',
  transfer_method: TransferMethod.local_file,
  upload_file_id: 'upload-1',
  url: 'https://example.com/image.png',
  ...overrides,
})
// Prompt config covering each variable kind the validator distinguishes:
// string, boolean, single file, and file list.
const promptConfig: PromptConfig = {
  prompt_template: 'template',
  prompt_variables: [
    { key: 'name', name: 'Name', type: 'string', required: true },
    { key: 'enabled', name: 'Enabled', type: 'boolean', required: true },
    { key: 'file', name: 'File', type: 'file', required: false },
    { key: 'files', name: 'Files', type: 'file-list', required: false },
  ],
}
const visionConfig: VisionSettings = {
  enabled: true,
  number_limits: 2,
  detail: Resolution.low,
  transfer_methods: [TransferMethod.local_file],
}
// Tests for the extracted request helpers: validateResultRequest (input and
// upload validation before sending) and buildResultRequestData (payload
// construction from inputs + vision files).
describe('result-request', () => {
  it('should reject missing required non-boolean inputs', () => {
    const t = createTranslator()
    // 'name' (required string) is absent; only the boolean is provided.
    const result = validateResultRequest({
      completionFiles: [],
      inputs: {
        enabled: false,
      },
      isCallBatchAPI: false,
      promptConfig,
      t,
    })
    expect(result).toEqual({
      canSend: false,
      notification: {
        type: 'error',
        message: 'errorMessage.valueOfVarRequired',
      },
    })
  })
  // 0 is a legitimate value for a required number — must not be treated as
  // missing (falsy-check regression guard).
  it('should allow required number inputs with a value of zero', () => {
    const result = validateResultRequest({
      completionFiles: [],
      inputs: {
        count: 0,
      },
      isCallBatchAPI: false,
      promptConfig: {
        prompt_template: 'template',
        prompt_variables: [
          { key: 'count', name: 'Count', type: 'number', required: true },
        ],
      },
      t: createTranslator(),
    })
    expect(result).toEqual({ canSend: true })
  })
  it('should reject required text inputs that only contain whitespace', () => {
    const result = validateResultRequest({
      completionFiles: [],
      inputs: {
        name: ' ',
      },
      isCallBatchAPI: false,
      promptConfig: {
        prompt_template: 'template',
        prompt_variables: [
          { key: 'name', name: 'Name', type: 'string', required: true },
        ],
      },
      t: createTranslator(),
    })
    expect(result).toEqual({
      canSend: false,
      notification: {
        type: 'error',
        message: 'errorMessage.valueOfVarRequired',
      },
    })
  })
  it('should reject required file lists when no files are selected', () => {
    const result = validateResultRequest({
      completionFiles: [],
      inputs: {
        files: [],
      },
      isCallBatchAPI: false,
      promptConfig: {
        prompt_template: 'template',
        prompt_variables: [
          { key: 'files', name: 'Files', type: 'file-list', required: true },
        ],
      },
      t: createTranslator(),
    })
    expect(result).toEqual({
      canSend: false,
      notification: {
        type: 'error',
        message: 'errorMessage.valueOfVarRequired',
      },
    })
  })
  it('should allow required file inputs when a file is selected', () => {
    const result = validateResultRequest({
      completionFiles: [],
      inputs: {
        file: createFileEntity(),
      },
      isCallBatchAPI: false,
      promptConfig: {
        prompt_template: 'template',
        prompt_variables: [
          { key: 'file', name: 'File', type: 'file', required: true },
        ],
      },
      t: createTranslator(),
    })
    expect(result).toEqual({ canSend: true })
  })
  // A local_file vision upload without an upload_file_id is still in flight:
  // block sending with an informational (not error) notification.
  it('should reject pending local uploads outside batch mode', () => {
    const t = createTranslator()
    const result = validateResultRequest({
      completionFiles: [
        createVisionFile({ upload_file_id: '' }),
      ],
      inputs: {
        name: 'Alice',
      },
      isCallBatchAPI: false,
      promptConfig,
      t,
    })
    expect(result).toEqual({
      canSend: false,
      notification: {
        type: 'info',
        message: 'errorMessage.waitForFileUpload',
      },
    })
  })
  // With promptConfig null, variable validation is skipped but the pending
  // upload check still applies.
  it('should handle missing prompt metadata with and without pending uploads', () => {
    const t = createTranslator()
    const blocked = validateResultRequest({
      completionFiles: [
        createVisionFile({ upload_file_id: '' }),
      ],
      inputs: {},
      isCallBatchAPI: false,
      promptConfig: null,
      t,
    })
    const allowed = validateResultRequest({
      completionFiles: [],
      inputs: {},
      isCallBatchAPI: false,
      promptConfig: null,
      t,
    })
    expect(blocked).toEqual({
      canSend: false,
      notification: {
        type: 'info',
        message: 'errorMessage.waitForFileUpload',
      },
    })
    expect(allowed).toEqual({ canSend: true })
  })
  // Batch mode validates per-row elsewhere, so this validator passes through.
  it('should skip validation in batch mode', () => {
    const result = validateResultRequest({
      completionFiles: [
        createVisionFile({ upload_file_id: '' }),
      ],
      inputs: {},
      isCallBatchAPI: true,
      promptConfig,
      t: createTranslator(),
    })
    expect(result).toEqual({ canSend: true })
  })
  it('should build request data for single and list file inputs', () => {
    const file = createFileEntity()
    const secondFile = createFileEntity({
      id: 'file-2',
      name: 'second.txt',
      uploadedId: 'uploaded-2',
      url: 'https://example.com/second.txt',
    })
    const result = buildResultRequestData({
      completionFiles: [
        createVisionFile(),
        createVisionFile({
          transfer_method: TransferMethod.remote_url,
          upload_file_id: '',
          url: 'https://example.com/remote.png',
        }),
      ],
      inputs: {
        enabled: true,
        file,
        files: [file, secondFile],
        name: 'Alice',
      },
      promptConfig,
      visionConfig,
    })
    // Vision files: local uploads drop their url; remote files keep theirs.
    // FileEntity inputs are converted to the API's file-descriptor shape.
    expect(result).toEqual({
      files: [
        expect.objectContaining({
          transfer_method: TransferMethod.local_file,
          upload_file_id: 'upload-1',
          url: '',
        }),
        expect.objectContaining({
          transfer_method: TransferMethod.remote_url,
          url: 'https://example.com/remote.png',
        }),
      ],
      inputs: {
        enabled: true,
        file: {
          type: 'document',
          transfer_method: TransferMethod.local_file,
          upload_file_id: 'uploaded-1',
          url: 'https://example.com/file.txt',
        },
        files: [
          {
            type: 'document',
            transfer_method: TransferMethod.local_file,
            upload_file_id: 'uploaded-1',
            url: 'https://example.com/file.txt',
          },
          {
            type: 'document',
            transfer_method: TransferMethod.local_file,
            upload_file_id: 'uploaded-2',
            url: 'https://example.com/second.txt',
          },
        ],
        name: 'Alice',
      },
    })
  })
})

View File

@ -0,0 +1,901 @@
import type { WorkflowProcess } from '@/app/components/base/chat/types'
import type { IOtherOptions } from '@/service/base'
import type { HumanInputFormData, HumanInputFormTimeoutData, NodeTracing } from '@/types/workflow'
import { act } from '@testing-library/react'
import { BlockEnum, NodeRunningStatus, WorkflowRunningStatus } from '@/app/components/workflow/types'
import {
appendParallelNext,
appendParallelStart,
appendResultText,
applyWorkflowFinishedState,
applyWorkflowOutputs,
applyWorkflowPaused,
createWorkflowStreamHandlers,
finishParallelTrace,
finishWorkflowNode,
markNodesStopped,
replaceResultText,
updateHumanInputFilled,
updateHumanInputRequired,
updateHumanInputTimeout,
upsertWorkflowNode,
} from '../workflow-stream-handlers'
// Spy for the SSE GET wrapper used by the handlers under test.
const sseGetMock = vi.fn()
// Partial<NodeTracing> with execution_metadata made independently partial,
// so trace fixtures can override nested metadata fields individually.
type TraceOverrides = Omit<Partial<NodeTracing>, 'execution_metadata'> & {
  execution_metadata?: Partial<NonNullable<NodeTracing['execution_metadata']>>
}
// Keep @/service/base real except for sseGet, which is routed to the spy.
vi.mock('@/service/base', async () => {
  const actual = await vi.importActual<typeof import('@/service/base')>('@/service/base')
  return {
    ...actual,
    sseGet: (...args: Parameters<typeof actual.sseGet>) => sseGetMock(...args),
  }
})
// Builds a complete NodeTracing record; execution_metadata overrides are
// merged shallowly so defaults (tokens/price/currency) survive.
const createTrace = (overrides: TraceOverrides = {}): NodeTracing => {
  const { execution_metadata, ...restOverrides } = overrides
  return {
    id: 'trace-1',
    index: 0,
    predecessor_node_id: '',
    node_id: 'node-1',
    node_type: BlockEnum.LLM,
    title: 'Node',
    inputs: {},
    inputs_truncated: false,
    process_data: {},
    process_data_truncated: false,
    outputs: {},
    outputs_truncated: false,
    status: NodeRunningStatus.Running,
    elapsed_time: 0,
    metadata: {
      iterator_length: 0,
      iterator_index: 0,
      loop_length: 0,
      loop_index: 0,
    },
    created_at: 0,
    created_by: {
      id: 'user-1',
      name: 'User',
      email: 'user@example.com',
    },
    finished_at: 0,
    details: [[]],
    execution_metadata: {
      total_tokens: 0,
      total_price: 0,
      currency: 'USD',
      ...execution_metadata,
    },
    ...restOverrides,
  }
}
// Empty running WorkflowProcess used as the starting state in tests.
const createWorkflowProcess = (): WorkflowProcess => ({
  status: WorkflowRunningStatus.Running,
  tracing: [],
  expand: false,
  resultText: '',
})
// Minimal human-input form payload fixture.
const createHumanInput = (overrides: Partial<HumanInputFormData> = {}): HumanInputFormData => ({
  form_id: 'form-1',
  node_id: 'node-1',
  node_title: 'Node',
  form_content: 'content',
  inputs: [],
  actions: [],
  form_token: 'token-1',
  resolved_default_values: {},
  display_in_ui: true,
  expiration_time: 100,
  ...overrides,
})
// Tests for the pure reducer-style helpers that incrementally build a
// WorkflowProcess from streamed workflow events (parallel branches, result
// text, human-input forms, and terminal states).
describe('workflow-stream-handlers helpers', () => {
  // End-to-end chain through the happy-path helpers, threading one
  // WorkflowProcess value from start events through pause.
  it('should update tracing, result text, and human input state', () => {
    const parallelTrace = createTrace({
      node_id: 'parallel-node',
      execution_metadata: { parallel_id: 'parallel-1' },
      details: [[]],
    })
    let workflowProcessData = appendParallelStart(undefined, parallelTrace)
    workflowProcessData = appendParallelNext(workflowProcessData, parallelTrace)
    workflowProcessData = finishParallelTrace(workflowProcessData, createTrace({
      node_id: 'parallel-node',
      execution_metadata: { parallel_id: 'parallel-1' },
      error: 'failed',
    }))
    workflowProcessData = upsertWorkflowNode(workflowProcessData, createTrace({
      node_id: 'node-1',
      execution_metadata: { parallel_id: 'parallel-2' },
    }))!
    // appendResultText concatenates; replaceResultText overwrites.
    workflowProcessData = appendResultText(workflowProcessData, 'Hello ')
    workflowProcessData = replaceResultText(workflowProcessData, 'Hello world')
    workflowProcessData = updateHumanInputRequired(workflowProcessData, createHumanInput())
    workflowProcessData = updateHumanInputFilled(workflowProcessData, {
      action_id: 'action-1',
      action_text: 'Submit',
      node_id: 'node-1',
      node_title: 'Node',
      rendered_content: 'Done',
    })
    workflowProcessData = updateHumanInputTimeout(workflowProcessData, {
      node_id: 'node-1',
      node_title: 'Node',
      expiration_time: 200,
    } satisfies HumanInputFormTimeoutData)
    workflowProcessData = applyWorkflowPaused(workflowProcessData)
    expect(workflowProcessData.expand).toBe(false)
    expect(workflowProcessData.resultText).toBe('Hello world')
    expect(workflowProcessData.humanInputFilledFormDataList).toEqual([
      expect.objectContaining({
        action_text: 'Submit',
      }),
    ])
    expect(workflowProcessData.tracing[0]).toEqual(expect.objectContaining({
      node_id: 'parallel-node',
      expand: true,
    }))
  })
  // Each start/next event appends an empty details bucket when missing.
  it('should initialize missing parallel details on start and next events', () => {
    const parallelTrace = createTrace({
      node_id: 'parallel-node',
      execution_metadata: { parallel_id: 'parallel-1' },
    })
    const startedProcess = appendParallelStart(undefined, parallelTrace)
    const nextProcess = appendParallelNext(startedProcess, parallelTrace)
    expect(startedProcess.tracing[0]?.details).toEqual([[]])
    expect(nextProcess.tracing[0]?.details).toEqual([[], []])
  })
  it('should leave tracing unchanged when a parallel next event has no matching trace', () => {
    const process = createWorkflowProcess()
    process.tracing = [
      createTrace({
        node_id: 'parallel-node',
        execution_metadata: { parallel_id: 'parallel-1' },
        details: [[]],
      }),
    ]
    const nextProcess = appendParallelNext(process, createTrace({
      node_id: 'missing-node',
      execution_metadata: { parallel_id: 'parallel-2' },
    }))
    // Tracing untouched, but the process is still expanded.
    expect(nextProcess.tracing).toEqual(process.tracing)
    expect(nextProcess.expand).toBe(true)
  })
  // Stopping a run marks every non-terminal node — including nested
  // details traces — as Stopped.
  it('should mark running nodes as stopped recursively', () => {
    const workflowProcessData = createWorkflowProcess()
    workflowProcessData.tracing = [
      createTrace({
        status: NodeRunningStatus.Running,
        details: [[createTrace({ status: NodeRunningStatus.Waiting })]],
      }),
    ]
    const stoppedWorkflow = applyWorkflowFinishedState(workflowProcessData, WorkflowRunningStatus.Stopped)
    markNodesStopped(stoppedWorkflow.tracing)
    expect(stoppedWorkflow.status).toBe(WorkflowRunningStatus.Stopped)
    expect(stoppedWorkflow.tracing[0].status).toBe(NodeRunningStatus.Stopped)
    expect(stoppedWorkflow.tracing[0].details?.[0][0].status).toBe(NodeRunningStatus.Stopped)
  })
  // Grab-bag test that drives every remaining helper branch: unmatched
  // nodes, iteration/loop events that are ignored, replacement upserts,
  // undefined-process fallbacks, and human-input list updates.
  it('should cover unmatched and replacement helper branches', () => {
    const process = createWorkflowProcess()
    process.tracing = [
      createTrace({
        node_id: 'node-1',
        parallel_id: 'parallel-1',
        extras: {
          source: 'extra',
        },
        status: NodeRunningStatus.Succeeded,
      }),
    ]
    process.humanInputFormDataList = [
      createHumanInput({ node_id: 'node-1' }),
    ]
    process.humanInputFilledFormDataList = [
      {
        action_id: 'action-0',
        action_text: 'Existing',
        node_id: 'node-0',
        node_title: 'Node 0',
        rendered_content: 'Existing',
      },
    ]
    const parallelMatched = appendParallelNext(process, createTrace({
      node_id: 'node-1',
      execution_metadata: {
        parallel_id: 'parallel-1',
      },
    }))
    const notFinished = finishParallelTrace(process, createTrace({
      node_id: 'missing',
      execution_metadata: {
        parallel_id: 'parallel-missing',
      },
    }))
    // Traces belonging to an iteration/loop are handled elsewhere: ignored here.
    const ignoredIteration = upsertWorkflowNode(process, createTrace({
      iteration_id: 'iteration-1',
    }))
    const replacedNode = upsertWorkflowNode(process, createTrace({
      node_id: 'node-1',
    }))
    const ignoredFinish = finishWorkflowNode(process, createTrace({
      loop_id: 'loop-1',
    }))
    const unmatchedFinish = finishWorkflowNode(process, createTrace({
      node_id: 'missing',
      execution_metadata: {
        parallel_id: 'missing',
      },
    }))
    const finishedWithExtras = finishWorkflowNode(process, createTrace({
      node_id: 'node-1',
      execution_metadata: {
        parallel_id: 'parallel-1',
      },
      error: 'failed',
    }))
    const succeededWorkflow = applyWorkflowFinishedState(process, WorkflowRunningStatus.Succeeded)
    const outputlessWorkflow = applyWorkflowOutputs(undefined, null)
    const updatedHumanInput = updateHumanInputRequired(process, createHumanInput({
      node_id: 'node-1',
      expiration_time: 300,
    }))
    const appendedHumanInput = updateHumanInputRequired(process, createHumanInput({
      node_id: 'node-2',
    }))
    const noListFilled = updateHumanInputFilled(undefined, {
      action_id: 'action-1',
      action_text: 'Submit',
      node_id: 'node-1',
      node_title: 'Node',
      rendered_content: 'Done',
    })
    const appendedFilled = updateHumanInputFilled(process, {
      action_id: 'action-2',
      action_text: 'Append',
      node_id: 'node-2',
      node_title: 'Node 2',
      rendered_content: 'More',
    })
    const timeoutWithoutList = updateHumanInputTimeout(undefined, {
      node_id: 'node-1',
      node_title: 'Node',
      expiration_time: 200,
    })
    const timeoutWithMatch = updateHumanInputTimeout(process, {
      node_id: 'node-1',
      node_title: 'Node',
      expiration_time: 400,
    })
    // markNodesStopped must tolerate undefined input without throwing.
    markNodesStopped(undefined)
    expect(parallelMatched.tracing[0].details).toHaveLength(2)
    expect(notFinished).toEqual(expect.objectContaining({
      expand: true,
      tracing: process.tracing,
    }))
    expect(ignoredIteration).toEqual(process)
    expect(replacedNode?.tracing[0]).toEqual(expect.objectContaining({
      node_id: 'node-1',
      status: NodeRunningStatus.Running,
    }))
    expect(ignoredFinish).toEqual(process)
    expect(unmatchedFinish).toEqual(process)
    expect(finishedWithExtras?.tracing[0]).toEqual(expect.objectContaining({
      extras: {
        source: 'extra',
      },
      error: 'failed',
    }))
    expect(succeededWorkflow.status).toBe(WorkflowRunningStatus.Succeeded)
    expect(outputlessWorkflow.files).toEqual([])
    expect(updatedHumanInput.humanInputFormDataList?.[0].expiration_time).toBe(300)
    expect(appendedHumanInput.humanInputFormDataList).toHaveLength(2)
    expect(noListFilled.humanInputFilledFormDataList).toHaveLength(1)
    expect(appendedFilled.humanInputFilledFormDataList).toHaveLength(2)
    expect(timeoutWithoutList).toEqual(expect.objectContaining({
      status: WorkflowRunningStatus.Running,
      tracing: [],
    }))
    expect(timeoutWithMatch.humanInputFormDataList?.[0].expiration_time).toBe(400)
  })
})
// Tests for createWorkflowStreamHandlers: verifies that the SSE stream
// callbacks (workflow/node/iteration/loop lifecycle, text chunks, and
// human-input events) mutate the shared run state and fire the expected
// side-effect callbacks.
describe('createWorkflowStreamHandlers', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })
  // Builds a handler set wired to local mutable state plus spy setters,
  // mirroring the state the consuming hook would own. `overrides.isTimedOut`
  // lets individual tests simulate an already-expired run.
  const setupHandlers = (overrides: { isTimedOut?: () => boolean } = {}) => {
    let completionRes = ''
    let currentTaskId: string | null = null
    let isStopping = false
    let messageId: string | null = null
    let workflowProcessData: WorkflowProcess | undefined
    // Setter spies accept either a value or an updater function, matching
    // React's Dispatch<SetStateAction<...>> contract.
    const setCurrentTaskId = vi.fn((value: string | null | ((prev: string | null) => string | null)) => {
      currentTaskId = typeof value === 'function' ? value(currentTaskId) : value
    })
    const setIsStopping = vi.fn((value: boolean | ((prev: boolean) => boolean)) => {
      isStopping = typeof value === 'function' ? value(isStopping) : value
    })
    const setMessageId = vi.fn((value: string | null | ((prev: string | null) => string | null)) => {
      messageId = typeof value === 'function' ? value(messageId) : value
    })
    const setWorkflowProcessData = vi.fn((value: WorkflowProcess | undefined) => {
      workflowProcessData = value
    })
    const setCompletionRes = vi.fn((value: string) => {
      completionRes = value
    })
    const notify = vi.fn()
    const onCompleted = vi.fn()
    const resetRunState = vi.fn()
    const setRespondingFalse = vi.fn()
    const markEnded = vi.fn()
    const handlers = createWorkflowStreamHandlers({
      getCompletionRes: () => completionRes,
      getWorkflowProcessData: () => workflowProcessData,
      isTimedOut: overrides.isTimedOut ?? (() => false),
      markEnded,
      notify,
      onCompleted,
      resetRunState,
      setCompletionRes,
      setCurrentTaskId,
      setIsStopping,
      setMessageId,
      setRespondingFalse,
      setWorkflowProcessData,
      t: (key: string) => key,
      taskId: 3,
    })
    // State values are exposed as getters so assertions read the latest
    // values after events have run, not a stale snapshot.
    return {
      currentTaskId: () => currentTaskId,
      handlers,
      isStopping: () => isStopping,
      messageId: () => messageId,
      notify,
      onCompleted,
      resetRunState,
      setCompletionRes,
      setCurrentTaskId,
      setMessageId,
      setRespondingFalse,
      workflowProcessData: () => workflowProcessData,
    }
  }
  // Drives one full happy-path event sequence (start -> node -> iteration
  // -> loop -> text chunk -> human input -> paused -> finished) and checks
  // the aggregated process data and completion callbacks.
  it('should process workflow success and paused events', () => {
    const setup = setupHandlers()
    const handlers = setup.handlers as Required<Pick<IOtherOptions, 'onWorkflowStarted' | 'onTextChunk' | 'onHumanInputRequired' | 'onHumanInputFormFilled' | 'onHumanInputFormTimeout' | 'onWorkflowPaused' | 'onWorkflowFinished' | 'onNodeStarted' | 'onNodeFinished' | 'onIterationStart' | 'onIterationNext' | 'onIterationFinish' | 'onLoopStart' | 'onLoopNext' | 'onLoopFinish'>>
    act(() => {
      handlers.onWorkflowStarted({
        workflow_run_id: 'run-1',
        task_id: 'task-1',
        event: 'workflow_started',
        data: { id: 'run-1', workflow_id: 'wf-1', created_at: 0 },
      })
      handlers.onNodeStarted({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'node_started',
        data: createTrace({ node_id: 'node-1' }),
      })
      handlers.onNodeFinished({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'node_finished',
        data: createTrace({ node_id: 'node-1', error: '' }),
      })
      handlers.onIterationStart({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'iteration_start',
        data: createTrace({
          node_id: 'iter-1',
          execution_metadata: { parallel_id: 'parallel-1' },
          details: [[]],
        }),
      })
      handlers.onIterationNext({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'iteration_next',
        data: createTrace({
          node_id: 'iter-1',
          execution_metadata: { parallel_id: 'parallel-1' },
          details: [[]],
        }),
      })
      handlers.onIterationFinish({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'iteration_finish',
        data: createTrace({
          node_id: 'iter-1',
          execution_metadata: { parallel_id: 'parallel-1' },
        }),
      })
      handlers.onLoopStart({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'loop_start',
        data: createTrace({
          node_id: 'loop-1',
          execution_metadata: { parallel_id: 'parallel-2' },
          details: [[]],
        }),
      })
      handlers.onLoopNext({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'loop_next',
        data: createTrace({
          node_id: 'loop-1',
          execution_metadata: { parallel_id: 'parallel-2' },
          details: [[]],
        }),
      })
      handlers.onLoopFinish({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'loop_finish',
        data: createTrace({
          node_id: 'loop-1',
          execution_metadata: { parallel_id: 'parallel-2' },
        }),
      })
      handlers.onTextChunk({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'text_chunk',
        data: { text: 'Hello' },
      })
      handlers.onHumanInputRequired({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'human_input_required',
        data: createHumanInput({ node_id: 'node-1' }),
      })
      handlers.onHumanInputFormFilled({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'human_input_form_filled',
        data: {
          node_id: 'node-1',
          node_title: 'Node',
          rendered_content: 'Done',
          action_id: 'action-1',
          action_text: 'Submit',
        },
      })
      handlers.onHumanInputFormTimeout({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'human_input_form_timeout',
        data: {
          node_id: 'node-1',
          node_title: 'Node',
          expiration_time: 200,
        },
      })
      handlers.onWorkflowPaused({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'workflow_paused',
        data: {
          outputs: {},
          paused_nodes: [],
          reasons: [],
          workflow_run_id: 'run-1',
        },
      })
      handlers.onWorkflowFinished({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'workflow_finished',
        data: {
          id: 'run-1',
          workflow_id: 'wf-1',
          status: WorkflowRunningStatus.Succeeded,
          outputs: { answer: 'Hello' },
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(setup.currentTaskId()).toBe('task-1')
    expect(setup.isStopping()).toBe(false)
    expect(setup.workflowProcessData()).toEqual(expect.objectContaining({
      resultText: 'Hello',
      status: WorkflowRunningStatus.Succeeded,
    }))
    // sseGetMock is the file-level SSE transport spy; the pause handler is
    // expected to re-subscribe to the run's event stream.
    expect(sseGetMock).toHaveBeenCalledWith('/workflow/run-1/events', {}, expect.any(Object))
    expect(setup.messageId()).toBe('run-1')
    expect(setup.onCompleted).toHaveBeenCalledWith('{"answer":"Hello"}', 3, true)
    expect(setup.setRespondingFalse).toHaveBeenCalled()
    expect(setup.resetRunState).toHaveBeenCalled()
  })
  // Timed-out runs should warn instead of completing; failed runs should
  // surface the workflow error and complete unsuccessfully.
  it('should handle timeout and workflow failures', () => {
    const timeoutSetup = setupHandlers({
      isTimedOut: () => true,
    })
    const timeoutHandlers = timeoutSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowFinished'>>
    act(() => {
      timeoutHandlers.onWorkflowFinished({
        task_id: 'task-1',
        workflow_run_id: 'run-1',
        event: 'workflow_finished',
        data: {
          id: 'run-1',
          workflow_id: 'wf-1',
          status: WorkflowRunningStatus.Succeeded,
          outputs: null,
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(timeoutSetup.notify).toHaveBeenCalledWith({
      type: 'warning',
      message: 'warningMessage.timeoutExceeded',
    })
    const failureSetup = setupHandlers()
    const failureHandlers = failureSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowStarted' | 'onWorkflowFinished'>>
    act(() => {
      failureHandlers.onWorkflowStarted({
        workflow_run_id: 'run-2',
        task_id: 'task-2',
        event: 'workflow_started',
        data: { id: 'run-2', workflow_id: 'wf-2', created_at: 0 },
      })
      failureHandlers.onWorkflowFinished({
        task_id: 'task-2',
        workflow_run_id: 'run-2',
        event: 'workflow_finished',
        data: {
          id: 'run-2',
          workflow_id: 'wf-2',
          status: WorkflowRunningStatus.Failed,
          outputs: null,
          error: 'failed',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(failureSetup.notify).toHaveBeenCalledWith({
      type: 'error',
      message: 'failed',
    })
    expect(failureSetup.onCompleted).toHaveBeenCalledWith('', 3, false)
  })
  // Resuming into an existing process object, stopping a run mid-flight,
  // and finishing with object-shaped (non-string) outputs.
  it('should cover existing workflow starts, stopped runs, and non-string outputs', () => {
    const setup = setupHandlers()
    // Pre-existing process (e.g. resumed after a pause) with a waiting node.
    let existingProcess: WorkflowProcess = {
      status: WorkflowRunningStatus.Paused,
      tracing: [
        createTrace({
          node_id: 'existing-node',
          status: NodeRunningStatus.Waiting,
        }),
      ],
      expand: false,
      resultText: '',
    }
    const handlers = createWorkflowStreamHandlers({
      getCompletionRes: () => '',
      getWorkflowProcessData: () => existingProcess,
      isTimedOut: () => false,
      markEnded: vi.fn(),
      notify: setup.notify,
      onCompleted: setup.onCompleted,
      resetRunState: setup.resetRunState,
      setCompletionRes: setup.setCompletionRes,
      setCurrentTaskId: setup.setCurrentTaskId,
      setIsStopping: vi.fn(),
      setMessageId: setup.setMessageId,
      setRespondingFalse: setup.setRespondingFalse,
      setWorkflowProcessData: (value) => {
        existingProcess = value!
      },
      t: (key: string) => key,
      taskId: 5,
    }) as Required<Pick<IOtherOptions, 'onWorkflowStarted' | 'onWorkflowFinished' | 'onTextReplace'>>
    act(() => {
      handlers.onWorkflowStarted({
        workflow_run_id: 'run-existing',
        task_id: '',
        event: 'workflow_started',
        data: { id: 'run-existing', workflow_id: 'wf-1', created_at: 0 },
      })
      handlers.onTextReplace({
        task_id: 'task-existing',
        workflow_run_id: 'run-existing',
        event: 'text_replace',
        data: { text: 'Replaced text' },
      })
    })
    expect(existingProcess).toEqual(expect.objectContaining({
      expand: true,
      status: WorkflowRunningStatus.Running,
      resultText: 'Replaced text',
    }))
    act(() => {
      handlers.onWorkflowFinished({
        task_id: 'task-existing',
        workflow_run_id: 'run-existing',
        event: 'workflow_finished',
        data: {
          id: 'run-existing',
          workflow_id: 'wf-1',
          status: WorkflowRunningStatus.Stopped,
          outputs: null,
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    // A stopped run also marks its in-flight nodes as stopped.
    expect(existingProcess.status).toBe(WorkflowRunningStatus.Stopped)
    expect(existingProcess.tracing[0].status).toBe(NodeRunningStatus.Stopped)
    expect(setup.onCompleted).toHaveBeenCalledWith('', 5, false)
    const noOutputSetup = setupHandlers()
    const noOutputHandlers = noOutputSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowStarted' | 'onWorkflowFinished' | 'onTextReplace'>>
    act(() => {
      noOutputHandlers.onWorkflowStarted({
        workflow_run_id: 'run-no-output',
        task_id: '',
        event: 'workflow_started',
        data: { id: 'run-no-output', workflow_id: 'wf-2', created_at: 0 },
      })
      noOutputHandlers.onTextReplace({
        task_id: 'task-no-output',
        workflow_run_id: 'run-no-output',
        event: 'text_replace',
        data: { text: 'Draft' },
      })
      noOutputHandlers.onWorkflowFinished({
        task_id: 'task-no-output',
        workflow_run_id: 'run-no-output',
        event: 'workflow_finished',
        data: {
          id: 'run-no-output',
          workflow_id: 'wf-2',
          status: WorkflowRunningStatus.Succeeded,
          outputs: null,
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(noOutputSetup.setCompletionRes).toHaveBeenCalledWith('')
    const objectOutputSetup = setupHandlers()
    const objectOutputHandlers = objectOutputSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowStarted' | 'onWorkflowFinished'>>
    act(() => {
      objectOutputHandlers.onWorkflowStarted({
        workflow_run_id: 'run-object',
        task_id: undefined as unknown as string,
        event: 'workflow_started',
        data: { id: 'run-object', workflow_id: 'wf-3', created_at: 0 },
      })
      objectOutputHandlers.onWorkflowFinished({
        task_id: 'task-object',
        workflow_run_id: 'run-object',
        event: 'workflow_finished',
        data: {
          id: 'run-object',
          workflow_id: 'wf-3',
          status: WorkflowRunningStatus.Succeeded,
          outputs: {
            answer: 'Hello',
            meta: {
              mode: 'object',
            },
          },
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    // An undefined task_id must not populate currentTaskId.
    expect(objectOutputSetup.currentTaskId()).toBeNull()
    expect(objectOutputSetup.setCompletionRes).toHaveBeenCalledWith('{"answer":"Hello","meta":{"mode":"object"}}')
    expect(objectOutputSetup.workflowProcessData()).toEqual(expect.objectContaining({
      status: WorkflowRunningStatus.Succeeded,
      resultText: '',
    }))
  })
  // Output serialization edge cases: null -> '', plain string passthrough,
  // and circular objects falling back to String() ('[object Object]').
  it('should serialize empty, string, and circular workflow outputs', () => {
    const noOutputSetup = setupHandlers()
    const noOutputHandlers = noOutputSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowFinished'>>
    act(() => {
      noOutputHandlers.onWorkflowFinished({
        task_id: 'task-empty',
        workflow_run_id: 'run-empty',
        event: 'workflow_finished',
        data: {
          id: 'run-empty',
          workflow_id: 'wf-empty',
          status: WorkflowRunningStatus.Succeeded,
          outputs: null,
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(noOutputSetup.setCompletionRes).toHaveBeenCalledWith('')
    const stringOutputSetup = setupHandlers()
    const stringOutputHandlers = stringOutputSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowFinished'>>
    act(() => {
      stringOutputHandlers.onWorkflowFinished({
        task_id: 'task-string',
        workflow_run_id: 'run-string',
        event: 'workflow_finished',
        data: {
          id: 'run-string',
          workflow_id: 'wf-string',
          status: WorkflowRunningStatus.Succeeded,
          outputs: 'plain text output',
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(stringOutputSetup.setCompletionRes).toHaveBeenCalledWith('plain text output')
    const circularOutputSetup = setupHandlers()
    const circularOutputHandlers = circularOutputSetup.handlers as Required<Pick<IOtherOptions, 'onWorkflowFinished'>>
    // Self-referencing payload: JSON.stringify would throw, so the handler
    // is expected to fall back to String(outputs).
    const circularOutputs: Record<string, unknown> = {
      answer: 'Hello',
    }
    circularOutputs.self = circularOutputs
    act(() => {
      circularOutputHandlers.onWorkflowFinished({
        task_id: 'task-circular',
        workflow_run_id: 'run-circular',
        event: 'workflow_finished',
        data: {
          id: 'run-circular',
          workflow_id: 'wf-circular',
          status: WorkflowRunningStatus.Succeeded,
          outputs: circularOutputs,
          error: '',
          elapsed_time: 0,
          total_tokens: 0,
          total_steps: 0,
          created_at: 0,
          created_by: {
            id: 'user-1',
            name: 'User',
            email: 'user@example.com',
          },
          finished_at: 0,
        },
      })
    })
    expect(circularOutputSetup.setCompletionRes).toHaveBeenCalledWith('[object Object]')
  })
})

View File

@ -0,0 +1,200 @@
import type { FeedbackType } from '@/app/components/base/chat/chat/type'
import { act, renderHook, waitFor } from '@testing-library/react'
import { AppSourceType } from '@/service/share'
import { useResultRunState } from '../use-result-run-state'
// Spies are created via vi.hoisted so the vi.mock factory below (which is
// hoisted above imports by vitest) can reference them safely.
const {
  stopChatMessageRespondingMock,
  stopWorkflowMessageMock,
  updateFeedbackMock,
} = vi.hoisted(() => ({
  stopChatMessageRespondingMock: vi.fn(),
  stopWorkflowMessageMock: vi.fn(),
  updateFeedbackMock: vi.fn(),
}))
// Partially mock the share service: keep every real export but route the
// three network calls through the hoisted spies, preserving the original
// call signatures via Parameters<...>.
vi.mock('@/service/share', async () => {
  const actual = await vi.importActual<typeof import('@/service/share')>('@/service/share')
  return {
    ...actual,
    stopChatMessageResponding: (...args: Parameters<typeof actual.stopChatMessageResponding>) => stopChatMessageRespondingMock(...args),
    stopWorkflowMessage: (...args: Parameters<typeof actual.stopWorkflowMessage>) => stopWorkflowMessageMock(...args),
    updateFeedback: (...args: Parameters<typeof actual.updateFeedback>) => updateFeedbackMock(...args),
  }
})
// Tests for the useResultRunState hook: stop handling (chat vs workflow),
// feedback submission, external stop control, and error reporting.
describe('useResultRunState', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Default all service spies to resolved promises; individual tests
    // override with rejections where needed.
    stopChatMessageRespondingMock.mockResolvedValue(undefined)
    stopWorkflowMessageMock.mockResolvedValue(undefined)
    updateFeedbackMock.mockResolvedValue(undefined)
  })
  it('should expose run control and stop completion requests', async () => {
    const notify = vi.fn()
    const onRunControlChange = vi.fn()
    const { result } = renderHook(() => useResultRunState({
      appId: 'app-1',
      appSourceType: AppSourceType.webApp,
      controlStopResponding: 0,
      isWorkflow: false,
      notify,
      onRunControlChange,
    }))
    const abort = vi.fn()
    act(() => {
      result.current.abortControllerRef.current = { abort } as unknown as AbortController
      result.current.setCurrentTaskId('task-1')
      result.current.setRespondingTrue()
    })
    await waitFor(() => {
      expect(onRunControlChange).toHaveBeenLastCalledWith(expect.objectContaining({
        isStopping: false,
      }))
    })
    await act(async () => {
      await result.current.handleStop()
    })
    // Non-workflow apps stop via the chat-message endpoint; the in-flight
    // request is also aborted.
    expect(stopChatMessageRespondingMock).toHaveBeenCalledWith('app-1', 'task-1', AppSourceType.webApp, 'app-1')
    expect(abort).toHaveBeenCalledTimes(1)
  })
  it('should update feedback and react to external stop control', async () => {
    const notify = vi.fn()
    const onRunControlChange = vi.fn()
    const { result, rerender } = renderHook(({ controlStopResponding }) => useResultRunState({
      appId: 'app-2',
      appSourceType: AppSourceType.installedApp,
      controlStopResponding,
      isWorkflow: true,
      notify,
      onRunControlChange,
    }), {
      initialProps: { controlStopResponding: 0 },
    })
    const abort = vi.fn()
    act(() => {
      result.current.abortControllerRef.current = { abort } as unknown as AbortController
      result.current.setMessageId('message-1')
    })
    await act(async () => {
      await result.current.handleFeedback({
        rating: 'like',
      } satisfies FeedbackType)
    })
    expect(updateFeedbackMock).toHaveBeenCalledWith({
      url: '/messages/message-1/feedbacks',
      body: {
        rating: 'like',
        content: undefined,
      },
    }, AppSourceType.installedApp, 'app-2')
    expect(result.current.feedback).toEqual({
      rating: 'like',
    })
    act(() => {
      result.current.setCurrentTaskId('task-2')
      result.current.setRespondingTrue()
    })
    // Bumping controlStopResponding from the outside should abort the run
    // and clear the exposed run control.
    rerender({ controlStopResponding: 1 })
    await waitFor(() => {
      expect(abort).toHaveBeenCalled()
      expect(result.current.currentTaskId).toBeNull()
      expect(onRunControlChange).toHaveBeenLastCalledWith(null)
    })
  })
  it('should stop workflow requests through the workflow stop API', async () => {
    const notify = vi.fn()
    const { result } = renderHook(() => useResultRunState({
      appId: 'app-3',
      appSourceType: AppSourceType.installedApp,
      controlStopResponding: 0,
      isWorkflow: true,
      notify,
    }))
    act(() => {
      result.current.setCurrentTaskId('task-3')
    })
    await act(async () => {
      await result.current.handleStop()
    })
    expect(stopWorkflowMessageMock).toHaveBeenCalledWith('app-3', 'task-3', AppSourceType.installedApp, 'app-3')
  })
  it('should ignore invalid stops and report non-Error failures', async () => {
    const notify = vi.fn()
    stopChatMessageRespondingMock.mockRejectedValueOnce('stop failed')
    const { result } = renderHook(() => useResultRunState({
      appSourceType: AppSourceType.webApp,
      controlStopResponding: 0,
      isWorkflow: false,
      notify,
    }))
    // With no task id set, handleStop must be a no-op.
    await act(async () => {
      await result.current.handleStop()
    })
    expect(stopChatMessageRespondingMock).not.toHaveBeenCalled()
    act(() => {
      result.current.setCurrentTaskId('task-4')
      result.current.setIsStopping(prev => !prev)
      result.current.setIsStopping(prev => !prev)
    })
    await act(async () => {
      await result.current.handleStop()
    })
    // appId is undefined here, so the fallback id is the empty string; the
    // string rejection is forwarded to notify as-is.
    expect(stopChatMessageRespondingMock).toHaveBeenCalledWith(undefined, 'task-4', AppSourceType.webApp, '')
    expect(notify).toHaveBeenCalledWith({
      type: 'error',
      message: 'stop failed',
    })
    expect(result.current.isStopping).toBe(false)
  })
  it('should report Error instances from workflow stop failures without an app id fallback', async () => {
    const notify = vi.fn()
    stopWorkflowMessageMock.mockRejectedValueOnce(new Error('workflow stop failed'))
    const { result } = renderHook(() => useResultRunState({
      appSourceType: AppSourceType.installedApp,
      controlStopResponding: 0,
      isWorkflow: true,
      notify,
    }))
    act(() => {
      result.current.setCurrentTaskId('task-5')
    })
    await act(async () => {
      await result.current.handleStop()
    })
    expect(stopWorkflowMessageMock).toHaveBeenCalledWith(undefined, 'task-5', AppSourceType.installedApp, '')
    expect(notify).toHaveBeenCalledWith({
      type: 'error',
      message: 'workflow stop failed',
    })
  })
})

View File

@ -0,0 +1,510 @@
import type { ResultInputValue } from '../../result-request'
import type { ResultRunStateController } from '../use-result-run-state'
import type { PromptConfig } from '@/models/debug'
import type { AppSourceType } from '@/service/share'
import type { VisionSettings } from '@/types/app'
import { act, renderHook, waitFor } from '@testing-library/react'
import { AppSourceType as AppSourceTypeEnum } from '@/service/share'
import { Resolution, TransferMethod } from '@/types/app'
import { useResultSender } from '../use-result-sender'
// Hoisted spies shared by the vi.mock factories below (vitest hoists
// vi.mock calls above imports, so plain consts would not be visible there).
const {
  buildResultRequestDataMock,
  createWorkflowStreamHandlersMock,
  sendCompletionMessageMock,
  sendWorkflowMessageMock,
  sleepMock,
  validateResultRequestMock,
} = vi.hoisted(() => ({
  buildResultRequestDataMock: vi.fn(),
  createWorkflowStreamHandlersMock: vi.fn(),
  sendCompletionMessageMock: vi.fn(),
  sendWorkflowMessageMock: vi.fn(),
  sleepMock: vi.fn(),
  validateResultRequestMock: vi.fn(),
}))
// Partial mock: keep real share-service exports, intercept the two send
// calls while preserving their signatures.
vi.mock('@/service/share', async () => {
  const actual = await vi.importActual<typeof import('@/service/share')>('@/service/share')
  return {
    ...actual,
    sendCompletionMessage: (...args: Parameters<typeof actual.sendCompletionMessage>) => sendCompletionMessageMock(...args),
    sendWorkflowMessage: (...args: Parameters<typeof actual.sendWorkflowMessage>) => sendWorkflowMessageMock(...args),
  }
})
// Intercept sleep so tests control the timeout race deterministically.
vi.mock('@/utils', async () => {
  const actual = await vi.importActual<typeof import('@/utils')>('@/utils')
  return {
    ...actual,
    sleep: (...args: Parameters<typeof actual.sleep>) => sleepMock(...args),
  }
})
// Fully mock the local request helpers and the stream-handler factory.
vi.mock('../../result-request', () => ({
  buildResultRequestData: (...args: unknown[]) => buildResultRequestDataMock(...args),
  validateResultRequest: (...args: unknown[]) => validateResultRequestMock(...args),
}))
vi.mock('../../workflow-stream-handlers', () => ({
  createWorkflowStreamHandlers: (...args: unknown[]) => createWorkflowStreamHandlersMock(...args),
}))
// Test harness shape: `state` is the observable mirror of values written
// through the `runState` controller's spy setters.
type RunStateHarness = {
  state: {
    completionRes: string
    currentTaskId: string | null
    messageId: string | null
    workflowProcessData: ResultRunStateController['workflowProcessData']
  }
  runState: ResultRunStateController
}
// Subset of the streaming callbacks that sendCompletionMessage receives;
// tests capture these to drive the stream manually.
type CompletionHandlers = {
  getAbortController: (abortController: AbortController) => void
  onCompleted: () => void
  onData: (chunk: string, isFirstMessage: boolean, info: { messageId: string, taskId?: string }) => void
  onError: () => void
  onMessageReplace: (messageReplace: { answer: string }) => void
}
// Builds a fake ResultRunStateController whose spy setters mirror every
// write into a plain `tracked` object, letting tests observe the mutations
// a hook performs without rendering the real run-state hook.
const createRunStateHarness = (): RunStateHarness => {
  const tracked: RunStateHarness['state'] = {
    completionRes: '',
    currentTaskId: null,
    messageId: null,
    workflowProcessData: undefined,
  }
  // Resolves a React-style SetStateAction (value or updater) against the
  // previous value.
  const resolve = <T>(next: T | ((prev: T) => T), prev: T): T =>
    typeof next === 'function' ? (next as (prev: T) => T)(prev) : next
  const runState: ResultRunStateController = {
    abortControllerRef: { current: null },
    clearMoreLikeThis: vi.fn(),
    completionRes: '',
    controlClearMoreLikeThis: 0,
    currentTaskId: null,
    feedback: { rating: null },
    getCompletionRes: vi.fn(() => tracked.completionRes),
    getWorkflowProcessData: vi.fn(() => tracked.workflowProcessData),
    handleFeedback: vi.fn(),
    handleStop: vi.fn(),
    isResponding: false,
    isStopping: false,
    messageId: null,
    // Clears everything a fresh run starts from, in both mirrors.
    prepareForNewRun: vi.fn(() => {
      tracked.completionRes = ''
      tracked.currentTaskId = null
      tracked.messageId = null
      tracked.workflowProcessData = undefined
      runState.completionRes = ''
      runState.currentTaskId = null
      runState.messageId = null
      runState.workflowProcessData = undefined
    }),
    // Drops only the task id and the stopping flag (post-run cleanup).
    resetRunState: vi.fn(() => {
      tracked.currentTaskId = null
      runState.currentTaskId = null
      runState.isStopping = false
    }),
    setCompletionRes: vi.fn((value: string) => {
      tracked.completionRes = value
      runState.completionRes = value
    }),
    setCurrentTaskId: vi.fn((value) => {
      tracked.currentTaskId = resolve(value, tracked.currentTaskId)
      runState.currentTaskId = tracked.currentTaskId
    }),
    setIsStopping: vi.fn((value) => {
      runState.isStopping = resolve(value, runState.isStopping)
    }),
    setMessageId: vi.fn((value) => {
      tracked.messageId = resolve(value, tracked.messageId)
      runState.messageId = tracked.messageId
    }),
    setRespondingFalse: vi.fn(() => {
      runState.isResponding = false
    }),
    setRespondingTrue: vi.fn(() => {
      runState.isResponding = true
    }),
    setWorkflowProcessData: vi.fn((value) => {
      tracked.workflowProcessData = value
      runState.workflowProcessData = value
    }),
    workflowProcessData: undefined,
  }
  return { state: tracked, runState }
}
// Minimal prompt configuration fixture: one required string variable.
const promptConfig: PromptConfig = {
  prompt_template: 'template',
  prompt_variables: [
    { key: 'name', name: 'Name', type: 'string', required: true },
  ],
}
// Vision settings fixture with uploads disabled (not under test here).
const visionConfig: VisionSettings = {
  enabled: false,
  number_limits: 2,
  detail: Resolution.low,
  transfer_methods: [TransferMethod.local_file],
}
// Per-test overrides for rendering useResultSender; everything defaults to
// a simple web-app completion setup.
type RenderSenderOptions = {
  appSourceType?: AppSourceType
  controlRetry?: number
  controlSend?: number
  inputs?: Record<string, ResultInputValue>
  isPC?: boolean
  isWorkflow?: boolean
  runState?: ResultRunStateController
  taskId?: number
}
// Renders the useResultSender hook with spy callbacks and returns the hook
// handle plus the spies for assertions. `controlRetry`/`controlSend` are
// rerender props so tests can trigger the hook's control effects.
const renderSender = ({
  appSourceType = AppSourceTypeEnum.webApp,
  controlRetry = 0,
  controlSend = 0,
  inputs = { name: 'Alice' },
  isPC = true,
  isWorkflow = false,
  runState,
  taskId,
}: RenderSenderOptions = {}) => {
  const notify = vi.fn()
  const onCompleted = vi.fn()
  const onRunStart = vi.fn()
  const onShowRes = vi.fn()
  const hook = renderHook((props: { controlRetry: number, controlSend: number }) => useResultSender({
    appId: 'app-1',
    appSourceType,
    completionFiles: [],
    controlRetry: props.controlRetry,
    controlSend: props.controlSend,
    inputs,
    isCallBatchAPI: false,
    isPC,
    isWorkflow,
    notify,
    onCompleted,
    onRunStart,
    onShowRes,
    promptConfig,
    // Fall back to a throwaway harness when the test does not need to
    // observe run-state mutations.
    runState: runState || createRunStateHarness().runState,
    t: (key: string) => key,
    taskId,
    visionConfig,
  }), {
    initialProps: {
      controlRetry,
      controlSend,
    },
  })
  return {
    ...hook,
    notify,
    onCompleted,
    onRunStart,
    onShowRes,
  }
}
// Tests for the useResultSender hook: send gating, validation, completion
// and workflow request dispatch, stream-callback handling, and the
// timeout fallback race.
describe('useResultSender', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    validateResultRequestMock.mockReturnValue({ canSend: true })
    buildResultRequestDataMock.mockReturnValue({ inputs: { name: 'Alice' } })
    createWorkflowStreamHandlersMock.mockReturnValue({ onWorkflowFinished: vi.fn() })
    sendCompletionMessageMock.mockResolvedValue(undefined)
    sendWorkflowMessageMock.mockResolvedValue(undefined)
    // Default: sleep never resolves, so the timeout branch never fires
    // unless a test overrides it.
    sleepMock.mockImplementation(() => new Promise<void>(() => {}))
  })
  it('should reject sends while a response is already in progress', async () => {
    const { runState } = createRunStateHarness()
    runState.isResponding = true
    const { result, notify } = renderSender({ runState })
    await act(async () => {
      expect(await result.current.handleSend()).toBe(false)
    })
    expect(notify).toHaveBeenCalledWith({
      type: 'info',
      message: 'errorMessage.waitForResponse',
    })
    expect(validateResultRequestMock).not.toHaveBeenCalled()
    expect(sendCompletionMessageMock).not.toHaveBeenCalled()
  })
  it('should surface validation failures without building request payloads', async () => {
    const { runState } = createRunStateHarness()
    validateResultRequestMock.mockReturnValue({
      canSend: false,
      notification: {
        type: 'error',
        message: 'invalid',
      },
    })
    const { result, notify } = renderSender({ runState })
    await act(async () => {
      expect(await result.current.handleSend()).toBe(false)
    })
    expect(notify).toHaveBeenCalledWith({
      type: 'error',
      message: 'invalid',
    })
    expect(buildResultRequestDataMock).not.toHaveBeenCalled()
    expect(sendCompletionMessageMock).not.toHaveBeenCalled()
  })
  it('should send completion requests when controlSend changes and process callbacks', async () => {
    const harness = createRunStateHarness()
    // Capture the streaming callbacks handed to sendCompletionMessage so
    // the test can drive the stream manually.
    let completionHandlers: CompletionHandlers | undefined
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers as CompletionHandlers
    })
    const { rerender, onCompleted, onRunStart, onShowRes } = renderSender({
      controlSend: 0,
      isPC: false,
      runState: harness.runState,
      taskId: 7,
    })
    // Bumping controlSend triggers a send via the hook's effect.
    rerender({
      controlRetry: 0,
      controlSend: 1,
    })
    expect(validateResultRequestMock).toHaveBeenCalledWith(expect.objectContaining({
      inputs: { name: 'Alice' },
      isCallBatchAPI: false,
    }))
    expect(buildResultRequestDataMock).toHaveBeenCalled()
    expect(harness.runState.prepareForNewRun).toHaveBeenCalledTimes(1)
    expect(harness.runState.setRespondingTrue).toHaveBeenCalledTimes(1)
    expect(harness.runState.clearMoreLikeThis).toHaveBeenCalledTimes(1)
    expect(onShowRes).toHaveBeenCalledTimes(1)
    expect(onRunStart).toHaveBeenCalledTimes(1)
    expect(sendCompletionMessageMock).toHaveBeenCalledWith(
      { inputs: { name: 'Alice' } },
      expect.objectContaining({
        onCompleted: expect.any(Function),
        onData: expect.any(Function),
      }),
      AppSourceTypeEnum.webApp,
      'app-1',
    )
    const abortController = {} as AbortController
    expect(completionHandlers).toBeDefined()
    completionHandlers!.getAbortController(abortController)
    expect(harness.runState.abortControllerRef.current).toBe(abortController)
    await act(async () => {
      completionHandlers!.onData('Hello', false, {
        messageId: 'message-1',
        taskId: 'task-1',
      })
    })
    expect(harness.runState.setCurrentTaskId).toHaveBeenCalled()
    expect(harness.runState.currentTaskId).toBe('task-1')
    await act(async () => {
      completionHandlers!.onMessageReplace({ answer: 'Replaced' })
      completionHandlers!.onCompleted()
    })
    expect(harness.runState.setCompletionRes).toHaveBeenLastCalledWith('Replaced')
    expect(harness.runState.setRespondingFalse).toHaveBeenCalled()
    expect(harness.runState.resetRunState).toHaveBeenCalled()
    expect(harness.runState.setMessageId).toHaveBeenCalledWith('message-1')
    expect(onCompleted).toHaveBeenCalledWith('Replaced', 7, true)
  })
  it('should trigger workflow sends on retry and report workflow request failures', async () => {
    const harness = createRunStateHarness()
    sendWorkflowMessageMock.mockRejectedValue(new Error('workflow failed'))
    const { rerender, notify } = renderSender({
      controlRetry: 0,
      isWorkflow: true,
      runState: harness.runState,
    })
    // Bumping controlRetry triggers a resend via the hook's effect.
    rerender({
      controlRetry: 2,
      controlSend: 0,
    })
    await waitFor(() => {
      expect(createWorkflowStreamHandlersMock).toHaveBeenCalledWith(expect.objectContaining({
        getCompletionRes: harness.runState.getCompletionRes,
        resetRunState: harness.runState.resetRunState,
        setWorkflowProcessData: harness.runState.setWorkflowProcessData,
      }))
      expect(sendWorkflowMessageMock).toHaveBeenCalledWith(
        { inputs: { name: 'Alice' } },
        expect.any(Object),
        AppSourceTypeEnum.webApp,
        'app-1',
      )
    })
    await waitFor(() => {
      expect(harness.runState.setRespondingFalse).toHaveBeenCalled()
      expect(harness.runState.resetRunState).toHaveBeenCalled()
      expect(notify).toHaveBeenCalledWith({
        type: 'error',
        message: 'workflow failed',
      })
    })
    // Workflow runs never touch the more-like-this state.
    expect(harness.runState.clearMoreLikeThis).not.toHaveBeenCalled()
  })
  it('should stringify non-Error workflow failures', async () => {
    const harness = createRunStateHarness()
    sendWorkflowMessageMock.mockRejectedValue('workflow failed')
    const { result, notify } = renderSender({
      isWorkflow: true,
      runState: harness.runState,
    })
    await act(async () => {
      await result.current.handleSend()
    })
    await waitFor(() => {
      expect(notify).toHaveBeenCalledWith({
        type: 'error',
        message: 'workflow failed',
      })
    })
  })
  it('should timeout unfinished completion requests', async () => {
    const harness = createRunStateHarness()
    // sleep resolves immediately, so the timeout branch wins the race.
    sleepMock.mockResolvedValue(undefined)
    const { result, onCompleted } = renderSender({
      runState: harness.runState,
      taskId: 9,
    })
    await act(async () => {
      expect(await result.current.handleSend()).toBe(true)
    })
    await waitFor(() => {
      expect(harness.runState.setRespondingFalse).toHaveBeenCalled()
      expect(harness.runState.resetRunState).toHaveBeenCalled()
      expect(onCompleted).toHaveBeenCalledWith('', 9, false)
    })
  })
  it('should ignore empty task ids and surface timeout warnings from stream callbacks', async () => {
    const harness = createRunStateHarness()
    let completionHandlers: CompletionHandlers | undefined
    sleepMock.mockResolvedValue(undefined)
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers as CompletionHandlers
    })
    const { result, notify, onCompleted } = renderSender({
      runState: harness.runState,
      taskId: 11,
    })
    await act(async () => {
      await result.current.handleSend()
    })
    await act(async () => {
      // Whitespace-only task ids must not be stored.
      completionHandlers!.onData('Hello', false, {
        messageId: 'message-2',
        taskId: ' ',
      })
      completionHandlers!.onCompleted()
      completionHandlers!.onError()
    })
    expect(harness.runState.currentTaskId).toBeNull()
    // Both the completion and the error callback observe the timed-out run
    // and emit the same warning.
    expect(notify).toHaveBeenNthCalledWith(1, {
      type: 'warning',
      message: 'warningMessage.timeoutExceeded',
    })
    expect(notify).toHaveBeenNthCalledWith(2, {
      type: 'warning',
      message: 'warningMessage.timeoutExceeded',
    })
    expect(onCompleted).toHaveBeenCalledWith('', 11, false)
  })
  it('should avoid timeout fallback after a completion response has already ended', async () => {
    const harness = createRunStateHarness()
    // Manually-resolved sleep: the stream completes first, then the timer
    // fires and must be a no-op.
    let resolveSleep!: () => void
    let completionHandlers: CompletionHandlers | undefined
    sleepMock.mockImplementation(() => new Promise<void>((resolve) => {
      resolveSleep = resolve
    }))
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers as CompletionHandlers
    })
    const { result, onCompleted } = renderSender({
      runState: harness.runState,
      taskId: 12,
    })
    await act(async () => {
      await result.current.handleSend()
    })
    await act(async () => {
      harness.runState.setCompletionRes('Done')
      completionHandlers!.onCompleted()
      resolveSleep()
      await Promise.resolve()
    })
    expect(onCompleted).toHaveBeenCalledWith('Done', 12, true)
    expect(onCompleted).toHaveBeenCalledTimes(1)
  })
  it('should handle non-timeout stream errors as failed completions', async () => {
    const harness = createRunStateHarness()
    let completionHandlers: CompletionHandlers | undefined
    sendCompletionMessageMock.mockImplementation(async (_data, handlers) => {
      completionHandlers = handlers as CompletionHandlers
    })
    const { result, onCompleted } = renderSender({
      runState: harness.runState,
      taskId: 13,
    })
    await act(async () => {
      await result.current.handleSend()
      completionHandlers!.onError()
    })
    expect(harness.runState.setRespondingFalse).toHaveBeenCalled()
    expect(harness.runState.resetRunState).toHaveBeenCalled()
    expect(onCompleted).toHaveBeenCalledWith('', 13, false)
  })
})

View File

@ -0,0 +1,237 @@
import type { Dispatch, MutableRefObject, SetStateAction } from 'react'
import type { FeedbackType } from '@/app/components/base/chat/chat/type'
import type { WorkflowProcess } from '@/app/components/base/chat/types'
import type { AppSourceType } from '@/service/share'
import { useBoolean } from 'ahooks'
import { useCallback, useEffect, useReducer, useRef, useState } from 'react'
import {
stopChatMessageResponding,
stopWorkflowMessage,
updateFeedback,
} from '@/service/share'
// Toast-style notifier used to surface stop-request failures.
type Notify = (payload: { type: 'error', message: string }) => void
// Run-control slice: which task is in flight and whether a stop request is
// currently awaiting the server.
type RunControlState = {
currentTaskId: string | null
isStopping: boolean
}
// Actions for the run-control reducer. The setter actions mirror React's
// SetStateAction semantics (plain value or functional updater).
type RunControlAction
= | { type: 'reset' }
| { type: 'setCurrentTaskId', value: SetStateAction<string | null> }
| { type: 'setIsStopping', value: SetStateAction<boolean> }
type UseResultRunStateOptions = {
appId?: string
appSourceType: AppSourceType
// Change-signal (timestamp/counter) that forces the current run to stop.
controlStopResponding?: number
isWorkflow: boolean
notify: Notify
// Lets the parent render its own stop control; null means nothing to stop.
onRunControlChange?: (control: { onStop: () => Promise<void> | void, isStopping: boolean } | null) => void
}
// Public surface returned by useResultRunState. State values come paired with
// ref-backed getters (getCompletionRes / getWorkflowProcessData) so streaming
// callbacks can read the latest value synchronously.
export type ResultRunStateController = {
abortControllerRef: MutableRefObject<AbortController | null>
clearMoreLikeThis: () => void
completionRes: string
controlClearMoreLikeThis: number
currentTaskId: string | null
feedback: FeedbackType
getCompletionRes: () => string
getWorkflowProcessData: () => WorkflowProcess | undefined
handleFeedback: (feedback: FeedbackType) => Promise<void>
handleStop: () => Promise<void>
isResponding: boolean
isStopping: boolean
messageId: string | null
prepareForNewRun: () => void
resetRunState: () => void
setCompletionRes: (res: string) => void
setCurrentTaskId: Dispatch<SetStateAction<string | null>>
setIsStopping: Dispatch<SetStateAction<boolean>>
setMessageId: Dispatch<SetStateAction<string | null>>
setRespondingFalse: () => void
setRespondingTrue: () => void
setWorkflowProcessData: (data: WorkflowProcess | undefined) => void
workflowProcessData: WorkflowProcess | undefined
}
// Reducer backing the run-control slice. 'reset' clears both fields at once;
// the two setter actions accept either a plain value or a functional updater,
// mirroring React's SetStateAction semantics.
const runControlReducer = (state: RunControlState, action: RunControlAction): RunControlState => {
  if (action.type === 'reset') {
    return {
      currentTaskId: null,
      isStopping: false,
    }
  }
  if (action.type === 'setCurrentTaskId') {
    const nextTaskId = typeof action.value === 'function' ? action.value(state.currentTaskId) : action.value
    return { ...state, currentTaskId: nextTaskId }
  }
  const nextIsStopping = typeof action.value === 'function' ? action.value(state.isStopping) : action.value
  return { ...state, isStopping: nextIsStopping }
}
/**
 * Centralizes run-lifecycle state for the text-generation Result panel:
 * responding flag, streamed completion text, workflow trace data, message
 * feedback, and stop/abort control for the in-flight task.
 */
export const useResultRunState = ({
appId,
appSourceType,
controlStopResponding,
isWorkflow,
notify,
onRunControlChange,
}: UseResultRunStateOptions): ResultRunStateController => {
const [isResponding, { setTrue: setRespondingTrue, setFalse: setRespondingFalse }] = useBoolean(false)
// State + ref pairs: state drives rendering; the ref lets streaming
// callbacks read the latest value synchronously via the getters below.
const [completionResState, setCompletionResState] = useState<string>('')
const completionResRef = useRef<string>('')
const [workflowProcessDataState, setWorkflowProcessDataState] = useState<WorkflowProcess>()
const workflowProcessDataRef = useRef<WorkflowProcess | undefined>(undefined)
const [messageId, setMessageId] = useState<string | null>(null)
const [feedback, setFeedback] = useState<FeedbackType>({
rating: null,
})
// Timestamp used as a change signal to clear "more like this" results.
const [controlClearMoreLikeThis, setControlClearMoreLikeThis] = useState(0)
const abortControllerRef = useRef<AbortController | null>(null)
// currentTaskId and isStopping share one reducer so 'reset' clears both atomically.
const [{ currentTaskId, isStopping }, dispatchRunControl] = useReducer(runControlReducer, {
currentTaskId: null,
isStopping: false,
})
const setCurrentTaskId = useCallback<Dispatch<SetStateAction<string | null>>>((value) => {
dispatchRunControl({
type: 'setCurrentTaskId',
value,
})
}, [])
const setIsStopping = useCallback<Dispatch<SetStateAction<boolean>>>((value) => {
dispatchRunControl({
type: 'setIsStopping',
value,
})
}, [])
// Writes keep the ref mirror in sync with state.
const setCompletionRes = useCallback((res: string) => {
completionResRef.current = res
setCompletionResState(res)
}, [])
const getCompletionRes = useCallback(() => completionResRef.current, [])
const setWorkflowProcessData = useCallback((data: WorkflowProcess | undefined) => {
workflowProcessDataRef.current = data
setWorkflowProcessDataState(data)
}, [])
const getWorkflowProcessData = useCallback(() => workflowProcessDataRef.current, [])
// Clears task/stop state, drops the abort controller, and tells the parent
// there is nothing left to stop.
const resetRunState = useCallback(() => {
dispatchRunControl({ type: 'reset' })
abortControllerRef.current = null
onRunControlChange?.(null)
}, [onRunControlChange])
// Resets all per-run outputs before starting a fresh send.
const prepareForNewRun = useCallback(() => {
setMessageId(null)
setFeedback({ rating: null })
setCompletionRes('')
setWorkflowProcessData(undefined)
resetRunState()
}, [resetRunState, setCompletionRes, setWorkflowProcessData])
// Persists feedback for the current message, then mirrors it locally.
const handleFeedback = useCallback(async (nextFeedback: FeedbackType) => {
await updateFeedback({
url: `/messages/${messageId}/feedbacks`,
body: {
rating: nextFeedback.rating,
content: nextFeedback.content,
},
}, appSourceType, appId)
setFeedback(nextFeedback)
}, [appId, appSourceType, messageId])
// Asks the server to stop the running task, then aborts the local request.
// isStopping guards against concurrent stop attempts.
const handleStop = useCallback(async () => {
if (!currentTaskId || isStopping)
return
setIsStopping(true)
try {
if (isWorkflow)
await stopWorkflowMessage(appId!, currentTaskId, appSourceType, appId || '')
else
await stopChatMessageResponding(appId!, currentTaskId, appSourceType, appId || '')
abortControllerRef.current?.abort()
}
catch (error) {
const message = error instanceof Error ? error.message : String(error)
notify({ type: 'error', message })
}
finally {
setIsStopping(false)
}
}, [appId, appSourceType, currentTaskId, isStopping, isWorkflow, notify, setIsStopping])
const clearMoreLikeThis = useCallback(() => {
setControlClearMoreLikeThis(Date.now())
}, [])
// External stop signal: abort the in-flight request and reset run state.
// The returned cleanup also aborts on unmount or when the signal changes.
useEffect(() => {
const abortCurrentRequest = () => {
abortControllerRef.current?.abort()
}
if (controlStopResponding) {
abortCurrentRequest()
setRespondingFalse()
resetRunState()
}
return abortCurrentRequest
}, [controlStopResponding, resetRunState, setRespondingFalse])
// Publishes the current stop control to the parent while a task is running.
useEffect(() => {
if (!onRunControlChange)
return
if (isResponding && currentTaskId) {
onRunControlChange({
onStop: handleStop,
isStopping,
})
return
}
onRunControlChange(null)
}, [currentTaskId, handleStop, isResponding, isStopping, onRunControlChange])
return {
abortControllerRef,
clearMoreLikeThis,
completionRes: completionResState,
controlClearMoreLikeThis,
currentTaskId,
feedback,
getCompletionRes,
getWorkflowProcessData,
handleFeedback,
handleStop,
isResponding,
isStopping,
messageId,
prepareForNewRun,
resetRunState,
setCompletionRes,
setCurrentTaskId,
setIsStopping,
setMessageId,
setRespondingFalse,
setRespondingTrue,
setWorkflowProcessData,
workflowProcessData: workflowProcessDataState,
}
}

View File

@ -0,0 +1,230 @@
import type { ResultInputValue } from '../result-request'
import type { ResultRunStateController } from './use-result-run-state'
import type { PromptConfig } from '@/models/debug'
import type { AppSourceType } from '@/service/share'
import type { VisionFile, VisionSettings } from '@/types/app'
import { useCallback, useEffect, useRef } from 'react'
import { TEXT_GENERATION_TIMEOUT_MS } from '@/config'
import {
sendCompletionMessage,
sendWorkflowMessage,
} from '@/service/share'
import { sleep } from '@/utils'
import { buildResultRequestData, validateResultRequest } from '../result-request'
import { createWorkflowStreamHandlers } from '../workflow-stream-handlers'
// Toast notifier for errors/info/warnings surfaced during a run.
type Notify = (payload: { type: 'error' | 'info' | 'warning', message: string }) => void
// i18next-style translate function.
type Translate = (key: string, options?: Record<string, unknown>) => string
type UseResultSenderOptions = {
appId?: string
appSourceType: AppSourceType
completionFiles: VisionFile[]
// Change-signals (timestamps/counters) that trigger a retry / a send.
controlRetry?: number
controlSend?: number
inputs: Record<string, ResultInputValue>
isCallBatchAPI: boolean
isPC: boolean
isWorkflow: boolean
notify: Notify
// Invoked when the run ends: final text, batch task id, and success flag.
onCompleted: (completionRes: string, taskId?: number, success?: boolean) => void
onRunStart: () => void
onShowRes: () => void
promptConfig: PromptConfig | null
runState: ResultRunStateController
t: Translate
taskId?: number
visionConfig: VisionSettings
}
// Surfaces a request failure through the toast notifier, normalizing
// non-Error throwables to their string form.
const logRequestError = (notify: Notify, error: unknown) => {
  let message: string
  if (error instanceof Error)
    message = error.message
  else
    message = String(error)
  notify({ type: 'error', message })
}
/**
 * Owns the send pipeline for the Result panel: validates the request, builds
 * the payload, starts the completion/workflow stream, and wires stream events
 * into the shared run state. Also reacts to the external controlSend /
 * controlRetry change-signals.
 */
export const useResultSender = ({
appId,
appSourceType,
completionFiles,
controlRetry,
controlSend,
inputs,
isCallBatchAPI,
isPC,
isWorkflow,
notify,
onCompleted,
onRunStart,
onShowRes,
promptConfig,
runState,
t,
taskId,
visionConfig,
}: UseResultSenderOptions) => {
const { clearMoreLikeThis } = runState
// Returns false when the send was rejected (already responding or invalid
// input); true once a stream has been started.
const handleSend = useCallback(async () => {
if (runState.isResponding) {
notify({ type: 'info', message: t('errorMessage.waitForResponse', { ns: 'appDebug' }) })
return false
}
const validation = validateResultRequest({
completionFiles,
inputs,
isCallBatchAPI,
promptConfig,
t,
})
if (!validation.canSend) {
notify(validation.notification!)
return false
}
const data = buildResultRequestData({
completionFiles,
inputs,
promptConfig,
visionConfig,
})
runState.prepareForNewRun()
if (!isPC) {
onShowRes()
onRunStart()
}
runState.setRespondingTrue()
// Closure flags shared with the stream handlers below:
// isEnd — a terminal event already fired; isTimeout — the watchdog fired first.
let isEnd = false
let isTimeout = false
let completionChunks: string[] = []
let tempMessageId = ''
// Watchdog: if nothing terminal happened before the timeout, report the
// partial result as a failed run. Late stream events then check isTimeout.
void (async () => {
await sleep(TEXT_GENERATION_TIMEOUT_MS)
if (!isEnd) {
runState.setRespondingFalse()
onCompleted(runState.getCompletionRes(), taskId, false)
runState.resetRunState()
isTimeout = true
}
})()
if (isWorkflow) {
const otherOptions = createWorkflowStreamHandlers({
getCompletionRes: runState.getCompletionRes,
getWorkflowProcessData: runState.getWorkflowProcessData,
isTimedOut: () => isTimeout,
markEnded: () => {
isEnd = true
},
notify,
onCompleted,
resetRunState: runState.resetRunState,
setCompletionRes: runState.setCompletionRes,
setCurrentTaskId: runState.setCurrentTaskId,
setIsStopping: runState.setIsStopping,
setMessageId: runState.setMessageId,
setRespondingFalse: runState.setRespondingFalse,
setWorkflowProcessData: runState.setWorkflowProcessData,
t,
taskId,
})
void sendWorkflowMessage(data, otherOptions, appSourceType, appId).catch((error) => {
runState.setRespondingFalse()
runState.resetRunState()
logRequestError(notify, error)
})
return true
}
void sendCompletionMessage(data, {
onData: (chunk, _isFirstMessage, { messageId, taskId: nextTaskId }) => {
tempMessageId = messageId
// Only adopt the first non-empty task id the stream reports.
if (nextTaskId && nextTaskId.trim() !== '')
runState.setCurrentTaskId(prev => prev ?? nextTaskId)
completionChunks.push(chunk)
runState.setCompletionRes(completionChunks.join(''))
},
onCompleted: () => {
// After a timeout the failed completion was already reported; only warn.
if (isTimeout) {
notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
return
}
runState.setRespondingFalse()
runState.resetRunState()
runState.setMessageId(tempMessageId)
onCompleted(runState.getCompletionRes(), taskId, true)
isEnd = true
},
onMessageReplace: (messageReplace) => {
// Server replaces the whole answer so far (e.g. moderation rewrite).
completionChunks = [messageReplace.answer]
runState.setCompletionRes(completionChunks.join(''))
},
onError: () => {
if (isTimeout) {
notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
return
}
runState.setRespondingFalse()
runState.resetRunState()
onCompleted(runState.getCompletionRes(), taskId, false)
isEnd = true
},
getAbortController: (abortController) => {
runState.abortControllerRef.current = abortController
},
}, appSourceType, appId)
return true
}, [
appId,
appSourceType,
completionFiles,
inputs,
isCallBatchAPI,
isPC,
isWorkflow,
notify,
onCompleted,
onRunStart,
onShowRes,
promptConfig,
runState,
t,
taskId,
visionConfig,
])
// Ref indirection so the signal effects below fire on signal changes only,
// not whenever handleSend's identity changes.
const handleSendRef = useRef(handleSend)
useEffect(() => {
handleSendRef.current = handleSend
}, [handleSend])
useEffect(() => {
if (!controlSend)
return
void handleSendRef.current()
clearMoreLikeThis()
}, [clearMoreLikeThis, controlSend])
useEffect(() => {
if (!controlRetry)
return
void handleSendRef.current()
}, [controlRetry])
return {
handleSend,
}
}

View File

@ -1,46 +1,18 @@
'use client'
import type { FC } from 'react'
import type { FeedbackType } from '@/app/components/base/chat/chat/type'
import type { WorkflowProcess } from '@/app/components/base/chat/types'
import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { PromptConfig } from '@/models/debug'
import type { SiteInfo } from '@/models/share'
import type {
IOtherOptions,
} from '@/service/base'
import type { AppSourceType } from '@/service/share'
import type { VisionFile, VisionSettings } from '@/types/app'
import { RiLoader2Line } from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { t } from 'i18next'
import { produce } from 'immer'
import * as React from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import TextGenerationRes from '@/app/components/app/text-generate/item'
import Button from '@/app/components/base/button'
import {
getFilesInLogs,
getProcessedFiles,
} from '@/app/components/base/file-uploader/utils'
import { StopCircle } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
import Loading from '@/app/components/base/loading'
import Toast from '@/app/components/base/toast'
import NoData from '@/app/components/share/text-generation/no-data'
import { NodeRunningStatus, WorkflowRunningStatus } from '@/app/components/workflow/types'
import { TEXT_GENERATION_TIMEOUT_MS } from '@/config'
import {
sseGet,
} from '@/service/base'
import {
AppSourceType,
sendCompletionMessage,
sendWorkflowMessage,
stopChatMessageResponding,
stopWorkflowMessage,
updateFeedback,
} from '@/service/share'
import { TransferMethod } from '@/types/app'
import { sleep } from '@/utils'
import { formatBooleanInputs } from '@/utils/model-config'
import { useResultRunState } from './hooks/use-result-run-state'
import { useResultSender } from './hooks/use-result-sender'
export type IResultProps = {
isWorkflow: boolean
@ -95,554 +67,52 @@ const Result: FC<IResultProps> = ({
onRunControlChange,
hideInlineStopButton = false,
}) => {
const [isResponding, { setTrue: setRespondingTrue, setFalse: setRespondingFalse }] = useBoolean(false)
const [completionRes, doSetCompletionRes] = useState<string>('')
const completionResRef = useRef<string>('')
const setCompletionRes = (res: string) => {
completionResRef.current = res
doSetCompletionRes(res)
}
const getCompletionRes = () => completionResRef.current
const [workflowProcessData, doSetWorkflowProcessData] = useState<WorkflowProcess>()
const workflowProcessDataRef = useRef<WorkflowProcess | undefined>(undefined)
const setWorkflowProcessData = useCallback((data: WorkflowProcess | undefined) => {
workflowProcessDataRef.current = data
doSetWorkflowProcessData(data)
}, [])
const getWorkflowProcessData = () => workflowProcessDataRef.current
const [currentTaskId, setCurrentTaskId] = useState<string | null>(null)
const [isStopping, setIsStopping] = useState(false)
const abortControllerRef = useRef<AbortController | null>(null)
const resetRunState = useCallback(() => {
setCurrentTaskId(null)
setIsStopping(false)
abortControllerRef.current = null
onRunControlChange?.(null)
}, [onRunControlChange])
useEffect(() => {
const abortCurrentRequest = () => {
abortControllerRef.current?.abort()
}
if (controlStopResponding) {
abortCurrentRequest()
setRespondingFalse()
resetRunState()
}
return abortCurrentRequest
}, [controlStopResponding, resetRunState, setRespondingFalse])
const { notify } = Toast
const isNoData = !completionRes
const [messageId, setMessageId] = useState<string | null>(null)
const [feedback, setFeedback] = useState<FeedbackType>({
rating: null,
const runState = useResultRunState({
appId,
appSourceType,
controlStopResponding,
isWorkflow,
notify,
onRunControlChange,
})
const handleFeedback = async (feedback: FeedbackType) => {
await updateFeedback({ url: `/messages/${messageId}/feedbacks`, body: { rating: feedback.rating, content: feedback.content } }, appSourceType, appId)
setFeedback(feedback)
}
const { handleSend } = useResultSender({
appId,
appSourceType,
completionFiles,
controlRetry,
controlSend,
inputs,
isCallBatchAPI,
isPC,
isWorkflow,
notify,
onCompleted,
onRunStart,
onShowRes,
promptConfig,
runState,
t,
taskId,
visionConfig,
})
const logError = (message: string) => {
notify({ type: 'error', message })
}
const handleStop = useCallback(async () => {
if (!currentTaskId || isStopping)
return
setIsStopping(true)
try {
if (isWorkflow)
await stopWorkflowMessage(appId!, currentTaskId, appSourceType, appId || '')
else
await stopChatMessageResponding(appId!, currentTaskId, appSourceType, appId || '')
abortControllerRef.current?.abort()
}
catch (error) {
const message = error instanceof Error ? error.message : String(error)
notify({ type: 'error', message })
}
finally {
setIsStopping(false)
}
}, [appId, currentTaskId, appSourceType, isStopping, isWorkflow, notify])
useEffect(() => {
if (!onRunControlChange)
return
if (isResponding && currentTaskId) {
onRunControlChange({
onStop: handleStop,
isStopping,
})
}
else {
onRunControlChange(null)
}
}, [currentTaskId, handleStop, isResponding, isStopping, onRunControlChange])
const checkCanSend = () => {
// batch will check outer
if (isCallBatchAPI)
return true
const prompt_variables = promptConfig?.prompt_variables
if (!prompt_variables || prompt_variables?.length === 0) {
if (completionFiles.some(item => item.transfer_method === TransferMethod.local_file && !item.upload_file_id)) {
notify({ type: 'info', message: t('errorMessage.waitForFileUpload', { ns: 'appDebug' }) })
return false
}
return true
}
let hasEmptyInput = ''
const requiredVars = prompt_variables?.filter(({ key, name, required, type }) => {
if (type === 'boolean' || type === 'checkbox')
return false // boolean/checkbox input is not required
const res = (!key || !key.trim()) || (!name || !name.trim()) || (required || required === undefined || required === null)
return res
}) || [] // compatible with old version
requiredVars.forEach(({ key, name }) => {
if (hasEmptyInput)
return
if (!inputs[key])
hasEmptyInput = name
})
if (hasEmptyInput) {
logError(t('errorMessage.valueOfVarRequired', { ns: 'appDebug', key: hasEmptyInput }))
return false
}
if (completionFiles.some(item => item.transfer_method === TransferMethod.local_file && !item.upload_file_id)) {
notify({ type: 'info', message: t('errorMessage.waitForFileUpload', { ns: 'appDebug' }) })
return false
}
return !hasEmptyInput
}
const handleSend = async () => {
if (isResponding) {
notify({ type: 'info', message: t('errorMessage.waitForResponse', { ns: 'appDebug' }) })
return false
}
if (!checkCanSend())
return
// Process inputs: convert file entities to API format
const processedInputs = { ...formatBooleanInputs(promptConfig?.prompt_variables, inputs) }
promptConfig?.prompt_variables.forEach((variable) => {
const value = processedInputs[variable.key]
if (variable.type === 'file' && value && typeof value === 'object' && !Array.isArray(value)) {
// Convert single file entity to API format
processedInputs[variable.key] = getProcessedFiles([value as FileEntity])[0]
}
else if (variable.type === 'file-list' && Array.isArray(value) && value.length > 0) {
// Convert file entity array to API format
processedInputs[variable.key] = getProcessedFiles(value as FileEntity[])
}
})
const data: Record<string, any> = {
inputs: processedInputs,
}
if (visionConfig.enabled && completionFiles && completionFiles?.length > 0) {
data.files = completionFiles.map((item) => {
if (item.transfer_method === TransferMethod.local_file) {
return {
...item,
url: '',
}
}
return item
})
}
setMessageId(null)
setFeedback({
rating: null,
})
setCompletionRes('')
setWorkflowProcessData(undefined)
resetRunState()
let res: string[] = []
let tempMessageId = ''
if (!isPC) {
onShowRes()
onRunStart()
}
setRespondingTrue()
let isEnd = false
let isTimeout = false;
(async () => {
await sleep(TEXT_GENERATION_TIMEOUT_MS)
if (!isEnd) {
setRespondingFalse()
onCompleted(getCompletionRes(), taskId, false)
resetRunState()
isTimeout = true
}
})()
if (isWorkflow) {
const otherOptions: IOtherOptions = {
isPublicAPI: appSourceType === AppSourceType.webApp,
onWorkflowStarted: ({ workflow_run_id, task_id }) => {
const workflowProcessData = getWorkflowProcessData()
if (workflowProcessData && workflowProcessData.tracing.length > 0) {
setWorkflowProcessData(produce(workflowProcessData, (draft) => {
draft.expand = true
draft.status = WorkflowRunningStatus.Running
}))
}
else {
tempMessageId = workflow_run_id
setCurrentTaskId(task_id || null)
setIsStopping(false)
setWorkflowProcessData({
status: WorkflowRunningStatus.Running,
tracing: [],
expand: false,
resultText: '',
})
}
},
onIterationStart: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
draft.tracing!.push({
...data,
status: NodeRunningStatus.Running,
expand: true,
})
}))
},
onIterationNext: () => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const iterations = draft.tracing.find(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
iterations?.details!.push([])
}))
},
onIterationFinish: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const iterationsIndex = draft.tracing.findIndex(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
draft.tracing[iterationsIndex] = {
...data,
expand: !!data.error,
}
}))
},
onLoopStart: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
draft.tracing!.push({
...data,
status: NodeRunningStatus.Running,
expand: true,
})
}))
},
onLoopNext: () => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const loops = draft.tracing.find(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
loops?.details!.push([])
}))
},
onLoopFinish: ({ data }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = true
const loopsIndex = draft.tracing.findIndex(item => item.node_id === data.node_id
&& (item.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || item.parallel_id === data.execution_metadata?.parallel_id))!
draft.tracing[loopsIndex] = {
...data,
expand: !!data.error,
}
}))
},
onNodeStarted: ({ data }) => {
if (data.iteration_id)
return
if (data.loop_id)
return
const workflowProcessData = getWorkflowProcessData()
setWorkflowProcessData(produce(workflowProcessData!, (draft) => {
if (draft.tracing.length > 0) {
const currentIndex = draft.tracing.findIndex(item => item.node_id === data.node_id)
if (currentIndex > -1) {
draft.expand = true
draft.tracing![currentIndex] = {
...data,
status: NodeRunningStatus.Running,
expand: true,
}
}
else {
draft.expand = true
draft.tracing.push({
...data,
status: NodeRunningStatus.Running,
expand: true,
})
}
}
else {
draft.expand = true
draft.tracing!.push({
...data,
status: NodeRunningStatus.Running,
expand: true,
})
}
}))
},
onNodeFinished: ({ data }) => {
if (data.iteration_id)
return
if (data.loop_id)
return
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
const currentIndex = draft.tracing!.findIndex(trace => trace.node_id === data.node_id
&& (trace.execution_metadata?.parallel_id === data.execution_metadata?.parallel_id || trace.parallel_id === data.execution_metadata?.parallel_id))
if (currentIndex > -1 && draft.tracing) {
draft.tracing[currentIndex] = {
...(draft.tracing[currentIndex].extras
? { extras: draft.tracing[currentIndex].extras }
: {}),
...data,
expand: !!data.error,
}
}
}))
},
onWorkflowFinished: ({ data }) => {
if (isTimeout) {
notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
return
}
const workflowStatus = data.status as WorkflowRunningStatus | undefined
const markNodesStopped = (traces?: WorkflowProcess['tracing']) => {
if (!traces)
return
const markTrace = (trace: WorkflowProcess['tracing'][number]) => {
if ([NodeRunningStatus.Running, NodeRunningStatus.Waiting].includes(trace.status as NodeRunningStatus))
trace.status = NodeRunningStatus.Stopped
trace.details?.forEach(detailGroup => detailGroup.forEach(markTrace))
trace.retryDetail?.forEach(markTrace)
trace.parallelDetail?.children?.forEach(markTrace)
}
traces.forEach(markTrace)
}
if (workflowStatus === WorkflowRunningStatus.Stopped) {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.status = WorkflowRunningStatus.Stopped
markNodesStopped(draft.tracing)
}))
setRespondingFalse()
resetRunState()
onCompleted(getCompletionRes(), taskId, false)
isEnd = true
return
}
if (data.error) {
notify({ type: 'error', message: data.error })
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.status = WorkflowRunningStatus.Failed
markNodesStopped(draft.tracing)
}))
setRespondingFalse()
resetRunState()
onCompleted(getCompletionRes(), taskId, false)
isEnd = true
return
}
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.status = WorkflowRunningStatus.Succeeded
draft.files = getFilesInLogs(data.outputs || []) as any[]
}))
if (!data.outputs) {
setCompletionRes('')
}
else {
setCompletionRes(data.outputs)
const isStringOutput = Object.keys(data.outputs).length === 1 && typeof data.outputs[Object.keys(data.outputs)[0]] === 'string'
if (isStringOutput) {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.resultText = data.outputs[Object.keys(data.outputs)[0]]
}))
}
}
setRespondingFalse()
resetRunState()
setMessageId(tempMessageId)
onCompleted(getCompletionRes(), taskId, true)
isEnd = true
},
onTextChunk: (params) => {
const { data: { text } } = params
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.resultText += text
}))
},
onTextReplace: (params) => {
const { data: { text } } = params
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.resultText = text
}))
},
onHumanInputRequired: ({ data: humanInputRequiredData }) => {
const workflowProcessData = getWorkflowProcessData()
setWorkflowProcessData(produce(workflowProcessData!, (draft) => {
if (!draft.humanInputFormDataList) {
draft.humanInputFormDataList = [humanInputRequiredData]
}
else {
const currentFormIndex = draft.humanInputFormDataList.findIndex(item => item.node_id === humanInputRequiredData.node_id)
if (currentFormIndex > -1) {
draft.humanInputFormDataList[currentFormIndex] = humanInputRequiredData
}
else {
draft.humanInputFormDataList.push(humanInputRequiredData)
}
}
const currentIndex = draft.tracing!.findIndex(item => item.node_id === humanInputRequiredData.node_id)
if (currentIndex > -1) {
draft.tracing![currentIndex].status = NodeRunningStatus.Paused
}
}))
},
onHumanInputFormFilled: ({ data: humanInputFilledFormData }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
if (draft.humanInputFormDataList?.length) {
const currentFormIndex = draft.humanInputFormDataList.findIndex(item => item.node_id === humanInputFilledFormData.node_id)
draft.humanInputFormDataList.splice(currentFormIndex, 1)
}
if (!draft.humanInputFilledFormDataList) {
draft.humanInputFilledFormDataList = [humanInputFilledFormData]
}
else {
draft.humanInputFilledFormDataList.push(humanInputFilledFormData)
}
}))
},
onHumanInputFormTimeout: ({ data: humanInputFormTimeoutData }) => {
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
if (draft.humanInputFormDataList?.length) {
const currentFormIndex = draft.humanInputFormDataList.findIndex(item => item.node_id === humanInputFormTimeoutData.node_id)
draft.humanInputFormDataList[currentFormIndex].expiration_time = humanInputFormTimeoutData.expiration_time
}
}))
},
onWorkflowPaused: ({ data: workflowPausedData }) => {
tempMessageId = workflowPausedData.workflow_run_id
const url = `/workflow/${workflowPausedData.workflow_run_id}/events`
sseGet(
url,
{},
otherOptions,
)
setWorkflowProcessData(produce(getWorkflowProcessData()!, (draft) => {
draft.expand = false
draft.status = WorkflowRunningStatus.Paused
}))
},
}
sendWorkflowMessage(
data,
otherOptions,
appSourceType,
appId,
).catch((error) => {
setRespondingFalse()
resetRunState()
const message = error instanceof Error ? error.message : String(error)
notify({ type: 'error', message })
})
}
else {
sendCompletionMessage(data, {
onData: (data: string, _isFirstMessage: boolean, { messageId, taskId }) => {
tempMessageId = messageId
if (taskId && typeof taskId === 'string' && taskId.trim() !== '')
setCurrentTaskId(prev => prev ?? taskId)
res.push(data)
setCompletionRes(res.join(''))
},
onCompleted: () => {
if (isTimeout) {
notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
return
}
setRespondingFalse()
resetRunState()
setMessageId(tempMessageId)
onCompleted(getCompletionRes(), taskId, true)
isEnd = true
},
onMessageReplace: (messageReplace) => {
res = [messageReplace.answer]
setCompletionRes(res.join(''))
},
onError() {
if (isTimeout) {
notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
return
}
setRespondingFalse()
resetRunState()
onCompleted(getCompletionRes(), taskId, false)
isEnd = true
},
getAbortController: (abortController) => {
abortControllerRef.current = abortController
},
}, appSourceType, appId)
}
}
const [controlClearMoreLikeThis, setControlClearMoreLikeThis] = useState(0)
useEffect(() => {
if (controlSend) {
handleSend()
setControlClearMoreLikeThis(Date.now())
}
}, [controlSend])
useEffect(() => {
if (controlRetry)
handleSend()
}, [controlRetry])
const isNoData = !runState.completionRes
const renderTextGenerationRes = () => (
<>
{!hideInlineStopButton && isResponding && currentTaskId && (
{!hideInlineStopButton && runState.isResponding && runState.currentTaskId && (
<div className={`mb-3 flex ${isPC ? 'justify-end' : 'justify-center'}`}>
<Button
variant="secondary"
disabled={isStopping}
onClick={handleStop}
disabled={runState.isStopping}
onClick={runState.handleStop}
>
{
isStopping
? <RiLoader2Line className="mr-[5px] h-3.5 w-3.5 animate-spin" />
: <StopCircle className="mr-[5px] h-3.5 w-3.5" />
runState.isStopping
? <span aria-hidden className="i-ri-loader-2-line mr-[5px] h-3.5 w-3.5 animate-spin" />
: <span aria-hidden className="i-ri-stop-circle-fill mr-[5px] h-3.5 w-3.5" />
}
<span className="text-xs font-normal">{t('operation.stopResponding', { ns: 'appDebug' })}</span>
</Button>
@ -650,15 +120,15 @@ const Result: FC<IResultProps> = ({
)}
<TextGenerationRes
isWorkflow={isWorkflow}
workflowProcessData={workflowProcessData}
workflowProcessData={runState.workflowProcessData}
isError={isError}
onRetry={handleSend}
content={completionRes}
messageId={messageId}
content={runState.completionRes}
messageId={runState.messageId}
isInWebApp
moreLikeThis={moreLikeThisEnabled}
onFeedback={handleFeedback}
feedback={feedback}
onFeedback={runState.handleFeedback}
feedback={runState.feedback}
onSave={handleSaveMessage}
isMobile={isMobile}
appSourceType={appSourceType}
@ -666,7 +136,7 @@ const Result: FC<IResultProps> = ({
// isLoading={isCallBatchAPI ? (!completionRes && isResponding) : false}
isLoading={false}
taskId={isCallBatchAPI ? ((taskId as number) < 10 ? `0${taskId}` : `${taskId}`) : undefined}
controlClearMoreLikeThis={controlClearMoreLikeThis}
controlClearMoreLikeThis={runState.controlClearMoreLikeThis}
isShowTextToSpeech={isShowTextToSpeech}
hideProcessDetail
siteInfo={siteInfo}
@ -677,7 +147,7 @@ const Result: FC<IResultProps> = ({
return (
<>
{!isCallBatchAPI && !isWorkflow && (
(isResponding && !completionRes)
(runState.isResponding && !runState.completionRes)
? (
<div className="flex h-full w-full items-center justify-center">
<Loading type="area" />
@ -692,13 +162,13 @@ const Result: FC<IResultProps> = ({
)
)}
{!isCallBatchAPI && isWorkflow && (
(isResponding && !workflowProcessData)
(runState.isResponding && !runState.workflowProcessData)
? (
<div className="flex h-full w-full items-center justify-center">
<Loading type="area" />
</div>
)
: !workflowProcessData
: !runState.workflowProcessData
? <NoData />
: renderTextGenerationRes()
)}

View File

@ -0,0 +1,156 @@
import type { FileEntity } from '@/app/components/base/file-uploader/types'
import type { PromptConfig } from '@/models/debug'
import type { VisionFile, VisionSettings } from '@/types/app'
import { getProcessedFiles } from '@/app/components/base/file-uploader/utils'
import { TransferMethod } from '@/types/app'
import { formatBooleanInputs } from '@/utils/model-config'
// Union of every value shape a prompt-variable input can hold in the result
// form: plain text, primitives, select arrays, structured objects, and
// single/multiple file attachments. `undefined` means "not filled in yet".
export type ResultInputValue
  = | string
  | boolean
  | number
  | string[]
  | Record<string, unknown>
  | FileEntity
  | FileEntity[]
  | undefined

// i18n translate function (subset of i18next's `t` signature).
type Translate = (key: string, options?: Record<string, unknown>) => string

// Outcome of pre-send validation; `notification` is present only when the
// request is blocked and should be surfaced to the user.
type ValidationResult = {
  canSend: boolean
  notification?: {
    type: 'error' | 'info'
    message: string
  }
}

// Inputs needed to validate a run request before sending.
type ValidateResultRequestParams = {
  completionFiles: VisionFile[]
  inputs: Record<string, ResultInputValue>
  isCallBatchAPI: boolean
  promptConfig: PromptConfig | null
  t: Translate
}

// Inputs needed to assemble the final request payload.
type BuildResultRequestDataParams = {
  completionFiles: VisionFile[]
  inputs: Record<string, ResultInputValue>
  promptConfig: PromptConfig | null
  visionConfig: VisionSettings
}
/**
 * Decide whether a prompt variable still lacks a usable value.
 *
 * Nullish values are always missing; file lists need at least one entry;
 * text-like variables are missing when they are blank strings. Any other
 * combination counts as present.
 */
const isMissingRequiredInput = (
  variable: PromptConfig['prompt_variables'][number],
  value: ResultInputValue,
) => {
  if (value == null)
    return true
  switch (variable.type) {
    case 'file-list':
      // A file list must be an array with at least one entry.
      return !Array.isArray(value) || value.length === 0
    case 'string':
    case 'paragraph':
    case 'number':
    case 'json_object':
    case 'select':
      // Only blank strings are missing; non-string values pass through.
      return typeof value === 'string' && value.trim() === ''
    default:
      return false
  }
}
// True when any attached file was picked from the local machine but has not
// finished uploading (no `upload_file_id` assigned by the server yet).
const hasPendingLocalFiles = (completionFiles: VisionFile[]) => {
  for (const file of completionFiles) {
    if (file.transfer_method === TransferMethod.local_file && !file.upload_file_id)
      return true
  }
  return false
}
/**
 * Validate a generation request before sending.
 *
 * Checks, in order:
 * 1. Batch runs validate their rows elsewhere, so they always pass here.
 * 2. Every required prompt variable must carry a usable value.
 * 3. Locally-picked files must have finished uploading.
 *
 * Returns `canSend: false` plus a user-facing notification when a check
 * fails. (Fix: the pending-local-files guard was previously duplicated in
 * both the "no variables" and "has variables" branches; it now runs once
 * after variable validation, preserving the original check order.)
 */
export const validateResultRequest = ({
  completionFiles,
  inputs,
  isCallBatchAPI,
  promptConfig,
  t,
}: ValidateResultRequestParams): ValidationResult => {
  // Batch mode builds and validates its payload through a separate path.
  if (isCallBatchAPI)
    return { canSend: true }
  const promptVariables = promptConfig?.prompt_variables
  if (promptVariables?.length) {
    // A variable is treated as required when its flag is truthy or unset, or
    // when its key/name is blank (legacy configs). Boolean-style inputs
    // always have a value, so they are exempt.
    const requiredVariables = promptVariables.filter(({ key, name, required, type }) => {
      if (type === 'boolean' || type === 'checkbox')
        return false
      return (!key || !key.trim()) || (!name || !name.trim()) || required === undefined || required === null || required
    })
    const missingRequiredVariable = requiredVariables.find(variable => isMissingRequiredInput(variable, inputs[variable.key]))?.name
    if (missingRequiredVariable) {
      return {
        canSend: false,
        notification: {
          type: 'error',
          message: t('errorMessage.valueOfVarRequired', {
            ns: 'appDebug',
            key: missingRequiredVariable,
          }),
        },
      }
    }
  }
  // Block sending while locally-selected files are still uploading.
  if (hasPendingLocalFiles(completionFiles)) {
    return {
      canSend: false,
      notification: {
        type: 'info',
        message: t('errorMessage.waitForFileUpload', { ns: 'appDebug' }),
      },
    }
  }
  return { canSend: true }
}
/**
 * Assemble the request payload for a generation run.
 *
 * - Normalizes boolean-typed prompt inputs via `formatBooleanInputs`.
 * - Converts `file` / `file-list` variables into their wire format with
 *   `getProcessedFiles`.
 * - Attaches a `files` array only when vision is enabled and files exist;
 *   locally-uploaded files are sent with an empty `url` (the server resolves
 *   them from their upload id instead).
 */
export const buildResultRequestData = ({
  completionFiles,
  inputs,
  promptConfig,
  visionConfig,
}: BuildResultRequestDataParams) => {
  const processedInputs = {
    ...formatBooleanInputs(promptConfig?.prompt_variables, inputs as Record<string, string | number | boolean | object>),
  }
  promptConfig?.prompt_variables.forEach((variable) => {
    const value = processedInputs[variable.key]
    // Single file: wrap in a one-element array for processing, then unwrap.
    if (variable.type === 'file' && value && typeof value === 'object' && !Array.isArray(value)) {
      processedInputs[variable.key] = getProcessedFiles([value as FileEntity])[0]
      return
    }
    if (variable.type === 'file-list' && Array.isArray(value) && value.length > 0)
      processedInputs[variable.key] = getProcessedFiles(value as FileEntity[])
  })
  return {
    inputs: processedInputs,
    ...(visionConfig.enabled && completionFiles.length > 0
      ? {
        files: completionFiles.map((item) => {
          // Local files are referenced by upload id, so the blob URL is cleared.
          if (item.transfer_method === TransferMethod.local_file)
            return { ...item, url: '' }
          return item
        }),
      }
      : {}),
  }
}

View File

@ -0,0 +1,404 @@
import type { Dispatch, SetStateAction } from 'react'
import type { WorkflowProcess } from '@/app/components/base/chat/types'
import type { IOtherOptions } from '@/service/base'
import type { HumanInputFormTimeoutData, NodeTracing, WorkflowFinishedResponse } from '@/types/workflow'
import { produce } from 'immer'
import { getFilesInLogs } from '@/app/components/base/file-uploader/utils'
import { NodeRunningStatus, WorkflowRunningStatus } from '@/app/components/workflow/types'
import { sseGet } from '@/service/base'
// Toast-style notifier used to surface stream errors/warnings.
type Notify = (payload: { type: 'error' | 'warning', message: string }) => void
// i18n translate function (subset of i18next's `t` signature).
type Translate = (key: string, options?: Record<string, unknown>) => string

// Dependencies injected by the result component. Getter callbacks read the
// latest state at event time (the SSE callbacks outlive a single render);
// setters write updates back into component state.
type CreateWorkflowStreamHandlersParams = {
  getCompletionRes: () => string
  getWorkflowProcessData: () => WorkflowProcess | undefined
  isTimedOut: () => boolean
  markEnded: () => void
  notify: Notify
  onCompleted: (completionRes: string, taskId?: number, success?: boolean) => void
  resetRunState: () => void
  setCompletionRes: (res: string) => void
  setCurrentTaskId: Dispatch<SetStateAction<string | null>>
  setIsStopping: Dispatch<SetStateAction<boolean>>
  setMessageId: Dispatch<SetStateAction<string | null>>
  setRespondingFalse: () => void
  setWorkflowProcessData: (data: WorkflowProcess | undefined) => void
  t: Translate
  // Batch-mode row index, forwarded unchanged to `onCompleted`.
  taskId?: number
}
// Fresh per-run workflow process snapshot for the result panel.
const createInitialWorkflowProcess = (): WorkflowProcess => ({
  status: WorkflowRunningStatus.Running,
  tracing: [],
  expand: false,
  resultText: '',
})

// Immutably apply `updater` to the current process state, seeding a fresh
// snapshot when none exists yet.
const updateWorkflowProcess = (
  current: WorkflowProcess | undefined,
  updater: (draft: WorkflowProcess) => void,
) => {
  const base = current ?? createInitialWorkflowProcess()
  return produce(base, updater)
}
// Match a trace entry against an incoming node event: same node id, and the
// incoming parallel id matches either the trace's execution metadata or its
// top-level `parallel_id`.
const matchParallelTrace = (trace: WorkflowProcess['tracing'][number], data: NodeTracing) => {
  if (trace.node_id !== data.node_id)
    return false
  const incomingParallelId = data.execution_metadata?.parallel_id
  return trace.execution_metadata?.parallel_id === incomingParallelId
    || trace.parallel_id === incomingParallelId
}

// Guarantee a non-empty details list (one empty round) for iteration/loop traces.
const ensureParallelTraceDetails = (details?: NodeTracing['details']) => {
  if (details && details.length > 0)
    return details
  return [[]]
}
// Record the start of an iteration/loop node as a new expanded trace entry.
const appendParallelStart = (current: WorkflowProcess | undefined, data: NodeTracing) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.expand = true
    const startedTrace = {
      ...data,
      details: ensureParallelTraceDetails(data.details),
      status: NodeRunningStatus.Running,
      expand: true,
    }
    draft.tracing.push(startedTrace)
  })
}

// Open a new round (empty detail group) on the matching iteration/loop trace.
const appendParallelNext = (current: WorkflowProcess | undefined, data: NodeTracing) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.expand = true
    const matchedTrace = draft.tracing.find(item => matchParallelTrace(item, data))
    if (!matchedTrace)
      return
    matchedTrace.details = ensureParallelTraceDetails(matchedTrace.details)
    matchedTrace.details.push([])
  })
}

// Replace the matching iteration/loop trace with its finished payload,
// leaving it expanded only when it ended with an error.
const finishParallelTrace = (current: WorkflowProcess | undefined, data: NodeTracing) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.expand = true
    const matchedIndex = draft.tracing.findIndex(item => matchParallelTrace(item, data))
    if (matchedIndex < 0)
      return
    draft.tracing[matchedIndex] = {
      ...data,
      expand: !!data.error,
    }
  })
}
// Insert or refresh a running top-level node trace. Children of iterations
// and loops are tracked inside their parent trace and skipped here.
const upsertWorkflowNode = (current: WorkflowProcess | undefined, data: NodeTracing) => {
  if (data.iteration_id || data.loop_id)
    return current
  return updateWorkflowProcess(current, (draft) => {
    draft.expand = true
    const runningTrace = {
      ...data,
      status: NodeRunningStatus.Running,
      expand: true,
    }
    const existingIndex = draft.tracing.findIndex(item => item.node_id === data.node_id)
    if (existingIndex < 0)
      draft.tracing.push(runningTrace)
    else
      draft.tracing[existingIndex] = runningTrace
  })
}

// Swap a running node trace for its finished payload, preserving any
// `extras` that were attached to the trace while it was running.
const finishWorkflowNode = (current: WorkflowProcess | undefined, data: NodeTracing) => {
  if (data.iteration_id || data.loop_id)
    return current
  return updateWorkflowProcess(current, (draft) => {
    const existingIndex = draft.tracing.findIndex(trace => matchParallelTrace(trace, data))
    if (existingIndex < 0)
      return
    const previousExtras = draft.tracing[existingIndex].extras
    draft.tracing[existingIndex] = {
      ...(previousExtras ? { extras: previousExtras } : {}),
      ...data,
      expand: !!data.error,
    }
  })
}
// Recursively flip any still-running or waiting node traces to Stopped,
// descending into detail groups, retry details, and parallel children.
const markNodesStopped = (traces?: WorkflowProcess['tracing']) => {
  if (!traces)
    return
  const stopIfActive = (trace: WorkflowProcess['tracing'][number]) => {
    const status = trace.status as NodeRunningStatus
    if (status === NodeRunningStatus.Running || status === NodeRunningStatus.Waiting)
      trace.status = NodeRunningStatus.Stopped
    trace.details?.forEach(detailGroup => detailGroup.forEach(stopIfActive))
    trace.retryDetail?.forEach(stopIfActive)
    trace.parallelDetail?.children?.forEach(stopIfActive)
  }
  traces.forEach(stopIfActive)
}
// Stamp the final workflow status; stopped/failed runs also stop any node
// traces that never finished so the UI does not show spinners forever.
const applyWorkflowFinishedState = (
  current: WorkflowProcess | undefined,
  status: WorkflowRunningStatus,
) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.status = status
    const wasInterrupted = status === WorkflowRunningStatus.Stopped
      || status === WorkflowRunningStatus.Failed
    if (wasInterrupted)
      markNodesStopped(draft.tracing)
  })
}

// Mark the run as succeeded and surface any files found in the outputs.
const applyWorkflowOutputs = (
  current: WorkflowProcess | undefined,
  outputs: WorkflowFinishedResponse['data']['outputs'],
) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.status = WorkflowRunningStatus.Succeeded
    draft.files = getFilesInLogs(outputs || []) as unknown as WorkflowProcess['files']
  })
}

// Append a streamed text chunk to the accumulated result text.
const appendResultText = (current: WorkflowProcess | undefined, text: string) => {
  return updateWorkflowProcess(current, (draft) => {
    const previousText = draft.resultText || ''
    draft.resultText = previousText + text
  })
}

// Replace the accumulated result text wholesale.
const replaceResultText = (current: WorkflowProcess | undefined, text: string) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.resultText = text
  })
}
// Upsert a pending human-input form (keyed by node id) and pause the
// corresponding node trace.
const updateHumanInputRequired = (
  current: WorkflowProcess | undefined,
  data: NonNullable<WorkflowProcess['humanInputFormDataList']>[number],
) => {
  return updateWorkflowProcess(current, (draft) => {
    const pendingForms = draft.humanInputFormDataList
    if (!pendingForms) {
      draft.humanInputFormDataList = [data]
    }
    else {
      const formIndex = pendingForms.findIndex(item => item.node_id === data.node_id)
      if (formIndex < 0)
        pendingForms.push(data)
      else
        pendingForms[formIndex] = data
    }
    const traceIndex = draft.tracing.findIndex(item => item.node_id === data.node_id)
    if (traceIndex > -1)
      draft.tracing[traceIndex].status = NodeRunningStatus.Paused
  })
}

// Move a form from the pending list to the filled list once submitted.
const updateHumanInputFilled = (
  current: WorkflowProcess | undefined,
  data: NonNullable<WorkflowProcess['humanInputFilledFormDataList']>[number],
) => {
  return updateWorkflowProcess(current, (draft) => {
    const pendingForms = draft.humanInputFormDataList
    if (pendingForms?.length) {
      const formIndex = pendingForms.findIndex(item => item.node_id === data.node_id)
      if (formIndex > -1)
        pendingForms.splice(formIndex, 1)
    }
    if (draft.humanInputFilledFormDataList)
      draft.humanInputFilledFormDataList.push(data)
    else
      draft.humanInputFilledFormDataList = [data]
  })
}

// Refresh the expiration time on a pending form after a timeout event.
const updateHumanInputTimeout = (
  current: WorkflowProcess | undefined,
  data: HumanInputFormTimeoutData,
) => {
  return updateWorkflowProcess(current, (draft) => {
    const pendingForms = draft.humanInputFormDataList
    if (!pendingForms?.length)
      return
    const formIndex = pendingForms.findIndex(item => item.node_id === data.node_id)
    if (formIndex > -1)
      pendingForms[formIndex].expiration_time = data.expiration_time
  })
}
// Collapse the trace panel and mark the run as paused (awaiting human input).
const applyWorkflowPaused = (current: WorkflowProcess | undefined) => {
  return updateWorkflowProcess(current, (draft) => {
    draft.expand = false
    draft.status = WorkflowRunningStatus.Paused
  })
}

// Render workflow outputs as a display string: nullish becomes '', strings
// pass through unchanged, anything else is JSON-encoded.
const serializeWorkflowOutputs = (outputs: WorkflowFinishedResponse['data']['outputs']) => {
  if (outputs == null)
    return ''
  if (typeof outputs === 'string')
    return outputs
  try {
    const encoded = JSON.stringify(outputs)
    return encoded ?? ''
  }
  catch {
    // Circular or otherwise non-serializable outputs fall back to String().
    return String(outputs)
  }
}
/**
 * Build the SSE callback set (`IOtherOptions`) that maps workflow stream
 * events onto result-panel state.
 *
 * The same handler object is reused when a paused run resumes:
 * `onWorkflowPaused` re-subscribes to the run's event stream passing
 * `otherOptions` back into `sseGet`, so a resumed run replays through these
 * handlers (see the resumed-run branch in `onWorkflowStarted`).
 */
export const createWorkflowStreamHandlers = ({
  getCompletionRes,
  getWorkflowProcessData,
  isTimedOut,
  markEnded,
  notify,
  onCompleted,
  resetRunState,
  setCompletionRes,
  setCurrentTaskId,
  setIsStopping,
  setMessageId,
  setRespondingFalse,
  setWorkflowProcessData,
  t,
  taskId,
}: CreateWorkflowStreamHandlersParams): IOtherOptions => {
  // Workflow run id captured from the stream; promoted to messageId on success.
  let tempMessageId = ''
  // Shared teardown for stopped/failed runs.
  const finishWithFailure = () => {
    setRespondingFalse()
    resetRunState()
    onCompleted(getCompletionRes(), taskId, false)
    markEnded()
  }
  // Shared teardown for successful runs; also publishes the message id.
  const finishWithSuccess = () => {
    setRespondingFalse()
    resetRunState()
    setMessageId(tempMessageId)
    onCompleted(getCompletionRes(), taskId, true)
    markEnded()
  }
  const otherOptions: IOtherOptions = {
    onWorkflowStarted: ({ workflow_run_id, task_id }) => {
      const workflowProcessData = getWorkflowProcessData()
      // Existing tracing means this "start" belongs to a resumed run (after a
      // pause, see onWorkflowPaused): keep the data, flip back to Running.
      if (workflowProcessData?.tracing.length) {
        setWorkflowProcessData(updateWorkflowProcess(workflowProcessData, (draft) => {
          draft.expand = true
          draft.status = WorkflowRunningStatus.Running
        }))
        return
      }
      tempMessageId = workflow_run_id
      setCurrentTaskId(task_id || null)
      setIsStopping(false)
      setWorkflowProcessData(createInitialWorkflowProcess())
    },
    // Iterations and loops share the same parallel-trace bookkeeping.
    onIterationStart: ({ data }) => {
      setWorkflowProcessData(appendParallelStart(getWorkflowProcessData(), data))
    },
    onIterationNext: ({ data }) => {
      setWorkflowProcessData(appendParallelNext(getWorkflowProcessData(), data))
    },
    onIterationFinish: ({ data }) => {
      setWorkflowProcessData(finishParallelTrace(getWorkflowProcessData(), data))
    },
    onLoopStart: ({ data }) => {
      setWorkflowProcessData(appendParallelStart(getWorkflowProcessData(), data))
    },
    onLoopNext: ({ data }) => {
      setWorkflowProcessData(appendParallelNext(getWorkflowProcessData(), data))
    },
    onLoopFinish: ({ data }) => {
      setWorkflowProcessData(finishParallelTrace(getWorkflowProcessData(), data))
    },
    onNodeStarted: ({ data }) => {
      setWorkflowProcessData(upsertWorkflowNode(getWorkflowProcessData(), data))
    },
    onNodeFinished: ({ data }) => {
      setWorkflowProcessData(finishWorkflowNode(getWorkflowProcessData(), data))
    },
    onWorkflowFinished: ({ data }) => {
      // A timed-out run only gets a warning toast; no state transitions.
      if (isTimedOut()) {
        notify({ type: 'warning', message: t('warningMessage.timeoutExceeded', { ns: 'appDebug' }) })
        return
      }
      const workflowStatus = data.status as WorkflowRunningStatus | undefined
      if (workflowStatus === WorkflowRunningStatus.Stopped) {
        setWorkflowProcessData(applyWorkflowFinishedState(getWorkflowProcessData(), WorkflowRunningStatus.Stopped))
        finishWithFailure()
        return
      }
      if (data.error) {
        notify({ type: 'error', message: data.error })
        setWorkflowProcessData(applyWorkflowFinishedState(getWorkflowProcessData(), WorkflowRunningStatus.Failed))
        finishWithFailure()
        return
      }
      setWorkflowProcessData(applyWorkflowOutputs(getWorkflowProcessData(), data.outputs))
      const serializedOutputs = serializeWorkflowOutputs(data.outputs)
      setCompletionRes(serializedOutputs)
      // When the run produced exactly one string output, show it verbatim
      // instead of the streamed/serialized text.
      if (data.outputs) {
        const outputKeys = Object.keys(data.outputs)
        const isStringOutput = outputKeys.length === 1 && typeof data.outputs[outputKeys[0]] === 'string'
        if (isStringOutput) {
          setWorkflowProcessData(updateWorkflowProcess(getWorkflowProcessData(), (draft) => {
            draft.resultText = data.outputs[outputKeys[0]]
          }))
        }
      }
      finishWithSuccess()
    },
    onTextChunk: ({ data: { text } }) => {
      setWorkflowProcessData(appendResultText(getWorkflowProcessData(), text))
    },
    onTextReplace: ({ data: { text } }) => {
      setWorkflowProcessData(replaceResultText(getWorkflowProcessData(), text))
    },
    onHumanInputRequired: ({ data }) => {
      setWorkflowProcessData(updateHumanInputRequired(getWorkflowProcessData(), data))
    },
    onHumanInputFormFilled: ({ data }) => {
      setWorkflowProcessData(updateHumanInputFilled(getWorkflowProcessData(), data))
    },
    onHumanInputFormTimeout: ({ data }) => {
      setWorkflowProcessData(updateHumanInputTimeout(getWorkflowProcessData(), data))
    },
    onWorkflowPaused: ({ data }) => {
      tempMessageId = data.workflow_run_id
      // Re-subscribe to the run's event stream with these same handlers so
      // processing continues once the human input is provided.
      void sseGet(`/workflow/${data.workflow_run_id}/events`, {}, otherOptions)
      setWorkflowProcessData(applyWorkflowPaused(getWorkflowProcessData()))
    },
  }
  return otherOptions
}
// Pure state-transition helpers re-exported for unit testing.
export {
  appendParallelNext,
  appendParallelStart,
  appendResultText,
  applyWorkflowFinishedState,
  applyWorkflowOutputs,
  applyWorkflowPaused,
  finishParallelTrace,
  finishWorkflowNode,
  markNodesStopped,
  replaceResultText,
  updateHumanInputFilled,
  updateHumanInputRequired,
  updateHumanInputTimeout,
  upsertWorkflowNode,
}

View File

@ -2,14 +2,12 @@ import type { Viewport } from 'next'
import { Agentation } from 'agentation'
import { Provider as JotaiProvider } from 'jotai/react'
import { ThemeProvider } from 'next-themes'
import { Instrument_Serif } from 'next/font/google'
import { NuqsAdapter } from 'nuqs/adapters/next/app'
import { IS_DEV } from '@/config'
import GlobalPublicStoreProvider from '@/context/global-public-context'
import { TanstackQueryInitializer } from '@/context/query-client'
import { getDatasetMap } from '@/env'
import { getLocaleOnServer } from '@/i18n-config/server'
import { cn } from '@/utils/classnames'
import { ToastProvider } from './components/base/toast'
import { TooltipProvider } from './components/base/ui/tooltip'
import BrowserInitializer from './components/browser-initializer'
@ -28,13 +26,6 @@ export const viewport: Viewport = {
userScalable: false,
}
const instrumentSerif = Instrument_Serif({
weight: ['400'],
style: ['normal', 'italic'],
subsets: ['latin'],
variable: '--font-instrument-serif',
})
const LocaleLayout = async ({
children,
}: {
@ -44,7 +35,7 @@ const LocaleLayout = async ({
const datasetMap = getDatasetMap()
return (
<html lang={locale ?? 'en'} className={cn('h-full', instrumentSerif.variable)} suppressHydrationWarning>
<html lang={locale ?? 'en'} className="h-full" suppressHydrationWarning>
<head>
<link rel="manifest" href="/manifest.json" />
<meta name="theme-color" content="#1C64F2" />

View File

@ -2839,11 +2839,6 @@
"count": 2
}
},
"app/components/billing/pricing/header.tsx": {
"tailwindcss/enforce-consistent-class-order": {
"count": 1
}
},
"app/components/billing/pricing/index.tsx": {
"react-refresh/only-export-components": {
"count": 1
@ -5964,11 +5959,8 @@
}
},
"app/components/share/text-generation/result/index.tsx": {
"react-hooks-extra/no-direct-set-state-in-use-effect": {
"count": 3
},
"ts/no-explicit-any": {
"count": 3
"count": 1
}
},
"app/components/share/text-generation/run-batch/csv-download/index.tsx": {

View File

@ -0,0 +1,271 @@
import fs from 'node:fs'
import path from 'node:path'
const DIFF_COVERAGE_IGNORE_LINE_TOKEN = 'diff-coverage-ignore-line:'
export function parseChangedLineMap(diff, isTrackedComponentSourceFile) {
const lineMap = new Map()
let currentFile = null
for (const line of diff.split('\n')) {
if (line.startsWith('+++ b/')) {
currentFile = line.slice(6).trim()
continue
}
if (!currentFile || !isTrackedComponentSourceFile(currentFile))
continue
const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/)
if (!match)
continue
const start = Number(match[1])
const count = match[2] ? Number(match[2]) : 1
if (count === 0)
continue
const linesForFile = lineMap.get(currentFile) ?? new Set()
for (let offset = 0; offset < count; offset += 1)
linesForFile.add(start + offset)
lineMap.set(currentFile, linesForFile)
}
return lineMap
}
/**
 * Normalize a coverage/diff file path to a repo-relative POSIX-style path.
 * Paths already carrying known repo prefixes pass through (coverage-style
 * paths get the `web/` prefix reattached); everything else is resolved
 * against `webRoot` and made relative to `repoRoot`.
 */
export function normalizeToRepoRelative(filePath, {
  appComponentsCoveragePrefix,
  appComponentsPrefix,
  repoRoot,
  sharedTestPrefix,
  webRoot,
}) {
  if (!filePath)
    return ''
  const alreadyRepoRelative = filePath.startsWith(appComponentsPrefix)
    || filePath.startsWith(sharedTestPrefix)
  if (alreadyRepoRelative)
    return filePath
  if (filePath.startsWith(appComponentsCoveragePrefix))
    return `web/${filePath}`
  const absolutePath = path.isAbsolute(filePath)
    ? filePath
    : path.resolve(webRoot, filePath)
  const repoRelative = path.relative(repoRoot, absolutePath)
  // Emit forward slashes regardless of the host platform separator.
  return repoRelative.split(path.sep).join('/')
}
/**
 * Derive per-line hit counts from an istanbul coverage entry.
 * Prefers the native `l` map when populated; otherwise reconstructs one from
 * statement start lines, keeping the maximum hit count per line.
 */
export function getLineHits(entry) {
  const nativeLineHits = entry?.l
  if (nativeLineHits && Object.keys(nativeLineHits).length > 0)
    return nativeLineHits
  const derivedHits = {}
  for (const [statementId, statement] of Object.entries(entry?.statementMap ?? {})) {
    const startLine = statement?.start?.line
    if (!startLine)
      continue
    const hitCount = entry?.s?.[statementId] ?? 0
    derivedHits[startLine] = startLine in derivedHits
      ? Math.max(derivedHits[startLine], hitCount)
      : hitCount
  }
  return derivedHits
}
/**
 * Statement-level coverage restricted to the changed lines of one file.
 *
 * When the file has no coverage entry (it never loaded during tests), every
 * changed line is reported as an uncovered statement. Otherwise, only
 * statements whose span touches a changed line count; each uncovered
 * statement is reported via the first changed line inside its span.
 */
export function getChangedStatementCoverage(entry, changedLines) {
  const normalizedChangedLines = [...(changedLines ?? [])].sort((a, b) => a - b)
  if (!entry) {
    return {
      covered: 0,
      total: normalizedChangedLines.length,
      uncoveredLines: normalizedChangedLines,
    }
  }
  const uncoveredLines = []
  let covered = 0
  let total = 0
  for (const [statementId, statement] of Object.entries(entry.statementMap ?? {})) {
    // Skip statements that do not overlap the diff.
    if (!rangeIntersectsChangedLines(statement, changedLines))
      continue
    total += 1
    const hits = entry.s?.[statementId] ?? 0
    if (hits > 0) {
      covered += 1
      continue
    }
    uncoveredLines.push(getFirstChangedLineInRange(statement, normalizedChangedLines))
  }
  return {
    covered,
    total,
    uncoveredLines: uncoveredLines.sort((a, b) => a - b),
  }
}
/**
 * Branch-level coverage restricted to the changed lines of one file.
 *
 * Each branch arm counts separately. A file with no coverage entry reports
 * zero branches (unlike statements, absence of data is not penalized here).
 * Uncovered arms are reported with the first changed line inside their
 * location, sorted by line then arm index.
 */
export function getChangedBranchCoverage(entry, changedLines) {
  const normalizedChangedLines = [...(changedLines ?? [])].sort((a, b) => a - b)
  if (!entry) {
    return {
      covered: 0,
      total: 0,
      uncoveredBranches: [],
    }
  }
  const uncoveredBranches = []
  let covered = 0
  let total = 0
  for (const [branchId, branch] of Object.entries(entry.branchMap ?? {})) {
    if (!branchIntersectsChangedLines(branch, changedLines))
      continue
    const hits = Array.isArray(entry.b?.[branchId]) ? entry.b[branchId] : []
    const locations = getBranchLocations(branch)
    // An arm may appear in `locations` without hit data (or vice versa);
    // use the larger of the two so no arm is missed.
    const armCount = Math.max(locations.length, hits.length)
    for (let armIndex = 0; armIndex < armCount; armIndex += 1) {
      total += 1
      if ((hits[armIndex] ?? 0) > 0) {
        covered += 1
        continue
      }
      // Fall back to the branch's overall location when the arm has none.
      const location = locations[armIndex] ?? branch.loc ?? branch
      uncoveredBranches.push({
        armIndex,
        line: getFirstChangedLineInRange(location, normalizedChangedLines, branch.line ?? 1),
      })
    }
  }
  uncoveredBranches.sort((a, b) => a.line - b.line || a.armIndex - b.armIndex)
  return {
    covered,
    total,
    uncoveredBranches,
  }
}
/**
 * Read `filePath` and collect diff-coverage ignore pragmas on its changed
 * lines. A file that no longer exists on disk yields an empty ignore result
 * (all changed lines remain effective).
 */
export function getIgnoredChangedLinesFromFile(filePath, changedLines) {
  if (!fs.existsSync(filePath))
    return emptyIgnoreResult(changedLines)
  const sourceCode = fs.readFileSync(filePath, 'utf8')
  return getIgnoredChangedLinesFromSource(sourceCode, changedLines)
}
/**
 * Scan source text for `// diff-coverage-ignore-line: <reason>` pragmas that
 * sit on changed lines.
 *
 * Returns:
 * - `effectiveChangedLines`: changed lines minus the validly ignored ones
 * - `ignoredLines`: Map of line number -> ignore reason
 * - `invalidPragmas`: pragmas on changed lines that omit a reason
 */
export function getIgnoredChangedLinesFromSource(sourceCode, changedLines) {
  const ignoredLines = new Map()
  const invalidPragmas = []
  const changedLineSet = new Set(changedLines ?? [])
  const sourceLines = sourceCode.split('\n')
  sourceLines.forEach((lineText, index) => {
    const lineNumber = index + 1
    // The pragma must appear after a `//` marker. NOTE(review): a bare
    // indexOf also matches `//` inside string literals — acceptable for a
    // lint-style heuristic, but worth knowing.
    const commentIndex = lineText.indexOf('//')
    if (commentIndex < 0)
      return
    const tokenIndex = lineText.indexOf(DIFF_COVERAGE_IGNORE_LINE_TOKEN, commentIndex + 2)
    if (tokenIndex < 0)
      return
    const reason = lineText.slice(tokenIndex + DIFF_COVERAGE_IGNORE_LINE_TOKEN.length).trim()
    // Pragmas on unchanged lines are irrelevant to this diff and skipped
    // silently (not even reported as invalid).
    if (!changedLineSet.has(lineNumber))
      return
    if (!reason) {
      invalidPragmas.push({
        line: lineNumber,
        reason: 'missing ignore reason',
      })
      return
    }
    ignoredLines.set(lineNumber, reason)
  })
  const effectiveChangedLines = new Set(
    [...changedLineSet].filter(lineNumber => !ignoredLines.has(lineNumber)),
  )
  return {
    effectiveChangedLines,
    ignoredLines,
    invalidPragmas,
  }
}
// Ignore-result shape when no pragmas apply: every changed line stays effective.
function emptyIgnoreResult(changedLines = []) {
  const effectiveChangedLines = new Set(changedLines)
  return {
    effectiveChangedLines,
    ignoredLines: new Map(),
    invalidPragmas: [],
  }
}
// True when any part of `branch` — its overall `loc`, any arm location, or
// its declared `line` — touches a changed line.
function branchIntersectsChangedLines(branch, changedLines) {
  if (!changedLines || changedLines.size === 0)
    return false
  if (rangeIntersectsChangedLines(branch.loc, changedLines))
    return true
  for (const location of getBranchLocations(branch)) {
    if (rangeIntersectsChangedLines(location, changedLines))
      return true
  }
  return branch.line ? changedLines.has(branch.line) : false
}

// Arm locations of a branch entry, with holes/nulls filtered out.
function getBranchLocations(branch) {
  if (!Array.isArray(branch?.locations))
    return []
  return branch.locations.filter(Boolean)
}

// Whether the [start, end] line span of `location` contains any changed line.
function rangeIntersectsChangedLines(location, changedLines) {
  if (!location || !changedLines || changedLines.size === 0)
    return false
  const startLine = getLocationStartLine(location)
  const endLine = getLocationEndLine(location) ?? startLine
  if (!startLine || !endLine)
    return false
  for (const changedLine of changedLines) {
    if (changedLine >= startLine && changedLine <= endLine)
      return true
  }
  return false
}

// First changed line inside the span; falls back to the span start, then to
// `fallbackLine` when the span itself is unusable.
function getFirstChangedLineInRange(location, changedLines, fallbackLine = 1) {
  const startLine = getLocationStartLine(location)
  const endLine = getLocationEndLine(location) ?? startLine
  if (!startLine || !endLine)
    return startLine ?? fallbackLine
  for (const changedLine of changedLines) {
    if (changedLine >= startLine && changedLine <= endLine)
      return changedLine
  }
  return startLine ?? fallbackLine
}

// istanbul locations carry either start/end objects or a bare `line` field.
function getLocationStartLine(location) {
  return location?.start?.line ?? location?.line ?? null
}

function getLocationEndLine(location) {
  return location?.end?.line ?? location?.line ?? null
}

View File

@ -1,6 +1,14 @@
import { execFileSync } from 'node:child_process'
import fs from 'node:fs'
import path from 'node:path'
import {
getChangedBranchCoverage,
getChangedStatementCoverage,
getIgnoredChangedLinesFromFile,
getLineHits,
normalizeToRepoRelative,
parseChangedLineMap,
} from './check-components-diff-coverage-lib.mjs'
import {
collectComponentCoverageExcludedFiles,
COMPONENT_COVERAGE_EXCLUDE_LABEL,
@ -54,7 +62,13 @@ if (changedSourceFiles.length === 0) {
const coverageEntries = new Map()
for (const [file, entry] of Object.entries(coverage)) {
const repoRelativePath = normalizeToRepoRelative(entry.path ?? file)
const repoRelativePath = normalizeToRepoRelative(entry.path ?? file, {
appComponentsCoveragePrefix: APP_COMPONENTS_COVERAGE_PREFIX,
appComponentsPrefix: APP_COMPONENTS_PREFIX,
repoRoot,
sharedTestPrefix: SHARED_TEST_PREFIX,
webRoot,
})
if (!isTrackedComponentSourceFile(repoRelativePath))
continue
@ -74,46 +88,53 @@ for (const [file, entry] of coverageEntries.entries()) {
const overallCoverage = sumCoverageStats(fileCoverageRows)
const diffChanges = getChangedLineMap(baseSha, headSha)
const diffRows = []
const ignoredDiffLines = []
const invalidIgnorePragmas = []
for (const [file, changedLines] of diffChanges.entries()) {
if (!isTrackedComponentSourceFile(file))
continue
const entry = coverageEntries.get(file)
const lineHits = entry ? getLineHits(entry) : {}
const executableChangedLines = [...changedLines]
.filter(line => !entry || lineHits[line] !== undefined)
.sort((a, b) => a - b)
if (executableChangedLines.length === 0) {
diffRows.push({
const ignoreInfo = getIgnoredChangedLinesFromFile(path.join(repoRoot, file), changedLines)
for (const [line, reason] of ignoreInfo.ignoredLines.entries()) {
ignoredDiffLines.push({
file,
moduleName: getModuleName(file),
total: 0,
covered: 0,
uncoveredLines: [],
line,
reason,
})
}
for (const invalidPragma of ignoreInfo.invalidPragmas) {
invalidIgnorePragmas.push({
file,
...invalidPragma,
})
continue
}
const uncoveredLines = executableChangedLines.filter(line => (lineHits[line] ?? 0) === 0)
const statements = getChangedStatementCoverage(entry, ignoreInfo.effectiveChangedLines)
const branches = getChangedBranchCoverage(entry, ignoreInfo.effectiveChangedLines)
diffRows.push({
branches,
file,
ignoredLineCount: ignoreInfo.ignoredLines.size,
moduleName: getModuleName(file),
total: executableChangedLines.length,
covered: executableChangedLines.length - uncoveredLines.length,
uncoveredLines,
statements,
})
}
const diffTotals = diffRows.reduce((acc, row) => {
acc.total += row.total
acc.covered += row.covered
acc.statements.total += row.statements.total
acc.statements.covered += row.statements.covered
acc.branches.total += row.branches.total
acc.branches.covered += row.branches.covered
return acc
}, { total: 0, covered: 0 })
}, {
branches: { total: 0, covered: 0 },
statements: { total: 0, covered: 0 },
})
const diffCoveragePct = percentage(diffTotals.covered, diffTotals.total)
const diffFailures = diffRows.filter(row => row.uncoveredLines.length > 0)
const diffStatementFailures = diffRows.filter(row => row.statements.uncoveredLines.length > 0)
const diffBranchFailures = diffRows.filter(row => row.branches.uncoveredBranches.length > 0)
const overallThresholdFailures = getThresholdFailures(overallCoverage, COMPONENTS_GLOBAL_THRESHOLDS)
const moduleCoverageRows = [...moduleCoverageMap.entries()]
.map(([moduleName, stats]) => ({
@ -139,25 +160,38 @@ appendSummary(buildSummary({
overallThresholdFailures,
moduleCoverageRows,
moduleThresholdFailures,
diffBranchFailures,
diffRows,
diffFailures,
diffCoveragePct,
diffStatementFailures,
diffTotals,
changedSourceFiles,
changedTestFiles,
ignoredDiffLines,
invalidIgnorePragmas,
missingTestTouch,
}))
if (diffFailures.length > 0 && process.env.CI) {
for (const failure of diffFailures.slice(0, 20)) {
const firstLine = failure.uncoveredLines[0] ?? 1
console.log(`::error file=${failure.file},line=${firstLine}::Uncovered changed lines: ${formatLineRanges(failure.uncoveredLines)}`)
if (process.env.CI) {
for (const failure of diffStatementFailures.slice(0, 20)) {
const firstLine = failure.statements.uncoveredLines[0] ?? 1
console.log(`::error file=${failure.file},line=${firstLine}::Uncovered changed statements: ${formatLineRanges(failure.statements.uncoveredLines)}`)
}
for (const failure of diffBranchFailures.slice(0, 20)) {
const firstBranch = failure.branches.uncoveredBranches[0]
const line = firstBranch?.line ?? 1
console.log(`::error file=${failure.file},line=${line}::Uncovered changed branches: ${formatBranchRefs(failure.branches.uncoveredBranches)}`)
}
for (const invalidPragma of invalidIgnorePragmas.slice(0, 20)) {
console.log(`::error file=${invalidPragma.file},line=${invalidPragma.line}::Invalid diff coverage ignore pragma: ${invalidPragma.reason}`)
}
}
if (
overallThresholdFailures.length > 0
|| moduleThresholdFailures.length > 0
|| diffFailures.length > 0
|| diffStatementFailures.length > 0
|| diffBranchFailures.length > 0
|| invalidIgnorePragmas.length > 0
|| (STRICT_TEST_FILE_TOUCH && missingTestTouch)
) {
process.exit(1)
@ -168,11 +202,14 @@ function buildSummary({
overallThresholdFailures,
moduleCoverageRows,
moduleThresholdFailures,
diffBranchFailures,
diffRows,
diffFailures,
diffCoveragePct,
diffStatementFailures,
diffTotals,
changedSourceFiles,
changedTestFiles,
ignoredDiffLines,
invalidIgnorePragmas,
missingTestTouch,
}) {
const lines = [
@ -189,7 +226,8 @@ function buildSummary({
`| Overall tracked statements | ${formatPercent(overallCoverage.statements)} | ${overallCoverage.statements.covered}/${overallCoverage.statements.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.statements}% |`,
`| Overall tracked functions | ${formatPercent(overallCoverage.functions)} | ${overallCoverage.functions.covered}/${overallCoverage.functions.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.functions}% |`,
`| Overall tracked branches | ${formatPercent(overallCoverage.branches)} | ${overallCoverage.branches.covered}/${overallCoverage.branches.total}; threshold ${COMPONENTS_GLOBAL_THRESHOLDS.branches}% |`,
`| Changed executable lines | ${formatPercent({ covered: diffTotals.covered, total: diffTotals.total })} | ${diffTotals.covered}/${diffTotals.total} |`,
`| Changed statements | ${formatDiffPercent(diffTotals.statements)} | ${diffTotals.statements.covered}/${diffTotals.statements.total} |`,
`| Changed branches | ${formatDiffPercent(diffTotals.branches)} | ${diffTotals.branches.covered}/${diffTotals.branches.total} |`,
'',
]
@ -239,20 +277,19 @@ function buildSummary({
lines.push('')
const changedRows = diffRows
.filter(row => row.total > 0)
.filter(row => row.statements.total > 0 || row.branches.total > 0)
.sort((a, b) => {
const aPct = percentage(rowCovered(a), rowTotal(a))
const bPct = percentage(rowCovered(b), rowTotal(b))
return aPct - bPct || a.file.localeCompare(b.file)
const aScore = percentage(a.statements.covered + a.branches.covered, a.statements.total + a.branches.total)
const bScore = percentage(b.statements.covered + b.branches.covered, b.statements.total + b.branches.total)
return aScore - bScore || a.file.localeCompare(b.file)
})
lines.push('<details><summary>Changed file coverage</summary>')
lines.push('')
lines.push('| File | Module | Changed executable lines | Coverage | Uncovered lines |')
lines.push('|---|---|---:|---:|---|')
lines.push('| File | Module | Changed statements | Statement coverage | Uncovered statements | Changed branches | Branch coverage | Uncovered branches | Ignored lines |')
lines.push('|---|---|---:|---:|---|---:|---:|---|---:|')
for (const row of changedRows) {
const rowPct = percentage(row.covered, row.total)
lines.push(`| ${row.file.replace('web/', '')} | ${row.moduleName} | ${row.total} | ${rowPct.toFixed(2)}% | ${formatLineRanges(row.uncoveredLines)} |`)
lines.push(`| ${row.file.replace('web/', '')} | ${row.moduleName} | ${row.statements.total} | ${formatDiffPercent(row.statements)} | ${formatLineRanges(row.statements.uncoveredLines)} | ${row.branches.total} | ${formatDiffPercent(row.branches)} | ${formatBranchRefs(row.branches.uncoveredBranches)} | ${row.ignoredLineCount} |`)
}
lines.push('</details>')
lines.push('')
@ -268,16 +305,41 @@ function buildSummary({
lines.push('')
}
if (diffFailures.length > 0) {
lines.push('Uncovered changed lines:')
for (const row of diffFailures) {
lines.push(`- ${row.file.replace('web/', '')}: ${formatLineRanges(row.uncoveredLines)}`)
if (diffStatementFailures.length > 0) {
lines.push('Uncovered changed statements:')
for (const row of diffStatementFailures) {
lines.push(`- ${row.file.replace('web/', '')}: ${formatLineRanges(row.statements.uncoveredLines)}`)
}
lines.push('')
}
if (diffBranchFailures.length > 0) {
lines.push('Uncovered changed branches:')
for (const row of diffBranchFailures) {
lines.push(`- ${row.file.replace('web/', '')}: ${formatBranchRefs(row.branches.uncoveredBranches)}`)
}
lines.push('')
}
if (ignoredDiffLines.length > 0) {
lines.push('Ignored changed lines via pragma:')
for (const ignoredLine of ignoredDiffLines) {
lines.push(`- ${ignoredLine.file.replace('web/', '')}:${ignoredLine.line} - ${ignoredLine.reason}`)
}
lines.push('')
}
if (invalidIgnorePragmas.length > 0) {
lines.push('Invalid diff coverage ignore pragmas:')
for (const invalidPragma of invalidIgnorePragmas) {
lines.push(`- ${invalidPragma.file.replace('web/', '')}:${invalidPragma.line} - ${invalidPragma.reason}`)
}
lines.push('')
}
lines.push(`Changed source files checked: ${changedSourceFiles.length}`)
lines.push(`Changed executable line coverage: ${diffCoveragePct.toFixed(2)}%`)
lines.push(`Changed statement coverage: ${formatDiffPercent(diffTotals.statements)}`)
lines.push(`Changed branch coverage: ${formatDiffPercent(diffTotals.branches)}`)
return lines
}
@ -312,34 +374,7 @@ function getChangedFiles(base, head) {
function getChangedLineMap(base, head) {
const diff = execGit(['diff', '--unified=0', '--no-color', '--diff-filter=ACMR', `${base}...${head}`, '--', 'web/app/components'])
const lineMap = new Map()
let currentFile = null
for (const line of diff.split('\n')) {
if (line.startsWith('+++ b/')) {
currentFile = line.slice(6).trim()
continue
}
if (!currentFile || !isTrackedComponentSourceFile(currentFile))
continue
const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/)
if (!match)
continue
const start = Number(match[1])
const count = match[2] ? Number(match[2]) : 1
if (count === 0)
continue
const linesForFile = lineMap.get(currentFile) ?? new Set()
for (let offset = 0; offset < count; offset += 1)
linesForFile.add(start + offset)
lineMap.set(currentFile, linesForFile)
}
return lineMap
return parseChangedLineMap(diff, isTrackedComponentSourceFile)
}
function isAnyComponentSourceFile(filePath) {
@ -407,24 +442,6 @@ function getCoverageStats(entry) {
}
}
/**
 * Derive a per-line hit-count map from an istanbul coverage entry.
 * Fast path: return the precomputed `l` map when istanbul populated it.
 * Fallback: fold the statement map into line hits, keeping the maximum
 * hit count among statements that start on the same line.
 */
function getLineHits(entry) {
  if (entry.l && Object.keys(entry.l).length > 0)
    return entry.l
  const hitsByLine = new Map()
  for (const [statementId, statement] of Object.entries(entry.statementMap ?? {})) {
    const line = statement?.start?.line
    if (!line)
      continue
    const hits = entry.s?.[statementId] ?? 0
    const current = hitsByLine.get(line)
    hitsByLine.set(line, current === undefined ? hits : Math.max(current, hits))
  }
  return Object.fromEntries(hitsByLine)
}
function sumCoverageStats(rows) {
const total = createEmptyCoverageStats()
for (const row of rows)
@ -479,23 +496,6 @@ function getModuleName(filePath) {
return segments.length === 1 ? '(root)' : segments[0]
}
/**
 * Normalize a coverage/report file path to a repo-relative POSIX path
 * (forward slashes), e.g. `web/app/components/...`.
 * - Empty/falsy input yields ''.
 * - Paths already repo-relative (components or shared-test prefix) pass through.
 * - Coverage-relative component paths gain the `web/` prefix.
 * - Anything else is resolved against `webRoot`, then relativized to `repoRoot`.
 */
function normalizeToRepoRelative(filePath) {
  if (!filePath)
    return ''
  const alreadyRepoRelative
    = filePath.startsWith(APP_COMPONENTS_PREFIX)
    || filePath.startsWith(SHARED_TEST_PREFIX)
  if (alreadyRepoRelative)
    return filePath
  if (filePath.startsWith(APP_COMPONENTS_COVERAGE_PREFIX))
    return `web/${filePath}`
  const absolutePath = path.isAbsolute(filePath) ? filePath : path.resolve(webRoot, filePath)
  const repoRelative = path.relative(repoRoot, absolutePath)
  return repoRelative.split(path.sep).join('/')
}
function formatLineRanges(lines) {
if (!lines || lines.length === 0)
return ''
@ -520,6 +520,13 @@ function formatLineRanges(lines) {
return ranges.join(', ')
}
/**
 * Render uncovered branch references as a comma-separated list in
 * `line[armIndex]` form, e.g. `10[0], 12[2]`. Returns '' for a missing
 * or empty list.
 */
function formatBranchRefs(branches) {
  if (!branches || branches.length === 0)
    return ''
  const refs = []
  for (const { line, armIndex } of branches)
    refs.push(`${line}[${armIndex}]`)
  return refs.join(', ')
}
function percentage(covered, total) {
if (total === 0)
return 100
@ -530,6 +537,13 @@ function formatPercent(metric) {
return `${percentage(metric.covered, metric.total).toFixed(2)}%`
}
/**
 * Format a diff-coverage metric ({ covered, total }) as a percentage string
 * with two decimals; yields 'n/a' when the diff has nothing to measure
 * (total of zero), distinguishing "no changed code" from "0% covered".
 */
function formatDiffPercent(metric) {
  const { covered, total } = metric
  if (total === 0)
    return 'n/a'
  return `${percentage(covered, total).toFixed(2)}%`
}
function appendSummary(lines) {
const content = `${lines.join('\n')}\n`
if (process.env.GITHUB_STEP_SUMMARY)
@ -550,11 +564,3 @@ function repoRootFromCwd() {
encoding: 'utf8',
}).trim()
}
/** Accessor: covered changed-line count for a diff-coverage row. */
function rowCovered(row) {
  const { covered } = row
  return covered
}
/** Accessor: total changed-line count for a diff-coverage row. */
function rowTotal(row) {
  const { total } = row
  return total
}

View File

@ -92,10 +92,10 @@ export const COMPONENT_MODULE_THRESHOLDS = {
branches: 90,
},
'share': {
lines: 15,
statements: 15,
functions: 20,
branches: 20,
lines: 95,
statements: 95,
functions: 95,
branches: 95,
},
'signin': {
lines: 95,

View File

@ -1,31 +1,29 @@
import type { QueryKey } from '@tanstack/react-query'
import {
useQueryClient,
} from '@tanstack/react-query'
import { useQueryClient } from '@tanstack/react-query'
import { useCallback } from 'react'
/**
* @deprecated Convenience wrapper scheduled for removal.
* Prefer binding invalidation in `useMutation` callbacks at the service layer.
*/
export const useInvalid = (key?: QueryKey) => {
const queryClient = useQueryClient()
return () => {
return useCallback(() => {
if (!key)
return
queryClient.invalidateQueries(
{
queryKey: key,
},
)
}
queryClient.invalidateQueries({ queryKey: key })
}, [queryClient, key])
}
/**
* @deprecated Convenience wrapper scheduled for removal.
* Prefer binding reset in `useMutation` callbacks at the service layer.
*/
export const useReset = (key?: QueryKey) => {
const queryClient = useQueryClient()
return () => {
return useCallback(() => {
if (!key)
return
queryClient.resetQueries(
{
queryKey: key,
},
)
}
queryClient.resetQueries({ queryKey: key })
}, [queryClient, key])
}

View File

@ -113,9 +113,6 @@ const config = {
2: '0.02',
8: '0.08',
},
fontFamily: {
instrument: ['var(--font-instrument-serif)', 'serif'],
},
fontSize: {
'2xs': '0.625rem',
},