mirror of
https://github.com/langgenius/dify.git
synced 2026-05-13 00:33:37 +08:00
Compare commits
157 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6facd9360c | ||
|
|
a18d7f51eb | ||
|
|
680ef077ae | ||
|
|
c26be9d3f4 | ||
|
|
51a8f79d67 | ||
|
|
bb73776339 | ||
|
|
9424bf60b0 | ||
|
|
cbedcd2882 | ||
|
|
1a93af5cd0 | ||
|
|
cd90d7ffc1 | ||
|
|
4bb987eca3 | ||
|
|
4fd4615c56 | ||
|
|
c7d30bf09a | ||
|
|
59dab7deac | ||
|
|
a60cb3b800 | ||
|
|
6164408da1 | ||
|
|
7fc40e6c9e | ||
|
|
d625ac0bf1 | ||
|
|
1082f488a1 | ||
|
|
f1c4c1a5ff | ||
|
|
dd1cdbbd41 | ||
|
|
74a04afe27 | ||
|
|
b108ea42f6 | ||
|
|
1aa6188b7d | ||
|
|
bd0d10ac5c | ||
|
|
2162ea6a68 | ||
|
|
153064bbd4 | ||
|
|
a643b05368 | ||
|
|
279b66bc7f | ||
|
|
e134c1e0d5 | ||
|
|
9127209dd5 | ||
|
|
a2ee151e48 | ||
|
|
9e3e616391 | ||
|
|
837b5cad86 | ||
|
|
1a011dc14a | ||
|
|
bf117dd0c8 | ||
|
|
1e6dc62470 | ||
|
|
0b70eec695 | ||
|
|
e8dc706414 | ||
|
|
9a2bea9287 | ||
|
|
b95e6f6a7a | ||
|
|
b99ba74aa4 | ||
|
|
7b5c371b9d | ||
|
|
c67ce6f66d | ||
|
|
e48d7bb097 | ||
|
|
24ea21db25 | ||
|
|
8581a68174 | ||
|
|
f720a3bed2 | ||
|
|
4a56763d2f | ||
|
|
861f73267c | ||
|
|
1efd365b62 | ||
|
|
65c36a51ef | ||
|
|
19476109da | ||
|
|
f3eb3ab4dd | ||
|
|
2c9e30426d | ||
|
|
2bb1f0906b | ||
|
|
d5ad6aedc0 | ||
|
|
5ebeb34feb | ||
|
|
c5ac191a79 | ||
|
|
140ad6ba4e | ||
|
|
e03eb3a76c | ||
|
|
38a419d073 | ||
|
|
c74cbb68da | ||
|
|
271019006e | ||
|
|
19bf36a716 | ||
|
|
48d27e250b | ||
|
|
d06b5529b3 | ||
|
|
8132c444dc | ||
|
|
cb0356e9d7 | ||
|
|
4d80892d7b | ||
|
|
af754f497a | ||
|
|
8f93bb36ba | ||
|
|
82f24b336d | ||
|
|
927a17804b | ||
|
|
29f34848cd | ||
|
|
1b0d4637b3 | ||
|
|
936a09c704 | ||
|
|
5cc62fd1c9 | ||
|
|
7bc19d8251 | ||
|
|
e845475408 | ||
|
|
9a8aa6a0c3 | ||
|
|
76a7f5f4b9 | ||
|
|
2ff50514c8 | ||
|
|
7901ac9a97 | ||
|
|
ecd830083a | ||
|
|
203b3a9499 | ||
|
|
9331024d91 | ||
|
|
c6a5de3c18 | ||
|
|
cd3327013a | ||
|
|
cd66559ebf | ||
|
|
8b77ec7f31 | ||
|
|
bb3de5dd32 | ||
|
|
1e2d309122 | ||
|
|
a24ec60e51 | ||
|
|
8fd616d27f | ||
|
|
e5bdc40dce | ||
|
|
376c43e5ac | ||
|
|
3ebb449d25 | ||
|
|
5297ac76ec | ||
|
|
bbed1d4a7c | ||
|
|
c804dbed8c | ||
|
|
00bf3f83f2 | ||
|
|
7e6745e105 | ||
|
|
d648ce6888 | ||
|
|
f3c3534e33 | ||
|
|
8967ff34b3 | ||
|
|
57539792c1 | ||
|
|
03e227f8f1 | ||
|
|
506e1a8bc7 | ||
|
|
f8873ec07b | ||
|
|
b2dacf0718 | ||
|
|
70eb98d6c5 | ||
|
|
b83f296634 | ||
|
|
5c68f12bb8 | ||
|
|
4df7c00859 | ||
|
|
995c43f3dd | ||
|
|
c0431ec843 | ||
|
|
a0af10abc8 | ||
|
|
8e2b8168be | ||
|
|
1f29565673 | ||
|
|
90fe54ca9e | ||
|
|
b43ebf539d | ||
|
|
853b859032 | ||
|
|
8f3e42e9c2 | ||
|
|
1359c03216 | ||
|
|
4b7dc17546 | ||
|
|
81090effe2 | ||
|
|
d92c336394 | ||
|
|
cd9daef564 | ||
|
|
2876839d7e | ||
|
|
7ba408eebe | ||
|
|
3708e3eef1 | ||
|
|
ff5c2c57a1 | ||
|
|
955c25589d | ||
|
|
54bde0bdf6 | ||
|
|
87add9a4f3 | ||
|
|
574d5865f4 | ||
|
|
458fab1c48 | ||
|
|
88196c186e | ||
|
|
dcf21a6a84 | ||
|
|
91f92c7083 | ||
|
|
0ca339103f | ||
|
|
5cf741895f | ||
|
|
11c52e90f6 | ||
|
|
f01e099729 | ||
|
|
195ff4711d | ||
|
|
fe2f7a8920 | ||
|
|
3b1458c08f | ||
|
|
9f47317032 | ||
|
|
e751ec323e | ||
|
|
f1d72eb5d2 | ||
|
|
44242d03b4 | ||
|
|
ed7ea68f7d | ||
|
|
afbc30c9ed | ||
|
|
0e55dcb297 | ||
|
|
25973c7d77 | ||
|
|
73ecdd5494 |
@ -63,7 +63,7 @@ pnpm analyze-component <path> --json
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// ❌ Before: Complex state logic in component
|
// ❌ Before: Complex state logic in component
|
||||||
const Configuration: FC = () => {
|
function Configuration() {
|
||||||
const [modelConfig, setModelConfig] = useState<ModelConfig>(...)
|
const [modelConfig, setModelConfig] = useState<ModelConfig>(...)
|
||||||
const [datasetConfigs, setDatasetConfigs] = useState<DatasetConfigs>(...)
|
const [datasetConfigs, setDatasetConfigs] = useState<DatasetConfigs>(...)
|
||||||
const [completionParams, setCompletionParams] = useState<FormValue>({})
|
const [completionParams, setCompletionParams] = useState<FormValue>({})
|
||||||
@ -85,7 +85,7 @@ export const useModelConfig = (appId: string) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Component becomes cleaner
|
// Component becomes cleaner
|
||||||
const Configuration: FC = () => {
|
function Configuration() {
|
||||||
const { modelConfig, setModelConfig } = useModelConfig(appId)
|
const { modelConfig, setModelConfig } = useModelConfig(appId)
|
||||||
return <div>...</div>
|
return <div>...</div>
|
||||||
}
|
}
|
||||||
@ -189,8 +189,6 @@ const Template = useMemo(() => {
|
|||||||
|
|
||||||
**Dify Convention**:
|
**Dify Convention**:
|
||||||
- This skill is for component decomposition, not query/mutation design.
|
- This skill is for component decomposition, not query/mutation design.
|
||||||
- When refactoring data fetching, follow `web/AGENTS.md`.
|
|
||||||
- Use `frontend-query-mutation` for contracts, query shape, data-fetching wrappers, query/mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
|
|
||||||
- Do not introduce deprecated `useInvalid` / `useReset`.
|
- Do not introduce deprecated `useInvalid` / `useReset`.
|
||||||
- Do not add thin passthrough `useQuery` wrappers during refactoring; only extract a custom hook when it truly orchestrates multiple queries/mutations or shared derived state.
|
- Do not add thin passthrough `useQuery` wrappers during refactoring; only extract a custom hook when it truly orchestrates multiple queries/mutations or shared derived state.
|
||||||
|
|
||||||
|
|||||||
@ -60,8 +60,10 @@ const Template = useMemo(() => {
|
|||||||
**After** (complexity: ~3):
|
**After** (complexity: ~3):
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
|
import type { ComponentType } from 'react'
|
||||||
|
|
||||||
// Define lookup table outside component
|
// Define lookup table outside component
|
||||||
const TEMPLATE_MAP: Record<AppModeEnum, Record<string, FC<TemplateProps>>> = {
|
const TEMPLATE_MAP: Record<AppModeEnum, Record<string, ComponentType<TemplateProps>>> = {
|
||||||
[AppModeEnum.CHAT]: {
|
[AppModeEnum.CHAT]: {
|
||||||
[LanguagesSupported[1]]: TemplateChatZh,
|
[LanguagesSupported[1]]: TemplateChatZh,
|
||||||
[LanguagesSupported[7]]: TemplateChatJa,
|
[LanguagesSupported[7]]: TemplateChatJa,
|
||||||
|
|||||||
@ -65,10 +65,10 @@ interface ConfigurationHeaderProps {
|
|||||||
onPublish: () => void
|
onPublish: () => void
|
||||||
}
|
}
|
||||||
|
|
||||||
const ConfigurationHeader: FC<ConfigurationHeaderProps> = ({
|
function ConfigurationHeader({
|
||||||
isAdvancedMode,
|
isAdvancedMode,
|
||||||
onPublish,
|
onPublish,
|
||||||
}) => {
|
}: ConfigurationHeaderProps) {
|
||||||
const { t } = useTranslation()
|
const { t } = useTranslation()
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@ -136,7 +136,7 @@ const AppInfo = () => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ✅ After: Separate view components
|
// ✅ After: Separate view components
|
||||||
const AppInfoExpanded: FC<AppInfoViewProps> = ({ appDetail, onAction }) => {
|
function AppInfoExpanded({ appDetail, onAction }: AppInfoViewProps) {
|
||||||
return (
|
return (
|
||||||
<div className="expanded">
|
<div className="expanded">
|
||||||
{/* Clean, focused expanded view */}
|
{/* Clean, focused expanded view */}
|
||||||
@ -144,7 +144,7 @@ const AppInfoExpanded: FC<AppInfoViewProps> = ({ appDetail, onAction }) => {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const AppInfoCollapsed: FC<AppInfoViewProps> = ({ appDetail, onAction }) => {
|
function AppInfoCollapsed({ appDetail, onAction }: AppInfoViewProps) {
|
||||||
return (
|
return (
|
||||||
<div className="collapsed">
|
<div className="collapsed">
|
||||||
{/* Clean, focused collapsed view */}
|
{/* Clean, focused collapsed view */}
|
||||||
@ -203,12 +203,12 @@ interface AppInfoModalsProps {
|
|||||||
onSuccess: () => void
|
onSuccess: () => void
|
||||||
}
|
}
|
||||||
|
|
||||||
const AppInfoModals: FC<AppInfoModalsProps> = ({
|
function AppInfoModals({
|
||||||
appDetail,
|
appDetail,
|
||||||
activeModal,
|
activeModal,
|
||||||
onClose,
|
onClose,
|
||||||
onSuccess,
|
onSuccess,
|
||||||
}) => {
|
}: AppInfoModalsProps) {
|
||||||
const handleEdit = async (data) => { /* logic */ }
|
const handleEdit = async (data) => { /* logic */ }
|
||||||
const handleDuplicate = async (data) => { /* logic */ }
|
const handleDuplicate = async (data) => { /* logic */ }
|
||||||
const handleDelete = async () => { /* logic */ }
|
const handleDelete = async () => { /* logic */ }
|
||||||
@ -296,7 +296,7 @@ interface OperationItemProps {
|
|||||||
onAction: (id: string) => void
|
onAction: (id: string) => void
|
||||||
}
|
}
|
||||||
|
|
||||||
const OperationItem: FC<OperationItemProps> = ({ operation, onAction }) => {
|
function OperationItem({ operation, onAction }: OperationItemProps) {
|
||||||
return (
|
return (
|
||||||
<div className="operation-item">
|
<div className="operation-item">
|
||||||
<span className="icon">{operation.icon}</span>
|
<span className="icon">{operation.icon}</span>
|
||||||
@ -435,7 +435,7 @@ interface ChildProps {
|
|||||||
onSubmit: () => void
|
onSubmit: () => void
|
||||||
}
|
}
|
||||||
|
|
||||||
const Child: FC<ChildProps> = ({ value, onChange, onSubmit }) => {
|
function Child({ value, onChange, onSubmit }: ChildProps) {
|
||||||
return (
|
return (
|
||||||
<div>
|
<div>
|
||||||
<input value={value} onChange={e => onChange(e.target.value)} />
|
<input value={value} onChange={e => onChange(e.target.value)} />
|
||||||
|
|||||||
@ -112,13 +112,13 @@ export const useModelConfig = ({
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// Before: 50+ lines of state management
|
// Before: 50+ lines of state management
|
||||||
const Configuration: FC = () => {
|
function Configuration() {
|
||||||
const [modelConfig, setModelConfig] = useState<ModelConfig>(...)
|
const [modelConfig, setModelConfig] = useState<ModelConfig>(...)
|
||||||
// ... lots of related state and effects
|
// ... lots of related state and effects
|
||||||
}
|
}
|
||||||
|
|
||||||
// After: Clean component
|
// After: Clean component
|
||||||
const Configuration: FC = () => {
|
function Configuration() {
|
||||||
const {
|
const {
|
||||||
modelConfig,
|
modelConfig,
|
||||||
setModelConfig,
|
setModelConfig,
|
||||||
@ -159,8 +159,6 @@ const Configuration: FC = () => {
|
|||||||
|
|
||||||
When hook extraction touches query or mutation code, do not use this reference as the source of truth for data-layer patterns.
|
When hook extraction touches query or mutation code, do not use this reference as the source of truth for data-layer patterns.
|
||||||
|
|
||||||
- Follow `web/AGENTS.md` first.
|
|
||||||
- Use `frontend-query-mutation` for contracts, query shape, data-fetching wrappers, query/mutation call-site patterns, conditional queries, invalidation, and mutation error handling.
|
|
||||||
- Do not introduce deprecated `useInvalid` / `useReset`.
|
- Do not introduce deprecated `useInvalid` / `useReset`.
|
||||||
- Do not extract thin passthrough `useQuery` hooks; only extract orchestration hooks.
|
- Do not extract thin passthrough `useQuery` hooks; only extract orchestration hooks.
|
||||||
|
|
||||||
|
|||||||
@ -23,7 +23,7 @@ Use this skill for Dify's repository-level E2E suite in `e2e/`. Use [`e2e/AGENTS
|
|||||||
- `e2e/scripts/run-cucumber.ts` and `e2e/cucumber.config.ts` when tags or execution flow matter
|
- `e2e/scripts/run-cucumber.ts` and `e2e/cucumber.config.ts` when tags or execution flow matter
|
||||||
3. Read [`references/playwright-best-practices.md`](references/playwright-best-practices.md) only when locator, assertion, isolation, or waiting choices are involved.
|
3. Read [`references/playwright-best-practices.md`](references/playwright-best-practices.md) only when locator, assertion, isolation, or waiting choices are involved.
|
||||||
4. Read [`references/cucumber-best-practices.md`](references/cucumber-best-practices.md) only when scenario wording, step granularity, tags, or expression design are involved.
|
4. Read [`references/cucumber-best-practices.md`](references/cucumber-best-practices.md) only when scenario wording, step granularity, tags, or expression design are involved.
|
||||||
5. Re-check official docs with Context7 before introducing a new Playwright or Cucumber pattern.
|
5. Re-check official Playwright or Cucumber docs with the available documentation tools before introducing a new framework pattern.
|
||||||
|
|
||||||
## Local Rules
|
## Local Rules
|
||||||
|
|
||||||
|
|||||||
@ -9,18 +9,18 @@ Category: Performance
|
|||||||
|
|
||||||
When rendering React Flow, prefer `useNodes`/`useEdges` for UI consumption and rely on `useStoreApi` inside callbacks that mutate or read node/edge state. Avoid manually pulling Flow data outside of these hooks.
|
When rendering React Flow, prefer `useNodes`/`useEdges` for UI consumption and rely on `useStoreApi` inside callbacks that mutate or read node/edge state. Avoid manually pulling Flow data outside of these hooks.
|
||||||
|
|
||||||
## Complex prop memoization
|
## Complex prop stability
|
||||||
|
|
||||||
IsUrgent: True
|
IsUrgent: False
|
||||||
Category: Performance
|
Category: Performance
|
||||||
|
|
||||||
### Description
|
### Description
|
||||||
|
|
||||||
Wrap complex prop values (objects, arrays, maps) in `useMemo` prior to passing them into child components to guarantee stable references and prevent unnecessary renders.
|
Only require stable object, array, or map props when there is a clear reason: the child is memoized, the value participates in effect/query dependencies, the value is part of a stable-reference API contract, or profiling/local behavior shows avoidable re-renders. Do not request `useMemo` for every inline object by default; `how-to-write-component` treats memoization as a targeted optimization.
|
||||||
|
|
||||||
Update this file when adding, editing, or removing Performance rules so the catalog remains accurate.
|
Update this file when adding, editing, or removing Performance rules so the catalog remains accurate.
|
||||||
|
|
||||||
Wrong:
|
Risky:
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
<HeavyComp
|
<HeavyComp
|
||||||
@ -31,7 +31,7 @@ Wrong:
|
|||||||
/>
|
/>
|
||||||
```
|
```
|
||||||
|
|
||||||
Right:
|
Better when stable identity matters:
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
const config = useMemo(() => ({
|
const config = useMemo(() => ({
|
||||||
|
|||||||
@ -1,44 +0,0 @@
|
|||||||
---
|
|
||||||
name: frontend-query-mutation
|
|
||||||
description: Guide for implementing Dify frontend query and mutation patterns with TanStack Query and oRPC. Trigger when creating or updating contracts in web/contract, wiring router composition, consuming consoleQuery or marketplaceQuery in components or services, deciding whether to call queryOptions() directly or extract a helper or use-* hook, handling conditional queries, cache invalidation, mutation error handling, or migrating legacy service calls to contract-first query and mutation helpers.
|
|
||||||
---
|
|
||||||
|
|
||||||
# Frontend Query & Mutation
|
|
||||||
|
|
||||||
## Intent
|
|
||||||
|
|
||||||
- Keep contract as the single source of truth in `web/contract/*`.
|
|
||||||
- Prefer contract-shaped `queryOptions()` and `mutationOptions()`.
|
|
||||||
- Keep invalidation and mutation flow knowledge in the service layer.
|
|
||||||
- Keep abstractions minimal to preserve TypeScript inference.
|
|
||||||
|
|
||||||
## Workflow
|
|
||||||
|
|
||||||
1. Identify the change surface.
|
|
||||||
- Read `references/contract-patterns.md` for contract files, router composition, client helpers, and query or mutation call-site shape.
|
|
||||||
- Read `references/runtime-rules.md` for conditional queries, invalidation, error handling, and legacy migrations.
|
|
||||||
- Read both references when a task spans contract shape and runtime behavior.
|
|
||||||
2. Implement the smallest abstraction that fits the task.
|
|
||||||
- Default to direct `useQuery(...)` or `useMutation(...)` calls with oRPC helpers at the call site.
|
|
||||||
- Extract a small shared query helper only when multiple call sites share the same extra options.
|
|
||||||
- Create `web/service/use-{domain}.ts` only for orchestration or shared domain behavior.
|
|
||||||
3. Preserve Dify conventions.
|
|
||||||
- Keep contract inputs in `{ params, query?, body? }` shape.
|
|
||||||
- Bind invalidation in the service-layer mutation definition.
|
|
||||||
- Prefer `mutate(...)`; use `mutateAsync(...)` only when Promise semantics are required.
|
|
||||||
|
|
||||||
## Files Commonly Touched
|
|
||||||
|
|
||||||
- `web/contract/console/*.ts`
|
|
||||||
- `web/contract/marketplace.ts`
|
|
||||||
- `web/contract/router.ts`
|
|
||||||
- `web/service/client.ts`
|
|
||||||
- `web/service/use-*.ts`
|
|
||||||
- component and hook call sites using `consoleQuery` or `marketplaceQuery`
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- Use `references/contract-patterns.md` for contract shape, router registration, query and mutation helpers, and anti-patterns that degrade inference.
|
|
||||||
- Use `references/runtime-rules.md` for conditional queries, invalidation, `mutate` versus `mutateAsync`, and legacy migration rules.
|
|
||||||
|
|
||||||
Treat this skill as the single query and mutation entry point for Dify frontend work. Keep detailed rules in the reference files instead of duplicating them in project docs.
|
|
||||||
@ -1,4 +0,0 @@
|
|||||||
interface:
|
|
||||||
display_name: "Frontend Query & Mutation"
|
|
||||||
short_description: "Dify TanStack Query and oRPC patterns"
|
|
||||||
default_prompt: "Use this skill when implementing or reviewing Dify frontend contracts, query and mutation call sites, conditional queries, invalidation, or legacy query/mutation migrations."
|
|
||||||
@ -1,98 +0,0 @@
|
|||||||
# Contract Patterns
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
|
|
||||||
- Intent
|
|
||||||
- Minimal structure
|
|
||||||
- Core workflow
|
|
||||||
- Query usage decision rule
|
|
||||||
- Mutation usage decision rule
|
|
||||||
- Anti-patterns
|
|
||||||
- Contract rules
|
|
||||||
- Type export
|
|
||||||
|
|
||||||
## Intent
|
|
||||||
|
|
||||||
- Keep contract as the single source of truth in `web/contract/*`.
|
|
||||||
- Default query usage to call-site `useQuery(consoleQuery|marketplaceQuery.xxx.queryOptions(...))` when endpoint behavior maps 1:1 to the contract.
|
|
||||||
- Keep abstractions minimal and preserve TypeScript inference.
|
|
||||||
|
|
||||||
## Minimal Structure
|
|
||||||
|
|
||||||
```text
|
|
||||||
web/contract/
|
|
||||||
├── base.ts
|
|
||||||
├── router.ts
|
|
||||||
├── marketplace.ts
|
|
||||||
└── console/
|
|
||||||
├── billing.ts
|
|
||||||
└── ...other domains
|
|
||||||
web/service/client.ts
|
|
||||||
```
|
|
||||||
|
|
||||||
## Core Workflow
|
|
||||||
|
|
||||||
1. Define contract in `web/contract/console/{domain}.ts` or `web/contract/marketplace.ts`.
|
|
||||||
- Use `base.route({...}).output(type<...>())` as the baseline.
|
|
||||||
- Add `.input(type<...>())` only when the request has `params`, `query`, or `body`.
|
|
||||||
- For `GET` without input, omit `.input(...)`; do not use `.input(type<unknown>())`.
|
|
||||||
2. Register contract in `web/contract/router.ts`.
|
|
||||||
- Import directly from domain files and nest by API prefix.
|
|
||||||
3. Consume from UI call sites via oRPC query utilities.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { useQuery } from '@tanstack/react-query'
|
|
||||||
import { consoleQuery } from '@/service/client'
|
|
||||||
|
|
||||||
const invoiceQuery = useQuery(consoleQuery.billing.invoices.queryOptions({
|
|
||||||
staleTime: 5 * 60 * 1000,
|
|
||||||
throwOnError: true,
|
|
||||||
select: invoice => invoice.url,
|
|
||||||
}))
|
|
||||||
```
|
|
||||||
|
|
||||||
## Query Usage Decision Rule
|
|
||||||
|
|
||||||
1. Default to direct `*.queryOptions(...)` usage at the call site.
|
|
||||||
2. If 3 or more call sites share the same extra options, extract a small query helper, not a `use-*` passthrough hook.
|
|
||||||
3. Create `web/service/use-{domain}.ts` only for orchestration.
|
|
||||||
- Combine multiple queries or mutations.
|
|
||||||
- Share domain-level derived state or invalidation helpers.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const invoicesBaseQueryOptions = () =>
|
|
||||||
consoleQuery.billing.invoices.queryOptions({ retry: false })
|
|
||||||
|
|
||||||
const invoiceQuery = useQuery({
|
|
||||||
...invoicesBaseQueryOptions(),
|
|
||||||
throwOnError: true,
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
## Mutation Usage Decision Rule
|
|
||||||
|
|
||||||
1. Default to mutation helpers from `consoleQuery` or `marketplaceQuery`, for example `useMutation(consoleQuery.billing.bindPartnerStack.mutationOptions(...))`.
|
|
||||||
2. If the mutation flow is heavily custom, use oRPC clients as `mutationFn`, for example `consoleClient.xxx` or `marketplaceClient.xxx`, instead of handwritten non-oRPC mutation logic.
|
|
||||||
|
|
||||||
## Anti-Patterns
|
|
||||||
|
|
||||||
- Do not wrap `useQuery` with `options?: Partial<UseQueryOptions>`.
|
|
||||||
- Do not split local `queryKey` and `queryFn` when oRPC `queryOptions` already exists and fits the use case.
|
|
||||||
- Do not create thin `use-*` passthrough hooks for a single endpoint.
|
|
||||||
- These patterns can degrade inference, especially around `throwOnError` and `select`, and add unnecessary indirection.
|
|
||||||
|
|
||||||
## Contract Rules
|
|
||||||
|
|
||||||
- Input structure: always use `{ params, query?, body? }`.
|
|
||||||
- No-input `GET`: omit `.input(...)`; do not use `.input(type<unknown>())`.
|
|
||||||
- Path params: use `{paramName}` in the path and match it in the `params` object.
|
|
||||||
- Router nesting: group by API prefix, for example `/billing/*` becomes `billing: {}`.
|
|
||||||
- No barrel files: import directly from specific files.
|
|
||||||
- Types: import from `@/types/` and use the `type<T>()` helper.
|
|
||||||
- Mutations: prefer `mutationOptions`; use explicit `mutationKey` mainly for defaults, filtering, and devtools.
|
|
||||||
|
|
||||||
## Type Export
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
export type ConsoleInputs = InferContractRouterInputs<typeof consoleRouterContract>
|
|
||||||
```
|
|
||||||
@ -1,130 +0,0 @@
|
|||||||
# Runtime Rules
|
|
||||||
|
|
||||||
## Table of Contents
|
|
||||||
|
|
||||||
- Conditional queries
|
|
||||||
- Cache invalidation
|
|
||||||
- Key API guide
|
|
||||||
- `mutate` vs `mutateAsync`
|
|
||||||
- Legacy migration
|
|
||||||
|
|
||||||
## Conditional Queries
|
|
||||||
|
|
||||||
Prefer contract-shaped `queryOptions(...)`.
|
|
||||||
When required input is missing, prefer `input: skipToken` instead of placeholder params or non-null assertions.
|
|
||||||
Use `enabled` only for extra business gating after the input itself is already valid.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { skipToken, useQuery } from '@tanstack/react-query'
|
|
||||||
|
|
||||||
// Disable the query by skipping input construction.
|
|
||||||
function useAccessMode(appId: string | undefined) {
|
|
||||||
return useQuery(consoleQuery.accessControl.appAccessMode.queryOptions({
|
|
||||||
input: appId
|
|
||||||
? { params: { appId } }
|
|
||||||
: skipToken,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Avoid runtime-only guards that bypass type checking.
|
|
||||||
function useBadAccessMode(appId: string | undefined) {
|
|
||||||
return useQuery(consoleQuery.accessControl.appAccessMode.queryOptions({
|
|
||||||
input: { params: { appId: appId! } },
|
|
||||||
enabled: !!appId,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Cache Invalidation
|
|
||||||
|
|
||||||
Bind invalidation in the service-layer mutation definition.
|
|
||||||
Components may add UI feedback in call-site callbacks, but they should not decide which queries to invalidate.
|
|
||||||
|
|
||||||
Use:
|
|
||||||
|
|
||||||
- `.key()` for namespace or prefix invalidation
|
|
||||||
- `.queryKey(...)` only for exact cache reads or writes such as `getQueryData` and `setQueryData`
|
|
||||||
- `queryClient.invalidateQueries(...)` in mutation `onSuccess`
|
|
||||||
|
|
||||||
Do not use deprecated `useInvalid` from `use-base.ts`.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Service layer owns cache invalidation.
|
|
||||||
export const useUpdateAccessMode = () => {
|
|
||||||
const queryClient = useQueryClient()
|
|
||||||
|
|
||||||
return useMutation(consoleQuery.accessControl.updateAccessMode.mutationOptions({
|
|
||||||
onSuccess: () => {
|
|
||||||
queryClient.invalidateQueries({
|
|
||||||
queryKey: consoleQuery.accessControl.appWhitelistSubjects.key(),
|
|
||||||
})
|
|
||||||
},
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Component only adds UI behavior.
|
|
||||||
updateAccessMode({ appId, mode }, {
|
|
||||||
onSuccess: () => toast.success('...'),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Avoid putting invalidation knowledge in the component.
|
|
||||||
mutate({ appId, mode }, {
|
|
||||||
onSuccess: () => {
|
|
||||||
queryClient.invalidateQueries({
|
|
||||||
queryKey: consoleQuery.accessControl.appWhitelistSubjects.key(),
|
|
||||||
})
|
|
||||||
},
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
## Key API Guide
|
|
||||||
|
|
||||||
- `.key(...)`
|
|
||||||
- Use for partial matching operations.
|
|
||||||
- Prefer it for invalidation, refetch, and cancel patterns.
|
|
||||||
- Example: `queryClient.invalidateQueries({ queryKey: consoleQuery.billing.key() })`
|
|
||||||
- `.queryKey(...)`
|
|
||||||
- Use for a specific query's full key.
|
|
||||||
- Prefer it for exact cache addressing and direct reads or writes.
|
|
||||||
- `.mutationKey(...)`
|
|
||||||
- Use for a specific mutation's full key.
|
|
||||||
- Prefer it for mutation defaults registration, mutation-status filtering, and devtools grouping.
|
|
||||||
|
|
||||||
## `mutate` vs `mutateAsync`
|
|
||||||
|
|
||||||
Prefer `mutate` by default.
|
|
||||||
Use `mutateAsync` only when Promise semantics are truly required, such as parallel mutations or sequential steps with result dependencies.
|
|
||||||
|
|
||||||
Rules:
|
|
||||||
|
|
||||||
- Event handlers should usually call `mutate(...)` with `onSuccess` or `onError`.
|
|
||||||
- Every `await mutateAsync(...)` must be wrapped in `try/catch`.
|
|
||||||
- Do not use `mutateAsync` when callbacks already express the flow clearly.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Default case.
|
|
||||||
mutation.mutate(data, {
|
|
||||||
onSuccess: result => router.push(result.url),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Promise semantics are required.
|
|
||||||
try {
|
|
||||||
const order = await createOrder.mutateAsync(orderData)
|
|
||||||
await confirmPayment.mutateAsync({ orderId: order.id, token })
|
|
||||||
router.push(`/orders/${order.id}`)
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
toast.error(error instanceof Error ? error.message : 'Unknown error')
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Legacy Migration
|
|
||||||
|
|
||||||
When touching old code, migrate it toward these rules:
|
|
||||||
|
|
||||||
| Old pattern | New pattern |
|
|
||||||
|---|---|
|
|
||||||
| `useInvalid(key)` in service layer | `queryClient.invalidateQueries(...)` inside mutation `onSuccess` |
|
|
||||||
| component-triggered invalidation after mutation | move invalidation into the service-layer mutation definition |
|
|
||||||
| imperative fetch plus manual invalidation | wrap it in `useMutation(...mutationOptions(...))` |
|
|
||||||
| `await mutateAsync()` without `try/catch` | switch to `mutate(...)` or add `try/catch` |
|
|
||||||
@ -5,7 +5,7 @@ description: Generate Vitest + React Testing Library tests for Dify frontend com
|
|||||||
|
|
||||||
# Dify Frontend Testing Skill
|
# Dify Frontend Testing Skill
|
||||||
|
|
||||||
This skill enables Claude to generate high-quality, comprehensive frontend tests for the Dify project following established conventions and best practices.
|
This skill enables Codex to generate high-quality, comprehensive frontend tests for the Dify project following established conventions and best practices.
|
||||||
|
|
||||||
> **⚠️ Authoritative Source**: This skill is derived from `web/docs/test.md`. Use Vitest mock/timer APIs (`vi.*`).
|
> **⚠️ Authoritative Source**: This skill is derived from `web/docs/test.md`. Use Vitest mock/timer APIs (`vi.*`).
|
||||||
|
|
||||||
@ -24,35 +24,27 @@ Apply this skill when the user:
|
|||||||
**Do NOT apply** when:
|
**Do NOT apply** when:
|
||||||
|
|
||||||
- User is asking about backend/API tests (Python/pytest)
|
- User is asking about backend/API tests (Python/pytest)
|
||||||
- User is asking about E2E tests (Playwright/Cypress)
|
- User is asking about E2E tests (Cucumber + Playwright under `e2e/`)
|
||||||
- User is only asking conceptual questions without code context
|
- User is only asking conceptual questions without code context
|
||||||
|
|
||||||
## Quick Reference
|
## Quick Reference
|
||||||
|
|
||||||
### Tech Stack
|
|
||||||
|
|
||||||
| Tool | Version | Purpose |
|
|
||||||
|------|---------|---------|
|
|
||||||
| Vitest | 4.0.16 | Test runner |
|
|
||||||
| React Testing Library | 16.0 | Component testing |
|
|
||||||
| jsdom | - | Test environment |
|
|
||||||
| nock | 14.0 | HTTP mocking |
|
|
||||||
| TypeScript | 5.x | Type safety |
|
|
||||||
|
|
||||||
### Key Commands
|
### Key Commands
|
||||||
|
|
||||||
|
Run these commands from `web/`. From the repository root, prefix them with `pnpm -C web`.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Run all tests
|
# Run all tests
|
||||||
pnpm test
|
pnpm test
|
||||||
|
|
||||||
# Watch mode
|
# Watch mode
|
||||||
pnpm test:watch
|
pnpm test --watch
|
||||||
|
|
||||||
# Run specific file
|
# Run specific file
|
||||||
pnpm test path/to/file.spec.tsx
|
pnpm test path/to/file.spec.tsx
|
||||||
|
|
||||||
# Generate coverage report
|
# Generate coverage report
|
||||||
pnpm test:coverage
|
pnpm test --coverage
|
||||||
|
|
||||||
# Analyze component complexity
|
# Analyze component complexity
|
||||||
pnpm analyze-component <path>
|
pnpm analyze-component <path>
|
||||||
@ -228,7 +220,10 @@ Every test should clearly separate:
|
|||||||
### 2. Black-Box Testing
|
### 2. Black-Box Testing
|
||||||
|
|
||||||
- Test observable behavior, not implementation details
|
- Test observable behavior, not implementation details
|
||||||
- Use semantic queries (getByRole, getByLabelText)
|
- Use semantic queries (`getByRole` with accessible `name`, `getByLabelText`, `getByPlaceholderText`, `getByText`, and scoped `within(...)`)
|
||||||
|
- Treat `getByTestId` as a last resort. If a control cannot be found by role/name, label, landmark, or dialog scope, fix the component accessibility first instead of adding or relying on `data-testid`.
|
||||||
|
- Remove production `data-testid` attributes when semantic selectors can cover the behavior. Keep them only for non-visual mocked boundaries, editor/browser shims such as Monaco, canvas/chart output, or third-party widgets with no accessible DOM in the test environment.
|
||||||
|
- Do not assert decorative icons by test id. Assert the named control that contains them, or mark decorative icons `aria-hidden`.
|
||||||
- Avoid testing internal state directly
|
- Avoid testing internal state directly
|
||||||
- **Prefer pattern matching over hardcoded strings** in assertions:
|
- **Prefer pattern matching over hardcoded strings** in assertions:
|
||||||
|
|
||||||
|
|||||||
@ -56,7 +56,7 @@ See [Zustand Store Testing](#zustand-store-testing) section for full details.
|
|||||||
|
|
||||||
| Location | Purpose |
|
| Location | Purpose |
|
||||||
|----------|---------|
|
|----------|---------|
|
||||||
| `web/vitest.setup.ts` | Global mocks shared by all tests (`react-i18next`, `next/image`, `zustand`) |
|
| `web/vitest.setup.ts` | Global mocks shared by all tests (`react-i18next`, `zustand`, clipboard, FloatingPortal, Monaco, localStorage`) |
|
||||||
| `web/__mocks__/zustand.ts` | Zustand mock implementation (auto-resets stores after each test) |
|
| `web/__mocks__/zustand.ts` | Zustand mock implementation (auto-resets stores after each test) |
|
||||||
| `web/__mocks__/` | Reusable mock factories shared across multiple test files |
|
| `web/__mocks__/` | Reusable mock factories shared across multiple test files |
|
||||||
| Test file | Test-specific mocks, inline with `vi.mock()` |
|
| Test file | Test-specific mocks, inline with `vi.mock()` |
|
||||||
@ -216,28 +216,21 @@ describe('Component', () => {
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
### 5. HTTP Mocking with Nock
|
### 5. HTTP and `fetch` Mocking
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import nock from 'nock'
|
|
||||||
|
|
||||||
const GITHUB_HOST = 'https://api.github.com'
|
|
||||||
const GITHUB_PATH = '/repos/owner/repo'
|
|
||||||
|
|
||||||
const mockGithubApi = (status: number, body: Record<string, unknown>, delayMs = 0) => {
|
|
||||||
return nock(GITHUB_HOST)
|
|
||||||
.get(GITHUB_PATH)
|
|
||||||
.delay(delayMs)
|
|
||||||
.reply(status, body)
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('GithubComponent', () => {
|
describe('GithubComponent', () => {
|
||||||
afterEach(() => {
|
beforeEach(() => {
|
||||||
nock.cleanAll()
|
vi.clearAllMocks()
|
||||||
})
|
})
|
||||||
|
|
||||||
it('should display repo info', async () => {
|
it('should display repo info', async () => {
|
||||||
mockGithubApi(200, { name: 'dify', stars: 1000 })
|
vi.mocked(globalThis.fetch).mockResolvedValueOnce(
|
||||||
|
new Response(JSON.stringify({ name: 'dify', stars: 1000 }), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
render(<GithubComponent />)
|
render(<GithubComponent />)
|
||||||
|
|
||||||
@ -247,7 +240,12 @@ describe('GithubComponent', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('should handle API error', async () => {
|
it('should handle API error', async () => {
|
||||||
mockGithubApi(500, { message: 'Server error' })
|
vi.mocked(globalThis.fetch).mockResolvedValueOnce(
|
||||||
|
new Response(JSON.stringify({ message: 'Server error' }), {
|
||||||
|
status: 500,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
render(<GithubComponent />)
|
render(<GithubComponent />)
|
||||||
|
|
||||||
@ -258,6 +256,8 @@ describe('GithubComponent', () => {
|
|||||||
})
|
})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Prefer mocking `@/service/*` modules or spying on `global.fetch` / `ky` clients with deterministic responses. Do not introduce an HTTP interception dependency such as `nock` or MSW unless it is already declared in the workspace or adding it is part of the task.
|
||||||
|
|
||||||
### 6. Context Providers
|
### 6. Context Providers
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
@ -332,7 +332,7 @@ const renderWithQueryClient = (ui: React.ReactElement) => {
|
|||||||
1. **Don't mock Zustand store modules** - Use real stores with `setState()`
|
1. **Don't mock Zustand store modules** - Use real stores with `setState()`
|
||||||
1. Don't mock components you can import directly
|
1. Don't mock components you can import directly
|
||||||
1. Don't create overly simplified mocks that miss conditional logic
|
1. Don't create overly simplified mocks that miss conditional logic
|
||||||
1. Don't forget to clean up nock after each test
|
1. Don't leave HTTP mocks or service mock state leaking between tests
|
||||||
1. Don't use `any` types in mocks without necessity
|
1. Don't use `any` types in mocks without necessity
|
||||||
|
|
||||||
### Mock Decision Tree
|
### Mock Decision Tree
|
||||||
|
|||||||
@ -227,12 +227,12 @@ Failing tests compound:
|
|||||||
|
|
||||||
**Fix failures immediately before proceeding.**
|
**Fix failures immediately before proceeding.**
|
||||||
|
|
||||||
## Integration with Claude's Todo Feature
|
## Integration with Codex's Todo Feature
|
||||||
|
|
||||||
When using Claude for multi-file testing:
|
When using Codex for multi-file testing:
|
||||||
|
|
||||||
1. **Ask Claude to create a todo list** before starting
|
1. **Create a todo list** before starting
|
||||||
1. **Request one file at a time** or ensure Claude processes incrementally
|
1. **Process one file at a time**
|
||||||
1. **Verify each test passes** before asking for the next
|
1. **Verify each test passes** before asking for the next
|
||||||
1. **Mark todos complete** as you progress
|
1. **Mark todos complete** as you progress
|
||||||
|
|
||||||
|
|||||||
71
.agents/skills/how-to-write-component/SKILL.md
Normal file
71
.agents/skills/how-to-write-component/SKILL.md
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
name: how-to-write-component
|
||||||
|
description: React/TypeScript component style guide. Use when writing, refactoring, or reviewing React components, especially around props typing, state boundaries, shared local state with Jotai atoms, API types, query/mutation contracts, navigation, memoization, wrappers, and empty-state handling.
|
||||||
|
---
|
||||||
|
|
||||||
|
# How To Write A Component
|
||||||
|
|
||||||
|
Use this as the decision guide for React/TypeScript component structure. Existing code is reference material, not automatic precedent; when it conflicts with these rules, adapt the approach instead of reproducing the violation.
|
||||||
|
|
||||||
|
## Core Defaults
|
||||||
|
|
||||||
|
- Search before adding UI, hooks, helpers, or styling patterns. Reuse existing base components, feature components, hooks, utilities, and design styles when they fit.
|
||||||
|
- Group code by feature workflow, route, or ownership area: components, hooks, local types, query helpers, atoms, constants, and small utilities should live near the code that changes with them.
|
||||||
|
- Promote code to shared only when multiple verticals need the same stable primitive. Otherwise keep it local and compose shared primitives inside the owning feature.
|
||||||
|
- Use Tailwind CSS v4.1+ rules via the `tailwind-css-rules` skill. Prefer v4 utilities, `gap`, `text-size/line-height`, `min-h-dvh`, and avoid deprecated utilities and `@apply`.
|
||||||
|
|
||||||
|
## Ownership
|
||||||
|
|
||||||
|
- Put local state, queries, mutations, handlers, and derived UI data in the lowest component that uses them. Extract a purpose-built owner component only when the logic has no natural home.
|
||||||
|
- Repeated TanStack query calls in sibling components are acceptable when each component independently consumes the data. Do not hoist a query only because it is duplicated; TanStack Query handles deduplication and cache sharing.
|
||||||
|
- Hoist state, queries, or callbacks to a parent only when the parent consumes the data, coordinates shared loading/error/empty UI, needs one consistent snapshot, or owns a workflow spanning children.
|
||||||
|
- Avoid prop drilling. One pass-through layer is acceptable; repeated forwarding means ownership should move down or into feature-scoped Jotai UI state. Keep server/cache state in query and API data flow.
|
||||||
|
- Keep callbacks in a parent only for workflow coordination such as form submission, shared selection, batch behavior, or navigation. Otherwise let the child or row own its action.
|
||||||
|
- Prefer uncontrolled DOM state and CSS variables before adding controlled props.
|
||||||
|
|
||||||
|
## Components, Props, And Types
|
||||||
|
|
||||||
|
- Type component signatures directly; do not use `FC` or `React.FC`.
|
||||||
|
- Prefer `function` for top-level components and module helpers. Use arrow functions for local callbacks, handlers, and lambda-style APIs.
|
||||||
|
- Prefer named exports. Use default exports only where the framework requires them, such as Next.js route files.
|
||||||
|
- Type simple one-off props inline. Use a named `Props` type only when reused, exported, complex, or clearer.
|
||||||
|
- Use API-generated or API-returned types at component boundaries. Keep small UI conversion helpers beside the component that needs them.
|
||||||
|
- Name values by their domain role and backend API contract, and keep that name stable across the call chain, especially IDs like `appInstanceId`. Normalize framework or route params at the boundary.
|
||||||
|
- Keep fallback and invariant checks at the lowest component that already handles that state; callers should pass raw values through instead of duplicating checks.
|
||||||
|
|
||||||
|
## Queries And Mutations
|
||||||
|
|
||||||
|
- Keep `web/contract/*` as the single source of truth for API shape; follow existing domain/router patterns and the `{ params, query?, body? }` input shape.
|
||||||
|
- Consume queries directly with `useQuery(consoleQuery.xxx.queryOptions(...))` or `useQuery(marketplaceQuery.xxx.queryOptions(...))`.
|
||||||
|
- Avoid pass-through hooks and thin `web/service/use-*` wrappers that only rename `queryOptions()` or `mutationOptions()`. Extract a small `queryOptions` helper only when repeated call-site options justify it.
|
||||||
|
- Keep feature hooks for real orchestration, workflow state, or shared domain behavior.
|
||||||
|
- For missing required query input, use `input: skipToken`; use `enabled` only for extra business gating after the input is valid.
|
||||||
|
- Consume mutations directly with `useMutation(consoleQuery.xxx.mutationOptions(...))` or `useMutation(marketplaceQuery.xxx.mutationOptions(...))`; use oRPC clients as `mutationFn` only for custom flows.
|
||||||
|
- Put shared cache behavior in `createTanstackQueryUtils(...experimental_defaults...)`; components may add UI feedback callbacks, but should not own shared invalidation rules.
|
||||||
|
- Do not use deprecated `useInvalid` or `useReset`.
|
||||||
|
- Prefer `mutate(...)`; use `mutateAsync(...)` only when Promise semantics are required, and wrap awaited calls in `try/catch`.
|
||||||
|
|
||||||
|
## Component Boundaries
|
||||||
|
|
||||||
|
- Use the first level below a page or tab to organize independent page sections when it adds real structure. This layer is layout/semantic first, not automatically the data owner.
|
||||||
|
- Split deeper components by the data and state each layer actually needs. Each component should access only necessary data, and ownership should stay at the lowest consumer.
|
||||||
|
- Keep cohesive forms, menu bodies, and one-off helpers local unless they need their own state, reuse, or semantic boundary.
|
||||||
|
- Separate hidden secondary surfaces from the trigger's main flow. For dialogs, dropdowns, popovers, and similar branches, extract a small local component that owns the trigger, open state, and hidden content when it would obscure the parent flow.
|
||||||
|
- Preserve composability by separating behavior ownership from layout ownership. A dropdown action may own its trigger, open state, and menu content; the caller owns placement such as slots, offsets, and alignment.
|
||||||
|
- Avoid unnecessary DOM hierarchy. Do not add wrapper elements unless they provide layout, semantics, accessibility, state ownership, or integration with a library API; prefer fragments or styling an existing element when possible.
|
||||||
|
- Avoid shallow wrappers and prop renaming unless the wrapper adds validation, orchestration, error handling, state ownership, or a real semantic boundary.
|
||||||
|
|
||||||
|
## You Might Not Need An Effect
|
||||||
|
|
||||||
|
- Use Effects only to synchronize with external systems such as browser APIs, non-React widgets, subscriptions, timers, analytics that must run because the component was shown, or imperative DOM integration.
|
||||||
|
- Do not use Effects to transform props or state for rendering. Calculate derived values during render, and use `useMemo` only when the calculation is actually expensive.
|
||||||
|
- Do not use Effects to handle user actions. Put action-specific logic in the event handler where the cause is known.
|
||||||
|
- Do not use Effects to copy one state value into another state value representing the same concept. Pick one source of truth and derive the rest during render.
|
||||||
|
- Do not reset or adjust state from props with an Effect. Prefer a `key` reset, storing a stable ID and deriving the selected object, or guarded same-component render-time adjustment when truly necessary.
|
||||||
|
- Prefer framework data APIs or TanStack Query for data fetching instead of writing request Effects in components.
|
||||||
|
- If an Effect still seems necessary, first name the external system it synchronizes with. If there is no external system, remove the Effect and restructure the state or event flow.
|
||||||
|
|
||||||
|
## Navigation And Performance
|
||||||
|
|
||||||
|
- Prefer `Link` for normal navigation. Use router APIs only for command-flow side effects such as mutation success, guarded redirects, or form submission.
|
||||||
|
- Avoid `memo`, `useMemo`, and `useCallback` unless there is a clear performance reason.
|
||||||
367
.agents/skills/tailwind-css-rules/SKILL.md
Normal file
367
.agents/skills/tailwind-css-rules/SKILL.md
Normal file
@ -0,0 +1,367 @@
|
|||||||
|
---
|
||||||
|
name: tailwind-css-rules
|
||||||
|
description: Tailwind CSS v4.1+ rules and best practices. Use when writing, reviewing, refactoring, or upgrading Tailwind CSS classes and styles, especially v4 utility migrations, layout spacing, typography, responsive variants, dark mode, gradients, CSS variables, and component styling.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Tailwind CSS Rules and Best Practices
|
||||||
|
|
||||||
|
## Core Principles
|
||||||
|
|
||||||
|
- **Always use Tailwind CSS v4.1+** - Ensure the codebase is using the latest version
|
||||||
|
- **Do not use deprecated or removed utilities** - ALWAYS use the replacement
|
||||||
|
- **Never use `@apply`** - Use CSS variables, the `--spacing()` function, or framework components instead
|
||||||
|
- **Check for redundant classes** - Remove any classes that aren't necessary
|
||||||
|
- **Group elements logically** to simplify responsive tweaks later
|
||||||
|
|
||||||
|
## Upgrading to Tailwind CSS v4
|
||||||
|
|
||||||
|
### Before Upgrading
|
||||||
|
|
||||||
|
- **Always read the upgrade documentation first** - Read https://tailwindcss.com/docs/upgrade-guide and https://tailwindcss.com/blog/tailwindcss-v4 before starting an upgrade.
|
||||||
|
- Ensure the git repository is in a clean state before starting
|
||||||
|
|
||||||
|
### Upgrade Process
|
||||||
|
|
||||||
|
1. Run the upgrade command: `npx @tailwindcss/upgrade@latest` for both major and minor updates
|
||||||
|
2. The tool will convert JavaScript config files to the new CSS format
|
||||||
|
3. Review all changes extensively to clean up any false positives
|
||||||
|
4. Test thoroughly across your application
|
||||||
|
|
||||||
|
## Breaking Changes Reference
|
||||||
|
|
||||||
|
### Removed Utilities (NEVER use these in v4)
|
||||||
|
|
||||||
|
| ❌ Deprecated | ✅ Replacement |
|
||||||
|
| ----------------------- | ------------------------------------------------- |
|
||||||
|
| `bg-opacity-*` | Use opacity modifiers like `bg-black/50` |
|
||||||
|
| `text-opacity-*` | Use opacity modifiers like `text-black/50` |
|
||||||
|
| `border-opacity-*` | Use opacity modifiers like `border-black/50` |
|
||||||
|
| `divide-opacity-*` | Use opacity modifiers like `divide-black/50` |
|
||||||
|
| `ring-opacity-*` | Use opacity modifiers like `ring-black/50` |
|
||||||
|
| `placeholder-opacity-*` | Use opacity modifiers like `placeholder-black/50` |
|
||||||
|
| `flex-shrink-*` | `shrink-*` |
|
||||||
|
| `flex-grow-*` | `grow-*` |
|
||||||
|
| `overflow-ellipsis` | `text-ellipsis` |
|
||||||
|
| `decoration-slice` | `box-decoration-slice` |
|
||||||
|
| `decoration-clone` | `box-decoration-clone` |
|
||||||
|
|
||||||
|
### Renamed Utilities
|
||||||
|
|
||||||
|
Use the v4 name when migrating code that still carries Tailwind v3 semantics. Do not blanket-replace existing v4 classes: classes such as `rounded-sm`, `shadow-sm`, `ring-1`, and `ring-2` are valid in this codebase when they intentionally represent the current design scale.
|
||||||
|
|
||||||
|
| ❌ v3 pattern | ✅ v4 pattern |
|
||||||
|
| ------------------- | -------------------------------------------------- |
|
||||||
|
| `bg-gradient-*` | `bg-linear-*` |
|
||||||
|
| old shadow scale | verify against the current Tailwind/design scale |
|
||||||
|
| old blur scale | verify against the current Tailwind/design scale |
|
||||||
|
| old radius scale | use the Dify radius token mapping when applicable |
|
||||||
|
| `outline-none` | `outline-hidden` |
|
||||||
|
| bare `ring` utility | use an explicit ring width such as `ring-1`/`ring-2`/`ring-3` |
|
||||||
|
|
||||||
|
For Figma radius tokens, follow `packages/dify-ui/AGENTS.md`. For example, `--radius/xs` maps to `rounded-sm`; do not rewrite it to `rounded-xs`.
|
||||||
|
|
||||||
|
## Layout and Spacing Rules
|
||||||
|
|
||||||
|
### Flexbox and Grid Spacing
|
||||||
|
|
||||||
|
#### Always use gap utilities for internal spacing
|
||||||
|
|
||||||
|
Gap provides consistent spacing without edge cases (no extra space on last items). It's cleaner and more maintainable than margins on children.
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Don't do this -->
|
||||||
|
<div class="flex">
|
||||||
|
<div class="mr-4">Item 1</div>
|
||||||
|
<div class="mr-4">Item 2</div>
|
||||||
|
<div>Item 3</div>
|
||||||
|
<!-- No margin on last -->
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ✅ Do this instead -->
|
||||||
|
<div class="flex gap-4">
|
||||||
|
<div>Item 1</div>
|
||||||
|
<div>Item 2</div>
|
||||||
|
<div>Item 3</div>
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Gap vs Space utilities
|
||||||
|
|
||||||
|
- **Never use `space-x-*` or `space-y-*` in flex/grid layouts** - always use gap
|
||||||
|
- Space utilities add margins to children and have issues with wrapped items
|
||||||
|
- Gap works correctly with flex-wrap and all flex directions
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Avoid space utilities in flex containers -->
|
||||||
|
<div class="flex flex-wrap space-x-4">
|
||||||
|
<!-- Space utilities break with wrapped items -->
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ✅ Use gap for consistent spacing -->
|
||||||
|
<div class="flex flex-wrap gap-4">
|
||||||
|
<!-- Gap works perfectly with wrapping -->
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
### General Spacing Guidelines
|
||||||
|
|
||||||
|
- **Prefer top and left margins** over bottom and right margins (unless conditionally rendered)
|
||||||
|
- **Use padding on parent containers** instead of bottom margins on the last child
|
||||||
|
- **Always use `min-h-dvh` instead of `min-h-screen`** - `min-h-screen` is buggy on mobile Safari
|
||||||
|
- **Prefer `size-*` utilities** over separate `w-*` and `h-*` when setting equal dimensions
|
||||||
|
- For max-widths, prefer the container scale (e.g., `max-w-2xs` over `max-w-72`)
|
||||||
|
|
||||||
|
## Typography Rules
|
||||||
|
|
||||||
|
### Line Heights
|
||||||
|
|
||||||
|
- **Never use `leading-*` classes** - Always use line height modifiers with text size
|
||||||
|
- **Always use fixed line heights from the spacing scale** - Don't use named values
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Don't do this -->
|
||||||
|
<p class="text-base leading-7">Text with separate line height</p>
|
||||||
|
<p class="text-lg leading-relaxed">Text with named line height</p>
|
||||||
|
|
||||||
|
<!-- ✅ Do this instead -->
|
||||||
|
<p class="text-base/7">Text with line height modifier</p>
|
||||||
|
<p class="text-lg/8">Text with specific line height</p>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Font Size Reference
|
||||||
|
|
||||||
|
Be precise with font sizes - know the actual pixel values:
|
||||||
|
|
||||||
|
- `text-xs` = 12px
|
||||||
|
- `text-sm` = 14px
|
||||||
|
- `text-base` = 16px
|
||||||
|
- `text-lg` = 18px
|
||||||
|
- `text-xl` = 20px
|
||||||
|
|
||||||
|
## Color and Opacity
|
||||||
|
|
||||||
|
### Opacity Modifiers
|
||||||
|
|
||||||
|
**Never use `bg-opacity-*`, `text-opacity-*`, etc.** - use the opacity modifier syntax:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Don't do this -->
|
||||||
|
<div class="bg-red-500 bg-opacity-60">Old opacity syntax</div>
|
||||||
|
|
||||||
|
<!-- ✅ Do this instead -->
|
||||||
|
<div class="bg-red-500/60">Modern opacity syntax</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Responsive Design
|
||||||
|
|
||||||
|
### Breakpoint Optimization
|
||||||
|
|
||||||
|
- **Check for redundant classes across breakpoints**
|
||||||
|
- **Only add breakpoint variants when values change**
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Redundant breakpoint classes -->
|
||||||
|
<div class="px-4 md:px-4 lg:px-4">
|
||||||
|
<!-- md:px-4 and lg:px-4 are redundant -->
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ✅ Efficient breakpoint usage -->
|
||||||
|
<div class="px-4 lg:px-8">
|
||||||
|
<!-- Only specify when value changes -->
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Dark Mode
|
||||||
|
|
||||||
|
### Dark Mode Best Practices
|
||||||
|
|
||||||
|
- Use the plain `dark:` variant pattern
|
||||||
|
- Put light mode styles first, then dark mode styles
|
||||||
|
- Ensure `dark:` variant comes before other variants
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ✅ Correct dark mode pattern -->
|
||||||
|
<div class="bg-white text-black dark:bg-black dark:text-white">
|
||||||
|
<button class="hover:bg-gray-100 dark:hover:bg-gray-800">Click me</button>
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Gradient Utilities
|
||||||
|
|
||||||
|
- **ALWAYS Use `bg-linear-*` instead of `bg-gradient-*` utilities** - The gradient utilities were renamed in v4
|
||||||
|
- Use the new `bg-radial` or `bg-radial-[<position>]` to create radial gradients
|
||||||
|
- Use the new `bg-conic` or `bg-conic-*` to create conic gradients
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ✅ Use the new gradient utilities -->
|
||||||
|
<div class="h-14 bg-linear-to-br from-violet-500 to-fuchsia-500"></div>
|
||||||
|
<div
|
||||||
|
class="size-18 bg-radial-[at_50%_75%] from-sky-200 via-blue-400 to-indigo-900 to-90%"
|
||||||
|
></div>
|
||||||
|
<div
|
||||||
|
class="size-24 bg-conic-180 from-indigo-600 via-indigo-50 to-indigo-600"
|
||||||
|
></div>
|
||||||
|
|
||||||
|
<!-- ❌ Do not use bg-gradient-* utilities -->
|
||||||
|
<div class="h-14 bg-gradient-to-br from-violet-500 to-fuchsia-500"></div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Working with CSS Variables
|
||||||
|
|
||||||
|
### Accessing Theme Values
|
||||||
|
|
||||||
|
Tailwind CSS v4 exposes all theme values as CSS variables:
|
||||||
|
|
||||||
|
```css
|
||||||
|
/* Access colors, and other theme values */
|
||||||
|
.custom-element {
|
||||||
|
background: var(--color-red-500);
|
||||||
|
border-radius: var(--radius-lg);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### The `--spacing()` Function
|
||||||
|
|
||||||
|
Use the dedicated `--spacing()` function for spacing calculations:
|
||||||
|
|
||||||
|
```css
|
||||||
|
.custom-class {
|
||||||
|
margin-top: calc(100vh - --spacing(16));
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Extending theme values
|
||||||
|
|
||||||
|
Use CSS to extend theme values:
|
||||||
|
|
||||||
|
```css
|
||||||
|
@import "tailwindcss";
|
||||||
|
|
||||||
|
@theme {
|
||||||
|
--color-mint-500: oklch(0.72 0.11 178);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
```html
|
||||||
|
<div class="bg-mint-500">
|
||||||
|
<!-- ... -->
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## New v4 Features
|
||||||
|
|
||||||
|
### Container Queries
|
||||||
|
|
||||||
|
Use the `@container` class and size variants:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<article class="@container">
|
||||||
|
<div class="flex flex-col @md:flex-row @lg:gap-8">
|
||||||
|
<img class="w-full @md:w-48" />
|
||||||
|
<div class="mt-4 @md:mt-0">
|
||||||
|
<!-- Content adapts to container size -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</article>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container Query Units
|
||||||
|
|
||||||
|
Use container-based units like `cqw` for responsive sizing:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<div class="@container">
|
||||||
|
<h1 class="text-[50cqw]">Responsive to container width</h1>
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Text Shadows (v4.1)
|
||||||
|
|
||||||
|
Use text-shadow-\* utilities from text-shadow-2xs to text-shadow-lg:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ✅ Text shadow examples -->
|
||||||
|
<h1 class="text-shadow-lg">Large shadow</h1>
|
||||||
|
<p class="text-shadow-sm/50">Small shadow with opacity</p>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Masking (v4.1)
|
||||||
|
|
||||||
|
Use the new composable mask utilities for image and gradient masks:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ✅ Linear gradient masks on specific sides -->
|
||||||
|
<div class="mask-t-from-50%">Top fade</div>
|
||||||
|
<div class="mask-b-from-20% mask-b-to-80%">Bottom gradient</div>
|
||||||
|
<div class="mask-linear-from-white mask-linear-to-black/60">
|
||||||
|
Fade from white to black
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ✅ Radial gradient masks -->
|
||||||
|
<div class="mask-radial-[100%_100%] mask-radial-from-75% mask-radial-at-left">
|
||||||
|
Radial mask
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Component Patterns
|
||||||
|
|
||||||
|
### Avoiding Utility Inheritance
|
||||||
|
|
||||||
|
Don't add utilities to parents that you override in children:
|
||||||
|
|
||||||
|
```html
|
||||||
|
<!-- ❌ Avoid this pattern -->
|
||||||
|
<div class="text-center">
|
||||||
|
<h1>Centered Heading</h1>
|
||||||
|
<div class="text-left">Left-aligned content</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- ✅ Better approach -->
|
||||||
|
<div>
|
||||||
|
<h1 class="text-center">Centered Heading</h1>
|
||||||
|
<div>Left-aligned content</div>
|
||||||
|
</div>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Component Extraction
|
||||||
|
|
||||||
|
- Extract repeated patterns into framework components, not CSS classes
|
||||||
|
- Keep utility classes in templates/JSX
|
||||||
|
- Use data attributes for complex state-based styling
|
||||||
|
|
||||||
|
## CSS Best Practices
|
||||||
|
|
||||||
|
### Nesting Guidelines
|
||||||
|
|
||||||
|
- Use nesting when styling both parent and children
|
||||||
|
- Avoid empty parent selectors
|
||||||
|
|
||||||
|
```css
|
||||||
|
/* ✅ Good nesting - parent has styles */
|
||||||
|
.card {
|
||||||
|
padding: --spacing(4);
|
||||||
|
|
||||||
|
> .card-title {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ❌ Avoid empty parents */
|
||||||
|
ul {
|
||||||
|
> li {
|
||||||
|
/* Parent has no styles */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Pitfalls to Avoid
|
||||||
|
|
||||||
|
1. **Using old opacity utilities** - Always use `/opacity` syntax like `bg-red-500/60`
|
||||||
|
2. **Redundant breakpoint classes** - Only specify changes
|
||||||
|
3. **Space utilities in flex/grid** - Always use gap
|
||||||
|
4. **Leading utilities** - Use line-height modifiers like `text-sm/6`
|
||||||
|
5. **Arbitrary values** - Use the design scale
|
||||||
|
6. **@apply directive** - Use components or CSS variables
|
||||||
|
7. **min-h-screen on mobile** - Use min-h-dvh
|
||||||
|
8. **Separate width/height** - Use size utilities when equal
|
||||||
|
9. **Arbitrary values** - Always use Tailwind's predefined scale whenever possible (e.g., use `ml-4` over `ml-[16px]`)
|
||||||
3
.github/CODEOWNERS
vendored
3
.github/CODEOWNERS
vendored
@ -6,6 +6,9 @@
|
|||||||
|
|
||||||
* @crazywoola @laipz8200 @Yeuoly
|
* @crazywoola @laipz8200 @Yeuoly
|
||||||
|
|
||||||
|
# ESLint suppression file is maintained by autofix.ci pruning.
|
||||||
|
/eslint-suppressions.json
|
||||||
|
|
||||||
# CODEOWNERS file
|
# CODEOWNERS file
|
||||||
/.github/CODEOWNERS @laipz8200 @crazywoola
|
/.github/CODEOWNERS @laipz8200 @crazywoola
|
||||||
|
|
||||||
|
|||||||
2
.github/actions/setup-web/action.yml
vendored
2
.github/actions/setup-web/action.yml
vendored
@ -4,7 +4,7 @@ runs:
|
|||||||
using: composite
|
using: composite
|
||||||
steps:
|
steps:
|
||||||
- name: Setup Vite+
|
- name: Setup Vite+
|
||||||
uses: voidzero-dev/setup-vp@20553a7a7429c429a74894104a2835d7fed28a72 # v1.3.0
|
uses: voidzero-dev/setup-vp@4f5aa3e38c781f1b01e78fb9255527cee8a6efa6 # v1.8.0
|
||||||
with:
|
with:
|
||||||
node-version-file: .nvmrc
|
node-version-file: .nvmrc
|
||||||
cache: true
|
cache: true
|
||||||
|
|||||||
1
.github/labeler.yml
vendored
1
.github/labeler.yml
vendored
@ -6,5 +6,4 @@ web:
|
|||||||
- 'package.json'
|
- 'package.json'
|
||||||
- 'pnpm-lock.yaml'
|
- 'pnpm-lock.yaml'
|
||||||
- 'pnpm-workspace.yaml'
|
- 'pnpm-workspace.yaml'
|
||||||
- '.npmrc'
|
|
||||||
- '.nvmrc'
|
- '.nvmrc'
|
||||||
|
|||||||
2
.github/workflows/api-tests.yml
vendored
2
.github/workflows/api-tests.yml
vendored
@ -99,7 +99,7 @@ jobs:
|
|||||||
- name: Set up dotenvs
|
- name: Set up dotenvs
|
||||||
run: |
|
run: |
|
||||||
cp docker/.env.example docker/.env
|
cp docker/.env.example docker/.env
|
||||||
cp docker/middleware.env.example docker/middleware.env
|
cp docker/envs/middleware.env.example docker/middleware.env
|
||||||
|
|
||||||
- name: Expose Service Ports
|
- name: Expose Service Ports
|
||||||
run: sh .github/workflows/expose_service_ports.sh
|
run: sh .github/workflows/expose_service_ports.sh
|
||||||
|
|||||||
9
.github/workflows/autofix.yml
vendored
9
.github/workflows/autofix.yml
vendored
@ -43,7 +43,6 @@ jobs:
|
|||||||
package.json
|
package.json
|
||||||
pnpm-lock.yaml
|
pnpm-lock.yaml
|
||||||
pnpm-workspace.yaml
|
pnpm-workspace.yaml
|
||||||
.npmrc
|
|
||||||
.nvmrc
|
.nvmrc
|
||||||
- name: Check api inputs
|
- name: Check api inputs
|
||||||
if: github.event_name != 'merge_group'
|
if: github.event_name != 'merge_group'
|
||||||
@ -114,9 +113,15 @@ jobs:
|
|||||||
find . -name "*.py.bak" -type f -delete
|
find . -name "*.py.bak" -type f -delete
|
||||||
|
|
||||||
- name: Setup web environment
|
- name: Setup web environment
|
||||||
if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
|
if: github.event_name != 'merge_group'
|
||||||
uses: ./.github/actions/setup-web
|
uses: ./.github/actions/setup-web
|
||||||
|
|
||||||
|
- name: Generate API docs
|
||||||
|
if: github.event_name != 'merge_group' && steps.api-changes.outputs.any_changed == 'true'
|
||||||
|
run: |
|
||||||
|
cd api
|
||||||
|
uv run dev/generate_swagger_markdown_docs.py --swagger-dir openapi --markdown-dir openapi/markdown
|
||||||
|
|
||||||
- name: ESLint autofix
|
- name: ESLint autofix
|
||||||
if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
|
if: github.event_name != 'merge_group' && steps.web-changes.outputs.any_changed == 'true'
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
8
.github/workflows/build-push.yml
vendored
8
.github/workflows/build-push.yml
vendored
@ -74,7 +74,7 @@ jobs:
|
|||||||
password: ${{ env.DOCKERHUB_TOKEN }}
|
password: ${{ env.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Set up Depot CLI
|
- name: Set up Depot CLI
|
||||||
uses: depot/setup-action@v1
|
uses: depot/setup-action@15c09a5f77a0840ad4bce955686522a257853461 # v1.7.1
|
||||||
|
|
||||||
- name: Extract metadata for Docker
|
- name: Extract metadata for Docker
|
||||||
id: meta
|
id: meta
|
||||||
@ -84,7 +84,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Build Docker image
|
- name: Build Docker image
|
||||||
id: build
|
id: build
|
||||||
uses: depot/build-push-action@v1
|
uses: depot/build-push-action@5f3b3c2e5a00f0093de47f657aeaefcedff27d18 # v1.17.0
|
||||||
with:
|
with:
|
||||||
project: ${{ vars.DEPOT_PROJECT_ID }}
|
project: ${{ vars.DEPOT_PROJECT_ID }}
|
||||||
context: ${{ matrix.build_context }}
|
context: ${{ matrix.build_context }}
|
||||||
@ -124,10 +124,10 @@ jobs:
|
|||||||
file: "web/Dockerfile"
|
file: "web/Dockerfile"
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
|
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||||
|
|
||||||
- name: Validate Docker image
|
- name: Validate Docker image
|
||||||
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
|
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
|
||||||
with:
|
with:
|
||||||
push: false
|
push: false
|
||||||
context: ${{ matrix.build_context }}
|
context: ${{ matrix.build_context }}
|
||||||
|
|||||||
4
.github/workflows/db-migration-test.yml
vendored
4
.github/workflows/db-migration-test.yml
vendored
@ -37,7 +37,7 @@ jobs:
|
|||||||
- name: Prepare middleware env
|
- name: Prepare middleware env
|
||||||
run: |
|
run: |
|
||||||
cd docker
|
cd docker
|
||||||
cp middleware.env.example middleware.env
|
cp envs/middleware.env.example middleware.env
|
||||||
|
|
||||||
- name: Set up Middlewares
|
- name: Set up Middlewares
|
||||||
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
uses: hoverkraft-tech/compose-action@d2bee4f07e8ca410d6b196d00f90c12e7d48c33a # v2.6.0
|
||||||
@ -87,7 +87,7 @@ jobs:
|
|||||||
- name: Prepare middleware env for MySQL
|
- name: Prepare middleware env for MySQL
|
||||||
run: |
|
run: |
|
||||||
cd docker
|
cd docker
|
||||||
cp middleware.env.example middleware.env
|
cp envs/middleware.env.example middleware.env
|
||||||
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
|
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
|
||||||
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
|
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
|
||||||
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
|
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
|
||||||
|
|||||||
8
.github/workflows/docker-build.yml
vendored
8
.github/workflows/docker-build.yml
vendored
@ -44,10 +44,10 @@ jobs:
|
|||||||
file: "web/Dockerfile"
|
file: "web/Dockerfile"
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Depot CLI
|
- name: Set up Depot CLI
|
||||||
uses: depot/setup-action@v1
|
uses: depot/setup-action@15c09a5f77a0840ad4bce955686522a257853461 # v1.7.1
|
||||||
|
|
||||||
- name: Build Docker Image
|
- name: Build Docker Image
|
||||||
uses: depot/build-push-action@v1
|
uses: depot/build-push-action@5f3b3c2e5a00f0093de47f657aeaefcedff27d18 # v1.17.0
|
||||||
with:
|
with:
|
||||||
project: ${{ vars.DEPOT_PROJECT_ID }}
|
project: ${{ vars.DEPOT_PROJECT_ID }}
|
||||||
push: false
|
push: false
|
||||||
@ -71,10 +71,10 @@ jobs:
|
|||||||
file: "web/Dockerfile"
|
file: "web/Dockerfile"
|
||||||
steps:
|
steps:
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@98e3b2c9eab4f4f98a95c0c0a3ea5e5e672fd2a8 # v3.10.0
|
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
|
||||||
|
|
||||||
- name: Build Docker Image
|
- name: Build Docker Image
|
||||||
uses: docker/build-push-action@5cd29d66b4a8d8e6f4d5dfe2e9329f0b1d446289 # v6.18.0
|
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
|
||||||
with:
|
with:
|
||||||
push: false
|
push: false
|
||||||
context: ${{ matrix.context }}
|
context: ${{ matrix.context }}
|
||||||
|
|||||||
2
.github/workflows/labeler.yml
vendored
2
.github/workflows/labeler.yml
vendored
@ -9,6 +9,6 @@ jobs:
|
|||||||
pull-requests: write
|
pull-requests: write
|
||||||
runs-on: depot-ubuntu-24.04
|
runs-on: depot-ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
- uses: actions/labeler@f27b608878404679385c85cfa523b85ccb86e213 # v6.1.0
|
||||||
with:
|
with:
|
||||||
sync-labels: true
|
sync-labels: true
|
||||||
|
|||||||
10
.github/workflows/main-ci.yml
vendored
10
.github/workflows/main-ci.yml
vendored
@ -57,7 +57,7 @@ jobs:
|
|||||||
- '.github/workflows/api-tests.yml'
|
- '.github/workflows/api-tests.yml'
|
||||||
- '.github/workflows/expose_service_ports.sh'
|
- '.github/workflows/expose_service_ports.sh'
|
||||||
- 'docker/.env.example'
|
- 'docker/.env.example'
|
||||||
- 'docker/middleware.env.example'
|
- 'docker/envs/middleware.env.example'
|
||||||
- 'docker/docker-compose.middleware.yaml'
|
- 'docker/docker-compose.middleware.yaml'
|
||||||
- 'docker/docker-compose-template.yaml'
|
- 'docker/docker-compose-template.yaml'
|
||||||
- 'docker/generate_docker_compose'
|
- 'docker/generate_docker_compose'
|
||||||
@ -69,7 +69,6 @@ jobs:
|
|||||||
- 'package.json'
|
- 'package.json'
|
||||||
- 'pnpm-lock.yaml'
|
- 'pnpm-lock.yaml'
|
||||||
- 'pnpm-workspace.yaml'
|
- 'pnpm-workspace.yaml'
|
||||||
- '.npmrc'
|
|
||||||
- '.nvmrc'
|
- '.nvmrc'
|
||||||
- '.github/workflows/web-tests.yml'
|
- '.github/workflows/web-tests.yml'
|
||||||
- '.github/actions/setup-web/**'
|
- '.github/actions/setup-web/**'
|
||||||
@ -83,10 +82,9 @@ jobs:
|
|||||||
- 'package.json'
|
- 'package.json'
|
||||||
- 'pnpm-lock.yaml'
|
- 'pnpm-lock.yaml'
|
||||||
- 'pnpm-workspace.yaml'
|
- 'pnpm-workspace.yaml'
|
||||||
- '.npmrc'
|
|
||||||
- '.nvmrc'
|
- '.nvmrc'
|
||||||
- 'docker/docker-compose.middleware.yaml'
|
- 'docker/docker-compose.middleware.yaml'
|
||||||
- 'docker/middleware.env.example'
|
- 'docker/envs/middleware.env.example'
|
||||||
- '.github/workflows/web-e2e.yml'
|
- '.github/workflows/web-e2e.yml'
|
||||||
- '.github/actions/setup-web/**'
|
- '.github/actions/setup-web/**'
|
||||||
vdb:
|
vdb:
|
||||||
@ -96,7 +94,7 @@ jobs:
|
|||||||
- '.github/workflows/vdb-tests.yml'
|
- '.github/workflows/vdb-tests.yml'
|
||||||
- '.github/workflows/expose_service_ports.sh'
|
- '.github/workflows/expose_service_ports.sh'
|
||||||
- 'docker/.env.example'
|
- 'docker/.env.example'
|
||||||
- 'docker/middleware.env.example'
|
- 'docker/envs/middleware.env.example'
|
||||||
- 'docker/docker-compose.yaml'
|
- 'docker/docker-compose.yaml'
|
||||||
- 'docker/docker-compose-template.yaml'
|
- 'docker/docker-compose-template.yaml'
|
||||||
- 'docker/generate_docker_compose'
|
- 'docker/generate_docker_compose'
|
||||||
@ -118,7 +116,7 @@ jobs:
|
|||||||
- '.github/workflows/db-migration-test.yml'
|
- '.github/workflows/db-migration-test.yml'
|
||||||
- '.github/workflows/expose_service_ports.sh'
|
- '.github/workflows/expose_service_ports.sh'
|
||||||
- 'docker/.env.example'
|
- 'docker/.env.example'
|
||||||
- 'docker/middleware.env.example'
|
- 'docker/envs/middleware.env.example'
|
||||||
- 'docker/docker-compose.middleware.yaml'
|
- 'docker/docker-compose.middleware.yaml'
|
||||||
- 'docker/docker-compose-template.yaml'
|
- 'docker/docker-compose-template.yaml'
|
||||||
- 'docker/generate_docker_compose'
|
- 'docker/generate_docker_compose'
|
||||||
|
|||||||
22
.github/workflows/pyrefly-diff-comment.yml
vendored
22
.github/workflows/pyrefly-diff-comment.yml
vendored
@ -77,10 +77,28 @@ jobs:
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (diff.trim()) {
|
if (diff.trim()) {
|
||||||
await github.rest.issues.createComment({
|
const body = '### Pyrefly Diff\n<details>\n<summary>base → PR</summary>\n\n```diff\n' + diff + '\n```\n</details>';
|
||||||
|
const marker = '### Pyrefly Diff';
|
||||||
|
const { data: comments } = await github.rest.issues.listComments({
|
||||||
issue_number: prNumber,
|
issue_number: prNumber,
|
||||||
owner: context.repo.owner,
|
owner: context.repo.owner,
|
||||||
repo: context.repo.repo,
|
repo: context.repo.repo,
|
||||||
body: '### Pyrefly Diff\n<details>\n<summary>base → PR</summary>\n\n```diff\n' + diff + '\n```\n</details>',
|
|
||||||
});
|
});
|
||||||
|
const existing = comments.find((comment) => comment.body.startsWith(marker));
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
comment_id: existing.id,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
issue_number: prNumber,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
21
.github/workflows/pyrefly-diff.yml
vendored
21
.github/workflows/pyrefly-diff.yml
vendored
@ -103,9 +103,26 @@ jobs:
|
|||||||
].join('\n')
|
].join('\n')
|
||||||
: '### Pyrefly Diff\nNo changes detected.';
|
: '### Pyrefly Diff\nNo changes detected.';
|
||||||
|
|
||||||
await github.rest.issues.createComment({
|
const marker = '### Pyrefly Diff';
|
||||||
|
const { data: comments } = await github.rest.issues.listComments({
|
||||||
issue_number: prNumber,
|
issue_number: prNumber,
|
||||||
owner: context.repo.owner,
|
owner: context.repo.owner,
|
||||||
repo: context.repo.repo,
|
repo: context.repo.repo,
|
||||||
body,
|
|
||||||
});
|
});
|
||||||
|
const existing = comments.find((comment) => comment.body.startsWith(marker));
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
await github.rest.issues.updateComment({
|
||||||
|
comment_id: existing.id,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await github.rest.issues.createComment({
|
||||||
|
issue_number: prNumber,
|
||||||
|
owner: context.repo.owner,
|
||||||
|
repo: context.repo.repo,
|
||||||
|
body,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|||||||
1
.github/workflows/style.yml
vendored
1
.github/workflows/style.yml
vendored
@ -83,7 +83,6 @@ jobs:
|
|||||||
package.json
|
package.json
|
||||||
pnpm-lock.yaml
|
pnpm-lock.yaml
|
||||||
pnpm-workspace.yaml
|
pnpm-workspace.yaml
|
||||||
.npmrc
|
|
||||||
.nvmrc
|
.nvmrc
|
||||||
.github/workflows/style.yml
|
.github/workflows/style.yml
|
||||||
.github/actions/setup-web/**
|
.github/actions/setup-web/**
|
||||||
|
|||||||
1
.github/workflows/tool-test-sdks.yaml
vendored
1
.github/workflows/tool-test-sdks.yaml
vendored
@ -9,7 +9,6 @@ on:
|
|||||||
- package.json
|
- package.json
|
||||||
- pnpm-lock.yaml
|
- pnpm-lock.yaml
|
||||||
- pnpm-workspace.yaml
|
- pnpm-workspace.yaml
|
||||||
- .npmrc
|
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: sdk-tests-${{ github.head_ref || github.run_id }}
|
group: sdk-tests-${{ github.head_ref || github.run_id }}
|
||||||
|
|||||||
2
.github/workflows/translate-i18n-claude.yml
vendored
2
.github/workflows/translate-i18n-claude.yml
vendored
@ -158,7 +158,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Run Claude Code for Translation Sync
|
- name: Run Claude Code for Translation Sync
|
||||||
if: steps.context.outputs.CHANGED_FILES != ''
|
if: steps.context.outputs.CHANGED_FILES != ''
|
||||||
uses: anthropics/claude-code-action@567fe954a4527e81f132d87d1bdbcc94f7737434 # v1.0.107
|
uses: anthropics/claude-code-action@476e359e6203e73dad705c8b322e333fabbd7416 # v1.0.119
|
||||||
with:
|
with:
|
||||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
2
.github/workflows/vdb-tests-full.yml
vendored
2
.github/workflows/vdb-tests-full.yml
vendored
@ -51,7 +51,7 @@ jobs:
|
|||||||
- name: Set up dotenvs
|
- name: Set up dotenvs
|
||||||
run: |
|
run: |
|
||||||
cp docker/.env.example docker/.env
|
cp docker/.env.example docker/.env
|
||||||
cp docker/middleware.env.example docker/middleware.env
|
cp docker/envs/middleware.env.example docker/middleware.env
|
||||||
|
|
||||||
- name: Expose Service Ports
|
- name: Expose Service Ports
|
||||||
run: sh .github/workflows/expose_service_ports.sh
|
run: sh .github/workflows/expose_service_ports.sh
|
||||||
|
|||||||
2
.github/workflows/vdb-tests.yml
vendored
2
.github/workflows/vdb-tests.yml
vendored
@ -48,7 +48,7 @@ jobs:
|
|||||||
- name: Set up dotenvs
|
- name: Set up dotenvs
|
||||||
run: |
|
run: |
|
||||||
cp docker/.env.example docker/.env
|
cp docker/.env.example docker/.env
|
||||||
cp docker/middleware.env.example docker/middleware.env
|
cp docker/envs/middleware.env.example docker/middleware.env
|
||||||
|
|
||||||
- name: Expose Service Ports
|
- name: Expose Service Ports
|
||||||
run: sh .github/workflows/expose_service_ports.sh
|
run: sh .github/workflows/expose_service_ports.sh
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@ -219,6 +219,9 @@ node_modules
|
|||||||
# plugin migrate
|
# plugin migrate
|
||||||
plugins.jsonl
|
plugins.jsonl
|
||||||
|
|
||||||
|
# generated API OpenAPI specs
|
||||||
|
packages/contracts/openapi/
|
||||||
|
|
||||||
# mise
|
# mise
|
||||||
mise.toml
|
mise.toml
|
||||||
|
|
||||||
|
|||||||
27
Makefile
27
Makefile
@ -3,6 +3,10 @@ DOCKER_REGISTRY=langgenius
|
|||||||
WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
|
WEB_IMAGE=$(DOCKER_REGISTRY)/dify-web
|
||||||
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
|
API_IMAGE=$(DOCKER_REGISTRY)/dify-api
|
||||||
VERSION=latest
|
VERSION=latest
|
||||||
|
DOCKER_DIR=docker
|
||||||
|
DOCKER_MIDDLEWARE_ENV=$(DOCKER_DIR)/middleware.env
|
||||||
|
DOCKER_MIDDLEWARE_ENV_EXAMPLE=$(DOCKER_DIR)/envs/middleware.env.example
|
||||||
|
DOCKER_MIDDLEWARE_PROJECT=dify-middlewares-dev
|
||||||
|
|
||||||
# Default target - show help
|
# Default target - show help
|
||||||
.DEFAULT_GOAL := help
|
.DEFAULT_GOAL := help
|
||||||
@ -17,8 +21,13 @@ dev-setup: prepare-docker prepare-web prepare-api
|
|||||||
# Step 1: Prepare Docker middleware
|
# Step 1: Prepare Docker middleware
|
||||||
prepare-docker:
|
prepare-docker:
|
||||||
@echo "🐳 Setting up Docker middleware..."
|
@echo "🐳 Setting up Docker middleware..."
|
||||||
@cp -n docker/middleware.env.example docker/middleware.env 2>/dev/null || echo "Docker middleware.env already exists"
|
@if [ ! -f "$(DOCKER_MIDDLEWARE_ENV)" ]; then \
|
||||||
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev up -d
|
cp "$(DOCKER_MIDDLEWARE_ENV_EXAMPLE)" "$(DOCKER_MIDDLEWARE_ENV)"; \
|
||||||
|
echo "Docker middleware.env created"; \
|
||||||
|
else \
|
||||||
|
echo "Docker middleware.env already exists"; \
|
||||||
|
fi
|
||||||
|
@cd $(DOCKER_DIR) && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p $(DOCKER_MIDDLEWARE_PROJECT) up -d
|
||||||
@echo "✅ Docker middleware started"
|
@echo "✅ Docker middleware started"
|
||||||
|
|
||||||
# Step 2: Prepare web environment
|
# Step 2: Prepare web environment
|
||||||
@ -39,12 +48,18 @@ prepare-api:
|
|||||||
# Clean dev environment
|
# Clean dev environment
|
||||||
dev-clean:
|
dev-clean:
|
||||||
@echo "⚠️ Stopping Docker containers..."
|
@echo "⚠️ Stopping Docker containers..."
|
||||||
@cd docker && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p dify-middlewares-dev down
|
@if [ -f "$(DOCKER_MIDDLEWARE_ENV)" ]; then \
|
||||||
|
cd $(DOCKER_DIR) && docker compose -f docker-compose.middleware.yaml --env-file middleware.env -p $(DOCKER_MIDDLEWARE_PROJECT) down; \
|
||||||
|
else \
|
||||||
|
echo "Docker middleware.env does not exist, skipping compose down"; \
|
||||||
|
fi
|
||||||
@echo "🗑️ Removing volumes..."
|
@echo "🗑️ Removing volumes..."
|
||||||
@rm -rf docker/volumes/db
|
@rm -rf docker/volumes/db
|
||||||
|
@rm -rf docker/volumes/mysql
|
||||||
@rm -rf docker/volumes/redis
|
@rm -rf docker/volumes/redis
|
||||||
@rm -rf docker/volumes/plugin_daemon
|
@rm -rf docker/volumes/plugin_daemon
|
||||||
@rm -rf docker/volumes/weaviate
|
@rm -rf docker/volumes/weaviate
|
||||||
|
@rm -rf docker/volumes/sandbox/dependencies
|
||||||
@rm -rf api/storage
|
@rm -rf api/storage
|
||||||
@echo "✅ Cleanup complete"
|
@echo "✅ Cleanup complete"
|
||||||
|
|
||||||
@ -71,13 +86,13 @@ type-check:
|
|||||||
@echo "📝 Running type checks (basedpyright + pyrefly + mypy)..."
|
@echo "📝 Running type checks (basedpyright + pyrefly + mypy)..."
|
||||||
@./dev/basedpyright-check $(PATH_TO_CHECK)
|
@./dev/basedpyright-check $(PATH_TO_CHECK)
|
||||||
@./dev/pyrefly-check-local
|
@./dev/pyrefly-check-local
|
||||||
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
|
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --exclude 'dev/generate_swagger_specs.py' --check-untyped-defs --disable-error-code=import-untyped .
|
||||||
@echo "✅ Type checks complete"
|
@echo "✅ Type checks complete"
|
||||||
|
|
||||||
type-check-core:
|
type-check-core:
|
||||||
@echo "📝 Running core type checks (basedpyright + mypy)..."
|
@echo "📝 Running core type checks (basedpyright + mypy)..."
|
||||||
@./dev/basedpyright-check $(PATH_TO_CHECK)
|
@./dev/basedpyright-check $(PATH_TO_CHECK)
|
||||||
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --check-untyped-defs --disable-error-code=import-untyped .
|
@uv --directory api run mypy --exclude-gitignore --exclude 'tests/' --exclude 'migrations/' --exclude 'dev/generate_swagger_specs.py' --exclude 'dev/generate_fastopenapi_specs.py' --check-untyped-defs --disable-error-code=import-untyped .
|
||||||
@echo "✅ Core type checks complete"
|
@echo "✅ Core type checks complete"
|
||||||
|
|
||||||
test:
|
test:
|
||||||
@ -132,7 +147,7 @@ help:
|
|||||||
@echo " make prepare-docker - Set up Docker middleware"
|
@echo " make prepare-docker - Set up Docker middleware"
|
||||||
@echo " make prepare-web - Set up web environment"
|
@echo " make prepare-web - Set up web environment"
|
||||||
@echo " make prepare-api - Set up API environment"
|
@echo " make prepare-api - Set up API environment"
|
||||||
@echo " make dev-clean - Stop Docker middleware containers"
|
@echo " make dev-clean - Stop Docker middleware containers and remove dev data"
|
||||||
@echo ""
|
@echo ""
|
||||||
@echo "Backend Code Quality:"
|
@echo "Backend Code Quality:"
|
||||||
@echo " make format - Format code with ruff"
|
@echo " make format - Format code with ruff"
|
||||||
|
|||||||
@ -137,7 +137,7 @@ Star Dify on GitHub and be instantly notified of new releases.
|
|||||||
|
|
||||||
### Custom configurations
|
### Custom configurations
|
||||||
|
|
||||||
If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
|
If you need to customize the configuration, edit `docker/.env`. The essential startup defaults live in [`docker/.env.example`](docker/.env.example), and optional advanced variables are split under `docker/envs/` by theme. After making any changes, re-run `docker compose up -d` from the `docker` directory. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
|
||||||
|
|
||||||
### Metrics Monitoring with Grafana
|
### Metrics Monitoring with Grafana
|
||||||
|
|
||||||
|
|||||||
@ -34,7 +34,7 @@ TRIGGER_URL=http://localhost:5001
|
|||||||
FILES_ACCESS_TIMEOUT=300
|
FILES_ACCESS_TIMEOUT=300
|
||||||
|
|
||||||
# Collaboration mode toggle
|
# Collaboration mode toggle
|
||||||
ENABLE_COLLABORATION_MODE=false
|
ENABLE_COLLABORATION_MODE=true
|
||||||
|
|
||||||
# Access token expiration time in minutes
|
# Access token expiration time in minutes
|
||||||
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
ACCESS_TOKEN_EXPIRE_MINUTES=60
|
||||||
@ -88,6 +88,10 @@ REDIS_HEALTH_CHECK_INTERVAL=30
|
|||||||
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
|
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
|
||||||
CELERY_BACKEND=redis
|
CELERY_BACKEND=redis
|
||||||
|
|
||||||
|
# Ops trace retry configuration
|
||||||
|
OPS_TRACE_RETRYABLE_DISPATCH_MAX_RETRIES=60
|
||||||
|
OPS_TRACE_RETRYABLE_DISPATCH_DELAY_SECONDS=5
|
||||||
|
|
||||||
# Database configuration
|
# Database configuration
|
||||||
DB_TYPE=postgresql
|
DB_TYPE=postgresql
|
||||||
DB_USERNAME=postgres
|
DB_USERNAME=postgres
|
||||||
@ -98,6 +102,8 @@ DB_DATABASE=dify
|
|||||||
|
|
||||||
SQLALCHEMY_POOL_PRE_PING=true
|
SQLALCHEMY_POOL_PRE_PING=true
|
||||||
SQLALCHEMY_POOL_TIMEOUT=30
|
SQLALCHEMY_POOL_TIMEOUT=30
|
||||||
|
# Connection pool reset behavior on return
|
||||||
|
SQLALCHEMY_POOL_RESET_ON_RETURN=rollback
|
||||||
|
|
||||||
# Storage configuration
|
# Storage configuration
|
||||||
# use for store upload files, private keys...
|
# use for store upload files, private keys...
|
||||||
@ -381,7 +387,7 @@ VIKINGDB_ACCESS_KEY=your-ak
|
|||||||
VIKINGDB_SECRET_KEY=your-sk
|
VIKINGDB_SECRET_KEY=your-sk
|
||||||
VIKINGDB_REGION=cn-shanghai
|
VIKINGDB_REGION=cn-shanghai
|
||||||
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
|
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
|
||||||
VIKINGDB_SCHEMA=http
|
VIKINGDB_SCHEME=http
|
||||||
VIKINGDB_CONNECTION_TIMEOUT=30
|
VIKINGDB_CONNECTION_TIMEOUT=30
|
||||||
VIKINGDB_SOCKET_TIMEOUT=30
|
VIKINGDB_SOCKET_TIMEOUT=30
|
||||||
|
|
||||||
@ -432,8 +438,6 @@ UPLOAD_FILE_EXTENSION_BLACKLIST=
|
|||||||
|
|
||||||
# Model configuration
|
# Model configuration
|
||||||
MULTIMODAL_SEND_FORMAT=base64
|
MULTIMODAL_SEND_FORMAT=base64
|
||||||
PROMPT_GENERATION_MAX_TOKENS=512
|
|
||||||
CODE_GENERATION_MAX_TOKENS=1024
|
|
||||||
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
|
||||||
|
|
||||||
# Mail configuration, support: resend, smtp, sendgrid
|
# Mail configuration, support: resend, smtp, sendgrid
|
||||||
|
|||||||
@ -193,6 +193,10 @@ Before opening a PR / submitting:
|
|||||||
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
|
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
|
||||||
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
|
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
|
||||||
- Document non-obvious behaviour with concise docstrings and comments.
|
- Document non-obvious behaviour with concise docstrings and comments.
|
||||||
|
- For Flask-RESTX controller request, query, and response schemas, follow `controllers/API_SCHEMA_GUIDE.md`.
|
||||||
|
In short: use Pydantic models, document GET query params with `query_params_from_model(...)`, register response
|
||||||
|
DTOs with `register_response_schema_models(...)`, serialize with `ResponseModel.model_validate(...).model_dump(...)`,
|
||||||
|
and avoid adding new legacy `ns.model(...)`, `@marshal_with(...)`, or GET `@ns.expect(...)` patterns.
|
||||||
|
|
||||||
### Miscellaneous
|
### Miscellaneous
|
||||||
|
|
||||||
|
|||||||
@ -181,7 +181,6 @@ def initialize_extensions(app: DifyApp):
|
|||||||
ext_import_modules,
|
ext_import_modules,
|
||||||
ext_orjson,
|
ext_orjson,
|
||||||
ext_forward_refs,
|
ext_forward_refs,
|
||||||
ext_set_secretkey,
|
|
||||||
ext_compress,
|
ext_compress,
|
||||||
ext_code_based_extension,
|
ext_code_based_extension,
|
||||||
ext_database,
|
ext_database,
|
||||||
@ -189,6 +188,7 @@ def initialize_extensions(app: DifyApp):
|
|||||||
ext_migrate,
|
ext_migrate,
|
||||||
ext_redis,
|
ext_redis,
|
||||||
ext_storage,
|
ext_storage,
|
||||||
|
ext_set_secretkey,
|
||||||
ext_logstore, # Initialize logstore after storage, before celery
|
ext_logstore, # Initialize logstore after storage, before celery
|
||||||
ext_celery,
|
ext_celery,
|
||||||
ext_login,
|
ext_login,
|
||||||
|
|||||||
@ -113,8 +113,18 @@ def create_tenant(email: str, language: str | None = None, name: str | None = No
|
|||||||
# Validates name encoding for non-Latin characters.
|
# Validates name encoding for non-Latin characters.
|
||||||
name = name.strip().encode("utf-8").decode("utf-8") if name else None
|
name = name.strip().encode("utf-8").decode("utf-8") if name else None
|
||||||
|
|
||||||
# generate random password
|
# Generate a random password that satisfies the password policy.
|
||||||
new_password = secrets.token_urlsafe(16)
|
# The iteration limit guards against infinite loops caused by unexpected bugs in valid_password.
|
||||||
|
for _ in range(100):
|
||||||
|
new_password = secrets.token_urlsafe(16)
|
||||||
|
try:
|
||||||
|
valid_password(new_password)
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
click.echo(click.style("Failed to generate a valid password. Please try again.", fg="red"))
|
||||||
|
return
|
||||||
|
|
||||||
# register account
|
# register account
|
||||||
account = RegisterService.register(
|
account = RegisterService.register(
|
||||||
|
|||||||
@ -23,9 +23,9 @@ class SecurityConfig(BaseSettings):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
SECRET_KEY: str = Field(
|
SECRET_KEY: str = Field(
|
||||||
description="Secret key for secure session cookie signing."
|
description="Secret key for secure session cookie signing. "
|
||||||
"Make sure you are changing this key for your deployment with a strong key."
|
"Leave empty to let Dify generate a persistent key in the storage directory, "
|
||||||
"Generate a strong key using `openssl rand -base64 42` or set via the `SECRET_KEY` environment variable.",
|
"or set a strong value via the `SECRET_KEY` environment variable.",
|
||||||
default="",
|
default="",
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -1137,6 +1137,18 @@ class MultiModalTransferConfig(BaseSettings):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class OpsTraceConfig(BaseSettings):
|
||||||
|
OPS_TRACE_RETRYABLE_DISPATCH_MAX_RETRIES: PositiveInt = Field(
|
||||||
|
description="Maximum retry attempts for transient ops trace provider dispatch failures.",
|
||||||
|
default=60,
|
||||||
|
)
|
||||||
|
|
||||||
|
OPS_TRACE_RETRYABLE_DISPATCH_DELAY_SECONDS: PositiveInt = Field(
|
||||||
|
description="Delay in seconds between transient ops trace provider dispatch retry attempts.",
|
||||||
|
default=5,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class CeleryBeatConfig(BaseSettings):
|
class CeleryBeatConfig(BaseSettings):
|
||||||
CELERY_BEAT_SCHEDULER_TIME: int = Field(
|
CELERY_BEAT_SCHEDULER_TIME: int = Field(
|
||||||
description="Interval in days for Celery Beat scheduler execution, default to 1 day",
|
description="Interval in days for Celery Beat scheduler execution, default to 1 day",
|
||||||
@ -1298,7 +1310,7 @@ class PositionConfig(BaseSettings):
|
|||||||
class CollaborationConfig(BaseSettings):
|
class CollaborationConfig(BaseSettings):
|
||||||
ENABLE_COLLABORATION_MODE: bool = Field(
|
ENABLE_COLLABORATION_MODE: bool = Field(
|
||||||
description="Whether to enable collaboration mode features across the workspace",
|
description="Whether to enable collaboration mode features across the workspace",
|
||||||
default=False,
|
default=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -1417,6 +1429,7 @@ class FeatureConfig(
|
|||||||
ModelLoadBalanceConfig,
|
ModelLoadBalanceConfig,
|
||||||
ModerationConfig,
|
ModerationConfig,
|
||||||
MultiModalTransferConfig,
|
MultiModalTransferConfig,
|
||||||
|
OpsTraceConfig,
|
||||||
PositionConfig,
|
PositionConfig,
|
||||||
RagEtlConfig,
|
RagEtlConfig,
|
||||||
RepositoryConfig,
|
RepositoryConfig,
|
||||||
|
|||||||
@ -114,7 +114,7 @@ class SQLAlchemyEngineOptionsDict(TypedDict):
|
|||||||
pool_pre_ping: bool
|
pool_pre_ping: bool
|
||||||
connect_args: dict[str, str]
|
connect_args: dict[str, str]
|
||||||
pool_use_lifo: bool
|
pool_use_lifo: bool
|
||||||
pool_reset_on_return: None
|
pool_reset_on_return: Literal["commit", "rollback", None]
|
||||||
pool_timeout: int
|
pool_timeout: int
|
||||||
|
|
||||||
|
|
||||||
@ -223,6 +223,11 @@ class DatabaseConfig(BaseSettings):
|
|||||||
default=30,
|
default=30,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
SQLALCHEMY_POOL_RESET_ON_RETURN: Literal["commit", "rollback", None] = Field(
|
||||||
|
description="Connection pool reset behavior on return. Options: 'commit', 'rollback', or None",
|
||||||
|
default="rollback",
|
||||||
|
)
|
||||||
|
|
||||||
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
|
RETRIEVAL_SERVICE_EXECUTORS: NonNegativeInt = Field(
|
||||||
description="Number of processes for the retrieval service, default to CPU cores.",
|
description="Number of processes for the retrieval service, default to CPU cores.",
|
||||||
default=os.cpu_count() or 1,
|
default=os.cpu_count() or 1,
|
||||||
@ -252,7 +257,7 @@ class DatabaseConfig(BaseSettings):
|
|||||||
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
|
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
|
||||||
"connect_args": connect_args,
|
"connect_args": connect_args,
|
||||||
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
|
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
|
||||||
"pool_reset_on_return": None,
|
"pool_reset_on_return": self.SQLALCHEMY_POOL_RESET_ON_RETURN,
|
||||||
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
|
"pool_timeout": self.SQLALCHEMY_POOL_TIMEOUT,
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
|
|||||||
38
api/configs/secret_key.py
Normal file
38
api/configs/secret_key.py
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
"""SECRET_KEY persistence helpers for runtime setup."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import secrets
|
||||||
|
|
||||||
|
from extensions.ext_storage import storage
|
||||||
|
|
||||||
|
GENERATED_SECRET_KEY_FILENAME = ".dify_secret_key"
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_secret_key(secret_key: str) -> str:
|
||||||
|
"""Return an explicit SECRET_KEY or a generated key persisted in storage."""
|
||||||
|
if secret_key:
|
||||||
|
return secret_key
|
||||||
|
|
||||||
|
return _load_or_create_secret_key()
|
||||||
|
|
||||||
|
|
||||||
|
def _load_or_create_secret_key() -> str:
|
||||||
|
try:
|
||||||
|
persisted_key = storage.load_once(GENERATED_SECRET_KEY_FILENAME).decode("utf-8").strip()
|
||||||
|
if persisted_key:
|
||||||
|
return persisted_key
|
||||||
|
except FileNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
generated_key = secrets.token_urlsafe(48)
|
||||||
|
|
||||||
|
try:
|
||||||
|
storage.save(GENERATED_SECRET_KEY_FILENAME, f"{generated_key}\n".encode())
|
||||||
|
except Exception as exc:
|
||||||
|
raise ValueError(
|
||||||
|
f"SECRET_KEY is not set and could not be generated at {GENERATED_SECRET_KEY_FILENAME}. "
|
||||||
|
"Set SECRET_KEY explicitly or make storage writable."
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return generated_key
|
||||||
@ -19,7 +19,7 @@
|
|||||||
"name": "Website Generator"
|
"name": "Website Generator"
|
||||||
},
|
},
|
||||||
"app_id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
|
"app_id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
|
||||||
"category": "Programming",
|
"categories": ["Programming"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": null,
|
"description": null,
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -35,7 +35,7 @@
|
|||||||
"name": "Investment Analysis Report Copilot"
|
"name": "Investment Analysis Report Copilot"
|
||||||
},
|
},
|
||||||
"app_id": "a23b57fa-85da-49c0-a571-3aff375976c1",
|
"app_id": "a23b57fa-85da-49c0-a571-3aff375976c1",
|
||||||
"category": "Agent",
|
"categories": ["Agent"],
|
||||||
"copyright": "Dify.AI",
|
"copyright": "Dify.AI",
|
||||||
"description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
|
"description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -51,7 +51,7 @@
|
|||||||
"name": "Workflow Planning Assistant "
|
"name": "Workflow Planning Assistant "
|
||||||
},
|
},
|
||||||
"app_id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
|
"app_id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "An assistant that helps you plan and select the right node for a workflow (V0.6.0). ",
|
"description": "An assistant that helps you plan and select the right node for a workflow (V0.6.0). ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -67,7 +67,7 @@
|
|||||||
"name": "Automated Email Reply "
|
"name": "Automated Email Reply "
|
||||||
},
|
},
|
||||||
"app_id": "e9d92058-7d20-4904-892f-75d90bef7587",
|
"app_id": "e9d92058-7d20-4904-892f-75d90bef7587",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Reply emails using Gmail API. It will automatically retrieve email in your inbox and create a response in Gmail. \nConfigure your Gmail API in Google Cloud Console. ",
|
"description": "Reply emails using Gmail API. It will automatically retrieve email in your inbox and create a response in Gmail. \nConfigure your Gmail API in Google Cloud Console. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -83,7 +83,7 @@
|
|||||||
"name": "Book Translation "
|
"name": "Book Translation "
|
||||||
},
|
},
|
||||||
"app_id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
|
"app_id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "A workflow designed to translate a full book up to 15000 tokens per run. Uses Code node to separate text into chunks and Iteration to translate each chunk. ",
|
"description": "A workflow designed to translate a full book up to 15000 tokens per run. Uses Code node to separate text into chunks and Iteration to translate each chunk. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -99,7 +99,7 @@
|
|||||||
"name": "Python bug fixer"
|
"name": "Python bug fixer"
|
||||||
},
|
},
|
||||||
"app_id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
|
"app_id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
|
||||||
"category": "Programming",
|
"categories": ["Programming"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": null,
|
"description": null,
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -115,7 +115,7 @@
|
|||||||
"name": "Code Interpreter"
|
"name": "Code Interpreter"
|
||||||
},
|
},
|
||||||
"app_id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
|
"app_id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
|
||||||
"category": "Programming",
|
"categories": ["Programming"],
|
||||||
"copyright": "Copyright 2023 Dify",
|
"copyright": "Copyright 2023 Dify",
|
||||||
"description": "Code interpreter, clarifying the syntax and semantics of the code.",
|
"description": "Code interpreter, clarifying the syntax and semantics of the code.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -131,7 +131,7 @@
|
|||||||
"name": "SVG Logo Design "
|
"name": "SVG Logo Design "
|
||||||
},
|
},
|
||||||
"app_id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
|
"app_id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
|
||||||
"category": "Agent",
|
"categories": ["Agent"],
|
||||||
"copyright": "Dify.AI",
|
"copyright": "Dify.AI",
|
||||||
"description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL·E 3. ",
|
"description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL·E 3. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -147,7 +147,7 @@
|
|||||||
"name": "Long Story Generator (Iteration) "
|
"name": "Long Story Generator (Iteration) "
|
||||||
},
|
},
|
||||||
"app_id": "5efb98d7-176b-419c-b6ef-50767391ab62",
|
"app_id": "5efb98d7-176b-419c-b6ef-50767391ab62",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "A workflow demonstrating how to use Iteration node to generate long article that is longer than the context length of LLMs. ",
|
"description": "A workflow demonstrating how to use Iteration node to generate long article that is longer than the context length of LLMs. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -163,7 +163,7 @@
|
|||||||
"name": "Text Summarization Workflow"
|
"name": "Text Summarization Workflow"
|
||||||
},
|
},
|
||||||
"app_id": "f00c4531-6551-45ee-808f-1d7903099515",
|
"app_id": "f00c4531-6551-45ee-808f-1d7903099515",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Based on users' choice, retrieve external knowledge to more accurately summarize articles.",
|
"description": "Based on users' choice, retrieve external knowledge to more accurately summarize articles.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -179,7 +179,7 @@
|
|||||||
"name": "YouTube Channel Data Analysis"
|
"name": "YouTube Channel Data Analysis"
|
||||||
},
|
},
|
||||||
"app_id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
|
"app_id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
|
||||||
"category": "Agent",
|
"categories": ["Agent"],
|
||||||
"copyright": "Dify.AI",
|
"copyright": "Dify.AI",
|
||||||
"description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
|
"description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -195,7 +195,7 @@
|
|||||||
"name": "Article Grading Bot"
|
"name": "Article Grading Bot"
|
||||||
},
|
},
|
||||||
"app_id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
|
"app_id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
|
||||||
"category": "Writing",
|
"categories": ["Writing"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Assess the quality of articles and text based on user defined criteria. ",
|
"description": "Assess the quality of articles and text based on user defined criteria. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -211,7 +211,7 @@
|
|||||||
"name": "SEO Blog Generator"
|
"name": "SEO Blog Generator"
|
||||||
},
|
},
|
||||||
"app_id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
|
"app_id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Workflow for retrieving information from the internet, followed by segmented generation of SEO blogs.",
|
"description": "Workflow for retrieving information from the internet, followed by segmented generation of SEO blogs.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -227,7 +227,7 @@
|
|||||||
"name": "SQL Creator"
|
"name": "SQL Creator"
|
||||||
},
|
},
|
||||||
"app_id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
|
"app_id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
|
||||||
"category": "Programming",
|
"categories": ["Programming"],
|
||||||
"copyright": "Copyright 2023 Dify",
|
"copyright": "Copyright 2023 Dify",
|
||||||
"description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
|
"description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -243,7 +243,7 @@
|
|||||||
"name": "Sentiment Analysis "
|
"name": "Sentiment Analysis "
|
||||||
},
|
},
|
||||||
"app_id": "f06bf86b-d50c-4895-a942-35112dbe4189",
|
"app_id": "f06bf86b-d50c-4895-a942-35112dbe4189",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Batch sentiment analysis of text, followed by JSON output of sentiment classification along with scores.",
|
"description": "Batch sentiment analysis of text, followed by JSON output of sentiment classification along with scores.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -259,7 +259,7 @@
|
|||||||
"name": "Strategic Consulting Expert"
|
"name": "Strategic Consulting Expert"
|
||||||
},
|
},
|
||||||
"app_id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
|
"app_id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
|
||||||
"category": "Assistant",
|
"categories": ["Assistant"],
|
||||||
"copyright": "Copyright 2023 Dify",
|
"copyright": "Copyright 2023 Dify",
|
||||||
"description": "I can answer your questions related to strategic marketing.",
|
"description": "I can answer your questions related to strategic marketing.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -275,7 +275,7 @@
|
|||||||
"name": "Code Converter"
|
"name": "Code Converter"
|
||||||
},
|
},
|
||||||
"app_id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
|
"app_id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
|
||||||
"category": "Programming",
|
"categories": ["Programming"],
|
||||||
"copyright": "Copyright 2023 Dify",
|
"copyright": "Copyright 2023 Dify",
|
||||||
"description": "This is an application that provides the ability to convert code snippets in multiple programming languages. You can input the code you wish to convert, select the target programming language, and get the desired output.",
|
"description": "This is an application that provides the ability to convert code snippets in multiple programming languages. You can input the code you wish to convert, select the target programming language, and get the desired output.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -291,7 +291,7 @@
|
|||||||
"name": "Question Classifier + Knowledge + Chatbot "
|
"name": "Question Classifier + Knowledge + Chatbot "
|
||||||
},
|
},
|
||||||
"app_id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
|
"app_id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Basic Workflow Template, a chatbot capable of identifying intents alongside with a knowledge base.",
|
"description": "Basic Workflow Template, a chatbot capable of identifying intents alongside with a knowledge base.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -307,7 +307,7 @@
|
|||||||
"name": "AI Front-end interviewer"
|
"name": "AI Front-end interviewer"
|
||||||
},
|
},
|
||||||
"app_id": "127efead-8944-4e20-ba9d-12402eb345e0",
|
"app_id": "127efead-8944-4e20-ba9d-12402eb345e0",
|
||||||
"category": "HR",
|
"categories": ["HR"],
|
||||||
"copyright": "Copyright 2023 Dify",
|
"copyright": "Copyright 2023 Dify",
|
||||||
"description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
|
"description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -323,7 +323,7 @@
|
|||||||
"name": "Knowledge Retrieval + Chatbot "
|
"name": "Knowledge Retrieval + Chatbot "
|
||||||
},
|
},
|
||||||
"app_id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
|
"app_id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Basic Workflow Template, A chatbot with a knowledge base. ",
|
"description": "Basic Workflow Template, A chatbot with a knowledge base. ",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -339,7 +339,7 @@
|
|||||||
"name": "Email Assistant Workflow "
|
"name": "Email Assistant Workflow "
|
||||||
},
|
},
|
||||||
"app_id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
|
"app_id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "A multifunctional email assistant capable of summarizing, replying, composing, proofreading, and checking grammar.",
|
"description": "A multifunctional email assistant capable of summarizing, replying, composing, proofreading, and checking grammar.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
@ -355,7 +355,7 @@
|
|||||||
"name": "Customer Review Analysis Workflow "
|
"name": "Customer Review Analysis Workflow "
|
||||||
},
|
},
|
||||||
"app_id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
|
"app_id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
|
||||||
"category": "Workflow",
|
"categories": ["Workflow"],
|
||||||
"copyright": null,
|
"copyright": null,
|
||||||
"description": "Utilize LLM (Large Language Models) to classify customer reviews and forward them to the internal system.",
|
"description": "Utilize LLM (Large Language Models) to classify customer reviews and forward them to the internal system.",
|
||||||
"is_listed": true,
|
"is_listed": true,
|
||||||
|
|||||||
193
api/controllers/API_SCHEMA_GUIDE.md
Normal file
193
api/controllers/API_SCHEMA_GUIDE.md
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
# API Schema Guide
|
||||||
|
|
||||||
|
This guide describes the expected Flask-RESTX + Pydantic pattern for controller request payloads, query
|
||||||
|
parameters, response schemas, and Swagger documentation.
|
||||||
|
|
||||||
|
## Principles
|
||||||
|
|
||||||
|
- Use Pydantic `BaseModel` for request bodies and query parameters.
|
||||||
|
- Use `fields.base.ResponseModel` for response DTOs.
|
||||||
|
- Keep runtime validation and Swagger documentation wired to the same Pydantic model.
|
||||||
|
- Prefer explicit validation and serialization in controller methods over Flask-RESTX marshalling.
|
||||||
|
- Do not add new Flask-RESTX `fields.*` dictionaries, `Namespace.model(...)` exports, or `@marshal_with(...)` for migrated or new endpoints.
|
||||||
|
- Do not use `@ns.expect(...)` for GET query parameters. Flask-RESTX documents that as a request body.
|
||||||
|
|
||||||
|
## Naming
|
||||||
|
|
||||||
|
- Request body models: use a `Payload` suffix.
|
||||||
|
- Example: `WorkflowRunPayload`, `DatasourceVariablesPayload`.
|
||||||
|
- Query parameter models: use a `Query` suffix.
|
||||||
|
- Example: `WorkflowRunListQuery`, `MessageListQuery`.
|
||||||
|
- Response models: use a `Response` suffix and inherit from `ResponseModel`.
|
||||||
|
- Example: `WorkflowRunDetailResponse`, `WorkflowRunNodeExecutionListResponse`.
|
||||||
|
- Use `ListResponse` or `PaginationResponse` for wrapper responses.
|
||||||
|
- Example: `WorkflowRunNodeExecutionListResponse`, `WorkflowRunPaginationResponse`.
|
||||||
|
- Keep these models near the controller when they are endpoint-specific. Move them to `fields/*_fields.py` only when shared by multiple controllers.
|
||||||
|
|
||||||
|
## Registering Models For Swagger
|
||||||
|
|
||||||
|
Use helpers from `controllers.common.schema`.
|
||||||
|
|
||||||
|
```python
|
||||||
|
from controllers.common.schema import (
|
||||||
|
query_params_from_model,
|
||||||
|
register_response_schema_models,
|
||||||
|
register_schema_models,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Register request payload and query models with `register_schema_models(...)`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
register_schema_models(
|
||||||
|
console_ns,
|
||||||
|
WorkflowRunPayload,
|
||||||
|
WorkflowRunListQuery,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Register response models with `register_response_schema_models(...)`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
register_response_schema_models(
|
||||||
|
console_ns,
|
||||||
|
WorkflowRunDetailResponse,
|
||||||
|
WorkflowRunPaginationResponse,
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Response models are registered in Pydantic serialization mode. This matters when a response model uses
|
||||||
|
`validation_alias` to read internal object attributes but emits public API field names. For example, a response model
|
||||||
|
can validate from `inputs_dict` while documenting and serializing `inputs`.
|
||||||
|
|
||||||
|
## Request Bodies
|
||||||
|
|
||||||
|
For non-GET request bodies:
|
||||||
|
|
||||||
|
1. Define a Pydantic `Payload` model.
|
||||||
|
2. Register it with `register_schema_models(...)`.
|
||||||
|
3. Use `@ns.expect(ns.models[Payload.__name__])` for Swagger documentation.
|
||||||
|
4. Validate from `ns.payload or {}` inside the controller.
|
||||||
|
|
||||||
|
```python
|
||||||
|
class DraftWorkflowNodeRunPayload(BaseModel):
|
||||||
|
inputs: dict[str, Any]
|
||||||
|
query: str = ""
|
||||||
|
|
||||||
|
|
||||||
|
register_schema_models(console_ns, DraftWorkflowNodeRunPayload)
|
||||||
|
|
||||||
|
|
||||||
|
@console_ns.expect(console_ns.models[DraftWorkflowNodeRunPayload.__name__])
|
||||||
|
def post(self, app_model: App, node_id: str):
|
||||||
|
payload = DraftWorkflowNodeRunPayload.model_validate(console_ns.payload or {})
|
||||||
|
result = service.run(..., inputs=payload.inputs, query=payload.query)
|
||||||
|
return WorkflowRunNodeExecutionResponse.model_validate(result, from_attributes=True).model_dump(mode="json")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Query Parameters
|
||||||
|
|
||||||
|
For GET query parameters:
|
||||||
|
|
||||||
|
1. Define a Pydantic `Query` model.
|
||||||
|
2. Register it with `register_schema_models(...)` if it is referenced elsewhere in docs, or only use
|
||||||
|
`query_params_from_model(...)` if a body schema is not needed.
|
||||||
|
3. Use `@ns.doc(params=query_params_from_model(QueryModel))`.
|
||||||
|
4. Validate from `request.args.to_dict(flat=True)` or an explicit dict when type coercion is needed.
|
||||||
|
|
||||||
|
```python
|
||||||
|
class WorkflowRunListQuery(BaseModel):
|
||||||
|
last_id: str | None = Field(default=None, description="Last run ID for pagination")
|
||||||
|
limit: int = Field(default=20, ge=1, le=100, description="Number of items per page (1-100)")
|
||||||
|
|
||||||
|
|
||||||
|
@console_ns.doc(params=query_params_from_model(WorkflowRunListQuery))
|
||||||
|
def get(self, app_model: App):
|
||||||
|
query = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
result = service.list(..., limit=query.limit, last_id=query.last_id)
|
||||||
|
return WorkflowRunPaginationResponse.model_validate(result, from_attributes=True).model_dump(mode="json")
|
||||||
|
```
|
||||||
|
|
||||||
|
Do not do this for GET query parameters:
|
||||||
|
|
||||||
|
```python
|
||||||
|
@console_ns.expect(console_ns.models[WorkflowRunListQuery.__name__])
|
||||||
|
def get(...):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
That documents a GET request body and is not the expected contract.
|
||||||
|
|
||||||
|
## Responses
|
||||||
|
|
||||||
|
Response models should inherit from `ResponseModel`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
class WorkflowRunNodeExecutionResponse(ResponseModel):
|
||||||
|
id: str
|
||||||
|
inputs: Any = Field(default=None, validation_alias="inputs_dict")
|
||||||
|
process_data: Any = Field(default=None, validation_alias="process_data_dict")
|
||||||
|
outputs: Any = Field(default=None, validation_alias="outputs_dict")
|
||||||
|
```
|
||||||
|
|
||||||
|
Document response models with `@ns.response(...)`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node run started successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
|
def post(...):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Serialize explicitly:
|
||||||
|
|
||||||
|
```python
|
||||||
|
return WorkflowRunNodeExecutionResponse.model_validate(
|
||||||
|
workflow_node_execution,
|
||||||
|
from_attributes=True,
|
||||||
|
).model_dump(mode="json")
|
||||||
|
```
|
||||||
|
|
||||||
|
If the service can return `None`, translate that into the expected HTTP error before validation:
|
||||||
|
|
||||||
|
```python
|
||||||
|
workflow_run = service.get_workflow_run(...)
|
||||||
|
if workflow_run is None:
|
||||||
|
raise NotFound("Workflow run not found")
|
||||||
|
|
||||||
|
return WorkflowRunDetailResponse.model_validate(workflow_run, from_attributes=True).model_dump(mode="json")
|
||||||
|
```
|
||||||
|
|
||||||
|
## Legacy Flask-RESTX Patterns
|
||||||
|
|
||||||
|
Avoid adding these patterns to new or migrated endpoints:
|
||||||
|
|
||||||
|
- `ns.model(...)` for new request/response DTOs.
|
||||||
|
- Module-level exported RESTX model objects such as `workflow_run_detail_model`.
|
||||||
|
- `fields.Nested({...})` with raw inline dict field maps.
|
||||||
|
- `@marshal_with(...)` for response serialization.
|
||||||
|
- `@ns.expect(...)` for GET query params.
|
||||||
|
|
||||||
|
Existing legacy field dictionaries may remain where an endpoint has not yet been migrated. Keep that compatibility local
|
||||||
|
to the legacy area and avoid importing RESTX model objects from controllers.
|
||||||
|
|
||||||
|
## Verifying Swagger
|
||||||
|
|
||||||
|
For schema and documentation changes, run focused tests and generate Swagger JSON:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
uv run --project . pytest tests/unit_tests/controllers/common/test_schema.py
|
||||||
|
uv run --project . pytest tests/unit_tests/commands/test_generate_swagger_specs.py tests/unit_tests/controllers/test_swagger.py
|
||||||
|
uv run --project . dev/generate_swagger_specs.py --output-dir /tmp/dify-openapi-check
|
||||||
|
```
|
||||||
|
|
||||||
|
Inspect affected endpoints with `jq`. Check that:
|
||||||
|
|
||||||
|
- GET parameters are `in: query`.
|
||||||
|
- Request bodies appear only where the endpoint has a body.
|
||||||
|
- Responses reference the expected `*Response` schema.
|
||||||
|
- Response schemas use public serialized names, not internal validation aliases like `inputs_dict`.
|
||||||
|
|
||||||
@ -41,7 +41,8 @@ def guess_file_info_from_response(response: httpx.Response):
|
|||||||
# Try to extract filename from URL
|
# Try to extract filename from URL
|
||||||
parsed_url = urllib.parse.urlparse(url)
|
parsed_url = urllib.parse.urlparse(url)
|
||||||
url_path = parsed_url.path
|
url_path = parsed_url.path
|
||||||
filename = os.path.basename(url_path)
|
# Decode percent-encoded characters in the path segment
|
||||||
|
filename = urllib.parse.unquote(os.path.basename(url_path))
|
||||||
|
|
||||||
# If filename couldn't be extracted, use Content-Disposition header
|
# If filename couldn't be extracted, use Content-Disposition header
|
||||||
if not filename:
|
if not filename:
|
||||||
|
|||||||
@ -1,6 +1,14 @@
|
|||||||
"""Helpers for registering Pydantic models with Flask-RESTX namespaces."""
|
"""Helpers for registering Pydantic models with Flask-RESTX namespaces.
|
||||||
|
|
||||||
|
Flask-RESTX treats `SchemaModel` bodies as opaque JSON schemas; it does not
|
||||||
|
promote Pydantic's nested `$defs` into top-level Swagger `definitions`.
|
||||||
|
These helpers keep that translation centralized so models registered through
|
||||||
|
`register_schema_models` emit resolvable Swagger 2.0 references.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from collections.abc import Mapping
|
||||||
from enum import StrEnum
|
from enum import StrEnum
|
||||||
|
from typing import Any, Literal, NotRequired, TypedDict
|
||||||
|
|
||||||
from flask_restx import Namespace
|
from flask_restx import Namespace
|
||||||
from pydantic import BaseModel, TypeAdapter
|
from pydantic import BaseModel, TypeAdapter
|
||||||
@ -8,10 +16,59 @@ from pydantic import BaseModel, TypeAdapter
|
|||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||||
|
|
||||||
|
|
||||||
def register_schema_model(namespace: Namespace, model: type[BaseModel]) -> None:
|
QueryParamDoc = TypedDict(
|
||||||
"""Register a single BaseModel with a namespace for Swagger documentation."""
|
"QueryParamDoc",
|
||||||
|
{
|
||||||
|
"in": NotRequired[str],
|
||||||
|
"type": NotRequired[str],
|
||||||
|
"items": NotRequired[dict[str, object]],
|
||||||
|
"required": NotRequired[bool],
|
||||||
|
"description": NotRequired[str],
|
||||||
|
"enum": NotRequired[list[object]],
|
||||||
|
"default": NotRequired[object],
|
||||||
|
"minimum": NotRequired[int | float],
|
||||||
|
"maximum": NotRequired[int | float],
|
||||||
|
"minLength": NotRequired[int],
|
||||||
|
"maxLength": NotRequired[int],
|
||||||
|
"minItems": NotRequired[int],
|
||||||
|
"maxItems": NotRequired[int],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
namespace.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
def _register_json_schema(namespace: Namespace, name: str, schema: dict) -> None:
|
||||||
|
"""Register a JSON schema and promote any nested Pydantic `$defs`."""
|
||||||
|
|
||||||
|
nested_definitions = schema.get("$defs")
|
||||||
|
schema_to_register = dict(schema)
|
||||||
|
if isinstance(nested_definitions, dict):
|
||||||
|
schema_to_register.pop("$defs")
|
||||||
|
|
||||||
|
namespace.schema_model(name, schema_to_register)
|
||||||
|
|
||||||
|
if not isinstance(nested_definitions, dict):
|
||||||
|
return
|
||||||
|
|
||||||
|
for nested_name, nested_schema in nested_definitions.items():
|
||||||
|
if isinstance(nested_schema, dict):
|
||||||
|
_register_json_schema(namespace, nested_name, nested_schema)
|
||||||
|
|
||||||
|
|
||||||
|
JsonSchemaMode = Literal["validation", "serialization"]
|
||||||
|
|
||||||
|
|
||||||
|
def _register_schema_model(namespace: Namespace, model: type[BaseModel], *, mode: JsonSchemaMode) -> None:
|
||||||
|
_register_json_schema(
|
||||||
|
namespace,
|
||||||
|
model.__name__,
|
||||||
|
model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0, mode=mode),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def register_schema_model(namespace: Namespace, model: type[BaseModel]) -> None:
|
||||||
|
"""Register a BaseModel and its nested schema definitions for Swagger documentation."""
|
||||||
|
|
||||||
|
_register_schema_model(namespace, model, mode="validation")
|
||||||
|
|
||||||
|
|
||||||
def register_schema_models(namespace: Namespace, *models: type[BaseModel]) -> None:
|
def register_schema_models(namespace: Namespace, *models: type[BaseModel]) -> None:
|
||||||
@ -21,6 +78,19 @@ def register_schema_models(namespace: Namespace, *models: type[BaseModel]) -> No
|
|||||||
register_schema_model(namespace, model)
|
register_schema_model(namespace, model)
|
||||||
|
|
||||||
|
|
||||||
|
def register_response_schema_model(namespace: Namespace, model: type[BaseModel]) -> None:
|
||||||
|
"""Register a BaseModel using its serialized response shape."""
|
||||||
|
|
||||||
|
_register_schema_model(namespace, model, mode="serialization")
|
||||||
|
|
||||||
|
|
||||||
|
def register_response_schema_models(namespace: Namespace, *models: type[BaseModel]) -> None:
|
||||||
|
"""Register multiple response BaseModels using their serialized response shape."""
|
||||||
|
|
||||||
|
for model in models:
|
||||||
|
register_response_schema_model(namespace, model)
|
||||||
|
|
||||||
|
|
||||||
def get_or_create_model(model_name: str, field_def):
|
def get_or_create_model(model_name: str, field_def):
|
||||||
# Import lazily to avoid circular imports between console controllers and schema helpers.
|
# Import lazily to avoid circular imports between console controllers and schema helpers.
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
@ -34,15 +104,114 @@ def get_or_create_model(model_name: str, field_def):
|
|||||||
def register_enum_models(namespace: Namespace, *models: type[StrEnum]) -> None:
|
def register_enum_models(namespace: Namespace, *models: type[StrEnum]) -> None:
|
||||||
"""Register multiple StrEnum with a namespace."""
|
"""Register multiple StrEnum with a namespace."""
|
||||||
for model in models:
|
for model in models:
|
||||||
namespace.schema_model(
|
_register_json_schema(
|
||||||
model.__name__, TypeAdapter(model).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
namespace,
|
||||||
|
model.__name__,
|
||||||
|
TypeAdapter(model).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def query_params_from_model(model: type[BaseModel]) -> dict[str, QueryParamDoc]:
|
||||||
|
"""Build Flask-RESTX query parameter docs from a flat Pydantic model.
|
||||||
|
|
||||||
|
`Namespace.expect()` treats Pydantic schema models as request bodies, so GET
|
||||||
|
endpoints should keep runtime validation on the Pydantic model and feed this
|
||||||
|
derived mapping to `Namespace.doc(params=...)` for Swagger documentation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
schema = model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
||||||
|
properties = schema.get("properties", {})
|
||||||
|
if not isinstance(properties, Mapping):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
required = schema.get("required", [])
|
||||||
|
required_names = set(required) if isinstance(required, list) else set()
|
||||||
|
|
||||||
|
params: dict[str, QueryParamDoc] = {}
|
||||||
|
for name, property_schema in properties.items():
|
||||||
|
if not isinstance(name, str) or not isinstance(property_schema, Mapping):
|
||||||
|
continue
|
||||||
|
|
||||||
|
params[name] = _query_param_from_property(property_schema, required=name in required_names)
|
||||||
|
|
||||||
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
def _query_param_from_property(property_schema: Mapping[str, Any], *, required: bool) -> QueryParamDoc:
    """Translate one flat JSON-schema property into a Swagger 2.0 query-parameter doc.

    Only scalar/array types that Swagger 2.0 query parameters support are copied;
    everything else is silently dropped so the doc stays valid. Validation itself
    still happens on the Pydantic model — this mapping is documentation only.
    """
    # Collapse `anyOf: [T, null]` (Optional fields) down to the schema for T first.
    resolved = _nullable_property_schema(property_schema)
    doc: QueryParamDoc = {"in": "query", "required": required}

    if isinstance((description := resolved.get("description")), str):
        doc["description"] = description

    # Swagger 2.0 query params may only be these primitive types (or array).
    if isinstance((declared_type := resolved.get("type")), str) and declared_type in {
        "array",
        "boolean",
        "integer",
        "number",
        "string",
    }:
        doc["type"] = declared_type
        if declared_type == "array":
            # Arrays additionally need an item type when one is declared.
            item_schema = resolved.get("items")
            if isinstance(item_schema, Mapping) and isinstance((element_type := item_schema.get("type")), str):
                doc["items"] = {"type": element_type}

    if isinstance((enum_values := resolved.get("enum")), list):
        doc["enum"] = enum_values

    # NOTE: a declared default of None is indistinguishable from "no default" here,
    # so it is intentionally omitted from the docs.
    if (default_value := resolved.get("default")) is not None:
        doc["default"] = default_value

    # Numeric bounds accept int or float; length/item bounds must be ints.
    if isinstance((minimum := resolved.get("minimum")), int | float):
        doc["minimum"] = minimum
    if isinstance((maximum := resolved.get("maximum")), int | float):
        doc["maximum"] = maximum
    if isinstance((min_length := resolved.get("minLength")), int):
        doc["minLength"] = min_length
    if isinstance((max_length := resolved.get("maxLength")), int):
        doc["maxLength"] = max_length
    if isinstance((min_items := resolved.get("minItems")), int):
        doc["minItems"] = min_items
    if isinstance((max_items := resolved.get("maxItems")), int):
        doc["maxItems"] = max_items

    return doc
|
||||||
|
|
||||||
|
|
||||||
|
def _nullable_property_schema(property_schema: Mapping[str, Any]) -> Mapping[str, Any]:
|
||||||
|
any_of = property_schema.get("anyOf")
|
||||||
|
if not isinstance(any_of, list):
|
||||||
|
return property_schema
|
||||||
|
|
||||||
|
non_null_candidates = [
|
||||||
|
candidate for candidate in any_of if isinstance(candidate, Mapping) and candidate.get("type") != "null"
|
||||||
|
]
|
||||||
|
|
||||||
|
if len(non_null_candidates) == 1:
|
||||||
|
return {**property_schema, **non_null_candidates[0]}
|
||||||
|
|
||||||
|
return property_schema
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"DEFAULT_REF_TEMPLATE_SWAGGER_2_0",
|
"DEFAULT_REF_TEMPLATE_SWAGGER_2_0",
|
||||||
"get_or_create_model",
|
"get_or_create_model",
|
||||||
|
"query_params_from_model",
|
||||||
"register_enum_models",
|
"register_enum_models",
|
||||||
|
"register_response_schema_model",
|
||||||
|
"register_response_schema_models",
|
||||||
"register_schema_model",
|
"register_schema_model",
|
||||||
"register_schema_models",
|
"register_schema_models",
|
||||||
]
|
]
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import io
|
|||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
from typing import cast
|
from typing import cast
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
@ -12,6 +13,7 @@ from werkzeug.exceptions import BadRequest, NotFound, Unauthorized
|
|||||||
|
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
from constants.languages import supported_language
|
from constants.languages import supported_language
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.wraps import only_edition_cloud
|
from controllers.console.wraps import only_edition_cloud
|
||||||
from core.db.session_factory import session_factory
|
from core.db.session_factory import session_factory
|
||||||
@ -20,8 +22,6 @@ from libs.token import extract_access_token
|
|||||||
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
|
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
|
||||||
from services.billing_service import BillingService, LangContentDict
|
from services.billing_service import BillingService, LangContentDict
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class InsertExploreAppPayload(BaseModel):
|
class InsertExploreAppPayload(BaseModel):
|
||||||
app_id: str = Field(...)
|
app_id: str = Field(...)
|
||||||
@ -58,15 +58,7 @@ class InsertExploreBannerPayload(BaseModel):
|
|||||||
model_config = {"populate_by_name": True}
|
model_config = {"populate_by_name": True}
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, InsertExploreAppPayload, InsertExploreBannerPayload)
|
||||||
InsertExploreAppPayload.__name__,
|
|
||||||
InsertExploreAppPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
|
||||||
InsertExploreBannerPayload.__name__,
|
|
||||||
InsertExploreBannerPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def admin_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
|
def admin_required[**P, R](view: Callable[P, R]) -> Callable[P, R]:
|
||||||
@ -190,7 +182,7 @@ class InsertExploreAppApi(Resource):
|
|||||||
@console_ns.response(204, "App removed successfully")
|
@console_ns.response(204, "App removed successfully")
|
||||||
@only_edition_cloud
|
@only_edition_cloud
|
||||||
@admin_required
|
@admin_required
|
||||||
def delete(self, app_id):
|
def delete(self, app_id: UUID):
|
||||||
with session_factory.create_session() as session:
|
with session_factory.create_session() as session:
|
||||||
recommended_app = session.execute(
|
recommended_app = session.execute(
|
||||||
select(RecommendedApp).where(RecommendedApp.app_id == str(app_id))
|
select(RecommendedApp).where(RecommendedApp.app_id == str(app_id))
|
||||||
@ -301,15 +293,7 @@ class BatchAddNotificationAccountsPayload(BaseModel):
|
|||||||
user_email: list[str] = Field(..., description="List of account email addresses")
|
user_email: list[str] = Field(..., description="List of account email addresses")
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, UpsertNotificationPayload, BatchAddNotificationAccountsPayload)
|
||||||
UpsertNotificationPayload.__name__,
|
|
||||||
UpsertNotificationPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
|
||||||
BatchAddNotificationAccountsPayload.__name__,
|
|
||||||
BatchAddNotificationAccountsPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/admin/upsert_notification")
|
@console_ns.route("/admin/upsert_notification")
|
||||||
@ -411,11 +395,11 @@ class BatchAddNotificationAccountsApi(Resource):
|
|||||||
raise BadRequest("Invalid file type. Only CSV (.csv) and TXT (.txt) files are allowed.")
|
raise BadRequest("Invalid file type. Only CSV (.csv) and TXT (.txt) files are allowed.")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
content = file.read().decode("utf-8")
|
content = file.stream.read().decode("utf-8")
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
try:
|
try:
|
||||||
file.seek(0)
|
file.stream.seek(0)
|
||||||
content = file.read().decode("gbk")
|
content = file.stream.read().decode("gbk")
|
||||||
except UnicodeDecodeError:
|
except UnicodeDecodeError:
|
||||||
raise BadRequest("Unable to decode the file. Please use UTF-8 or GBK encoding.")
|
raise BadRequest("Unable to decode the file. Please use UTF-8 or GBK encoding.")
|
||||||
|
|
||||||
|
|||||||
@ -34,7 +34,7 @@ class AdvancedPromptTemplateList(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
args = AdvancedPromptTemplateQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = AdvancedPromptTemplateQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
prompt_args: AdvancedPromptTemplateArgs = {
|
prompt_args: AdvancedPromptTemplateArgs = {
|
||||||
"app_mode": args.app_mode,
|
"app_mode": args.app_mode,
|
||||||
"model_mode": args.model_mode,
|
"model_mode": args.model_mode,
|
||||||
|
|||||||
@ -2,6 +2,7 @@ from flask import request
|
|||||||
from flask_restx import Resource, fields
|
from flask_restx import Resource, fields
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
@ -10,8 +11,6 @@ from libs.login import login_required
|
|||||||
from models.model import AppMode
|
from models.model import AppMode
|
||||||
from services.agent_service import AgentService
|
from services.agent_service import AgentService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class AgentLogQuery(BaseModel):
|
class AgentLogQuery(BaseModel):
|
||||||
message_id: str = Field(..., description="Message UUID")
|
message_id: str = Field(..., description="Message UUID")
|
||||||
@ -23,9 +22,7 @@ class AgentLogQuery(BaseModel):
|
|||||||
return uuid_value(value)
|
return uuid_value(value)
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, AgentLogQuery)
|
||||||
AgentLogQuery.__name__, AgentLogQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
|
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
|
||||||
@ -44,6 +41,6 @@ class AgentLogApi(Resource):
|
|||||||
@get_app_model(mode=[AppMode.AGENT_CHAT])
|
@get_app_model(mode=[AppMode.AGENT_CHAT])
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
"""Get agent logs"""
|
"""Get agent logs"""
|
||||||
args = AgentLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = AgentLogQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
return AgentService.get_agent_logs(app_model, args.conversation_id, args.message_id)
|
return AgentService.get_agent_logs(app_model, args.conversation_id, args.message_id)
|
||||||
|
|||||||
@ -1,4 +1,5 @@
|
|||||||
from typing import Any, Literal
|
from typing import Any, Literal
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import abort, make_response, request
|
from flask import abort, make_response, request
|
||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
@ -33,8 +34,6 @@ from services.annotation_service import (
|
|||||||
UpsertAnnotationArgs,
|
UpsertAnnotationArgs,
|
||||||
)
|
)
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class AnnotationReplyPayload(BaseModel):
|
class AnnotationReplyPayload(BaseModel):
|
||||||
score_threshold: float = Field(..., description="Score threshold for annotation matching")
|
score_threshold: float = Field(..., description="Score threshold for annotation matching")
|
||||||
@ -87,17 +86,6 @@ class AnnotationFilePayload(BaseModel):
|
|||||||
return uuid_value(value)
|
return uuid_value(value)
|
||||||
|
|
||||||
|
|
||||||
def reg(model: type[BaseModel]) -> None:
|
|
||||||
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
|
||||||
|
|
||||||
reg(AnnotationReplyPayload)
|
|
||||||
reg(AnnotationSettingUpdatePayload)
|
|
||||||
reg(AnnotationListQuery)
|
|
||||||
reg(CreateAnnotationPayload)
|
|
||||||
reg(UpdateAnnotationPayload)
|
|
||||||
reg(AnnotationReplyStatusQuery)
|
|
||||||
reg(AnnotationFilePayload)
|
|
||||||
register_schema_models(
|
register_schema_models(
|
||||||
console_ns,
|
console_ns,
|
||||||
Annotation,
|
Annotation,
|
||||||
@ -105,6 +93,13 @@ register_schema_models(
|
|||||||
AnnotationExportList,
|
AnnotationExportList,
|
||||||
AnnotationHitHistory,
|
AnnotationHitHistory,
|
||||||
AnnotationHitHistoryList,
|
AnnotationHitHistoryList,
|
||||||
|
AnnotationReplyPayload,
|
||||||
|
AnnotationSettingUpdatePayload,
|
||||||
|
AnnotationListQuery,
|
||||||
|
CreateAnnotationPayload,
|
||||||
|
UpdateAnnotationPayload,
|
||||||
|
AnnotationReplyStatusQuery,
|
||||||
|
AnnotationFilePayload,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -121,8 +116,7 @@ class AnnotationReplyActionApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@cloud_edition_billing_resource_check("annotation")
|
@cloud_edition_billing_resource_check("annotation")
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id, action: Literal["enable", "disable"]):
|
def post(self, app_id: UUID, action: Literal["enable", "disable"]):
|
||||||
app_id = str(app_id)
|
|
||||||
args = AnnotationReplyPayload.model_validate(console_ns.payload)
|
args = AnnotationReplyPayload.model_validate(console_ns.payload)
|
||||||
match action:
|
match action:
|
||||||
case "enable":
|
case "enable":
|
||||||
@ -131,9 +125,9 @@ class AnnotationReplyActionApi(Resource):
|
|||||||
"embedding_provider_name": args.embedding_provider_name,
|
"embedding_provider_name": args.embedding_provider_name,
|
||||||
"embedding_model_name": args.embedding_model_name,
|
"embedding_model_name": args.embedding_model_name,
|
||||||
}
|
}
|
||||||
result = AppAnnotationService.enable_app_annotation(enable_args, app_id)
|
result = AppAnnotationService.enable_app_annotation(enable_args, str(app_id))
|
||||||
case "disable":
|
case "disable":
|
||||||
result = AppAnnotationService.disable_app_annotation(app_id)
|
result = AppAnnotationService.disable_app_annotation(str(app_id))
|
||||||
return result, 200
|
return result, 200
|
||||||
|
|
||||||
|
|
||||||
@ -148,9 +142,8 @@ class AppAnnotationSettingDetailApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
app_id = str(app_id)
|
result = AppAnnotationService.get_app_annotation_setting_by_app_id(str(app_id))
|
||||||
result = AppAnnotationService.get_app_annotation_setting_by_app_id(app_id)
|
|
||||||
return result, 200
|
return result, 200
|
||||||
|
|
||||||
|
|
||||||
@ -166,14 +159,13 @@ class AppAnnotationSettingUpdateApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id, annotation_setting_id):
|
def post(self, app_id: UUID, annotation_setting_id):
|
||||||
app_id = str(app_id)
|
|
||||||
annotation_setting_id = str(annotation_setting_id)
|
annotation_setting_id = str(annotation_setting_id)
|
||||||
|
|
||||||
args = AnnotationSettingUpdatePayload.model_validate(console_ns.payload)
|
args = AnnotationSettingUpdatePayload.model_validate(console_ns.payload)
|
||||||
|
|
||||||
setting_args: UpdateAnnotationSettingArgs = {"score_threshold": args.score_threshold}
|
setting_args: UpdateAnnotationSettingArgs = {"score_threshold": args.score_threshold}
|
||||||
result = AppAnnotationService.update_app_annotation_setting(app_id, annotation_setting_id, setting_args)
|
result = AppAnnotationService.update_app_annotation_setting(str(app_id), annotation_setting_id, setting_args)
|
||||||
return result, 200
|
return result, 200
|
||||||
|
|
||||||
|
|
||||||
@ -189,7 +181,7 @@ class AnnotationReplyActionStatusApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@cloud_edition_billing_resource_check("annotation")
|
@cloud_edition_billing_resource_check("annotation")
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id, job_id, action):
|
def get(self, app_id: UUID, job_id, action):
|
||||||
job_id = str(job_id)
|
job_id = str(job_id)
|
||||||
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
|
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
|
||||||
cache_result = redis_client.get(app_annotation_job_key)
|
cache_result = redis_client.get(app_annotation_job_key)
|
||||||
@ -217,14 +209,13 @@ class AnnotationApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
args = AnnotationListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = AnnotationListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
page = args.page
|
page = args.page
|
||||||
limit = args.limit
|
limit = args.limit
|
||||||
keyword = args.keyword
|
keyword = args.keyword
|
||||||
|
|
||||||
app_id = str(app_id)
|
annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(str(app_id), page, limit, keyword)
|
||||||
annotation_list, total = AppAnnotationService.get_annotation_list_by_app_id(app_id, page, limit, keyword)
|
|
||||||
annotation_models = TypeAdapter(list[Annotation]).validate_python(annotation_list, from_attributes=True)
|
annotation_models = TypeAdapter(list[Annotation]).validate_python(annotation_list, from_attributes=True)
|
||||||
response = AnnotationList(
|
response = AnnotationList(
|
||||||
data=annotation_models,
|
data=annotation_models,
|
||||||
@ -246,8 +237,7 @@ class AnnotationApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@cloud_edition_billing_resource_check("annotation")
|
@cloud_edition_billing_resource_check("annotation")
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id):
|
def post(self, app_id: UUID):
|
||||||
app_id = str(app_id)
|
|
||||||
args = CreateAnnotationPayload.model_validate(console_ns.payload)
|
args = CreateAnnotationPayload.model_validate(console_ns.payload)
|
||||||
upsert_args: UpsertAnnotationArgs = {}
|
upsert_args: UpsertAnnotationArgs = {}
|
||||||
if args.answer is not None:
|
if args.answer is not None:
|
||||||
@ -258,15 +248,14 @@ class AnnotationApi(Resource):
|
|||||||
upsert_args["message_id"] = args.message_id
|
upsert_args["message_id"] = args.message_id
|
||||||
if args.question is not None:
|
if args.question is not None:
|
||||||
upsert_args["question"] = args.question
|
upsert_args["question"] = args.question
|
||||||
annotation = AppAnnotationService.up_insert_app_annotation_from_message(upsert_args, app_id)
|
annotation = AppAnnotationService.up_insert_app_annotation_from_message(upsert_args, str(app_id))
|
||||||
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
|
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def delete(self, app_id):
|
def delete(self, app_id: UUID):
|
||||||
app_id = str(app_id)
|
|
||||||
|
|
||||||
# Use request.args.getlist to get annotation_ids array directly
|
# Use request.args.getlist to get annotation_ids array directly
|
||||||
annotation_ids = request.args.getlist("annotation_id")
|
annotation_ids = request.args.getlist("annotation_id")
|
||||||
@ -280,11 +269,11 @@ class AnnotationApi(Resource):
|
|||||||
"message": "annotation_ids are required if the parameter is provided.",
|
"message": "annotation_ids are required if the parameter is provided.",
|
||||||
}, 400
|
}, 400
|
||||||
|
|
||||||
result = AppAnnotationService.delete_app_annotations_in_batch(app_id, annotation_ids)
|
result = AppAnnotationService.delete_app_annotations_in_batch(str(app_id), annotation_ids)
|
||||||
return result, 204
|
return result, 204
|
||||||
# If no annotation_ids are provided, handle clearing all annotations
|
# If no annotation_ids are provided, handle clearing all annotations
|
||||||
else:
|
else:
|
||||||
AppAnnotationService.clear_all_annotations(app_id)
|
AppAnnotationService.clear_all_annotations(str(app_id))
|
||||||
return {"result": "success"}, 204
|
return {"result": "success"}, 204
|
||||||
|
|
||||||
|
|
||||||
@ -303,9 +292,8 @@ class AnnotationExportApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
app_id = str(app_id)
|
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(str(app_id))
|
||||||
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
|
|
||||||
annotation_models = TypeAdapter(list[Annotation]).validate_python(annotation_list, from_attributes=True)
|
annotation_models = TypeAdapter(list[Annotation]).validate_python(annotation_list, from_attributes=True)
|
||||||
response_data = AnnotationExportList(data=annotation_models).model_dump(mode="json")
|
response_data = AnnotationExportList(data=annotation_models).model_dump(mode="json")
|
||||||
|
|
||||||
@ -331,26 +319,22 @@ class AnnotationUpdateDeleteApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@cloud_edition_billing_resource_check("annotation")
|
@cloud_edition_billing_resource_check("annotation")
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id, annotation_id):
|
def post(self, app_id: UUID, annotation_id: UUID):
|
||||||
app_id = str(app_id)
|
|
||||||
annotation_id = str(annotation_id)
|
|
||||||
args = UpdateAnnotationPayload.model_validate(console_ns.payload)
|
args = UpdateAnnotationPayload.model_validate(console_ns.payload)
|
||||||
update_args: UpdateAnnotationArgs = {}
|
update_args: UpdateAnnotationArgs = {}
|
||||||
if args.answer is not None:
|
if args.answer is not None:
|
||||||
update_args["answer"] = args.answer
|
update_args["answer"] = args.answer
|
||||||
if args.question is not None:
|
if args.question is not None:
|
||||||
update_args["question"] = args.question
|
update_args["question"] = args.question
|
||||||
annotation = AppAnnotationService.update_app_annotation_directly(update_args, app_id, annotation_id)
|
annotation = AppAnnotationService.update_app_annotation_directly(update_args, str(app_id), str(annotation_id))
|
||||||
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
|
return Annotation.model_validate(annotation, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def delete(self, app_id, annotation_id):
|
def delete(self, app_id: UUID, annotation_id: UUID):
|
||||||
app_id = str(app_id)
|
AppAnnotationService.delete_app_annotation(str(app_id), str(annotation_id))
|
||||||
annotation_id = str(annotation_id)
|
|
||||||
AppAnnotationService.delete_app_annotation(app_id, annotation_id)
|
|
||||||
return {"result": "success"}, 204
|
return {"result": "success"}, 204
|
||||||
|
|
||||||
|
|
||||||
@ -371,11 +355,9 @@ class AnnotationBatchImportApi(Resource):
|
|||||||
@annotation_import_rate_limit
|
@annotation_import_rate_limit
|
||||||
@annotation_import_concurrency_limit
|
@annotation_import_concurrency_limit
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id):
|
def post(self, app_id: UUID):
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
|
|
||||||
app_id = str(app_id)
|
|
||||||
|
|
||||||
# check file
|
# check file
|
||||||
if "file" not in request.files:
|
if "file" not in request.files:
|
||||||
raise NoFileUploadedError()
|
raise NoFileUploadedError()
|
||||||
@ -391,9 +373,9 @@ class AnnotationBatchImportApi(Resource):
|
|||||||
raise ValueError("Invalid file type. Only CSV files are allowed")
|
raise ValueError("Invalid file type. Only CSV files are allowed")
|
||||||
|
|
||||||
# Check file size before processing
|
# Check file size before processing
|
||||||
file.seek(0, 2) # Seek to end of file
|
file.stream.seek(0, 2) # Seek to end of file
|
||||||
file_size = file.tell()
|
file_size = file.stream.tell()
|
||||||
file.seek(0) # Reset to beginning
|
file.stream.seek(0) # Reset to beginning
|
||||||
|
|
||||||
max_size_bytes = dify_config.ANNOTATION_IMPORT_FILE_SIZE_LIMIT * 1024 * 1024
|
max_size_bytes = dify_config.ANNOTATION_IMPORT_FILE_SIZE_LIMIT * 1024 * 1024
|
||||||
if file_size > max_size_bytes:
|
if file_size > max_size_bytes:
|
||||||
@ -406,7 +388,7 @@ class AnnotationBatchImportApi(Resource):
|
|||||||
if file_size == 0:
|
if file_size == 0:
|
||||||
raise ValueError("The uploaded file is empty")
|
raise ValueError("The uploaded file is empty")
|
||||||
|
|
||||||
return AppAnnotationService.batch_import_app_annotations(app_id, file)
|
return AppAnnotationService.batch_import_app_annotations(str(app_id), file)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
|
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
|
||||||
@ -421,8 +403,7 @@ class AnnotationBatchImportStatusApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@cloud_edition_billing_resource_check("annotation")
|
@cloud_edition_billing_resource_check("annotation")
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id, job_id):
|
def get(self, app_id: UUID, job_id: UUID):
|
||||||
job_id = str(job_id)
|
|
||||||
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
|
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
|
||||||
cache_result = redis_client.get(indexing_cache_key)
|
cache_result = redis_client.get(indexing_cache_key)
|
||||||
if cache_result is None:
|
if cache_result is None:
|
||||||
@ -456,13 +437,11 @@ class AnnotationHitHistoryListApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_id, annotation_id):
|
def get(self, app_id: UUID, annotation_id: UUID):
|
||||||
page = request.args.get("page", default=1, type=int)
|
page = request.args.get("page", default=1, type=int)
|
||||||
limit = request.args.get("limit", default=20, type=int)
|
limit = request.args.get("limit", default=20, type=int)
|
||||||
app_id = str(app_id)
|
|
||||||
annotation_id = str(annotation_id)
|
|
||||||
annotation_hit_history_list, total = AppAnnotationService.get_annotation_hit_histories(
|
annotation_hit_history_list, total = AppAnnotationService.get_annotation_hit_histories(
|
||||||
app_id, annotation_id, page, limit
|
str(app_id), str(annotation_id), page, limit
|
||||||
)
|
)
|
||||||
history_models = TypeAdapter(list[AnnotationHitHistory]).validate_python(
|
history_models = TypeAdapter(list[AnnotationHitHistory]).validate_python(
|
||||||
annotation_hit_history_list, from_attributes=True
|
annotation_hit_history_list, from_attributes=True
|
||||||
|
|||||||
@ -1,13 +1,16 @@
|
|||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
import uuid
|
import uuid
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Any, Literal
|
from typing import Any, Literal
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import AliasChoices, BaseModel, Field, computed_field, field_validator
|
from pydantic import AliasChoices, BaseModel, Field, computed_field, field_validator
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
from werkzeug.datastructures import MultiDict
|
||||||
from werkzeug.exceptions import BadRequest
|
from werkzeug.exceptions import BadRequest
|
||||||
|
|
||||||
from controllers.common.helpers import FileInfo
|
from controllers.common.helpers import FileInfo
|
||||||
@ -23,6 +26,7 @@ from controllers.console.wraps import (
|
|||||||
is_admin_or_owner_required,
|
is_admin_or_owner_required,
|
||||||
setup_required,
|
setup_required,
|
||||||
)
|
)
|
||||||
|
from core.db.session_factory import session_factory
|
||||||
from core.ops.ops_trace_manager import OpsTraceManager
|
from core.ops.ops_trace_manager import OpsTraceManager
|
||||||
from core.rag.entities import PreProcessingRule, Rule, Segmentation
|
from core.rag.entities import PreProcessingRule, Rule, Segmentation
|
||||||
from core.rag.retrieval.retrieval_methods import RetrievalMethod
|
from core.rag.retrieval.retrieval_methods import RetrievalMethod
|
||||||
@ -35,7 +39,7 @@ from libs.login import current_account_with_tenant, login_required
|
|||||||
from models import App, DatasetPermissionEnum, Workflow
|
from models import App, DatasetPermissionEnum, Workflow
|
||||||
from models.model import IconType
|
from models.model import IconType
|
||||||
from services.app_dsl_service import AppDslService
|
from services.app_dsl_service import AppDslService
|
||||||
from services.app_service import AppService
|
from services.app_service import AppListParams, AppService, CreateAppParams
|
||||||
from services.enterprise.enterprise_service import EnterpriseService
|
from services.enterprise.enterprise_service import EnterpriseService
|
||||||
from services.entities.dsl_entities import ImportMode, ImportStatus
|
from services.entities.dsl_entities import ImportMode, ImportStatus
|
||||||
from services.entities.knowledge_entities.knowledge_entities import (
|
from services.entities.knowledge_entities.knowledge_entities import (
|
||||||
@ -57,6 +61,7 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
|
|||||||
register_enum_models(console_ns, IconType)
|
register_enum_models(console_ns, IconType)
|
||||||
|
|
||||||
_logger = logging.getLogger(__name__)
|
_logger = logging.getLogger(__name__)
|
||||||
|
_TAG_IDS_BRACKET_PATTERN = re.compile(r"^tag_ids\[(\d+)\]$")
|
||||||
|
|
||||||
|
|
||||||
class AppListQuery(BaseModel):
|
class AppListQuery(BaseModel):
|
||||||
@ -66,22 +71,19 @@ class AppListQuery(BaseModel):
|
|||||||
default="all", description="App mode filter"
|
default="all", description="App mode filter"
|
||||||
)
|
)
|
||||||
name: str | None = Field(default=None, description="Filter by app name")
|
name: str | None = Field(default=None, description="Filter by app name")
|
||||||
tag_ids: list[str] | None = Field(default=None, description="Comma-separated tag IDs")
|
tag_ids: list[str] | None = Field(default=None, description="Filter by tag IDs")
|
||||||
is_created_by_me: bool | None = Field(default=None, description="Filter by creator")
|
is_created_by_me: bool | None = Field(default=None, description="Filter by creator")
|
||||||
|
|
||||||
@field_validator("tag_ids", mode="before")
|
@field_validator("tag_ids", mode="before")
|
||||||
@classmethod
|
@classmethod
|
||||||
def validate_tag_ids(cls, value: str | list[str] | None) -> list[str] | None:
|
def validate_tag_ids(cls, value: list[str] | None) -> list[str] | None:
|
||||||
if not value:
|
if not value:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if isinstance(value, str):
|
if not isinstance(value, list):
|
||||||
items = [item.strip() for item in value.split(",") if item.strip()]
|
raise ValueError("Unsupported tag_ids type.")
|
||||||
elif isinstance(value, list):
|
|
||||||
items = [str(item).strip() for item in value if item and str(item).strip()]
|
|
||||||
else:
|
|
||||||
raise TypeError("Unsupported tag_ids type.")
|
|
||||||
|
|
||||||
|
items = [str(item).strip() for item in value if item and str(item).strip()]
|
||||||
if not items:
|
if not items:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@ -91,6 +93,26 @@ class AppListQuery(BaseModel):
|
|||||||
raise ValueError("Invalid UUID format in tag_ids.") from exc
|
raise ValueError("Invalid UUID format in tag_ids.") from exc
|
||||||
|
|
||||||
|
|
||||||
|
def _normalize_app_list_query_args(query_args: MultiDict[str, str]) -> dict[str, str | list[str]]:
|
||||||
|
normalized: dict[str, str | list[str]] = {}
|
||||||
|
indexed_tag_ids: list[tuple[int, str]] = []
|
||||||
|
|
||||||
|
for key in query_args:
|
||||||
|
match = _TAG_IDS_BRACKET_PATTERN.fullmatch(key)
|
||||||
|
if match:
|
||||||
|
indexed_tag_ids.extend((int(match.group(1)), value) for value in query_args.getlist(key))
|
||||||
|
continue
|
||||||
|
|
||||||
|
value = query_args.get(key)
|
||||||
|
if value is not None:
|
||||||
|
normalized[key] = value
|
||||||
|
|
||||||
|
if indexed_tag_ids:
|
||||||
|
normalized["tag_ids"] = [value for _, value in sorted(indexed_tag_ids)]
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
|
||||||
|
|
||||||
class CreateAppPayload(BaseModel):
|
class CreateAppPayload(BaseModel):
|
||||||
name: str = Field(..., min_length=1, description="App name")
|
name: str = Field(..., min_length=1, description="App name")
|
||||||
description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400)
|
description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400)
|
||||||
@ -455,12 +477,19 @@ class AppListApi(Resource):
|
|||||||
"""Get app list"""
|
"""Get app list"""
|
||||||
current_user, current_tenant_id = current_account_with_tenant()
|
current_user, current_tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = AppListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = AppListQuery.model_validate(_normalize_app_list_query_args(request.args))
|
||||||
args_dict = args.model_dump()
|
params = AppListParams(
|
||||||
|
page=args.page,
|
||||||
|
limit=args.limit,
|
||||||
|
mode=args.mode,
|
||||||
|
name=args.name,
|
||||||
|
tag_ids=args.tag_ids,
|
||||||
|
is_created_by_me=args.is_created_by_me,
|
||||||
|
)
|
||||||
|
|
||||||
# get app list
|
# get app list
|
||||||
app_service = AppService()
|
app_service = AppService()
|
||||||
app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, args_dict)
|
app_pagination = app_service.get_paginate_apps(current_user.id, current_tenant_id, params)
|
||||||
if not app_pagination:
|
if not app_pagination:
|
||||||
empty = AppPagination(page=args.page, limit=args.limit, total=0, has_more=False, data=[])
|
empty = AppPagination(page=args.page, limit=args.limit, total=0, has_more=False, data=[])
|
||||||
return empty.model_dump(mode="json"), 200
|
return empty.model_dump(mode="json"), 200
|
||||||
@ -524,9 +553,17 @@ class AppListApi(Resource):
|
|||||||
"""Create app"""
|
"""Create app"""
|
||||||
current_user, current_tenant_id = current_account_with_tenant()
|
current_user, current_tenant_id = current_account_with_tenant()
|
||||||
args = CreateAppPayload.model_validate(console_ns.payload)
|
args = CreateAppPayload.model_validate(console_ns.payload)
|
||||||
|
params = CreateAppParams(
|
||||||
|
name=args.name,
|
||||||
|
description=args.description,
|
||||||
|
mode=args.mode,
|
||||||
|
icon_type=args.icon_type,
|
||||||
|
icon=args.icon,
|
||||||
|
icon_background=args.icon_background,
|
||||||
|
)
|
||||||
|
|
||||||
app_service = AppService()
|
app_service = AppService()
|
||||||
app = app_service.create_app(current_tenant_id, args.model_dump(), current_user)
|
app = app_service.create_app(current_tenant_id, params, current_user)
|
||||||
app_detail = AppDetail.model_validate(app, from_attributes=True)
|
app_detail = AppDetail.model_validate(app, from_attributes=True)
|
||||||
return app_detail.model_dump(mode="json"), 201
|
return app_detail.model_dump(mode="json"), 201
|
||||||
|
|
||||||
@ -680,7 +717,7 @@ class AppExportApi(Resource):
|
|||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
"""Export app"""
|
"""Export app"""
|
||||||
args = AppExportQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = AppExportQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
payload = AppExportResponse(
|
payload = AppExportResponse(
|
||||||
data=AppDslService.export_dsl(
|
data=AppDslService.export_dsl(
|
||||||
@ -819,9 +856,10 @@ class AppTraceApi(Resource):
|
|||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
"""Get app trace"""
|
"""Get app trace"""
|
||||||
app_trace_config = OpsTraceManager.get_app_tracing_config(app_id=app_id)
|
with session_factory.create_session() as session:
|
||||||
|
app_trace_config = OpsTraceManager.get_app_tracing_config(str(app_id), session)
|
||||||
|
|
||||||
return app_trace_config
|
return app_trace_config
|
||||||
|
|
||||||
@ -835,12 +873,12 @@ class AppTraceApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_id):
|
def post(self, app_id: UUID):
|
||||||
# add app trace
|
# add app trace
|
||||||
args = AppTracePayload.model_validate(console_ns.payload)
|
args = AppTracePayload.model_validate(console_ns.payload)
|
||||||
|
|
||||||
OpsTraceManager.update_app_tracing_config(
|
OpsTraceManager.update_app_tracing_config(
|
||||||
app_id=app_id,
|
app_id=str(app_id),
|
||||||
enabled=args.enabled,
|
enabled=args.enabled,
|
||||||
tracing_provider=args.tracing_provider,
|
tracing_provider=args.tracing_provider,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -2,7 +2,7 @@ from flask_restx import Resource
|
|||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
from controllers.common.schema import register_schema_models
|
from controllers.common.schema import register_enum_models, register_schema_models
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import (
|
from controllers.console.wraps import (
|
||||||
account_initialization_required,
|
account_initialization_required,
|
||||||
@ -33,6 +33,7 @@ class AppImportPayload(BaseModel):
|
|||||||
app_id: str | None = Field(None)
|
app_id: str | None = Field(None)
|
||||||
|
|
||||||
|
|
||||||
|
register_enum_models(console_ns, ImportStatus)
|
||||||
register_schema_models(console_ns, AppImportPayload, Import, CheckDependenciesResult)
|
register_schema_models(console_ns, AppImportPayload, Import, CheckDependenciesResult)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -173,7 +173,7 @@ class TextModesApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
try:
|
try:
|
||||||
args = TextToSpeechVoiceQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = TextToSpeechVoiceQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
response = AudioService.transcript_tts_voices(
|
response = AudioService.transcript_tts_voices(
|
||||||
tenant_id=app_model.tenant_id,
|
tenant_id=app_model.tenant_id,
|
||||||
|
|||||||
@ -7,6 +7,7 @@ from pydantic import BaseModel, Field, field_validator
|
|||||||
from werkzeug.exceptions import InternalServerError, NotFound
|
from werkzeug.exceptions import InternalServerError, NotFound
|
||||||
|
|
||||||
import services
|
import services
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import (
|
from controllers.console.app.error import (
|
||||||
AppUnavailableError,
|
AppUnavailableError,
|
||||||
@ -37,7 +38,6 @@ from services.app_task_service import AppTaskService
|
|||||||
from services.errors.llm import InvokeRateLimitError
|
from services.errors.llm import InvokeRateLimitError
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class BaseMessagePayload(BaseModel):
|
class BaseMessagePayload(BaseModel):
|
||||||
@ -65,13 +65,7 @@ class ChatMessagePayload(BaseMessagePayload):
|
|||||||
return uuid_value(value)
|
return uuid_value(value)
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, CompletionMessagePayload, ChatMessagePayload)
|
||||||
CompletionMessagePayload.__name__,
|
|
||||||
CompletionMessagePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
ChatMessagePayload.__name__, ChatMessagePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# define completion message api for user
|
# define completion message api for user
|
||||||
|
|||||||
@ -39,8 +39,6 @@ from models.model import AppMode
|
|||||||
from services.conversation_service import ConversationService
|
from services.conversation_service import ConversationService
|
||||||
from services.errors.conversation import ConversationNotExistsError
|
from services.errors.conversation import ConversationNotExistsError
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class BaseConversationQuery(BaseModel):
|
class BaseConversationQuery(BaseModel):
|
||||||
keyword: str | None = Field(default=None, description="Search keyword")
|
keyword: str | None = Field(default=None, description="Search keyword")
|
||||||
@ -70,15 +68,6 @@ class ChatConversationQuery(BaseConversationQuery):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
|
||||||
CompletionConversationQuery.__name__,
|
|
||||||
CompletionConversationQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
ChatConversationQuery.__name__,
|
|
||||||
ChatConversationQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
register_schema_models(
|
register_schema_models(
|
||||||
console_ns,
|
console_ns,
|
||||||
CompletionConversationQuery,
|
CompletionConversationQuery,
|
||||||
@ -89,6 +78,8 @@ register_schema_models(
|
|||||||
ConversationWithSummaryPaginationResponse,
|
ConversationWithSummaryPaginationResponse,
|
||||||
ConversationDetailResponse,
|
ConversationDetailResponse,
|
||||||
ResultResponse,
|
ResultResponse,
|
||||||
|
CompletionConversationQuery,
|
||||||
|
ChatConversationQuery,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -107,7 +98,7 @@ class CompletionConversationApi(Resource):
|
|||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
current_user, _ = current_account_with_tenant()
|
current_user, _ = current_account_with_tenant()
|
||||||
args = CompletionConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = CompletionConversationQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
query = sa.select(Conversation).where(
|
query = sa.select(Conversation).where(
|
||||||
Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False)
|
Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False)
|
||||||
@ -221,7 +212,7 @@ class ChatConversationApi(Resource):
|
|||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
current_user, _ = current_account_with_tenant()
|
current_user, _ = current_account_with_tenant()
|
||||||
args = ChatConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ChatConversationQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
subquery = (
|
subquery = (
|
||||||
sa.select(Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id"))
|
sa.select(Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id"))
|
||||||
|
|||||||
@ -100,7 +100,7 @@ class ConversationVariablesApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=AppMode.ADVANCED_CHAT)
|
@get_app_model(mode=AppMode.ADVANCED_CHAT)
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
args = ConversationVariablesQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ConversationVariablesQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
stmt = (
|
stmt = (
|
||||||
select(ConversationVariable)
|
select(ConversationVariable)
|
||||||
|
|||||||
@ -3,6 +3,7 @@ from collections.abc import Sequence
|
|||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from controllers.common.schema import register_enum_models, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import (
|
from controllers.console.app.error import (
|
||||||
CompletionRequestError,
|
CompletionRequestError,
|
||||||
@ -19,13 +20,12 @@ from core.helper.code_executor.python3.python3_code_provider import Python3CodeP
|
|||||||
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
|
from core.llm_generator.entities import RuleCodeGeneratePayload, RuleGeneratePayload, RuleStructuredOutputPayload
|
||||||
from core.llm_generator.llm_generator import LLMGenerator
|
from core.llm_generator.llm_generator import LLMGenerator
|
||||||
from extensions.ext_database import db
|
from extensions.ext_database import db
|
||||||
|
from graphon.model_runtime.entities.llm_entities import LLMMode
|
||||||
from graphon.model_runtime.errors.invoke import InvokeError
|
from graphon.model_runtime.errors.invoke import InvokeError
|
||||||
from libs.login import current_account_with_tenant, login_required
|
from libs.login import current_account_with_tenant, login_required
|
||||||
from models import App
|
from models import App
|
||||||
from services.workflow_service import WorkflowService
|
from services.workflow_service import WorkflowService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class InstructionGeneratePayload(BaseModel):
|
class InstructionGeneratePayload(BaseModel):
|
||||||
flow_id: str = Field(..., description="Workflow/Flow ID")
|
flow_id: str = Field(..., description="Workflow/Flow ID")
|
||||||
@ -41,16 +41,16 @@ class InstructionTemplatePayload(BaseModel):
|
|||||||
type: str = Field(..., description="Instruction template type")
|
type: str = Field(..., description="Instruction template type")
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_enum_models(console_ns, LLMMode)
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
register_schema_models(
|
||||||
|
console_ns,
|
||||||
|
RuleGeneratePayload,
|
||||||
reg(RuleGeneratePayload)
|
RuleCodeGeneratePayload,
|
||||||
reg(RuleCodeGeneratePayload)
|
RuleStructuredOutputPayload,
|
||||||
reg(RuleStructuredOutputPayload)
|
InstructionGeneratePayload,
|
||||||
reg(InstructionGeneratePayload)
|
InstructionTemplatePayload,
|
||||||
reg(InstructionTemplatePayload)
|
ModelConfig,
|
||||||
reg(ModelConfig)
|
)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rule-generate")
|
@console_ns.route("/rule-generate")
|
||||||
|
|||||||
@ -1,18 +1,18 @@
|
|||||||
from typing import Any
|
from typing import Any
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Resource, fields
|
from flask_restx import Resource, fields
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from werkzeug.exceptions import BadRequest
|
from werkzeug.exceptions import BadRequest
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
|
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
from libs.login import login_required
|
from libs.login import login_required
|
||||||
from services.ops_service import OpsService
|
from services.ops_service import OpsService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class TraceProviderQuery(BaseModel):
|
class TraceProviderQuery(BaseModel):
|
||||||
tracing_provider: str = Field(..., description="Tracing provider name")
|
tracing_provider: str = Field(..., description="Tracing provider name")
|
||||||
@ -23,13 +23,7 @@ class TraceConfigPayload(BaseModel):
|
|||||||
tracing_config: dict[str, Any] = Field(..., description="Tracing configuration data")
|
tracing_config: dict[str, Any] = Field(..., description="Tracing configuration data")
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, TraceProviderQuery, TraceConfigPayload)
|
||||||
TraceProviderQuery.__name__,
|
|
||||||
TraceProviderQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
TraceConfigPayload.__name__, TraceConfigPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/trace-config")
|
@console_ns.route("/apps/<uuid:app_id>/trace-config")
|
||||||
@ -49,11 +43,11 @@ class TraceAppConfigApi(Resource):
|
|||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
trace_config = OpsService.get_tracing_app_config(app_id=app_id, tracing_provider=args.tracing_provider)
|
trace_config = OpsService.get_tracing_app_config(app_id=str(app_id), tracing_provider=args.tracing_provider)
|
||||||
if not trace_config:
|
if not trace_config:
|
||||||
return {"has_not_configured": True}
|
return {"has_not_configured": True}
|
||||||
return trace_config
|
return trace_config
|
||||||
@ -71,13 +65,13 @@ class TraceAppConfigApi(Resource):
|
|||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def post(self, app_id):
|
def post(self, app_id: UUID):
|
||||||
"""Create a new trace app configuration"""
|
"""Create a new trace app configuration"""
|
||||||
args = TraceConfigPayload.model_validate(console_ns.payload)
|
args = TraceConfigPayload.model_validate(console_ns.payload)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = OpsService.create_tracing_app_config(
|
result = OpsService.create_tracing_app_config(
|
||||||
app_id=app_id, tracing_provider=args.tracing_provider, tracing_config=args.tracing_config
|
app_id=str(app_id), tracing_provider=args.tracing_provider, tracing_config=args.tracing_config
|
||||||
)
|
)
|
||||||
if not result:
|
if not result:
|
||||||
raise TracingConfigIsExist()
|
raise TracingConfigIsExist()
|
||||||
@ -96,13 +90,13 @@ class TraceAppConfigApi(Resource):
|
|||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def patch(self, app_id):
|
def patch(self, app_id: UUID):
|
||||||
"""Update an existing trace app configuration"""
|
"""Update an existing trace app configuration"""
|
||||||
args = TraceConfigPayload.model_validate(console_ns.payload)
|
args = TraceConfigPayload.model_validate(console_ns.payload)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = OpsService.update_tracing_app_config(
|
result = OpsService.update_tracing_app_config(
|
||||||
app_id=app_id, tracing_provider=args.tracing_provider, tracing_config=args.tracing_config
|
app_id=str(app_id), tracing_provider=args.tracing_provider, tracing_config=args.tracing_config
|
||||||
)
|
)
|
||||||
if not result:
|
if not result:
|
||||||
raise TracingConfigNotExist()
|
raise TracingConfigNotExist()
|
||||||
@ -119,12 +113,12 @@ class TraceAppConfigApi(Resource):
|
|||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def delete(self, app_id):
|
def delete(self, app_id: UUID):
|
||||||
"""Delete an existing trace app configuration"""
|
"""Delete an existing trace app configuration"""
|
||||||
args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = OpsService.delete_tracing_app_config(app_id=app_id, tracing_provider=args.tracing_provider)
|
result = OpsService.delete_tracing_app_config(app_id=str(app_id), tracing_provider=args.tracing_provider)
|
||||||
if not result:
|
if not result:
|
||||||
raise TracingConfigNotExist()
|
raise TracingConfigNotExist()
|
||||||
return {"result": "success"}, 204
|
return {"result": "success"}, 204
|
||||||
|
|||||||
@ -5,6 +5,7 @@ from flask import abort, jsonify, request
|
|||||||
from flask_restx import Resource, fields
|
from flask_restx import Resource, fields
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
@ -15,8 +16,6 @@ from libs.helper import convert_datetime_to_date
|
|||||||
from libs.login import current_account_with_tenant, login_required
|
from libs.login import current_account_with_tenant, login_required
|
||||||
from models import AppMode
|
from models import AppMode
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class StatisticTimeRangeQuery(BaseModel):
|
class StatisticTimeRangeQuery(BaseModel):
|
||||||
start: str | None = Field(default=None, description="Start date (YYYY-MM-DD HH:MM)")
|
start: str | None = Field(default=None, description="Start date (YYYY-MM-DD HH:MM)")
|
||||||
@ -30,10 +29,7 @@ class StatisticTimeRangeQuery(BaseModel):
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, StatisticTimeRangeQuery)
|
||||||
StatisticTimeRangeQuery.__name__,
|
|
||||||
StatisticTimeRangeQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
|
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
|
||||||
@ -54,7 +50,7 @@ class DailyMessageStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -111,7 +107,7 @@ class DailyConversationStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -167,7 +163,7 @@ class DailyTerminalsStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -224,7 +220,7 @@ class DailyTokenCostStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -284,7 +280,7 @@ class AverageSessionInteractionStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("c.created_at")
|
converted_created_at = convert_datetime_to_date("c.created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -360,7 +356,7 @@ class UserSatisfactionRateStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("m.created_at")
|
converted_created_at = convert_datetime_to_date("m.created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -426,7 +422,7 @@ class AverageResponseTimeStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
@ -482,7 +478,7 @@ class TokensPerSecondStatistic(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
converted_created_at = convert_datetime_to_date("created_at")
|
converted_created_at = convert_datetime_to_date("created_at")
|
||||||
sql_query = f"""SELECT
|
sql_query = f"""SELECT
|
||||||
|
|||||||
@ -11,9 +11,9 @@ from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotF
|
|||||||
|
|
||||||
import services
|
import services
|
||||||
from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
|
from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
|
||||||
|
from controllers.common.schema import register_response_schema_model, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
|
from controllers.console.app.error import ConversationCompletedError, DraftWorkflowNotExist, DraftWorkflowNotSync
|
||||||
from controllers.console.app.workflow_run import workflow_run_node_execution_model
|
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
|
||||||
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
|
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
|
||||||
@ -37,6 +37,7 @@ from factories import file_factory, variable_factory
|
|||||||
from fields.member_fields import simple_account_fields
|
from fields.member_fields import simple_account_fields
|
||||||
from fields.online_user_fields import online_user_list_fields
|
from fields.online_user_fields import online_user_list_fields
|
||||||
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
|
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
|
||||||
|
from fields.workflow_run_fields import WorkflowRunNodeExecutionResponse
|
||||||
from graphon.enums import NodeType
|
from graphon.enums import NodeType
|
||||||
from graphon.file import File
|
from graphon.file import File
|
||||||
from graphon.file import helpers as file_helpers
|
from graphon.file import helpers as file_helpers
|
||||||
@ -56,11 +57,13 @@ from services.errors.llm import InvokeRateLimitError
|
|||||||
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService
|
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
_file_access_controller = DatabaseFileAccessController()
|
_file_access_controller = DatabaseFileAccessController()
|
||||||
LISTENING_RETRY_IN = 2000
|
LISTENING_RETRY_IN = 2000
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
RESTORE_SOURCE_WORKFLOW_MUST_BE_PUBLISHED_MESSAGE = "source workflow must be published"
|
RESTORE_SOURCE_WORKFLOW_MUST_BE_PUBLISHED_MESSAGE = "source workflow must be published"
|
||||||
MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS = 50
|
MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS = 1000
|
||||||
|
WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE = 50
|
||||||
|
|
||||||
# Register models for flask_restx to avoid dict type issues in Swagger
|
# Register models for flask_restx to avoid dict type issues in Swagger
|
||||||
# Register in dependency order: base models first, then dependent models
|
# Register in dependency order: base models first, then dependent models
|
||||||
@ -158,8 +161,13 @@ class WorkflowFeaturesPayload(BaseModel):
|
|||||||
features: dict[str, Any] = Field(..., description="Workflow feature configuration")
|
features: dict[str, Any] = Field(..., description="Workflow feature configuration")
|
||||||
|
|
||||||
|
|
||||||
class WorkflowOnlineUsersQuery(BaseModel):
|
class WorkflowOnlineUsersPayload(BaseModel):
|
||||||
app_ids: str = Field(..., description="Comma-separated app IDs")
|
app_ids: list[str] = Field(default_factory=list, description="App IDs")
|
||||||
|
|
||||||
|
@field_validator("app_ids")
|
||||||
|
@classmethod
|
||||||
|
def normalize_app_ids(cls, app_ids: list[str]) -> list[str]:
|
||||||
|
return list(dict.fromkeys(app_id.strip() for app_id in app_ids if app_id.strip()))
|
||||||
|
|
||||||
|
|
||||||
class DraftWorkflowTriggerRunPayload(BaseModel):
|
class DraftWorkflowTriggerRunPayload(BaseModel):
|
||||||
@ -170,25 +178,25 @@ class DraftWorkflowTriggerRunAllPayload(BaseModel):
|
|||||||
node_ids: list[str]
|
node_ids: list[str]
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_schema_models(
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
console_ns,
|
||||||
|
SyncDraftWorkflowPayload,
|
||||||
|
AdvancedChatWorkflowRunPayload,
|
||||||
reg(SyncDraftWorkflowPayload)
|
IterationNodeRunPayload,
|
||||||
reg(AdvancedChatWorkflowRunPayload)
|
LoopNodeRunPayload,
|
||||||
reg(IterationNodeRunPayload)
|
DraftWorkflowRunPayload,
|
||||||
reg(LoopNodeRunPayload)
|
DraftWorkflowNodeRunPayload,
|
||||||
reg(DraftWorkflowRunPayload)
|
PublishWorkflowPayload,
|
||||||
reg(DraftWorkflowNodeRunPayload)
|
DefaultBlockConfigQuery,
|
||||||
reg(PublishWorkflowPayload)
|
ConvertToWorkflowPayload,
|
||||||
reg(DefaultBlockConfigQuery)
|
WorkflowListQuery,
|
||||||
reg(ConvertToWorkflowPayload)
|
WorkflowUpdatePayload,
|
||||||
reg(WorkflowListQuery)
|
WorkflowFeaturesPayload,
|
||||||
reg(WorkflowUpdatePayload)
|
WorkflowOnlineUsersPayload,
|
||||||
reg(WorkflowFeaturesPayload)
|
DraftWorkflowTriggerRunPayload,
|
||||||
reg(WorkflowOnlineUsersQuery)
|
DraftWorkflowTriggerRunAllPayload,
|
||||||
reg(DraftWorkflowTriggerRunPayload)
|
)
|
||||||
reg(DraftWorkflowTriggerRunAllPayload)
|
register_response_schema_model(console_ns, WorkflowRunNodeExecutionResponse)
|
||||||
|
|
||||||
|
|
||||||
# TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing
|
# TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing
|
||||||
@ -534,9 +542,12 @@ class HumanInputDeliveryTestPayload(BaseModel):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
reg(HumanInputFormPreviewPayload)
|
register_schema_models(
|
||||||
reg(HumanInputFormSubmitPayload)
|
console_ns,
|
||||||
reg(HumanInputDeliveryTestPayload)
|
HumanInputFormPreviewPayload,
|
||||||
|
HumanInputFormSubmitPayload,
|
||||||
|
HumanInputDeliveryTestPayload,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/human-input/nodes/<string:node_id>/form/preview")
|
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflows/draft/human-input/nodes/<string:node_id>/form/preview")
|
||||||
@ -754,14 +765,17 @@ class DraftWorkflowNodeRunApi(Resource):
|
|||||||
@console_ns.doc(description="Run draft workflow node")
|
@console_ns.doc(description="Run draft workflow node")
|
||||||
@console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
|
@console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
|
||||||
@console_ns.expect(console_ns.models[DraftWorkflowNodeRunPayload.__name__])
|
@console_ns.expect(console_ns.models[DraftWorkflowNodeRunPayload.__name__])
|
||||||
@console_ns.response(200, "Node run started successfully", workflow_run_node_execution_model)
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node run started successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
@console_ns.response(403, "Permission denied")
|
@console_ns.response(403, "Permission denied")
|
||||||
@console_ns.response(404, "Node not found")
|
@console_ns.response(404, "Node not found")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_node_execution_model)
|
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
def post(self, app_model: App, node_id: str):
|
def post(self, app_model: App, node_id: str):
|
||||||
"""
|
"""
|
||||||
@ -793,7 +807,9 @@ class DraftWorkflowNodeRunApi(Resource):
|
|||||||
files=files,
|
files=files,
|
||||||
)
|
)
|
||||||
|
|
||||||
return workflow_node_execution
|
return WorkflowRunNodeExecutionResponse.model_validate(
|
||||||
|
workflow_node_execution, from_attributes=True
|
||||||
|
).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
|
@console_ns.route("/apps/<uuid:app_id>/workflows/publish")
|
||||||
@ -896,7 +912,7 @@ class DefaultBlockConfigApi(Resource):
|
|||||||
"""
|
"""
|
||||||
Get default block config
|
Get default block config
|
||||||
"""
|
"""
|
||||||
args = DefaultBlockConfigQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = DefaultBlockConfigQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
filters = None
|
filters = None
|
||||||
if args.q:
|
if args.q:
|
||||||
@ -989,7 +1005,7 @@ class PublishedAllWorkflowApi(Resource):
|
|||||||
"""
|
"""
|
||||||
current_user, _ = current_account_with_tenant()
|
current_user, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = WorkflowListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
page = args.page
|
page = args.page
|
||||||
limit = args.limit
|
limit = args.limit
|
||||||
user_id = args.user_id
|
user_id = args.user_id
|
||||||
@ -1137,14 +1153,17 @@ class DraftWorkflowNodeLastRunApi(Resource):
|
|||||||
@console_ns.doc("get_draft_workflow_node_last_run")
|
@console_ns.doc("get_draft_workflow_node_last_run")
|
||||||
@console_ns.doc(description="Get last run result for draft workflow node")
|
@console_ns.doc(description="Get last run result for draft workflow node")
|
||||||
@console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
|
@console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
|
||||||
@console_ns.response(200, "Node last run retrieved successfully", workflow_run_node_execution_model)
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node last run retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
@console_ns.response(404, "Node last run not found")
|
@console_ns.response(404, "Node last run not found")
|
||||||
@console_ns.response(403, "Permission denied")
|
@console_ns.response(403, "Permission denied")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_node_execution_model)
|
|
||||||
def get(self, app_model: App, node_id: str):
|
def get(self, app_model: App, node_id: str):
|
||||||
srv = WorkflowService()
|
srv = WorkflowService()
|
||||||
workflow = srv.get_draft_workflow(app_model)
|
workflow = srv.get_draft_workflow(app_model)
|
||||||
@ -1157,7 +1176,7 @@ class DraftWorkflowNodeLastRunApi(Resource):
|
|||||||
)
|
)
|
||||||
if node_exec is None:
|
if node_exec is None:
|
||||||
raise NotFound("last run not found")
|
raise NotFound("last run not found")
|
||||||
return node_exec
|
return WorkflowRunNodeExecutionResponse.model_validate(node_exec, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/trigger/run")
|
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/trigger/run")
|
||||||
@ -1384,19 +1403,19 @@ class DraftWorkflowTriggerRunAllApi(Resource):
|
|||||||
|
|
||||||
@console_ns.route("/apps/workflows/online-users")
|
@console_ns.route("/apps/workflows/online-users")
|
||||||
class WorkflowOnlineUsersApi(Resource):
|
class WorkflowOnlineUsersApi(Resource):
|
||||||
@console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
|
@console_ns.expect(console_ns.models[WorkflowOnlineUsersPayload.__name__])
|
||||||
@console_ns.doc("get_workflow_online_users")
|
@console_ns.doc("get_workflow_online_users")
|
||||||
@console_ns.doc(description="Get workflow online users")
|
@console_ns.doc(description="Get workflow online users")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@marshal_with(online_user_list_fields)
|
@marshal_with(online_user_list_fields)
|
||||||
def get(self):
|
def post(self):
|
||||||
args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowOnlineUsersPayload.model_validate(console_ns.payload or {})
|
||||||
|
|
||||||
app_ids = list(dict.fromkeys(app_id.strip() for app_id in args.app_ids.split(",") if app_id.strip()))
|
app_ids = args.app_ids
|
||||||
if len(app_ids) > MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS:
|
if len(app_ids) > MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS:
|
||||||
raise BadRequest(f"Maximum {MAX_WORKFLOW_ONLINE_USERS_QUERY_IDS} app_ids are allowed per request.")
|
raise BadRequest(f"Maximum {MAX_WORKFLOW_ONLINE_USERS_REQUEST_IDS} app_ids are allowed per request.")
|
||||||
|
|
||||||
if not app_ids:
|
if not app_ids:
|
||||||
return {"data": []}
|
return {"data": []}
|
||||||
@ -1404,13 +1423,24 @@ class WorkflowOnlineUsersApi(Resource):
|
|||||||
_, current_tenant_id = current_account_with_tenant()
|
_, current_tenant_id = current_account_with_tenant()
|
||||||
workflow_service = WorkflowService()
|
workflow_service = WorkflowService()
|
||||||
accessible_app_ids = workflow_service.get_accessible_app_ids(app_ids, current_tenant_id)
|
accessible_app_ids = workflow_service.get_accessible_app_ids(app_ids, current_tenant_id)
|
||||||
|
ordered_accessible_app_ids = [app_id for app_id in app_ids if app_id in accessible_app_ids]
|
||||||
|
|
||||||
|
users_json_by_app_id: dict[str, Any] = {}
|
||||||
|
for start_index in range(0, len(ordered_accessible_app_ids), WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE):
|
||||||
|
app_id_batch = ordered_accessible_app_ids[
|
||||||
|
start_index : start_index + WORKFLOW_ONLINE_USERS_REDIS_BATCH_SIZE
|
||||||
|
]
|
||||||
|
pipe = redis_client.pipeline(transaction=False)
|
||||||
|
for app_id in app_id_batch:
|
||||||
|
pipe.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{app_id}")
|
||||||
|
|
||||||
|
users_json_batch = pipe.execute()
|
||||||
|
for app_id, users_json in zip(app_id_batch, users_json_batch):
|
||||||
|
users_json_by_app_id[app_id] = users_json
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
for app_id in app_ids:
|
for app_id in ordered_accessible_app_ids:
|
||||||
if app_id not in accessible_app_ids:
|
users_json = users_json_by_app_id.get(app_id, {})
|
||||||
continue
|
|
||||||
|
|
||||||
users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{app_id}")
|
|
||||||
|
|
||||||
users = []
|
users = []
|
||||||
for _, user_info_json in users_json.items():
|
for _, user_info_json in users_json.items():
|
||||||
|
|||||||
@ -185,7 +185,7 @@ class WorkflowAppLogApi(Resource):
|
|||||||
"""
|
"""
|
||||||
Get workflow app logs
|
Get workflow app logs
|
||||||
"""
|
"""
|
||||||
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
# get paginate workflow app logs
|
# get paginate workflow app logs
|
||||||
workflow_app_service = WorkflowAppService()
|
workflow_app_service = WorkflowAppService()
|
||||||
@ -228,7 +228,7 @@ class WorkflowArchivedLogApi(Resource):
|
|||||||
"""
|
"""
|
||||||
Get workflow archived logs
|
Get workflow archived logs
|
||||||
"""
|
"""
|
||||||
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
workflow_app_service = WorkflowAppService()
|
workflow_app_service = WorkflowAppService()
|
||||||
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
|
with sessionmaker(db.engine, expire_on_commit=False).begin() as session:
|
||||||
|
|||||||
@ -23,7 +23,6 @@ from services.account_service import TenantService
|
|||||||
from services.workflow_comment_service import WorkflowCommentService
|
from services.workflow_comment_service import WorkflowCommentService
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowCommentCreatePayload(BaseModel):
|
class WorkflowCommentCreatePayload(BaseModel):
|
||||||
@ -52,13 +51,14 @@ class WorkflowCommentMentionUsersPayload(BaseModel):
|
|||||||
users: list[AccountWithRole]
|
users: list[AccountWithRole]
|
||||||
|
|
||||||
|
|
||||||
for model in (
|
register_schema_models(
|
||||||
|
console_ns,
|
||||||
|
AccountWithRole,
|
||||||
|
WorkflowCommentMentionUsersPayload,
|
||||||
WorkflowCommentCreatePayload,
|
WorkflowCommentCreatePayload,
|
||||||
WorkflowCommentUpdatePayload,
|
WorkflowCommentUpdatePayload,
|
||||||
WorkflowCommentReplyPayload,
|
WorkflowCommentReplyPayload,
|
||||||
):
|
)
|
||||||
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
register_schema_models(console_ns, AccountWithRole, WorkflowCommentMentionUsersPayload)
|
|
||||||
|
|
||||||
workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
|
workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
|
||||||
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
|
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
|
||||||
|
|||||||
@ -8,6 +8,7 @@ from flask_restx import Resource, fields, marshal, marshal_with
|
|||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import (
|
from controllers.console.app.error import (
|
||||||
DraftWorkflowNotExist,
|
DraftWorkflowNotExist,
|
||||||
@ -33,7 +34,6 @@ from services.workflow_service import WorkflowService
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
_file_access_controller = DatabaseFileAccessController()
|
_file_access_controller = DatabaseFileAccessController()
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowDraftVariableListQuery(BaseModel):
|
class WorkflowDraftVariableListQuery(BaseModel):
|
||||||
@ -56,33 +56,25 @@ class EnvironmentVariableUpdatePayload(BaseModel):
|
|||||||
environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")
|
environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(
|
||||||
WorkflowDraftVariableListQuery.__name__,
|
console_ns,
|
||||||
WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
WorkflowDraftVariableListQuery,
|
||||||
)
|
WorkflowDraftVariableUpdatePayload,
|
||||||
console_ns.schema_model(
|
ConversationVariableUpdatePayload,
|
||||||
WorkflowDraftVariableUpdatePayload.__name__,
|
EnvironmentVariableUpdatePayload,
|
||||||
WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
ConversationVariableUpdatePayload.__name__,
|
|
||||||
ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
EnvironmentVariableUpdatePayload.__name__,
|
|
||||||
EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _convert_values_to_json_serializable_object(value: Segment):
|
def _convert_values_to_json_serializable_object(value: Segment):
|
||||||
if isinstance(value, FileSegment):
|
match value:
|
||||||
return value.value.model_dump()
|
case FileSegment():
|
||||||
elif isinstance(value, ArrayFileSegment):
|
return value.value.model_dump()
|
||||||
return [i.model_dump() for i in value.value]
|
case ArrayFileSegment():
|
||||||
elif isinstance(value, SegmentGroup):
|
return [i.model_dump() for i in value.value]
|
||||||
return [_convert_values_to_json_serializable_object(i) for i in value.value]
|
case SegmentGroup():
|
||||||
else:
|
return [_convert_values_to_json_serializable_object(i) for i in value.value]
|
||||||
return value.value
|
case _:
|
||||||
|
return value.value
|
||||||
|
|
||||||
|
|
||||||
def _serialize_var_value(variable: WorkflowDraftVariable):
|
def _serialize_var_value(variable: WorkflowDraftVariable):
|
||||||
@ -259,7 +251,7 @@ class WorkflowVariableCollectionApi(Resource):
|
|||||||
"""
|
"""
|
||||||
Get draft workflow
|
Get draft workflow
|
||||||
"""
|
"""
|
||||||
args = WorkflowDraftVariableListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowDraftVariableListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
# fetch draft workflow by app_model
|
# fetch draft workflow by app_model
|
||||||
workflow_service = WorkflowService()
|
workflow_service = WorkflowService()
|
||||||
|
|||||||
@ -1,30 +1,28 @@
|
|||||||
from datetime import UTC, datetime, timedelta
|
from datetime import UTC, datetime, timedelta
|
||||||
from typing import Literal, TypedDict, cast
|
from typing import Literal, cast
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Resource, fields, marshal_with
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
|
from controllers.common.schema import query_params_from_model, register_response_schema_models, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
from controllers.web.error import NotFoundError
|
from controllers.web.error import NotFoundError
|
||||||
from core.workflow.human_input_forms import load_form_tokens_by_form_id as _load_form_tokens_by_form_id
|
from core.workflow.human_input_forms import load_form_tokens_by_form_id as _load_form_tokens_by_form_id
|
||||||
from extensions.ext_database import db
|
from extensions.ext_database import db
|
||||||
from fields.end_user_fields import simple_end_user_fields
|
from fields.base import ResponseModel
|
||||||
from fields.member_fields import simple_account_fields
|
|
||||||
from fields.workflow_run_fields import (
|
from fields.workflow_run_fields import (
|
||||||
advanced_chat_workflow_run_for_list_fields,
|
AdvancedChatWorkflowRunPaginationResponse,
|
||||||
advanced_chat_workflow_run_pagination_fields,
|
WorkflowRunCountResponse,
|
||||||
workflow_run_count_fields,
|
WorkflowRunDetailResponse,
|
||||||
workflow_run_detail_fields,
|
WorkflowRunNodeExecutionListResponse,
|
||||||
workflow_run_for_list_fields,
|
WorkflowRunNodeExecutionResponse,
|
||||||
workflow_run_node_execution_fields,
|
WorkflowRunPaginationResponse,
|
||||||
workflow_run_node_execution_list_fields,
|
|
||||||
workflow_run_pagination_fields,
|
|
||||||
)
|
)
|
||||||
from graphon.entities.pause_reason import HumanInputRequired
|
from graphon.entities.pause_reason import HumanInputRequired
|
||||||
from graphon.enums import WorkflowExecutionStatus
|
from graphon.enums import WorkflowExecutionStatus
|
||||||
@ -52,82 +50,6 @@ def _build_backstage_input_url(form_token: str | None) -> str | None:
|
|||||||
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
|
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
|
||||||
EXPORT_SIGNED_URL_EXPIRE_SECONDS = 3600
|
EXPORT_SIGNED_URL_EXPIRE_SECONDS = 3600
|
||||||
|
|
||||||
# Register models for flask_restx to avoid dict type issues in Swagger
|
|
||||||
# Register in dependency order: base models first, then dependent models
|
|
||||||
|
|
||||||
# Base models
|
|
||||||
simple_account_model = console_ns.model("SimpleAccount", simple_account_fields)
|
|
||||||
|
|
||||||
simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields)
|
|
||||||
|
|
||||||
# Models that depend on simple_account_fields
|
|
||||||
workflow_run_for_list_fields_copy = workflow_run_for_list_fields.copy()
|
|
||||||
workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested(
|
|
||||||
simple_account_model, attribute="created_by_account", allow_null=True
|
|
||||||
)
|
|
||||||
workflow_run_for_list_model = console_ns.model("WorkflowRunForList", workflow_run_for_list_fields_copy)
|
|
||||||
|
|
||||||
advanced_chat_workflow_run_for_list_fields_copy = advanced_chat_workflow_run_for_list_fields.copy()
|
|
||||||
advanced_chat_workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested(
|
|
||||||
simple_account_model, attribute="created_by_account", allow_null=True
|
|
||||||
)
|
|
||||||
advanced_chat_workflow_run_for_list_model = console_ns.model(
|
|
||||||
"AdvancedChatWorkflowRunForList", advanced_chat_workflow_run_for_list_fields_copy
|
|
||||||
)
|
|
||||||
|
|
||||||
workflow_run_detail_fields_copy = workflow_run_detail_fields.copy()
|
|
||||||
workflow_run_detail_fields_copy["created_by_account"] = fields.Nested(
|
|
||||||
simple_account_model, attribute="created_by_account", allow_null=True
|
|
||||||
)
|
|
||||||
workflow_run_detail_fields_copy["created_by_end_user"] = fields.Nested(
|
|
||||||
simple_end_user_model, attribute="created_by_end_user", allow_null=True
|
|
||||||
)
|
|
||||||
workflow_run_detail_model = console_ns.model("WorkflowRunDetail", workflow_run_detail_fields_copy)
|
|
||||||
|
|
||||||
workflow_run_node_execution_fields_copy = workflow_run_node_execution_fields.copy()
|
|
||||||
workflow_run_node_execution_fields_copy["created_by_account"] = fields.Nested(
|
|
||||||
simple_account_model, attribute="created_by_account", allow_null=True
|
|
||||||
)
|
|
||||||
workflow_run_node_execution_fields_copy["created_by_end_user"] = fields.Nested(
|
|
||||||
simple_end_user_model, attribute="created_by_end_user", allow_null=True
|
|
||||||
)
|
|
||||||
workflow_run_node_execution_model = console_ns.model(
|
|
||||||
"WorkflowRunNodeExecution", workflow_run_node_execution_fields_copy
|
|
||||||
)
|
|
||||||
|
|
||||||
# Simple models without nested dependencies
|
|
||||||
workflow_run_count_model = console_ns.model("WorkflowRunCount", workflow_run_count_fields)
|
|
||||||
|
|
||||||
# Pagination models that depend on list models
|
|
||||||
advanced_chat_workflow_run_pagination_fields_copy = advanced_chat_workflow_run_pagination_fields.copy()
|
|
||||||
advanced_chat_workflow_run_pagination_fields_copy["data"] = fields.List(
|
|
||||||
fields.Nested(advanced_chat_workflow_run_for_list_model), attribute="data"
|
|
||||||
)
|
|
||||||
advanced_chat_workflow_run_pagination_model = console_ns.model(
|
|
||||||
"AdvancedChatWorkflowRunPagination", advanced_chat_workflow_run_pagination_fields_copy
|
|
||||||
)
|
|
||||||
|
|
||||||
workflow_run_pagination_fields_copy = workflow_run_pagination_fields.copy()
|
|
||||||
workflow_run_pagination_fields_copy["data"] = fields.List(fields.Nested(workflow_run_for_list_model), attribute="data")
|
|
||||||
workflow_run_pagination_model = console_ns.model("WorkflowRunPagination", workflow_run_pagination_fields_copy)
|
|
||||||
|
|
||||||
workflow_run_node_execution_list_fields_copy = workflow_run_node_execution_list_fields.copy()
|
|
||||||
workflow_run_node_execution_list_fields_copy["data"] = fields.List(fields.Nested(workflow_run_node_execution_model))
|
|
||||||
workflow_run_node_execution_list_model = console_ns.model(
|
|
||||||
"WorkflowRunNodeExecutionList", workflow_run_node_execution_list_fields_copy
|
|
||||||
)
|
|
||||||
|
|
||||||
workflow_run_export_fields = console_ns.model(
|
|
||||||
"WorkflowRunExport",
|
|
||||||
{
|
|
||||||
"status": fields.String(description="Export status: success/failed"),
|
|
||||||
"presigned_url": fields.String(description="Pre-signed URL for download", required=False),
|
|
||||||
"presigned_url_expires_at": fields.String(description="Pre-signed URL expiration time", required=False),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowRunListQuery(BaseModel):
|
class WorkflowRunListQuery(BaseModel):
|
||||||
last_id: str | None = Field(default=None, description="Last run ID for pagination")
|
last_id: str | None = Field(default=None, description="Last run ID for pagination")
|
||||||
@ -136,7 +58,7 @@ class WorkflowRunListQuery(BaseModel):
|
|||||||
default=None, description="Workflow run status filter"
|
default=None, description="Workflow run status filter"
|
||||||
)
|
)
|
||||||
triggered_from: Literal["debugging", "app-run"] | None = Field(
|
triggered_from: Literal["debugging", "app-run"] | None = Field(
|
||||||
default=None, description="Filter by trigger source: debugging or app-run"
|
default=None, description="Filter by trigger source: debugging or app-run. Default: debugging"
|
||||||
)
|
)
|
||||||
|
|
||||||
@field_validator("last_id")
|
@field_validator("last_id")
|
||||||
@ -151,9 +73,15 @@ class WorkflowRunCountQuery(BaseModel):
|
|||||||
status: Literal["running", "succeeded", "failed", "stopped", "partial-succeeded"] | None = Field(
|
status: Literal["running", "succeeded", "failed", "stopped", "partial-succeeded"] | None = Field(
|
||||||
default=None, description="Workflow run status filter"
|
default=None, description="Workflow run status filter"
|
||||||
)
|
)
|
||||||
time_range: str | None = Field(default=None, description="Time range filter (e.g., 7d, 4h, 30m, 30s)")
|
time_range: str | None = Field(
|
||||||
|
default=None,
|
||||||
|
description=(
|
||||||
|
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
|
||||||
|
"30m (30 minutes), 30s (30 seconds). Filters by created_at field."
|
||||||
|
),
|
||||||
|
)
|
||||||
triggered_from: Literal["debugging", "app-run"] | None = Field(
|
triggered_from: Literal["debugging", "app-run"] | None = Field(
|
||||||
default=None, description="Filter by trigger source: debugging or app-run"
|
default=None, description="Filter by trigger source: debugging or app-run. Default: debugging"
|
||||||
)
|
)
|
||||||
|
|
||||||
@field_validator("time_range")
|
@field_validator("time_range")
|
||||||
@ -164,56 +92,69 @@ class WorkflowRunCountQuery(BaseModel):
|
|||||||
return time_duration(value)
|
return time_duration(value)
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
class WorkflowRunExportResponse(ResponseModel):
|
||||||
WorkflowRunListQuery.__name__, WorkflowRunListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
status: str = Field(description="Export status: success/failed")
|
||||||
)
|
presigned_url: str | None = Field(default=None, description="Pre-signed URL for download")
|
||||||
console_ns.schema_model(
|
presigned_url_expires_at: str | None = Field(default=None, description="Pre-signed URL expiration time")
|
||||||
WorkflowRunCountQuery.__name__,
|
|
||||||
WorkflowRunCountQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class HumanInputPauseTypeResponse(TypedDict):
|
class HumanInputPauseTypeResponse(ResponseModel):
|
||||||
type: Literal["human_input"]
|
type: Literal["human_input"]
|
||||||
form_id: str
|
form_id: str
|
||||||
backstage_input_url: str | None
|
backstage_input_url: str | None = None
|
||||||
|
|
||||||
|
|
||||||
class PausedNodeResponse(TypedDict):
|
class PausedNodeResponse(ResponseModel):
|
||||||
node_id: str
|
node_id: str
|
||||||
node_title: str
|
node_title: str
|
||||||
pause_type: HumanInputPauseTypeResponse
|
pause_type: HumanInputPauseTypeResponse
|
||||||
|
|
||||||
|
|
||||||
class WorkflowPauseDetailsResponse(TypedDict):
|
class WorkflowPauseDetailsResponse(ResponseModel):
|
||||||
paused_at: str | None
|
paused_at: str | None = None
|
||||||
paused_nodes: list[PausedNodeResponse]
|
paused_nodes: list[PausedNodeResponse]
|
||||||
|
|
||||||
|
|
||||||
|
register_schema_models(
|
||||||
|
console_ns,
|
||||||
|
WorkflowRunListQuery,
|
||||||
|
WorkflowRunCountQuery,
|
||||||
|
)
|
||||||
|
register_response_schema_models(
|
||||||
|
console_ns,
|
||||||
|
AdvancedChatWorkflowRunPaginationResponse,
|
||||||
|
WorkflowRunPaginationResponse,
|
||||||
|
WorkflowRunCountResponse,
|
||||||
|
WorkflowRunDetailResponse,
|
||||||
|
WorkflowRunNodeExecutionResponse,
|
||||||
|
WorkflowRunNodeExecutionListResponse,
|
||||||
|
WorkflowRunExportResponse,
|
||||||
|
HumanInputPauseTypeResponse,
|
||||||
|
PausedNodeResponse,
|
||||||
|
WorkflowPauseDetailsResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
|
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
|
||||||
class AdvancedChatAppWorkflowRunListApi(Resource):
|
class AdvancedChatAppWorkflowRunListApi(Resource):
|
||||||
@console_ns.doc("get_advanced_chat_workflow_runs")
|
@console_ns.doc("get_advanced_chat_workflow_runs")
|
||||||
@console_ns.doc(description="Get advanced chat workflow run list")
|
@console_ns.doc(description="Get advanced chat workflow run list")
|
||||||
@console_ns.doc(params={"app_id": "Application ID"})
|
@console_ns.doc(params={"app_id": "Application ID"})
|
||||||
@console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
|
@console_ns.doc(params=query_params_from_model(WorkflowRunListQuery))
|
||||||
@console_ns.doc(
|
@console_ns.response(
|
||||||
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
|
200,
|
||||||
|
"Workflow runs retrieved successfully",
|
||||||
|
console_ns.models[AdvancedChatWorkflowRunPaginationResponse.__name__],
|
||||||
)
|
)
|
||||||
@console_ns.doc(
|
|
||||||
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
|
|
||||||
)
|
|
||||||
@console_ns.expect(console_ns.models[WorkflowRunListQuery.__name__])
|
|
||||||
@console_ns.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_model)
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
|
||||||
@marshal_with(advanced_chat_workflow_run_pagination_model)
|
|
||||||
def get(self, app_model: App):
|
def get(self, app_model: App):
|
||||||
"""
|
"""
|
||||||
Get advanced chat app workflow run list
|
Get advanced chat app workflow run list
|
||||||
"""
|
"""
|
||||||
args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
args: WorkflowRunListArgs = {"limit": args_model.limit}
|
args: WorkflowRunListArgs = {"limit": args_model.limit}
|
||||||
if args_model.last_id is not None:
|
if args_model.last_id is not None:
|
||||||
args["last_id"] = args_model.last_id
|
args["last_id"] = args_model.last_id
|
||||||
@ -232,7 +173,9 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
|
|||||||
app_model=app_model, args=args, triggered_from=triggered_from
|
app_model=app_model, args=args, triggered_from=triggered_from
|
||||||
)
|
)
|
||||||
|
|
||||||
return result
|
return AdvancedChatWorkflowRunPaginationResponse.model_validate(result, from_attributes=True).model_dump(
|
||||||
|
mode="json"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/export")
|
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/export")
|
||||||
@ -240,7 +183,7 @@ class WorkflowRunExportApi(Resource):
|
|||||||
@console_ns.doc("get_workflow_run_export_url")
|
@console_ns.doc("get_workflow_run_export_url")
|
||||||
@console_ns.doc(description="Generate a download URL for an archived workflow run.")
|
@console_ns.doc(description="Generate a download URL for an archived workflow run.")
|
||||||
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
||||||
@console_ns.response(200, "Export URL generated", workflow_run_export_fields)
|
@console_ns.response(200, "Export URL generated", console_ns.models[WorkflowRunExportResponse.__name__])
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@ -278,11 +221,14 @@ class WorkflowRunExportApi(Resource):
|
|||||||
expires_in=EXPORT_SIGNED_URL_EXPIRE_SECONDS,
|
expires_in=EXPORT_SIGNED_URL_EXPIRE_SECONDS,
|
||||||
)
|
)
|
||||||
expires_at = datetime.now(UTC) + timedelta(seconds=EXPORT_SIGNED_URL_EXPIRE_SECONDS)
|
expires_at = datetime.now(UTC) + timedelta(seconds=EXPORT_SIGNED_URL_EXPIRE_SECONDS)
|
||||||
return {
|
response = WorkflowRunExportResponse.model_validate(
|
||||||
"status": "success",
|
{
|
||||||
"presigned_url": presigned_url,
|
"status": "success",
|
||||||
"presigned_url_expires_at": expires_at.isoformat(),
|
"presigned_url": presigned_url,
|
||||||
}, 200
|
"presigned_url_expires_at": expires_at.isoformat(),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return response.model_dump(mode="json"), 200
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
|
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
|
||||||
@ -290,32 +236,21 @@ class AdvancedChatAppWorkflowRunCountApi(Resource):
|
|||||||
@console_ns.doc("get_advanced_chat_workflow_runs_count")
|
@console_ns.doc("get_advanced_chat_workflow_runs_count")
|
||||||
@console_ns.doc(description="Get advanced chat workflow runs count statistics")
|
@console_ns.doc(description="Get advanced chat workflow runs count statistics")
|
||||||
@console_ns.doc(params={"app_id": "Application ID"})
|
@console_ns.doc(params={"app_id": "Application ID"})
|
||||||
@console_ns.doc(
|
@console_ns.doc(params=query_params_from_model(WorkflowRunCountQuery))
|
||||||
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow runs count retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunCountResponse.__name__],
|
||||||
)
|
)
|
||||||
@console_ns.doc(
|
|
||||||
params={
|
|
||||||
"time_range": (
|
|
||||||
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
|
|
||||||
"30m (30 minutes), 30s (30 seconds). Filters by created_at field."
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
@console_ns.doc(
|
|
||||||
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
|
|
||||||
)
|
|
||||||
@console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model)
|
|
||||||
@console_ns.expect(console_ns.models[WorkflowRunCountQuery.__name__])
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
|
||||||
@marshal_with(workflow_run_count_model)
|
|
||||||
def get(self, app_model: App):
|
def get(self, app_model: App):
|
||||||
"""
|
"""
|
||||||
Get advanced chat workflow runs count statistics
|
Get advanced chat workflow runs count statistics
|
||||||
"""
|
"""
|
||||||
args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
args = args_model.model_dump(exclude_none=True)
|
args = args_model.model_dump(exclude_none=True)
|
||||||
|
|
||||||
# Default to DEBUGGING if not specified
|
# Default to DEBUGGING if not specified
|
||||||
@ -333,7 +268,7 @@ class AdvancedChatAppWorkflowRunCountApi(Resource):
|
|||||||
triggered_from=triggered_from,
|
triggered_from=triggered_from,
|
||||||
)
|
)
|
||||||
|
|
||||||
return result
|
return WorkflowRunCountResponse.model_validate(result).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
|
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
|
||||||
@ -341,25 +276,21 @@ class WorkflowRunListApi(Resource):
|
|||||||
@console_ns.doc("get_workflow_runs")
|
@console_ns.doc("get_workflow_runs")
|
||||||
@console_ns.doc(description="Get workflow run list")
|
@console_ns.doc(description="Get workflow run list")
|
||||||
@console_ns.doc(params={"app_id": "Application ID"})
|
@console_ns.doc(params={"app_id": "Application ID"})
|
||||||
@console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
|
@console_ns.doc(params=query_params_from_model(WorkflowRunListQuery))
|
||||||
@console_ns.doc(
|
@console_ns.response(
|
||||||
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
|
200,
|
||||||
|
"Workflow runs retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunPaginationResponse.__name__],
|
||||||
)
|
)
|
||||||
@console_ns.doc(
|
|
||||||
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
|
|
||||||
)
|
|
||||||
@console_ns.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_model)
|
|
||||||
@console_ns.expect(console_ns.models[WorkflowRunListQuery.__name__])
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_pagination_model)
|
|
||||||
def get(self, app_model: App):
|
def get(self, app_model: App):
|
||||||
"""
|
"""
|
||||||
Get workflow run list
|
Get workflow run list
|
||||||
"""
|
"""
|
||||||
args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
args: WorkflowRunListArgs = {"limit": args_model.limit}
|
args: WorkflowRunListArgs = {"limit": args_model.limit}
|
||||||
if args_model.last_id is not None:
|
if args_model.last_id is not None:
|
||||||
args["last_id"] = args_model.last_id
|
args["last_id"] = args_model.last_id
|
||||||
@ -378,7 +309,7 @@ class WorkflowRunListApi(Resource):
|
|||||||
app_model=app_model, args=args, triggered_from=triggered_from
|
app_model=app_model, args=args, triggered_from=triggered_from
|
||||||
)
|
)
|
||||||
|
|
||||||
return result
|
return WorkflowRunPaginationResponse.model_validate(result, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/count")
|
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/count")
|
||||||
@ -386,32 +317,21 @@ class WorkflowRunCountApi(Resource):
|
|||||||
@console_ns.doc("get_workflow_runs_count")
|
@console_ns.doc("get_workflow_runs_count")
|
||||||
@console_ns.doc(description="Get workflow runs count statistics")
|
@console_ns.doc(description="Get workflow runs count statistics")
|
||||||
@console_ns.doc(params={"app_id": "Application ID"})
|
@console_ns.doc(params={"app_id": "Application ID"})
|
||||||
@console_ns.doc(
|
@console_ns.doc(params=query_params_from_model(WorkflowRunCountQuery))
|
||||||
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow runs count retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunCountResponse.__name__],
|
||||||
)
|
)
|
||||||
@console_ns.doc(
|
|
||||||
params={
|
|
||||||
"time_range": (
|
|
||||||
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
|
|
||||||
"30m (30 minutes), 30s (30 seconds). Filters by created_at field."
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
@console_ns.doc(
|
|
||||||
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
|
|
||||||
)
|
|
||||||
@console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model)
|
|
||||||
@console_ns.expect(console_ns.models[WorkflowRunCountQuery.__name__])
|
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_count_model)
|
|
||||||
def get(self, app_model: App):
|
def get(self, app_model: App):
|
||||||
"""
|
"""
|
||||||
Get workflow runs count statistics
|
Get workflow runs count statistics
|
||||||
"""
|
"""
|
||||||
args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
args = args_model.model_dump(exclude_none=True)
|
args = args_model.model_dump(exclude_none=True)
|
||||||
|
|
||||||
# Default to DEBUGGING for workflow if not specified (backward compatibility)
|
# Default to DEBUGGING for workflow if not specified (backward compatibility)
|
||||||
@ -429,7 +349,7 @@ class WorkflowRunCountApi(Resource):
|
|||||||
triggered_from=triggered_from,
|
triggered_from=triggered_from,
|
||||||
)
|
)
|
||||||
|
|
||||||
return result
|
return WorkflowRunCountResponse.model_validate(result).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
|
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
|
||||||
@ -437,13 +357,16 @@ class WorkflowRunDetailApi(Resource):
|
|||||||
@console_ns.doc("get_workflow_run_detail")
|
@console_ns.doc("get_workflow_run_detail")
|
||||||
@console_ns.doc(description="Get workflow run detail")
|
@console_ns.doc(description="Get workflow run detail")
|
||||||
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
||||||
@console_ns.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_model)
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow run detail retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunDetailResponse.__name__],
|
||||||
|
)
|
||||||
@console_ns.response(404, "Workflow run not found")
|
@console_ns.response(404, "Workflow run not found")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_detail_model)
|
|
||||||
def get(self, app_model: App, run_id):
|
def get(self, app_model: App, run_id):
|
||||||
"""
|
"""
|
||||||
Get workflow run detail
|
Get workflow run detail
|
||||||
@ -452,8 +375,10 @@ class WorkflowRunDetailApi(Resource):
|
|||||||
|
|
||||||
workflow_run_service = WorkflowRunService()
|
workflow_run_service = WorkflowRunService()
|
||||||
workflow_run = workflow_run_service.get_workflow_run(app_model=app_model, run_id=run_id)
|
workflow_run = workflow_run_service.get_workflow_run(app_model=app_model, run_id=run_id)
|
||||||
|
if workflow_run is None:
|
||||||
|
raise NotFoundError("Workflow run not found")
|
||||||
|
|
||||||
return workflow_run
|
return WorkflowRunDetailResponse.model_validate(workflow_run, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
|
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
|
||||||
@ -461,13 +386,16 @@ class WorkflowRunNodeExecutionListApi(Resource):
|
|||||||
@console_ns.doc("get_workflow_run_node_executions")
|
@console_ns.doc("get_workflow_run_node_executions")
|
||||||
@console_ns.doc(description="Get workflow run node execution list")
|
@console_ns.doc(description="Get workflow run node execution list")
|
||||||
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
|
||||||
@console_ns.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_model)
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node executions retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionListResponse.__name__],
|
||||||
|
)
|
||||||
@console_ns.response(404, "Workflow run not found")
|
@console_ns.response(404, "Workflow run not found")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
|
||||||
@marshal_with(workflow_run_node_execution_list_model)
|
|
||||||
def get(self, app_model: App, run_id):
|
def get(self, app_model: App, run_id):
|
||||||
"""
|
"""
|
||||||
Get workflow run node execution list
|
Get workflow run node execution list
|
||||||
@ -482,13 +410,24 @@ class WorkflowRunNodeExecutionListApi(Resource):
|
|||||||
user=user,
|
user=user,
|
||||||
)
|
)
|
||||||
|
|
||||||
return {"data": node_executions}
|
return WorkflowRunNodeExecutionListResponse.model_validate(
|
||||||
|
{"data": node_executions}, from_attributes=True
|
||||||
|
).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/workflow/<string:workflow_run_id>/pause-details")
|
@console_ns.route("/workflow/<string:workflow_run_id>/pause-details")
|
||||||
class ConsoleWorkflowPauseDetailsApi(Resource):
|
class ConsoleWorkflowPauseDetailsApi(Resource):
|
||||||
"""Console API for getting workflow pause details."""
|
"""Console API for getting workflow pause details."""
|
||||||
|
|
||||||
|
@console_ns.doc("get_workflow_pause_details")
|
||||||
|
@console_ns.doc(description="Get workflow pause details")
|
||||||
|
@console_ns.doc(params={"workflow_run_id": "Workflow run ID"})
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow pause details retrieved successfully",
|
||||||
|
console_ns.models[WorkflowPauseDetailsResponse.__name__],
|
||||||
|
)
|
||||||
|
@console_ns.response(404, "Workflow run not found")
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@ -515,11 +454,8 @@ class ConsoleWorkflowPauseDetailsApi(Resource):
|
|||||||
# Check if workflow is suspended
|
# Check if workflow is suspended
|
||||||
is_paused = workflow_run.status == WorkflowExecutionStatus.PAUSED
|
is_paused = workflow_run.status == WorkflowExecutionStatus.PAUSED
|
||||||
if not is_paused:
|
if not is_paused:
|
||||||
empty_response: WorkflowPauseDetailsResponse = {
|
empty_response = WorkflowPauseDetailsResponse(paused_at=None, paused_nodes=[])
|
||||||
"paused_at": None,
|
return empty_response.model_dump(mode="json"), 200
|
||||||
"paused_nodes": [],
|
|
||||||
}
|
|
||||||
return empty_response, 200
|
|
||||||
|
|
||||||
pause_entity = workflow_run_repo.get_workflow_pause(workflow_run_id)
|
pause_entity = workflow_run_repo.get_workflow_pause(workflow_run_id)
|
||||||
pause_reasons = pause_entity.get_pause_reasons() if pause_entity else []
|
pause_reasons = pause_entity.get_pause_reasons() if pause_entity else []
|
||||||
@ -530,27 +466,25 @@ class ConsoleWorkflowPauseDetailsApi(Resource):
|
|||||||
# Build response
|
# Build response
|
||||||
paused_at = pause_entity.paused_at if pause_entity else None
|
paused_at = pause_entity.paused_at if pause_entity else None
|
||||||
paused_nodes: list[PausedNodeResponse] = []
|
paused_nodes: list[PausedNodeResponse] = []
|
||||||
response: WorkflowPauseDetailsResponse = {
|
|
||||||
"paused_at": paused_at.isoformat() + "Z" if paused_at else None,
|
|
||||||
"paused_nodes": paused_nodes,
|
|
||||||
}
|
|
||||||
|
|
||||||
for reason in pause_reasons:
|
for reason in pause_reasons:
|
||||||
if isinstance(reason, HumanInputRequired):
|
if isinstance(reason, HumanInputRequired):
|
||||||
paused_nodes.append(
|
paused_nodes.append(
|
||||||
{
|
PausedNodeResponse(
|
||||||
"node_id": reason.node_id,
|
node_id=reason.node_id,
|
||||||
"node_title": reason.node_title,
|
node_title=reason.node_title,
|
||||||
"pause_type": {
|
pause_type=HumanInputPauseTypeResponse(
|
||||||
"type": "human_input",
|
type="human_input",
|
||||||
"form_id": reason.form_id,
|
form_id=reason.form_id,
|
||||||
"backstage_input_url": _build_backstage_input_url(
|
backstage_input_url=_build_backstage_input_url(form_tokens_by_form_id.get(reason.form_id)),
|
||||||
form_tokens_by_form_id.get(reason.form_id)
|
),
|
||||||
),
|
)
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
raise AssertionError("unimplemented.")
|
raise AssertionError("unimplemented.")
|
||||||
|
|
||||||
return response, 200
|
response = WorkflowPauseDetailsResponse(
|
||||||
|
paused_at=paused_at.isoformat() + "Z" if paused_at else None,
|
||||||
|
paused_nodes=paused_nodes,
|
||||||
|
)
|
||||||
|
return response.model_dump(mode="json"), 200
|
||||||
|
|||||||
@ -3,6 +3,7 @@ from flask_restx import Resource
|
|||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.wraps import get_app_model
|
from controllers.console.app.wraps import get_app_model
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
@ -13,8 +14,6 @@ from models.enums import WorkflowRunTriggeredFrom
|
|||||||
from models.model import AppMode
|
from models.model import AppMode
|
||||||
from repositories.factory import DifyAPIRepositoryFactory
|
from repositories.factory import DifyAPIRepositoryFactory
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowStatisticQuery(BaseModel):
|
class WorkflowStatisticQuery(BaseModel):
|
||||||
start: str | None = Field(default=None, description="Start date and time (YYYY-MM-DD HH:MM)")
|
start: str | None = Field(default=None, description="Start date and time (YYYY-MM-DD HH:MM)")
|
||||||
@ -28,10 +27,7 @@ class WorkflowStatisticQuery(BaseModel):
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, WorkflowStatisticQuery)
|
||||||
WorkflowStatisticQuery.__name__,
|
|
||||||
WorkflowStatisticQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
|
@console_ns.route("/apps/<uuid:app_id>/workflow/statistics/daily-conversations")
|
||||||
@ -53,7 +49,7 @@ class WorkflowDailyRunsStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
assert account.timezone is not None
|
assert account.timezone is not None
|
||||||
|
|
||||||
@ -93,7 +89,7 @@ class WorkflowDailyTerminalsStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
assert account.timezone is not None
|
assert account.timezone is not None
|
||||||
|
|
||||||
@ -133,7 +129,7 @@ class WorkflowDailyTokenCostStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
assert account.timezone is not None
|
assert account.timezone is not None
|
||||||
|
|
||||||
@ -173,7 +169,7 @@ class WorkflowAverageAppInteractionStatistic(Resource):
|
|||||||
def get(self, app_model):
|
def get(self, app_model):
|
||||||
account, _ = current_account_with_tenant()
|
account, _ = current_account_with_tenant()
|
||||||
|
|
||||||
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
assert account.timezone is not None
|
assert account.timezone is not None
|
||||||
|
|
||||||
|
|||||||
@ -94,7 +94,7 @@ class WebhookTriggerApi(Resource):
|
|||||||
@console_ns.response(200, "Success", console_ns.models[WebhookTriggerResponse.__name__])
|
@console_ns.response(200, "Success", console_ns.models[WebhookTriggerResponse.__name__])
|
||||||
def get(self, app_model: App):
|
def get(self, app_model: App):
|
||||||
"""Get webhook trigger for a node"""
|
"""Get webhook trigger for a node"""
|
||||||
args = Parser.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = Parser.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
node_id = args.node_id
|
node_id = args.node_id
|
||||||
|
|
||||||
|
|||||||
@ -63,7 +63,7 @@ class ActivateCheckApi(Resource):
|
|||||||
console_ns.models[ActivationCheckResponse.__name__],
|
console_ns.models[ActivationCheckResponse.__name__],
|
||||||
)
|
)
|
||||||
def get(self):
|
def get(self):
|
||||||
args = ActivateCheckQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ActivateCheckQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
workspaceId = args.workspace_id
|
workspaceId = args.workspace_id
|
||||||
token = args.token
|
token = args.token
|
||||||
|
|||||||
@ -1,6 +1,7 @@
|
|||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from libs.login import current_account_with_tenant, login_required
|
from libs.login import current_account_with_tenant, login_required
|
||||||
from services.auth.api_key_auth_service import ApiKeyAuthService
|
from services.auth.api_key_auth_service import ApiKeyAuthService
|
||||||
|
|
||||||
@ -8,8 +9,6 @@ from .. import console_ns
|
|||||||
from ..auth.error import ApiKeyAuthFailedError
|
from ..auth.error import ApiKeyAuthFailedError
|
||||||
from ..wraps import account_initialization_required, is_admin_or_owner_required, setup_required
|
from ..wraps import account_initialization_required, is_admin_or_owner_required, setup_required
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class ApiKeyAuthBindingPayload(BaseModel):
|
class ApiKeyAuthBindingPayload(BaseModel):
|
||||||
category: str = Field(...)
|
category: str = Field(...)
|
||||||
@ -17,10 +16,7 @@ class ApiKeyAuthBindingPayload(BaseModel):
|
|||||||
credentials: dict = Field(...)
|
credentials: dict = Field(...)
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, ApiKeyAuthBindingPayload)
|
||||||
ApiKeyAuthBindingPayload.__name__,
|
|
||||||
ApiKeyAuthBindingPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/api-key-auth/data-source")
|
@console_ns.route("/api-key-auth/data-source")
|
||||||
|
|||||||
@ -4,6 +4,7 @@ from pydantic import BaseModel, Field, field_validator
|
|||||||
|
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
from constants.languages import languages
|
from constants.languages import languages
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.auth.error import (
|
from controllers.console.auth.error import (
|
||||||
EmailAlreadyInUseError,
|
EmailAlreadyInUseError,
|
||||||
@ -23,8 +24,6 @@ from services.errors.account import AccountNotFoundError, AccountRegisterError
|
|||||||
from ..error import AccountInFreezeError, EmailSendIpLimitError
|
from ..error import AccountInFreezeError, EmailSendIpLimitError
|
||||||
from ..wraps import email_password_login_enabled, email_register_enabled, setup_required
|
from ..wraps import email_password_login_enabled, email_register_enabled, setup_required
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class EmailRegisterSendPayload(BaseModel):
|
class EmailRegisterSendPayload(BaseModel):
|
||||||
email: EmailStr = Field(..., description="Email address")
|
email: EmailStr = Field(..., description="Email address")
|
||||||
@ -48,8 +47,7 @@ class EmailRegisterResetPayload(BaseModel):
|
|||||||
return valid_password(value)
|
return valid_password(value)
|
||||||
|
|
||||||
|
|
||||||
for model in (EmailRegisterSendPayload, EmailRegisterValidityPayload, EmailRegisterResetPayload):
|
register_schema_models(console_ns, EmailRegisterSendPayload, EmailRegisterValidityPayload, EmailRegisterResetPayload)
|
||||||
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/email-register/send-email")
|
@console_ns.route("/email-register/send-email")
|
||||||
|
|||||||
@ -28,8 +28,6 @@ from services.entities.auth_entities import (
|
|||||||
)
|
)
|
||||||
from services.feature_service import FeatureService
|
from services.feature_service import FeatureService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class ForgotPasswordEmailResponse(BaseModel):
|
class ForgotPasswordEmailResponse(BaseModel):
|
||||||
result: str = Field(description="Operation result")
|
result: str = Field(description="Operation result")
|
||||||
|
|||||||
@ -9,6 +9,7 @@ from werkzeug.exceptions import Unauthorized
|
|||||||
import services
|
import services
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
from constants.languages import get_valid_language
|
from constants.languages import get_valid_language
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.auth.error import (
|
from controllers.console.auth.error import (
|
||||||
AuthenticationFailedError,
|
AuthenticationFailedError,
|
||||||
@ -50,7 +51,6 @@ from services.errors.account import AccountRegisterError
|
|||||||
from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkspacesLimitExceededError
|
from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkspacesLimitExceededError
|
||||||
from services.feature_service import FeatureService
|
from services.feature_service import FeatureService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@ -71,13 +71,7 @@ class EmailCodeLoginPayload(BaseModel):
|
|||||||
language: str | None = Field(default=None)
|
language: str | None = Field(default=None)
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_schema_models(console_ns, LoginPayload, EmailPayload, EmailCodeLoginPayload)
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
|
||||||
|
|
||||||
reg(LoginPayload)
|
|
||||||
reg(EmailPayload)
|
|
||||||
reg(EmailCodeLoginPayload)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/login")
|
@console_ns.route("/login")
|
||||||
|
|||||||
@ -606,63 +606,63 @@ class DatasetIndexingEstimateApi(Resource):
|
|||||||
# validate args
|
# validate args
|
||||||
DocumentService.estimate_args_validate(args)
|
DocumentService.estimate_args_validate(args)
|
||||||
extract_settings = []
|
extract_settings = []
|
||||||
if args["info_list"]["data_source_type"] == "upload_file":
|
match args["info_list"]["data_source_type"]:
|
||||||
file_ids = args["info_list"]["file_info_list"]["file_ids"]
|
case "upload_file":
|
||||||
file_details = db.session.scalars(
|
file_ids = args["info_list"]["file_info_list"]["file_ids"]
|
||||||
select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids))
|
file_details = db.session.scalars(
|
||||||
).all()
|
select(UploadFile).where(UploadFile.tenant_id == current_tenant_id, UploadFile.id.in_(file_ids))
|
||||||
|
).all()
|
||||||
|
if file_details is None:
|
||||||
|
raise NotFound("File not found.")
|
||||||
|
|
||||||
if file_details is None:
|
if file_details:
|
||||||
raise NotFound("File not found.")
|
for file_detail in file_details:
|
||||||
|
extract_setting = ExtractSetting(
|
||||||
if file_details:
|
datasource_type=DatasourceType.FILE,
|
||||||
for file_detail in file_details:
|
upload_file=file_detail,
|
||||||
|
document_model=args["doc_form"],
|
||||||
|
)
|
||||||
|
extract_settings.append(extract_setting)
|
||||||
|
case "notion_import":
|
||||||
|
notion_info_list = args["info_list"]["notion_info_list"]
|
||||||
|
for notion_info in notion_info_list:
|
||||||
|
workspace_id = notion_info["workspace_id"]
|
||||||
|
credential_id = notion_info.get("credential_id")
|
||||||
|
for page in notion_info["pages"]:
|
||||||
|
extract_setting = ExtractSetting(
|
||||||
|
datasource_type=DatasourceType.NOTION,
|
||||||
|
notion_info=NotionInfo.model_validate(
|
||||||
|
{
|
||||||
|
"credential_id": credential_id,
|
||||||
|
"notion_workspace_id": workspace_id,
|
||||||
|
"notion_obj_id": page["page_id"],
|
||||||
|
"notion_page_type": page["type"],
|
||||||
|
"tenant_id": current_tenant_id,
|
||||||
|
}
|
||||||
|
),
|
||||||
|
document_model=args["doc_form"],
|
||||||
|
)
|
||||||
|
extract_settings.append(extract_setting)
|
||||||
|
case "website_crawl":
|
||||||
|
website_info_list = args["info_list"]["website_info_list"]
|
||||||
|
for url in website_info_list["urls"]:
|
||||||
extract_setting = ExtractSetting(
|
extract_setting = ExtractSetting(
|
||||||
datasource_type=DatasourceType.FILE,
|
datasource_type=DatasourceType.WEBSITE,
|
||||||
upload_file=file_detail,
|
website_info=WebsiteInfo.model_validate(
|
||||||
document_model=args["doc_form"],
|
|
||||||
)
|
|
||||||
extract_settings.append(extract_setting)
|
|
||||||
elif args["info_list"]["data_source_type"] == "notion_import":
|
|
||||||
notion_info_list = args["info_list"]["notion_info_list"]
|
|
||||||
for notion_info in notion_info_list:
|
|
||||||
workspace_id = notion_info["workspace_id"]
|
|
||||||
credential_id = notion_info.get("credential_id")
|
|
||||||
for page in notion_info["pages"]:
|
|
||||||
extract_setting = ExtractSetting(
|
|
||||||
datasource_type=DatasourceType.NOTION,
|
|
||||||
notion_info=NotionInfo.model_validate(
|
|
||||||
{
|
{
|
||||||
"credential_id": credential_id,
|
"provider": website_info_list["provider"],
|
||||||
"notion_workspace_id": workspace_id,
|
"job_id": website_info_list["job_id"],
|
||||||
"notion_obj_id": page["page_id"],
|
"url": url,
|
||||||
"notion_page_type": page["type"],
|
|
||||||
"tenant_id": current_tenant_id,
|
"tenant_id": current_tenant_id,
|
||||||
|
"mode": "crawl",
|
||||||
|
"only_main_content": website_info_list["only_main_content"],
|
||||||
}
|
}
|
||||||
),
|
),
|
||||||
document_model=args["doc_form"],
|
document_model=args["doc_form"],
|
||||||
)
|
)
|
||||||
extract_settings.append(extract_setting)
|
extract_settings.append(extract_setting)
|
||||||
elif args["info_list"]["data_source_type"] == "website_crawl":
|
case _:
|
||||||
website_info_list = args["info_list"]["website_info_list"]
|
raise ValueError("Data source type not support")
|
||||||
for url in website_info_list["urls"]:
|
|
||||||
extract_setting = ExtractSetting(
|
|
||||||
datasource_type=DatasourceType.WEBSITE,
|
|
||||||
website_info=WebsiteInfo.model_validate(
|
|
||||||
{
|
|
||||||
"provider": website_info_list["provider"],
|
|
||||||
"job_id": website_info_list["job_id"],
|
|
||||||
"url": url,
|
|
||||||
"tenant_id": current_tenant_id,
|
|
||||||
"mode": "crawl",
|
|
||||||
"only_main_content": website_info_list["only_main_content"],
|
|
||||||
}
|
|
||||||
),
|
|
||||||
document_model=args["doc_form"],
|
|
||||||
)
|
|
||||||
extract_settings.append(extract_setting)
|
|
||||||
else:
|
|
||||||
raise ValueError("Data source type not support")
|
|
||||||
indexing_runner = IndexingRunner()
|
indexing_runner = IndexingRunner()
|
||||||
try:
|
try:
|
||||||
response = indexing_runner.indexing_estimate(
|
response = indexing_runner.indexing_estimate(
|
||||||
|
|||||||
@ -369,28 +369,31 @@ class DatasetDocumentListApi(Resource):
|
|||||||
else:
|
else:
|
||||||
sort_logic = asc
|
sort_logic = asc
|
||||||
|
|
||||||
if sort == "hit_count":
|
match sort:
|
||||||
sub_query = (
|
case "hit_count":
|
||||||
sa.select(DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count"))
|
sub_query = (
|
||||||
.where(DocumentSegment.dataset_id == str(dataset_id))
|
sa.select(
|
||||||
.group_by(DocumentSegment.document_id)
|
DocumentSegment.document_id, sa.func.sum(DocumentSegment.hit_count).label("total_hit_count")
|
||||||
.subquery()
|
)
|
||||||
)
|
.where(DocumentSegment.dataset_id == str(dataset_id))
|
||||||
|
.group_by(DocumentSegment.document_id)
|
||||||
|
.subquery()
|
||||||
|
)
|
||||||
|
|
||||||
query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
|
query = query.outerjoin(sub_query, sub_query.c.document_id == Document.id).order_by(
|
||||||
sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)),
|
sort_logic(sa.func.coalesce(sub_query.c.total_hit_count, 0)),
|
||||||
sort_logic(Document.position),
|
sort_logic(Document.position),
|
||||||
)
|
)
|
||||||
elif sort == "created_at":
|
case "created_at":
|
||||||
query = query.order_by(
|
query = query.order_by(
|
||||||
sort_logic(Document.created_at),
|
sort_logic(Document.created_at),
|
||||||
sort_logic(Document.position),
|
sort_logic(Document.position),
|
||||||
)
|
)
|
||||||
else:
|
case _:
|
||||||
query = query.order_by(
|
query = query.order_by(
|
||||||
desc(Document.created_at),
|
desc(Document.created_at),
|
||||||
desc(Document.position),
|
desc(Document.position),
|
||||||
)
|
)
|
||||||
|
|
||||||
paginated_documents = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
|
paginated_documents = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
|
||||||
documents = paginated_documents.items
|
documents = paginated_documents.items
|
||||||
|
|||||||
@ -4,6 +4,7 @@ from flask_restx import ( # type: ignore
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from werkzeug.exceptions import Forbidden
|
from werkzeug.exceptions import Forbidden
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.datasets.wraps import get_rag_pipeline
|
from controllers.console.datasets.wraps import get_rag_pipeline
|
||||||
from controllers.console.wraps import account_initialization_required, setup_required
|
from controllers.console.wraps import account_initialization_required, setup_required
|
||||||
@ -12,8 +13,6 @@ from models import Account
|
|||||||
from models.dataset import Pipeline
|
from models.dataset import Pipeline
|
||||||
from services.rag_pipeline.rag_pipeline import RagPipelineService
|
from services.rag_pipeline.rag_pipeline import RagPipelineService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class Parser(BaseModel):
|
class Parser(BaseModel):
|
||||||
inputs: dict
|
inputs: dict
|
||||||
@ -21,7 +20,7 @@ class Parser(BaseModel):
|
|||||||
credential_id: str | None = None
|
credential_id: str | None = None
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(Parser.__name__, Parser.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
register_schema_models(console_ns, Parser)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview")
|
||||||
|
|||||||
@ -10,7 +10,7 @@ from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotF
|
|||||||
|
|
||||||
import services
|
import services
|
||||||
from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
|
from controllers.common.controller_schemas import DefaultBlockConfigQuery, WorkflowListQuery, WorkflowUpdatePayload
|
||||||
from controllers.common.schema import register_schema_models
|
from controllers.common.schema import register_response_schema_models, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import (
|
from controllers.console.app.error import (
|
||||||
ConversationCompletedError,
|
ConversationCompletedError,
|
||||||
@ -22,12 +22,6 @@ from controllers.console.app.workflow import (
|
|||||||
workflow_model,
|
workflow_model,
|
||||||
workflow_pagination_model,
|
workflow_pagination_model,
|
||||||
)
|
)
|
||||||
from controllers.console.app.workflow_run import (
|
|
||||||
workflow_run_detail_model,
|
|
||||||
workflow_run_node_execution_list_model,
|
|
||||||
workflow_run_node_execution_model,
|
|
||||||
workflow_run_pagination_model,
|
|
||||||
)
|
|
||||||
from controllers.console.datasets.wraps import get_rag_pipeline
|
from controllers.console.datasets.wraps import get_rag_pipeline
|
||||||
from controllers.console.wraps import (
|
from controllers.console.wraps import (
|
||||||
account_initialization_required,
|
account_initialization_required,
|
||||||
@ -40,6 +34,12 @@ from core.app.apps.pipeline.pipeline_generator import PipelineGenerator
|
|||||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||||
from extensions.ext_database import db
|
from extensions.ext_database import db
|
||||||
from factories import variable_factory
|
from factories import variable_factory
|
||||||
|
from fields.workflow_run_fields import (
|
||||||
|
WorkflowRunDetailResponse,
|
||||||
|
WorkflowRunNodeExecutionListResponse,
|
||||||
|
WorkflowRunNodeExecutionResponse,
|
||||||
|
WorkflowRunPaginationResponse,
|
||||||
|
)
|
||||||
from graphon.model_runtime.utils.encoders import jsonable_encoder
|
from graphon.model_runtime.utils.encoders import jsonable_encoder
|
||||||
from libs import helper
|
from libs import helper
|
||||||
from libs.helper import TimestampField, UUIDStrOrEmpty
|
from libs.helper import TimestampField, UUIDStrOrEmpty
|
||||||
@ -131,6 +131,13 @@ register_schema_models(
|
|||||||
DatasourceVariablesPayload,
|
DatasourceVariablesPayload,
|
||||||
RagPipelineRecommendedPluginQuery,
|
RagPipelineRecommendedPluginQuery,
|
||||||
)
|
)
|
||||||
|
register_response_schema_models(
|
||||||
|
console_ns,
|
||||||
|
WorkflowRunDetailResponse,
|
||||||
|
WorkflowRunNodeExecutionListResponse,
|
||||||
|
WorkflowRunNodeExecutionResponse,
|
||||||
|
WorkflowRunPaginationResponse,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft")
|
||||||
@ -415,12 +422,16 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
|
|||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run")
|
||||||
class RagPipelineDraftNodeRunApi(Resource):
|
class RagPipelineDraftNodeRunApi(Resource):
|
||||||
@console_ns.expect(console_ns.models[NodeRunRequiredPayload.__name__])
|
@console_ns.expect(console_ns.models[NodeRunRequiredPayload.__name__])
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node run started successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@marshal_with(workflow_run_node_execution_model)
|
|
||||||
def post(self, pipeline: Pipeline, node_id: str):
|
def post(self, pipeline: Pipeline, node_id: str):
|
||||||
"""
|
"""
|
||||||
Run draft workflow node
|
Run draft workflow node
|
||||||
@ -439,7 +450,9 @@ class RagPipelineDraftNodeRunApi(Resource):
|
|||||||
if workflow_node_execution is None:
|
if workflow_node_execution is None:
|
||||||
raise ValueError("Workflow node execution not found")
|
raise ValueError("Workflow node execution not found")
|
||||||
|
|
||||||
return workflow_node_execution
|
return WorkflowRunNodeExecutionResponse.model_validate(
|
||||||
|
workflow_node_execution, from_attributes=True
|
||||||
|
).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/tasks/<string:task_id>/stop")
|
||||||
@ -778,11 +791,15 @@ class DraftRagPipelineSecondStepApi(Resource):
|
|||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs")
|
||||||
class RagPipelineWorkflowRunListApi(Resource):
|
class RagPipelineWorkflowRunListApi(Resource):
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow runs retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunPaginationResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@marshal_with(workflow_run_pagination_model)
|
|
||||||
def get(self, pipeline: Pipeline):
|
def get(self, pipeline: Pipeline):
|
||||||
"""
|
"""
|
||||||
Get workflow run list
|
Get workflow run list
|
||||||
@ -801,16 +818,20 @@ class RagPipelineWorkflowRunListApi(Resource):
|
|||||||
rag_pipeline_service = RagPipelineService()
|
rag_pipeline_service = RagPipelineService()
|
||||||
result = rag_pipeline_service.get_rag_pipeline_paginate_workflow_runs(pipeline=pipeline, args=args)
|
result = rag_pipeline_service.get_rag_pipeline_paginate_workflow_runs(pipeline=pipeline, args=args)
|
||||||
|
|
||||||
return result
|
return WorkflowRunPaginationResponse.model_validate(result, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>")
|
||||||
class RagPipelineWorkflowRunDetailApi(Resource):
|
class RagPipelineWorkflowRunDetailApi(Resource):
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Workflow run detail retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunDetailResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@marshal_with(workflow_run_detail_model)
|
|
||||||
def get(self, pipeline: Pipeline, run_id):
|
def get(self, pipeline: Pipeline, run_id):
|
||||||
"""
|
"""
|
||||||
Get workflow run detail
|
Get workflow run detail
|
||||||
@ -819,17 +840,23 @@ class RagPipelineWorkflowRunDetailApi(Resource):
|
|||||||
|
|
||||||
rag_pipeline_service = RagPipelineService()
|
rag_pipeline_service = RagPipelineService()
|
||||||
workflow_run = rag_pipeline_service.get_rag_pipeline_workflow_run(pipeline=pipeline, run_id=run_id)
|
workflow_run = rag_pipeline_service.get_rag_pipeline_workflow_run(pipeline=pipeline, run_id=run_id)
|
||||||
|
if workflow_run is None:
|
||||||
|
raise NotFound("Workflow run not found")
|
||||||
|
|
||||||
return workflow_run
|
return WorkflowRunDetailResponse.model_validate(workflow_run, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs/<uuid:run_id>/node-executions")
|
||||||
class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
|
class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node executions retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionListResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@marshal_with(workflow_run_node_execution_list_model)
|
|
||||||
def get(self, pipeline: Pipeline, run_id: str):
|
def get(self, pipeline: Pipeline, run_id: str):
|
||||||
"""
|
"""
|
||||||
Get workflow run node execution list
|
Get workflow run node execution list
|
||||||
@ -844,7 +871,9 @@ class RagPipelineWorkflowRunNodeExecutionListApi(Resource):
|
|||||||
user=user,
|
user=user,
|
||||||
)
|
)
|
||||||
|
|
||||||
return {"data": node_executions}
|
return WorkflowRunNodeExecutionListResponse.model_validate(
|
||||||
|
{"data": node_executions}, from_attributes=True
|
||||||
|
).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/datasource-plugins")
|
@console_ns.route("/rag/pipelines/datasource-plugins")
|
||||||
@ -859,11 +888,15 @@ class DatasourceListApi(Resource):
|
|||||||
|
|
||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/last-run")
|
||||||
class RagPipelineWorkflowLastRunApi(Resource):
|
class RagPipelineWorkflowLastRunApi(Resource):
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Node last run retrieved successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@marshal_with(workflow_run_node_execution_model)
|
|
||||||
def get(self, pipeline: Pipeline, node_id: str):
|
def get(self, pipeline: Pipeline, node_id: str):
|
||||||
rag_pipeline_service = RagPipelineService()
|
rag_pipeline_service = RagPipelineService()
|
||||||
workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
|
workflow = rag_pipeline_service.get_draft_workflow(pipeline=pipeline)
|
||||||
@ -876,7 +909,7 @@ class RagPipelineWorkflowLastRunApi(Resource):
|
|||||||
)
|
)
|
||||||
if node_exec is None:
|
if node_exec is None:
|
||||||
raise NotFound("last run not found")
|
raise NotFound("last run not found")
|
||||||
return node_exec
|
return WorkflowRunNodeExecutionResponse.model_validate(node_exec, from_attributes=True).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/transform/datasets/<uuid:dataset_id>")
|
@console_ns.route("/rag/pipelines/transform/datasets/<uuid:dataset_id>")
|
||||||
@ -899,12 +932,16 @@ class RagPipelineTransformApi(Resource):
|
|||||||
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect")
|
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect")
|
||||||
class RagPipelineDatasourceVariableApi(Resource):
|
class RagPipelineDatasourceVariableApi(Resource):
|
||||||
@console_ns.expect(console_ns.models[DatasourceVariablesPayload.__name__])
|
@console_ns.expect(console_ns.models[DatasourceVariablesPayload.__name__])
|
||||||
|
@console_ns.response(
|
||||||
|
200,
|
||||||
|
"Datasource variables set successfully",
|
||||||
|
console_ns.models[WorkflowRunNodeExecutionResponse.__name__],
|
||||||
|
)
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
@get_rag_pipeline
|
@get_rag_pipeline
|
||||||
@edit_permission_required
|
@edit_permission_required
|
||||||
@marshal_with(workflow_run_node_execution_model)
|
|
||||||
def post(self, pipeline: Pipeline):
|
def post(self, pipeline: Pipeline):
|
||||||
"""
|
"""
|
||||||
Set datasource variables
|
Set datasource variables
|
||||||
@ -918,7 +955,9 @@ class RagPipelineDatasourceVariableApi(Resource):
|
|||||||
args=args,
|
args=args,
|
||||||
current_user=current_user,
|
current_user=current_user,
|
||||||
)
|
)
|
||||||
return workflow_node_execution
|
return WorkflowRunNodeExecutionResponse.model_validate(
|
||||||
|
workflow_node_execution, from_attributes=True
|
||||||
|
).model_dump(mode="json")
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/rag/pipelines/recommended-plugins")
|
@console_ns.route("/rag/pipelines/recommended-plugins")
|
||||||
|
|||||||
@ -1,11 +1,12 @@
|
|||||||
from typing import Any
|
from typing import Any
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field, computed_field, field_validator
|
from pydantic import BaseModel, Field, computed_field, field_validator
|
||||||
|
|
||||||
from constants.languages import languages
|
from constants.languages import languages
|
||||||
from controllers.common.schema import register_schema_models
|
from controllers.common.schema import query_params_from_model, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.wraps import account_initialization_required
|
from controllers.console.wraps import account_initialization_required
|
||||||
from fields.base import ResponseModel
|
from fields.base import ResponseModel
|
||||||
@ -15,7 +16,7 @@ from services.recommended_app_service import RecommendedAppService
|
|||||||
|
|
||||||
|
|
||||||
class RecommendedAppsQuery(BaseModel):
|
class RecommendedAppsQuery(BaseModel):
|
||||||
language: str | None = Field(default=None)
|
language: str | None = Field(default=None, description="Language code for recommended app localization")
|
||||||
|
|
||||||
|
|
||||||
class RecommendedAppInfoResponse(ResponseModel):
|
class RecommendedAppInfoResponse(ResponseModel):
|
||||||
@ -52,7 +53,7 @@ class RecommendedAppResponse(ResponseModel):
|
|||||||
copyright: str | None = None
|
copyright: str | None = None
|
||||||
privacy_policy: str | None = None
|
privacy_policy: str | None = None
|
||||||
custom_disclaimer: str | None = None
|
custom_disclaimer: str | None = None
|
||||||
category: str | None = None
|
categories: list[str] = Field(default_factory=list)
|
||||||
position: int | None = None
|
position: int | None = None
|
||||||
is_listed: bool | None = None
|
is_listed: bool | None = None
|
||||||
can_trial: bool | None = None
|
can_trial: bool | None = None
|
||||||
@ -74,13 +75,13 @@ register_schema_models(
|
|||||||
|
|
||||||
@console_ns.route("/explore/apps")
|
@console_ns.route("/explore/apps")
|
||||||
class RecommendedAppListApi(Resource):
|
class RecommendedAppListApi(Resource):
|
||||||
@console_ns.expect(console_ns.models[RecommendedAppsQuery.__name__])
|
@console_ns.doc(params=query_params_from_model(RecommendedAppsQuery))
|
||||||
@console_ns.response(200, "Success", console_ns.models[RecommendedAppListResponse.__name__])
|
@console_ns.response(200, "Success", console_ns.models[RecommendedAppListResponse.__name__])
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
# language args
|
# language args
|
||||||
args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
language = args.language
|
language = args.language
|
||||||
if language and language in languages:
|
if language and language in languages:
|
||||||
language_prefix = language
|
language_prefix = language
|
||||||
@ -99,6 +100,5 @@ class RecommendedAppListApi(Resource):
|
|||||||
class RecommendedAppApi(Resource):
|
class RecommendedAppApi(Resource):
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, app_id):
|
def get(self, app_id: UUID):
|
||||||
app_id = str(app_id)
|
return RecommendedAppService.get_recommend_app_detail(str(app_id))
|
||||||
return RecommendedAppService.get_recommend_app_detail(app_id)
|
|
||||||
|
|||||||
@ -10,7 +10,7 @@ from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
|
|||||||
import services
|
import services
|
||||||
from controllers.common.fields import Parameters as ParametersResponse
|
from controllers.common.fields import Parameters as ParametersResponse
|
||||||
from controllers.common.fields import Site as SiteResponse
|
from controllers.common.fields import Site as SiteResponse
|
||||||
from controllers.common.schema import get_or_create_model
|
from controllers.common.schema import get_or_create_model, register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.app.error import (
|
from controllers.console.app.error import (
|
||||||
AppUnavailableError,
|
AppUnavailableError,
|
||||||
@ -106,7 +106,7 @@ app_detail_fields_with_site_copy["tags"] = fields.List(fields.Nested(tag_model))
|
|||||||
app_detail_fields_with_site_copy["site"] = fields.Nested(site_model)
|
app_detail_fields_with_site_copy["site"] = fields.Nested(site_model)
|
||||||
app_detail_with_site_model = get_or_create_model("TrialAppDetailWithSite", app_detail_fields_with_site_copy)
|
app_detail_with_site_model = get_or_create_model("TrialAppDetailWithSite", app_detail_fields_with_site_copy)
|
||||||
|
|
||||||
simple_account_model = get_or_create_model("SimpleAccount", simple_account_fields)
|
simple_account_model = get_or_create_model("TrialSimpleAccount", simple_account_fields)
|
||||||
conversation_variable_model = get_or_create_model("TrialConversationVariable", conversation_variable_fields)
|
conversation_variable_model = get_or_create_model("TrialConversationVariable", conversation_variable_fields)
|
||||||
pipeline_variable_model = get_or_create_model("TrialPipelineVariable", pipeline_variable_fields)
|
pipeline_variable_model = get_or_create_model("TrialPipelineVariable", pipeline_variable_fields)
|
||||||
|
|
||||||
@ -120,10 +120,6 @@ workflow_fields_copy["rag_pipeline_variables"] = fields.List(fields.Nested(pipel
|
|||||||
workflow_model = get_or_create_model("TrialWorkflow", workflow_fields_copy)
|
workflow_model = get_or_create_model("TrialWorkflow", workflow_fields_copy)
|
||||||
|
|
||||||
|
|
||||||
# Pydantic models for request validation
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowRunRequest(BaseModel):
|
class WorkflowRunRequest(BaseModel):
|
||||||
inputs: dict
|
inputs: dict
|
||||||
files: list | None = None
|
files: list | None = None
|
||||||
@ -153,19 +149,7 @@ class CompletionRequest(BaseModel):
|
|||||||
retriever_from: str = "explore_app"
|
retriever_from: str = "explore_app"
|
||||||
|
|
||||||
|
|
||||||
# Register schemas for Swagger documentation
|
register_schema_models(console_ns, WorkflowRunRequest, ChatRequest, TextToSpeechRequest, CompletionRequest)
|
||||||
console_ns.schema_model(
|
|
||||||
WorkflowRunRequest.__name__, WorkflowRunRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
ChatRequest.__name__, ChatRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
TextToSpeechRequest.__name__, TextToSpeechRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
console_ns.schema_model(
|
|
||||||
CompletionRequest.__name__, CompletionRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class TrialAppWorkflowRunApi(TrialAppResource):
|
class TrialAppWorkflowRunApi(TrialAppResource):
|
||||||
|
|||||||
@ -89,7 +89,7 @@ class CodeBasedExtensionAPI(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
query = CodeBasedExtensionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
query = CodeBasedExtensionQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
return CodeBasedExtensionResponse(
|
return CodeBasedExtensionResponse(
|
||||||
module=query.module,
|
module=query.module,
|
||||||
|
|||||||
@ -82,7 +82,7 @@ class FileApi(Resource):
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=current_user,
|
user=current_user,
|
||||||
source=source,
|
source=source,
|
||||||
|
|||||||
@ -32,12 +32,7 @@ class TagBindingPayload(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class TagBindingRemovePayload(BaseModel):
|
class TagBindingRemovePayload(BaseModel):
|
||||||
tag_id: str = Field(description="Tag ID to remove")
|
tag_ids: list[str] = Field(description="Tag IDs to remove", min_length=1)
|
||||||
target_id: str = Field(description="Target ID to unbind tag from")
|
|
||||||
type: TagType = Field(description="Tag type")
|
|
||||||
|
|
||||||
|
|
||||||
class TagBindingItemDeletePayload(BaseModel):
|
|
||||||
target_id: str = Field(description="Target ID to unbind tag from")
|
target_id: str = Field(description="Target ID to unbind tag from")
|
||||||
type: TagType = Field(description="Tag type")
|
type: TagType = Field(description="Tag type")
|
||||||
|
|
||||||
@ -75,7 +70,6 @@ register_schema_models(
|
|||||||
TagBasePayload,
|
TagBasePayload,
|
||||||
TagBindingPayload,
|
TagBindingPayload,
|
||||||
TagBindingRemovePayload,
|
TagBindingRemovePayload,
|
||||||
TagBindingItemDeletePayload,
|
|
||||||
TagListQueryParam,
|
TagListQueryParam,
|
||||||
TagResponse,
|
TagResponse,
|
||||||
)
|
)
|
||||||
@ -184,13 +178,13 @@ def _create_tag_bindings() -> tuple[dict[str, str], int]:
|
|||||||
return {"result": "success"}, 200
|
return {"result": "success"}, 200
|
||||||
|
|
||||||
|
|
||||||
def _remove_tag_binding() -> tuple[dict[str, str], int]:
|
def _remove_tag_bindings() -> tuple[dict[str, str], int]:
|
||||||
_require_tag_binding_edit_permission()
|
_require_tag_binding_edit_permission()
|
||||||
|
|
||||||
payload = TagBindingRemovePayload.model_validate(console_ns.payload or {})
|
payload = TagBindingRemovePayload.model_validate(console_ns.payload or {})
|
||||||
TagService.delete_tag_binding(
|
TagService.delete_tag_binding(
|
||||||
TagBindingDeletePayload(
|
TagBindingDeletePayload(
|
||||||
tag_id=payload.tag_id,
|
tag_ids=payload.tag_ids,
|
||||||
target_id=payload.target_id,
|
target_id=payload.target_id,
|
||||||
type=payload.type,
|
type=payload.type,
|
||||||
)
|
)
|
||||||
@ -211,54 +205,15 @@ class TagBindingCollectionApi(Resource):
|
|||||||
return _create_tag_bindings()
|
return _create_tag_bindings()
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/tag-bindings/<uuid:id>")
|
|
||||||
class TagBindingItemApi(Resource):
|
|
||||||
"""Canonical item resource for tag binding deletion."""
|
|
||||||
|
|
||||||
@console_ns.doc("delete_tag_binding")
|
|
||||||
@console_ns.doc(params={"id": "Tag ID"})
|
|
||||||
@console_ns.expect(console_ns.models[TagBindingItemDeletePayload.__name__])
|
|
||||||
@setup_required
|
|
||||||
@login_required
|
|
||||||
@account_initialization_required
|
|
||||||
def delete(self, id):
|
|
||||||
_require_tag_binding_edit_permission()
|
|
||||||
payload = TagBindingItemDeletePayload.model_validate(console_ns.payload or {})
|
|
||||||
TagService.delete_tag_binding(
|
|
||||||
TagBindingDeletePayload(
|
|
||||||
tag_id=str(id),
|
|
||||||
target_id=payload.target_id,
|
|
||||||
type=payload.type,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
return {"result": "success"}, 200
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/tag-bindings/create")
|
|
||||||
class DeprecatedTagBindingCreateApi(Resource):
|
|
||||||
"""Deprecated verb-based alias for tag binding creation."""
|
|
||||||
|
|
||||||
@console_ns.doc("create_tag_binding_deprecated")
|
|
||||||
@console_ns.doc(deprecated=True)
|
|
||||||
@console_ns.doc(description="Deprecated legacy alias. Use POST /tag-bindings instead.")
|
|
||||||
@console_ns.expect(console_ns.models[TagBindingPayload.__name__])
|
|
||||||
@setup_required
|
|
||||||
@login_required
|
|
||||||
@account_initialization_required
|
|
||||||
def post(self):
|
|
||||||
return _create_tag_bindings()
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/tag-bindings/remove")
|
@console_ns.route("/tag-bindings/remove")
|
||||||
class DeprecatedTagBindingRemoveApi(Resource):
|
class TagBindingRemoveApi(Resource):
|
||||||
"""Deprecated verb-based alias for tag binding deletion."""
|
"""Batch resource for tag binding deletion."""
|
||||||
|
|
||||||
@console_ns.doc("delete_tag_binding_deprecated")
|
@console_ns.doc("remove_tag_bindings")
|
||||||
@console_ns.doc(deprecated=True)
|
@console_ns.doc(description="Remove one or more tag bindings from a target.")
|
||||||
@console_ns.doc(description="Deprecated legacy alias. Use DELETE /tag-bindings/{id} instead.")
|
|
||||||
@console_ns.expect(console_ns.models[TagBindingRemovePayload.__name__])
|
@console_ns.expect(console_ns.models[TagBindingRemovePayload.__name__])
|
||||||
@setup_required
|
@setup_required
|
||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def post(self):
|
def post(self):
|
||||||
return _remove_tag_binding()
|
return _remove_tag_bindings()
|
||||||
|
|||||||
@ -8,6 +8,7 @@ from flask import request
|
|||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field, field_validator, model_validator
|
from pydantic import BaseModel, Field, field_validator, model_validator
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
|
from werkzeug.exceptions import NotFound
|
||||||
|
|
||||||
from configs import dify_config
|
from configs import dify_config
|
||||||
from constants.languages import supported_language
|
from constants.languages import supported_language
|
||||||
@ -45,12 +46,12 @@ from libs.helper import EmailStr, extract_remote_ip, timezone
|
|||||||
from libs.login import current_account_with_tenant, login_required
|
from libs.login import current_account_with_tenant, login_required
|
||||||
from models import AccountIntegrate, InvitationCode
|
from models import AccountIntegrate, InvitationCode
|
||||||
from models.account import AccountStatus, InvitationCodeStatus
|
from models.account import AccountStatus, InvitationCodeStatus
|
||||||
|
from models.enums import CreatorUserRole
|
||||||
|
from models.model import UploadFile
|
||||||
from services.account_service import AccountService
|
from services.account_service import AccountService
|
||||||
from services.billing_service import BillingService
|
from services.billing_service import BillingService
|
||||||
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
|
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class AccountInitPayload(BaseModel):
|
class AccountInitPayload(BaseModel):
|
||||||
interface_language: str
|
interface_language: str
|
||||||
@ -158,27 +159,26 @@ class CheckEmailUniquePayload(BaseModel):
|
|||||||
email: EmailStr
|
email: EmailStr
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_schema_models(
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
console_ns,
|
||||||
|
AccountResponse,
|
||||||
|
AccountInitPayload,
|
||||||
reg(AccountInitPayload)
|
AccountNamePayload,
|
||||||
reg(AccountNamePayload)
|
AccountAvatarPayload,
|
||||||
reg(AccountAvatarPayload)
|
AccountAvatarQuery,
|
||||||
reg(AccountAvatarQuery)
|
AccountInterfaceLanguagePayload,
|
||||||
reg(AccountInterfaceLanguagePayload)
|
AccountInterfaceThemePayload,
|
||||||
reg(AccountInterfaceThemePayload)
|
AccountTimezonePayload,
|
||||||
reg(AccountTimezonePayload)
|
AccountPasswordPayload,
|
||||||
reg(AccountPasswordPayload)
|
AccountDeletePayload,
|
||||||
reg(AccountDeletePayload)
|
AccountDeletionFeedbackPayload,
|
||||||
reg(AccountDeletionFeedbackPayload)
|
EducationActivatePayload,
|
||||||
reg(EducationActivatePayload)
|
EducationAutocompleteQuery,
|
||||||
reg(EducationAutocompleteQuery)
|
ChangeEmailSendPayload,
|
||||||
reg(ChangeEmailSendPayload)
|
ChangeEmailValidityPayload,
|
||||||
reg(ChangeEmailValidityPayload)
|
ChangeEmailResetPayload,
|
||||||
reg(ChangeEmailResetPayload)
|
CheckEmailUniquePayload,
|
||||||
reg(CheckEmailUniquePayload)
|
)
|
||||||
register_schema_models(console_ns, AccountResponse)
|
|
||||||
|
|
||||||
|
|
||||||
def _serialize_account(account) -> dict[str, Any]:
|
def _serialize_account(account) -> dict[str, Any]:
|
||||||
@ -322,9 +322,24 @@ class AccountAvatarApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
current_user, current_tenant_id = current_account_with_tenant()
|
||||||
|
args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
avatar = args.avatar
|
||||||
|
|
||||||
avatar_url = file_helpers.get_signed_file_url(args.avatar)
|
if avatar.startswith(("http://", "https://")):
|
||||||
|
return {"avatar_url": avatar}
|
||||||
|
|
||||||
|
upload_file = db.session.scalar(select(UploadFile).where(UploadFile.id == avatar).limit(1))
|
||||||
|
if upload_file is None:
|
||||||
|
raise NotFound("Avatar file not found")
|
||||||
|
|
||||||
|
if upload_file.tenant_id != current_tenant_id:
|
||||||
|
raise NotFound("Avatar file not found")
|
||||||
|
|
||||||
|
if upload_file.created_by_role != CreatorUserRole.ACCOUNT or upload_file.created_by != current_user.id:
|
||||||
|
raise NotFound("Avatar file not found")
|
||||||
|
|
||||||
|
avatar_url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id)
|
||||||
return {"avatar_url": avatar_url}
|
return {"avatar_url": avatar_url}
|
||||||
|
|
||||||
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
|
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
|
||||||
|
|||||||
@ -20,8 +20,6 @@ from graphon.model_runtime.utils.encoders import jsonable_encoder
|
|||||||
from libs.login import current_account_with_tenant, login_required
|
from libs.login import current_account_with_tenant, login_required
|
||||||
from services.plugin.endpoint_service import EndpointService
|
from services.plugin.endpoint_service import EndpointService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class EndpointCreatePayload(BaseModel):
|
class EndpointCreatePayload(BaseModel):
|
||||||
plugin_unique_identifier: str
|
plugin_unique_identifier: str
|
||||||
@ -80,10 +78,6 @@ class EndpointDisableResponse(BaseModel):
|
|||||||
success: bool = Field(description="Operation success")
|
success: bool = Field(description="Operation success")
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
|
||||||
|
|
||||||
register_schema_models(
|
register_schema_models(
|
||||||
console_ns,
|
console_ns,
|
||||||
EndpointCreatePayload,
|
EndpointCreatePayload,
|
||||||
@ -215,7 +209,7 @@ class EndpointListApi(Resource):
|
|||||||
def get(self):
|
def get(self):
|
||||||
user, tenant_id = current_account_with_tenant()
|
user, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = EndpointListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = EndpointListQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
page = args.page
|
page = args.page
|
||||||
page_size = args.page_size
|
page_size = args.page_size
|
||||||
@ -248,7 +242,7 @@ class EndpointListForSinglePluginApi(Resource):
|
|||||||
def get(self):
|
def get(self):
|
||||||
user, tenant_id = current_account_with_tenant()
|
user, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = EndpointListForPluginQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = EndpointListForPluginQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
page = args.page
|
page = args.page
|
||||||
page_size = args.page_size
|
page_size = args.page_size
|
||||||
|
|||||||
@ -33,8 +33,6 @@ from services.account_service import AccountService, RegisterService, TenantServ
|
|||||||
from services.errors.account import AccountAlreadyInTenantError
|
from services.errors.account import AccountAlreadyInTenantError
|
||||||
from services.feature_service import FeatureService
|
from services.feature_service import FeatureService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class MemberInvitePayload(BaseModel):
|
class MemberInvitePayload(BaseModel):
|
||||||
emails: list[str] = Field(default_factory=list)
|
emails: list[str] = Field(default_factory=list)
|
||||||
@ -59,17 +57,23 @@ class OwnerTransferPayload(BaseModel):
|
|||||||
token: str
|
token: str
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
|
||||||
|
|
||||||
|
|
||||||
reg(MemberInvitePayload)
|
|
||||||
reg(MemberRoleUpdatePayload)
|
|
||||||
reg(OwnerTransferEmailPayload)
|
|
||||||
reg(OwnerTransferCheckPayload)
|
|
||||||
reg(OwnerTransferPayload)
|
|
||||||
register_enum_models(console_ns, TenantAccountRole)
|
register_enum_models(console_ns, TenantAccountRole)
|
||||||
register_schema_models(console_ns, AccountWithRole, AccountWithRoleList)
|
register_schema_models(
|
||||||
|
console_ns,
|
||||||
|
AccountWithRole,
|
||||||
|
AccountWithRoleList,
|
||||||
|
MemberInvitePayload,
|
||||||
|
MemberRoleUpdatePayload,
|
||||||
|
OwnerTransferEmailPayload,
|
||||||
|
OwnerTransferCheckPayload,
|
||||||
|
OwnerTransferPayload,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_role_enabled(role: TenantAccountRole | str, tenant_id: str) -> bool:
|
||||||
|
if role != TenantAccountRole.DATASET_OPERATOR:
|
||||||
|
return True
|
||||||
|
return FeatureService.get_features(tenant_id=tenant_id).dataset_operator_enabled
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/workspaces/current/members")
|
@console_ns.route("/workspaces/current/members")
|
||||||
@ -112,6 +116,8 @@ class MemberInviteEmailApi(Resource):
|
|||||||
inviter = current_user
|
inviter = current_user
|
||||||
if not inviter.current_tenant:
|
if not inviter.current_tenant:
|
||||||
raise ValueError("No current tenant")
|
raise ValueError("No current tenant")
|
||||||
|
if not _is_role_enabled(invitee_role, inviter.current_tenant.id):
|
||||||
|
return {"code": "invalid-role", "message": "Invalid role"}, 400
|
||||||
|
|
||||||
# Check workspace permission for member invitations
|
# Check workspace permission for member invitations
|
||||||
from libs.workspace_permission import check_workspace_member_invite_permission
|
from libs.workspace_permission import check_workspace_member_invite_permission
|
||||||
@ -210,6 +216,8 @@ class MemberUpdateRoleApi(Resource):
|
|||||||
current_user, _ = current_account_with_tenant()
|
current_user, _ = current_account_with_tenant()
|
||||||
if not current_user.current_tenant:
|
if not current_user.current_tenant:
|
||||||
raise ValueError("No current tenant")
|
raise ValueError("No current tenant")
|
||||||
|
if not _is_role_enabled(new_role, current_user.current_tenant.id):
|
||||||
|
return {"code": "invalid-role", "message": "Invalid role"}, 400
|
||||||
member = db.session.get(Account, str(member_id))
|
member = db.session.get(Account, str(member_id))
|
||||||
if not member:
|
if not member:
|
||||||
abort(404)
|
abort(404)
|
||||||
@ -217,11 +225,17 @@ class MemberUpdateRoleApi(Resource):
|
|||||||
try:
|
try:
|
||||||
assert member is not None, "Member not found"
|
assert member is not None, "Member not found"
|
||||||
TenantService.update_member_role(current_user.current_tenant, member, new_role, current_user)
|
TenantService.update_member_role(current_user.current_tenant, member, new_role, current_user)
|
||||||
|
except services.errors.account.CannotOperateSelfError as e:
|
||||||
|
return {"code": "cannot-operate-self", "message": str(e)}, 400
|
||||||
|
except services.errors.account.NoPermissionError as e:
|
||||||
|
return {"code": "forbidden", "message": str(e)}, 403
|
||||||
|
except services.errors.account.MemberNotInTenantError as e:
|
||||||
|
return {"code": "member-not-found", "message": str(e)}, 404
|
||||||
|
except services.errors.account.RoleAlreadyAssignedError as e:
|
||||||
|
return {"code": "role-already-assigned", "message": str(e)}, 400
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise ValueError(str(e))
|
raise ValueError(str(e))
|
||||||
|
|
||||||
# todo: 403
|
|
||||||
|
|
||||||
return {"result": "success"}
|
return {"result": "success"}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -5,6 +5,7 @@ from flask import request, send_file
|
|||||||
from flask_restx import Resource
|
from flask_restx import Resource
|
||||||
from pydantic import BaseModel, Field, field_validator
|
from pydantic import BaseModel, Field, field_validator
|
||||||
|
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required
|
from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required
|
||||||
from graphon.model_runtime.entities.model_entities import ModelType
|
from graphon.model_runtime.entities.model_entities import ModelType
|
||||||
@ -15,8 +16,6 @@ from libs.login import current_account_with_tenant, login_required
|
|||||||
from services.billing_service import BillingService
|
from services.billing_service import BillingService
|
||||||
from services.model_provider_service import ModelProviderService
|
from services.model_provider_service import ModelProviderService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class ParserModelList(BaseModel):
|
class ParserModelList(BaseModel):
|
||||||
model_type: ModelType | None = None
|
model_type: ModelType | None = None
|
||||||
@ -75,18 +74,17 @@ class ParserPreferredProviderType(BaseModel):
|
|||||||
preferred_provider_type: Literal["system", "custom"]
|
preferred_provider_type: Literal["system", "custom"]
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_schema_models(
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
console_ns,
|
||||||
|
ParserModelList,
|
||||||
|
ParserCredentialId,
|
||||||
reg(ParserModelList)
|
ParserCredentialCreate,
|
||||||
reg(ParserCredentialId)
|
ParserCredentialUpdate,
|
||||||
reg(ParserCredentialCreate)
|
ParserCredentialDelete,
|
||||||
reg(ParserCredentialUpdate)
|
ParserCredentialSwitch,
|
||||||
reg(ParserCredentialDelete)
|
ParserCredentialValidate,
|
||||||
reg(ParserCredentialSwitch)
|
ParserPreferredProviderType,
|
||||||
reg(ParserCredentialValidate)
|
)
|
||||||
reg(ParserPreferredProviderType)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/workspaces/current/model-providers")
|
@console_ns.route("/workspaces/current/model-providers")
|
||||||
|
|||||||
@ -17,7 +17,6 @@ from services.model_load_balancing_service import ModelLoadBalancingService
|
|||||||
from services.model_provider_service import ModelProviderService
|
from services.model_provider_service import ModelProviderService
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class ParserGetDefault(BaseModel):
|
class ParserGetDefault(BaseModel):
|
||||||
@ -107,6 +106,12 @@ class ParserParameter(BaseModel):
|
|||||||
model: str
|
model: str
|
||||||
|
|
||||||
|
|
||||||
|
class ParserSwitch(BaseModel):
|
||||||
|
model: str
|
||||||
|
model_type: ModelType
|
||||||
|
credential_id: str
|
||||||
|
|
||||||
|
|
||||||
register_schema_models(
|
register_schema_models(
|
||||||
console_ns,
|
console_ns,
|
||||||
ParserGetDefault,
|
ParserGetDefault,
|
||||||
@ -119,6 +124,7 @@ register_schema_models(
|
|||||||
ParserDeleteCredential,
|
ParserDeleteCredential,
|
||||||
ParserParameter,
|
ParserParameter,
|
||||||
Inner,
|
Inner,
|
||||||
|
ParserSwitch,
|
||||||
)
|
)
|
||||||
|
|
||||||
register_enum_models(console_ns, ModelType)
|
register_enum_models(console_ns, ModelType)
|
||||||
@ -133,7 +139,7 @@ class DefaultModelApi(Resource):
|
|||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = ParserGetDefault.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserGetDefault.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
model_provider_service = ModelProviderService()
|
model_provider_service = ModelProviderService()
|
||||||
default_model_entity = model_provider_service.get_default_model_of_model_type(
|
default_model_entity = model_provider_service.get_default_model_of_model_type(
|
||||||
@ -261,7 +267,7 @@ class ModelProviderModelCredentialApi(Resource):
|
|||||||
def get(self, provider: str):
|
def get(self, provider: str):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = ParserGetCredentials.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserGetCredentials.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
model_provider_service = ModelProviderService()
|
model_provider_service = ModelProviderService()
|
||||||
current_credential = model_provider_service.get_model_credential(
|
current_credential = model_provider_service.get_model_credential(
|
||||||
@ -387,17 +393,6 @@ class ModelProviderModelCredentialApi(Resource):
|
|||||||
return {"result": "success"}, 204
|
return {"result": "success"}, 204
|
||||||
|
|
||||||
|
|
||||||
class ParserSwitch(BaseModel):
|
|
||||||
model: str
|
|
||||||
model_type: ModelType
|
|
||||||
credential_id: str
|
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
|
||||||
ParserSwitch.__name__, ParserSwitch.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/switch")
|
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/switch")
|
||||||
class ModelProviderModelCredentialSwitchApi(Resource):
|
class ModelProviderModelCredentialSwitchApi(Resource):
|
||||||
@console_ns.expect(console_ns.models[ParserSwitch.__name__])
|
@console_ns.expect(console_ns.models[ParserSwitch.__name__])
|
||||||
@ -468,9 +463,7 @@ class ParserValidate(BaseModel):
|
|||||||
credentials: dict[str, Any]
|
credentials: dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
console_ns.schema_model(
|
register_schema_models(console_ns, ParserSwitch, ParserValidate)
|
||||||
ParserValidate.__name__, ParserValidate.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/validate")
|
@console_ns.route("/workspaces/current/model-providers/<path:provider>/models/credentials/validate")
|
||||||
@ -515,7 +508,7 @@ class ModelProviderModelParameterRuleApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self, provider: str):
|
def get(self, provider: str):
|
||||||
args = ParserParameter.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserParameter.model_validate(request.args.to_dict(flat=True))
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
model_provider_service = ModelProviderService()
|
model_provider_service = ModelProviderService()
|
||||||
|
|||||||
@ -177,7 +177,7 @@ def _read_upload_content(file: FileStorage, max_size: int) -> bytes:
|
|||||||
FileStorage.content_length is not reliable for multipart test uploads and may be zero even when
|
FileStorage.content_length is not reliable for multipart test uploads and may be zero even when
|
||||||
content exists, so the controllers validate against the loaded bytes instead.
|
content exists, so the controllers validate against the loaded bytes instead.
|
||||||
"""
|
"""
|
||||||
content = file.read()
|
content = file.stream.read()
|
||||||
if len(content) > max_size:
|
if len(content) > max_size:
|
||||||
raise ValueError("File size exceeds the maximum allowed size")
|
raise ValueError("File size exceeds the maximum allowed size")
|
||||||
|
|
||||||
@ -211,7 +211,7 @@ class PluginListApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserList.model_validate(request.args.to_dict(flat=True))
|
||||||
try:
|
try:
|
||||||
plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size)
|
plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size)
|
||||||
except PluginDaemonClientSideError as e:
|
except PluginDaemonClientSideError as e:
|
||||||
@ -261,7 +261,7 @@ class PluginIconApi(Resource):
|
|||||||
@console_ns.expect(console_ns.models[ParserIcon.__name__])
|
@console_ns.expect(console_ns.models[ParserIcon.__name__])
|
||||||
@setup_required
|
@setup_required
|
||||||
def get(self):
|
def get(self):
|
||||||
args = ParserIcon.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserIcon.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
icon_bytes, mimetype = PluginService.get_asset(args.tenant_id, args.filename)
|
icon_bytes, mimetype = PluginService.get_asset(args.tenant_id, args.filename)
|
||||||
@ -279,7 +279,7 @@ class PluginAssetApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
args = ParserAsset.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserAsset.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
try:
|
try:
|
||||||
@ -421,7 +421,7 @@ class PluginFetchMarketplacePkgApi(Resource):
|
|||||||
@plugin_permission_required(install_required=True)
|
@plugin_permission_required(install_required=True)
|
||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return jsonable_encoder(
|
return jsonable_encoder(
|
||||||
@ -446,7 +446,7 @@ class PluginFetchManifestApi(Resource):
|
|||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return jsonable_encoder(
|
return jsonable_encoder(
|
||||||
@ -466,7 +466,7 @@ class PluginFetchInstallTasksApi(Resource):
|
|||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
|
|
||||||
args = ParserTasks.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserTasks.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return jsonable_encoder({"tasks": PluginService.fetch_install_tasks(tenant_id, args.page, args.page_size)})
|
return jsonable_encoder({"tasks": PluginService.fetch_install_tasks(tenant_id, args.page, args.page_size)})
|
||||||
@ -660,7 +660,7 @@ class PluginFetchDynamicSelectOptionsApi(Resource):
|
|||||||
current_user, tenant_id = current_account_with_tenant()
|
current_user, tenant_id = current_account_with_tenant()
|
||||||
user_id = current_user.id
|
user_id = current_user.id
|
||||||
|
|
||||||
args = ParserDynamicOptions.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserDynamicOptions.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
options = PluginParameterService.get_dynamic_select_options(
|
options = PluginParameterService.get_dynamic_select_options(
|
||||||
@ -822,7 +822,7 @@ class PluginReadmeApi(Resource):
|
|||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def get(self):
|
def get(self):
|
||||||
_, tenant_id = current_account_with_tenant()
|
_, tenant_id = current_account_with_tenant()
|
||||||
args = ParserReadme.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = ParserReadme.model_validate(request.args.to_dict(flat=True))
|
||||||
return jsonable_encoder(
|
return jsonable_encoder(
|
||||||
{"readme": PluginService.fetch_plugin_readme(tenant_id, args.plugin_unique_identifier, args.language)}
|
{"readme": PluginService.fetch_plugin_readme(tenant_id, args.plugin_unique_identifier, args.language)}
|
||||||
)
|
)
|
||||||
|
|||||||
@ -876,10 +876,10 @@ class ToolBuiltinProviderSetDefaultApi(Resource):
|
|||||||
@login_required
|
@login_required
|
||||||
@account_initialization_required
|
@account_initialization_required
|
||||||
def post(self, provider):
|
def post(self, provider):
|
||||||
current_user, current_tenant_id = current_account_with_tenant()
|
_, current_tenant_id = current_account_with_tenant()
|
||||||
payload = BuiltinProviderDefaultCredentialPayload.model_validate(console_ns.payload or {})
|
payload = BuiltinProviderDefaultCredentialPayload.model_validate(console_ns.payload or {})
|
||||||
return BuiltinToolManageService.set_default_provider(
|
return BuiltinToolManageService.set_default_provider(
|
||||||
tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=payload.id
|
tenant_id=current_tenant_id, provider=provider, id=payload.id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@ -16,6 +16,7 @@ from controllers.common.errors import (
|
|||||||
TooManyFilesError,
|
TooManyFilesError,
|
||||||
UnsupportedFileTypeError,
|
UnsupportedFileTypeError,
|
||||||
)
|
)
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.console import console_ns
|
from controllers.console import console_ns
|
||||||
from controllers.console.admin import admin_required
|
from controllers.console.admin import admin_required
|
||||||
from controllers.console.error import AccountNotLinkTenantError
|
from controllers.console.error import AccountNotLinkTenantError
|
||||||
@ -39,7 +40,6 @@ from services.file_service import FileService
|
|||||||
from services.workspace_service import WorkspaceService
|
from services.workspace_service import WorkspaceService
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class WorkspaceListQuery(BaseModel):
|
class WorkspaceListQuery(BaseModel):
|
||||||
@ -91,15 +91,14 @@ class TenantInfoResponse(ResponseModel):
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
def reg(cls: type[BaseModel]):
|
register_schema_models(
|
||||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
console_ns,
|
||||||
|
WorkspaceListQuery,
|
||||||
|
SwitchWorkspacePayload,
|
||||||
reg(WorkspaceListQuery)
|
WorkspaceCustomConfigPayload,
|
||||||
reg(SwitchWorkspacePayload)
|
WorkspaceInfoPayload,
|
||||||
reg(WorkspaceCustomConfigPayload)
|
TenantInfoResponse,
|
||||||
reg(WorkspaceInfoPayload)
|
)
|
||||||
reg(TenantInfoResponse)
|
|
||||||
|
|
||||||
provider_fields = {
|
provider_fields = {
|
||||||
"provider_name": fields.String,
|
"provider_name": fields.String,
|
||||||
@ -322,7 +321,7 @@ class WebappLogoWorkspaceApi(Resource):
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=current_user,
|
user=current_user,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -8,13 +8,12 @@ from werkzeug.exceptions import NotFound
|
|||||||
import services
|
import services
|
||||||
from controllers.common.errors import UnsupportedFileTypeError
|
from controllers.common.errors import UnsupportedFileTypeError
|
||||||
from controllers.common.file_response import enforce_download_for_html
|
from controllers.common.file_response import enforce_download_for_html
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.files import files_ns
|
from controllers.files import files_ns
|
||||||
from extensions.ext_database import db
|
from extensions.ext_database import db
|
||||||
from services.account_service import TenantService
|
from services.account_service import TenantService
|
||||||
from services.file_service import FileService
|
from services.file_service import FileService
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class FileSignatureQuery(BaseModel):
|
class FileSignatureQuery(BaseModel):
|
||||||
timestamp: str = Field(..., description="Unix timestamp used in the signature")
|
timestamp: str = Field(..., description="Unix timestamp used in the signature")
|
||||||
@ -26,12 +25,7 @@ class FilePreviewQuery(FileSignatureQuery):
|
|||||||
as_attachment: bool = Field(default=False, description="Whether to download as attachment")
|
as_attachment: bool = Field(default=False, description="Whether to download as attachment")
|
||||||
|
|
||||||
|
|
||||||
files_ns.schema_model(
|
register_schema_models(files_ns, FileSignatureQuery, FilePreviewQuery)
|
||||||
FileSignatureQuery.__name__, FileSignatureQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
files_ns.schema_model(
|
|
||||||
FilePreviewQuery.__name__, FilePreviewQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@files_ns.route("/<uuid:file_id>/image-preview")
|
@files_ns.route("/<uuid:file_id>/image-preview")
|
||||||
@ -58,7 +52,7 @@ class ImagePreviewApi(Resource):
|
|||||||
def get(self, file_id):
|
def get(self, file_id):
|
||||||
file_id = str(file_id)
|
file_id = str(file_id)
|
||||||
|
|
||||||
args = FileSignatureQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = FileSignatureQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
timestamp = args.timestamp
|
timestamp = args.timestamp
|
||||||
nonce = args.nonce
|
nonce = args.nonce
|
||||||
sign = args.sign
|
sign = args.sign
|
||||||
@ -100,7 +94,7 @@ class FilePreviewApi(Resource):
|
|||||||
def get(self, file_id):
|
def get(self, file_id):
|
||||||
file_id = str(file_id)
|
file_id = str(file_id)
|
||||||
|
|
||||||
args = FilePreviewQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = FilePreviewQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
generator, upload_file = FileService(db.engine).get_file_generator_by_file_id(
|
generator, upload_file = FileService(db.engine).get_file_generator_by_file_id(
|
||||||
|
|||||||
@ -7,12 +7,11 @@ from werkzeug.exceptions import Forbidden, NotFound
|
|||||||
|
|
||||||
from controllers.common.errors import UnsupportedFileTypeError
|
from controllers.common.errors import UnsupportedFileTypeError
|
||||||
from controllers.common.file_response import enforce_download_for_html
|
from controllers.common.file_response import enforce_download_for_html
|
||||||
|
from controllers.common.schema import register_schema_models
|
||||||
from controllers.files import files_ns
|
from controllers.files import files_ns
|
||||||
from core.tools.signature import verify_tool_file_signature
|
from core.tools.signature import verify_tool_file_signature
|
||||||
from core.tools.tool_file_manager import ToolFileManager
|
from core.tools.tool_file_manager import ToolFileManager
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class ToolFileQuery(BaseModel):
|
class ToolFileQuery(BaseModel):
|
||||||
timestamp: str = Field(..., description="Unix timestamp")
|
timestamp: str = Field(..., description="Unix timestamp")
|
||||||
@ -21,9 +20,7 @@ class ToolFileQuery(BaseModel):
|
|||||||
as_attachment: bool = Field(default=False, description="Download as attachment")
|
as_attachment: bool = Field(default=False, description="Download as attachment")
|
||||||
|
|
||||||
|
|
||||||
files_ns.schema_model(
|
register_schema_models(files_ns, ToolFileQuery)
|
||||||
ToolFileQuery.__name__, ToolFileQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@files_ns.route("/tools/<uuid:file_id>.<string:extension>")
|
@files_ns.route("/tools/<uuid:file_id>.<string:extension>")
|
||||||
|
|||||||
@ -20,8 +20,6 @@ from ..console.wraps import setup_required
|
|||||||
from ..files import files_ns
|
from ..files import files_ns
|
||||||
from ..inner_api.plugin.wraps import get_user
|
from ..inner_api.plugin.wraps import get_user
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class PluginUploadQuery(BaseModel):
|
class PluginUploadQuery(BaseModel):
|
||||||
timestamp: str = Field(..., description="Unix timestamp for signature verification")
|
timestamp: str = Field(..., description="Unix timestamp for signature verification")
|
||||||
@ -31,9 +29,8 @@ class PluginUploadQuery(BaseModel):
|
|||||||
user_id: str | None = Field(default=None, description="User identifier")
|
user_id: str | None = Field(default=None, description="User identifier")
|
||||||
|
|
||||||
|
|
||||||
files_ns.schema_model(
|
register_schema_models(files_ns, PluginUploadQuery)
|
||||||
PluginUploadQuery.__name__, PluginUploadQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
|
|
||||||
)
|
|
||||||
|
|
||||||
register_schema_models(files_ns, FileResponse)
|
register_schema_models(files_ns, FileResponse)
|
||||||
|
|
||||||
@ -69,7 +66,7 @@ class PluginUploadFileApi(Resource):
|
|||||||
FileTooLargeError: File exceeds size limit
|
FileTooLargeError: File exceeds size limit
|
||||||
UnsupportedFileTypeError: File type not supported
|
UnsupportedFileTypeError: File type not supported
|
||||||
"""
|
"""
|
||||||
args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
|
args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True))
|
||||||
|
|
||||||
file = request.files.get("file")
|
file = request.files.get("file")
|
||||||
if file is None:
|
if file is None:
|
||||||
@ -103,7 +100,7 @@ class PluginUploadFileApi(Resource):
|
|||||||
tool_file = ToolFileManager().create_file_by_raw(
|
tool_file = ToolFileManager().create_file_by_raw(
|
||||||
user_id=user.id,
|
user_id=user.id,
|
||||||
tenant_id=tenant_id,
|
tenant_id=tenant_id,
|
||||||
file_binary=file.read(),
|
file_binary=file.stream.read(),
|
||||||
mimetype=mimetype,
|
mimetype=mimetype,
|
||||||
filename=filename,
|
filename=filename,
|
||||||
conversation_id=None,
|
conversation_id=None,
|
||||||
|
|||||||
@ -58,7 +58,7 @@ class FileApi(Resource):
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=end_user,
|
user=end_user,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -2,11 +2,11 @@ from typing import Any, Literal, cast
|
|||||||
|
|
||||||
from flask import request
|
from flask import request
|
||||||
from flask_restx import marshal
|
from flask_restx import marshal
|
||||||
from pydantic import BaseModel, Field, TypeAdapter, field_validator
|
from pydantic import BaseModel, Field, TypeAdapter, field_validator, model_validator
|
||||||
from werkzeug.exceptions import Forbidden, NotFound
|
from werkzeug.exceptions import Forbidden, NotFound
|
||||||
|
|
||||||
import services
|
import services
|
||||||
from controllers.common.schema import register_schema_models
|
from controllers.common.schema import register_enum_models, register_schema_models
|
||||||
from controllers.console.wraps import edit_permission_required
|
from controllers.console.wraps import edit_permission_required
|
||||||
from controllers.service_api import service_api_ns
|
from controllers.service_api import service_api_ns
|
||||||
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError
|
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError
|
||||||
@ -34,13 +34,7 @@ from services.tag_service import (
|
|||||||
UpdateTagPayload,
|
UpdateTagPayload,
|
||||||
)
|
)
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
register_enum_models(service_api_ns, DatasetPermissionEnum)
|
||||||
|
|
||||||
|
|
||||||
service_api_ns.schema_model(
|
|
||||||
DatasetPermissionEnum.__name__,
|
|
||||||
TypeAdapter(DatasetPermissionEnum).json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class DatasetCreatePayload(BaseModel):
|
class DatasetCreatePayload(BaseModel):
|
||||||
@ -100,9 +94,27 @@ class TagBindingPayload(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class TagUnbindingPayload(BaseModel):
|
class TagUnbindingPayload(BaseModel):
|
||||||
tag_id: str
|
"""Accept the legacy single-tag Service API payload while exposing a normalized tag_ids list internally."""
|
||||||
|
|
||||||
|
tag_ids: list[str] = Field(default_factory=list)
|
||||||
|
tag_id: str | None = None
|
||||||
target_id: str
|
target_id: str
|
||||||
|
|
||||||
|
@model_validator(mode="before")
|
||||||
|
@classmethod
|
||||||
|
def normalize_legacy_tag_id(cls, data: object) -> object:
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
return data
|
||||||
|
if not data.get("tag_ids") and data.get("tag_id"):
|
||||||
|
return {**data, "tag_ids": [data["tag_id"]]}
|
||||||
|
return data
|
||||||
|
|
||||||
|
@model_validator(mode="after")
|
||||||
|
def validate_tag_ids(self) -> "TagUnbindingPayload":
|
||||||
|
if not self.tag_ids:
|
||||||
|
raise ValueError("Tag IDs is required.")
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
class DatasetListQuery(BaseModel):
|
class DatasetListQuery(BaseModel):
|
||||||
page: int = Field(default=1, description="Page number")
|
page: int = Field(default=1, description="Page number")
|
||||||
@ -601,11 +613,11 @@ class DatasetTagBindingApi(DatasetApiResource):
|
|||||||
@service_api_ns.route("/datasets/tags/unbinding")
|
@service_api_ns.route("/datasets/tags/unbinding")
|
||||||
class DatasetTagUnbindingApi(DatasetApiResource):
|
class DatasetTagUnbindingApi(DatasetApiResource):
|
||||||
@service_api_ns.expect(service_api_ns.models[TagUnbindingPayload.__name__])
|
@service_api_ns.expect(service_api_ns.models[TagUnbindingPayload.__name__])
|
||||||
@service_api_ns.doc("unbind_dataset_tag")
|
@service_api_ns.doc("unbind_dataset_tags")
|
||||||
@service_api_ns.doc(description="Unbind a tag from a dataset")
|
@service_api_ns.doc(description="Unbind tags from a dataset")
|
||||||
@service_api_ns.doc(
|
@service_api_ns.doc(
|
||||||
responses={
|
responses={
|
||||||
204: "Tag unbound successfully",
|
204: "Tags unbound successfully",
|
||||||
401: "Unauthorized - invalid API token",
|
401: "Unauthorized - invalid API token",
|
||||||
403: "Forbidden - insufficient permissions",
|
403: "Forbidden - insufficient permissions",
|
||||||
}
|
}
|
||||||
@ -618,7 +630,7 @@ class DatasetTagUnbindingApi(DatasetApiResource):
|
|||||||
|
|
||||||
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
|
payload = TagUnbindingPayload.model_validate(service_api_ns.payload or {})
|
||||||
TagService.delete_tag_binding(
|
TagService.delete_tag_binding(
|
||||||
TagBindingDeletePayload(tag_id=payload.tag_id, target_id=payload.target_id, type=TagType.KNOWLEDGE)
|
TagBindingDeletePayload(tag_ids=payload.tag_ids, target_id=payload.target_id, type=TagType.KNOWLEDGE)
|
||||||
)
|
)
|
||||||
|
|
||||||
return "", 204
|
return "", 204
|
||||||
|
|||||||
@ -77,9 +77,6 @@ class DocumentTextCreatePayload(BaseModel):
|
|||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
|
||||||
|
|
||||||
|
|
||||||
class DocumentTextUpdate(BaseModel):
|
class DocumentTextUpdate(BaseModel):
|
||||||
name: str | None = None
|
name: str | None = None
|
||||||
text: str | None = None
|
text: str | None = None
|
||||||
@ -139,7 +136,7 @@ def _create_document_by_text(tenant_id: str, dataset_id: UUID) -> tuple[Mapping[
|
|||||||
if not dataset:
|
if not dataset:
|
||||||
raise ValueError("Dataset does not exist.")
|
raise ValueError("Dataset does not exist.")
|
||||||
|
|
||||||
if not dataset.indexing_technique and not args["indexing_technique"]:
|
if not dataset.indexing_technique and not args.get("indexing_technique"):
|
||||||
raise ValueError("indexing_technique is required.")
|
raise ValueError("indexing_technique is required.")
|
||||||
|
|
||||||
embedding_model_provider = payload.embedding_model_provider
|
embedding_model_provider = payload.embedding_model_provider
|
||||||
@ -435,7 +432,7 @@ class DocumentAddByFileApi(DatasetApiResource):
|
|||||||
raise ValueError("current_user is required")
|
raise ValueError("current_user is required")
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=current_user,
|
user=current_user,
|
||||||
source="datasets",
|
source="datasets",
|
||||||
@ -509,7 +506,7 @@ def _update_document_by_file(tenant_id: str, dataset_id: UUID, document_id: UUID
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=current_user,
|
user=current_user,
|
||||||
source="datasets",
|
source="datasets",
|
||||||
|
|||||||
@ -241,7 +241,7 @@ class KnowledgebasePipelineFileUploadApi(DatasetApiResource):
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=current_user,
|
user=current_user,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -23,7 +23,7 @@ from controllers.web.wraps import WebApiResource
|
|||||||
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
|
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
|
||||||
from graphon.model_runtime.errors.invoke import InvokeError
|
from graphon.model_runtime.errors.invoke import InvokeError
|
||||||
from libs.helper import uuid_value
|
from libs.helper import uuid_value
|
||||||
from models.model import App
|
from models.model import App, EndUser
|
||||||
from services.audio_service import AudioService
|
from services.audio_service import AudioService
|
||||||
from services.errors.audio import (
|
from services.errors.audio import (
|
||||||
AudioTooLargeServiceError,
|
AudioTooLargeServiceError,
|
||||||
@ -69,12 +69,12 @@ class AudioApi(WebApiResource):
|
|||||||
500: "Internal Server Error",
|
500: "Internal Server Error",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
def post(self, app_model: App, end_user):
|
def post(self, app_model: App, end_user: EndUser):
|
||||||
"""Convert audio to text"""
|
"""Convert audio to text"""
|
||||||
file = request.files["file"]
|
file = request.files["file"]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=end_user)
|
response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=end_user.external_user_id)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
except services.errors.app_model_config.AppModelConfigBrokenError:
|
except services.errors.app_model_config.AppModelConfigBrokenError:
|
||||||
@ -117,7 +117,7 @@ class TextApi(WebApiResource):
|
|||||||
500: "Internal Server Error",
|
500: "Internal Server Error",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
def post(self, app_model: App, end_user):
|
def post(self, app_model: App, end_user: EndUser):
|
||||||
"""Convert text to audio"""
|
"""Convert text to audio"""
|
||||||
try:
|
try:
|
||||||
payload = TextToAudioPayload.model_validate(web_ns.payload or {})
|
payload = TextToAudioPayload.model_validate(web_ns.payload or {})
|
||||||
|
|||||||
@ -73,7 +73,7 @@ class FileApi(WebApiResource):
|
|||||||
try:
|
try:
|
||||||
upload_file = FileService(db.engine).upload_file(
|
upload_file = FileService(db.engine).upload_file(
|
||||||
filename=file.filename,
|
filename=file.filename,
|
||||||
content=file.read(),
|
content=file.stream.read(),
|
||||||
mimetype=file.mimetype,
|
mimetype=file.mimetype,
|
||||||
user=end_user,
|
user=end_user,
|
||||||
source="datasets" if source == "datasets" else None,
|
source="datasets" if source == "datasets" else None,
|
||||||
|
|||||||
@ -532,7 +532,6 @@ class BaseAgentRunner(AppRunner):
|
|||||||
file_objs = file_factory.build_from_message_files(
|
file_objs = file_factory.build_from_message_files(
|
||||||
message_files=files,
|
message_files=files,
|
||||||
tenant_id=self.tenant_id,
|
tenant_id=self.tenant_id,
|
||||||
config=file_extra_config,
|
|
||||||
access_controller=_file_access_controller,
|
access_controller=_file_access_controller,
|
||||||
)
|
)
|
||||||
if not file_objs:
|
if not file_objs:
|
||||||
|
|||||||
@ -75,7 +75,7 @@ class PromptTemplateConfigManager:
|
|||||||
if not config.get("prompt_type"):
|
if not config.get("prompt_type"):
|
||||||
config["prompt_type"] = PromptTemplateEntity.PromptType.SIMPLE
|
config["prompt_type"] = PromptTemplateEntity.PromptType.SIMPLE
|
||||||
|
|
||||||
prompt_type_vals = [typ.value for typ in PromptTemplateEntity.PromptType]
|
prompt_type_vals = list(PromptTemplateEntity.PromptType)
|
||||||
if config["prompt_type"] not in prompt_type_vals:
|
if config["prompt_type"] not in prompt_type_vals:
|
||||||
raise ValueError(f"prompt_type must be in {prompt_type_vals}")
|
raise ValueError(f"prompt_type must be in {prompt_type_vals}")
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user