mirror of https://github.com/langgenius/dify.git
feat: v1
This commit is contained in:
parent
f0d02b4b91
commit
277926f5a3
|
|
@ -55,6 +55,14 @@ class InstructionTemplatePayload(BaseModel):
|
|||
type: str = Field(..., description="Instruction template type")
|
||||
|
||||
|
||||
class FlowchartGeneratePayload(BaseModel):
|
||||
instruction: str = Field(..., description="Workflow flowchart generation instruction")
|
||||
model_config_data: dict[str, Any] = Field(..., alias="model_config", description="Model configuration")
|
||||
available_nodes: list[dict[str, Any]] = Field(default_factory=list, description="Available node types")
|
||||
existing_nodes: list[dict[str, Any]] = Field(default_factory=list, description="Existing workflow nodes")
|
||||
available_tools: list[dict[str, Any]] = Field(default_factory=list, description="Available tools")
|
||||
|
||||
|
||||
def reg(cls: type[BaseModel]):
|
||||
console_ns.schema_model(cls.__name__, cls.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
||||
|
||||
|
|
@ -64,6 +72,7 @@ reg(RuleCodeGeneratePayload)
|
|||
reg(RuleStructuredOutputPayload)
|
||||
reg(InstructionGeneratePayload)
|
||||
reg(InstructionTemplatePayload)
|
||||
reg(FlowchartGeneratePayload)
|
||||
|
||||
|
||||
@console_ns.route("/rule-generate")
|
||||
|
|
@ -255,6 +264,42 @@ class InstructionGenerateApi(Resource):
|
|||
raise CompletionRequestError(e.description)
|
||||
|
||||
|
||||
@console_ns.route("/flowchart-generate")
|
||||
class FlowchartGenerateApi(Resource):
|
||||
@console_ns.doc("generate_workflow_flowchart")
|
||||
@console_ns.doc(description="Generate workflow flowchart using LLM")
|
||||
@console_ns.expect(console_ns.models[FlowchartGeneratePayload.__name__])
|
||||
@console_ns.response(200, "Flowchart generated successfully")
|
||||
@console_ns.response(400, "Invalid request parameters")
|
||||
@console_ns.response(402, "Provider quota exceeded")
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
def post(self):
|
||||
args = FlowchartGeneratePayload.model_validate(console_ns.payload)
|
||||
_, current_tenant_id = current_account_with_tenant()
|
||||
|
||||
try:
|
||||
result = LLMGenerator.generate_workflow_flowchart(
|
||||
tenant_id=current_tenant_id,
|
||||
instruction=args.instruction,
|
||||
model_config=args.model_config_data,
|
||||
available_nodes=args.available_nodes,
|
||||
existing_nodes=args.existing_nodes,
|
||||
available_tools=args.available_tools,
|
||||
)
|
||||
except ProviderTokenNotInitError as ex:
|
||||
raise ProviderNotInitializeError(ex.description)
|
||||
except QuotaExceededError:
|
||||
raise ProviderQuotaExceededError()
|
||||
except ModelCurrentlyNotSupportError:
|
||||
raise ProviderModelCurrentlyNotSupportError()
|
||||
except InvokeError as e:
|
||||
raise CompletionRequestError(e.description)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@console_ns.route("/instruction-generate/template")
|
||||
class InstructionGenerationTemplateApi(Resource):
|
||||
@console_ns.doc("get_instruction_template")
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ from core.llm_generator.prompts import (
|
|||
SUGGESTED_QUESTIONS_MAX_TOKENS,
|
||||
SUGGESTED_QUESTIONS_TEMPERATURE,
|
||||
SYSTEM_STRUCTURED_OUTPUT_GENERATE,
|
||||
WORKFLOW_FLOWCHART_PROMPT_TEMPLATE,
|
||||
WORKFLOW_RULE_CONFIG_PROMPT_GENERATE_TEMPLATE,
|
||||
)
|
||||
from core.model_manager import ModelManager
|
||||
|
|
@ -285,6 +286,61 @@ class LLMGenerator:
|
|||
|
||||
return rule_config
|
||||
|
||||
@classmethod
|
||||
def generate_workflow_flowchart(
|
||||
cls,
|
||||
tenant_id: str,
|
||||
instruction: str,
|
||||
model_config: dict,
|
||||
available_nodes: Sequence[dict[str, object]] | None = None,
|
||||
existing_nodes: Sequence[dict[str, object]] | None = None,
|
||||
available_tools: Sequence[dict[str, object]] | None = None,
|
||||
):
|
||||
model_parameters = model_config.get("completion_params", {})
|
||||
prompt_template = PromptTemplateParser(WORKFLOW_FLOWCHART_PROMPT_TEMPLATE)
|
||||
prompt_generate = prompt_template.format(
|
||||
inputs={
|
||||
"TASK_DESCRIPTION": instruction,
|
||||
"AVAILABLE_NODES": json.dumps(available_nodes or [], ensure_ascii=False),
|
||||
"EXISTING_NODES": json.dumps(existing_nodes or [], ensure_ascii=False),
|
||||
"AVAILABLE_TOOLS": json.dumps(available_tools or [], ensure_ascii=False),
|
||||
},
|
||||
remove_template_variables=False,
|
||||
)
|
||||
|
||||
prompt_messages = [UserPromptMessage(content=prompt_generate)]
|
||||
|
||||
model_manager = ModelManager()
|
||||
model_instance = model_manager.get_model_instance(
|
||||
tenant_id=tenant_id,
|
||||
model_type=ModelType.LLM,
|
||||
provider=model_config.get("provider", ""),
|
||||
model=model_config.get("name", ""),
|
||||
)
|
||||
|
||||
flowchart = ""
|
||||
error = ""
|
||||
|
||||
try:
|
||||
response: LLMResult = model_instance.invoke_llm(
|
||||
prompt_messages=list(prompt_messages),
|
||||
model_parameters=model_parameters,
|
||||
stream=False,
|
||||
)
|
||||
content = response.message.get_text_content()
|
||||
if not isinstance(content, str):
|
||||
raise ValueError("Flowchart response is not a string")
|
||||
|
||||
match = re.search(r"```(?:mermaid)?\s*([\s\S]+?)```", content, flags=re.IGNORECASE)
|
||||
flowchart = (match.group(1) if match else content).strip()
|
||||
except InvokeError as e:
|
||||
error = str(e)
|
||||
except Exception as e:
|
||||
logger.exception("Failed to generate workflow flowchart, model: %s", model_config.get("name"))
|
||||
error = str(e)
|
||||
|
||||
return {"flowchart": flowchart, "error": error}
|
||||
|
||||
@classmethod
|
||||
def generate_code(cls, tenant_id: str, instruction: str, model_config: dict, code_language: str = "javascript"):
|
||||
if code_language == "python":
|
||||
|
|
|
|||
|
|
@ -143,6 +143,40 @@ Based on task description, please create a well-structured prompt template that
|
|||
Please generate the full prompt template with at least 300 words and output only the prompt template.
|
||||
""" # noqa: E501
|
||||
|
||||
WORKFLOW_FLOWCHART_PROMPT_TEMPLATE = """
|
||||
You are an expert workflow designer. Generate a Mermaid flowchart based on the user's request.
|
||||
|
||||
Constraints:
|
||||
- Use only node types listed in <available_nodes>.
|
||||
- Use only tools listed in <available_tools>. When using a tool node, set type=tool and tool=<provider_id>/<tool_name>.
|
||||
- Prefer reusing node titles from <existing_nodes> when possible.
|
||||
- Output must be valid Mermaid flowchart syntax, no markdown, no extra text.
|
||||
- First line must be: flowchart LR
|
||||
- Every node must be declared on its own line using:
|
||||
<id>["type=<type>|title=<title>|tool=<provider_id>/<tool_name>"]
|
||||
- type is required and must match a type in <available_nodes>.
|
||||
- title is required for non-tool nodes.
|
||||
- tool is required only when type=tool, otherwise omit tool.
|
||||
- Edges must use:
|
||||
<id> --> <id>
|
||||
<id> -->|true| <id>
|
||||
<id> -->|false| <id>
|
||||
- Keep node ids unique and simple (N1, N2, ...).
|
||||
|
||||
<user_request>
|
||||
{{TASK_DESCRIPTION}}
|
||||
</user_request>
|
||||
<available_nodes>
|
||||
{{AVAILABLE_NODES}}
|
||||
</available_nodes>
|
||||
<existing_nodes>
|
||||
{{EXISTING_NODES}}
|
||||
</existing_nodes>
|
||||
<available_tools>
|
||||
{{AVAILABLE_TOOLS}}
|
||||
</available_tools>
|
||||
""" # noqa: E501
|
||||
|
||||
RULE_CONFIG_PROMPT_GENERATE_TEMPLATE = """
|
||||
Here is a task description for which I would like you to create a high-quality prompt template for:
|
||||
<task_description>
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import { forumCommand } from './forum'
|
|||
import { languageCommand } from './language'
|
||||
import { slashCommandRegistry } from './registry'
|
||||
import { themeCommand } from './theme'
|
||||
import { vibeCommand } from './vibe'
|
||||
import { zenCommand } from './zen'
|
||||
|
||||
export const slashAction: ActionItem = {
|
||||
|
|
@ -41,6 +42,7 @@ export const registerSlashCommands = (deps: Record<string, any>) => {
|
|||
slashCommandRegistry.register(communityCommand, {})
|
||||
slashCommandRegistry.register(accountCommand, {})
|
||||
slashCommandRegistry.register(zenCommand, {})
|
||||
slashCommandRegistry.register(vibeCommand, {})
|
||||
}
|
||||
|
||||
export const unregisterSlashCommands = () => {
|
||||
|
|
@ -52,6 +54,7 @@ export const unregisterSlashCommands = () => {
|
|||
slashCommandRegistry.unregister('community')
|
||||
slashCommandRegistry.unregister('account')
|
||||
slashCommandRegistry.unregister('zen')
|
||||
slashCommandRegistry.unregister('vibe')
|
||||
}
|
||||
|
||||
export const SlashCommandProvider = () => {
|
||||
|
|
|
|||
|
|
@ -0,0 +1,59 @@
|
|||
import type { SlashCommandHandler } from './types'
|
||||
import { RiSparklingFill } from '@remixicon/react'
|
||||
import * as React from 'react'
|
||||
import { isInWorkflowPage, VIBE_COMMAND_EVENT } from '@/app/components/workflow/constants'
|
||||
import i18n from '@/i18n-config/i18next-config'
|
||||
import { registerCommands, unregisterCommands } from './command-bus'
|
||||
|
||||
type VibeDeps = Record<string, never>
|
||||
|
||||
const VIBE_PROMPT_EXAMPLE = 'Summarize a document, classify sentiment, then notify Slack'
|
||||
|
||||
const dispatchVibeCommand = (input?: string) => {
|
||||
if (typeof document === 'undefined')
|
||||
return
|
||||
|
||||
document.dispatchEvent(new CustomEvent(VIBE_COMMAND_EVENT, { detail: { dsl: input } }))
|
||||
}
|
||||
|
||||
export const vibeCommand: SlashCommandHandler<VibeDeps> = {
|
||||
name: 'vibe',
|
||||
description: i18n.t('app.gotoAnything.actions.vibeDesc'),
|
||||
mode: 'submenu',
|
||||
isAvailable: () => isInWorkflowPage(),
|
||||
|
||||
async search(args: string, locale: string = 'en') {
|
||||
const trimmed = args.trim()
|
||||
const hasInput = !!trimmed
|
||||
|
||||
return [{
|
||||
id: 'vibe',
|
||||
title: i18n.t('app.gotoAnything.actions.vibeTitle', { lng: locale }) || 'Vibe',
|
||||
description: hasInput
|
||||
? i18n.t('app.gotoAnything.actions.vibeDesc', { lng: locale })
|
||||
: i18n.t('app.gotoAnything.actions.vibeHint', { lng: locale, prompt: VIBE_PROMPT_EXAMPLE }),
|
||||
type: 'command' as const,
|
||||
icon: (
|
||||
<div className="flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-divider-regular bg-components-panel-bg">
|
||||
<RiSparklingFill className="h-4 w-4 text-text-tertiary" />
|
||||
</div>
|
||||
),
|
||||
data: {
|
||||
command: 'workflow.vibe',
|
||||
args: { dsl: trimmed },
|
||||
},
|
||||
}]
|
||||
},
|
||||
|
||||
register(_deps: VibeDeps) {
|
||||
registerCommands({
|
||||
'workflow.vibe': async (args) => {
|
||||
dispatchVibeCommand(args?.dsl)
|
||||
},
|
||||
})
|
||||
},
|
||||
|
||||
unregister() {
|
||||
unregisterCommands(['workflow.vibe'])
|
||||
},
|
||||
}
|
||||
|
|
@ -9,6 +9,7 @@ export const NODE_WIDTH = 240
|
|||
export const X_OFFSET = 60
|
||||
export const NODE_WIDTH_X_OFFSET = NODE_WIDTH + X_OFFSET
|
||||
export const Y_OFFSET = 39
|
||||
export const VIBE_COMMAND_EVENT = 'workflow-vibe-command'
|
||||
export const START_INITIAL_POSITION = { x: 80, y: 282 }
|
||||
export const AUTO_LAYOUT_OFFSET = {
|
||||
x: -42,
|
||||
|
|
|
|||
|
|
@ -24,3 +24,4 @@ export * from './use-workflow-run'
|
|||
export * from './use-workflow-search'
|
||||
export * from './use-workflow-start-run'
|
||||
export * from './use-workflow-variables'
|
||||
export * from './use-workflow-vibe'
|
||||
|
|
|
|||
|
|
@ -0,0 +1,672 @@
|
|||
'use client'
|
||||
|
||||
import type { ToolDefaultValue } from '../block-selector/types'
|
||||
import type { Edge, Node, ToolWithProvider } from '../types'
|
||||
import type { Tool } from '@/app/components/tools/types'
|
||||
import type { Model } from '@/types/app'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useStoreApi } from 'reactflow'
|
||||
import { v4 as uuid4 } from 'uuid'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
|
||||
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
import { generateFlowchart } from '@/service/debug'
|
||||
import {
|
||||
useAllBuiltInTools,
|
||||
useAllCustomTools,
|
||||
useAllMCPTools,
|
||||
useAllWorkflowTools,
|
||||
} from '@/service/use-tools'
|
||||
import { ModelModeType } from '@/types/app'
|
||||
import { basePath } from '@/utils/var'
|
||||
import {
|
||||
CUSTOM_EDGE,
|
||||
NODE_WIDTH,
|
||||
NODE_WIDTH_X_OFFSET,
|
||||
VIBE_COMMAND_EVENT,
|
||||
} from '../constants'
|
||||
import { BlockEnum } from '../types'
|
||||
import {
|
||||
generateNewNode,
|
||||
getLayoutByDagre,
|
||||
getNodeCustomTypeByNodeDataType,
|
||||
getNodesConnectedSourceOrTargetHandleIdsMap,
|
||||
} from '../utils'
|
||||
import { useNodesMetaData } from './use-nodes-meta-data'
|
||||
import { useNodesSyncDraft } from './use-nodes-sync-draft'
|
||||
import { useNodesReadOnly } from './use-workflow'
|
||||
import { useWorkflowHistory, WorkflowHistoryEvent } from './use-workflow-history'
|
||||
|
||||
type VibeCommandDetail = {
|
||||
dsl?: string
|
||||
}
|
||||
|
||||
type ParsedNodeDraft = {
|
||||
id: string
|
||||
type?: BlockEnum
|
||||
title?: string
|
||||
toolKey?: string
|
||||
}
|
||||
|
||||
type ParsedNode = {
|
||||
id: string
|
||||
type: BlockEnum
|
||||
title?: string
|
||||
toolKey?: string
|
||||
}
|
||||
|
||||
type ParsedEdge = {
|
||||
sourceId: string
|
||||
targetId: string
|
||||
label?: string
|
||||
}
|
||||
|
||||
type ParseError = {
|
||||
error: 'invalidMermaid' | 'missingNodeType' | 'unknownNodeType' | 'unknownTool' | 'missingNodeDefinition'
|
||||
detail?: string
|
||||
}
|
||||
|
||||
type ParseResult = {
|
||||
nodes: ParsedNode[]
|
||||
edges: ParsedEdge[]
|
||||
}
|
||||
|
||||
const NODE_DECLARATION = /^([A-Z][\w-]*)\s*\[(?:"([^"]+)"|([^\]]+))\]\s*$/i
|
||||
const EDGE_DECLARATION = /^(.+?)\s*-->\s*(?:\|([^|]+)\|\s*)?(.+)$/
|
||||
|
||||
const extractMermaidCode = (raw: string) => {
|
||||
const fencedMatch = raw.match(/```(?:mermaid)?\s*([\s\S]*?)```/i)
|
||||
return (fencedMatch ? fencedMatch[1] : raw).trim()
|
||||
}
|
||||
|
||||
const isMermaidFlowchart = (value: string) => {
|
||||
const trimmed = value.trim().toLowerCase()
|
||||
return trimmed.startsWith('flowchart') || trimmed.startsWith('graph')
|
||||
}
|
||||
|
||||
const normalizeKey = (value: string) => value.trim().toLowerCase().replace(/[^\p{L}\p{N}]/gu, '')
|
||||
|
||||
const normalizeProviderIcon = (icon?: ToolWithProvider['icon']) => {
|
||||
if (!icon)
|
||||
return icon
|
||||
if (typeof icon === 'string' && basePath && icon.startsWith('/') && !icon.startsWith(`${basePath}/`))
|
||||
return `${basePath}${icon}`
|
||||
return icon
|
||||
}
|
||||
|
||||
const parseNodeLabel = (label: string) => {
|
||||
const tokens = label.split('|').map(token => token.trim()).filter(Boolean)
|
||||
const info: Record<string, string> = {}
|
||||
|
||||
tokens.forEach((token) => {
|
||||
const [rawKey, ...rest] = token.split('=')
|
||||
if (!rawKey || rest.length === 0)
|
||||
return
|
||||
info[rawKey.trim().toLowerCase()] = rest.join('=').trim()
|
||||
})
|
||||
|
||||
if (!info.type && tokens.length === 1 && !tokens[0].includes('=')) {
|
||||
info.type = tokens[0]
|
||||
}
|
||||
|
||||
return info
|
||||
}
|
||||
|
||||
const parseNodeToken = (token: string) => {
|
||||
const trimmed = token.trim()
|
||||
const match = trimmed.match(NODE_DECLARATION)
|
||||
if (match)
|
||||
return { id: match[1], label: match[2] || match[3] }
|
||||
const idMatch = trimmed.match(/^([A-Z][\w-]*)$/i)
|
||||
if (idMatch)
|
||||
return { id: idMatch[1] }
|
||||
return null
|
||||
}
|
||||
|
||||
const parseMermaidFlowchart = (
|
||||
raw: string,
|
||||
nodeTypeLookup: Map<string, BlockEnum>,
|
||||
toolLookup: Map<string, ToolDefaultValue>,
|
||||
): ParseResult | ParseError => {
|
||||
const code = extractMermaidCode(raw)
|
||||
const lines = code.split(/\r?\n/).map((line) => {
|
||||
const commentIndex = line.indexOf('%%')
|
||||
return (commentIndex >= 0 ? line.slice(0, commentIndex) : line).trim()
|
||||
}).filter(Boolean)
|
||||
|
||||
const nodesMap = new Map<string, ParsedNodeDraft>()
|
||||
const edges: ParsedEdge[] = []
|
||||
|
||||
const registerNode = (id: string, label?: string): ParseError | null => {
|
||||
const existing = nodesMap.get(id)
|
||||
if (!label) {
|
||||
if (!existing)
|
||||
nodesMap.set(id, { id })
|
||||
return null
|
||||
}
|
||||
|
||||
const info = parseNodeLabel(label)
|
||||
if (!info.type)
|
||||
return { error: 'missingNodeType', detail: label }
|
||||
|
||||
const typeKey = normalizeKey(info.type)
|
||||
const nodeType = nodeTypeLookup.get(typeKey)
|
||||
if (!nodeType)
|
||||
return { error: 'unknownNodeType', detail: info.type }
|
||||
|
||||
const nodeData: ParsedNodeDraft = {
|
||||
id,
|
||||
type: nodeType,
|
||||
title: info.title,
|
||||
}
|
||||
|
||||
if (nodeType === BlockEnum.Tool) {
|
||||
if (!info.tool)
|
||||
return { error: 'unknownTool', detail: 'tool' }
|
||||
const toolKey = normalizeKey(info.tool)
|
||||
if (!toolLookup.has(toolKey))
|
||||
return { error: 'unknownTool', detail: info.tool }
|
||||
nodeData.toolKey = toolKey
|
||||
}
|
||||
|
||||
nodesMap.set(id, { ...(existing || {}), ...nodeData })
|
||||
return null
|
||||
}
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.toLowerCase().startsWith('flowchart') || line.toLowerCase().startsWith('graph'))
|
||||
continue
|
||||
|
||||
if (line.includes('-->')) {
|
||||
const edgeMatch = line.match(EDGE_DECLARATION)
|
||||
if (!edgeMatch)
|
||||
return { error: 'invalidMermaid', detail: line }
|
||||
|
||||
const sourceToken = parseNodeToken(edgeMatch[1])
|
||||
const targetToken = parseNodeToken(edgeMatch[3])
|
||||
if (!sourceToken || !targetToken)
|
||||
return { error: 'invalidMermaid', detail: line }
|
||||
|
||||
const sourceError = registerNode(sourceToken.id, sourceToken.label)
|
||||
if (sourceError)
|
||||
return sourceError
|
||||
const targetError = registerNode(targetToken.id, targetToken.label)
|
||||
if (targetError)
|
||||
return targetError
|
||||
|
||||
edges.push({
|
||||
sourceId: sourceToken.id,
|
||||
targetId: targetToken.id,
|
||||
label: edgeMatch[2]?.trim() || undefined,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const nodeMatch = line.match(NODE_DECLARATION)
|
||||
if (nodeMatch) {
|
||||
const error = registerNode(nodeMatch[1], nodeMatch[2] || nodeMatch[3])
|
||||
if (error)
|
||||
return error
|
||||
}
|
||||
}
|
||||
|
||||
const parsedNodes: ParsedNode[] = []
|
||||
for (const node of nodesMap.values()) {
|
||||
if (!node.type)
|
||||
return { error: 'missingNodeDefinition', detail: node.id }
|
||||
parsedNodes.push(node as ParsedNode)
|
||||
}
|
||||
|
||||
if (!parsedNodes.length)
|
||||
return { error: 'invalidMermaid', detail: '' }
|
||||
|
||||
return { nodes: parsedNodes, edges }
|
||||
}
|
||||
|
||||
const dedupeHandles = (handles?: string[]) => {
|
||||
if (!handles)
|
||||
return handles
|
||||
return Array.from(new Set(handles))
|
||||
}
|
||||
|
||||
const normalizeBranchLabel = (label?: string) => {
|
||||
if (!label)
|
||||
return ''
|
||||
const normalized = label.trim().toLowerCase()
|
||||
if (['true', 'yes', 'y', '1'].includes(normalized))
|
||||
return 'true'
|
||||
if (['false', 'no', 'n', '0'].includes(normalized))
|
||||
return 'false'
|
||||
return ''
|
||||
}
|
||||
|
||||
const buildToolParams = (parameters?: Tool['parameters']) => {
|
||||
const params: Record<string, string> = {}
|
||||
if (!parameters)
|
||||
return params
|
||||
parameters.forEach((item) => {
|
||||
params[item.name] = ''
|
||||
})
|
||||
return params
|
||||
}
|
||||
|
||||
export const useWorkflowVibe = () => {
|
||||
const { t } = useTranslation()
|
||||
const store = useStoreApi()
|
||||
const language = useGetLanguage()
|
||||
const { nodesMap: nodesMetaDataMap } = useNodesMetaData()
|
||||
const { handleSyncWorkflowDraft } = useNodesSyncDraft()
|
||||
const { getNodesReadOnly } = useNodesReadOnly()
|
||||
const { saveStateToHistory } = useWorkflowHistory()
|
||||
const { defaultModel } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.textGeneration)
|
||||
|
||||
const { data: buildInTools } = useAllBuiltInTools()
|
||||
const { data: customTools } = useAllCustomTools()
|
||||
const { data: workflowTools } = useAllWorkflowTools()
|
||||
const { data: mcpTools } = useAllMCPTools()
|
||||
|
||||
const [modelConfig, setModelConfig] = useState<Model | null>(null)
|
||||
const isGeneratingRef = useRef(false)
|
||||
|
||||
useEffect(() => {
|
||||
const storedModel = (() => {
|
||||
if (typeof window === 'undefined')
|
||||
return null
|
||||
const stored = localStorage.getItem('auto-gen-model')
|
||||
if (!stored)
|
||||
return null
|
||||
try {
|
||||
return JSON.parse(stored) as Model
|
||||
}
|
||||
catch {
|
||||
return null
|
||||
}
|
||||
})()
|
||||
|
||||
if (storedModel) {
|
||||
setModelConfig(storedModel)
|
||||
return
|
||||
}
|
||||
|
||||
if (defaultModel) {
|
||||
setModelConfig({
|
||||
name: defaultModel.model,
|
||||
provider: defaultModel.provider.provider,
|
||||
mode: ModelModeType.chat,
|
||||
completion_params: {} as Model['completion_params'],
|
||||
})
|
||||
}
|
||||
}, [defaultModel])
|
||||
|
||||
const availableNodesList = useMemo(() => {
|
||||
if (!nodesMetaDataMap)
|
||||
return []
|
||||
return Object.values(nodesMetaDataMap).map(node => ({
|
||||
type: node.metaData.type,
|
||||
title: node.metaData.title,
|
||||
description: node.metaData.description,
|
||||
}))
|
||||
}, [nodesMetaDataMap])
|
||||
|
||||
const toolOptions = useMemo(() => {
|
||||
const collections = [
|
||||
buildInTools,
|
||||
customTools,
|
||||
workflowTools,
|
||||
mcpTools,
|
||||
].filter(Boolean) as ToolWithProvider[][]
|
||||
|
||||
const tools: ToolDefaultValue[] = []
|
||||
const seen = new Set<string>()
|
||||
|
||||
collections.forEach((collection) => {
|
||||
collection.forEach((provider) => {
|
||||
provider.tools.forEach((tool) => {
|
||||
const key = `${provider.id}:${tool.name}`
|
||||
if (seen.has(key))
|
||||
return
|
||||
seen.add(key)
|
||||
|
||||
const params = buildToolParams(tool.parameters)
|
||||
const toolDescription = typeof tool.description === 'object'
|
||||
? tool.description?.[language]
|
||||
: tool.description
|
||||
tools.push({
|
||||
provider_id: provider.id,
|
||||
provider_type: provider.type,
|
||||
provider_name: provider.name,
|
||||
plugin_id: provider.plugin_id,
|
||||
plugin_unique_identifier: provider.plugin_unique_identifier,
|
||||
provider_icon: normalizeProviderIcon(provider.icon),
|
||||
provider_icon_dark: normalizeProviderIcon(provider.icon_dark),
|
||||
tool_name: tool.name,
|
||||
tool_label: tool.label[language] || tool.name,
|
||||
tool_description: toolDescription || '',
|
||||
is_team_authorization: provider.is_team_authorization,
|
||||
paramSchemas: tool.parameters,
|
||||
params,
|
||||
output_schema: tool.output_schema,
|
||||
meta: provider.meta,
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
return tools
|
||||
}, [buildInTools, customTools, workflowTools, mcpTools, language])
|
||||
|
||||
const toolLookup = useMemo(() => {
|
||||
const map = new Map<string, ToolDefaultValue>()
|
||||
toolOptions.forEach((tool) => {
|
||||
const primaryKey = normalizeKey(`${tool.provider_id}/${tool.tool_name}`)
|
||||
map.set(primaryKey, tool)
|
||||
|
||||
const providerNameKey = normalizeKey(`${tool.provider_name}/${tool.tool_name}`)
|
||||
map.set(providerNameKey, tool)
|
||||
|
||||
const labelKey = normalizeKey(tool.tool_label)
|
||||
map.set(labelKey, tool)
|
||||
})
|
||||
return map
|
||||
}, [toolOptions])
|
||||
|
||||
const nodeTypeLookup = useMemo(() => {
|
||||
const map = new Map<string, BlockEnum>()
|
||||
if (!nodesMetaDataMap)
|
||||
return map
|
||||
Object.values(nodesMetaDataMap).forEach((node) => {
|
||||
map.set(normalizeKey(node.metaData.type), node.metaData.type)
|
||||
if (node.metaData.title)
|
||||
map.set(normalizeKey(node.metaData.title), node.metaData.type)
|
||||
})
|
||||
map.set('ifelse', BlockEnum.IfElse)
|
||||
map.set('ifelsecase', BlockEnum.IfElse)
|
||||
return map
|
||||
}, [nodesMetaDataMap])
|
||||
|
||||
const handleVibeCommand = useCallback(async (dsl?: string) => {
|
||||
if (getNodesReadOnly()) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.readOnly') })
|
||||
return
|
||||
}
|
||||
|
||||
const trimmed = dsl?.trim() || ''
|
||||
if (!trimmed) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.missingInstruction') })
|
||||
return
|
||||
}
|
||||
|
||||
if (!nodesMetaDataMap || Object.keys(nodesMetaDataMap).length === 0) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.nodesUnavailable') })
|
||||
return
|
||||
}
|
||||
|
||||
if (!modelConfig && !isMermaidFlowchart(trimmed)) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.modelUnavailable') })
|
||||
return
|
||||
}
|
||||
|
||||
if (isGeneratingRef.current)
|
||||
return
|
||||
isGeneratingRef.current = true
|
||||
|
||||
try {
|
||||
const { getNodes, setNodes, edges, setEdges } = store.getState()
|
||||
const nodes = getNodes()
|
||||
|
||||
const existingNodesPayload = nodes.map(node => ({
|
||||
id: node.id,
|
||||
type: node.data.type,
|
||||
title: node.data.title || '',
|
||||
}))
|
||||
|
||||
const toolsPayload = toolOptions.map(tool => ({
|
||||
provider_id: tool.provider_id,
|
||||
provider_name: tool.provider_name,
|
||||
tool_name: tool.tool_name,
|
||||
tool_label: tool.tool_label,
|
||||
tool_key: `${tool.provider_id}/${tool.tool_name}`,
|
||||
}))
|
||||
|
||||
const availableNodesPayload = availableNodesList.map(node => ({
|
||||
type: node.type,
|
||||
title: node.title,
|
||||
description: node.description,
|
||||
}))
|
||||
|
||||
let mermaidCode = trimmed
|
||||
if (!isMermaidFlowchart(trimmed)) {
|
||||
const { error, flowchart } = await generateFlowchart({
|
||||
instruction: trimmed,
|
||||
model_config: modelConfig,
|
||||
available_nodes: availableNodesPayload,
|
||||
existing_nodes: existingNodesPayload,
|
||||
available_tools: toolsPayload,
|
||||
})
|
||||
|
||||
if (error) {
|
||||
Toast.notify({ type: 'error', message: error })
|
||||
return
|
||||
}
|
||||
|
||||
if (!flowchart) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.missingFlowchart') })
|
||||
return
|
||||
}
|
||||
|
||||
mermaidCode = flowchart
|
||||
}
|
||||
|
||||
const parseResult = parseMermaidFlowchart(mermaidCode, nodeTypeLookup, toolLookup)
|
||||
if ('error' in parseResult) {
|
||||
switch (parseResult.error) {
|
||||
case 'missingNodeType':
|
||||
case 'missingNodeDefinition':
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.invalidFlowchart') })
|
||||
return
|
||||
case 'unknownNodeType':
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.nodeTypeUnavailable', { type: parseResult.detail }) })
|
||||
return
|
||||
case 'unknownTool':
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.toolUnavailable', { tool: parseResult.detail }) })
|
||||
return
|
||||
default:
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.invalidFlowchart') })
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const existingStartNode = nodes.find(node => node.data.type === BlockEnum.Start)
|
||||
const newNodes: Node[] = []
|
||||
const nodeIdMap = new Map<string, Node>()
|
||||
|
||||
parseResult.nodes.forEach((nodeSpec) => {
|
||||
if (nodeSpec.type === BlockEnum.Start && existingStartNode) {
|
||||
nodeIdMap.set(nodeSpec.id, existingStartNode)
|
||||
return
|
||||
}
|
||||
|
||||
const nodeDefault = nodesMetaDataMap[nodeSpec.type]
|
||||
if (!nodeDefault)
|
||||
return
|
||||
|
||||
const defaultValue = nodeDefault.defaultValue || {}
|
||||
const title = nodeSpec.title?.trim() || nodeDefault.metaData.title || defaultValue.title || nodeSpec.type
|
||||
|
||||
const toolDefaultValue = nodeSpec.toolKey ? toolLookup.get(nodeSpec.toolKey) : undefined
|
||||
const desc = (toolDefaultValue?.tool_description || (defaultValue as { desc?: string }).desc || '') as string
|
||||
|
||||
const data = {
|
||||
...(defaultValue as Record<string, unknown>),
|
||||
title,
|
||||
desc,
|
||||
type: nodeSpec.type,
|
||||
selected: false,
|
||||
...(toolDefaultValue || {}),
|
||||
}
|
||||
|
||||
const newNode = generateNewNode({
|
||||
id: uuid4(),
|
||||
type: getNodeCustomTypeByNodeDataType(nodeSpec.type),
|
||||
data,
|
||||
position: { x: 0, y: 0 },
|
||||
}).newNode
|
||||
|
||||
newNodes.push(newNode)
|
||||
nodeIdMap.set(nodeSpec.id, newNode)
|
||||
})
|
||||
|
||||
if (!newNodes.length) {
|
||||
Toast.notify({ type: 'error', message: t('workflow.vibe.invalidFlowchart') })
|
||||
return
|
||||
}
|
||||
|
||||
const buildEdge = (
|
||||
source: Node,
|
||||
target: Node,
|
||||
sourceHandle = 'source',
|
||||
targetHandle = 'target',
|
||||
): Edge => ({
|
||||
id: `${source.id}-${sourceHandle}-${target.id}-${targetHandle}`,
|
||||
type: CUSTOM_EDGE,
|
||||
source: source.id,
|
||||
sourceHandle,
|
||||
target: target.id,
|
||||
targetHandle,
|
||||
data: {
|
||||
sourceType: source.data.type,
|
||||
targetType: target.data.type,
|
||||
isInIteration: false,
|
||||
isInLoop: false,
|
||||
_connectedNodeIsSelected: false,
|
||||
},
|
||||
zIndex: 0,
|
||||
})
|
||||
|
||||
const newEdges: Edge[] = []
|
||||
parseResult.edges.forEach((edgeSpec) => {
|
||||
const sourceNode = nodeIdMap.get(edgeSpec.sourceId)
|
||||
const targetNode = nodeIdMap.get(edgeSpec.targetId)
|
||||
if (!sourceNode || !targetNode)
|
||||
return
|
||||
|
||||
let sourceHandle = 'source'
|
||||
if (sourceNode.data.type === BlockEnum.IfElse) {
|
||||
const branchLabel = normalizeBranchLabel(edgeSpec.label)
|
||||
if (branchLabel === 'true') {
|
||||
sourceHandle = (sourceNode.data as { cases?: { case_id: string }[] })?.cases?.[0]?.case_id || 'true'
|
||||
}
|
||||
if (branchLabel === 'false') {
|
||||
sourceHandle = 'false'
|
||||
}
|
||||
}
|
||||
|
||||
newEdges.push(buildEdge(sourceNode, targetNode, sourceHandle))
|
||||
})
|
||||
|
||||
const bounds = nodes.reduce(
|
||||
(acc, node) => {
|
||||
const width = node.width ?? NODE_WIDTH
|
||||
acc.maxX = Math.max(acc.maxX, node.position.x + width)
|
||||
acc.minY = Math.min(acc.minY, node.position.y)
|
||||
return acc
|
||||
},
|
||||
{ maxX: 0, minY: 0 },
|
||||
)
|
||||
|
||||
const baseX = nodes.length ? bounds.maxX + NODE_WIDTH_X_OFFSET : 0
|
||||
const baseY = Number.isFinite(bounds.minY) ? bounds.minY : 0
|
||||
const branchOffset = Math.max(120, NODE_WIDTH_X_OFFSET / 2)
|
||||
|
||||
const layoutNodeIds = new Set(newNodes.map(node => node.id))
|
||||
const layoutEdges = newEdges.filter(edge =>
|
||||
layoutNodeIds.has(edge.source) && layoutNodeIds.has(edge.target),
|
||||
)
|
||||
|
||||
try {
|
||||
const layout = await getLayoutByDagre(newNodes, layoutEdges)
|
||||
const layoutedNodes = newNodes.map((node) => {
|
||||
const info = layout.nodes.get(node.id)
|
||||
if (!info)
|
||||
return node
|
||||
return {
|
||||
...node,
|
||||
position: {
|
||||
x: baseX + info.x,
|
||||
y: baseY + info.y,
|
||||
},
|
||||
}
|
||||
})
|
||||
newNodes.splice(0, newNodes.length, ...layoutedNodes)
|
||||
}
|
||||
catch {
|
||||
newNodes.forEach((node, index) => {
|
||||
const row = Math.floor(index / 4)
|
||||
const col = index % 4
|
||||
node.position = {
|
||||
x: baseX + col * NODE_WIDTH_X_OFFSET,
|
||||
y: baseY + row * branchOffset,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const allNodes = [...nodes, ...newNodes]
|
||||
const nodesConnectedMap = getNodesConnectedSourceOrTargetHandleIdsMap(
|
||||
newEdges.map(edge => ({ type: 'add', edge })),
|
||||
allNodes,
|
||||
)
|
||||
|
||||
const updatedNodes = allNodes.map((node) => {
|
||||
const connected = nodesConnectedMap[node.id]
|
||||
if (!connected)
|
||||
return node
|
||||
|
||||
return {
|
||||
...node,
|
||||
data: {
|
||||
...node.data,
|
||||
...connected,
|
||||
_connectedSourceHandleIds: dedupeHandles(connected._connectedSourceHandleIds),
|
||||
_connectedTargetHandleIds: dedupeHandles(connected._connectedTargetHandleIds),
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
setNodes(updatedNodes)
|
||||
setEdges([...edges, ...newEdges])
|
||||
saveStateToHistory(WorkflowHistoryEvent.NodeAdd, { nodeId: newNodes[0].id })
|
||||
handleSyncWorkflowDraft()
|
||||
}
|
||||
finally {
|
||||
isGeneratingRef.current = false
|
||||
}
|
||||
}, [
|
||||
availableNodesList,
|
||||
getNodesReadOnly,
|
||||
handleSyncWorkflowDraft,
|
||||
modelConfig,
|
||||
nodeTypeLookup,
|
||||
nodesMetaDataMap,
|
||||
saveStateToHistory,
|
||||
store,
|
||||
t,
|
||||
toolLookup,
|
||||
toolOptions,
|
||||
])
|
||||
|
||||
useEffect(() => {
|
||||
const handler = (event: CustomEvent<VibeCommandDetail>) => {
|
||||
handleVibeCommand(event.detail?.dsl)
|
||||
}
|
||||
|
||||
document.addEventListener(VIBE_COMMAND_EVENT, handler as EventListener)
|
||||
|
||||
return () => {
|
||||
document.removeEventListener(VIBE_COMMAND_EVENT, handler as EventListener)
|
||||
}
|
||||
}, [handleVibeCommand])
|
||||
|
||||
return null
|
||||
}
|
||||
|
|
@ -67,6 +67,7 @@ import {
|
|||
useWorkflow,
|
||||
useWorkflowReadOnly,
|
||||
useWorkflowRefreshDraft,
|
||||
useWorkflowVibe,
|
||||
} from './hooks'
|
||||
import { HooksStoreContextProvider, useHooksStore } from './hooks-store'
|
||||
import { useWorkflowSearch } from './hooks/use-workflow-search'
|
||||
|
|
@ -318,6 +319,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
|
|||
useShortcuts()
|
||||
// Initialize workflow node search functionality
|
||||
useWorkflowSearch()
|
||||
useWorkflowVibe()
|
||||
|
||||
// Set up scroll to node event listener using the utility function
|
||||
useEffect(() => {
|
||||
|
|
|
|||
|
|
@ -333,6 +333,9 @@ const translation = {
|
|||
feedbackDesc: 'Open community feedback discussions',
|
||||
zenTitle: 'Zen Mode',
|
||||
zenDesc: 'Toggle canvas focus mode',
|
||||
vibeTitle: 'Vibe Workflow',
|
||||
vibeDesc: 'Generate a workflow from natural language',
|
||||
vibeHint: 'Describe the workflow, e.g. "{{prompt}}"',
|
||||
},
|
||||
emptyState: {
|
||||
noAppsFound: 'No apps found',
|
||||
|
|
|
|||
|
|
@ -123,6 +123,16 @@ const translation = {
|
|||
noHistory: 'No History',
|
||||
tagBound: 'Number of apps using this tag',
|
||||
},
|
||||
vibe: {
|
||||
readOnly: 'This workflow is read-only.',
|
||||
missingInstruction: 'Describe the workflow you want to build.',
|
||||
modelUnavailable: 'No model available for flowchart generation.',
|
||||
nodesUnavailable: 'Workflow nodes are not available yet.',
|
||||
missingFlowchart: 'No flowchart was generated.',
|
||||
invalidFlowchart: 'The generated flowchart could not be parsed.',
|
||||
nodeTypeUnavailable: 'Node type "{{type}}" is not available in this workflow.',
|
||||
toolUnavailable: 'Tool "{{tool}}" is not available in this workspace.',
|
||||
},
|
||||
publishLimit: {
|
||||
startNodeTitlePrefix: 'Upgrade to',
|
||||
startNodeTitleSuffix: 'unlock unlimited triggers per workflow',
|
||||
|
|
|
|||
|
|
@ -19,6 +19,11 @@ export type GenRes = {
|
|||
error?: string
|
||||
}
|
||||
|
||||
/**
 * Response shape of the `/flowchart-generate` endpoint.
 *
 * `flowchart` carries the LLM-generated workflow flowchart as raw text
 * (presumably a DSL/mermaid-like description consumed by the workflow
 * canvas — confirm against the backend FlowchartGenerateApi).
 * `error` is populated instead when generation fails.
 */
export type FlowchartGenRes = {
  flowchart: string
  error?: string
}
|
||||
|
||||
export type CodeGenRes = {
|
||||
code: string
|
||||
language: string[]
|
||||
|
|
@ -93,6 +98,12 @@ export const generateRule = (body: Record<string, any>) => {
|
|||
})
|
||||
}
|
||||
|
||||
export const generateFlowchart = (body: Record<string, any>) => {
|
||||
return post<FlowchartGenRes>('/flowchart-generate', {
|
||||
body,
|
||||
})
|
||||
}
|
||||
|
||||
export const fetchModelParams = (providerName: string, modelId: string) => {
|
||||
return get(`workspaces/current/model-providers/${providerName}/models/parameter-rules`, {
|
||||
params: {
|
||||
|
|
|
|||
Loading…
Reference in New Issue