Merge remote-tracking branch 'origin/feat/rag-2' into feat/rag-2

This commit is contained in:
jyong 2025-08-27 17:46:05 +08:00
commit 6c8212d509
33 changed files with 1134 additions and 701 deletions

View File

@ -1,6 +1,6 @@
#!/bin/bash
npm add -g pnpm@10.15.0
corepack enable
cd web && pnpm install
pipx install uv

View File

@ -1,19 +1,20 @@
from flask import Blueprint
from flask_restx import Namespace
from libs.external_api import ExternalApi
from .files import FileApi
from .remote_files import RemoteFileInfoApi, RemoteFileUploadApi
bp = Blueprint("web", __name__, url_prefix="/api")
api = ExternalApi(bp)
# Files
api.add_resource(FileApi, "/files/upload")
api = ExternalApi(
bp,
version="1.0",
title="Web API",
description="Public APIs for web applications including file uploads, chat interactions, and app management",
doc="/docs", # Enable Swagger UI at /api/docs
)
# Remote files
api.add_resource(RemoteFileInfoApi, "/remote-files/<path:url>")
api.add_resource(RemoteFileUploadApi, "/remote-files/upload")
# Create namespace
web_ns = Namespace("web", description="Web application API operations", path="/")
from . import (
app,
@ -21,11 +22,15 @@ from . import (
completion,
conversation,
feature,
files,
forgot_password,
login,
message,
passport,
remote_files,
saved_message,
site,
workflow,
)
api.add_namespace(web_ns)

View File

@ -1,12 +1,21 @@
from flask_restx import Resource
from controllers.web import api
from controllers.web import web_ns
from services.feature_service import FeatureService
@web_ns.route("/system-features")
class SystemFeatureApi(Resource):
    # Public, unauthenticated endpoint: exposes feature flags so the web
    # client can toggle UI functionality before any login occurs.
    @web_ns.doc("get_system_features")
    @web_ns.doc(description="Get system feature flags and configuration")
    @web_ns.doc(responses={200: "System features retrieved successfully", 500: "Internal server error"})
    def get(self):
        """Get system feature flags and configuration.

        Returns the current system feature flags and configuration
        that control various functionalities across the platform.

        Returns:
            dict: System feature configuration object, produced by
            ``model_dump()`` on the pydantic model returned by
            ``FeatureService.get_system_features()``.
        """
        # model_dump() serializes the pydantic settings model to a plain
        # dict so Flask-RESTX can emit it as the JSON response body.
        return FeatureService.get_system_features().model_dump()
api.add_resource(SystemFeatureApi, "/system-features")

View File

@ -9,14 +9,50 @@ from controllers.common.errors import (
TooManyFilesError,
UnsupportedFileTypeError,
)
from controllers.web import web_ns
from controllers.web.wraps import WebApiResource
from fields.file_fields import file_fields
from fields.file_fields import build_file_model
from services.file_service import FileService
@web_ns.route("/files/upload")
class FileApi(WebApiResource):
@marshal_with(file_fields)
@web_ns.doc("upload_file")
@web_ns.doc(description="Upload a file for use in web applications")
@web_ns.doc(
responses={
201: "File uploaded successfully",
400: "Bad request - invalid file or parameters",
413: "File too large",
415: "Unsupported file type",
}
)
@marshal_with(build_file_model(web_ns))
def post(self, app_model, end_user):
"""Upload a file for use in web applications.
Accepts file uploads for use within web applications, supporting
multiple file types with automatic validation and storage.
Args:
app_model: The associated application model
end_user: The end user uploading the file
Form Parameters:
file: The file to upload (required)
source: Optional source type (datasets or None)
Returns:
dict: File information including ID, URL, and metadata
int: HTTP status code 201 for success
Raises:
NoFileUploadedError: No file provided in request
TooManyFilesError: Multiple files provided (only one allowed)
FilenameNotExistsError: File has no filename
FileTooLargeError: File exceeds size limit
UnsupportedFileTypeError: File type not supported
"""
if "file" not in request.files:
raise NoFileUploadedError()

View File

@ -16,7 +16,7 @@ from controllers.console.auth.error import (
)
from controllers.console.error import EmailSendIpLimitError
from controllers.console.wraps import email_password_login_enabled, only_edition_enterprise, setup_required
from controllers.web import api
from controllers.web import web_ns
from extensions.ext_database import db
from libs.helper import email, extract_remote_ip
from libs.password import hash_password, valid_password
@ -24,10 +24,21 @@ from models.account import Account
from services.account_service import AccountService
@web_ns.route("/forgot-password")
class ForgotPasswordSendEmailApi(Resource):
@only_edition_enterprise
@setup_required
@email_password_login_enabled
@web_ns.doc("send_forgot_password_email")
@web_ns.doc(description="Send password reset email")
@web_ns.doc(
responses={
200: "Password reset email sent successfully",
400: "Bad request - invalid email format",
404: "Account not found",
429: "Too many requests - rate limit exceeded",
}
)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
@ -54,10 +65,16 @@ class ForgotPasswordSendEmailApi(Resource):
return {"result": "success", "data": token}
@web_ns.route("/forgot-password/validity")
class ForgotPasswordCheckApi(Resource):
@only_edition_enterprise
@setup_required
@email_password_login_enabled
@web_ns.doc("check_forgot_password_token")
@web_ns.doc(description="Verify password reset token validity")
@web_ns.doc(
responses={200: "Token is valid", 400: "Bad request - invalid token format", 401: "Invalid or expired token"}
)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
@ -94,10 +111,21 @@ class ForgotPasswordCheckApi(Resource):
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
@web_ns.route("/forgot-password/resets")
class ForgotPasswordResetApi(Resource):
@only_edition_enterprise
@setup_required
@email_password_login_enabled
@web_ns.doc("reset_password")
@web_ns.doc(description="Reset user password with verification token")
@web_ns.doc(
responses={
200: "Password reset successfully",
400: "Bad request - invalid parameters or password mismatch",
401: "Invalid or expired token",
404: "Account not found",
}
)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
@ -141,8 +169,3 @@ class ForgotPasswordResetApi(Resource):
account.password = base64.b64encode(password_hashed).decode()
account.password_salt = base64.b64encode(salt).decode()
session.commit()
api.add_resource(ForgotPasswordSendEmailApi, "/forgot-password")
api.add_resource(ForgotPasswordCheckApi, "/forgot-password/validity")
api.add_resource(ForgotPasswordResetApi, "/forgot-password/resets")

View File

@ -9,18 +9,30 @@ from controllers.console.auth.error import (
)
from controllers.console.error import AccountBannedError
from controllers.console.wraps import only_edition_enterprise, setup_required
from controllers.web import api
from controllers.web import web_ns
from libs.helper import email
from libs.password import valid_password
from services.account_service import AccountService
from services.webapp_auth_service import WebAppAuthService
@web_ns.route("/login")
class LoginApi(Resource):
"""Resource for web app email/password login."""
@setup_required
@only_edition_enterprise
@web_ns.doc("web_app_login")
@web_ns.doc(description="Authenticate user for web application access")
@web_ns.doc(
responses={
200: "Authentication successful",
400: "Bad request - invalid email or password format",
401: "Authentication failed - email or password mismatch",
403: "Account banned or login disabled",
404: "Account not found",
}
)
def post(self):
"""Authenticate user and login."""
parser = reqparse.RequestParser()
@ -51,9 +63,19 @@ class LoginApi(Resource):
# return {"result": "success"}
@web_ns.route("/email-code-login")
class EmailCodeLoginSendEmailApi(Resource):
@setup_required
@only_edition_enterprise
@web_ns.doc("send_email_code_login")
@web_ns.doc(description="Send email verification code for login")
@web_ns.doc(
responses={
200: "Email code sent successfully",
400: "Bad request - invalid email format",
404: "Account not found",
}
)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
@ -74,9 +96,20 @@ class EmailCodeLoginSendEmailApi(Resource):
return {"result": "success", "data": token}
@web_ns.route("/email-code-login/validity")
class EmailCodeLoginApi(Resource):
@setup_required
@only_edition_enterprise
@web_ns.doc("verify_email_code_login")
@web_ns.doc(description="Verify email code and complete login")
@web_ns.doc(
responses={
200: "Email code verified and login successful",
400: "Bad request - invalid code or token",
401: "Invalid token or expired code",
404: "Account not found",
}
)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=str, required=True, location="json")
@ -104,9 +137,3 @@ class EmailCodeLoginApi(Resource):
token = WebAppAuthService.login(account=account)
AccountService.reset_login_error_rate_limit(args["email"])
return {"result": "success", "data": {"access_token": token}}
api.add_resource(LoginApi, "/login")
# api.add_resource(LogoutApi, "/logout")
api.add_resource(EmailCodeLoginSendEmailApi, "/email-code-login")
api.add_resource(EmailCodeLoginApi, "/email-code-login/validity")

View File

@ -7,7 +7,7 @@ from sqlalchemy import func, select
from werkzeug.exceptions import NotFound, Unauthorized
from configs import dify_config
from controllers.web import api
from controllers.web import web_ns
from controllers.web.error import WebAppAuthRequiredError
from extensions.ext_database import db
from libs.passport import PassportService
@ -17,9 +17,19 @@ from services.feature_service import FeatureService
from services.webapp_auth_service import WebAppAuthService, WebAppAuthType
@web_ns.route("/passport")
class PassportResource(Resource):
"""Base resource for passport."""
@web_ns.doc("get_passport")
@web_ns.doc(description="Get authentication passport for web application access")
@web_ns.doc(
responses={
200: "Passport retrieved successfully",
401: "Unauthorized - missing app code or invalid authentication",
404: "Application or user not found",
}
)
def get(self):
system_features = FeatureService.get_system_features()
app_code = request.headers.get("X-App-Code")
@ -94,9 +104,6 @@ class PassportResource(Resource):
}
api.add_resource(PassportResource, "/passport")
def decode_enterprise_webapp_user_id(jwt_token: str | None):
"""
Decode the enterprise user session from the Authorization header.

View File

@ -10,16 +10,44 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.web import web_ns
from controllers.web.wraps import WebApiResource
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model
from services.file_service import FileService
@web_ns.route("/remote-files/<path:url>")
class RemoteFileInfoApi(WebApiResource):
@marshal_with(remote_file_info_fields)
@web_ns.doc("get_remote_file_info")
@web_ns.doc(description="Get information about a remote file")
@web_ns.doc(
responses={
200: "Remote file information retrieved successfully",
400: "Bad request - invalid URL",
404: "Remote file not found",
500: "Failed to fetch remote file",
}
)
@marshal_with(build_remote_file_info_model(web_ns))
def get(self, app_model, end_user, url):
"""Get information about a remote file.
Retrieves basic information about a file located at a remote URL,
including content type and content length.
Args:
app_model: The associated application model
end_user: The end user making the request
url: URL-encoded path to the remote file
Returns:
dict: Remote file information including type and length
Raises:
HTTPException: If the remote file cannot be accessed
"""
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
if resp.status_code != httpx.codes.OK:
@ -32,9 +60,42 @@ class RemoteFileInfoApi(WebApiResource):
}
@web_ns.route("/remote-files/upload")
class RemoteFileUploadApi(WebApiResource):
@marshal_with(file_fields_with_signed_url)
def post(self, app_model, end_user): # Add app_model and end_user parameters
@web_ns.doc("upload_remote_file")
@web_ns.doc(description="Upload a file from a remote URL")
@web_ns.doc(
responses={
201: "Remote file uploaded successfully",
400: "Bad request - invalid URL or parameters",
413: "File too large",
415: "Unsupported file type",
500: "Failed to fetch remote file",
}
)
@marshal_with(build_file_with_signed_url_model(web_ns))
def post(self, app_model, end_user):
"""Upload a file from a remote URL.
Downloads a file from the provided remote URL and uploads it
to the platform storage for use in web applications.
Args:
app_model: The associated application model
end_user: The end user making the request
JSON Parameters:
url: The remote URL to download the file from (required)
Returns:
dict: File information including ID, signed URL, and metadata
int: HTTP status code 201 for success
Raises:
RemoteFileUploadError: Failed to fetch file from remote URL
FileTooLargeError: File exceeds size limit
UnsupportedFileTypeError: File type not supported
"""
parser = reqparse.RequestParser()
parser.add_argument("url", type=str, required=True, help="URL is required")
args = parser.parse_args()

View File

@ -27,10 +27,11 @@ const ChunkDetailModal: FC<Props> = ({
}) => {
const { t } = useTranslation()
const { segment, score, child_chunks } = payload
const { position, content, sign_content, keywords, document } = segment
const { position, content, sign_content, keywords, document, answer } = segment
const isParentChildRetrieval = !!(child_chunks && child_chunks.length > 0)
const extension = document.name.split('.').slice(-1)[0] as FileAppearanceTypeEnum
const heighClassName = isParentChildRetrieval ? 'h-[min(627px,_80vh)] overflow-y-auto' : 'h-[min(539px,_80vh)] overflow-y-auto'
const labelPrefix = isParentChildRetrieval ? t('datasetDocuments.segment.parentChunk') : t('datasetDocuments.segment.chunk')
return (
<Modal
title={t(`${i18nPrefix}.chunkDetail`)}
@ -45,7 +46,7 @@ const ChunkDetailModal: FC<Props> = ({
<div className='flex items-center justify-between'>
<div className='flex grow items-center space-x-2'>
<SegmentIndexTag
labelPrefix={`${isParentChildRetrieval ? 'Parent-' : ''}Chunk`}
labelPrefix={labelPrefix}
positionId={position}
className={cn('w-fit group-hover:opacity-100')}
/>
@ -57,11 +58,29 @@ const ChunkDetailModal: FC<Props> = ({
</div>
<Score value={score} />
</div>
<Markdown
className={cn('!mt-2 !text-text-secondary', heighClassName)}
content={sign_content || content}
customDisallowedElements={['input']}
/>
{!answer && (
<Markdown
className={cn('!mt-2 !text-text-secondary', heighClassName)}
content={sign_content || content}
customDisallowedElements={['input']}
/>
)}
{answer && (
<div>
<div className='flex gap-x-1'>
<div className='w-4 shrink-0 text-[13px] font-medium leading-[20px] text-text-tertiary'>Q</div>
<div className={cn('body-md-regular line-clamp-20 text-text-secondary')}>
{content}
</div>
</div>
<div className='flex gap-x-1'>
<div className='w-4 shrink-0 text-[13px] font-medium leading-[20px] text-text-tertiary'>A</div>
<div className={cn('body-md-regular line-clamp-20 text-text-secondary')}>
{answer}
</div>
</div>
</div>
)}
{!isParentChildRetrieval && keywords && keywords.length > 0 && (
<div className='mt-6'>
<div className='text-xs font-medium uppercase text-text-tertiary'>{t(`${i18nPrefix}.keyword`)}</div>

View File

@ -163,7 +163,7 @@ const Preparation = () => {
{datasourceType === DatasourceType.localFile && (
<LocalFile
allowedExtensions={datasource!.nodeData.fileExtensions || []}
notSupportBatchUpload={false} // only support single file upload in test run
notSupportBatchUpload // only support single file upload in test run
/>
)}
{datasourceType === DatasourceType.onlineDocument && (

View File

@ -11,10 +11,12 @@ import type {
import { useIsChatMode } from './use-workflow'
import { useStoreApi } from 'reactflow'
import type { Type } from '../nodes/llm/types'
import useMatchSchemaType from '../nodes/_base/components/variable/use-match-schema-type'
export const useWorkflowVariables = () => {
const { t } = useTranslation()
const workflowStore = useWorkflowStore()
const { getMatchedSchemaType } = useMatchSchemaType()
const getNodeAvailableVars = useCallback(({
parentNode,
@ -57,8 +59,9 @@ export const useWorkflowVariables = () => {
mcpTools,
dataSourceList: dataSourceList ?? [],
},
getMatchedSchemaType,
})
}, [t, workflowStore])
}, [t, workflowStore, getMatchedSchemaType])
const getCurrentVariableType = useCallback(({
parentNode,
@ -105,6 +108,7 @@ export const useWorkflowVariables = () => {
mcpTools,
dataSourceList: dataSourceList ?? [],
},
getMatchedSchemaType,
})
}, [workflowStore])

View File

@ -221,7 +221,7 @@ const findExceptVarInObject = (obj: any, filterVar: (payload: Var, selector: Val
variable: obj.variable,
type: isFile ? VarType.file : VarType.object,
children: childrenResult,
alias: obj.alias,
schemaType: obj.schemaType,
}
return res
@ -233,6 +233,7 @@ const formatItem = (
filterVar: (payload: Var, selector: ValueSelector) => boolean,
allPluginInfoList: Record<string, ToolWithProvider[]>,
ragVars?: Var[],
getMatchedSchemaType = (_obj: any) => '',
): NodeOutPutVar => {
const { id, data } = item
@ -414,7 +415,7 @@ const formatItem = (
}
case BlockEnum.Tool: {
const toolOutputVars = ToolNodeDefault.getOutputVars?.(data as ToolNodeType, allPluginInfoList) || []
const toolOutputVars = ToolNodeDefault.getOutputVars?.(data as ToolNodeType, allPluginInfoList, [], { getMatchedSchemaType }) || []
res.vars = toolOutputVars
break
}
@ -510,7 +511,7 @@ const formatItem = (
case BlockEnum.DataSource: {
const payload = data as DataSourceNodeType
const dataSourceVars = DataSourceNodeDefault.getOutputVars?.(payload, allPluginInfoList, ragVars) || []
const dataSourceVars = DataSourceNodeDefault.getOutputVars?.(payload, allPluginInfoList, ragVars, { getMatchedSchemaType }) || []
res.vars = dataSourceVars
break
}
@ -640,6 +641,7 @@ export const toNodeOutputVars = (
conversationVariables: ConversationVariable[] = [],
ragVariables: RAGPipelineVariable[] = [],
allPluginInfoList: Record<string, ToolWithProvider[]>,
getMatchedSchemaType = (_obj: any) => '',
): NodeOutPutVar[] => {
// ENV_NODE data format
const ENV_NODE = {
@ -697,7 +699,7 @@ export const toNodeOutputVars = (
description: ragVariable.label,
isRagVariable: true,
} as Var),
)),
), getMatchedSchemaType),
isStartNode: node.data.type === BlockEnum.Start,
}
}).filter(item => item.vars.length > 0)
@ -822,6 +824,7 @@ export const getVarType = ({
conversationVariables = [],
ragVariables = [],
allPluginInfoList,
getMatchedSchemaType,
}: {
valueSelector: ValueSelector
parentNode?: Node | null
@ -834,6 +837,7 @@ export const getVarType = ({
conversationVariables?: ConversationVariable[]
ragVariables?: RAGPipelineVariable[]
allPluginInfoList: Record<string, ToolWithProvider[]>
getMatchedSchemaType: (obj: any) => string
}): VarType => {
if (isConstant)
return VarType.string
@ -846,6 +850,7 @@ export const getVarType = ({
conversationVariables,
ragVariables,
allPluginInfoList,
getMatchedSchemaType,
)
const isIterationInnerVar = parentNode?.data.type === BlockEnum.Iteration
@ -972,6 +977,7 @@ export const toNodeAvailableVars = ({
ragVariables,
filterVar,
allPluginInfoList,
getMatchedSchemaType,
}: {
parentNode?: Node | null
t?: any
@ -986,6 +992,7 @@ export const toNodeAvailableVars = ({
ragVariables?: RAGPipelineVariable[]
filterVar: (payload: Var, selector: ValueSelector) => boolean
allPluginInfoList: Record<string, ToolWithProvider[]>
getMatchedSchemaType: (obj: any) => string
}): NodeOutPutVar[] => {
const beforeNodesOutputVars = toNodeOutputVars(
beforeNodes,
@ -995,6 +1002,7 @@ export const toNodeAvailableVars = ({
conversationVariables,
ragVariables,
allPluginInfoList,
getMatchedSchemaType,
)
const isInIteration = parentNode?.data.type === BlockEnum.Iteration
if (isInIteration) {
@ -1008,6 +1016,7 @@ export const toNodeAvailableVars = ({
environmentVariables,
conversationVariables,
allPluginInfoList,
getMatchedSchemaType,
})
const itemChildren = itemType === VarType.file
? {

View File

@ -79,6 +79,7 @@ type Props = {
zIndex?: number
currentTool?: Tool
currentProvider?: ToolWithProvider
preferSchemaType?: boolean
}
const DEFAULT_VALUE_SELECTOR: Props['value'] = []
@ -111,6 +112,7 @@ const VarReferencePicker: FC<Props> = ({
zIndex,
currentTool,
currentProvider,
preferSchemaType,
}) => {
const { t } = useTranslation()
const store = useStoreApi()
@ -562,6 +564,7 @@ const VarReferencePicker: FC<Props> = ({
itemWidth={isAddBtnTrigger ? 260 : (minWidth || triggerWidth)}
isSupportFileVar={isSupportFileVar}
zIndex={zIndex}
preferSchemaType={preferSchemaType}
/>
)}
</PortalToFollowElemContent>

View File

@ -15,6 +15,7 @@ type Props = {
itemWidth?: number
isSupportFileVar?: boolean
zIndex?: number
preferSchemaType?: boolean
}
const VarReferencePopup: FC<Props> = ({
vars,
@ -23,6 +24,7 @@ const VarReferencePopup: FC<Props> = ({
itemWidth,
isSupportFileVar = true,
zIndex,
preferSchemaType,
}) => {
const { t } = useTranslation()
const pipelineId = useStore(s => s.pipelineId)
@ -69,6 +71,7 @@ const VarReferencePopup: FC<Props> = ({
zIndex={zIndex}
showManageInputField={showManageRagInputFields}
onManageInputField={() => setShowInputFieldPanel?.(true)}
preferSchemaType={preferSchemaType}
/>
}
</div >

View File

@ -34,6 +34,7 @@ type ObjectChildrenProps = {
onHovering?: (value: boolean) => void
itemWidth?: number
isSupportFileVar?: boolean
preferSchemaType?: boolean
}
type ItemProps = {
@ -51,6 +52,7 @@ type ItemProps = {
isInCodeGeneratorInstructionEditor?: boolean
zIndex?: number
className?: string
preferSchemaType?: boolean
}
const objVarTypes = [VarType.object, VarType.file]
@ -69,6 +71,7 @@ const Item: FC<ItemProps> = ({
isInCodeGeneratorInstructionEditor,
zIndex,
className,
preferSchemaType,
}) => {
const isStructureOutput = itemData.type === VarType.object && (itemData.children as StructuredOutput)?.schema?.properties
const isFile = itemData.type === VarType.file && !isStructureOutput
@ -211,7 +214,7 @@ const Item: FC<ItemProps> = ({
<div title={itemData.des} className='system-sm-medium ml-1 w-0 grow truncate text-text-secondary'>{itemData.variable.split('.').slice(-1)[0]}</div>
)}
</div>
<div className='ml-1 shrink-0 text-xs font-normal capitalize text-text-tertiary'>{itemData.alias || itemData.type}</div>
<div className='ml-1 shrink-0 text-xs font-normal capitalize text-text-tertiary'>{(preferSchemaType && itemData.schemaType) ? itemData.schemaType : itemData.type}</div>
{
(isObj || isStructureOutput) && (
<ChevronRight className={cn('ml-0.5 h-3 w-3 text-text-quaternary', isHovering && 'text-text-tertiary')} />
@ -224,7 +227,7 @@ const Item: FC<ItemProps> = ({
}}>
{(isStructureOutput || isObj) && (
<PickerStructurePanel
root={{ nodeId, nodeName: title, attrName: itemData.variable, attrAlias: itemData.alias }}
root={{ nodeId, nodeName: title, attrName: itemData.variable, attrAlias: itemData.schemaType }}
payload={structuredOutput!}
onHovering={setIsChildrenHovering}
onSelect={(valueSelector) => {
@ -246,6 +249,7 @@ const ObjectChildren: FC<ObjectChildrenProps> = ({
onHovering,
itemWidth,
isSupportFileVar,
preferSchemaType,
}) => {
const currObjPath = objPath
const itemRef = useRef<HTMLDivElement>(null)
@ -290,6 +294,7 @@ const ObjectChildren: FC<ObjectChildrenProps> = ({
onHovering={setIsChildrenHovering}
isSupportFileVar={isSupportFileVar}
isException={v.isException}
preferSchemaType={preferSchemaType}
/>
))
}
@ -312,6 +317,7 @@ type Props = {
showManageInputField?: boolean
onManageInputField?: () => void
autoFocus?: boolean
preferSchemaType?: boolean
}
const VarReferenceVars: FC<Props> = ({
hideSearch,
@ -328,6 +334,7 @@ const VarReferenceVars: FC<Props> = ({
showManageInputField,
onManageInputField,
autoFocus = true,
preferSchemaType,
}) => {
const { t } = useTranslation()
const [searchText, setSearchText] = useState('')
@ -417,6 +424,7 @@ const VarReferenceVars: FC<Props> = ({
isFlat={item.isFlat}
isInCodeGeneratorInstructionEditor={isInCodeGeneratorInstructionEditor}
zIndex={zIndex}
preferSchemaType={preferSchemaType}
/>
))}
{item.isFlat && !filteredVars[i + 1]?.isFlat && !!filteredVars.find(item => !item.isFlat) && (

View File

@ -74,6 +74,7 @@ import type { CustomRunFormProps } from '@/app/components/workflow/nodes/data-so
import { DataSourceClassification } from '@/app/components/workflow/nodes/data-source/types'
import { useModalContext } from '@/context/modal-context'
import DataSourceBeforeRunForm from '@/app/components/workflow/nodes/data-source/before-run-form'
import useInspectVarsCrud from '@/app/components/workflow/hooks/use-inspect-vars-crud'
const getCustomRunForm = (params: CustomRunFormProps): React.JSX.Element => {
const nodeType = params.payload.type
@ -222,10 +223,12 @@ const BasePanel: FC<BasePanelProps> = ({
runInputData,
runInputDataRef,
runResult,
setRunResult,
getInputVars,
toVarInputs,
tabType,
isRunAfterSingleRun,
setIsRunAfterSingleRun,
setTabType,
handleAfterCustomSingleRun,
singleRunParams,
@ -281,6 +284,10 @@ const BasePanel: FC<BasePanelProps> = ({
setShowAccountSettingModal({ payload: 'data-source' })
}, [setShowAccountSettingModal])
const {
appendNodeInspectVars,
} = useInspectVarsCrud()
if (logParams.showSpecialResultPanel) {
return (
<div className={cn(
@ -309,9 +316,16 @@ const BasePanel: FC<BasePanelProps> = ({
if (isShowSingleRun) {
const form = getCustomRunForm({
nodeId: id,
flowId: configsMap?.flowId || '',
flowType: configsMap?.flowType || FlowType.appFlow,
payload: data,
setRunResult,
setIsRunAfterSingleRun,
isPaused,
isRunAfterSingleRun,
onSuccess: handleAfterCustomSingleRun,
onCancel: hideSingleRun,
appendNodeInspectVars,
})
return (

View File

@ -174,7 +174,7 @@ const useLastRun = <T>({
})
const toSubmitData = useCallback((data: Record<string, any>) => {
if(!isIterationNode && !isLoopNode)
if (!isIterationNode && !isLoopNode)
return data
const allVarObject = singleRunParams?.allVarObject || {}
@ -183,7 +183,7 @@ const useLastRun = <T>({
const [varSectorStr, nodeId] = key.split(DELIMITER)
formattedData[`${nodeId}.${allVarObject[key].inSingleRunPassedKey}`] = data[varSectorStr]
})
if(isIterationNode) {
if (isIterationNode) {
const iteratorInputKey = `${id}.input_selector`
formattedData[iteratorInputKey] = data[iteratorInputKey]
}
@ -203,7 +203,7 @@ const useLastRun = <T>({
const initShowLastRunTab = useStore(s => s.initShowLastRunTab)
const [tabType, setTabType] = useState<TabType>(initShowLastRunTab ? TabType.lastRun : TabType.settings)
useEffect(() => {
if(initShowLastRunTab)
if (initShowLastRunTab)
setTabType(TabType.lastRun)
setInitShowLastRunTab(false)
@ -212,7 +212,7 @@ const useLastRun = <T>({
const handleRunWithParams = async (data: Record<string, any>) => {
const { isValid } = checkValid()
if(!isValid)
if (!isValid)
return
setNodeRunning()
setIsRunAfterSingleRun(true)
@ -236,14 +236,14 @@ const useLastRun = <T>({
const values: Record<string, boolean> = {}
form.inputs.forEach(({ variable, getVarValueFromDependent }) => {
const isGetValueFromDependent = getVarValueFromDependent || !variable.includes('.')
if(isGetValueFromDependent && !singleRunParams?.getDependentVar)
if (isGetValueFromDependent && !singleRunParams?.getDependentVar)
return
const selector = isGetValueFromDependent ? (singleRunParams?.getDependentVar(variable) || []) : variable.slice(1, -1).split('.')
if(!selector || selector.length === 0)
if (!selector || selector.length === 0)
return
const [nodeId, varName] = selector.slice(0, 2)
if(!isStartNode && nodeId === id) { // inner vars like loop vars
if (!isStartNode && nodeId === id) { // inner vars like loop vars
values[variable] = true
return
}
@ -257,7 +257,7 @@ const useLastRun = <T>({
}
const isAllVarsHasValue = (vars?: ValueSelector[]) => {
if(!vars || vars.length === 0)
if (!vars || vars.length === 0)
return true
return vars.every((varItem) => {
const [nodeId, varName] = varItem.slice(0, 2)
@ -267,7 +267,7 @@ const useLastRun = <T>({
}
const isSomeVarsHasValue = (vars?: ValueSelector[]) => {
if(!vars || vars.length === 0)
if (!vars || vars.length === 0)
return true
return vars.some((varItem) => {
const [nodeId, varName] = varItem.slice(0, 2)
@ -294,7 +294,7 @@ const useLastRun = <T>({
}
const checkAggregatorVarsSet = (vars: ValueSelector[][]) => {
if(!vars || vars.length === 0)
if (!vars || vars.length === 0)
return true
// in each group, at last one set is ok
return vars.every((varItem) => {
@ -310,9 +310,9 @@ const useLastRun = <T>({
const handleSingleRun = () => {
const { isValid } = checkValid()
if(!isValid)
if (!isValid)
return
if(isCustomRunNode) {
if (isCustomRunNode) {
showSingleRun()
return
}
@ -335,6 +335,7 @@ const useLastRun = <T>({
...oneStepRunRes,
tabType,
isRunAfterSingleRun,
setIsRunAfterSingleRun,
setTabType: handleTabClicked,
handleAfterCustomSingleRun,
singleRunParams,

View File

@ -663,6 +663,7 @@ const useOneStepRun = <T>({
runInputDataRef,
setRunInputData: handleSetRunInputData,
runResult,
setRunResult: doSetRunResult,
iterationRunResult,
loopRunResult,
setNodeRunning,

View File

@ -1,7 +1,7 @@
'use client'
import type { FC } from 'react'
import React, { useCallback } from 'react'
import type { CustomRunFormProps, DataSourceNodeType } from './types'
import type { CustomRunFormProps } from './types'
import { DatasourceType } from '@/models/pipeline'
import LocalFile from '@/app/components/datasets/documents/create-from-pipeline/data-source/local-file'
import OnlineDocuments from '@/app/components/datasets/documents/create-from-pipeline/data-source/online-documents'
@ -13,18 +13,24 @@ import Button from '@/app/components/base/button'
import { useTranslation } from 'react-i18next'
import DataSourceProvider from '@/app/components/datasets/documents/create-from-pipeline/data-source/store/provider'
import PanelWrap from '../_base/components/before-run-form/panel-wrap'
import useBeforeRunForm from './hooks/use-before-run-form'
const BeforeRunForm: FC<CustomRunFormProps> = ({
nodeId,
payload,
onSuccess,
onCancel,
}) => {
const BeforeRunForm: FC<CustomRunFormProps> = (props) => {
const {
nodeId,
payload,
onCancel,
} = props
const { t } = useTranslation()
const datasourceType = payload.provider_type
const datasourceNodeData = payload as DataSourceNodeType
const dataSourceStore = useDataSourceStore()
const {
isPending,
handleRunWithSyncDraft,
datasourceType,
datasourceNodeData,
} = useBeforeRunForm(props)
const { clearOnlineDocumentData } = useOnlineDocument()
const { clearWebsiteCrawlData } = useWebsiteCrawl()
const { clearOnlineDriveData } = useOnlineDrive()
@ -44,10 +50,6 @@ const BeforeRunForm: FC<CustomRunFormProps> = ({
setCurrentCredentialId(credentialId)
}, [dataSourceStore])
const handleRun = useCallback(() => {
onSuccess()
}, [onSuccess])
return (
<PanelWrap
nodeName={payload.title}
@ -57,13 +59,14 @@ const BeforeRunForm: FC<CustomRunFormProps> = ({
{datasourceType === DatasourceType.localFile && (
<LocalFile
allowedExtensions={datasourceNodeData.fileExtensions || []}
notSupportBatchUpload={false}
notSupportBatchUpload
/>
)}
{datasourceType === DatasourceType.onlineDocument && (
<OnlineDocuments
nodeId={nodeId}
nodeData={datasourceNodeData}
isInPipeline
onCredentialChange={handleCredentialChange}
/>
)}
@ -71,6 +74,7 @@ const BeforeRunForm: FC<CustomRunFormProps> = ({
<WebsiteCrawl
nodeId={nodeId}
nodeData={datasourceNodeData}
isInPipeline
onCredentialChange={handleCredentialChange}
/>
)}
@ -78,12 +82,22 @@ const BeforeRunForm: FC<CustomRunFormProps> = ({
<OnlineDrive
nodeId={nodeId}
nodeData={datasourceNodeData}
isInPipeline
onCredentialChange={handleCredentialChange}
/>
)}
<div className='flex justify-end gap-x-2'>
<Button onClick={onCancel}>{t('common.operation.cancel')}</Button>
<Button onClick={handleRun} variant='primary'>{t('workflow.singleRun.startRun')}</Button>
<Button onClick={onCancel}>
{t('common.operation.cancel')}
</Button>
<Button
onClick={handleRunWithSyncDraft}
variant='primary'
loading={isPending}
disabled={isPending}
>
{t('workflow.singleRun.startRun')}
</Button>
</div>
</div>
</PanelWrap>

View File

@ -8,7 +8,7 @@ import {
LOCAL_FILE_OUTPUT,
} from './constants'
import { VarType as VarKindType } from '@/app/components/workflow/nodes/tool/types'
import { getOutputVariableAlias } from '@/app/components/workflow/utils/tool'
import type { AnyObj } from '../_base/components/variable/match-schema-type'
const i18nPrefix = 'workflow.errorMsg'
@ -54,12 +54,13 @@ const nodeDefault: NodeDefault<DataSourceNodeType> = {
errorMessage,
}
},
getOutputVars(payload, allPluginInfoList, ragVars = []) {
getOutputVars(payload, allPluginInfoList, ragVars = [], { getMatchedSchemaType } = { getMatchedSchemaType: (_obj: AnyObj) => '' }) {
const {
plugin_id,
datasource_name,
provider_type,
} = payload
const isLocalFile = provider_type === DataSourceClassification.localFile
const currentDataSource = allPluginInfoList.dataSourceList?.find((ds: any) => ds.plugin_id === plugin_id)
const currentDataSourceItem = currentDataSource?.tools?.find((tool: any) => tool.name === datasource_name)
@ -70,19 +71,19 @@ const nodeDefault: NodeDefault<DataSourceNodeType> = {
Object.keys(output_schema.properties).forEach((outputKey) => {
const output = output_schema.properties[outputKey]
const dataType = output.type
const alias = getOutputVariableAlias(output.properties)
let type = dataType === 'array'
? `array[${output.items?.type.slice(0, 1).toLocaleLowerCase()}${output.items?.type.slice(1)}]`
: `${dataType.slice(0, 1).toLocaleLowerCase()}${dataType.slice(1)}`
const schemaType = getMatchedSchemaType?.(output)
if (type === 'object' && alias === 'file')
if (type === 'object' && schemaType === 'file')
type = 'file'
dynamicOutputSchema.push({
variable: outputKey,
type,
description: output.description,
alias,
schemaType,
children: output.type === 'object' ? {
schema: {
type: 'object',

View File

@ -0,0 +1,180 @@
import { useStoreApi } from 'reactflow'
import type { CustomRunFormProps, DataSourceNodeType } from '../types'
import { useEffect, useRef } from 'react'
import { useNodeDataUpdate, useNodesSyncDraft } from '../../../hooks'
import { NodeRunningStatus } from '../../../types'
import { useInvalidLastRun } from '@/service/use-workflow'
import type { NodeRunResult } from '@/types/workflow'
import { fetchNodeInspectVars } from '@/service/workflow'
import { FlowType } from '@/types/common'
import { useDatasourceSingleRun } from '@/service/use-pipeline'
import { useDataSourceStore } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store'
import { DatasourceType } from '@/models/pipeline'
import { TransferMethod } from '@/types/app'
/**
 * Drives the "before run" form of a data-source node in a RAG pipeline:
 * syncs the draft workflow, fires a datasource single run with the
 * datasource-specific payload gathered from the data-source store, and
 * reflects the run's status / inspect variables back onto the node.
 *
 * Returns the mutation's pending flag, the run handler for the form's
 * primary button, and the narrowed datasource type / node data.
 */
const useBeforeRunForm = ({
  nodeId,
  flowId,
  flowType,
  payload,
  setRunResult,
  isPaused,
  isRunAfterSingleRun,
  setIsRunAfterSingleRun,
  onSuccess,
  appendNodeInspectVars,
}: CustomRunFormProps) => {
  const store = useStoreApi()
  const dataSourceStore = useDataSourceStore()
  // Ref mirror of isPaused so async mutation callbacks read the latest
  // value instead of the one captured at render time.
  const isPausedRef = useRef(isPaused)
  const { handleNodeDataUpdate } = useNodeDataUpdate()
  const datasourceType = payload.provider_type as DatasourceType
  const datasourceNodeData = payload as DataSourceNodeType
  useEffect(() => {
    isPausedRef.current = isPaused
  }, [isPaused])
  const runningStatus = payload._singleRunningStatus || NodeRunningStatus.NotStart
  // Flag the node as running so the canvas shows the single-run state.
  const setNodeRunning = () => {
    handleNodeDataUpdate({
      id: nodeId,
      data: {
        ...payload,
        _singleRunningStatus: NodeRunningStatus.Running,
      },
    })
  }
  const invalidLastRun = useInvalidLastRun(flowType, flowId, nodeId)
  // Consume the run result: either surface it via setRunResult, or fetch
  // the node's inspect vars and notify success.
  const updateRunResult = async (data: NodeRunResult) => {
    const isPaused = isPausedRef.current
    // The backend doesn't support pausing a single run, so the frontend
    // handles the pause state by discarding the result.
    if (isPaused)
      return
    const canRunLastRun = !isRunAfterSingleRun || runningStatus === NodeRunningStatus.Succeeded
    if (!canRunLastRun) {
      setRunResult(data)
      return
    }
    // A failed run may still update the inspect vars when the node has an
    // error default output configured.
    // NOTE(review): FlowType.ragPipeline is hard-coded here although a
    // flowType prop exists — confirm this hook is only used in RAG pipelines.
    const vars = await fetchNodeInspectVars(FlowType.ragPipeline, flowId, nodeId)
    const { getNodes } = store.getState()
    const nodes = getNodes()
    appendNodeInspectVars(nodeId, vars, nodes)
    if (data?.status === NodeRunningStatus.Succeeded)
      onSuccess()
  }
  const { mutateAsync: handleDatasourceSingleRun, isPending } = useDatasourceSingleRun()
  // Build the datasource_info payload for the active datasource type from
  // the data-source store, then trigger the single run.
  const handleRun = () => {
    let datasourceInfo: Record<string, any> = {}
    const { currentCredentialId: credentialId } = dataSourceStore.getState()
    if (datasourceType === DatasourceType.localFile) {
      // Only the first selected local file is used for a single run.
      const { localFileList } = dataSourceStore.getState()
      const { id, name, type, size, extension, mime_type } = localFileList[0].file
      const documentInfo = {
        related_id: id,
        name,
        type,
        size,
        extension,
        mime_type,
        url: '',
        transfer_method: TransferMethod.local_file,
      }
      datasourceInfo = documentInfo
    }
    if (datasourceType === DatasourceType.onlineDocument) {
      const { onlineDocuments } = dataSourceStore.getState()
      const { workspace_id, ...rest } = onlineDocuments[0]
      const documentInfo = {
        workspace_id,
        page: rest,
        credential_id: credentialId,
      }
      datasourceInfo = documentInfo
    }
    if (datasourceType === DatasourceType.websiteCrawl) {
      const { websitePages } = dataSourceStore.getState()
      datasourceInfo = {
        ...websitePages[0],
        credential_id: credentialId,
      }
    }
    if (datasourceType === DatasourceType.onlineDrive) {
      const { bucket, fileList, selectedFileIds } = dataSourceStore.getState()
      const file = fileList.find(file => file.id === selectedFileIds[0])
      datasourceInfo = {
        bucket,
        id: file?.id,
        type: file?.type,
        credential_id: credentialId,
      }
    }
    // Shared with onSettled below: a failed run must not be re-marked as
    // succeeded after onError has set the Failed status.
    let hasError = false
    handleDatasourceSingleRun({
      pipeline_id: flowId,
      start_node_id: nodeId,
      start_node_title: datasourceNodeData.title,
      datasource_type: datasourceType,
      datasource_info: datasourceInfo,
    }, {
      onError: () => {
        hasError = true
        invalidLastRun()
        if (isPausedRef.current)
          return
        handleNodeDataUpdate({
          id: nodeId,
          data: {
            ...payload,
            _isSingleRun: false,
            _singleRunningStatus: NodeRunningStatus.Failed,
          },
        })
      },
      onSettled: (data) => {
        // On error `data` is undefined; updateRunResult guards via `data?.`.
        updateRunResult(data!)
        if (!hasError && !isPausedRef.current) {
          handleNodeDataUpdate({
            id: nodeId,
            data: {
              ...payload,
              _isSingleRun: false,
              _singleRunningStatus: NodeRunningStatus.Succeeded,
            },
          })
        }
      },
    })
  }
  const { handleSyncWorkflowDraft } = useNodesSyncDraft()
  // Persist the draft first so the backend runs the latest node config,
  // then start the single run.
  const handleRunWithSyncDraft = () => {
    setNodeRunning()
    setIsRunAfterSingleRun(true)
    handleSyncWorkflowDraft(true, true, {
      onSuccess() {
        handleRun()
      },
    })
  }
  return {
    isPending,
    handleRunWithSyncDraft,
    datasourceType,
    datasourceNodeData,
  }
}

View File

@ -1,4 +1,7 @@
import type { CommonNodeType, ValueSelector } from '@/app/components/workflow/types'
import type { CommonNodeType, Node, ValueSelector } from '@/app/components/workflow/types'
import type { FlowType } from '@/types/common'
import type { NodeRunResult, VarInInspect } from '@/types/workflow'
import type { Dispatch, SetStateAction } from 'react'
export enum VarType {
variable = 'variable',
@ -31,7 +34,14 @@ export type DataSourceNodeType = CommonNodeType & {
/** Props passed to a node's custom single-run form (e.g. the data-source before-run form). */
export type CustomRunFormProps = {
  // Id of the node being single-run.
  nodeId: string
  // Id of the owning flow (workflow / pipeline).
  flowId: string
  flowType: FlowType
  // Node data of the node being run.
  payload: CommonNodeType
  // Setter used to surface the latest run result to the caller.
  setRunResult: Dispatch<SetStateAction<NodeRunResult | null>>
  setIsRunAfterSingleRun: Dispatch<SetStateAction<boolean>>
  // Whether the user has paused the single run (handled frontend-side).
  isPaused: boolean
  isRunAfterSingleRun: boolean
  // Invoked when the run completes successfully.
  onSuccess: () => void
  // Invoked when the user dismisses the form.
  onCancel: () => void
  // Records the run's inspect variables for the given node.
  appendNodeInspectVars: (nodeId: string, vars: VarInInspect[], nodes: Node[]) => void
}

View File

@ -41,6 +41,7 @@ import { Variable02 } from '@/app/components/base/icons/src/vender/solid/develop
import BoolValue from '@/app/components/workflow/panel/chat-variable-panel/components/bool-value'
import { getVarType } from '@/app/components/workflow/nodes/_base/components/variable/utils'
import { useIsChatMode } from '@/app/components/workflow/hooks/use-workflow'
import useMatchSchemaType from '../../../_base/components/variable/use-match-schema-type'
const optionNameI18NPrefix = 'workflow.nodes.ifElse.optionName'
type ConditionItemProps = {
@ -208,6 +209,7 @@ const ConditionItem = ({
onRemoveCondition?.(caseId, condition.id)
}, [caseId, condition, conditionId, isSubVariableKey, onRemoveCondition, onRemoveSubVariableCondition])
const { getMatchedSchemaType } = useMatchSchemaType()
const handleVarChange = useCallback((valueSelector: ValueSelector, _varItem: Var) => {
const {
conversationVariables,
@ -224,6 +226,7 @@ const ConditionItem = ({
workflowTools,
dataSourceList: dataSourceList ?? [],
},
getMatchedSchemaType,
})
const newCondition = produce(condition, (draft) => {

View File

@ -6,7 +6,6 @@ import {
import { useTranslation } from 'react-i18next'
import type { KnowledgeBaseNodeType } from './types'
import {
ChunkStructureEnum,
IndexMethodEnum,
} from './types'
import ChunkStructure from './components/chunk-structure'
@ -24,7 +23,6 @@ import Split from '../_base/components/split'
import { useNodesReadOnly } from '@/app/components/workflow/hooks'
import VarReferencePicker from '@/app/components/workflow/nodes/_base/components/variable/var-reference-picker'
import type { Var } from '@/app/components/workflow/types'
import { CHUNK_TYPE_MAP } from '@/app/components/workflow/utils/tool'
const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
id,
@ -48,13 +46,9 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
} = useConfig(id)
const filterVar = useCallback((variable: Var) => {
if (data.chunk_structure === ChunkStructureEnum.general && variable.alias === CHUNK_TYPE_MAP.general_chunks)
return true
if (data.chunk_structure === ChunkStructureEnum.parent_child && variable.alias === CHUNK_TYPE_MAP.parent_child_chunks)
return true
if (data.chunk_structure === ChunkStructureEnum.question_answer && variable.alias === CHUNK_TYPE_MAP.qa_chunks)
return true
return false
// console.log(variable.schemaType)
// return variable.schemaType === 'aaa'
return true
}, [data.chunk_structure])
return (
@ -78,6 +72,7 @@ const Panel: FC<NodePanelProps<KnowledgeBaseNodeType>> = ({
filterVar={filterVar}
isFilterFileVar
isSupportFileVar={false}
preferSchemaType
/>
</BoxGroupField>
<Group

View File

@ -1,4 +1,4 @@
import { genNodeMetaData, getOutputVariableAlias } from '@/app/components/workflow/utils'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum, VarType } from '@/app/components/workflow/types'
import type { NodeDefault, ToolWithProvider } from '../../types'
import type { ToolNodeType } from './types'
@ -6,6 +6,7 @@ import { VarType as VarKindType } from '@/app/components/workflow/nodes/tool/typ
import { TOOL_OUTPUT_STRUCT } from '../../constants'
import { CollectionType } from '@/app/components/tools/types'
import { canFindTool } from '@/utils'
import type { AnyObj } from '../_base/components/variable/match-schema-type'
const i18nPrefix = 'workflow.errorMsg'
@ -65,7 +66,7 @@ const nodeDefault: NodeDefault<ToolNodeType> = {
errorMessage: errorMessages,
}
},
getOutputVars(payload: ToolNodeType, allPluginInfoList: Record<string, ToolWithProvider[]>) {
getOutputVars(payload: ToolNodeType, allPluginInfoList: Record<string, ToolWithProvider[]>, _ragVars: any, { getMatchedSchemaType } = { getMatchedSchemaType: (_obj: AnyObj) => '' }) {
const { provider_id, provider_type } = payload
let currentTools: ToolWithProvider[] = []
switch (provider_type) {
@ -96,19 +97,19 @@ const nodeDefault: NodeDefault<ToolNodeType> = {
Object.keys(output_schema.properties).forEach((outputKey) => {
const output = output_schema.properties[outputKey]
const dataType = output.type
const alias = getOutputVariableAlias(output.properties)
const schemaType = getMatchedSchemaType?.(output.value)
let type = dataType === 'array'
? `array[${output.items?.type.slice(0, 1).toLocaleLowerCase()}${output.items?.type.slice(1)}]`
: `${output.type.slice(0, 1).toLocaleLowerCase()}${output.type.slice(1)}`
if (type === VarType.object && alias === 'file')
if (type === VarType.object && schemaType === 'file')
type = VarType.file
outputSchema.push({
variable: outputKey,
type,
description: output.description,
alias,
schemaType,
children: output.type === 'object' ? {
schema: {
type: 'object',

View File

@ -304,7 +304,7 @@ export type Var = {
isLoopVariable?: boolean
nodeId?: string
isRagVariable?: boolean
alias?: string
schemaType?: string
}
export type NodeOutPutVar = {
@ -332,7 +332,7 @@ export type NodeDefault<T = {}> = {
defaultValue: Partial<T>
defaultRunInputData?: Record<string, any>
checkValid: (payload: T, t: any, moreDataForCheckValid?: any) => { isValid: boolean; errorMessage?: string }
getOutputVars?: (payload: T, allPluginInfoList: Record<string, ToolWithProvider[]>, ragVariables?: Var[]) => Var[]
getOutputVars?: (payload: T, allPluginInfoList: Record<string, ToolWithProvider[]>, ragVariables?: Var[], utils?: { getMatchedSchemaType: (obj: any) => string }) => Var[]
}
export type OnSelectBlock = (type: BlockEnum, toolDefaultValue?: ToolDefaultValue | DataSourceDefaultValue) => void

View File

@ -50,18 +50,6 @@ export const CHUNK_TYPE_MAP = {
qa_chunks: 'QAStructureChunk',
}
// deprecated, use schemaType in llm/types.ts instead
export const getOutputVariableAlias = (variable: Record<string, any>) => {
if (variable?.general_chunks)
return CHUNK_TYPE_MAP.general_chunks
if (variable?.parent_child_chunks)
return CHUNK_TYPE_MAP.parent_child_chunks
if (variable?.qa_chunks)
return CHUNK_TYPE_MAP.qa_chunks
if (variable?.file_type)
return 'file'
}
export const wrapStructuredVarItem = (outputItem: any, matchedSchemaType: string): StructuredOutput => {
const dataType = Type.object
return {

View File

@ -9,6 +9,7 @@ import tailwind from 'eslint-plugin-tailwindcss'
import reactHooks from 'eslint-plugin-react-hooks'
import sonar from 'eslint-plugin-sonarjs'
import oxlint from 'eslint-plugin-oxlint'
import next from '@next/eslint-plugin-next'
// import reactRefresh from 'eslint-plugin-react-refresh'
@ -63,12 +64,14 @@ export default combine(
}),
unicorn(),
node(),
// use nextjs config will break @eslint/config-inspector
// use `ESLINT_CONFIG_INSPECTOR=true pnpx @eslint/config-inspector` to check the config
// ...process.env.ESLINT_CONFIG_INSPECTOR
// ? []
// Next.js configuration
{
plugins: {
'@next/next': next,
},
rules: {
...next.configs.recommended.rules,
...next.configs['core-web-vitals'].rules,
// performance issue, and not used.
'@next/next/no-html-link-for-pages': 'off',
},

View File

@ -582,6 +582,7 @@ export type Segment = {
keywords: string[]
hit_count: number
index_node_hash: string
answer: string
}
export type Document = {

View File

@ -6,6 +6,7 @@ import type { AppIconSelection } from '@/app/components/base/app-icon-picker'
import type { Viewport } from 'reactflow'
import type { TransferMethod } from '@/types/app'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import type { NodeRunResult } from '@/types/workflow'
export enum DatasourceType {
localFile = 'local_file',
@ -287,3 +288,13 @@ export type OnlineDriveFile = {
size?: number
type: OnlineDriveFileType
}
/** Request body for single-running a datasource node of a pipeline draft. */
export type DatasourceNodeSingleRunRequest = {
  pipeline_id: string
  // Id and title of the datasource node the run starts from.
  start_node_id: string
  start_node_title: string
  datasource_type: DatasourceType
  // Datasource-specific payload (shape varies per datasource_type).
  datasource_info: Record<string, any>
}

/** The single run resolves to a regular node run result. */
export type DatasourceNodeSingleRunResponse = NodeRunResult

View File

@ -25,7 +25,7 @@
"start": "cp -r .next/static .next/standalone/.next/static && cp -r public .next/standalone/public && cross-env PORT=$npm_config_port HOSTNAME=$npm_config_host node .next/standalone/server.js",
"lint": "pnpx oxlint && pnpm eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache",
"lint-only-show-error": "pnpx oxlint && pnpm eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --quiet",
"fix": "next lint --fix",
"fix": "eslint --fix .",
"eslint-fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix",
"eslint-fix-only-show-error": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet",
"eslint-complexity": "eslint --rule 'complexity: [error, {max: 15}]' --quiet",
@ -103,14 +103,14 @@
"mime": "^4.0.4",
"mitt": "^3.0.1",
"negotiator": "^0.6.3",
"next": "~15.3.5",
"next": "15.5.0",
"next-themes": "^0.4.3",
"pinyin-pro": "^3.25.0",
"qrcode.react": "^4.2.0",
"qs": "^6.13.0",
"react": "~19.1.0",
"react": "19.1.1",
"react-18-input-autosize": "^3.0.0",
"react-dom": "~19.1.0",
"react-dom": "19.1.1",
"react-easy-crop": "^5.1.0",
"react-error-boundary": "^4.1.2",
"react-headless-pagination": "^1.1.6",
@ -161,9 +161,9 @@
"@happy-dom/jest-environment": "^17.4.4",
"@mdx-js/loader": "^3.1.0",
"@mdx-js/react": "^3.1.0",
"@next/bundle-analyzer": "^15.4.1",
"@next/eslint-plugin-next": "~15.4.5",
"@next/mdx": "~15.3.5",
"@next/bundle-analyzer": "15.5.0",
"@next/eslint-plugin-next": "15.5.0",
"@next/mdx": "15.5.0",
"@rgrove/parse-xml": "^4.1.0",
"@storybook/addon-essentials": "8.5.0",
"@storybook/addon-interactions": "8.5.0",
@ -185,8 +185,8 @@
"@types/negotiator": "^0.6.3",
"@types/node": "18.15.0",
"@types/qs": "^6.9.16",
"@types/react": "~19.1.8",
"@types/react-dom": "~19.1.6",
"@types/react": "19.1.11",
"@types/react-dom": "19.1.7",
"@types/react-slider": "^1.3.6",
"@types/react-syntax-highlighter": "^15.5.13",
"@types/react-window": "^1.8.8",
@ -200,7 +200,7 @@
"code-inspector-plugin": "^0.18.1",
"cross-env": "^7.0.3",
"eslint": "^9.32.0",
"eslint-config-next": "~15.4.5",
"eslint-config-next": "15.5.0",
"eslint-plugin-oxlint": "^1.6.0",
"eslint-plugin-react-hooks": "^5.1.0",
"eslint-plugin-react-refresh": "^0.4.19",
@ -223,8 +223,8 @@
"uglify-js": "^3.19.3"
},
"resolutions": {
"@types/react": "~19.1.8",
"@types/react-dom": "~19.1.6",
"@types/react": "19.1.11",
"@types/react-dom": "19.1.7",
"string-width": "4.2.3"
},
"lint-staged": {

1121
web/pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@ -4,6 +4,8 @@ import { del, get, patch, post } from './base'
import { DatasourceType } from '@/models/pipeline'
import type {
ConversionResponse,
DatasourceNodeSingleRunRequest,
DatasourceNodeSingleRunResponse,
DeleteTemplateResponse,
ExportTemplateDSLResponse,
ImportPipelineDSLConfirmResponse,
@ -367,3 +369,18 @@ export const useConvertDatasetToPipeline = () => {
},
})
}
/**
 * Mutation hook that single-runs a datasource node of a pipeline draft and
 * inspects the resulting variables. Caller-supplied mutation options are
 * spread last, so they may override the defaults.
 */
export const useDatasourceSingleRun = (
  mutationOptions: MutationOptions<DatasourceNodeSingleRunResponse, Error, DatasourceNodeSingleRunRequest> = {},
) => {
  const runDatasourceNode = ({ pipeline_id, ...body }: DatasourceNodeSingleRunRequest) => {
    return post<DatasourceNodeSingleRunResponse>(
      `/rag/pipelines/${pipeline_id}/workflows/draft/datasource/variables-inspect`,
      { body },
    )
  }
  return useMutation({
    mutationKey: [NAME_SPACE, 'datasource-node-single-run'],
    mutationFn: runDatasourceNode,
    ...mutationOptions,
  })
}