From 7beed12eab14189f918fa8167f1a636e8ef9d36f Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Tue, 6 Jan 2026 20:18:27 +0800 Subject: [PATCH 01/15] refactor(web): migrate legacy forms to TanStack Form (#30631) --- .../base/form/hooks/use-get-form-values.ts | 2 +- web/app/components/base/form/utils/index.ts | 1 - .../base/form/utils/zod-submit-validator.ts | 22 ++ .../ForgotPasswordForm.spec.tsx | 163 +++++++++++ .../forgot-password/ForgotPasswordForm.tsx | 119 ++++---- web/app/install/installForm.spec.tsx | 158 +++++++++++ web/app/install/installForm.tsx | 255 ++++++++++-------- web/package.json | 2 - web/pnpm-lock.yaml | 31 --- 9 files changed, 551 insertions(+), 202 deletions(-) delete mode 100644 web/app/components/base/form/utils/index.ts create mode 100644 web/app/components/base/form/utils/zod-submit-validator.ts create mode 100644 web/app/forgot-password/ForgotPasswordForm.spec.tsx create mode 100644 web/app/install/installForm.spec.tsx diff --git a/web/app/components/base/form/hooks/use-get-form-values.ts b/web/app/components/base/form/hooks/use-get-form-values.ts index 9ea418ea00..3dd2eceb30 100644 --- a/web/app/components/base/form/hooks/use-get-form-values.ts +++ b/web/app/components/base/form/hooks/use-get-form-values.ts @@ -4,7 +4,7 @@ import type { GetValuesOptions, } from '../types' import { useCallback } from 'react' -import { getTransformedValuesWhenSecretInputPristine } from '../utils' +import { getTransformedValuesWhenSecretInputPristine } from '../utils/secret-input' import { useCheckValidated } from './use-check-validated' export const useGetFormValues = (form: AnyFormApi, formSchemas: FormSchema[]) => { diff --git a/web/app/components/base/form/utils/index.ts b/web/app/components/base/form/utils/index.ts deleted file mode 100644 index 0abb8d1ad5..0000000000 --- a/web/app/components/base/form/utils/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './secret-input' diff --git a/web/app/components/base/form/utils/zod-submit-validator.ts b/web/app/components/base/form/utils/zod-submit-validator.ts new file mode 100644 index 0000000000..23eacaf8a4 --- /dev/null +++ b/web/app/components/base/form/utils/zod-submit-validator.ts @@ -0,0 +1,22 @@ +import type { ZodSchema } from 'zod' + +type SubmitValidator = ({ value }: { value: T }) => { fields: Record } | undefined + +export const zodSubmitValidator = (schema: ZodSchema): SubmitValidator => { + return ({ value }) => { + const result = schema.safeParse(value) + if (!result.success) { + const fieldErrors: Record = {} + for (const issue of result.error.issues) { + const path = issue.path[0] + if (path === undefined) + continue + const key = String(path) + if (!fieldErrors[key]) + fieldErrors[key] = issue.message + } + return { fields: fieldErrors } + } + return undefined + } +} diff --git a/web/app/forgot-password/ForgotPasswordForm.spec.tsx b/web/app/forgot-password/ForgotPasswordForm.spec.tsx new file mode 100644 index 0000000000..aa360cb6c3 --- /dev/null +++ b/web/app/forgot-password/ForgotPasswordForm.spec.tsx @@ -0,0 +1,163 @@ +import type { InitValidateStatusResponse, SetupStatusResponse } from '@/models/common' +import { fireEvent, render, screen, waitFor } from '@testing-library/react' +import { fetchInitValidateStatus, fetchSetupStatus, sendForgotPasswordEmail } from '@/service/common' +import ForgotPasswordForm from './ForgotPasswordForm' + +const mockPush = vi.fn() + +vi.mock('next/navigation', () => ({ + useRouter: () => ({ push: mockPush }), +})) + +vi.mock('@/service/common', 
() => ({ + fetchSetupStatus: vi.fn(), + fetchInitValidateStatus: vi.fn(), + sendForgotPasswordEmail: vi.fn(), +})) + +const mockFetchSetupStatus = vi.mocked(fetchSetupStatus) +const mockFetchInitValidateStatus = vi.mocked(fetchInitValidateStatus) +const mockSendForgotPasswordEmail = vi.mocked(sendForgotPasswordEmail) + +const prepareLoadedState = () => { + mockFetchSetupStatus.mockResolvedValue({ step: 'not_started' } as SetupStatusResponse) + mockFetchInitValidateStatus.mockResolvedValue({ status: 'finished' } as InitValidateStatusResponse) +} + +describe('ForgotPasswordForm', () => { + beforeEach(() => { + vi.clearAllMocks() + prepareLoadedState() + }) + + it('should render form after loading', async () => { + render() + + expect(await screen.findByLabelText('login.email')).toBeInTheDocument() + }) + + it('should show validation error when email is empty', async () => { + render() + + await screen.findByLabelText('login.email') + + fireEvent.click(screen.getByRole('button', { name: /login\.sendResetLink/ })) + + await waitFor(() => { + expect(screen.getByText('login.error.emailInValid')).toBeInTheDocument() + }) + expect(mockSendForgotPasswordEmail).not.toHaveBeenCalled() + }) + + it('should send reset email and navigate after confirmation', async () => { + mockSendForgotPasswordEmail.mockResolvedValue({ result: 'success', data: 'ok' } as any) + + render() + + const emailInput = await screen.findByLabelText('login.email') + fireEvent.change(emailInput, { target: { value: 'test@example.com' } }) + + fireEvent.click(screen.getByRole('button', { name: /login\.sendResetLink/ })) + + await waitFor(() => { + expect(mockSendForgotPasswordEmail).toHaveBeenCalledWith({ + url: '/forgot-password', + body: { email: 'test@example.com' }, + }) + }) + + await waitFor(() => { + expect(screen.getByRole('button', { name: /login\.backToSignIn/ })).toBeInTheDocument() + }) + + fireEvent.click(screen.getByRole('button', { name: /login\.backToSignIn/ })) + expect(mockPush).toHaveBeenCalledWith('/signin') + }) + + it('should submit when form is submitted', async () => { + mockSendForgotPasswordEmail.mockResolvedValue({ result: 'success', data: 'ok' } as any) + + render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'test@example.com' } }) + + const form = screen.getByRole('button', { name: /login\.sendResetLink/ }).closest('form') + expect(form).not.toBeNull() + + fireEvent.submit(form as HTMLFormElement) + + await waitFor(() => { + expect(mockSendForgotPasswordEmail).toHaveBeenCalledWith({ + url: '/forgot-password', + body: { email: 'test@example.com' }, + }) + }) + }) + + it('should disable submit while request is in flight', async () => { + let resolveRequest: ((value: any) => void) | undefined + const requestPromise = new Promise((resolve) => { + resolveRequest = resolve + }) + mockSendForgotPasswordEmail.mockReturnValue(requestPromise as any) + + render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'test@example.com' } }) + + const button = screen.getByRole('button', { name: /login\.sendResetLink/ }) + fireEvent.click(button) + + await waitFor(() => { + expect(button).toBeDisabled() + }) + + fireEvent.click(button) + expect(mockSendForgotPasswordEmail).toHaveBeenCalledTimes(1) + + resolveRequest?.({ result: 'success', data: 'ok' }) + + await waitFor(() => { + expect(screen.getByRole('button', { name: /login\.backToSignIn/ })).toBeInTheDocument() + }) + }) + + it('should keep form state when request fails', async () => { + 
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + mockSendForgotPasswordEmail.mockResolvedValue({ result: 'fail', data: 'error' } as any) + + render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'test@example.com' } }) + fireEvent.click(screen.getByRole('button', { name: /login\.sendResetLink/ })) + + await waitFor(() => { + expect(mockSendForgotPasswordEmail).toHaveBeenCalledTimes(1) + }) + + expect(screen.getByRole('button', { name: /login\.sendResetLink/ })).toBeInTheDocument() + expect(mockPush).not.toHaveBeenCalled() + + consoleSpy.mockRestore() + }) + + it('should redirect to init when status is not started', async () => { + const originalLocation = window.location + Object.defineProperty(window, 'location', { + value: { href: '' }, + writable: true, + }) + mockFetchInitValidateStatus.mockResolvedValue({ status: 'not_started' } as InitValidateStatusResponse) + + render() + + await waitFor(() => { + expect(window.location.href).toBe('/init') + }) + + Object.defineProperty(window, 'location', { + value: originalLocation, + writable: true, + }) + }) +}) diff --git a/web/app/forgot-password/ForgotPasswordForm.tsx b/web/app/forgot-password/ForgotPasswordForm.tsx index 7299d24ebc..ff33cccc82 100644 --- a/web/app/forgot-password/ForgotPasswordForm.tsx +++ b/web/app/forgot-password/ForgotPasswordForm.tsx @@ -1,15 +1,16 @@ 'use client' import type { InitValidateStatusResponse } from '@/models/common' -import { zodResolver } from '@hookform/resolvers/zod' +import { useStore } from '@tanstack/react-form' import { useRouter } from 'next/navigation' import * as React from 'react' import { useEffect, useState } from 'react' -import { useForm } from 'react-hook-form' import { useTranslation } from 'react-i18next' import { z } from 'zod' import Button from '@/app/components/base/button' +import { formContext, useAppForm } from '@/app/components/base/form' +import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator' import { fetchInitValidateStatus, fetchSetupStatus, @@ -27,44 +28,45 @@ const accountFormSchema = z.object({ .email('error.emailInValid'), }) -type AccountFormValues = z.infer - const ForgotPasswordForm = () => { const { t } = useTranslation() const router = useRouter() const [loading, setLoading] = useState(true) const [isEmailSent, setIsEmailSent] = useState(false) - const { register, trigger, getValues, formState: { errors } } = useForm({ - resolver: zodResolver(accountFormSchema), + + const form = useAppForm({ defaultValues: { email: '' }, + validators: { + onSubmit: zodSubmitValidator(accountFormSchema), + }, + onSubmit: async ({ value }) => { + try { + const res = await sendForgotPasswordEmail({ + url: '/forgot-password', + body: { email: value.email }, + }) + if (res.result === 'success') + setIsEmailSent(true) + else console.error('Email verification failed') + } + catch (error) { + console.error('Request failed:', error) + } + }, }) - const handleSendResetPasswordEmail = async (email: string) => { - try { - const res = await sendForgotPasswordEmail({ - url: '/forgot-password', - body: { email }, - }) - if (res.result === 'success') - setIsEmailSent(true) - - else console.error('Email verification failed') - } - catch (error) { - console.error('Request failed:', error) - } - } + const isSubmitting = useStore(form.store, state => state.isSubmitting) + const emailErrors = useStore(form.store, state => state.fieldMeta.email?.errors) const handleSendResetPasswordClick = async () => { + 
if (isSubmitting) + return + if (isEmailSent) { router.push('/signin') } else { - const isValid = await trigger('email') - if (isValid) { - const email = getValues('email') - await handleSendResetPasswordEmail(email) - } + form.handleSubmit() } } @@ -94,30 +96,51 @@ const ForgotPasswordForm = () => {
-
- {!isEmailSent && ( -
- -
- - {errors.email && {t(`${errors.email?.message}` as 'error.emailInValid', { ns: 'login' })}} + + { + e.preventDefault() + e.stopPropagation() + form.handleSubmit() + }} + > + {!isEmailSent && ( +
+ +
+ + {field => ( + field.handleChange(e.target.value)} + onBlur={field.handleBlur} + placeholder={t('emailPlaceholder', { ns: 'login' }) || ''} + /> + )} + + {emailErrors && emailErrors.length > 0 && ( + + {t(`${emailErrors[0]}` as 'error.emailInValid', { ns: 'login' })} + + )} +
+ )} +
+
- )} -
- -
- + +
diff --git a/web/app/install/installForm.spec.tsx b/web/app/install/installForm.spec.tsx new file mode 100644 index 0000000000..74602f916a --- /dev/null +++ b/web/app/install/installForm.spec.tsx @@ -0,0 +1,158 @@ +import type { InitValidateStatusResponse, SetupStatusResponse } from '@/models/common' +import { fireEvent, render, screen, waitFor } from '@testing-library/react' +import { fetchInitValidateStatus, fetchSetupStatus, login, setup } from '@/service/common' +import { encryptPassword } from '@/utils/encryption' +import InstallForm from './installForm' + +const mockPush = vi.fn() +const mockReplace = vi.fn() + +vi.mock('next/navigation', () => ({ + useRouter: () => ({ push: mockPush, replace: mockReplace }), +})) + +vi.mock('@/service/common', () => ({ + fetchSetupStatus: vi.fn(), + fetchInitValidateStatus: vi.fn(), + setup: vi.fn(), + login: vi.fn(), + getSystemFeatures: vi.fn(), +})) + +const mockFetchSetupStatus = vi.mocked(fetchSetupStatus) +const mockFetchInitValidateStatus = vi.mocked(fetchInitValidateStatus) +const mockSetup = vi.mocked(setup) +const mockLogin = vi.mocked(login) + +const prepareLoadedState = () => { + mockFetchSetupStatus.mockResolvedValue({ step: 'not_started' } as SetupStatusResponse) + mockFetchInitValidateStatus.mockResolvedValue({ status: 'finished' } as InitValidateStatusResponse) +} + +describe('InstallForm', () => { + beforeEach(() => { + vi.clearAllMocks() + prepareLoadedState() + }) + + it('should render form after loading', async () => { + render() + + expect(await screen.findByLabelText('login.email')).toBeInTheDocument() + expect(screen.getByRole('button', { name: /login\.installBtn/ })).toBeInTheDocument() + }) + + it('should show validation error when required fields are empty', async () => { + render() + + await screen.findByLabelText('login.email') + + fireEvent.click(screen.getByRole('button', { name: /login\.installBtn/ })) + + await waitFor(() => { + expect(screen.getByText('login.error.emailInValid')).toBeInTheDocument() + expect(screen.getByText('login.error.nameEmpty')).toBeInTheDocument() + }) + expect(mockSetup).not.toHaveBeenCalled() + }) + + it('should submit and redirect to apps on successful login', async () => { + mockSetup.mockResolvedValue({ result: 'success' } as any) + mockLogin.mockResolvedValue({ result: 'success', data: { access_token: 'token' } } as any) + + render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'admin@example.com' } }) + fireEvent.change(screen.getByLabelText('login.name'), { target: { value: 'Admin' } }) + fireEvent.change(screen.getByLabelText('login.password'), { target: { value: 'Password123' } }) + + const form = screen.getByRole('button', { name: /login\.installBtn/ }).closest('form') + expect(form).not.toBeNull() + + fireEvent.submit(form as HTMLFormElement) + + await waitFor(() => { + expect(mockSetup).toHaveBeenCalledWith({ + body: { + email: 'admin@example.com', + name: 'Admin', + password: 'Password123', + language: 'en', + }, + }) + }) + + await waitFor(() => { + expect(mockLogin).toHaveBeenCalledWith({ + url: '/login', + body: { + email: 'admin@example.com', + password: encryptPassword('Password123'), + }, + }) + }) + + await waitFor(() => { + expect(mockReplace).toHaveBeenCalledWith('/apps') + }) + }) + + it('should redirect to sign in when login fails', async () => { + mockSetup.mockResolvedValue({ result: 'success' } as any) + mockLogin.mockResolvedValue({ result: 'fail', data: 'error', code: 'login_failed', message: 'login failed' } as any) + + 
render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'admin@example.com' } }) + fireEvent.change(screen.getByLabelText('login.name'), { target: { value: 'Admin' } }) + fireEvent.change(screen.getByLabelText('login.password'), { target: { value: 'Password123' } }) + + fireEvent.click(screen.getByRole('button', { name: /login\.installBtn/ })) + + await waitFor(() => { + expect(mockReplace).toHaveBeenCalledWith('/signin') + }) + }) + + it('should disable submit while request is in flight', async () => { + let resolveSetup: ((value: any) => void) | undefined + const setupPromise = new Promise((resolve) => { + resolveSetup = resolve + }) + mockSetup.mockReturnValue(setupPromise as any) + mockLogin.mockResolvedValue({ result: 'success', data: { access_token: 'token' } } as any) + + render() + + fireEvent.change(await screen.findByLabelText('login.email'), { target: { value: 'admin@example.com' } }) + fireEvent.change(screen.getByLabelText('login.name'), { target: { value: 'Admin' } }) + fireEvent.change(screen.getByLabelText('login.password'), { target: { value: 'Password123' } }) + + const button = screen.getByRole('button', { name: /login\.installBtn/ }) + fireEvent.click(button) + + await waitFor(() => { + expect(button).toBeDisabled() + }) + + fireEvent.click(button) + expect(mockSetup).toHaveBeenCalledTimes(1) + + resolveSetup?.({ result: 'success' }) + + await waitFor(() => { + expect(mockLogin).toHaveBeenCalledTimes(1) + }) + }) + + it('should redirect to sign in when setup is finished', async () => { + mockFetchSetupStatus.mockResolvedValue({ step: 'finished' } as SetupStatusResponse) + + render() + + await waitFor(() => { + expect(localStorage.setItem).toHaveBeenCalledWith('setup_status', 'finished') + expect(mockPush).toHaveBeenCalledWith('/signin') + }) + }) +}) diff --git a/web/app/install/installForm.tsx b/web/app/install/installForm.tsx index c43fbb4251..de32f18bc7 100644 --- a/web/app/install/installForm.tsx +++ b/web/app/install/installForm.tsx @@ -1,18 +1,17 @@ 'use client' -import type { SubmitHandler } from 'react-hook-form' import type { InitValidateStatusResponse, SetupStatusResponse } from '@/models/common' -import { zodResolver } from '@hookform/resolvers/zod' - -import { useDebounceFn } from 'ahooks' +import { useStore } from '@tanstack/react-form' import Link from 'next/link' import { useRouter } from 'next/navigation' import * as React from 'react' -import { useCallback, useEffect } from 'react' -import { useForm } from 'react-hook-form' +import { useEffect } from 'react' import { useTranslation } from 'react-i18next' import { z } from 'zod' import Button from '@/app/components/base/button' +import { formContext, useAppForm } from '@/app/components/base/form' +import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator' +import Input from '@/app/components/base/input' import { validPassword } from '@/config' import { useDocLink } from '@/context/i18n' @@ -33,8 +32,6 @@ const accountFormSchema = z.object({ }).regex(validPassword, 'error.passwordInvalid'), }) -type AccountFormValues = z.infer - const InstallForm = () => { useDocumentTitle('') const { t, i18n } = useTranslation() @@ -42,64 +39,49 @@ const InstallForm = () => { const router = useRouter() const [showPassword, setShowPassword] = React.useState(false) const [loading, setLoading] = React.useState(true) - const { - register, - handleSubmit, - formState: { errors, isSubmitting }, - } = useForm({ - resolver: zodResolver(accountFormSchema), + + 
const form = useAppForm({ defaultValues: { name: '', password: '', email: '', }, - }) + validators: { + onSubmit: zodSubmitValidator(accountFormSchema), + }, + onSubmit: async ({ value }) => { + // First, setup the admin account + await setup({ + body: { + ...value, + language: i18n.language, + }, + }) - const onSubmit: SubmitHandler = async (data) => { - // First, setup the admin account - await setup({ - body: { - ...data, - language: i18n.language, - }, - }) + // Then, automatically login with the same credentials + const loginRes = await login({ + url: '/login', + body: { + email: value.email, + password: encodePassword(value.password), + }, + }) - // Then, automatically login with the same credentials - const loginRes = await login({ - url: '/login', - body: { - email: data.email, - password: encodePassword(data.password), - }, - }) - - // Store tokens and redirect to apps if login successful - if (loginRes.result === 'success') { - router.replace('/apps') - } - else { - // Fallback to signin page if auto-login fails - router.replace('/signin') - } - } - - const handleSetting = async () => { - if (isSubmitting) - return - handleSubmit(onSubmit)() - } - - const { run: debouncedHandleKeyDown } = useDebounceFn( - (e: React.KeyboardEvent) => { - if (e.key === 'Enter') { - e.preventDefault() - handleSetting() + // Store tokens and redirect to apps if login successful + if (loginRes.result === 'success') { + router.replace('/apps') + } + else { + // Fallback to signin page if auto-login fails + router.replace('/signin') } }, - { wait: 200 }, - ) + }) - const handleKeyDown = useCallback(debouncedHandleKeyDown, [debouncedHandleKeyDown]) + const isSubmitting = useStore(form.store, state => state.isSubmitting) + const emailErrors = useStore(form.store, state => state.fieldMeta.email?.errors) + const nameErrors = useStore(form.store, state => state.fieldMeta.name?.errors) + const passwordErrors = useStore(form.store, state => state.fieldMeta.password?.errors) useEffect(() => { fetchSetupStatus().then((res: SetupStatusResponse) => { @@ -128,76 +110,111 @@ const InstallForm = () => {
-
-
- -
- - {errors.email && {t(`${errors.email?.message}` as 'error.emailInValid', { ns: 'login' })}} -
- -
- -
- -
- -
- {errors.name && {t(`${errors.name.message}` as 'error.nameEmpty', { ns: 'login' })}} -
- -
- -
- - -
- + + { + e.preventDefault() + e.stopPropagation() + if (isSubmitting) + return + form.handleSubmit() + }} + > +
+ +
+ + {field => ( + field.handleChange(e.target.value)} + onBlur={field.handleBlur} + placeholder={t('emailPlaceholder', { ns: 'login' }) || ''} + /> + )} + + {emailErrors && emailErrors.length > 0 && ( + + {t(`${emailErrors[0]}` as 'error.emailInValid', { ns: 'login' })} + + )}
-
- {t('error.passwordInvalid', { ns: 'login' })} +
+ +
+ + {field => ( + field.handleChange(e.target.value)} + onBlur={field.handleBlur} + placeholder={t('namePlaceholder', { ns: 'login' }) || ''} + /> + )} + +
+ {nameErrors && nameErrors.length > 0 && ( + + {t(`${nameErrors[0]}` as 'error.nameEmpty', { ns: 'login' })} + + )}
-
-
- -
- +
+ +
+ + {field => ( + field.handleChange(e.target.value)} + onBlur={field.handleBlur} + placeholder={t('passwordPlaceholder', { ns: 'login' }) || ''} + /> + )} + + +
+ +
+
+ +
0, + })} + > + {t('error.passwordInvalid', { ns: 'login' })} +
+
+ +
+ +
+ +
{t('license.tip', { ns: 'login' })} -   +   = 16 || ^19.0.0-rc' - '@hookform/resolvers@5.2.2': - resolution: {integrity: sha512-A/IxlMLShx3KjV/HeTcTfaMxdwy690+L/ZADoeaTltLx+CVuzkeVIPuybK3jrRfw7YZnmdKsVVHAlEPIAEUNlA==} - peerDependencies: - react-hook-form: ^7.55.0 - '@humanfs/core@0.19.1': resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} engines: {node: '>=18.18.0'} @@ -3211,9 +3200,6 @@ packages: '@standard-schema/spec@1.1.0': resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} - '@standard-schema/utils@0.3.0': - resolution: {integrity: sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==} - '@storybook/addon-docs@9.1.13': resolution: {integrity: sha512-V1nCo7bfC3kQ5VNVq0VDcHsIhQf507m+BxMA5SIYiwdJHljH2BXpW2fL3FFn9gv9Wp57AEEzhm+wh4zANaJgkg==} peerDependencies: @@ -7436,12 +7422,6 @@ packages: react-fast-compare@3.2.2: resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==} - react-hook-form@7.68.0: - resolution: {integrity: sha512-oNN3fjrZ/Xo40SWlHf1yCjlMK417JxoSJVUXQjGdvdRCU07NTFei1i1f8ApUAts+IVh14e4EdakeLEA+BEAs/Q==} - engines: {node: '>=18.0.0'} - peerDependencies: - react: ^16.8.0 || ^17 || ^18 || ^19 - react-hotkeys-hook@4.6.2: resolution: {integrity: sha512-FmP+ZriY3EG59Ug/lxNfrObCnW9xQShgk7Nb83+CkpfkcCpfS95ydv+E9JuXA5cp8KtskU7LGlIARpkc92X22Q==} peerDependencies: @@ -10516,11 +10496,6 @@ snapshots: dependencies: react: 19.2.3 - '@hookform/resolvers@5.2.2(react-hook-form@7.68.0(react@19.2.3))': - dependencies: - '@standard-schema/utils': 0.3.0 - react-hook-form: 7.68.0(react@19.2.3) - '@humanfs/core@0.19.1': {} '@humanfs/node@0.16.7': @@ -11782,8 +11757,6 @@ snapshots: '@standard-schema/spec@1.1.0': {} - '@standard-schema/utils@0.3.0': {} - '@storybook/addon-docs@9.1.13(@types/react@19.2.7)(storybook@9.1.17(@testing-library/dom@10.4.1)(vite@7.3.0(@types/node@18.15.0)(jiti@1.21.7)(sass@1.95.0)(terser@5.44.1)(tsx@4.21.0)(yaml@2.8.2)))': dependencies: '@mdx-js/react': 3.1.1(@types/react@19.2.7)(react@19.2.3) @@ -16931,10 +16904,6 @@ snapshots: react-fast-compare@3.2.2: {} - react-hook-form@7.68.0(react@19.2.3): - dependencies: - react: 19.2.3 - react-hotkeys-hook@4.6.2(react-dom@19.2.3(react@19.2.3))(react@19.2.3): dependencies: react: 19.2.3 From 44d7aaaf337424d8dc16a21fa3aaf34d8a9674a5 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Tue, 6 Jan 2026 20:19:22 +0800 Subject: [PATCH 02/15] fix: prevent empty state flash and add skeleton loading for app list (#30616) Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> --- web/app/components/apps/app-card-skeleton.tsx | 41 +++++++++++++++ web/app/components/apps/list.tsx | 52 ++++++++++++------- web/app/components/apps/new-app-card.tsx | 8 ++- web/service/use-apps.ts | 2 + 4 files changed, 84 insertions(+), 19 deletions(-) create mode 100644 web/app/components/apps/app-card-skeleton.tsx diff --git a/web/app/components/apps/app-card-skeleton.tsx b/web/app/components/apps/app-card-skeleton.tsx new file mode 100644 index 0000000000..806f19973a --- /dev/null +++ b/web/app/components/apps/app-card-skeleton.tsx @@ -0,0 +1,41 @@ +'use client' + +import * as React from 'react' +import { SkeletonContainer, SkeletonRectangle, SkeletonRow } from '@/app/components/base/skeleton' + +type AppCardSkeletonProps = { + count?: 
number +} + +/** + * Skeleton placeholder for App cards during loading states. + * Matches the visual layout of AppCard component. + */ +export const AppCardSkeleton = React.memo(({ count = 6 }: AppCardSkeletonProps) => { + return ( + <> + {Array.from({ length: count }).map((_, index) => ( +
+ + + +
+ + +
+
+
+ + +
+
+
+ ))} + + ) +}) + +AppCardSkeleton.displayName = 'AppCardSkeleton' diff --git a/web/app/components/apps/list.tsx b/web/app/components/apps/list.tsx index 003b463595..290a73fc7c 100644 --- a/web/app/components/apps/list.tsx +++ b/web/app/components/apps/list.tsx @@ -27,7 +27,9 @@ import { useGlobalPublicStore } from '@/context/global-public-context' import { CheckModal } from '@/hooks/use-pay' import { useInfiniteAppList } from '@/service/use-apps' import { AppModeEnum } from '@/types/app' +import { cn } from '@/utils/classnames' import AppCard from './app-card' +import { AppCardSkeleton } from './app-card-skeleton' import Empty from './empty' import Footer from './footer' import useAppsQueryState from './hooks/use-apps-query-state' @@ -45,7 +47,7 @@ const List = () => { const { t } = useTranslation() const { systemFeatures } = useGlobalPublicStore() const router = useRouter() - const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator } = useAppContext() + const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace } = useAppContext() const showTagManagementModal = useTagStore(s => s.showTagManagementModal) const [activeTab, setActiveTab] = useQueryState( 'category', @@ -89,6 +91,7 @@ const List = () => { const { data, isLoading, + isFetching, isFetchingNextPage, fetchNextPage, hasNextPage, @@ -172,6 +175,8 @@ const List = () => { const pages = data?.pages ?? [] const hasAnyApp = (pages[0]?.total ?? 0) > 0 + // Show skeleton during initial load or when refetching with no previous data + const showSkeleton = isLoading || (isFetching && pages.length === 0) return ( <> @@ -205,23 +210,34 @@ const List = () => { />
- {hasAnyApp - ? ( -
- {isCurrentWorkspaceEditor - && } - {pages.map(({ data: apps }) => apps.map(app => ( - - )))} -
- ) - : ( -
- {isCurrentWorkspaceEditor - && } - -
- )} +
+ {(isCurrentWorkspaceEditor || isLoadingCurrentWorkspace) && ( + + )} + {(() => { + if (showSkeleton) + return + + if (hasAnyApp) { + return pages.flatMap(({ data: apps }) => apps).map(app => ( + + )) + } + + // No apps - show empty state + return + })()} +
{isCurrentWorkspaceEditor && (
import('@/app/components/app/create-fro export type CreateAppCardProps = { className?: string + isLoading?: boolean onSuccess?: () => void ref: React.RefObject selectedAppType?: string @@ -33,6 +34,7 @@ export type CreateAppCardProps = { const CreateAppCard = ({ ref, className, + isLoading = false, onSuccess, selectedAppType, }: CreateAppCardProps) => { @@ -56,7 +58,11 @@ const CreateAppCard = ({ return (
{t('createApp', { ns: 'app' })}
diff --git a/web/service/use-apps.ts b/web/service/use-apps.ts index 0f6c4a64ac..d16d44af20 100644 --- a/web/service/use-apps.ts +++ b/web/service/use-apps.ts @@ -12,6 +12,7 @@ import type { } from '@/models/app' import type { App, AppModeEnum } from '@/types/app' import { + keepPreviousData, useInfiniteQuery, useQuery, useQueryClient, @@ -107,6 +108,7 @@ export const useInfiniteAppList = (params: AppListParams, options?: { enabled?: queryFn: ({ pageParam = normalizedParams.page }) => get('/apps', { params: { ...normalizedParams, page: pageParam } }), getNextPageParam: lastPage => lastPage.has_more ? lastPage.page + 1 : undefined, initialPageParam: normalizedParams.page, + placeholderData: keepPreviousData, ...options, }) } From f57aa08a3fff64019a41ddd5af923f25d1e06f57 Mon Sep 17 00:00:00 2001 From: Toshiki Sugimizu Date: Tue, 6 Jan 2026 21:23:59 +0900 Subject: [PATCH 03/15] fix: flask db check fails due to nullable mismatch between migrations and models (#30474) Co-authored-by: Claude Opus 4.5 Co-authored-by: Maries --- api/models/model.py | 2 +- api/models/provider.py | 2 +- api/models/trigger.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/api/models/model.py b/api/models/model.py index b6f2751a72..52e409d85a 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1534,7 +1534,7 @@ class OperationLog(TypeBase): tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) account_id: Mapped[str] = mapped_column(StringUUID, nullable=False) action: Mapped[str] = mapped_column(String(255), nullable=False) - content: Mapped[Any] = mapped_column(sa.JSON) + content: Mapped[Any | None] = mapped_column(sa.JSON, nullable=True) created_at: Mapped[datetime] = mapped_column( sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False ) diff --git a/api/models/provider.py b/api/models/provider.py index 2afd8c5329..d68d56d32a 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -76,7 +76,7 @@ class Provider(TypeBase): quota_type: Mapped[str | None] = mapped_column(String(40), nullable=True, server_default=text("''"), default="") quota_limit: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=None) - quota_used: Mapped[int] = mapped_column(sa.BigInteger, nullable=False, default=0) + quota_used: Mapped[int | None] = mapped_column(sa.BigInteger, nullable=True, default=0) created_at: Mapped[datetime] = mapped_column( DateTime, nullable=False, server_default=func.current_timestamp(), init=False diff --git a/api/models/trigger.py b/api/models/trigger.py index 87e2a5ccfc..209345eb84 100644 --- a/api/models/trigger.py +++ b/api/models/trigger.py @@ -415,7 +415,7 @@ class AppTrigger(TypeBase): node_id: Mapped[str | None] = mapped_column(String(64), nullable=False) trigger_type: Mapped[str] = mapped_column(EnumText(AppTriggerType, length=50), nullable=False) title: Mapped[str] = mapped_column(String(255), nullable=False) - provider_name: Mapped[str] = mapped_column(String(255), server_default="", default="") # why it is nullable? 
+ provider_name: Mapped[str | None] = mapped_column(String(255), nullable=True, server_default="", default="") status: Mapped[str] = mapped_column( EnumText(AppTriggerStatus, length=50), nullable=False, default=AppTriggerStatus.ENABLED ) From 9b128048c4659ab8ac0027654b4baeed9df92a7c Mon Sep 17 00:00:00 2001 From: Coding On Star <447357187@qq.com> Date: Tue, 6 Jan 2026 21:57:21 +0800 Subject: [PATCH 04/15] refactor: restructure DatasetCard component for improved readability and maintainability (#30617) Co-authored-by: CodingOnStar --- .../dataset-card/components/corner-labels.tsx | 36 ++ .../components/dataset-card-footer.tsx | 62 +++ .../components/dataset-card-header.tsx | 148 +++++++ .../components/dataset-card-modals.tsx | 55 +++ .../dataset-card/components/description.tsx | 18 + .../components/operations-popover.tsx | 52 +++ .../list/dataset-card/components/tag-area.tsx | 55 +++ .../hooks/use-dataset-card-state.ts | 138 +++++++ .../datasets/list/dataset-card/index.tsx | 369 +++--------------- web/service/use-dataset-card.ts | 18 + 10 files changed, 641 insertions(+), 310 deletions(-) create mode 100644 web/app/components/datasets/list/dataset-card/components/corner-labels.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/dataset-card-footer.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/dataset-card-header.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/dataset-card-modals.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/description.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/operations-popover.tsx create mode 100644 web/app/components/datasets/list/dataset-card/components/tag-area.tsx create mode 100644 web/app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts create mode 100644 web/service/use-dataset-card.ts diff --git a/web/app/components/datasets/list/dataset-card/components/corner-labels.tsx b/web/app/components/datasets/list/dataset-card/components/corner-labels.tsx new file mode 100644 index 0000000000..03ca543ee7 --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/corner-labels.tsx @@ -0,0 +1,36 @@ +import type { DataSet } from '@/models/datasets' +import * as React from 'react' +import { useTranslation } from 'react-i18next' +import CornerLabel from '@/app/components/base/corner-label' + +type CornerLabelsProps = { + dataset: DataSet +} + +const CornerLabels = ({ dataset }: CornerLabelsProps) => { + const { t } = useTranslation() + + if (!dataset.embedding_available) { + return ( + + ) + } + + if (dataset.runtime_mode === 'rag_pipeline') { + return ( + + ) + } + + return null +} + +export default React.memo(CornerLabels) diff --git a/web/app/components/datasets/list/dataset-card/components/dataset-card-footer.tsx b/web/app/components/datasets/list/dataset-card/components/dataset-card-footer.tsx new file mode 100644 index 0000000000..854f34f49c --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/dataset-card-footer.tsx @@ -0,0 +1,62 @@ +import type { DataSet } from '@/models/datasets' +import { RiFileTextFill, RiRobot2Fill } from '@remixicon/react' +import * as React from 'react' +import { useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import Tooltip from '@/app/components/base/tooltip' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' +import { cn } from '@/utils/classnames' + +const EXTERNAL_PROVIDER 
= 'external' + +type DatasetCardFooterProps = { + dataset: DataSet +} + +const DatasetCardFooter = ({ dataset }: DatasetCardFooterProps) => { + const { t } = useTranslation() + const { formatTimeFromNow } = useFormatTimeFromNow() + const isExternalProvider = dataset.provider === EXTERNAL_PROVIDER + + const documentCount = useMemo(() => { + const availableDocCount = dataset.total_available_documents ?? 0 + if (availableDocCount < dataset.document_count) + return `${availableDocCount} / ${dataset.document_count}` + return `${dataset.document_count}` + }, [dataset.document_count, dataset.total_available_documents]) + + const documentCountTooltip = useMemo(() => { + const availableDocCount = dataset.total_available_documents ?? 0 + if (availableDocCount < dataset.document_count) + return t('partialEnabled', { ns: 'dataset', count: dataset.document_count, num: availableDocCount }) + return t('docAllEnabled', { ns: 'dataset', count: availableDocCount }) + }, [t, dataset.document_count, dataset.total_available_documents]) + + return ( +
+ +
+ + {documentCount} +
+
+ {!isExternalProvider && ( + +
+ + {dataset.app_count} +
+
+ )} + / + {`${t('updated', { ns: 'dataset' })} ${formatTimeFromNow(dataset.updated_at * 1000)}`} +
+ ) +} + +export default React.memo(DatasetCardFooter) diff --git a/web/app/components/datasets/list/dataset-card/components/dataset-card-header.tsx b/web/app/components/datasets/list/dataset-card/components/dataset-card-header.tsx new file mode 100644 index 0000000000..abe7595e14 --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/dataset-card-header.tsx @@ -0,0 +1,148 @@ +import type { DataSet } from '@/models/datasets' +import * as React from 'react' +import { useMemo } from 'react' +import { useTranslation } from 'react-i18next' +import AppIcon from '@/app/components/base/app-icon' +import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' +import { useKnowledge } from '@/hooks/use-knowledge' +import { DOC_FORM_ICON_WITH_BG, DOC_FORM_TEXT } from '@/models/datasets' +import { cn } from '@/utils/classnames' + +const EXTERNAL_PROVIDER = 'external' + +type DatasetCardHeaderProps = { + dataset: DataSet +} + +// DocModeInfo component - placed before usage +type DocModeInfoProps = { + dataset: DataSet + isExternalProvider: boolean + isShowDocModeInfo: boolean +} + +const DocModeInfo = ({ + dataset, + isExternalProvider, + isShowDocModeInfo, +}: DocModeInfoProps) => { + const { t } = useTranslation() + const { formatIndexingTechniqueAndMethod } = useKnowledge() + + if (isExternalProvider) { + return ( +
+ {t('externalKnowledgeBase', { ns: 'dataset' })} +
+ ) + } + + if (!isShowDocModeInfo) + return null + + const indexingText = dataset.indexing_technique + ? formatIndexingTechniqueAndMethod( + dataset.indexing_technique as 'economy' | 'high_quality', + dataset.retrieval_model_dict?.search_method as Parameters[1], + ) + : '' + + return ( +
+ {dataset.doc_form && ( + + {t(`chunkingMode.${DOC_FORM_TEXT[dataset.doc_form]}`, { ns: 'dataset' })} + + )} + {dataset.indexing_technique && indexingText && ( + + {indexingText} + + )} + {dataset.is_multimodal && ( + + {t('multimodal', { ns: 'dataset' })} + + )} +
+ ) +} + +// Main DatasetCardHeader component +const DatasetCardHeader = ({ dataset }: DatasetCardHeaderProps) => { + const { t } = useTranslation() + const { formatTimeFromNow } = useFormatTimeFromNow() + + const isExternalProvider = dataset.provider === EXTERNAL_PROVIDER + + const isShowChunkingModeIcon = dataset.doc_form && (dataset.runtime_mode !== 'rag_pipeline' || dataset.is_published) + const isShowDocModeInfo = Boolean( + dataset.doc_form + && dataset.indexing_technique + && dataset.retrieval_model_dict?.search_method + && (dataset.runtime_mode !== 'rag_pipeline' || dataset.is_published), + ) + + const chunkingModeIcon = dataset.doc_form ? DOC_FORM_ICON_WITH_BG[dataset.doc_form] : React.Fragment + const Icon = isExternalProvider ? DOC_FORM_ICON_WITH_BG.external : chunkingModeIcon + + const iconInfo = useMemo(() => dataset.icon_info || { + icon: '๐Ÿ“™', + icon_type: 'emoji' as const, + icon_background: '#FFF4ED', + icon_url: '', + }, [dataset.icon_info]) + + const editTimeText = useMemo( + () => `${t('segment.editedAt', { ns: 'datasetDocuments' })} ${formatTimeFromNow(dataset.updated_at * 1000)}`, + [t, dataset.updated_at, formatTimeFromNow], + ) + + return ( +
+
+ + {(isShowChunkingModeIcon || isExternalProvider) && ( +
+ +
+ )} +
+
+
+ {dataset.name} +
+
+
{dataset.author_name}
+
ยท
+
{editTimeText}
+
+ +
+
+ ) +} + +export default React.memo(DatasetCardHeader) diff --git a/web/app/components/datasets/list/dataset-card/components/dataset-card-modals.tsx b/web/app/components/datasets/list/dataset-card/components/dataset-card-modals.tsx new file mode 100644 index 0000000000..8162bc94c4 --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/dataset-card-modals.tsx @@ -0,0 +1,55 @@ +import type { DataSet } from '@/models/datasets' +import * as React from 'react' +import { useTranslation } from 'react-i18next' +import Confirm from '@/app/components/base/confirm' +import RenameDatasetModal from '../../../rename-modal' + +type ModalState = { + showRenameModal: boolean + showConfirmDelete: boolean + confirmMessage: string +} + +type DatasetCardModalsProps = { + dataset: DataSet + modalState: ModalState + onCloseRename: () => void + onCloseConfirm: () => void + onConfirmDelete: () => void + onSuccess?: () => void +} + +const DatasetCardModals = ({ + dataset, + modalState, + onCloseRename, + onCloseConfirm, + onConfirmDelete, + onSuccess, +}: DatasetCardModalsProps) => { + const { t } = useTranslation() + + return ( + <> + {modalState.showRenameModal && ( + + )} + {modalState.showConfirmDelete && ( + + )} + + ) +} + +export default React.memo(DatasetCardModals) diff --git a/web/app/components/datasets/list/dataset-card/components/description.tsx b/web/app/components/datasets/list/dataset-card/components/description.tsx new file mode 100644 index 0000000000..79604e92ab --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/description.tsx @@ -0,0 +1,18 @@ +import type { DataSet } from '@/models/datasets' +import * as React from 'react' +import { cn } from '@/utils/classnames' + +type DescriptionProps = { + dataset: DataSet +} + +const Description = ({ dataset }: DescriptionProps) => ( +
+ {dataset.description} +
+) + +export default React.memo(Description) diff --git a/web/app/components/datasets/list/dataset-card/components/operations-popover.tsx b/web/app/components/datasets/list/dataset-card/components/operations-popover.tsx new file mode 100644 index 0000000000..80ae2fb7a1 --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/operations-popover.tsx @@ -0,0 +1,52 @@ +import type { DataSet } from '@/models/datasets' +import { RiMoreFill } from '@remixicon/react' +import * as React from 'react' +import CustomPopover from '@/app/components/base/popover' +import { cn } from '@/utils/classnames' +import Operations from '../operations' + +type OperationsPopoverProps = { + dataset: DataSet + isCurrentWorkspaceDatasetOperator: boolean + openRenameModal: () => void + handleExportPipeline: (include?: boolean) => void + detectIsUsedByApp: () => void +} + +const OperationsPopover = ({ + dataset, + isCurrentWorkspaceDatasetOperator, + openRenameModal, + handleExportPipeline, + detectIsUsedByApp, +}: OperationsPopoverProps) => ( +
+ + )} + className="z-20 min-w-[186px]" + popupClassName="rounded-xl bg-none shadow-none ring-0 min-w-[186px]" + position="br" + trigger="click" + btnElement={( +
+ +
+ )} + btnClassName={open => + cn( + 'size-9 cursor-pointer justify-center rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0 shadow-lg shadow-shadow-shadow-5 ring-[2px] ring-inset ring-components-actionbar-bg hover:border-components-actionbar-border', + open ? 'border-components-actionbar-border bg-state-base-hover' : '', + )} + /> +
+) + +export default React.memo(OperationsPopover) diff --git a/web/app/components/datasets/list/dataset-card/components/tag-area.tsx b/web/app/components/datasets/list/dataset-card/components/tag-area.tsx new file mode 100644 index 0000000000..f55a064387 --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/components/tag-area.tsx @@ -0,0 +1,55 @@ +import type { Tag } from '@/app/components/base/tag-management/constant' +import type { DataSet } from '@/models/datasets' +import * as React from 'react' +import TagSelector from '@/app/components/base/tag-management/selector' +import { cn } from '@/utils/classnames' + +type TagAreaProps = { + dataset: DataSet + tags: Tag[] + setTags: (tags: Tag[]) => void + onSuccess?: () => void + isHoveringTagSelector: boolean + onClick: (e: React.MouseEvent) => void +} + +const TagArea = React.forwardRef(({ + dataset, + tags, + setTags, + onSuccess, + isHoveringTagSelector, + onClick, +}, ref) => ( +
+
0 && 'visible', + )} + > + tag.id)} + selectedTags={tags} + onCacheUpdate={setTags} + onChange={onSuccess} + /> +
+
+
+)) +TagArea.displayName = 'TagArea' + +export default TagArea diff --git a/web/app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts b/web/app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts new file mode 100644 index 0000000000..ad68a1df1c --- /dev/null +++ b/web/app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts @@ -0,0 +1,138 @@ +import type { Tag } from '@/app/components/base/tag-management/constant' +import type { DataSet } from '@/models/datasets' +import { useCallback, useEffect, useState } from 'react' +import { useTranslation } from 'react-i18next' +import Toast from '@/app/components/base/toast' +import { useCheckDatasetUsage, useDeleteDataset } from '@/service/use-dataset-card' +import { useExportPipelineDSL } from '@/service/use-pipeline' + +type ModalState = { + showRenameModal: boolean + showConfirmDelete: boolean + confirmMessage: string +} + +type UseDatasetCardStateOptions = { + dataset: DataSet + onSuccess?: () => void +} + +export const useDatasetCardState = ({ dataset, onSuccess }: UseDatasetCardStateOptions) => { + const { t } = useTranslation() + const [tags, setTags] = useState(dataset.tags) + + useEffect(() => { + setTags(dataset.tags) + }, [dataset.tags]) + + // Modal state + const [modalState, setModalState] = useState({ + showRenameModal: false, + showConfirmDelete: false, + confirmMessage: '', + }) + + // Export state + const [exporting, setExporting] = useState(false) + + // Modal handlers + const openRenameModal = useCallback(() => { + setModalState(prev => ({ ...prev, showRenameModal: true })) + }, []) + + const closeRenameModal = useCallback(() => { + setModalState(prev => ({ ...prev, showRenameModal: false })) + }, []) + + const closeConfirmDelete = useCallback(() => { + setModalState(prev => ({ ...prev, showConfirmDelete: false })) + }, []) + + // API mutations + const { mutateAsync: checkUsage } = useCheckDatasetUsage() + const { mutateAsync: deleteDatasetMutation } = useDeleteDataset() + const { mutateAsync: exportPipelineConfig } = useExportPipelineDSL() + + // Export pipeline handler + const handleExportPipeline = useCallback(async (include: boolean = false) => { + const { pipeline_id, name } = dataset + if (!pipeline_id || exporting) + return + + try { + setExporting(true) + const { data } = await exportPipelineConfig({ + pipelineId: pipeline_id, + include, + }) + const a = document.createElement('a') + const file = new Blob([data], { type: 'application/yaml' }) + const url = URL.createObjectURL(file) + a.href = url + a.download = `${name}.pipeline` + a.click() + URL.revokeObjectURL(url) + } + catch { + Toast.notify({ type: 'error', message: t('exportFailed', { ns: 'app' }) }) + } + finally { + setExporting(false) + } + }, [dataset, exportPipelineConfig, exporting, t]) + + // Delete flow handlers + const detectIsUsedByApp = useCallback(async () => { + try { + const { is_using: isUsedByApp } = await checkUsage(dataset.id) + const message = isUsedByApp + ? t('datasetUsedByApp', { ns: 'dataset' })! + : t('deleteDatasetConfirmContent', { ns: 'dataset' })! 
+ setModalState(prev => ({ + ...prev, + confirmMessage: message, + showConfirmDelete: true, + })) + } + catch (e: unknown) { + if (e instanceof Response) { + const res = await e.json() + Toast.notify({ type: 'error', message: res?.message || 'Unknown error' }) + } + else { + Toast.notify({ type: 'error', message: (e as Error)?.message || 'Unknown error' }) + } + } + }, [dataset.id, checkUsage, t]) + + const onConfirmDelete = useCallback(async () => { + try { + await deleteDatasetMutation(dataset.id) + Toast.notify({ type: 'success', message: t('datasetDeleted', { ns: 'dataset' }) }) + onSuccess?.() + } + finally { + closeConfirmDelete() + } + }, [dataset.id, deleteDatasetMutation, onSuccess, t, closeConfirmDelete]) + + return { + // Tag state + tags, + setTags, + + // Modal state + modalState, + openRenameModal, + closeRenameModal, + closeConfirmDelete, + + // Export state + exporting, + + // Handlers + handleExportPipeline, + detectIsUsedByApp, + onConfirmDelete, + } +} diff --git a/web/app/components/datasets/list/dataset-card/index.tsx b/web/app/components/datasets/list/dataset-card/index.tsx index 99404b0454..85dba7e8ff 100644 --- a/web/app/components/datasets/list/dataset-card/index.tsx +++ b/web/app/components/datasets/list/dataset-card/index.tsx @@ -1,28 +1,17 @@ 'use client' -import type { Tag } from '@/app/components/base/tag-management/constant' import type { DataSet } from '@/models/datasets' -import { RiFileTextFill, RiMoreFill, RiRobot2Fill } from '@remixicon/react' import { useHover } from 'ahooks' import { useRouter } from 'next/navigation' -import * as React from 'react' -import { useCallback, useEffect, useMemo, useRef, useState } from 'react' -import { useTranslation } from 'react-i18next' -import AppIcon from '@/app/components/base/app-icon' -import Confirm from '@/app/components/base/confirm' -import CornerLabel from '@/app/components/base/corner-label' -import CustomPopover from '@/app/components/base/popover' -import TagSelector from '@/app/components/base/tag-management/selector' -import Toast from '@/app/components/base/toast' -import Tooltip from '@/app/components/base/tooltip' +import { useMemo, useRef } from 'react' import { useSelector as useAppContextWithSelector } from '@/context/app-context' -import { useFormatTimeFromNow } from '@/hooks/use-format-time-from-now' -import { useKnowledge } from '@/hooks/use-knowledge' -import { DOC_FORM_ICON_WITH_BG, DOC_FORM_TEXT } from '@/models/datasets' -import { checkIsUsedInApp, deleteDataset } from '@/service/datasets' -import { useExportPipelineDSL } from '@/service/use-pipeline' -import { cn } from '@/utils/classnames' -import RenameDatasetModal from '../../rename-modal' -import Operations from './operations' +import CornerLabels from './components/corner-labels' +import DatasetCardFooter from './components/dataset-card-footer' +import DatasetCardHeader from './components/dataset-card-header' +import DatasetCardModals from './components/dataset-card-modals' +import Description from './components/description' +import OperationsPopover from './components/operations-popover' +import TagArea from './components/tag-area' +import { useDatasetCardState } from './hooks/use-dataset-card-state' const EXTERNAL_PROVIDER = 'external' @@ -35,320 +24,80 @@ const DatasetCard = ({ dataset, onSuccess, }: DatasetCardProps) => { - const { t } = useTranslation() const { push } = useRouter() const isCurrentWorkspaceDatasetOperator = useAppContextWithSelector(state => state.isCurrentWorkspaceDatasetOperator) - const [tags, setTags] = 
useState(dataset.tags) const tagSelectorRef = useRef(null) const isHoveringTagSelector = useHover(tagSelectorRef) - const [showRenameModal, setShowRenameModal] = useState(false) - const [showConfirmDelete, setShowConfirmDelete] = useState(false) - const [confirmMessage, setConfirmMessage] = useState('') - const [exporting, setExporting] = useState(false) + const { + tags, + setTags, + modalState, + openRenameModal, + closeRenameModal, + closeConfirmDelete, + handleExportPipeline, + detectIsUsedByApp, + onConfirmDelete, + } = useDatasetCardState({ dataset, onSuccess }) - const isExternalProvider = useMemo(() => { - return dataset.provider === EXTERNAL_PROVIDER - }, [dataset.provider]) + const isExternalProvider = dataset.provider === EXTERNAL_PROVIDER const isPipelineUnpublished = useMemo(() => { return dataset.runtime_mode === 'rag_pipeline' && !dataset.is_published }, [dataset.runtime_mode, dataset.is_published]) - const isShowChunkingModeIcon = useMemo(() => { - return dataset.doc_form && (dataset.runtime_mode !== 'rag_pipeline' || dataset.is_published) - }, [dataset.doc_form, dataset.runtime_mode, dataset.is_published]) - const isShowDocModeInfo = useMemo(() => { - return dataset.doc_form && dataset.indexing_technique && dataset.retrieval_model_dict?.search_method && (dataset.runtime_mode !== 'rag_pipeline' || dataset.is_published) - }, [dataset.doc_form, dataset.indexing_technique, dataset.retrieval_model_dict?.search_method, dataset.runtime_mode, dataset.is_published]) - const chunkingModeIcon = dataset.doc_form ? DOC_FORM_ICON_WITH_BG[dataset.doc_form] : React.Fragment - const Icon = isExternalProvider ? DOC_FORM_ICON_WITH_BG.external : chunkingModeIcon - const iconInfo = dataset.icon_info || { - icon: '๐Ÿ“™', - icon_type: 'emoji', - icon_background: '#FFF4ED', - icon_url: '', + const handleCardClick = (e: React.MouseEvent) => { + e.preventDefault() + if (isExternalProvider) + push(`/datasets/${dataset.id}/hitTesting`) + else if (isPipelineUnpublished) + push(`/datasets/${dataset.id}/pipeline`) + else + push(`/datasets/${dataset.id}/documents`) } - const { formatIndexingTechniqueAndMethod } = useKnowledge() - const documentCount = useMemo(() => { - const availableDocCount = dataset.total_available_documents ?? 0 - if (availableDocCount === dataset.document_count) - return `${dataset.document_count}` - if (availableDocCount < dataset.document_count) - return `${availableDocCount} / ${dataset.document_count}` - }, [dataset.document_count, dataset.total_available_documents]) - const documentCountTooltip = useMemo(() => { - const availableDocCount = dataset.total_available_documents ?? 
0 - if (availableDocCount === dataset.document_count) - return t('docAllEnabled', { ns: 'dataset', count: availableDocCount }) - if (availableDocCount < dataset.document_count) - return t('partialEnabled', { ns: 'dataset', count: dataset.document_count, num: availableDocCount }) - }, [t, dataset.document_count, dataset.total_available_documents]) - const { formatTimeFromNow } = useFormatTimeFromNow() - const editTimeText = useMemo(() => { - return `${t('segment.editedAt', { ns: 'datasetDocuments' })} ${formatTimeFromNow(dataset.updated_at * 1000)}` - }, [t, dataset.updated_at, formatTimeFromNow]) - - const openRenameModal = useCallback(() => { - setShowRenameModal(true) - }, []) - - const { mutateAsync: exportPipelineConfig } = useExportPipelineDSL() - - const handleExportPipeline = useCallback(async (include = false) => { - const { pipeline_id, name } = dataset - if (!pipeline_id) - return - - if (exporting) - return - - try { - setExporting(true) - const { data } = await exportPipelineConfig({ - pipelineId: pipeline_id, - include, - }) - const a = document.createElement('a') - const file = new Blob([data], { type: 'application/yaml' }) - const url = URL.createObjectURL(file) - a.href = url - a.download = `${name}.pipeline` - a.click() - URL.revokeObjectURL(url) - } - catch { - Toast.notify({ type: 'error', message: t('exportFailed', { ns: 'app' }) }) - } - finally { - setExporting(false) - } - }, [dataset, exportPipelineConfig, exporting, t]) - - const detectIsUsedByApp = useCallback(async () => { - try { - const { is_using: isUsedByApp } = await checkIsUsedInApp(dataset.id) - setConfirmMessage(isUsedByApp ? t('datasetUsedByApp', { ns: 'dataset' })! : t('deleteDatasetConfirmContent', { ns: 'dataset' })!) - setShowConfirmDelete(true) - } - catch (e: any) { - const res = await e.json() - Toast.notify({ type: 'error', message: res?.message || 'Unknown error' }) - } - }, [dataset.id, t]) - - const onConfirmDelete = useCallback(async () => { - try { - await deleteDataset(dataset.id) - Toast.notify({ type: 'success', message: t('datasetDeleted', { ns: 'dataset' }) }) - if (onSuccess) - onSuccess() - } - finally { - setShowConfirmDelete(false) - } - }, [dataset.id, onSuccess, t]) - - useEffect(() => { - setTags(dataset.tags) - }, [dataset]) + const handleTagAreaClick = (e: React.MouseEvent) => { + e.stopPropagation() + e.preventDefault() + } return ( <>
{ - e.preventDefault() - if (isExternalProvider) - push(`/datasets/${dataset.id}/hitTesting`) - else if (isPipelineUnpublished) - push(`/datasets/${dataset.id}/pipeline`) - else - push(`/datasets/${dataset.id}/documents`) - }} + onClick={handleCardClick} > - {!dataset.embedding_available && ( - - )} - {dataset.embedding_available && dataset.runtime_mode === 'rag_pipeline' && ( - - )} -
-
- - {(isShowChunkingModeIcon || isExternalProvider) && ( -
- -
- )} -
-
-
- {dataset.name} -
-
-
{dataset.author_name}
-
ยท
-
{editTimeText}
-
-
- {isExternalProvider && {t('externalKnowledgeBase', { ns: 'dataset' })}} - {!isExternalProvider && isShowDocModeInfo && ( - <> - {dataset.doc_form && ( - - {t(`chunkingMode.${DOC_FORM_TEXT[dataset.doc_form]}`, { ns: 'dataset' })} - - )} - {dataset.indexing_technique && ( - - {formatIndexingTechniqueAndMethod(dataset.indexing_technique, dataset.retrieval_model_dict?.search_method) as any} - - )} - {dataset.is_multimodal && ( - - {t('multimodal', { ns: 'dataset' })} - - )} - - )} -
-
-
-
- {dataset.description} -
-
{ - e.stopPropagation() - e.preventDefault() - }} - > -
0 && 'visible', - )} - > - tag.id)} - selectedTags={tags} - onCacheUpdate={setTags} - onChange={onSuccess} - /> -
- {/* Tag Mask */} -
-
-
- -
- - {documentCount} -
-
- {!isExternalProvider && ( - -
- - {dataset.app_count} -
-
- )} - / - {`${t('updated', { ns: 'dataset' })} ${formatTimeFromNow(dataset.updated_at * 1000)}`} -
-
- - )} - className="z-20 min-w-[186px]" - popupClassName="rounded-xl bg-none shadow-none ring-0 min-w-[186px]" - position="br" - trigger="click" - btnElement={( -
- -
- )} - btnClassName={open => - cn( - 'size-9 cursor-pointer justify-center rounded-[10px] border-[0.5px] border-components-actionbar-border bg-components-actionbar-bg p-0 shadow-lg shadow-shadow-shadow-5 ring-[2px] ring-inset ring-components-actionbar-bg hover:border-components-actionbar-border', - open ? 'border-components-actionbar-border bg-state-base-hover' : '', - )} - /> -
-
- {showRenameModal && ( - + + + setShowRenameModal(false)} + tags={tags} + setTags={setTags} onSuccess={onSuccess} + isHoveringTagSelector={isHoveringTagSelector} + onClick={handleTagAreaClick} /> - )} - {showConfirmDelete && ( - setShowConfirmDelete(false)} + + - )} +
+ ) } diff --git a/web/service/use-dataset-card.ts b/web/service/use-dataset-card.ts new file mode 100644 index 0000000000..05365479dc --- /dev/null +++ b/web/service/use-dataset-card.ts @@ -0,0 +1,18 @@ +import { useMutation } from '@tanstack/react-query' +import { checkIsUsedInApp, deleteDataset } from './datasets' + +const NAME_SPACE = 'dataset-card' + +export const useCheckDatasetUsage = () => { + return useMutation({ + mutationKey: [NAME_SPACE, 'check-usage'], + mutationFn: (datasetId: string) => checkIsUsedInApp(datasetId), + }) +} + +export const useDeleteDataset = () => { + return useMutation({ + mutationKey: [NAME_SPACE, 'delete'], + mutationFn: (datasetId: string) => deleteDataset(datasetId), + }) +} From 55de731f9c702ea109cd0b119392021ce679e721 Mon Sep 17 00:00:00 2001 From: -LAN- Date: Tue, 6 Jan 2026 22:48:06 +0800 Subject: [PATCH 05/15] refactor(api): clarify published RAG pipeline invoke naming (#30644) --- .../datasets/rag_pipeline/rag_pipeline_workflow.py | 2 +- .../dataset/rag_pipeline/rag_pipeline_workflow.py | 2 +- api/core/app/apps/pipeline/pipeline_generator.py | 4 ++-- api/core/app/entities/app_invoke_entities.py | 3 ++- api/services/rag_pipeline/rag_pipeline.py | 4 ++-- api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py | 2 +- api/tasks/rag_pipeline/rag_pipeline_run_task.py | 2 +- .../tasks/test_rag_pipeline_run_tasks.py | 8 ++++---- .../common/test_workflow_response_converter_truncation.py | 6 +++--- 9 files changed, 17 insertions(+), 16 deletions(-) diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index 46d67f0581..02efc54eea 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -355,7 +355,7 @@ class PublishedRagPipelineRunApi(Resource): pipeline=pipeline, user=current_user, args=args, - invoke_from=InvokeFrom.DEBUGGER if payload.is_preview else InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.DEBUGGER if payload.is_preview else InvokeFrom.PUBLISHED_PIPELINE, streaming=streaming, ) diff --git a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py index 0a2017e2bd..70b5030237 100644 --- a/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/service_api/dataset/rag_pipeline/rag_pipeline_workflow.py @@ -174,7 +174,7 @@ class PipelineRunApi(DatasetApiResource): pipeline=pipeline, user=current_user, args=payload.model_dump(), - invoke_from=InvokeFrom.PUBLISHED if payload.is_published else InvokeFrom.DEBUGGER, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE if payload.is_published else InvokeFrom.DEBUGGER, streaming=payload.response_mode == "streaming", ) diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py index 13eb40fd60..ea4441b5d8 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -130,7 +130,7 @@ class PipelineGenerator(BaseAppGenerator): pipeline=pipeline, workflow=workflow, start_node_id=start_node_id ) documents: list[Document] = [] - if invoke_from == InvokeFrom.PUBLISHED and not is_retry and not args.get("original_document_id"): + if invoke_from == InvokeFrom.PUBLISHED_PIPELINE and not is_retry and not args.get("original_document_id"): from services.dataset_service import 
DocumentService for datasource_info in datasource_info_list: @@ -156,7 +156,7 @@ class PipelineGenerator(BaseAppGenerator): for i, datasource_info in enumerate(datasource_info_list): workflow_run_id = str(uuid.uuid4()) document_id = args.get("original_document_id") or None - if invoke_from == InvokeFrom.PUBLISHED and not is_retry: + if invoke_from == InvokeFrom.PUBLISHED_PIPELINE and not is_retry: document_id = document_id or documents[i].id document_pipeline_execution_log = DocumentPipelineExecutionLog( document_id=document_id, diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index 0cb573cb86..5bc453420d 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -42,7 +42,8 @@ class InvokeFrom(StrEnum): # DEBUGGER indicates that this invocation is from # the workflow (or chatflow) edit page. DEBUGGER = "debugger" - PUBLISHED = "published" + # PUBLISHED_PIPELINE indicates that this invocation runs a published RAG pipeline workflow. + PUBLISHED_PIPELINE = "published" # VALIDATION indicates that this invocation is from validation. VALIDATION = "validation" diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index f53448e7fe..1ba64813ba 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ b/api/services/rag_pipeline/rag_pipeline.py @@ -874,7 +874,7 @@ class RagPipelineService: variable_pool = node_instance.graph_runtime_state.variable_pool invoke_from = variable_pool.get(["sys", SystemVariableKey.INVOKE_FROM]) if invoke_from: - if invoke_from.value == InvokeFrom.PUBLISHED: + if invoke_from.value == InvokeFrom.PUBLISHED_PIPELINE: document_id = variable_pool.get(["sys", SystemVariableKey.DOCUMENT_ID]) if document_id: document = db.session.query(Document).where(Document.id == document_id.value).first() @@ -1318,7 +1318,7 @@ class RagPipelineService: "datasource_info_list": [json.loads(document_pipeline_execution_log.datasource_info)], "original_document_id": document.id, }, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, streaming=False, call_depth=0, workflow_thread_pool_id=None, diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 1eef361a92..3c5e152520 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -178,7 +178,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_id=workflow_id, user=account, application_generate_entity=entity, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index 275f5abe6e..093342d1a3 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -178,7 +178,7 @@ def run_single_rag_pipeline_task(rag_pipeline_invoke_entity: Mapping[str, Any], workflow_id=workflow_id, user=account, application_generate_entity=entity, - invoke_from=InvokeFrom.PUBLISHED, + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, 
streaming=streaming, diff --git a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py index e29b98037f..b9977b1fb6 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py +++ b/api/tests/test_containers_integration_tests/tasks/test_rag_pipeline_run_tasks.py @@ -165,7 +165,7 @@ class TestRagPipelineRunTasks: "files": [], "user_id": account.id, "stream": False, - "invoke_from": "published", + "invoke_from": InvokeFrom.PUBLISHED_PIPELINE.value, "workflow_execution_id": str(uuid.uuid4()), "pipeline_config": { "app_id": str(uuid.uuid4()), @@ -249,7 +249,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) @@ -294,7 +294,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) @@ -743,7 +743,7 @@ class TestRagPipelineRunTasks: assert call_kwargs["pipeline"].id == pipeline.id assert call_kwargs["workflow_id"] == workflow.id assert call_kwargs["user"].id == account.id - assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED + assert call_kwargs["invoke_from"] == InvokeFrom.PUBLISHED_PIPELINE assert call_kwargs["streaming"] == False assert isinstance(call_kwargs["application_generate_entity"], RagPipelineGenerateEntity) diff --git a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py index 1c9f577a50..6b40bf462b 100644 --- a/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py +++ b/api/tests/unit_tests/core/app/apps/common/test_workflow_response_converter_truncation.py @@ -431,10 +431,10 @@ class TestWorkflowResponseConverterServiceApiTruncation: description="Explore calls should have truncation enabled", ), TestCase( - name="published_truncation_enabled", - invoke_from=InvokeFrom.PUBLISHED, + name="published_pipeline_truncation_enabled", + invoke_from=InvokeFrom.PUBLISHED_PIPELINE, expected_truncation_enabled=True, - description="Published app calls should have truncation enabled", + description="Published pipeline calls should have truncation enabled", ), ], ids=lambda x: x.name, From 0294555893e6875d4731ff9411050594d8a09b59 Mon Sep 17 00:00:00 2001 From: Asuka Minato Date: Tue, 6 Jan 2026 23:55:58 +0900 Subject: [PATCH 06/15] refactor: port api/fields/file_fields.py (#30638) --- api/controllers/console/files.py | 39 +++-- api/controllers/console/remote_files.py | 41 ++--- api/controllers/files/upload.py | 44 +++--- api/controllers/service_api/app/file.py | 12 +- api/controllers/web/files.py | 11 +- api/controllers/web/remote_files.py | 39 ++--- api/fields/file_fields.py | 146 +++++++++--------- .../console/test_files_security.py | 9 
+- .../unit_tests/fields/test_file_fields.py | 78 ++++++++++ 9 files changed, 257 insertions(+), 162 deletions(-) create mode 100644 api/tests/unit_tests/fields/test_file_fields.py diff --git a/api/controllers/console/files.py b/api/controllers/console/files.py index 29417dc896..109a3cd0d3 100644 --- a/api/controllers/console/files.py +++ b/api/controllers/console/files.py @@ -1,7 +1,7 @@ from typing import Literal from flask import request -from flask_restx import Resource, marshal_with +from flask_restx import Resource from werkzeug.exceptions import Forbidden import services @@ -15,18 +15,21 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from controllers.console.wraps import ( account_initialization_required, cloud_edition_billing_resource_check, setup_required, ) from extensions.ext_database import db -from fields.file_fields import file_fields, upload_config_fields +from fields.file_fields import FileResponse, UploadConfig from libs.login import current_account_with_tenant, login_required from services.file_service import FileService from . import console_ns +register_schema_models(console_ns, UploadConfig, FileResponse) + PREVIEW_WORDS_LIMIT = 3000 @@ -35,26 +38,27 @@ class FileApi(Resource): @setup_required @login_required @account_initialization_required - @marshal_with(upload_config_fields) + @console_ns.response(200, "Success", console_ns.models[UploadConfig.__name__]) def get(self): - return { - "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT, - "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT, - "file_upload_limit": dify_config.BATCH_UPLOAD_LIMIT, - "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, - "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, - "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, - "workflow_file_upload_limit": dify_config.WORKFLOW_FILE_UPLOAD_LIMIT, - "image_file_batch_limit": dify_config.IMAGE_FILE_BATCH_LIMIT, - "single_chunk_attachment_limit": dify_config.SINGLE_CHUNK_ATTACHMENT_LIMIT, - "attachment_image_file_size_limit": dify_config.ATTACHMENT_IMAGE_FILE_SIZE_LIMIT, - }, 200 + config = UploadConfig( + file_size_limit=dify_config.UPLOAD_FILE_SIZE_LIMIT, + batch_count_limit=dify_config.UPLOAD_FILE_BATCH_LIMIT, + file_upload_limit=dify_config.BATCH_UPLOAD_LIMIT, + image_file_size_limit=dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT, + video_file_size_limit=dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT, + audio_file_size_limit=dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT, + workflow_file_upload_limit=dify_config.WORKFLOW_FILE_UPLOAD_LIMIT, + image_file_batch_limit=dify_config.IMAGE_FILE_BATCH_LIMIT, + single_chunk_attachment_limit=dify_config.SINGLE_CHUNK_ATTACHMENT_LIMIT, + attachment_image_file_size_limit=dify_config.ATTACHMENT_IMAGE_FILE_SIZE_LIMIT, + ) + return config.model_dump(mode="json"), 200 @setup_required @login_required @account_initialization_required - @marshal_with(file_fields) @cloud_edition_billing_resource_check("documents") + @console_ns.response(201, "File uploaded successfully", console_ns.models[FileResponse.__name__]) def post(self): current_user, _ = current_account_with_tenant() source_str = request.form.get("source") @@ -90,7 +94,8 @@ class FileApi(Resource): except services.errors.file.BlockedFileExtensionError as blocked_extension_error: raise BlockedFileExtensionError(blocked_extension_error.description) - return upload_file, 201 + response = 
FileResponse.model_validate(upload_file, from_attributes=True) + return response.model_dump(mode="json"), 201 @console_ns.route("/files//preview") diff --git a/api/controllers/console/remote_files.py b/api/controllers/console/remote_files.py index 47eef7eb7e..70c7b80ffa 100644 --- a/api/controllers/console/remote_files.py +++ b/api/controllers/console/remote_files.py @@ -1,7 +1,7 @@ import urllib.parse import httpx -from flask_restx import Resource, marshal_with +from flask_restx import Resource from pydantic import BaseModel, Field import services @@ -11,19 +11,22 @@ from controllers.common.errors import ( RemoteFileUploadError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db -from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields +from fields.file_fields import FileWithSignedUrl, RemoteFileInfo from libs.login import current_account_with_tenant from services.file_service import FileService from . import console_ns +register_schema_models(console_ns, RemoteFileInfo, FileWithSignedUrl) + @console_ns.route("/remote-files/") class RemoteFileInfoApi(Resource): - @marshal_with(remote_file_info_fields) + @console_ns.response(200, "Remote file info", console_ns.models[RemoteFileInfo.__name__]) def get(self, url): decoded_url = urllib.parse.unquote(url) resp = ssrf_proxy.head(decoded_url) @@ -31,10 +34,11 @@ class RemoteFileInfoApi(Resource): # failed back to get method resp = ssrf_proxy.get(decoded_url, timeout=3) resp.raise_for_status() - return { - "file_type": resp.headers.get("Content-Type", "application/octet-stream"), - "file_length": int(resp.headers.get("Content-Length", 0)), - } + info = RemoteFileInfo( + file_type=resp.headers.get("Content-Type", "application/octet-stream"), + file_length=int(resp.headers.get("Content-Length", 0)), + ) + return info.model_dump(mode="json") class RemoteFileUploadPayload(BaseModel): @@ -50,7 +54,7 @@ console_ns.schema_model( @console_ns.route("/remote-files/upload") class RemoteFileUploadApi(Resource): @console_ns.expect(console_ns.models[RemoteFileUploadPayload.__name__]) - @marshal_with(file_fields_with_signed_url) + @console_ns.response(201, "Remote file uploaded", console_ns.models[FileWithSignedUrl.__name__]) def post(self): args = RemoteFileUploadPayload.model_validate(console_ns.payload) url = args.url @@ -85,13 +89,14 @@ class RemoteFileUploadApi(Resource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return { - "id": upload_file.id, - "name": upload_file.name, - "size": upload_file.size, - "extension": upload_file.extension, - "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), - "mime_type": upload_file.mime_type, - "created_by": upload_file.created_by, - "created_at": upload_file.created_at, - }, 201 + payload = FileWithSignedUrl( + id=upload_file.id, + name=upload_file.name, + size=upload_file.size, + extension=upload_file.extension, + url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + mime_type=upload_file.mime_type, + created_by=upload_file.created_by, + created_at=int(upload_file.created_at.timestamp()), + ) + return payload.model_dump(mode="json"), 201 diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index 6096a87c56..28ec4b3935 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -4,18 +4,18 @@ from flask 
import request from flask_restx import Resource from flask_restx.api import HTTPStatus from pydantic import BaseModel, Field -from werkzeug.datastructures import FileStorage from werkzeug.exceptions import Forbidden import services from core.file.helpers import verify_plugin_file_signature from core.tools.tool_file_manager import ToolFileManager -from fields.file_fields import build_file_model +from fields.file_fields import FileResponse from ..common.errors import ( FileTooLargeError, UnsupportedFileTypeError, ) +from ..common.schema import register_schema_models from ..console.wraps import setup_required from ..files import files_ns from ..inner_api.plugin.wraps import get_user @@ -35,6 +35,8 @@ files_ns.schema_model( PluginUploadQuery.__name__, PluginUploadQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0) ) +register_schema_models(files_ns, FileResponse) + @files_ns.route("/upload/for-plugin") class PluginUploadFileApi(Resource): @@ -51,7 +53,7 @@ class PluginUploadFileApi(Resource): 415: "Unsupported file type", } ) - @files_ns.marshal_with(build_file_model(files_ns), code=HTTPStatus.CREATED) + @files_ns.response(HTTPStatus.CREATED, "File uploaded", files_ns.models[FileResponse.__name__]) def post(self): """Upload a file for plugin usage. @@ -69,7 +71,7 @@ class PluginUploadFileApi(Resource): """ args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore - file: FileStorage | None = request.files.get("file") + file = request.files.get("file") if file is None: raise Forbidden("File is required.") @@ -80,8 +82,8 @@ class PluginUploadFileApi(Resource): user_id = args.user_id user = get_user(tenant_id, user_id) - filename: str | None = file.filename - mimetype: str | None = file.mimetype + filename = file.filename + mimetype = file.mimetype if not filename or not mimetype: raise Forbidden("Invalid request.") @@ -111,22 +113,22 @@ class PluginUploadFileApi(Resource): preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension) # Create a dictionary with all the necessary attributes - result = { - "id": tool_file.id, - "user_id": tool_file.user_id, - "tenant_id": tool_file.tenant_id, - "conversation_id": tool_file.conversation_id, - "file_key": tool_file.file_key, - "mimetype": tool_file.mimetype, - "original_url": tool_file.original_url, - "name": tool_file.name, - "size": tool_file.size, - "mime_type": mimetype, - "extension": extension, - "preview_url": preview_url, - } + result = FileResponse( + id=tool_file.id, + name=tool_file.name, + size=tool_file.size, + extension=extension, + mime_type=mimetype, + preview_url=preview_url, + source_url=tool_file.original_url, + original_url=tool_file.original_url, + user_id=tool_file.user_id, + tenant_id=tool_file.tenant_id, + conversation_id=tool_file.conversation_id, + file_key=tool_file.file_key, + ) - return result, 201 + return result.model_dump(mode="json"), 201 except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: diff --git a/api/controllers/service_api/app/file.py b/api/controllers/service_api/app/file.py index ffe4e0b492..6f6dadf768 100644 --- a/api/controllers/service_api/app/file.py +++ b/api/controllers/service_api/app/file.py @@ -10,13 +10,16 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from controllers.service_api import 
service_api_ns from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from extensions.ext_database import db -from fields.file_fields import build_file_model +from fields.file_fields import FileResponse from models import App, EndUser from services.file_service import FileService +register_schema_models(service_api_ns, FileResponse) + @service_api_ns.route("/files/upload") class FileApi(Resource): @@ -31,8 +34,8 @@ class FileApi(Resource): 415: "Unsupported file type", } ) - @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) - @service_api_ns.marshal_with(build_file_model(service_api_ns), code=HTTPStatus.CREATED) + @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) # type: ignore + @service_api_ns.response(HTTPStatus.CREATED, "File uploaded", service_api_ns.models[FileResponse.__name__]) def post(self, app_model: App, end_user: EndUser): """Upload a file for use in conversations. @@ -64,4 +67,5 @@ class FileApi(Resource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return upload_file, 201 + response = FileResponse.model_validate(upload_file, from_attributes=True) + return response.model_dump(mode="json"), 201 diff --git a/api/controllers/web/files.py b/api/controllers/web/files.py index 80ad61e549..0036c90800 100644 --- a/api/controllers/web/files.py +++ b/api/controllers/web/files.py @@ -1,5 +1,4 @@ from flask import request -from flask_restx import marshal_with import services from controllers.common.errors import ( @@ -9,12 +8,15 @@ from controllers.common.errors import ( TooManyFilesError, UnsupportedFileTypeError, ) +from controllers.common.schema import register_schema_models from controllers.web import web_ns from controllers.web.wraps import WebApiResource from extensions.ext_database import db -from fields.file_fields import build_file_model +from fields.file_fields import FileResponse from services.file_service import FileService +register_schema_models(web_ns, FileResponse) + @web_ns.route("/files/upload") class FileApi(WebApiResource): @@ -28,7 +30,7 @@ class FileApi(WebApiResource): 415: "Unsupported file type", } ) - @marshal_with(build_file_model(web_ns)) + @web_ns.response(201, "File uploaded successfully", web_ns.models[FileResponse.__name__]) def post(self, app_model, end_user): """Upload a file for use in web applications. 
@@ -81,4 +83,5 @@ class FileApi(WebApiResource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError() - return upload_file, 201 + response = FileResponse.model_validate(upload_file, from_attributes=True) + return response.model_dump(mode="json"), 201 diff --git a/api/controllers/web/remote_files.py b/api/controllers/web/remote_files.py index c1f976829f..b08b3fe858 100644 --- a/api/controllers/web/remote_files.py +++ b/api/controllers/web/remote_files.py @@ -1,7 +1,6 @@ import urllib.parse import httpx -from flask_restx import marshal_with from pydantic import BaseModel, Field, HttpUrl import services @@ -14,7 +13,7 @@ from controllers.common.errors import ( from core.file import helpers as file_helpers from core.helper import ssrf_proxy from extensions.ext_database import db -from fields.file_fields import build_file_with_signed_url_model, build_remote_file_info_model +from fields.file_fields import FileWithSignedUrl, RemoteFileInfo from services.file_service import FileService from ..common.schema import register_schema_models @@ -26,7 +25,7 @@ class RemoteFileUploadPayload(BaseModel): url: HttpUrl = Field(description="Remote file URL") -register_schema_models(web_ns, RemoteFileUploadPayload) +register_schema_models(web_ns, RemoteFileUploadPayload, RemoteFileInfo, FileWithSignedUrl) @web_ns.route("/remote-files/") @@ -41,7 +40,7 @@ class RemoteFileInfoApi(WebApiResource): 500: "Failed to fetch remote file", } ) - @marshal_with(build_remote_file_info_model(web_ns)) + @web_ns.response(200, "Remote file info", web_ns.models[RemoteFileInfo.__name__]) def get(self, app_model, end_user, url): """Get information about a remote file. @@ -65,10 +64,11 @@ class RemoteFileInfoApi(WebApiResource): # failed back to get method resp = ssrf_proxy.get(decoded_url, timeout=3) resp.raise_for_status() - return { - "file_type": resp.headers.get("Content-Type", "application/octet-stream"), - "file_length": int(resp.headers.get("Content-Length", -1)), - } + info = RemoteFileInfo( + file_type=resp.headers.get("Content-Type", "application/octet-stream"), + file_length=int(resp.headers.get("Content-Length", -1)), + ) + return info.model_dump(mode="json") @web_ns.route("/remote-files/upload") @@ -84,7 +84,7 @@ class RemoteFileUploadApi(WebApiResource): 500: "Failed to fetch remote file", } ) - @marshal_with(build_file_with_signed_url_model(web_ns)) + @web_ns.response(201, "Remote file uploaded", web_ns.models[FileWithSignedUrl.__name__]) def post(self, app_model, end_user): """Upload a file from a remote URL. 
@@ -139,13 +139,14 @@ class RemoteFileUploadApi(WebApiResource): except services.errors.file.UnsupportedFileTypeError: raise UnsupportedFileTypeError - return { - "id": upload_file.id, - "name": upload_file.name, - "size": upload_file.size, - "extension": upload_file.extension, - "url": file_helpers.get_signed_file_url(upload_file_id=upload_file.id), - "mime_type": upload_file.mime_type, - "created_by": upload_file.created_by, - "created_at": upload_file.created_at, - }, 201 + payload1 = FileWithSignedUrl( + id=upload_file.id, + name=upload_file.name, + size=upload_file.size, + extension=upload_file.extension, + url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id), + mime_type=upload_file.mime_type, + created_by=upload_file.created_by, + created_at=int(upload_file.created_at.timestamp()), + ) + return payload1.model_dump(mode="json"), 201 diff --git a/api/fields/file_fields.py b/api/fields/file_fields.py index 70138404c7..913fb675f9 100644 --- a/api/fields/file_fields.py +++ b/api/fields/file_fields.py @@ -1,93 +1,85 @@ -from flask_restx import Namespace, fields +from __future__ import annotations -from libs.helper import TimestampField +from datetime import datetime -upload_config_fields = { - "file_size_limit": fields.Integer, - "batch_count_limit": fields.Integer, - "image_file_size_limit": fields.Integer, - "video_file_size_limit": fields.Integer, - "audio_file_size_limit": fields.Integer, - "workflow_file_upload_limit": fields.Integer, - "image_file_batch_limit": fields.Integer, - "single_chunk_attachment_limit": fields.Integer, -} +from pydantic import BaseModel, ConfigDict, field_validator -def build_upload_config_model(api_or_ns: Namespace): - """Build the upload config model for the API or Namespace. - - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("UploadConfig", upload_config_fields) +class ResponseModel(BaseModel): + model_config = ConfigDict( + from_attributes=True, + extra="ignore", + populate_by_name=True, + serialize_by_alias=True, + protected_namespaces=(), + ) -file_fields = { - "id": fields.String, - "name": fields.String, - "size": fields.Integer, - "extension": fields.String, - "mime_type": fields.String, - "created_by": fields.String, - "created_at": TimestampField, - "preview_url": fields.String, - "source_url": fields.String, -} +def _to_timestamp(value: datetime | int | None) -> int | None: + if isinstance(value, datetime): + return int(value.timestamp()) + return value -def build_file_model(api_or_ns: Namespace): - """Build the file model for the API or Namespace. 
- - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("File", file_fields) +class UploadConfig(ResponseModel): + file_size_limit: int + batch_count_limit: int + file_upload_limit: int | None = None + image_file_size_limit: int + video_file_size_limit: int + audio_file_size_limit: int + workflow_file_upload_limit: int + image_file_batch_limit: int + single_chunk_attachment_limit: int + attachment_image_file_size_limit: int | None = None -remote_file_info_fields = { - "file_type": fields.String(attribute="file_type"), - "file_length": fields.Integer(attribute="file_length"), -} +class FileResponse(ResponseModel): + id: str + name: str + size: int + extension: str | None = None + mime_type: str | None = None + created_by: str | None = None + created_at: int | None = None + preview_url: str | None = None + source_url: str | None = None + original_url: str | None = None + user_id: str | None = None + tenant_id: str | None = None + conversation_id: str | None = None + file_key: str | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) -def build_remote_file_info_model(api_or_ns: Namespace): - """Build the remote file info model for the API or Namespace. - - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("RemoteFileInfo", remote_file_info_fields) +class RemoteFileInfo(ResponseModel): + file_type: str + file_length: int -file_fields_with_signed_url = { - "id": fields.String, - "name": fields.String, - "size": fields.Integer, - "extension": fields.String, - "url": fields.String, - "mime_type": fields.String, - "created_by": fields.String, - "created_at": TimestampField, -} +class FileWithSignedUrl(ResponseModel): + id: str + name: str + size: int + extension: str | None = None + url: str | None = None + mime_type: str | None = None + created_by: str | None = None + created_at: int | None = None + + @field_validator("created_at", mode="before") + @classmethod + def _normalize_created_at(cls, value: datetime | int | None) -> int | None: + return _to_timestamp(value) -def build_file_with_signed_url_model(api_or_ns: Namespace): - """Build the file with signed URL model for the API or Namespace. 
- - Args: - api_or_ns: Flask-RestX Api or Namespace instance - - Returns: - The registered model - """ - return api_or_ns.model("FileWithSignedUrl", file_fields_with_signed_url) +__all__ = [ + "FileResponse", + "FileWithSignedUrl", + "RemoteFileInfo", + "UploadConfig", +] diff --git a/api/tests/unit_tests/controllers/console/test_files_security.py b/api/tests/unit_tests/controllers/console/test_files_security.py index 2630fbcfd0..370bf63fdb 100644 --- a/api/tests/unit_tests/controllers/console/test_files_security.py +++ b/api/tests/unit_tests/controllers/console/test_files_security.py @@ -1,7 +1,9 @@ +import builtins import io from unittest.mock import patch import pytest +from flask.views import MethodView from werkzeug.exceptions import Forbidden from controllers.common.errors import ( @@ -14,6 +16,9 @@ from controllers.common.errors import ( from services.errors.file import FileTooLargeError as ServiceFileTooLargeError from services.errors.file import UnsupportedFileTypeError as ServiceUnsupportedFileTypeError +if not hasattr(builtins, "MethodView"): + builtins.MethodView = MethodView # type: ignore[attr-defined] + class TestFileUploadSecurity: """Test file upload security logic without complex framework setup""" @@ -128,7 +133,7 @@ class TestFileUploadSecurity: # Test passes if no exception is raised # Test 4: Service error handling - @patch("services.file_service.FileService.upload_file") + @patch("controllers.console.files.FileService.upload_file") def test_should_handle_file_too_large_error(self, mock_upload): """Test that service FileTooLargeError is properly converted""" mock_upload.side_effect = ServiceFileTooLargeError("File too large") @@ -140,7 +145,7 @@ class TestFileUploadSecurity: with pytest.raises(FileTooLargeError): raise FileTooLargeError(e.description) - @patch("services.file_service.FileService.upload_file") + @patch("controllers.console.files.FileService.upload_file") def test_should_handle_unsupported_file_type_error(self, mock_upload): """Test that service UnsupportedFileTypeError is properly converted""" mock_upload.side_effect = ServiceUnsupportedFileTypeError() diff --git a/api/tests/unit_tests/fields/test_file_fields.py b/api/tests/unit_tests/fields/test_file_fields.py new file mode 100644 index 0000000000..8be8df16f4 --- /dev/null +++ b/api/tests/unit_tests/fields/test_file_fields.py @@ -0,0 +1,78 @@ +from __future__ import annotations + +from datetime import datetime +from types import SimpleNamespace + +from fields.file_fields import FileResponse, FileWithSignedUrl, RemoteFileInfo, UploadConfig + + +def test_file_response_serializes_datetime() -> None: + created_at = datetime(2024, 1, 1, 12, 0, 0) + file_obj = SimpleNamespace( + id="file-1", + name="example.txt", + size=1024, + extension="txt", + mime_type="text/plain", + created_by="user-1", + created_at=created_at, + preview_url="https://preview", + source_url="https://source", + original_url="https://origin", + user_id="user-1", + tenant_id="tenant-1", + conversation_id="conv-1", + file_key="key-1", + ) + + serialized = FileResponse.model_validate(file_obj, from_attributes=True).model_dump(mode="json") + + assert serialized["id"] == "file-1" + assert serialized["created_at"] == int(created_at.timestamp()) + assert serialized["preview_url"] == "https://preview" + assert serialized["source_url"] == "https://source" + assert serialized["original_url"] == "https://origin" + assert serialized["user_id"] == "user-1" + assert serialized["tenant_id"] == "tenant-1" + assert serialized["conversation_id"] == 
"conv-1" + assert serialized["file_key"] == "key-1" + + +def test_file_with_signed_url_builds_payload() -> None: + payload = FileWithSignedUrl( + id="file-2", + name="remote.pdf", + size=2048, + extension="pdf", + url="https://signed", + mime_type="application/pdf", + created_by="user-2", + created_at=datetime(2024, 1, 2, 0, 0, 0), + ) + + dumped = payload.model_dump(mode="json") + + assert dumped["url"] == "https://signed" + assert dumped["created_at"] == int(datetime(2024, 1, 2, 0, 0, 0).timestamp()) + + +def test_remote_file_info_and_upload_config() -> None: + info = RemoteFileInfo(file_type="text/plain", file_length=123) + assert info.model_dump(mode="json") == {"file_type": "text/plain", "file_length": 123} + + config = UploadConfig( + file_size_limit=1, + batch_count_limit=2, + file_upload_limit=3, + image_file_size_limit=4, + video_file_size_limit=5, + audio_file_size_limit=6, + workflow_file_upload_limit=7, + image_file_batch_limit=8, + single_chunk_attachment_limit=9, + attachment_image_file_size_limit=10, + ) + + dumped = config.model_dump(mode="json") + assert dumped["file_upload_limit"] == 3 + assert dumped["attachment_image_file_size_limit"] == 10 From 4f0fb6df2bfe39cde7bf886993b64b015fa8c6bc Mon Sep 17 00:00:00 2001 From: Sara Rasool <83841462+sarxxt@users.noreply.github.com> Date: Tue, 6 Jan 2026 19:57:20 +0500 Subject: [PATCH 07/15] chore: use from __future__ import annotations (#30254) Co-authored-by: Dev Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> Co-authored-by: Asuka Minato Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com> --- api/controllers/console/workspace/account.py | 4 +- .../datasource/__base/datasource_plugin.py | 4 +- .../entities/datasource_entities.py | 10 +++-- api/core/entities/mcp_provider.py | 4 +- api/core/entities/provider_entities.py | 4 +- api/core/mcp/session/base_session.py | 8 +--- .../entities/message_entities.py | 4 +- .../model_runtime/entities/model_entities.py | 6 ++- .../model_providers/model_provider_factory.py | 6 ++- api/core/plugin/entities/plugin_daemon.py | 4 +- .../vdb/clickzetta/clickzetta_vector.py | 30 +++++++------ api/core/rag/docstore/dataset_docstore.py | 4 +- api/core/rag/pipeline/queue.py | 4 +- api/core/schemas/registry.py | 8 ++-- api/core/tools/__base/tool.py | 6 ++- api/core/tools/builtin_tool/tool.py | 4 +- api/core/tools/custom_tool/provider.py | 4 +- api/core/tools/entities/tool_entities.py | 14 +++--- api/core/tools/mcp_tool/tool.py | 4 +- api/core/tools/plugin_tool/tool.py | 4 +- api/core/tools/workflow_as_tool/provider.py | 4 +- api/core/tools/workflow_as_tool/tool.py | 4 +- api/core/variables/types.py | 14 +++--- .../workflow/entities/workflow_execution.py | 4 +- api/core/workflow/graph/graph.py | 16 ++++--- .../workflow/graph_engine/graph_engine.py | 4 +- .../graph_engine/ready_queue/factory.py | 4 +- .../response_coordinator/session.py | 4 +- api/core/workflow/nodes/agent/agent_node.py | 10 ++--- api/core/workflow/nodes/base/entities.py | 4 +- api/core/workflow/nodes/base/node.py | 14 +++--- api/core/workflow/nodes/base/template.py | 6 ++- api/core/workflow/nodes/llm/node.py | 24 +++++----- .../repositories/draft_variable_repository.py | 4 +- api/core/workflow/runtime/variable_pool.py | 4 +- api/core/workflow/system_variable.py | 6 ++- api/extensions/logstore/aliyun_logstore.py | 6 ++- .../clickzetta_volume/file_lifecycle.py | 4 +- api/libs/broadcast_channel/channel.py | 4 +- api/libs/broadcast_channel/redis/channel.py | 4 +- 
.../redis/sharded_channel.py | 4 +- api/libs/email_i18n.py | 4 +- api/models/model.py | 18 ++++---- api/models/provider.py | 6 ++- api/models/tools.py | 12 ++--- api/models/workflow.py | 44 ++++++++++--------- api/services/variable_truncator.py | 4 +- api/services/website_service.py | 6 ++- .../workflow/graph_engine/test_mock_config.py | 28 ++++++------ .../workflow/nodes/tool/test_tool_node.py | 10 +++-- 50 files changed, 253 insertions(+), 163 deletions(-) diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 55eaa2f09f..03ad0f423b 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from typing import Literal @@ -99,7 +101,7 @@ class AccountPasswordPayload(BaseModel): repeat_new_password: str @model_validator(mode="after") - def check_passwords_match(self) -> "AccountPasswordPayload": + def check_passwords_match(self) -> AccountPasswordPayload: if self.new_password != self.repeat_new_password: raise RepeatPasswordNotMatchError() return self diff --git a/api/core/datasource/__base/datasource_plugin.py b/api/core/datasource/__base/datasource_plugin.py index 50c7249fe4..451e4fda0e 100644 --- a/api/core/datasource/__base/datasource_plugin.py +++ b/api/core/datasource/__base/datasource_plugin.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from configs import dify_config @@ -30,7 +32,7 @@ class DatasourcePlugin(ABC): """ return DatasourceProviderType.LOCAL_FILE - def fork_datasource_runtime(self, runtime: DatasourceRuntime) -> "DatasourcePlugin": + def fork_datasource_runtime(self, runtime: DatasourceRuntime) -> DatasourcePlugin: return self.__class__( entity=self.entity.model_copy(), runtime=runtime, diff --git a/api/core/datasource/entities/datasource_entities.py b/api/core/datasource/entities/datasource_entities.py index 260dcf04f5..dde7d59726 100644 --- a/api/core/datasource/entities/datasource_entities.py +++ b/api/core/datasource/entities/datasource_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from enum import StrEnum from typing import Any @@ -31,7 +33,7 @@ class DatasourceProviderType(enum.StrEnum): ONLINE_DRIVE = "online_drive" @classmethod - def value_of(cls, value: str) -> "DatasourceProviderType": + def value_of(cls, value: str) -> DatasourceProviderType: """ Get value of given mode. 
@@ -81,7 +83,7 @@ class DatasourceParameter(PluginParameter): typ: DatasourceParameterType, required: bool, options: list[str] | None = None, - ) -> "DatasourceParameter": + ) -> DatasourceParameter: """ get a simple datasource parameter @@ -187,14 +189,14 @@ class DatasourceInvokeMeta(BaseModel): tool_config: dict | None = None @classmethod - def empty(cls) -> "DatasourceInvokeMeta": + def empty(cls) -> DatasourceInvokeMeta: """ Get an empty instance of DatasourceInvokeMeta """ return cls(time_cost=0.0, error=None, tool_config={}) @classmethod - def error_instance(cls, error: str) -> "DatasourceInvokeMeta": + def error_instance(cls, error: str) -> DatasourceInvokeMeta: """ Get an instance of DatasourceInvokeMeta with error """ diff --git a/api/core/entities/mcp_provider.py b/api/core/entities/mcp_provider.py index 7fdf5e4be6..135d2a4945 100644 --- a/api/core/entities/mcp_provider.py +++ b/api/core/entities/mcp_provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from datetime import datetime from enum import StrEnum @@ -75,7 +77,7 @@ class MCPProviderEntity(BaseModel): updated_at: datetime @classmethod - def from_db_model(cls, db_provider: "MCPToolProvider") -> "MCPProviderEntity": + def from_db_model(cls, db_provider: MCPToolProvider) -> MCPProviderEntity: """Create entity from database model with decryption""" return cls( diff --git a/api/core/entities/provider_entities.py b/api/core/entities/provider_entities.py index 8a8067332d..0078ec7e4f 100644 --- a/api/core/entities/provider_entities.py +++ b/api/core/entities/provider_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from enum import StrEnum, auto from typing import Union @@ -178,7 +180,7 @@ class BasicProviderConfig(BaseModel): TOOLS_SELECTOR = CommonParameterType.TOOLS_SELECTOR @classmethod - def value_of(cls, value: str) -> "ProviderConfig.Type": + def value_of(cls, value: str) -> ProviderConfig.Type: """ Get value of given mode. diff --git a/api/core/mcp/session/base_session.py b/api/core/mcp/session/base_session.py index c97ae6eac7..84a6fd0d1f 100644 --- a/api/core/mcp/session/base_session.py +++ b/api/core/mcp/session/base_session.py @@ -68,13 +68,7 @@ class RequestResponder(Generic[ReceiveRequestT, SendResultT]): request_id: RequestId, request_meta: RequestParams.Meta | None, request: ReceiveRequestT, - session: """BaseSession[ - SendRequestT, - SendNotificationT, - SendResultT, - ReceiveRequestT, - ReceiveNotificationT - ]""", + session: """BaseSession[SendRequestT, SendNotificationT, SendResultT, ReceiveRequestT, ReceiveNotificationT]""", on_complete: Callable[["RequestResponder[ReceiveRequestT, SendResultT]"], Any], ): self.request_id = request_id diff --git a/api/core/model_runtime/entities/message_entities.py b/api/core/model_runtime/entities/message_entities.py index 89dae2dbff..3ac83b4c96 100644 --- a/api/core/model_runtime/entities/message_entities.py +++ b/api/core/model_runtime/entities/message_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC from collections.abc import Mapping, Sequence from enum import StrEnum, auto @@ -17,7 +19,7 @@ class PromptMessageRole(StrEnum): TOOL = auto() @classmethod - def value_of(cls, value: str) -> "PromptMessageRole": + def value_of(cls, value: str) -> PromptMessageRole: """ Get value of given mode. 
diff --git a/api/core/model_runtime/entities/model_entities.py b/api/core/model_runtime/entities/model_entities.py index aee6ce1108..19194d162c 100644 --- a/api/core/model_runtime/entities/model_entities.py +++ b/api/core/model_runtime/entities/model_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from decimal import Decimal from enum import StrEnum, auto from typing import Any @@ -20,7 +22,7 @@ class ModelType(StrEnum): TTS = auto() @classmethod - def value_of(cls, origin_model_type: str) -> "ModelType": + def value_of(cls, origin_model_type: str) -> ModelType: """ Get model type from origin model type. @@ -103,7 +105,7 @@ class DefaultParameterName(StrEnum): JSON_SCHEMA = auto() @classmethod - def value_of(cls, value: Any) -> "DefaultParameterName": + def value_of(cls, value: Any) -> DefaultParameterName: """ Get parameter name from value. diff --git a/api/core/model_runtime/model_providers/model_provider_factory.py b/api/core/model_runtime/model_providers/model_provider_factory.py index 12a202ce64..28f162a928 100644 --- a/api/core/model_runtime/model_providers/model_provider_factory.py +++ b/api/core/model_runtime/model_providers/model_provider_factory.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import hashlib import logging from collections.abc import Sequence @@ -38,7 +40,7 @@ class ModelProviderFactory: plugin_providers = self.get_plugin_model_providers() return [provider.declaration for provider in plugin_providers] - def get_plugin_model_providers(self) -> Sequence["PluginModelProviderEntity"]: + def get_plugin_model_providers(self) -> Sequence[PluginModelProviderEntity]: """ Get all plugin model providers :return: list of plugin model providers @@ -76,7 +78,7 @@ class ModelProviderFactory: plugin_model_provider_entity = self.get_plugin_model_provider(provider=provider) return plugin_model_provider_entity.declaration - def get_plugin_model_provider(self, provider: str) -> "PluginModelProviderEntity": + def get_plugin_model_provider(self, provider: str) -> PluginModelProviderEntity: """ Get plugin model provider :param provider: provider name diff --git a/api/core/plugin/entities/plugin_daemon.py b/api/core/plugin/entities/plugin_daemon.py index 3b83121357..6674228dc0 100644 --- a/api/core/plugin/entities/plugin_daemon.py +++ b/api/core/plugin/entities/plugin_daemon.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from collections.abc import Mapping, Sequence from datetime import datetime @@ -242,7 +244,7 @@ class CredentialType(enum.StrEnum): return [item.value for item in cls] @classmethod - def of(cls, credential_type: str) -> "CredentialType": + def of(cls, credential_type: str) -> CredentialType: type_name = credential_type.lower() if type_name in {"api-key", "api_key"}: return cls.API_KEY diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index e05b70ba22..91bb71bfa6 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import json import logging @@ -6,7 +8,7 @@ import re import threading import time import uuid -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any import clickzetta # type: ignore from pydantic import BaseModel, model_validator @@ -76,7 +78,7 @@ class ClickzettaConnectionPool: Manages connection reuse across ClickzettaVector instances. 
""" - _instance: Optional["ClickzettaConnectionPool"] = None + _instance: ClickzettaConnectionPool | None = None _lock = threading.Lock() def __init__(self): @@ -89,7 +91,7 @@ class ClickzettaConnectionPool: self._start_cleanup_thread() @classmethod - def get_instance(cls) -> "ClickzettaConnectionPool": + def get_instance(cls) -> ClickzettaConnectionPool: """Get singleton instance of connection pool.""" if cls._instance is None: with cls._lock: @@ -104,7 +106,7 @@ class ClickzettaConnectionPool: f"{config.workspace}:{config.vcluster}:{config.schema_name}" ) - def _create_connection(self, config: ClickzettaConfig) -> "Connection": + def _create_connection(self, config: ClickzettaConfig) -> Connection: """Create a new ClickZetta connection.""" max_retries = 3 retry_delay = 1.0 @@ -134,7 +136,7 @@ class ClickzettaConnectionPool: raise RuntimeError(f"Failed to create ClickZetta connection after {max_retries} attempts") - def _configure_connection(self, connection: "Connection"): + def _configure_connection(self, connection: Connection): """Configure connection session settings.""" try: with connection.cursor() as cursor: @@ -181,7 +183,7 @@ class ClickzettaConnectionPool: except Exception: logger.exception("Failed to configure connection, continuing with defaults") - def _is_connection_valid(self, connection: "Connection") -> bool: + def _is_connection_valid(self, connection: Connection) -> bool: """Check if connection is still valid.""" try: with connection.cursor() as cursor: @@ -190,7 +192,7 @@ class ClickzettaConnectionPool: except Exception: return False - def get_connection(self, config: ClickzettaConfig) -> "Connection": + def get_connection(self, config: ClickzettaConfig) -> Connection: """Get a connection from the pool or create a new one.""" config_key = self._get_config_key(config) @@ -221,7 +223,7 @@ class ClickzettaConnectionPool: # No valid connection found, create new one return self._create_connection(config) - def return_connection(self, config: ClickzettaConfig, connection: "Connection"): + def return_connection(self, config: ClickzettaConfig, connection: Connection): """Return a connection to the pool.""" config_key = self._get_config_key(config) @@ -315,22 +317,22 @@ class ClickzettaVector(BaseVector): self._connection_pool = ClickzettaConnectionPool.get_instance() self._init_write_queue() - def _get_connection(self) -> "Connection": + def _get_connection(self) -> Connection: """Get a connection from the pool.""" return self._connection_pool.get_connection(self._config) - def _return_connection(self, connection: "Connection"): + def _return_connection(self, connection: Connection): """Return a connection to the pool.""" self._connection_pool.return_connection(self._config, connection) class ConnectionContext: """Context manager for borrowing and returning connections.""" - def __init__(self, vector_instance: "ClickzettaVector"): + def __init__(self, vector_instance: ClickzettaVector): self.vector = vector_instance self.connection: Connection | None = None - def __enter__(self) -> "Connection": + def __enter__(self) -> Connection: self.connection = self.vector._get_connection() return self.connection @@ -338,7 +340,7 @@ class ClickzettaVector(BaseVector): if self.connection: self.vector._return_connection(self.connection) - def get_connection_context(self) -> "ClickzettaVector.ConnectionContext": + def get_connection_context(self) -> ClickzettaVector.ConnectionContext: """Get a connection context manager.""" return self.ConnectionContext(self) @@ -437,7 +439,7 @@ class 
ClickzettaVector(BaseVector): """Return the vector database type.""" return "clickzetta" - def _ensure_connection(self) -> "Connection": + def _ensure_connection(self) -> Connection: """Get a connection from the pool.""" return self._get_connection() diff --git a/api/core/rag/docstore/dataset_docstore.py b/api/core/rag/docstore/dataset_docstore.py index 1fe74d3042..69adac522d 100644 --- a/api/core/rag/docstore/dataset_docstore.py +++ b/api/core/rag/docstore/dataset_docstore.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Sequence from typing import Any @@ -22,7 +24,7 @@ class DatasetDocumentStore: self._document_id = document_id @classmethod - def from_dict(cls, config_dict: dict[str, Any]) -> "DatasetDocumentStore": + def from_dict(cls, config_dict: dict[str, Any]) -> DatasetDocumentStore: return cls(**config_dict) def to_dict(self) -> dict[str, Any]: diff --git a/api/core/rag/pipeline/queue.py b/api/core/rag/pipeline/queue.py index 7472598a7f..bf8db95b4e 100644 --- a/api/core/rag/pipeline/queue.py +++ b/api/core/rag/pipeline/queue.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from collections.abc import Sequence from typing import Any @@ -16,7 +18,7 @@ class TaskWrapper(BaseModel): return self.model_dump_json() @classmethod - def deserialize(cls, serialized_data: str) -> "TaskWrapper": + def deserialize(cls, serialized_data: str) -> TaskWrapper: return cls.model_validate_json(serialized_data) diff --git a/api/core/schemas/registry.py b/api/core/schemas/registry.py index 51bfae1cd3..b4ecfe47ff 100644 --- a/api/core/schemas/registry.py +++ b/api/core/schemas/registry.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import json import logging import threading from collections.abc import Mapping, MutableMapping from pathlib import Path -from typing import Any, ClassVar, Optional +from typing import Any, ClassVar class SchemaRegistry: @@ -11,7 +13,7 @@ class SchemaRegistry: logger: ClassVar[logging.Logger] = logging.getLogger(__name__) - _default_instance: ClassVar[Optional["SchemaRegistry"]] = None + _default_instance: ClassVar[SchemaRegistry | None] = None _lock: ClassVar[threading.Lock] = threading.Lock() def __init__(self, base_dir: str): @@ -20,7 +22,7 @@ class SchemaRegistry: self.metadata: MutableMapping[str, MutableMapping[str, Any]] = {} @classmethod - def default_registry(cls) -> "SchemaRegistry": + def default_registry(cls) -> SchemaRegistry: """Returns the default schema registry for builtin schemas (thread-safe singleton)""" if cls._default_instance is None: with cls._lock: diff --git a/api/core/tools/__base/tool.py b/api/core/tools/__base/tool.py index 8ca4eabb7a..ebd200a822 100644 --- a/api/core/tools/__base/tool.py +++ b/api/core/tools/__base/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from abc import ABC, abstractmethod from collections.abc import Generator from copy import deepcopy @@ -24,7 +26,7 @@ class Tool(ABC): self.entity = entity self.runtime = runtime - def fork_tool_runtime(self, runtime: ToolRuntime) -> "Tool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> Tool: """ fork a new tool with metadata :return: the new tool @@ -166,7 +168,7 @@ class Tool(ABC): type=ToolInvokeMessage.MessageType.IMAGE, message=ToolInvokeMessage.TextMessage(text=image) ) - def create_file_message(self, file: "File") -> ToolInvokeMessage: + def create_file_message(self, file: File) -> ToolInvokeMessage: return ToolInvokeMessage( type=ToolInvokeMessage.MessageType.FILE, 
message=ToolInvokeMessage.FileMessage(), diff --git a/api/core/tools/builtin_tool/tool.py b/api/core/tools/builtin_tool/tool.py index 84efefba07..51b0407886 100644 --- a/api/core/tools/builtin_tool/tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from core.model_runtime.entities.llm_entities import LLMResult from core.model_runtime.entities.message_entities import PromptMessage, SystemPromptMessage, UserPromptMessage from core.tools.__base.tool import Tool @@ -24,7 +26,7 @@ class BuiltinTool(Tool): super().__init__(**kwargs) self.provider = provider - def fork_tool_runtime(self, runtime: ToolRuntime) -> "BuiltinTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> BuiltinTool: """ fork a new tool with metadata :return: the new tool diff --git a/api/core/tools/custom_tool/provider.py b/api/core/tools/custom_tool/provider.py index 0cc992155a..e2f6c00555 100644 --- a/api/core/tools/custom_tool/provider.py +++ b/api/core/tools/custom_tool/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pydantic import Field from sqlalchemy import select @@ -32,7 +34,7 @@ class ApiToolProviderController(ToolProviderController): self.tools = [] @classmethod - def from_db(cls, db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> "ApiToolProviderController": + def from_db(cls, db_provider: ApiToolProvider, auth_type: ApiProviderAuthType) -> ApiToolProviderController: credentials_schema = [ ProviderConfig( name="auth_type", diff --git a/api/core/tools/entities/tool_entities.py b/api/core/tools/entities/tool_entities.py index 583a3584f7..b5c7a6310c 100644 --- a/api/core/tools/entities/tool_entities.py +++ b/api/core/tools/entities/tool_entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import contextlib from collections.abc import Mapping @@ -55,7 +57,7 @@ class ToolProviderType(StrEnum): MCP = auto() @classmethod - def value_of(cls, value: str) -> "ToolProviderType": + def value_of(cls, value: str) -> ToolProviderType: """ Get value of given mode. @@ -79,7 +81,7 @@ class ApiProviderSchemaType(StrEnum): OPENAI_ACTIONS = auto() @classmethod - def value_of(cls, value: str) -> "ApiProviderSchemaType": + def value_of(cls, value: str) -> ApiProviderSchemaType: """ Get value of given mode. @@ -102,7 +104,7 @@ class ApiProviderAuthType(StrEnum): API_KEY_QUERY = auto() @classmethod - def value_of(cls, value: str) -> "ApiProviderAuthType": + def value_of(cls, value: str) -> ApiProviderAuthType: """ Get value of given mode. 
@@ -307,7 +309,7 @@ class ToolParameter(PluginParameter): typ: ToolParameterType, required: bool, options: list[str] | None = None, - ) -> "ToolParameter": + ) -> ToolParameter: """ get a simple tool parameter @@ -429,14 +431,14 @@ class ToolInvokeMeta(BaseModel): tool_config: dict | None = None @classmethod - def empty(cls) -> "ToolInvokeMeta": + def empty(cls) -> ToolInvokeMeta: """ Get an empty instance of ToolInvokeMeta """ return cls(time_cost=0.0, error=None, tool_config={}) @classmethod - def error_instance(cls, error: str) -> "ToolInvokeMeta": + def error_instance(cls, error: str) -> ToolInvokeMeta: """ Get an instance of ToolInvokeMeta with error """ diff --git a/api/core/tools/mcp_tool/tool.py b/api/core/tools/mcp_tool/tool.py index 96917045e3..ef9e9c103a 100644 --- a/api/core/tools/mcp_tool/tool.py +++ b/api/core/tools/mcp_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import json import logging @@ -118,7 +120,7 @@ class MCPTool(Tool): for item in json_list: yield self.create_json_message(item) - def fork_tool_runtime(self, runtime: ToolRuntime) -> "MCPTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> MCPTool: return MCPTool( entity=self.entity, runtime=runtime, diff --git a/api/core/tools/plugin_tool/tool.py b/api/core/tools/plugin_tool/tool.py index 828dc3b810..d3a2ad488c 100644 --- a/api/core/tools/plugin_tool/tool.py +++ b/api/core/tools/plugin_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Generator from typing import Any @@ -46,7 +48,7 @@ class PluginTool(Tool): message_id=message_id, ) - def fork_tool_runtime(self, runtime: ToolRuntime) -> "PluginTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> PluginTool: return PluginTool( entity=self.entity, runtime=runtime, diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index 5422f5250b..a706f101ca 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping from pydantic import Field @@ -47,7 +49,7 @@ class WorkflowToolProviderController(ToolProviderController): self.provider_id = provider_id @classmethod - def from_db(cls, db_provider: WorkflowToolProvider) -> "WorkflowToolProviderController": + def from_db(cls, db_provider: WorkflowToolProvider) -> WorkflowToolProviderController: with session_factory.create_session() as session, session.begin(): app = session.get(App, db_provider.app_id) if not app: diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 30334f5da8..81a1d54199 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import logging from collections.abc import Generator, Mapping, Sequence @@ -181,7 +183,7 @@ class WorkflowTool(Tool): return found return None - def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool": + def fork_tool_runtime(self, runtime: ToolRuntime) -> WorkflowTool: """ fork a new tool with metadata diff --git a/api/core/variables/types.py b/api/core/variables/types.py index ce71711344..13b926c978 100644 --- a/api/core/variables/types.py +++ b/api/core/variables/types.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from collections.abc import Mapping from enum import StrEnum -from typing import TYPE_CHECKING, Any, Optional +from typing 
import TYPE_CHECKING, Any from core.file.models import File @@ -52,7 +54,7 @@ class SegmentType(StrEnum): return self in _ARRAY_TYPES @classmethod - def infer_segment_type(cls, value: Any) -> Optional["SegmentType"]: + def infer_segment_type(cls, value: Any) -> SegmentType | None: """ Attempt to infer the `SegmentType` based on the Python type of the `value` parameter. @@ -173,7 +175,7 @@ class SegmentType(StrEnum): raise AssertionError("this statement should be unreachable.") @staticmethod - def cast_value(value: Any, type_: "SegmentType"): + def cast_value(value: Any, type_: SegmentType): # Cast Python's `bool` type to `int` when the runtime type requires # an integer or number. # @@ -193,7 +195,7 @@ class SegmentType(StrEnum): return [int(i) for i in value] return value - def exposed_type(self) -> "SegmentType": + def exposed_type(self) -> SegmentType: """Returns the type exposed to the frontend. The frontend treats `INTEGER` and `FLOAT` as `NUMBER`, so these are returned as `NUMBER` here. @@ -202,7 +204,7 @@ class SegmentType(StrEnum): return SegmentType.NUMBER return self - def element_type(self) -> "SegmentType | None": + def element_type(self) -> SegmentType | None: """Return the element type of the current segment type, or `None` if the element type is undefined. Raises: @@ -217,7 +219,7 @@ class SegmentType(StrEnum): return _ARRAY_ELEMENT_TYPES_MAPPING.get(self) @staticmethod - def get_zero_value(t: "SegmentType"): + def get_zero_value(t: SegmentType): # Lazy import to avoid circular dependency from factories import variable_factory diff --git a/api/core/workflow/entities/workflow_execution.py b/api/core/workflow/entities/workflow_execution.py index a8a86d3db2..1b3fb36f1f 100644 --- a/api/core/workflow/entities/workflow_execution.py +++ b/api/core/workflow/entities/workflow_execution.py @@ -5,6 +5,8 @@ Models are independent of the storage mechanism and don't contain implementation details like tenant_id, app_id, etc. """ +from __future__ import annotations + from collections.abc import Mapping from datetime import datetime from typing import Any @@ -59,7 +61,7 @@ class WorkflowExecution(BaseModel): graph: Mapping[str, Any], inputs: Mapping[str, Any], started_at: datetime, - ) -> "WorkflowExecution": + ) -> WorkflowExecution: return WorkflowExecution( id_=id_, workflow_id=workflow_id, diff --git a/api/core/workflow/graph/graph.py b/api/core/workflow/graph/graph.py index ba5a01fc94..7be94c2426 100644 --- a/api/core/workflow/graph/graph.py +++ b/api/core/workflow/graph/graph.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from collections import defaultdict from collections.abc import Mapping, Sequence @@ -175,7 +177,7 @@ class Graph: def _create_node_instances( cls, node_configs_map: dict[str, dict[str, object]], - node_factory: "NodeFactory", + node_factory: NodeFactory, ) -> dict[str, Node]: """ Create node instances from configurations using the node factory. 
@@ -197,7 +199,7 @@ class Graph: return nodes @classmethod - def new(cls) -> "GraphBuilder": + def new(cls) -> GraphBuilder: """Create a fluent builder for assembling a graph programmatically.""" return GraphBuilder(graph_cls=cls) @@ -284,9 +286,9 @@ class Graph: cls, *, graph_config: Mapping[str, object], - node_factory: "NodeFactory", + node_factory: NodeFactory, root_node_id: str | None = None, - ) -> "Graph": + ) -> Graph: """ Initialize graph @@ -383,7 +385,7 @@ class GraphBuilder: self._edges: list[Edge] = [] self._edge_counter = 0 - def add_root(self, node: Node) -> "GraphBuilder": + def add_root(self, node: Node) -> GraphBuilder: """Register the root node. Must be called exactly once.""" if self._nodes: @@ -398,7 +400,7 @@ class GraphBuilder: *, from_node_id: str | None = None, source_handle: str = "source", - ) -> "GraphBuilder": + ) -> GraphBuilder: """Append a node and connect it from the specified predecessor.""" if not self._nodes: @@ -419,7 +421,7 @@ class GraphBuilder: return self - def connect(self, *, tail: str, head: str, source_handle: str = "source") -> "GraphBuilder": + def connect(self, *, tail: str, head: str, source_handle: str = "source") -> GraphBuilder: """Connect two existing nodes without adding a new node.""" if tail not in self._nodes_by_id: diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 500ba4487b..9a870d7bf5 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -5,6 +5,8 @@ This engine uses a modular architecture with separated packages following Domain-Driven Design principles for improved maintainability and testability. """ +from __future__ import annotations + import contextvars import logging import queue @@ -232,7 +234,7 @@ class GraphEngine: ) -> None: layer.initialize(ReadOnlyGraphRuntimeStateWrapper(self._graph_runtime_state), self._command_channel) - def layer(self, layer: GraphEngineLayer) -> "GraphEngine": + def layer(self, layer: GraphEngineLayer) -> GraphEngine: """Add a layer for extending functionality.""" self._layers.append(layer) self._bind_layer_context(layer) diff --git a/api/core/workflow/graph_engine/ready_queue/factory.py b/api/core/workflow/graph_engine/ready_queue/factory.py index 1144e1de69..a9d4f470e5 100644 --- a/api/core/workflow/graph_engine/ready_queue/factory.py +++ b/api/core/workflow/graph_engine/ready_queue/factory.py @@ -2,6 +2,8 @@ Factory for creating ReadyQueue instances from serialized state. """ +from __future__ import annotations + from typing import TYPE_CHECKING from .in_memory import InMemoryReadyQueue @@ -11,7 +13,7 @@ if TYPE_CHECKING: from .protocol import ReadyQueue -def create_ready_queue_from_state(state: ReadyQueueState) -> "ReadyQueue": +def create_ready_queue_from_state(state: ReadyQueueState) -> ReadyQueue: """ Create a ReadyQueue instance from a serialized state. diff --git a/api/core/workflow/graph_engine/response_coordinator/session.py b/api/core/workflow/graph_engine/response_coordinator/session.py index 8b7c2e441e..8ceaa428c3 100644 --- a/api/core/workflow/graph_engine/response_coordinator/session.py +++ b/api/core/workflow/graph_engine/response_coordinator/session.py @@ -5,6 +5,8 @@ This module contains the private ResponseSession class used internally by ResponseStreamCoordinator to manage streaming sessions. 
""" +from __future__ import annotations + from dataclasses import dataclass from core.workflow.nodes.answer.answer_node import AnswerNode @@ -27,7 +29,7 @@ class ResponseSession: index: int = 0 # Current position in the template segments @classmethod - def from_node(cls, node: Node) -> "ResponseSession": + def from_node(cls, node: Node) -> ResponseSession: """ Create a ResponseSession from an AnswerNode or EndNode. diff --git a/api/core/workflow/nodes/agent/agent_node.py b/api/core/workflow/nodes/agent/agent_node.py index 4be006de11..234651ce96 100644 --- a/api/core/workflow/nodes/agent/agent_node.py +++ b/api/core/workflow/nodes/agent/agent_node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from collections.abc import Generator, Mapping, Sequence from typing import TYPE_CHECKING, Any, cast @@ -167,7 +169,7 @@ class AgentNode(Node[AgentNodeData]): variable_pool: VariablePool, node_data: AgentNodeData, for_log: bool = False, - strategy: "PluginAgentStrategy", + strategy: PluginAgentStrategy, ) -> dict[str, Any]: """ Generate parameters based on the given tool parameters, variable pool, and node data. @@ -328,7 +330,7 @@ class AgentNode(Node[AgentNodeData]): def _generate_credentials( self, parameters: dict[str, Any], - ) -> "InvokeCredentials": + ) -> InvokeCredentials: """ Generate credentials based on the given agent parameters. """ @@ -442,9 +444,7 @@ class AgentNode(Node[AgentNodeData]): model_schema.features.remove(feature) return model_schema - def _filter_mcp_type_tool( - self, strategy: "PluginAgentStrategy", tools: list[dict[str, Any]] - ) -> list[dict[str, Any]]: + def _filter_mcp_type_tool(self, strategy: PluginAgentStrategy, tools: list[dict[str, Any]]) -> list[dict[str, Any]]: """ Filter MCP type tool :param strategy: plugin agent strategy diff --git a/api/core/workflow/nodes/base/entities.py b/api/core/workflow/nodes/base/entities.py index 5aab6bbde4..e5a20c8e91 100644 --- a/api/core/workflow/nodes/base/entities.py +++ b/api/core/workflow/nodes/base/entities.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from abc import ABC from builtins import type as type_ @@ -111,7 +113,7 @@ class DefaultValue(BaseModel): raise DefaultValueTypeError(f"Cannot convert to number: {value}") @model_validator(mode="after") - def validate_value_type(self) -> "DefaultValue": + def validate_value_type(self) -> DefaultValue: # Type validation configuration type_validators = { DefaultValueType.STRING: { diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index e7282313b6..55c8db40ea 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging import operator @@ -59,7 +61,7 @@ logger = logging.getLogger(__name__) class Node(Generic[NodeDataT]): - node_type: ClassVar["NodeType"] + node_type: ClassVar[NodeType] execution_type: NodeExecutionType = NodeExecutionType.EXECUTABLE _node_data_type: ClassVar[type[BaseNodeData]] = BaseNodeData @@ -198,14 +200,14 @@ class Node(Generic[NodeDataT]): return None # Global registry populated via __init_subclass__ - _registry: ClassVar[dict["NodeType", dict[str, type["Node"]]]] = {} + _registry: ClassVar[dict[NodeType, dict[str, type[Node]]]] = {} def __init__( self, id: str, config: Mapping[str, Any], - graph_init_params: "GraphInitParams", - graph_runtime_state: "GraphRuntimeState", + graph_init_params: GraphInitParams, + graph_runtime_state: GraphRuntimeState, ) -> None: 
self._graph_init_params = graph_init_params self.id = id @@ -241,7 +243,7 @@ class Node(Generic[NodeDataT]): return @property - def graph_init_params(self) -> "GraphInitParams": + def graph_init_params(self) -> GraphInitParams: return self._graph_init_params @property @@ -457,7 +459,7 @@ class Node(Generic[NodeDataT]): raise NotImplementedError("subclasses of BaseNode must implement `version` method.") @classmethod - def get_node_type_classes_mapping(cls) -> Mapping["NodeType", Mapping[str, type["Node"]]]: + def get_node_type_classes_mapping(cls) -> Mapping[NodeType, Mapping[str, type[Node]]]: """Return mapping of NodeType -> {version -> Node subclass} using __init_subclass__ registry. Import all modules under core.workflow.nodes so subclasses register themselves on import. diff --git a/api/core/workflow/nodes/base/template.py b/api/core/workflow/nodes/base/template.py index ba3e2058cf..81f4b9f6fb 100644 --- a/api/core/workflow/nodes/base/template.py +++ b/api/core/workflow/nodes/base/template.py @@ -4,6 +4,8 @@ This module provides a unified template structure for both Answer and End nodes, similar to SegmentGroup but focused on template representation without values. """ +from __future__ import annotations + from abc import ABC, abstractmethod from collections.abc import Sequence from dataclasses import dataclass @@ -58,7 +60,7 @@ class Template: segments: list[TemplateSegmentUnion] @classmethod - def from_answer_template(cls, template_str: str) -> "Template": + def from_answer_template(cls, template_str: str) -> Template: """Create a Template from an Answer node template string. Example: @@ -107,7 +109,7 @@ class Template: return cls(segments=segments) @classmethod - def from_end_outputs(cls, outputs_config: list[dict[str, Any]]) -> "Template": + def from_end_outputs(cls, outputs_config: list[dict[str, Any]]) -> Template: """Create a Template from an End node outputs configuration. End nodes are treated as templates of concatenated variables with newlines. diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index 04e2802191..dfb55dcd80 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import io import json @@ -113,7 +115,7 @@ class LLMNode(Node[LLMNodeData]): # Instance attributes specific to LLMNode. 
# Output variable for file - _file_outputs: list["File"] + _file_outputs: list[File] _llm_file_saver: LLMFileSaver @@ -121,8 +123,8 @@ class LLMNode(Node[LLMNodeData]): self, id: str, config: Mapping[str, Any], - graph_init_params: "GraphInitParams", - graph_runtime_state: "GraphRuntimeState", + graph_init_params: GraphInitParams, + graph_runtime_state: GraphRuntimeState, *, llm_file_saver: LLMFileSaver | None = None, ): @@ -361,7 +363,7 @@ class LLMNode(Node[LLMNodeData]): structured_output_enabled: bool, structured_output: Mapping[str, Any] | None = None, file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], node_id: str, node_type: NodeType, reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -415,7 +417,7 @@ class LLMNode(Node[LLMNodeData]): *, invoke_result: LLMResult | Generator[LLMResultChunk | LLMStructuredOutput, None, None], file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], node_id: str, node_type: NodeType, reasoning_format: Literal["separated", "tagged"] = "tagged", @@ -525,7 +527,7 @@ class LLMNode(Node[LLMNodeData]): ) @staticmethod - def _image_file_to_markdown(file: "File", /): + def _image_file_to_markdown(file: File, /): text_chunk = f"![]({file.generate_url()})" return text_chunk @@ -774,7 +776,7 @@ class LLMNode(Node[LLMNodeData]): def fetch_prompt_messages( *, sys_query: str | None = None, - sys_files: Sequence["File"], + sys_files: Sequence[File], context: str | None = None, memory: TokenBufferMemory | None = None, model_config: ModelConfigWithCredentialsEntity, @@ -785,7 +787,7 @@ class LLMNode(Node[LLMNodeData]): variable_pool: VariablePool, jinja2_variables: Sequence[VariableSelector], tenant_id: str, - context_files: list["File"] | None = None, + context_files: list[File] | None = None, ) -> tuple[Sequence[PromptMessage], Sequence[str] | None]: prompt_messages: list[PromptMessage] = [] @@ -1137,7 +1139,7 @@ class LLMNode(Node[LLMNodeData]): *, invoke_result: LLMResult | LLMResultWithStructuredOutput, saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], reasoning_format: Literal["separated", "tagged"] = "tagged", request_latency: float | None = None, ) -> ModelInvokeCompletedEvent: @@ -1179,7 +1181,7 @@ class LLMNode(Node[LLMNodeData]): *, content: ImagePromptMessageContent, file_saver: LLMFileSaver, - ) -> "File": + ) -> File: """_save_multimodal_output saves multi-modal contents generated by LLM plugins. There are two kinds of multimodal outputs: @@ -1229,7 +1231,7 @@ class LLMNode(Node[LLMNodeData]): *, contents: str | list[PromptMessageContentUnionTypes] | None, file_saver: LLMFileSaver, - file_outputs: list["File"], + file_outputs: list[File], ) -> Generator[str, None, None]: """Convert intermediate prompt messages into strings and yield them to the caller. 
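A related pattern appears in modules such as `ready_queue/factory.py` above: imports that are needed only for annotations stay behind `if TYPE_CHECKING:`, and postponed evaluation lets those names be used unquoted in signatures. A small illustrative module (the `expensive_sdk` dependency is invented for the sketch):

```python
# Sketch only: TYPE_CHECKING imports combined with postponed annotation evaluation.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Never imported at runtime, so it cannot create an import cycle or add
    # startup cost; type checkers still resolve the name.
    from expensive_sdk import ExpensiveClient  # hypothetical package


def describe(client: ExpensiveClient) -> str:
    # The unquoted annotation is fine: it is stored as the string
    # "ExpensiveClient" and only interpreted by static tooling.
    return f"client={client!r}"


print(describe(object()))  # runs even though expensive_sdk is absent at runtime
```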
diff --git a/api/core/workflow/repositories/draft_variable_repository.py b/api/core/workflow/repositories/draft_variable_repository.py index 97bfcd5666..66ef714c16 100644 --- a/api/core/workflow/repositories/draft_variable_repository.py +++ b/api/core/workflow/repositories/draft_variable_repository.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import abc from collections.abc import Mapping from typing import Any, Protocol @@ -23,7 +25,7 @@ class DraftVariableSaverFactory(Protocol): node_type: NodeType, node_execution_id: str, enclosing_node_id: str | None = None, - ) -> "DraftVariableSaver": + ) -> DraftVariableSaver: pass diff --git a/api/core/workflow/runtime/variable_pool.py b/api/core/workflow/runtime/variable_pool.py index 7fbaec9e70..85ceb9d59e 100644 --- a/api/core/workflow/runtime/variable_pool.py +++ b/api/core/workflow/runtime/variable_pool.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import re from collections import defaultdict from collections.abc import Mapping, Sequence @@ -267,6 +269,6 @@ class VariablePool(BaseModel): self.add(selector, value) @classmethod - def empty(cls) -> "VariablePool": + def empty(cls) -> VariablePool: """Create an empty variable pool.""" return cls(system_variables=SystemVariable.empty()) diff --git a/api/core/workflow/system_variable.py b/api/core/workflow/system_variable.py index ad925912a4..cda8091771 100644 --- a/api/core/workflow/system_variable.py +++ b/api/core/workflow/system_variable.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections.abc import Mapping, Sequence from types import MappingProxyType from typing import Any @@ -70,7 +72,7 @@ class SystemVariable(BaseModel): return data @classmethod - def empty(cls) -> "SystemVariable": + def empty(cls) -> SystemVariable: return cls() def to_dict(self) -> dict[SystemVariableKey, Any]: @@ -114,7 +116,7 @@ class SystemVariable(BaseModel): d[SystemVariableKey.TIMESTAMP] = self.timestamp return d - def as_view(self) -> "SystemVariableReadOnlyView": + def as_view(self) -> SystemVariableReadOnlyView: return SystemVariableReadOnlyView(self) diff --git a/api/extensions/logstore/aliyun_logstore.py b/api/extensions/logstore/aliyun_logstore.py index 22d1f473a3..8c64a25be4 100644 --- a/api/extensions/logstore/aliyun_logstore.py +++ b/api/extensions/logstore/aliyun_logstore.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import os import threading @@ -33,7 +35,7 @@ class AliyunLogStore: Ensures only one instance exists to prevent multiple PG connection pools. """ - _instance: "AliyunLogStore | None" = None + _instance: AliyunLogStore | None = None _initialized: bool = False # Track delayed PG connection for newly created projects @@ -66,7 +68,7 @@ class AliyunLogStore: "\t", ] - def __new__(cls) -> "AliyunLogStore": + def __new__(cls) -> AliyunLogStore: """Implement singleton pattern.""" if cls._instance is None: cls._instance = super().__new__(cls) diff --git a/api/extensions/storage/clickzetta_volume/file_lifecycle.py b/api/extensions/storage/clickzetta_volume/file_lifecycle.py index 51a97b20f8..1d9911465b 100644 --- a/api/extensions/storage/clickzetta_volume/file_lifecycle.py +++ b/api/extensions/storage/clickzetta_volume/file_lifecycle.py @@ -5,6 +5,8 @@ automatic cleanup, backup and restore. Supports complete lifecycle management for knowledge base files. 
""" +from __future__ import annotations + import json import logging import operator @@ -48,7 +50,7 @@ class FileMetadata: return data @classmethod - def from_dict(cls, data: dict) -> "FileMetadata": + def from_dict(cls, data: dict) -> FileMetadata: """Create instance from dictionary""" data = data.copy() data["created_at"] = datetime.fromisoformat(data["created_at"]) diff --git a/api/libs/broadcast_channel/channel.py b/api/libs/broadcast_channel/channel.py index 5bbf0c79a3..d4cb3e9971 100644 --- a/api/libs/broadcast_channel/channel.py +++ b/api/libs/broadcast_channel/channel.py @@ -2,6 +2,8 @@ Broadcast channel for Pub/Sub messaging. """ +from __future__ import annotations + import types from abc import abstractmethod from collections.abc import Iterator @@ -129,6 +131,6 @@ class BroadcastChannel(Protocol): """ @abstractmethod - def topic(self, topic: str) -> "Topic": + def topic(self, topic: str) -> Topic: """topic returns a `Topic` instance for the given topic name.""" ... diff --git a/api/libs/broadcast_channel/redis/channel.py b/api/libs/broadcast_channel/redis/channel.py index 1fc3db8156..5bb4f579c1 100644 --- a/api/libs/broadcast_channel/redis/channel.py +++ b/api/libs/broadcast_channel/redis/channel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis @@ -20,7 +22,7 @@ class BroadcastChannel: ): self._client = redis_client - def topic(self, topic: str) -> "Topic": + def topic(self, topic: str) -> Topic: return Topic(self._client, topic) diff --git a/api/libs/broadcast_channel/redis/sharded_channel.py b/api/libs/broadcast_channel/redis/sharded_channel.py index 16e3a80ee1..d190c51bbc 100644 --- a/api/libs/broadcast_channel/redis/sharded_channel.py +++ b/api/libs/broadcast_channel/redis/sharded_channel.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from libs.broadcast_channel.channel import Producer, Subscriber, Subscription from redis import Redis @@ -18,7 +20,7 @@ class ShardedRedisBroadcastChannel: ): self._client = redis_client - def topic(self, topic: str) -> "ShardedTopic": + def topic(self, topic: str) -> ShardedTopic: return ShardedTopic(self._client, topic) diff --git a/api/libs/email_i18n.py b/api/libs/email_i18n.py index ff74ccbe8e..0828cf80bf 100644 --- a/api/libs/email_i18n.py +++ b/api/libs/email_i18n.py @@ -6,6 +6,8 @@ in Dify. It follows Domain-Driven Design principles with proper type hints and eliminates the need for repetitive language switching logic. 
""" +from __future__ import annotations + from dataclasses import dataclass from enum import StrEnum, auto from typing import Any, Protocol @@ -53,7 +55,7 @@ class EmailLanguage(StrEnum): ZH_HANS = "zh-Hans" @classmethod - def from_language_code(cls, language_code: str) -> "EmailLanguage": + def from_language_code(cls, language_code: str) -> EmailLanguage: """Convert a language code to EmailLanguage with fallback to English.""" if language_code == "zh-Hans": return cls.ZH_HANS diff --git a/api/models/model.py b/api/models/model.py index 52e409d85a..46df047237 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import re import uuid @@ -5,7 +7,7 @@ from collections.abc import Mapping from datetime import datetime from decimal import Decimal from enum import StrEnum, auto -from typing import TYPE_CHECKING, Any, Literal, Optional, cast +from typing import TYPE_CHECKING, Any, Literal, cast from uuid import uuid4 import sqlalchemy as sa @@ -54,7 +56,7 @@ class AppMode(StrEnum): RAG_PIPELINE = "rag-pipeline" @classmethod - def value_of(cls, value: str) -> "AppMode": + def value_of(cls, value: str) -> AppMode: """ Get value of given mode. @@ -121,19 +123,19 @@ class App(Base): return "" @property - def site(self) -> Optional["Site"]: + def site(self) -> Site | None: site = db.session.query(Site).where(Site.app_id == self.id).first() return site @property - def app_model_config(self) -> Optional["AppModelConfig"]: + def app_model_config(self) -> AppModelConfig | None: if self.app_model_config_id: return db.session.query(AppModelConfig).where(AppModelConfig.id == self.app_model_config_id).first() return None @property - def workflow(self) -> Optional["Workflow"]: + def workflow(self) -> Workflow | None: if self.workflow_id: from .workflow import Workflow @@ -288,7 +290,7 @@ class App(Base): return deleted_tools @property - def tags(self) -> list["Tag"]: + def tags(self) -> list[Tag]: tags = ( db.session.query(Tag) .join(TagBinding, Tag.id == TagBinding.tag_id) @@ -1194,7 +1196,7 @@ class Message(Base): return json.loads(self.message_metadata) if self.message_metadata else {} @property - def agent_thoughts(self) -> list["MessageAgentThought"]: + def agent_thoughts(self) -> list[MessageAgentThought]: return ( db.session.query(MessageAgentThought) .where(MessageAgentThought.message_id == self.id) @@ -1307,7 +1309,7 @@ class Message(Base): } @classmethod - def from_dict(cls, data: dict[str, Any]) -> "Message": + def from_dict(cls, data: dict[str, Any]) -> Message: return cls( id=data["id"], app_id=data["app_id"], diff --git a/api/models/provider.py b/api/models/provider.py index d68d56d32a..441b54c797 100644 --- a/api/models/provider.py +++ b/api/models/provider.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from datetime import datetime from enum import StrEnum, auto from functools import cached_property @@ -19,7 +21,7 @@ class ProviderType(StrEnum): SYSTEM = auto() @staticmethod - def value_of(value: str) -> "ProviderType": + def value_of(value: str) -> ProviderType: for member in ProviderType: if member.value == value: return member @@ -37,7 +39,7 @@ class ProviderQuotaType(StrEnum): """hosted trial quota""" @staticmethod - def value_of(value: str) -> "ProviderQuotaType": + def value_of(value: str) -> ProviderQuotaType: for member in ProviderQuotaType: if member.value == value: return member diff --git a/api/models/tools.py b/api/models/tools.py index e4f9bcb582..e7b98dcf27 100644 --- a/api/models/tools.py +++ 
b/api/models/tools.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from datetime import datetime from decimal import Decimal @@ -167,11 +169,11 @@ class ApiToolProvider(TypeBase): ) @property - def schema_type(self) -> "ApiProviderSchemaType": + def schema_type(self) -> ApiProviderSchemaType: return ApiProviderSchemaType.value_of(self.schema_type_str) @property - def tools(self) -> list["ApiToolBundle"]: + def tools(self) -> list[ApiToolBundle]: return [ApiToolBundle.model_validate(tool) for tool in json.loads(self.tools_str)] @property @@ -267,7 +269,7 @@ class WorkflowToolProvider(TypeBase): return db.session.query(Tenant).where(Tenant.id == self.tenant_id).first() @property - def parameter_configurations(self) -> list["WorkflowToolParameterConfiguration"]: + def parameter_configurations(self) -> list[WorkflowToolParameterConfiguration]: return [ WorkflowToolParameterConfiguration.model_validate(config) for config in json.loads(self.parameter_configuration) @@ -359,7 +361,7 @@ class MCPToolProvider(TypeBase): except (json.JSONDecodeError, TypeError): return [] - def to_entity(self) -> "MCPProviderEntity": + def to_entity(self) -> MCPProviderEntity: """Convert to domain entity""" from core.entities.mcp_provider import MCPProviderEntity @@ -533,5 +535,5 @@ class DeprecatedPublishedAppTool(TypeBase): ) @property - def description_i18n(self) -> "I18nObject": + def description_i18n(self) -> I18nObject: return I18nObject.model_validate(json.loads(self.description)) diff --git a/api/models/workflow.py b/api/models/workflow.py index c04e4e9762..a18939523b 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import json import logging from collections.abc import Generator, Mapping, Sequence from datetime import datetime from enum import StrEnum -from typing import TYPE_CHECKING, Any, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Union, cast from uuid import uuid4 import sqlalchemy as sa @@ -67,7 +69,7 @@ class WorkflowType(StrEnum): RAG_PIPELINE = "rag-pipeline" @classmethod - def value_of(cls, value: str) -> "WorkflowType": + def value_of(cls, value: str) -> WorkflowType: """ Get value of given mode. @@ -80,7 +82,7 @@ class WorkflowType(StrEnum): raise ValueError(f"invalid workflow type value {value}") @classmethod - def from_app_mode(cls, app_mode: Union[str, "AppMode"]) -> "WorkflowType": + def from_app_mode(cls, app_mode: Union[str, AppMode]) -> WorkflowType: """ Get workflow type from app mode. 
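Not every circular dependency is solved by annotations alone. Where a class is actually instantiated at runtime, the patch keeps the established deferred-import style (for example the local `MCPProviderEntity` import inside `MCPToolProvider.to_entity`, or the "lazy import" note in `SegmentType.get_zero_value`). A hedged sketch of that split, with made-up module names:

```python
# Sketch: postponed annotations cover the type hints, while a call-time import
# covers the runtime use. `app.entities` is a hypothetical module.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from app.entities import DomainEntity  # needed for the annotation only


class Record:
    def __init__(self, payload: dict[str, str]):
        self.payload = payload

    def to_entity(self) -> DomainEntity:
        # Imported here because (in this sketch) app.entities also imports
        # this module; the future import does not help for real calls.
        from app.entities import DomainEntity

        return DomainEntity(**self.payload)
```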
@@ -181,7 +183,7 @@ class Workflow(Base): # bug rag_pipeline_variables: list[dict], marked_name: str = "", marked_comment: str = "", - ) -> "Workflow": + ) -> Workflow: workflow = Workflow() workflow.id = str(uuid4()) workflow.tenant_id = tenant_id @@ -619,7 +621,7 @@ class WorkflowRun(Base): finished_at: Mapped[datetime | None] = mapped_column(DateTime) exceptions_count: Mapped[int] = mapped_column(sa.Integer, server_default=sa.text("0"), nullable=True) - pause: Mapped[Optional["WorkflowPause"]] = orm.relationship( + pause: Mapped[WorkflowPause | None] = orm.relationship( "WorkflowPause", primaryjoin="WorkflowRun.id == foreign(WorkflowPause.workflow_run_id)", uselist=False, @@ -689,7 +691,7 @@ class WorkflowRun(Base): } @classmethod - def from_dict(cls, data: dict[str, Any]) -> "WorkflowRun": + def from_dict(cls, data: dict[str, Any]) -> WorkflowRun: return cls( id=data.get("id"), tenant_id=data.get("tenant_id"), @@ -841,7 +843,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo created_by: Mapped[str] = mapped_column(StringUUID) finished_at: Mapped[datetime | None] = mapped_column(DateTime) - offload_data: Mapped[list["WorkflowNodeExecutionOffload"]] = orm.relationship( + offload_data: Mapped[list[WorkflowNodeExecutionOffload]] = orm.relationship( "WorkflowNodeExecutionOffload", primaryjoin="WorkflowNodeExecutionModel.id == foreign(WorkflowNodeExecutionOffload.node_execution_id)", uselist=True, @@ -851,13 +853,13 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo @staticmethod def preload_offload_data( - query: Select[tuple["WorkflowNodeExecutionModel"]] | orm.Query["WorkflowNodeExecutionModel"], + query: Select[tuple[WorkflowNodeExecutionModel]] | orm.Query[WorkflowNodeExecutionModel], ): return query.options(orm.selectinload(WorkflowNodeExecutionModel.offload_data)) @staticmethod def preload_offload_data_and_files( - query: Select[tuple["WorkflowNodeExecutionModel"]] | orm.Query["WorkflowNodeExecutionModel"], + query: Select[tuple[WorkflowNodeExecutionModel]] | orm.Query[WorkflowNodeExecutionModel], ): return query.options( orm.selectinload(WorkflowNodeExecutionModel.offload_data).options( @@ -932,7 +934,7 @@ class WorkflowNodeExecutionModel(Base): # This model is expected to have `offlo ) return extras - def _get_offload_by_type(self, type_: ExecutionOffLoadType) -> Optional["WorkflowNodeExecutionOffload"]: + def _get_offload_by_type(self, type_: ExecutionOffLoadType) -> WorkflowNodeExecutionOffload | None: return next(iter([i for i in self.offload_data if i.type_ == type_]), None) @property @@ -1046,7 +1048,7 @@ class WorkflowNodeExecutionOffload(Base): back_populates="offload_data", ) - file: Mapped[Optional["UploadFile"]] = orm.relationship( + file: Mapped[UploadFile | None] = orm.relationship( foreign_keys=[file_id], lazy="raise", uselist=False, @@ -1064,7 +1066,7 @@ class WorkflowAppLogCreatedFrom(StrEnum): INSTALLED_APP = "installed-app" @classmethod - def value_of(cls, value: str) -> "WorkflowAppLogCreatedFrom": + def value_of(cls, value: str) -> WorkflowAppLogCreatedFrom: """ Get value of given mode. 
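The unquoted style also covers class-level attributes that reference their own class, as in `SchemaRegistry._default_instance` and `AliyunLogStore._instance` earlier in this patch. A minimal sketch of that thread-safe singleton shape (the `Settings` class is invented for illustration):

```python
# Hedged sketch of the ClassVar-based singleton used by several classes above.
from __future__ import annotations

import threading
from typing import ClassVar


class Settings:
    _default: ClassVar[Settings | None] = None  # unquoted self-reference
    _lock: ClassVar[threading.Lock] = threading.Lock()

    def __init__(self, base_dir: str = "."):
        self.base_dir = base_dir

    @classmethod
    def default(cls) -> Settings:
        # Double-checked locking: skip the lock on the hot path, take it only
        # for the first construction.
        if cls._default is None:
            with cls._lock:
                if cls._default is None:
                    cls._default = cls()
        return cls._default


assert Settings.default() is Settings.default()
```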
@@ -1181,7 +1183,7 @@ class ConversationVariable(TypeBase): ) @classmethod - def from_variable(cls, *, app_id: str, conversation_id: str, variable: Variable) -> "ConversationVariable": + def from_variable(cls, *, app_id: str, conversation_id: str, variable: Variable) -> ConversationVariable: obj = cls( id=variable.id, app_id=app_id, @@ -1334,7 +1336,7 @@ class WorkflowDraftVariable(Base): ) # Relationship to WorkflowDraftVariableFile - variable_file: Mapped[Optional["WorkflowDraftVariableFile"]] = orm.relationship( + variable_file: Mapped[WorkflowDraftVariableFile | None] = orm.relationship( foreign_keys=[file_id], lazy="raise", uselist=False, @@ -1504,7 +1506,7 @@ class WorkflowDraftVariable(Base): node_execution_id: str | None, description: str = "", file_id: str | None = None, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = WorkflowDraftVariable() variable.id = str(uuid4()) variable.created_at = naive_utc_now() @@ -1527,7 +1529,7 @@ class WorkflowDraftVariable(Base): name: str, value: Segment, description: str = "", - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, @@ -1548,7 +1550,7 @@ class WorkflowDraftVariable(Base): value: Segment, node_execution_id: str, editable: bool = False, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, @@ -1571,7 +1573,7 @@ class WorkflowDraftVariable(Base): visible: bool = True, editable: bool = True, file_id: str | None = None, - ) -> "WorkflowDraftVariable": + ) -> WorkflowDraftVariable: variable = cls._new( app_id=app_id, node_id=node_id, @@ -1667,7 +1669,7 @@ class WorkflowDraftVariableFile(Base): ) # Relationship to UploadFile - upload_file: Mapped["UploadFile"] = orm.relationship( + upload_file: Mapped[UploadFile] = orm.relationship( foreign_keys=[upload_file_id], lazy="raise", uselist=False, @@ -1734,7 +1736,7 @@ class WorkflowPause(DefaultFieldsMixin, Base): state_object_key: Mapped[str] = mapped_column(String(length=255), nullable=False) # Relationship to WorkflowRun - workflow_run: Mapped["WorkflowRun"] = orm.relationship( + workflow_run: Mapped[WorkflowRun] = orm.relationship( foreign_keys=[workflow_run_id], # require explicit preloading. 
lazy="raise", @@ -1790,7 +1792,7 @@ class WorkflowPauseReason(DefaultFieldsMixin, Base): ) @classmethod - def from_entity(cls, pause_reason: PauseReason) -> "WorkflowPauseReason": + def from_entity(cls, pause_reason: PauseReason) -> WorkflowPauseReason: if isinstance(pause_reason, HumanInputRequired): return cls( type_=PauseReasonType.HUMAN_INPUT_REQUIRED, form_id=pause_reason.form_id, node_id=pause_reason.node_id diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index 0f969207cf..f973361341 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import dataclasses from abc import ABC, abstractmethod from collections.abc import Mapping @@ -106,7 +108,7 @@ class VariableTruncator(BaseTruncator): self._max_size_bytes = max_size_bytes @classmethod - def default(cls) -> "VariableTruncator": + def default(cls) -> VariableTruncator: return VariableTruncator( max_size_bytes=dify_config.WORKFLOW_VARIABLE_TRUNCATION_MAX_SIZE, array_element_limit=dify_config.WORKFLOW_VARIABLE_TRUNCATION_ARRAY_LENGTH, diff --git a/api/services/website_service.py b/api/services/website_service.py index a23f01ec71..fe48c3b08e 100644 --- a/api/services/website_service.py +++ b/api/services/website_service.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import datetime import json from dataclasses import dataclass @@ -78,7 +80,7 @@ class WebsiteCrawlApiRequest: return CrawlRequest(url=self.url, provider=self.provider, options=options) @classmethod - def from_args(cls, args: dict) -> "WebsiteCrawlApiRequest": + def from_args(cls, args: dict) -> WebsiteCrawlApiRequest: """Create from Flask-RESTful parsed arguments.""" provider = args.get("provider") url = args.get("url") @@ -102,7 +104,7 @@ class WebsiteCrawlStatusApiRequest: job_id: str @classmethod - def from_args(cls, args: dict, job_id: str) -> "WebsiteCrawlStatusApiRequest": + def from_args(cls, args: dict, job_id: str) -> WebsiteCrawlStatusApiRequest: """Create from Flask-RESTful parsed arguments.""" provider = args.get("provider") if not provider: diff --git a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py index b02f90588b..5ceb8dd7f7 100644 --- a/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py +++ b/api/tests/unit_tests/core/workflow/graph_engine/test_mock_config.py @@ -5,6 +5,8 @@ This module provides a flexible configuration system for customizing the behavior of mock nodes during testing. 
""" +from __future__ import annotations + from collections.abc import Callable from dataclasses import dataclass, field from typing import Any @@ -95,67 +97,67 @@ class MockConfigBuilder: def __init__(self) -> None: self._config = MockConfig() - def with_auto_mock(self, enabled: bool = True) -> "MockConfigBuilder": + def with_auto_mock(self, enabled: bool = True) -> MockConfigBuilder: """Enable or disable auto-mocking.""" self._config.enable_auto_mock = enabled return self - def with_delays(self, enabled: bool = True) -> "MockConfigBuilder": + def with_delays(self, enabled: bool = True) -> MockConfigBuilder: """Enable or disable simulated execution delays.""" self._config.simulate_delays = enabled return self - def with_llm_response(self, response: str) -> "MockConfigBuilder": + def with_llm_response(self, response: str) -> MockConfigBuilder: """Set default LLM response.""" self._config.default_llm_response = response return self - def with_agent_response(self, response: str) -> "MockConfigBuilder": + def with_agent_response(self, response: str) -> MockConfigBuilder: """Set default agent response.""" self._config.default_agent_response = response return self - def with_tool_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_tool_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default tool response.""" self._config.default_tool_response = response return self - def with_retrieval_response(self, response: str) -> "MockConfigBuilder": + def with_retrieval_response(self, response: str) -> MockConfigBuilder: """Set default retrieval response.""" self._config.default_retrieval_response = response return self - def with_http_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_http_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default HTTP response.""" self._config.default_http_response = response return self - def with_template_transform_response(self, response: str) -> "MockConfigBuilder": + def with_template_transform_response(self, response: str) -> MockConfigBuilder: """Set default template transform response.""" self._config.default_template_transform_response = response return self - def with_code_response(self, response: dict[str, Any]) -> "MockConfigBuilder": + def with_code_response(self, response: dict[str, Any]) -> MockConfigBuilder: """Set default code execution response.""" self._config.default_code_response = response return self - def with_node_output(self, node_id: str, outputs: dict[str, Any]) -> "MockConfigBuilder": + def with_node_output(self, node_id: str, outputs: dict[str, Any]) -> MockConfigBuilder: """Set outputs for a specific node.""" self._config.set_node_outputs(node_id, outputs) return self - def with_node_error(self, node_id: str, error: str) -> "MockConfigBuilder": + def with_node_error(self, node_id: str, error: str) -> MockConfigBuilder: """Set error for a specific node.""" self._config.set_node_error(node_id, error) return self - def with_node_config(self, config: NodeMockConfig) -> "MockConfigBuilder": + def with_node_config(self, config: NodeMockConfig) -> MockConfigBuilder: """Add a node-specific configuration.""" self._config.set_node_config(config.node_id, config) return self - def with_default_config(self, node_type: NodeType, config: dict[str, Any]) -> "MockConfigBuilder": + def with_default_config(self, node_type: NodeType, config: dict[str, Any]) -> MockConfigBuilder: """Set default configuration for a node type.""" self._config.set_default_config(node_type, 
config) return self diff --git a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py index 09b8191870..06927cddcf 100644 --- a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import types from collections.abc import Generator @@ -21,7 +23,7 @@ if TYPE_CHECKING: # pragma: no cover - imported for type checking only @pytest.fixture -def tool_node(monkeypatch) -> "ToolNode": +def tool_node(monkeypatch) -> ToolNode: module_name = "core.ops.ops_trace_manager" if module_name not in sys.modules: ops_stub = types.ModuleType(module_name) @@ -85,7 +87,7 @@ def _collect_events(generator: Generator) -> tuple[list[Any], LLMUsage]: return events, stop.value -def _run_transform(tool_node: "ToolNode", message: ToolInvokeMessage) -> tuple[list[Any], LLMUsage]: +def _run_transform(tool_node: ToolNode, message: ToolInvokeMessage) -> tuple[list[Any], LLMUsage]: def _identity_transform(messages, *_args, **_kwargs): return messages @@ -103,7 +105,7 @@ def _run_transform(tool_node: "ToolNode", message: ToolInvokeMessage) -> tuple[l return _collect_events(generator) -def test_link_messages_with_file_populate_files_output(tool_node: "ToolNode"): +def test_link_messages_with_file_populate_files_output(tool_node: ToolNode): file_obj = File( tenant_id="tenant-id", type=FileType.DOCUMENT, @@ -139,7 +141,7 @@ def test_link_messages_with_file_populate_files_output(tool_node: "ToolNode"): assert files_segment.value == [file_obj] -def test_plain_link_messages_remain_links(tool_node: "ToolNode"): +def test_plain_link_messages_remain_links(tool_node: ToolNode): message = ToolInvokeMessage( type=ToolInvokeMessage.MessageType.LINK, message=ToolInvokeMessage.TextMessage(text="https://dify.ai"), From 9c6c2a3c14b8fb46ba94f208cd01a877b5b0ff8b Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Wed, 7 Jan 2026 10:07:35 +0800 Subject: [PATCH 08/15] chore: add skill creator for create agent skills (#30652) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .claude/skills/skill-creator/SKILL.md | 355 ++++++++++++++++++ .../references/output-patterns.md | 86 +++++ .../skill-creator/references/workflows.md | 28 ++ .../skill-creator/scripts/init_skill.py | 300 +++++++++++++++ .../skill-creator/scripts/package_skill.py | 110 ++++++ .../skill-creator/scripts/quick_validate.py | 97 +++++ 6 files changed, 976 insertions(+) create mode 100644 .claude/skills/skill-creator/SKILL.md create mode 100644 .claude/skills/skill-creator/references/output-patterns.md create mode 100644 .claude/skills/skill-creator/references/workflows.md create mode 100755 .claude/skills/skill-creator/scripts/init_skill.py create mode 100755 .claude/skills/skill-creator/scripts/package_skill.py create mode 100755 .claude/skills/skill-creator/scripts/quick_validate.py diff --git a/.claude/skills/skill-creator/SKILL.md b/.claude/skills/skill-creator/SKILL.md new file mode 100644 index 0000000000..b49da5ac68 --- /dev/null +++ b/.claude/skills/skill-creator/SKILL.md @@ -0,0 +1,355 @@ +--- +name: skill-creator +description: Guide for creating effective skills. This skill should be used when users want to create a new skill (or update an existing skill) that extends Claude's capabilities with specialized knowledge, workflows, or tool integrations. 
+---
+
+# Skill Creator
+
+This skill provides guidance for creating effective skills.
+
+## About Skills
+
+Skills are modular, self-contained packages that extend Claude's capabilities by providing
+specialized knowledge, workflows, and tools. Think of them as "onboarding guides" for specific
+domains or tasks: they transform Claude from a general-purpose agent into a specialized agent
+equipped with procedural knowledge that no model can fully possess.
+
+### What Skills Provide
+
+1. Specialized workflows - Multi-step procedures for specific domains
+2. Tool integrations - Instructions for working with specific file formats or APIs
+3. Domain expertise - Company-specific knowledge, schemas, business logic
+4. Bundled resources - Scripts, references, and assets for complex and repetitive tasks
+
+## Core Principles
+
+### Concise is Key
+
+The context window is a public good. Skills share the context window with everything else Claude needs: system prompt, conversation history, other Skills' metadata, and the actual user request.
+
+**Default assumption: Claude is already very smart.** Only add context Claude doesn't already have. Challenge each piece of information: "Does Claude really need this explanation?" and "Does this paragraph justify its token cost?"
+
+Prefer concise examples over verbose explanations.
+
+### Set Appropriate Degrees of Freedom
+
+Match the level of specificity to the task's fragility and variability:
+
+**High freedom (text-based instructions)**: Use when multiple approaches are valid, decisions depend on context, or heuristics guide the approach.
+
+**Medium freedom (pseudocode or scripts with parameters)**: Use when a preferred pattern exists, some variation is acceptable, or configuration affects behavior.
+
+**Low freedom (specific scripts, few parameters)**: Use when operations are fragile and error-prone, consistency is critical, or a specific sequence must be followed.
+
+Think of Claude as exploring a path: a narrow bridge with cliffs needs specific guardrails (low freedom), while an open field allows many routes (high freedom).
+
+### Anatomy of a Skill
+
+Every skill consists of a required SKILL.md file and optional bundled resources:
+
+```
+skill-name/
+├── SKILL.md (required)
+│   ├── YAML frontmatter metadata (required)
+│   │   ├── name: (required)
+│   │   └── description: (required)
+│   └── Markdown instructions (required)
+└── Bundled Resources (optional)
+    ├── scripts/ - Executable code (Python/Bash/etc.)
+    ├── references/ - Documentation intended to be loaded into context as needed
+    └── assets/ - Files used in output (templates, icons, fonts, etc.)
+```
+
+#### SKILL.md (required)
+
+Every SKILL.md consists of:
+
+- **Frontmatter** (YAML): Contains `name` and `description` fields. These are the only fields that Claude reads to determine when the skill gets used, thus it is very important to be clear and comprehensive in describing what the skill is, and when it should be used.
+- **Body** (Markdown): Instructions and guidance for using the skill. Only loaded AFTER the skill triggers (if at all).
+
+#### Bundled Resources (optional)
+
+##### Scripts (`scripts/`)
+
+Executable code (Python/Bash/etc.) for tasks that require deterministic reliability or are repeatedly rewritten.
+
+- **When to include**: When the same code is being rewritten repeatedly or deterministic reliability is needed
+- **Example**: `scripts/rotate_pdf.py` for PDF rotation tasks
+- **Benefits**: Token efficient, deterministic, may be executed without loading into context
+- **Note**: Scripts may still need to be read by Claude for patching or environment-specific adjustments
+
+##### References (`references/`)
+
+Documentation and reference material intended to be loaded as needed into context to inform Claude's process and thinking.
+
+- **When to include**: For documentation that Claude should reference while working
+- **Examples**: `references/finance.md` for financial schemas, `references/mnda.md` for company NDA template, `references/policies.md` for company policies, `references/api_docs.md` for API specifications
+- **Use cases**: Database schemas, API documentation, domain knowledge, company policies, detailed workflow guides
+- **Benefits**: Keeps SKILL.md lean, loaded only when Claude determines it's needed
+- **Best practice**: If files are large (>10k words), include grep search patterns in SKILL.md
+- **Avoid duplication**: Information should live in either SKILL.md or references files, not both. Prefer references files for detailed information unless it's truly core to the skill; this keeps SKILL.md lean while making information discoverable without hogging the context window. Keep only essential procedural instructions and workflow guidance in SKILL.md; move detailed reference material, schemas, and examples to references files.
+
+##### Assets (`assets/`)
+
+Files not intended to be loaded into context, but rather used within the output Claude produces.
+
+- **When to include**: When the skill needs files that will be used in the final output
+- **Examples**: `assets/logo.png` for brand assets, `assets/slides.pptx` for PowerPoint templates, `assets/frontend-template/` for HTML/React boilerplate, `assets/font.ttf` for typography
+- **Use cases**: Templates, images, icons, boilerplate code, fonts, sample documents that get copied or modified
+- **Benefits**: Separates output resources from documentation, enables Claude to use files without loading them into context
+
+#### What Not to Include in a Skill
+
+A skill should only contain essential files that directly support its functionality. Do NOT create extraneous documentation or auxiliary files, including:
+
+- README.md
+- INSTALLATION_GUIDE.md
+- QUICK_REFERENCE.md
+- CHANGELOG.md
+- etc.
+
+The skill should only contain the information needed for an AI agent to do the job at hand. It should not contain auxiliary context about the process that went into creating it, setup and testing procedures, user-facing documentation, etc. Creating additional documentation files just adds clutter and confusion.
+
+### Progressive Disclosure Design Principle
+
+Skills use a three-level loading system to manage context efficiently:
+
+1. **Metadata (name + description)** - Always in context (~100 words)
+2. **SKILL.md body** - When skill triggers (<5k words)
+3. **Bundled resources** - As needed by Claude (unlimited, because scripts can be executed without reading into the context window)
+
+#### Progressive Disclosure Patterns
+
+Keep SKILL.md body to the essentials and under 500 lines to minimize context bloat. Split content into separate files when approaching this limit.
+When splitting out content into other files, it is very important to reference them from SKILL.md and describe clearly when to read them, to ensure the reader of the skill knows they exist and when to use them.
+
+**Key principle:** When a skill supports multiple variations, frameworks, or options, keep only the core workflow and selection guidance in SKILL.md. Move variant-specific details (patterns, examples, configuration) into separate reference files.
+
+**Pattern 1: High-level guide with references**
+
+```markdown
+# PDF Processing
+
+## Quick start
+
+Extract text with pdfplumber:
+[code example]
+
+## Advanced features
+
+- **Form filling**: See [FORMS.md](FORMS.md) for complete guide
+- **API reference**: See [REFERENCE.md](REFERENCE.md) for all methods
+- **Examples**: See [EXAMPLES.md](EXAMPLES.md) for common patterns
+```
+
+Claude loads FORMS.md, REFERENCE.md, or EXAMPLES.md only when needed.
+
+**Pattern 2: Domain-specific organization**
+
+For Skills with multiple domains, organize content by domain to avoid loading irrelevant context:
+
+```
+bigquery-skill/
+├── SKILL.md (overview and navigation)
+└── reference/
+    ├── finance.md (revenue, billing metrics)
+    ├── sales.md (opportunities, pipeline)
+    ├── product.md (API usage, features)
+    └── marketing.md (campaigns, attribution)
+```
+
+When a user asks about sales metrics, Claude only reads sales.md.
+
+Similarly, for skills supporting multiple frameworks or variants, organize by variant:
+
+```
+cloud-deploy/
+├── SKILL.md (workflow + provider selection)
+└── references/
+    ├── aws.md (AWS deployment patterns)
+    ├── gcp.md (GCP deployment patterns)
+    └── azure.md (Azure deployment patterns)
+```
+
+When the user chooses AWS, Claude only reads aws.md.
+
+**Pattern 3: Conditional details**
+
+Show basic content, link to advanced content:
+
+```markdown
+# DOCX Processing
+
+## Creating documents
+
+Use docx-js for new documents. See [DOCX-JS.md](DOCX-JS.md).
+
+## Editing documents
+
+For simple edits, modify the XML directly.
+
+**For tracked changes**: See [REDLINING.md](REDLINING.md)
+**For OOXML details**: See [OOXML.md](OOXML.md)
+```
+
+Claude reads REDLINING.md or OOXML.md only when the user needs those features.
+
+**Important guidelines:**
+
+- **Avoid deeply nested references** - Keep references one level deep from SKILL.md. All reference files should link directly from SKILL.md.
+- **Structure longer reference files** - For files longer than 100 lines, include a table of contents at the top so Claude can see the full scope when previewing.
+
+## Skill Creation Process
+
+Skill creation involves these steps:
+
+1. Understand the skill with concrete examples
+2. Plan reusable skill contents (scripts, references, assets)
+3. Initialize the skill (run init_skill.py)
+4. Edit the skill (implement resources and write SKILL.md)
+5. Package the skill (run package_skill.py)
+6. Iterate based on real usage
+
+Follow these steps in order, skipping only if there is a clear reason why they are not applicable.
+
+### Step 1: Understanding the Skill with Concrete Examples
+
+Skip this step only when the skill's usage patterns are already clearly understood. It remains valuable even when working with an existing skill.
+
+To create an effective skill, clearly understand concrete examples of how the skill will be used. This understanding can come from either direct user examples or generated examples that are validated with user feedback.
+ +For example, when building an image-editor skill, relevant questions include: + +- "What functionality should the image-editor skill support? Editing, rotating, anything else?" +- "Can you give some examples of how this skill would be used?" +- "I can imagine users asking for things like 'Remove the red-eye from this image' or 'Rotate this image'. Are there other ways you imagine this skill being used?" +- "What would a user say that should trigger this skill?" + +To avoid overwhelming users, avoid asking too many questions in a single message. Start with the most important questions and follow up as needed for better effectiveness. + +Conclude this step when there is a clear sense of the functionality the skill should support. + +### Step 2: Planning the Reusable Skill Contents + +To turn concrete examples into an effective skill, analyze each example by: + +1. Considering how to execute on the example from scratch +2. Identifying what scripts, references, and assets would be helpful when executing these workflows repeatedly + +Example: When building a `pdf-editor` skill to handle queries like "Help me rotate this PDF," the analysis shows: + +1. Rotating a PDF requires re-writing the same code each time +2. A `scripts/rotate_pdf.py` script would be helpful to store in the skill + +Example: When designing a `frontend-webapp-builder` skill for queries like "Build me a todo app" or "Build me a dashboard to track my steps," the analysis shows: + +1. Writing a frontend webapp requires the same boilerplate HTML/React each time +2. An `assets/hello-world/` template containing the boilerplate HTML/React project files would be helpful to store in the skill + +Example: When building a `big-query` skill to handle queries like "How many users have logged in today?" the analysis shows: + +1. Querying BigQuery requires re-discovering the table schemas and relationships each time +2. A `references/schema.md` file documenting the table schemas would be helpful to store in the skill + +To establish the skill's contents, analyze each concrete example to create a list of the reusable resources to include: scripts, references, and assets. + +### Step 3: Initializing the Skill + +At this point, it is time to actually create the skill. + +Skip this step only if the skill being developed already exists, and iteration or packaging is needed. In this case, continue to the next step. + +When creating a new skill from scratch, always run the `init_skill.py` script. The script conveniently generates a new template skill directory that automatically includes everything a skill requires, making the skill creation process much more efficient and reliable. + +Usage: + +```bash +scripts/init_skill.py --path +``` + +The script: + +- Creates the skill directory at the specified path +- Generates a SKILL.md template with proper frontmatter and TODO placeholders +- Creates example resource directories: `scripts/`, `references/`, and `assets/` +- Adds example files in each directory that can be customized or deleted + +After initialization, customize or remove the generated SKILL.md and example files as needed. + +### Step 4: Edit the Skill + +When editing the (newly-generated or existing) skill, remember that the skill is being created for another instance of Claude to use. Include information that would be beneficial and non-obvious to Claude. Consider what procedural knowledge, domain-specific details, or reusable assets would help another Claude instance execute these tasks more effectively. 
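For a concrete sense of the kind of deterministic helper Step 2 argues for, a minimal sketch of the `scripts/rotate_pdf.py` example mentioned above might look like the following. It assumes the third-party pypdf package and is illustrative only; it is not one of the scripts added in this commit.

```python
#!/usr/bin/env python3
"""Rotate every page of a PDF; sketch of a bundled skill script."""
import argparse

from pypdf import PdfReader, PdfWriter  # assumed dependency


def rotate_pdf(src: str, dst: str, degrees: int = 90) -> None:
    # `degrees` must be a multiple of 90 for pypdf's page rotation.
    reader = PdfReader(src)
    writer = PdfWriter()
    for page in reader.pages:
        writer.add_page(page.rotate(degrees))
    with open(dst, "wb") as f:
        writer.write(f)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Rotate every page of a PDF.")
    parser.add_argument("src")
    parser.add_argument("dst")
    parser.add_argument("--degrees", type=int, default=90)
    args = parser.parse_args()
    rotate_pdf(args.src, args.dst, args.degrees)
```

Keeping an operation like this as a tested script matches the guidance in Step 4 below: it runs the same way every time and never has to be loaded into context to be executed.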
+ +#### Learn Proven Design Patterns + +Consult these helpful guides based on your skill's needs: + +- **Multi-step processes**: See references/workflows.md for sequential workflows and conditional logic +- **Specific output formats or quality standards**: See references/output-patterns.md for template and example patterns + +These files contain established best practices for effective skill design. + +#### Start with Reusable Skill Contents + +To begin implementation, start with the reusable resources identified above: `scripts/`, `references/`, and `assets/` files. Note that this step may require user input. For example, when implementing a `brand-guidelines` skill, the user may need to provide brand assets or templates to store in `assets/`, or documentation to store in `references/`. + +Added scripts must be tested by actually running them to ensure there are no bugs and that the output matches what is expected. If there are many similar scripts, only a representative sample needs to be tested to ensure confidence that they all work while balancing time to completion. + +Any example files and directories not needed for the skill should be deleted. The initialization script creates example files in `scripts/`, `references/`, and `assets/` to demonstrate structure, but most skills won't need all of them. + +#### Update SKILL.md + +**Writing Guidelines:** Always use imperative/infinitive form. + +##### Frontmatter + +Write the YAML frontmatter with `name` and `description`: + +- `name`: The skill name +- `description`: This is the primary triggering mechanism for your skill, and helps Claude understand when to use the skill. + - Include both what the Skill does and specific triggers/contexts for when to use it. + - Include all "when to use" information here - Not in the body. The body is only loaded after triggering, so "When to Use This Skill" sections in the body are not helpful to Claude. + - Example description for a `docx` skill: "Comprehensive document creation, editing, and analysis with support for tracked changes, comments, formatting preservation, and text extraction. Use when Claude needs to work with professional documents (.docx files) for: (1) Creating new documents, (2) Modifying or editing content, (3) Working with tracked changes, (4) Adding comments, or any other document tasks" + +Do not include any other fields in YAML frontmatter. + +##### Body + +Write instructions for using the skill and its bundled resources. + +### Step 5: Packaging a Skill + +Once development of the skill is complete, it must be packaged into a distributable .skill file that gets shared with the user. The packaging process automatically validates the skill first to ensure it meets all requirements: + +```bash +scripts/package_skill.py +``` + +Optional output directory specification: + +```bash +scripts/package_skill.py ./dist +``` + +The packaging script will: + +1. **Validate** the skill automatically, checking: + + - YAML frontmatter format and required fields + - Skill naming conventions and directory structure + - Description completeness and quality + - File organization and resource references + +2. **Package** the skill if validation passes, creating a .skill file named after the skill (e.g., `my-skill.skill`) that includes all files and maintains the proper directory structure for distribution. The .skill file is a zip file with a .skill extension. + +If validation fails, the script will report the errors and exit without creating a package. 
Fix any validation errors and run the packaging command again. + +### Step 6: Iterate + +After testing the skill, users may request improvements. Often this happens right after using the skill, with fresh context of how the skill performed. + +**Iteration workflow:** + +1. Use the skill on real tasks +2. Notice struggles or inefficiencies +3. Identify how SKILL.md or bundled resources should be updated +4. Implement changes and test again diff --git a/.claude/skills/skill-creator/references/output-patterns.md b/.claude/skills/skill-creator/references/output-patterns.md new file mode 100644 index 0000000000..022e85fe5e --- /dev/null +++ b/.claude/skills/skill-creator/references/output-patterns.md @@ -0,0 +1,86 @@ +# Output Patterns + +Use these patterns when skills need to produce consistent, high-quality output. + +## Template Pattern + +Provide templates for output format. Match the level of strictness to your needs. + +**For strict requirements (like API responses or data formats):** + +```markdown +## Report structure + +ALWAYS use this exact template structure: + +# [Analysis Title] + +## Executive summary +[One-paragraph overview of key findings] + +## Key findings +- Finding 1 with supporting data +- Finding 2 with supporting data +- Finding 3 with supporting data + +## Recommendations +1. Specific actionable recommendation +2. Specific actionable recommendation +``` + +**For flexible guidance (when adaptation is useful):** + +```markdown +## Report structure + +Here is a sensible default format, but use your best judgment: + +# [Analysis Title] + +## Executive summary +[Overview] + +## Key findings +[Adapt sections based on what you discover] + +## Recommendations +[Tailor to the specific context] + +Adjust sections as needed for the specific analysis type. +``` + +## Examples Pattern + +For skills where output quality depends on seeing examples, provide input/output pairs: + +```markdown +## Commit message format + +Generate commit messages following these examples: + +**Example 1:** +Input: Added user authentication with JWT tokens +Output: +``` + +feat(auth): implement JWT-based authentication + +Add login endpoint and token validation middleware + +``` + +**Example 2:** +Input: Fixed bug where dates displayed incorrectly in reports +Output: +``` + +fix(reports): correct date formatting in timezone conversion + +Use UTC timestamps consistently across report generation + +``` + +Follow this style: type(scope): brief description, then detailed explanation. +``` + +Examples help Claude understand the desired style and level of detail more clearly than descriptions alone. diff --git a/.claude/skills/skill-creator/references/workflows.md b/.claude/skills/skill-creator/references/workflows.md new file mode 100644 index 0000000000..54b0174078 --- /dev/null +++ b/.claude/skills/skill-creator/references/workflows.md @@ -0,0 +1,28 @@ +# Workflow Patterns + +## Sequential Workflows + +For complex tasks, break operations into clear, sequential steps. It is often helpful to give Claude an overview of the process towards the beginning of SKILL.md: + +```markdown +Filling a PDF form involves these steps: + +1. Analyze the form (run analyze_form.py) +2. Create field mapping (edit fields.json) +3. Validate mapping (run validate_fields.py) +4. Fill the form (run fill_form.py) +5. Verify output (run verify_output.py) +``` + +## Conditional Workflows + +For tasks with branching logic, guide Claude through decision points: + +```markdown +1. 
Determine the modification type: + **Creating new content?** โ†’ Follow "Creation workflow" below + **Editing existing content?** โ†’ Follow "Editing workflow" below + +2. Creation workflow: [steps] +3. Editing workflow: [steps] +``` diff --git a/.claude/skills/skill-creator/scripts/init_skill.py b/.claude/skills/skill-creator/scripts/init_skill.py new file mode 100755 index 0000000000..249fffcbbd --- /dev/null +++ b/.claude/skills/skill-creator/scripts/init_skill.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python3 +""" +Skill Initializer - Creates a new skill from template + +Usage: + init_skill.py --path + +Examples: + init_skill.py my-new-skill --path skills/public + init_skill.py my-api-helper --path skills/private + init_skill.py custom-skill --path /custom/location +""" + +import sys +from pathlib import Path + + +SKILL_TEMPLATE = """--- +name: {skill_name} +description: [TODO: Complete and informative explanation of what the skill does and when to use it. Include WHEN to use this skill - specific scenarios, file types, or tasks that trigger it.] +--- + +# {skill_title} + +## Overview + +[TODO: 1-2 sentences explaining what this skill enables] + +## Structuring This Skill + +[TODO: Choose the structure that best fits this skill's purpose. Common patterns: + +**1. Workflow-Based** (best for sequential processes) +- Works well when there are clear step-by-step procedures +- Example: DOCX skill with "Workflow Decision Tree" โ†’ "Reading" โ†’ "Creating" โ†’ "Editing" +- Structure: ## Overview โ†’ ## Workflow Decision Tree โ†’ ## Step 1 โ†’ ## Step 2... + +**2. Task-Based** (best for tool collections) +- Works well when the skill offers different operations/capabilities +- Example: PDF skill with "Quick Start" โ†’ "Merge PDFs" โ†’ "Split PDFs" โ†’ "Extract Text" +- Structure: ## Overview โ†’ ## Quick Start โ†’ ## Task Category 1 โ†’ ## Task Category 2... + +**3. Reference/Guidelines** (best for standards or specifications) +- Works well for brand guidelines, coding standards, or requirements +- Example: Brand styling with "Brand Guidelines" โ†’ "Colors" โ†’ "Typography" โ†’ "Features" +- Structure: ## Overview โ†’ ## Guidelines โ†’ ## Specifications โ†’ ## Usage... + +**4. Capabilities-Based** (best for integrated systems) +- Works well when the skill provides multiple interrelated features +- Example: Product Management with "Core Capabilities" โ†’ numbered capability list +- Structure: ## Overview โ†’ ## Core Capabilities โ†’ ### 1. Feature โ†’ ### 2. Feature... + +Patterns can be mixed and matched as needed. Most skills combine patterns (e.g., start with task-based, add workflow for complex operations). + +Delete this entire "Structuring This Skill" section when done - it's just guidance.] + +## [TODO: Replace with the first main section based on chosen structure] + +[TODO: Add content here. See examples in existing skills: +- Code samples for technical skills +- Decision trees for complex workflows +- Concrete examples with realistic user requests +- References to scripts/templates/references as needed] + +## Resources + +This skill includes example resource directories that demonstrate how to organize different types of bundled resources: + +### scripts/ +Executable code (Python/Bash/etc.) that can be run directly to perform specific operations. 
+ +**Examples from other skills:** +- PDF skill: `fill_fillable_fields.py`, `extract_form_field_info.py` - utilities for PDF manipulation +- DOCX skill: `document.py`, `utilities.py` - Python modules for document processing + +**Appropriate for:** Python scripts, shell scripts, or any executable code that performs automation, data processing, or specific operations. + +**Note:** Scripts may be executed without loading into context, but can still be read by Claude for patching or environment adjustments. + +### references/ +Documentation and reference material intended to be loaded into context to inform Claude's process and thinking. + +**Examples from other skills:** +- Product management: `communication.md`, `context_building.md` - detailed workflow guides +- BigQuery: API reference documentation and query examples +- Finance: Schema documentation, company policies + +**Appropriate for:** In-depth documentation, API references, database schemas, comprehensive guides, or any detailed information that Claude should reference while working. + +### assets/ +Files not intended to be loaded into context, but rather used within the output Claude produces. + +**Examples from other skills:** +- Brand styling: PowerPoint template files (.pptx), logo files +- Frontend builder: HTML/React boilerplate project directories +- Typography: Font files (.ttf, .woff2) + +**Appropriate for:** Templates, boilerplate code, document templates, images, icons, fonts, or any files meant to be copied or used in the final output. + +--- + +**Any unneeded directories can be deleted.** Not every skill requires all three types of resources. +""" + +EXAMPLE_SCRIPT = '''#!/usr/bin/env python3 +""" +Example helper script for {skill_name} + +This is a placeholder script that can be executed directly. +Replace with actual implementation or delete if not needed. + +Example real scripts from other skills: +- pdf/scripts/fill_fillable_fields.py - Fills PDF form fields +- pdf/scripts/convert_pdf_to_images.py - Converts PDF pages to images +""" + +def main(): + print("This is an example script for {skill_name}") + # TODO: Add actual script logic here + # This could be data processing, file conversion, API calls, etc. + +if __name__ == "__main__": + main() +''' + +EXAMPLE_REFERENCE = """# Reference Documentation for {skill_title} + +This is a placeholder for detailed reference documentation. +Replace with actual reference content or delete if not needed. + +Example real reference docs from other skills: +- product-management/references/communication.md - Comprehensive guide for status updates +- product-management/references/context_building.md - Deep-dive on gathering context +- bigquery/references/ - API references and query examples + +## When Reference Docs Are Useful + +Reference docs are ideal for: +- Comprehensive API documentation +- Detailed workflow guides +- Complex multi-step processes +- Information too lengthy for main SKILL.md +- Content that's only needed for specific use cases + +## Structure Suggestions + +### API Reference Example +- Overview +- Authentication +- Endpoints with examples +- Error codes +- Rate limits + +### Workflow Guide Example +- Prerequisites +- Step-by-step instructions +- Common patterns +- Troubleshooting +- Best practices +""" + +EXAMPLE_ASSET = """# Example Asset File + +This placeholder represents where asset files would be stored. +Replace with actual asset files (templates, images, fonts, etc.) or delete if not needed. 
+ +Asset files are NOT intended to be loaded into context, but rather used within +the output Claude produces. + +Example asset files from other skills: +- Brand guidelines: logo.png, slides_template.pptx +- Frontend builder: hello-world/ directory with HTML/React boilerplate +- Typography: custom-font.ttf, font-family.woff2 +- Data: sample_data.csv, test_dataset.json + +## Common Asset Types + +- Templates: .pptx, .docx, boilerplate directories +- Images: .png, .jpg, .svg, .gif +- Fonts: .ttf, .otf, .woff, .woff2 +- Boilerplate code: Project directories, starter files +- Icons: .ico, .svg +- Data files: .csv, .json, .xml, .yaml + +Note: This is a text placeholder. Actual assets can be any file type. +""" + + +def title_case_skill_name(skill_name): + """Convert hyphenated skill name to Title Case for display.""" + return " ".join(word.capitalize() for word in skill_name.split("-")) + + +def init_skill(skill_name, path): + """ + Initialize a new skill directory with template SKILL.md. + + Args: + skill_name: Name of the skill + path: Path where the skill directory should be created + + Returns: + Path to created skill directory, or None if error + """ + # Determine skill directory path + skill_dir = Path(path).resolve() / skill_name + + # Check if directory already exists + if skill_dir.exists(): + print(f"โŒ Error: Skill directory already exists: {skill_dir}") + return None + + # Create skill directory + try: + skill_dir.mkdir(parents=True, exist_ok=False) + print(f"โœ… Created skill directory: {skill_dir}") + except Exception as e: + print(f"โŒ Error creating directory: {e}") + return None + + # Create SKILL.md from template + skill_title = title_case_skill_name(skill_name) + skill_content = SKILL_TEMPLATE.format(skill_name=skill_name, skill_title=skill_title) + + skill_md_path = skill_dir / "SKILL.md" + try: + skill_md_path.write_text(skill_content) + print("โœ… Created SKILL.md") + except Exception as e: + print(f"โŒ Error creating SKILL.md: {e}") + return None + + # Create resource directories with example files + try: + # Create scripts/ directory with example script + scripts_dir = skill_dir / "scripts" + scripts_dir.mkdir(exist_ok=True) + example_script = scripts_dir / "example.py" + example_script.write_text(EXAMPLE_SCRIPT.format(skill_name=skill_name)) + example_script.chmod(0o755) + print("โœ… Created scripts/example.py") + + # Create references/ directory with example reference doc + references_dir = skill_dir / "references" + references_dir.mkdir(exist_ok=True) + example_reference = references_dir / "api_reference.md" + example_reference.write_text(EXAMPLE_REFERENCE.format(skill_title=skill_title)) + print("โœ… Created references/api_reference.md") + + # Create assets/ directory with example asset placeholder + assets_dir = skill_dir / "assets" + assets_dir.mkdir(exist_ok=True) + example_asset = assets_dir / "example_asset.txt" + example_asset.write_text(EXAMPLE_ASSET) + print("โœ… Created assets/example_asset.txt") + except Exception as e: + print(f"โŒ Error creating resource directories: {e}") + return None + + # Print next steps + print(f"\nโœ… Skill '{skill_name}' initialized successfully at {skill_dir}") + print("\nNext steps:") + print("1. Edit SKILL.md to complete the TODO items and update the description") + print("2. Customize or delete the example files in scripts/, references/, and assets/") + print("3. 
Run the validator when ready to check the skill structure") + + return skill_dir + + +def main(): + if len(sys.argv) < 4 or sys.argv[2] != "--path": + print("Usage: init_skill.py --path ") + print("\nSkill name requirements:") + print(" - Hyphen-case identifier (e.g., 'data-analyzer')") + print(" - Lowercase letters, digits, and hyphens only") + print(" - Max 40 characters") + print(" - Must match directory name exactly") + print("\nExamples:") + print(" init_skill.py my-new-skill --path skills/public") + print(" init_skill.py my-api-helper --path skills/private") + print(" init_skill.py custom-skill --path /custom/location") + sys.exit(1) + + skill_name = sys.argv[1] + path = sys.argv[3] + + print(f"๐Ÿš€ Initializing skill: {skill_name}") + print(f" Location: {path}") + print() + + result = init_skill(skill_name, path) + + if result: + sys.exit(0) + else: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.claude/skills/skill-creator/scripts/package_skill.py b/.claude/skills/skill-creator/scripts/package_skill.py new file mode 100755 index 0000000000..736b928be0 --- /dev/null +++ b/.claude/skills/skill-creator/scripts/package_skill.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +""" +Skill Packager - Creates a distributable .skill file of a skill folder + +Usage: + python utils/package_skill.py [output-directory] + +Example: + python utils/package_skill.py skills/public/my-skill + python utils/package_skill.py skills/public/my-skill ./dist +""" + +import sys +import zipfile +from pathlib import Path +from quick_validate import validate_skill + + +def package_skill(skill_path, output_dir=None): + """ + Package a skill folder into a .skill file. + + Args: + skill_path: Path to the skill folder + output_dir: Optional output directory for the .skill file (defaults to current directory) + + Returns: + Path to the created .skill file, or None if error + """ + skill_path = Path(skill_path).resolve() + + # Validate skill folder exists + if not skill_path.exists(): + print(f"โŒ Error: Skill folder not found: {skill_path}") + return None + + if not skill_path.is_dir(): + print(f"โŒ Error: Path is not a directory: {skill_path}") + return None + + # Validate SKILL.md exists + skill_md = skill_path / "SKILL.md" + if not skill_md.exists(): + print(f"โŒ Error: SKILL.md not found in {skill_path}") + return None + + # Run validation before packaging + print("๐Ÿ” Validating skill...") + valid, message = validate_skill(skill_path) + if not valid: + print(f"โŒ Validation failed: {message}") + print(" Please fix the validation errors before packaging.") + return None + print(f"โœ… {message}\n") + + # Determine output location + skill_name = skill_path.name + if output_dir: + output_path = Path(output_dir).resolve() + output_path.mkdir(parents=True, exist_ok=True) + else: + output_path = Path.cwd() + + skill_filename = output_path / f"{skill_name}.skill" + + # Create the .skill file (zip format) + try: + with zipfile.ZipFile(skill_filename, "w", zipfile.ZIP_DEFLATED) as zipf: + # Walk through the skill directory + for file_path in skill_path.rglob("*"): + if file_path.is_file(): + # Calculate the relative path within the zip + arcname = file_path.relative_to(skill_path.parent) + zipf.write(file_path, arcname) + print(f" Added: {arcname}") + + print(f"\nโœ… Successfully packaged skill to: {skill_filename}") + return skill_filename + + except Exception as e: + print(f"โŒ Error creating .skill file: {e}") + return None + + +def main(): + if len(sys.argv) < 2: + print("Usage: python 
utils/package_skill.py [output-directory]") + print("\nExample:") + print(" python utils/package_skill.py skills/public/my-skill") + print(" python utils/package_skill.py skills/public/my-skill ./dist") + sys.exit(1) + + skill_path = sys.argv[1] + output_dir = sys.argv[2] if len(sys.argv) > 2 else None + + print(f"๐Ÿ“ฆ Packaging skill: {skill_path}") + if output_dir: + print(f" Output directory: {output_dir}") + print() + + result = package_skill(skill_path, output_dir) + + if result: + sys.exit(0) + else: + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.claude/skills/skill-creator/scripts/quick_validate.py b/.claude/skills/skill-creator/scripts/quick_validate.py new file mode 100755 index 0000000000..66eb0a71bf --- /dev/null +++ b/.claude/skills/skill-creator/scripts/quick_validate.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python3 +""" +Quick validation script for skills - minimal version +""" + +import sys +import os +import re +import yaml +from pathlib import Path + + +def validate_skill(skill_path): + """Basic validation of a skill""" + skill_path = Path(skill_path) + + # Check SKILL.md exists + skill_md = skill_path / "SKILL.md" + if not skill_md.exists(): + return False, "SKILL.md not found" + + # Read and validate frontmatter + content = skill_md.read_text() + if not content.startswith("---"): + return False, "No YAML frontmatter found" + + # Extract frontmatter + match = re.match(r"^---\n(.*?)\n---", content, re.DOTALL) + if not match: + return False, "Invalid frontmatter format" + + frontmatter_text = match.group(1) + + # Parse YAML frontmatter + try: + frontmatter = yaml.safe_load(frontmatter_text) + if not isinstance(frontmatter, dict): + return False, "Frontmatter must be a YAML dictionary" + except yaml.YAMLError as e: + return False, f"Invalid YAML in frontmatter: {e}" + + # Define allowed properties + ALLOWED_PROPERTIES = {"name", "description", "license", "allowed-tools", "metadata"} + + # Check for unexpected properties (excluding nested keys under metadata) + unexpected_keys = set(frontmatter.keys()) - ALLOWED_PROPERTIES + if unexpected_keys: + return False, ( + f"Unexpected key(s) in SKILL.md frontmatter: {', '.join(sorted(unexpected_keys))}. " + f"Allowed properties are: {', '.join(sorted(ALLOWED_PROPERTIES))}" + ) + + # Check required fields + if "name" not in frontmatter: + return False, "Missing 'name' in frontmatter" + if "description" not in frontmatter: + return False, "Missing 'description' in frontmatter" + + # Extract name for validation + name = frontmatter.get("name", "") + if not isinstance(name, str): + return False, f"Name must be a string, got {type(name).__name__}" + name = name.strip() + if name: + # Check naming convention (hyphen-case: lowercase with hyphens) + if not re.match(r"^[a-z0-9-]+$", name): + return False, f"Name '{name}' should be hyphen-case (lowercase letters, digits, and hyphens only)" + if name.startswith("-") or name.endswith("-") or "--" in name: + return False, f"Name '{name}' cannot start/end with hyphen or contain consecutive hyphens" + # Check name length (max 64 characters per spec) + if len(name) > 64: + return False, f"Name is too long ({len(name)} characters). Maximum is 64 characters." 
+ + # Extract and validate description + description = frontmatter.get("description", "") + if not isinstance(description, str): + return False, f"Description must be a string, got {type(description).__name__}" + description = description.strip() + if description: + # Check for angle brackets + if "<" in description or ">" in description: + return False, "Description cannot contain angle brackets (< or >)" + # Check description length (max 1024 characters per spec) + if len(description) > 1024: + return False, f"Description is too long ({len(description)} characters). Maximum is 1024 characters." + + return True, "Skill is valid!" + + +if __name__ == "__main__": + if len(sys.argv) != 2: + print("Usage: python quick_validate.py ") + sys.exit(1) + + valid, message = validate_skill(sys.argv[1]) + print(message) + sys.exit(0 if valid else 1) From 93faa672cca24a88df0b9906e848eed405461f26 Mon Sep 17 00:00:00 2001 From: Xiangxuan Qu Date: Wed, 7 Jan 2026 11:16:17 +0900 Subject: [PATCH 09/15] fix: add DB_TYPE environment variable to unit tests (#30660) Co-authored-by: fghpdf --- api/tests/unit_tests/configs/test_dify_config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/api/tests/unit_tests/configs/test_dify_config.py b/api/tests/unit_tests/configs/test_dify_config.py index 209b6bf59b..6fce7849f9 100644 --- a/api/tests/unit_tests/configs/test_dify_config.py +++ b/api/tests/unit_tests/configs/test_dify_config.py @@ -16,6 +16,7 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30") # Custom value for testing + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -51,6 +52,7 @@ def test_http_timeout_defaults(monkeypatch: pytest.MonkeyPatch): os.environ.clear() # Set minimal required env vars + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -75,6 +77,7 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -124,6 +127,7 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): # Set environment variables using monkeypatch monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -140,6 +144,7 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch): def test_db_extras_options_merging(monkeypatch: pytest.MonkeyPatch): """Test that DB_EXTRAS options are properly merged with default timezone setting""" # Set environment variables + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") @@ -199,6 +204,7 @@ def 
test_celery_broker_url_with_special_chars_password( # Set up basic required environment variables (following existing pattern) monkeypatch.setenv("CONSOLE_API_URL", "https://example.com") monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com") + monkeypatch.setenv("DB_TYPE", "postgresql") monkeypatch.setenv("DB_USERNAME", "postgres") monkeypatch.setenv("DB_PASSWORD", "postgres") monkeypatch.setenv("DB_HOST", "localhost") From ace8ad429feb6f21f28da76591551f5018c630de Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Wed, 7 Jan 2026 10:19:14 +0800 Subject: [PATCH 10/15] fix: fix not record access token (#30654) --- web/app/signin/components/mail-and-password-auth.tsx | 2 ++ 1 file changed, 2 insertions(+) diff --git a/web/app/signin/components/mail-and-password-auth.tsx b/web/app/signin/components/mail-and-password-auth.tsx index 101ddf559a..01ec4c74ed 100644 --- a/web/app/signin/components/mail-and-password-auth.tsx +++ b/web/app/signin/components/mail-and-password-auth.tsx @@ -11,6 +11,7 @@ import Toast from '@/app/components/base/toast' import { emailRegex } from '@/config' import { useLocale } from '@/context/i18n' import { login } from '@/service/common' +import { setWebAppAccessToken } from '@/service/webapp-auth' import { encryptPassword } from '@/utils/encryption' import { resolvePostLoginRedirect } from '../utils/post-login-redirect' @@ -65,6 +66,7 @@ export default function MailAndPasswordAuth({ isInvite, isEmailSetup, allowRegis }) if (res.result === 'success') { // Track login success event + setWebAppAccessToken(res.data.access_token) trackEvent('user_login_success', { method: 'email_password', is_invite: isInvite, From 357548ca07b9c470b5c9a777edef0769b5e47219 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Wed, 7 Jan 2026 10:25:52 +0800 Subject: [PATCH 11/15] chore: rename ralph-wiggum plugin to ralph-loop (#30664) Co-authored-by: Claude Opus 4.5 --- .claude/settings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.claude/settings.json b/.claude/settings.json index c5c514b5f5..509dbe8447 100644 --- a/.claude/settings.json +++ b/.claude/settings.json @@ -4,6 +4,6 @@ "context7@claude-plugins-official": true, "typescript-lsp@claude-plugins-official": true, "pyright-lsp@claude-plugins-official": true, - "ralph-wiggum@claude-plugins-official": true + "ralph-loop@claude-plugins-official": true } } From e335cd0ef42ffafecd31880730a66b2e757677de Mon Sep 17 00:00:00 2001 From: Stephen Zhou <38493346+hyoban@users.noreply.github.com> Date: Wed, 7 Jan 2026 13:20:09 +0800 Subject: [PATCH 12/15] refactor(web): remove useMixedTranslation, better resource loading (#30630) Co-authored-by: Claude Opus 4.5 --- .../search-error-handling.test.ts | 8 +++ web/app/(commonLayout)/plugins/page.tsx | 4 +- .../chat/chat-with-history/hooks.spec.tsx | 2 +- .../base/chat/chat-with-history/hooks.tsx | 2 +- .../base/chat/embedded-chatbot/hooks.spec.tsx | 2 +- .../base/chat/embedded-chatbot/hooks.tsx | 2 +- .../actions/commands/account.tsx | 3 +- .../actions/commands/community.tsx | 3 +- .../goto-anything/actions/commands/docs.tsx | 5 +- .../goto-anything/actions/commands/forum.tsx | 3 +- .../actions/commands/language.tsx | 3 +- .../goto-anything/actions/commands/slash.tsx | 4 +- .../goto-anything/actions/commands/theme.tsx | 3 +- .../goto-anything/actions/commands/zen.tsx | 3 +- .../install-from-marketplace.tsx | 3 - .../account-setting/language-page/index.tsx | 6 +- .../install-from-marketplace.tsx | 3 - web/app/components/i18n-server.tsx | 22 
------ web/app/components/i18n.tsx | 45 ------------- .../plugins/base/deprecation-notice.tsx | 6 +- .../components/plugins/card/index.spec.tsx | 25 ------- web/app/components/plugins/card/index.tsx | 13 ++-- web/app/components/plugins/hooks.ts | 11 ++- .../plugins/marketplace/empty/index.spec.tsx | 45 ++----------- .../plugins/marketplace/empty/index.tsx | 6 +- .../components/plugins/marketplace/hooks.ts | 17 ----- .../plugins/marketplace/index.spec.tsx | 54 --------------- .../components/plugins/marketplace/index.tsx | 5 -- .../plugins/marketplace/list/card-wrapper.tsx | 18 ++--- .../plugins/marketplace/list/index.spec.tsx | 65 +++--------------- .../plugins/marketplace/list/index.tsx | 7 +- .../marketplace/list/list-with-collection.tsx | 9 +-- .../plugins/marketplace/list/list-wrapper.tsx | 10 +-- .../marketplace/plugin-type-switch.tsx | 6 +- .../marketplace/search-box/index.spec.tsx | 33 +-------- .../plugins/marketplace/search-box/index.tsx | 4 -- .../search-box/search-box-wrapper.tsx | 12 +--- .../marketplace/search-box/tags-filter.tsx | 9 +-- .../search-box/trigger/marketplace.tsx | 6 +- .../marketplace/sort-dropdown/index.spec.tsx | 43 +----------- .../marketplace/sort-dropdown/index.tsx | 11 +-- .../sticky-search-and-switch-wrapper.tsx | 5 +- .../components/plugins/plugin-item/index.tsx | 2 +- web/app/components/provider/i18n-server.tsx | 21 ++++++ web/app/components/provider/i18n.tsx | 24 +++++++ .../share/text-generation/index.tsx | 2 +- .../tools/marketplace/index.spec.tsx | 6 -- .../components/tools/marketplace/index.tsx | 5 +- web/app/layout.tsx | 15 +++-- web/context/i18n.ts | 6 +- web/i18n-config/client.ts | 35 ++++++++++ web/i18n-config/index.ts | 6 +- web/i18n-config/lib.client.ts | 2 +- web/i18n-config/lib.server.ts | 2 +- .../{i18next-config.ts => resources.ts} | 67 ++----------------- web/i18n-config/server.ts | 24 +++++-- web/i18n-config/settings.ts | 13 ++++ web/types/i18n.d.ts | 2 +- 58 files changed, 230 insertions(+), 548 deletions(-) delete mode 100644 web/app/components/i18n-server.tsx delete mode 100644 web/app/components/i18n.tsx create mode 100644 web/app/components/provider/i18n-server.tsx create mode 100644 web/app/components/provider/i18n.tsx create mode 100644 web/i18n-config/client.ts rename web/i18n-config/{i18next-config.ts => resources.ts} (59%) create mode 100644 web/i18n-config/settings.ts diff --git a/web/__tests__/goto-anything/search-error-handling.test.ts b/web/__tests__/goto-anything/search-error-handling.test.ts index 3a495834cd..42eb829583 100644 --- a/web/__tests__/goto-anything/search-error-handling.test.ts +++ b/web/__tests__/goto-anything/search-error-handling.test.ts @@ -14,6 +14,14 @@ import { fetchAppList } from '@/service/apps' import { postMarketplace } from '@/service/base' import { fetchDatasets } from '@/service/datasets' +// Mock react-i18next before importing modules that use it +vi.mock('react-i18next', () => ({ + getI18n: () => ({ + t: (key: string) => key, + language: 'en', + }), +})) + // Mock API functions vi.mock('@/service/base', () => ({ postMarketplace: vi.fn(), diff --git a/web/app/(commonLayout)/plugins/page.tsx b/web/app/(commonLayout)/plugins/page.tsx index 2df9cf23c4..81bda3a8a3 100644 --- a/web/app/(commonLayout)/plugins/page.tsx +++ b/web/app/(commonLayout)/plugins/page.tsx @@ -1,14 +1,12 @@ import Marketplace from '@/app/components/plugins/marketplace' import PluginPage from '@/app/components/plugins/plugin-page' import PluginsPanel from '@/app/components/plugins/plugin-page/plugins-panel' -import { 
getLocaleOnServer } from '@/i18n-config/server' const PluginList = async () => { - const locale = await getLocaleOnServer() return ( } - marketplace={} + marketplace={} /> ) } diff --git a/web/app/components/base/chat/chat-with-history/hooks.spec.tsx b/web/app/components/base/chat/chat-with-history/hooks.spec.tsx index 32ef133453..a6d51d8643 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.spec.tsx +++ b/web/app/components/base/chat/chat-with-history/hooks.spec.tsx @@ -17,7 +17,7 @@ vi.mock('@/hooks/use-app-favicon', () => ({ useAppFavicon: vi.fn(), })) -vi.mock('@/i18n-config/i18next-config', () => ({ +vi.mock('@/i18n-config/client', () => ({ changeLanguage: vi.fn().mockResolvedValue(undefined), })) diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx index 5ff8e61ff6..ed1981b530 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.tsx +++ b/web/app/components/base/chat/chat-with-history/hooks.tsx @@ -25,7 +25,7 @@ import { useToastContext } from '@/app/components/base/toast' import { InputVarType } from '@/app/components/workflow/types' import { useWebAppStore } from '@/context/web-app-context' import { useAppFavicon } from '@/hooks/use-app-favicon' -import { changeLanguage } from '@/i18n-config/i18next-config' +import { changeLanguage } from '@/i18n-config/client' import { delConversation, pinConversation, diff --git a/web/app/components/base/chat/embedded-chatbot/hooks.spec.tsx b/web/app/components/base/chat/embedded-chatbot/hooks.spec.tsx index ca6a90c4d8..066fb8ebe9 100644 --- a/web/app/components/base/chat/embedded-chatbot/hooks.spec.tsx +++ b/web/app/components/base/chat/embedded-chatbot/hooks.spec.tsx @@ -13,7 +13,7 @@ import { shareQueryKeys } from '@/service/use-share' import { CONVERSATION_ID_INFO } from '../constants' import { useEmbeddedChatbot } from './hooks' -vi.mock('@/i18n-config/i18next-config', () => ({ +vi.mock('@/i18n-config/client', () => ({ changeLanguage: vi.fn().mockResolvedValue(undefined), })) diff --git a/web/app/components/base/chat/embedded-chatbot/hooks.tsx b/web/app/components/base/chat/embedded-chatbot/hooks.tsx index 803e905837..9028d10000 100644 --- a/web/app/components/base/chat/embedded-chatbot/hooks.tsx +++ b/web/app/components/base/chat/embedded-chatbot/hooks.tsx @@ -23,7 +23,7 @@ import { useToastContext } from '@/app/components/base/toast' import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils' import { InputVarType } from '@/app/components/workflow/types' import { useWebAppStore } from '@/context/web-app-context' -import { changeLanguage } from '@/i18n-config/i18next-config' +import { changeLanguage } from '@/i18n-config/client' import { updateFeedback } from '@/service/share' import { useInvalidateShareConversations, diff --git a/web/app/components/goto-anything/actions/commands/account.tsx b/web/app/components/goto-anything/actions/commands/account.tsx index 6465932a75..d1fa36b6f0 100644 --- a/web/app/components/goto-anything/actions/commands/account.tsx +++ b/web/app/components/goto-anything/actions/commands/account.tsx @@ -1,7 +1,7 @@ import type { SlashCommandHandler } from './types' import { RiUser3Line } from '@remixicon/react' import * as React from 'react' -import i18n from '@/i18n-config/i18next-config' +import { getI18n } from 'react-i18next' import { registerCommands, unregisterCommands } from './command-bus' // Account command dependency types - no external dependencies needed @@ -21,6 +21,7 @@ export const 
accountCommand: SlashCommandHandler = { }, async search(args: string, locale: string = 'en') { + const i18n = getI18n() return [{ id: 'account', title: i18n.t('account.account', { ns: 'common', lng: locale }), diff --git a/web/app/components/goto-anything/actions/commands/community.tsx b/web/app/components/goto-anything/actions/commands/community.tsx index fcd9a15000..685149402d 100644 --- a/web/app/components/goto-anything/actions/commands/community.tsx +++ b/web/app/components/goto-anything/actions/commands/community.tsx @@ -1,7 +1,7 @@ import type { SlashCommandHandler } from './types' import { RiDiscordLine } from '@remixicon/react' import * as React from 'react' -import i18n from '@/i18n-config/i18next-config' +import { getI18n } from 'react-i18next' import { registerCommands, unregisterCommands } from './command-bus' // Community command dependency types @@ -22,6 +22,7 @@ export const communityCommand: SlashCommandHandler = { }, async search(args: string, locale: string = 'en') { + const i18n = getI18n() return [{ id: 'community', title: i18n.t('userProfile.community', { ns: 'common', lng: locale }), diff --git a/web/app/components/goto-anything/actions/commands/docs.tsx b/web/app/components/goto-anything/actions/commands/docs.tsx index 9f09d32094..8b04e84157 100644 --- a/web/app/components/goto-anything/actions/commands/docs.tsx +++ b/web/app/components/goto-anything/actions/commands/docs.tsx @@ -1,8 +1,8 @@ import type { SlashCommandHandler } from './types' import { RiBookOpenLine } from '@remixicon/react' import * as React from 'react' +import { getI18n } from 'react-i18next' import { defaultDocBaseUrl } from '@/context/i18n' -import i18n from '@/i18n-config/i18next-config' import { getDocLanguage } from '@/i18n-config/language' import { registerCommands, unregisterCommands } from './command-bus' @@ -19,6 +19,7 @@ export const docsCommand: SlashCommandHandler = { // Direct execution function execute: () => { + const i18n = getI18n() const currentLocale = i18n.language const docLanguage = getDocLanguage(currentLocale) const url = `${defaultDocBaseUrl}/${docLanguage}` @@ -26,6 +27,7 @@ export const docsCommand: SlashCommandHandler = { }, async search(args: string, locale: string = 'en') { + const i18n = getI18n() return [{ id: 'doc', title: i18n.t('userProfile.helpCenter', { ns: 'common', lng: locale }), @@ -41,6 +43,7 @@ export const docsCommand: SlashCommandHandler = { }, register(_deps: DocDeps) { + const i18n = getI18n() registerCommands({ 'navigation.doc': async (_args) => { // Get the current language from i18n diff --git a/web/app/components/goto-anything/actions/commands/forum.tsx b/web/app/components/goto-anything/actions/commands/forum.tsx index e32632b4b5..36116ceb1f 100644 --- a/web/app/components/goto-anything/actions/commands/forum.tsx +++ b/web/app/components/goto-anything/actions/commands/forum.tsx @@ -1,7 +1,7 @@ import type { SlashCommandHandler } from './types' import { RiFeedbackLine } from '@remixicon/react' import * as React from 'react' -import i18n from '@/i18n-config/i18next-config' +import { getI18n } from 'react-i18next' import { registerCommands, unregisterCommands } from './command-bus' // Forum command dependency types @@ -22,6 +22,7 @@ export const forumCommand: SlashCommandHandler = { }, async search(args: string, locale: string = 'en') { + const i18n = getI18n() return [{ id: 'forum', title: i18n.t('userProfile.forum', { ns: 'common', lng: locale }), diff --git a/web/app/components/goto-anything/actions/commands/language.tsx 
b/web/app/components/goto-anything/actions/commands/language.tsx index df94fd49ce..f4bafc1d58 100644 --- a/web/app/components/goto-anything/actions/commands/language.tsx +++ b/web/app/components/goto-anything/actions/commands/language.tsx @@ -1,6 +1,6 @@ import type { CommandSearchResult } from '../types' import type { SlashCommandHandler } from './types' -import i18n from '@/i18n-config/i18next-config' +import { getI18n } from 'react-i18next' import { languages } from '@/i18n-config/language' import { registerCommands, unregisterCommands } from './command-bus' @@ -14,6 +14,7 @@ const buildLanguageCommands = (query: string): CommandSearchResult[] => { const list = languages.filter(item => item.supported && ( !q || item.name.toLowerCase().includes(q) || String(item.value).toLowerCase().includes(q) )) + const i18n = getI18n() return list.map(item => ({ id: `lang-${item.value}`, title: item.name, diff --git a/web/app/components/goto-anything/actions/commands/slash.tsx b/web/app/components/goto-anything/actions/commands/slash.tsx index ec0f333cd4..6aad67731f 100644 --- a/web/app/components/goto-anything/actions/commands/slash.tsx +++ b/web/app/components/goto-anything/actions/commands/slash.tsx @@ -2,8 +2,8 @@ import type { ActionItem } from '../types' import { useTheme } from 'next-themes' import { useEffect } from 'react' +import { getI18n } from 'react-i18next' import { setLocaleOnClient } from '@/i18n-config' -import i18n from '@/i18n-config/i18next-config' import { accountCommand } from './account' import { executeCommand } from './command-bus' import { communityCommand } from './community' @@ -14,6 +14,8 @@ import { slashCommandRegistry } from './registry' import { themeCommand } from './theme' import { zenCommand } from './zen' +const i18n = getI18n() + export const slashAction: ActionItem = { key: '/', shortcut: '/', diff --git a/web/app/components/goto-anything/actions/commands/theme.tsx b/web/app/components/goto-anything/actions/commands/theme.tsx index 335182af67..ba1416229d 100644 --- a/web/app/components/goto-anything/actions/commands/theme.tsx +++ b/web/app/components/goto-anything/actions/commands/theme.tsx @@ -2,7 +2,7 @@ import type { CommandSearchResult } from '../types' import type { SlashCommandHandler } from './types' import { RiComputerLine, RiMoonLine, RiSunLine } from '@remixicon/react' import * as React from 'react' -import i18n from '@/i18n-config/i18next-config' +import { getI18n } from 'react-i18next' import { registerCommands, unregisterCommands } from './command-bus' // Theme dependency types @@ -32,6 +32,7 @@ const THEME_ITEMS = [ ] as const const buildThemeCommands = (query: string, locale?: string): CommandSearchResult[] => { + const i18n = getI18n() const q = query.toLowerCase() const list = THEME_ITEMS.filter(item => !q diff --git a/web/app/components/goto-anything/actions/commands/zen.tsx b/web/app/components/goto-anything/actions/commands/zen.tsx index d6d9f1e5a2..1645e40fd9 100644 --- a/web/app/components/goto-anything/actions/commands/zen.tsx +++ b/web/app/components/goto-anything/actions/commands/zen.tsx @@ -1,8 +1,8 @@ import type { SlashCommandHandler } from './types' import { RiFullscreenLine } from '@remixicon/react' import * as React from 'react' +import { getI18n } from 'react-i18next' import { isInWorkflowPage } from '@/app/components/workflow/constants' -import i18n from '@/i18n-config/i18next-config' import { registerCommands, unregisterCommands } from './command-bus' // Zen command dependency types - no external dependencies needed @@ -32,6 
+32,7 @@ export const zenCommand: SlashCommandHandler = { execute: toggleZenMode, async search(_args: string, locale: string = 'en') { + const i18n = getI18n() return [{ id: 'zen', title: i18n.t('gotoAnything.actions.zenTitle', { ns: 'app', lng: locale }) || 'Zen Mode', diff --git a/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx b/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx index 956352d6d3..f02e276f55 100644 --- a/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx +++ b/web/app/components/header/account-setting/data-source-page-new/install-from-marketplace.tsx @@ -15,7 +15,6 @@ import Divider from '@/app/components/base/divider' import Loading from '@/app/components/base/loading' import List from '@/app/components/plugins/marketplace/list' import ProviderCard from '@/app/components/plugins/provider-card' -import { getLocaleOnClient } from '@/i18n-config' import { cn } from '@/utils/classnames' import { getMarketplaceUrl } from '@/utils/var' import { @@ -33,7 +32,6 @@ const InstallFromMarketplace = ({ const { t } = useTranslation() const { theme } = useTheme() const [collapse, setCollapse] = useState(false) - const locale = getLocaleOnClient() const { plugins: allPlugins, isLoading: isAllPluginsLoading, @@ -70,7 +68,6 @@ const InstallFromMarketplace = ({ marketplaceCollectionPluginsMap={{}} plugins={allPlugins} showInstallButton - locale={locale} cardContainerClassName="grid grid-cols-2 gap-2" cardRender={cardRender} emptyClassName="h-auto" diff --git a/web/app/components/header/account-setting/language-page/index.tsx b/web/app/components/header/account-setting/language-page/index.tsx index 5d888281e9..2a0604421f 100644 --- a/web/app/components/header/account-setting/language-page/index.tsx +++ b/web/app/components/header/account-setting/language-page/index.tsx @@ -2,13 +2,13 @@ import type { Item } from '@/app/components/base/select' import type { Locale } from '@/i18n-config' +import { useRouter } from 'next/navigation' import { useState } from 'react' import { useTranslation } from 'react-i18next' import { useContext } from 'use-context-selector' import { SimpleSelect } from '@/app/components/base/select' import { ToastContext } from '@/app/components/base/toast' import { useAppContext } from '@/context/app-context' - import { useLocale } from '@/context/i18n' import { setLocaleOnClient } from '@/i18n-config' import { languages } from '@/i18n-config/language' @@ -25,6 +25,7 @@ export default function LanguagePage() { const { notify } = useContext(ToastContext) const [editing, setEditing] = useState(false) const { t } = useTranslation() + const router = useRouter() const handleSelectLanguage = async (item: Item) => { const url = '/account/interface-language' @@ -35,7 +36,8 @@ export default function LanguagePage() { await updateUserProfile({ url, body: { [bodyKey]: item.value } }) notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) }) - setLocaleOnClient(item.value.toString() as Locale) + setLocaleOnClient(item.value.toString() as Locale, false) + router.refresh() } catch (e) { notify({ type: 'error', message: (e as Error).message }) diff --git a/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx b/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx index 0e7506bf96..289146f2d2 100644 --- 
a/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx +++ b/web/app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx @@ -14,7 +14,6 @@ import Divider from '@/app/components/base/divider' import Loading from '@/app/components/base/loading' import List from '@/app/components/plugins/marketplace/list' import ProviderCard from '@/app/components/plugins/provider-card' -import { getLocaleOnClient } from '@/i18n-config' import { cn } from '@/utils/classnames' import { getMarketplaceUrl } from '@/utils/var' import { @@ -32,7 +31,6 @@ const InstallFromMarketplace = ({ const { t } = useTranslation() const { theme } = useTheme() const [collapse, setCollapse] = useState(false) - const locale = getLocaleOnClient() const { plugins: allPlugins, isLoading: isAllPluginsLoading, @@ -69,7 +67,6 @@ const InstallFromMarketplace = ({ marketplaceCollectionPluginsMap={{}} plugins={allPlugins} showInstallButton - locale={locale} cardContainerClassName="grid grid-cols-2 gap-2" cardRender={cardRender} emptyClassName="h-auto" diff --git a/web/app/components/i18n-server.tsx b/web/app/components/i18n-server.tsx deleted file mode 100644 index 01dc5f0f13..0000000000 --- a/web/app/components/i18n-server.tsx +++ /dev/null @@ -1,22 +0,0 @@ -import * as React from 'react' -import { getLocaleOnServer } from '@/i18n-config/server' -import { ToastProvider } from './base/toast' -import I18N from './i18n' - -export type II18NServerProps = { - children: React.ReactNode -} - -const I18NServer = async ({ - children, -}: II18NServerProps) => { - const locale = await getLocaleOnServer() - - return ( - - {children} - - ) -} - -export default I18NServer diff --git a/web/app/components/i18n.tsx b/web/app/components/i18n.tsx deleted file mode 100644 index e9af2face9..0000000000 --- a/web/app/components/i18n.tsx +++ /dev/null @@ -1,45 +0,0 @@ -'use client' - -import type { FC } from 'react' -import type { Locale } from '@/i18n-config' -import { usePrefetchQuery } from '@tanstack/react-query' -import { useHydrateAtoms } from 'jotai/utils' -import * as React from 'react' -import { useEffect, useState } from 'react' -import { localeAtom } from '@/context/i18n' -import { setLocaleOnClient } from '@/i18n-config' -import { getSystemFeatures } from '@/service/common' -import Loading from './base/loading' - -export type II18nProps = { - locale: Locale - children: React.ReactNode -} -const I18n: FC = ({ - locale, - children, -}) => { - useHydrateAtoms([[localeAtom, locale]]) - const [loading, setLoading] = useState(true) - - usePrefetchQuery({ - queryKey: ['systemFeatures'], - queryFn: getSystemFeatures, - }) - - useEffect(() => { - setLocaleOnClient(locale, false).then(() => { - setLoading(false) - }) - }, [locale]) - - if (loading) - return
- - return ( - <> - {children} - - ) -} -export default React.memo(I18n) diff --git a/web/app/components/plugins/base/deprecation-notice.tsx b/web/app/components/plugins/base/deprecation-notice.tsx index c2ddfa6975..513b27a2cf 100644 --- a/web/app/components/plugins/base/deprecation-notice.tsx +++ b/web/app/components/plugins/base/deprecation-notice.tsx @@ -1,4 +1,5 @@ import type { FC } from 'react' +import { useTranslation } from '#i18n' import { RiAlertFill } from '@remixicon/react' import { camelCase } from 'es-toolkit/string' import Link from 'next/link' @@ -6,14 +7,12 @@ import * as React from 'react' import { useMemo } from 'react' import { Trans } from 'react-i18next' import { cn } from '@/utils/classnames' -import { useMixedTranslation } from '../marketplace/hooks' type DeprecationNoticeProps = { status: 'deleted' | 'active' deprecatedReason: string alternativePluginId: string alternativePluginURL: string - locale?: string className?: string innerWrapperClassName?: string iconWrapperClassName?: string @@ -34,13 +33,12 @@ const DeprecationNotice: FC = ({ deprecatedReason, alternativePluginId, alternativePluginURL, - locale, className, innerWrapperClassName, iconWrapperClassName, textClassName, }) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() const deprecatedReasonKey = useMemo(() => { if (!deprecatedReason) diff --git a/web/app/components/plugins/card/index.spec.tsx b/web/app/components/plugins/card/index.spec.tsx index 4a3e5a587b..fd97534ec4 100644 --- a/web/app/components/plugins/card/index.spec.tsx +++ b/web/app/components/plugins/card/index.spec.tsx @@ -502,31 +502,6 @@ describe('Card', () => { }) }) - // ================================ - // Locale Tests - // ================================ - describe('Locale', () => { - it('should use locale from props when provided', () => { - const plugin = createMockPlugin({ - label: { 'en-US': 'English Title', 'zh-Hans': 'ไธญๆ–‡ๆ ‡้ข˜' }, - }) - - render() - - expect(screen.getByText('ไธญๆ–‡ๆ ‡้ข˜')).toBeInTheDocument() - }) - - it('should fallback to default locale when prop locale not found', () => { - const plugin = createMockPlugin({ - label: { 'en-US': 'English Title' }, - }) - - render() - - expect(screen.getByText('English Title')).toBeInTheDocument() - }) - }) - // ================================ // Memoization Tests // ================================ diff --git a/web/app/components/plugins/card/index.tsx b/web/app/components/plugins/card/index.tsx index ada26801de..8578421116 100644 --- a/web/app/components/plugins/card/index.tsx +++ b/web/app/components/plugins/card/index.tsx @@ -1,15 +1,13 @@ 'use client' import type { Plugin } from '../types' -import type { Locale } from '@/i18n-config' +import { useTranslation } from '#i18n' import { RiAlertFill } from '@remixicon/react' import * as React from 'react' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import { useGetLanguage } from '@/context/i18n' import useTheme from '@/hooks/use-theme' import { renderI18nObject, } from '@/i18n-config' -import { getLanguage } from '@/i18n-config/language' import { Theme } from '@/types/app' import { cn } from '@/utils/classnames' import Partner from '../base/badges/partner' @@ -33,7 +31,6 @@ export type Props = { footer?: React.ReactNode isLoading?: boolean loadingFileName?: string - locale?: Locale limitedInstall?: boolean } @@ -48,13 +45,11 @@ const Card = ({ footer, isLoading = false, loadingFileName, - locale: localeFromProps, limitedInstall = false, }: Props) => { - 
const defaultLocale = useGetLanguage() - const locale = localeFromProps ? getLanguage(localeFromProps) : defaultLocale - const { t } = useMixedTranslation(localeFromProps) - const { categoriesMap } = useCategories(t, true) + const locale = useGetLanguage() + const { t } = useTranslation() + const { categoriesMap } = useCategories(true) const { category, type, name, org, label, brief, icon, icon_dark, verified, badges = [] } = payload const { theme } = useTheme() const iconSrc = theme === Theme.dark && icon_dark ? icon_dark : icon diff --git a/web/app/components/plugins/hooks.ts b/web/app/components/plugins/hooks.ts index 262935205b..65d073cc2f 100644 --- a/web/app/components/plugins/hooks.ts +++ b/web/app/components/plugins/hooks.ts @@ -1,4 +1,3 @@ -import type { TFunction } from 'i18next' import type { CategoryKey, TagKey } from './constants' import { useMemo } from 'react' import { useTranslation } from 'react-i18next' @@ -13,9 +12,8 @@ export type Tag = { label: string } -export const useTags = (translateFromOut?: TFunction) => { - const { t: translation } = useTranslation() - const t = translateFromOut || translation +export const useTags = () => { + const { t } = useTranslation() const tags = useMemo(() => { return tagKeys.map((tag) => { @@ -53,9 +51,8 @@ type Category = { label: string } -export const useCategories = (translateFromOut?: TFunction, isSingle?: boolean) => { - const { t: translation } = useTranslation() - const t = translateFromOut || translation +export const useCategories = (isSingle?: boolean) => { + const { t } = useTranslation() const categories = useMemo(() => { return categoryKeys.map((category) => { diff --git a/web/app/components/plugins/marketplace/empty/index.spec.tsx b/web/app/components/plugins/marketplace/empty/index.spec.tsx index 4cbc85a309..bc8e701dfc 100644 --- a/web/app/components/plugins/marketplace/empty/index.spec.tsx +++ b/web/app/components/plugins/marketplace/empty/index.spec.tsx @@ -7,9 +7,9 @@ import Line from './line' // Mock external dependencies only // ================================ -// Mock useMixedTranslation hook -vi.mock('../hooks', () => ({ - useMixedTranslation: (_locale?: string) => ({ +// Mock i18n translation hook +vi.mock('#i18n', () => ({ + useTranslation: () => ({ t: (key: string, options?: { ns?: string }) => { // Build full key with namespace prefix if provided const fullKey = options?.ns ? 
`${options.ns}.${key}` : key @@ -471,36 +471,6 @@ describe('Empty', () => { }) }) - // ================================ - // Locale Prop Tests - // ================================ - describe('Locale Prop', () => { - it('should pass locale to useMixedTranslation', () => { - render() - - // Translation should still work - expect(screen.getByText('No plugin found')).toBeInTheDocument() - }) - - it('should handle undefined locale', () => { - render() - - expect(screen.getByText('No plugin found')).toBeInTheDocument() - }) - - it('should handle en-US locale', () => { - render() - - expect(screen.getByText('No plugin found')).toBeInTheDocument() - }) - - it('should handle ja-JP locale', () => { - render() - - expect(screen.getByText('No plugin found')).toBeInTheDocument() - }) - }) - // ================================ // Placeholder Cards Layout Tests // ================================ @@ -634,7 +604,6 @@ describe('Empty', () => { text="Custom message" lightCard className="custom-wrapper" - locale="en-US" />, ) @@ -695,12 +664,6 @@ describe('Empty', () => { expect(container.querySelector('.only-class')).toBeInTheDocument() }) - it('should render with only locale prop', () => { - render() - - expect(screen.getByText('No plugin found')).toBeInTheDocument() - }) - it('should handle text with unicode characters', () => { render() @@ -813,7 +776,7 @@ describe('Empty and Line Integration', () => { }) it('should render complete Empty component structure', () => { - const { container } = render() + const { container } = render() // Container expect(container.querySelector('.test')).toBeInTheDocument() diff --git a/web/app/components/plugins/marketplace/empty/index.tsx b/web/app/components/plugins/marketplace/empty/index.tsx index 3c33d9b92a..6e5adff1b4 100644 --- a/web/app/components/plugins/marketplace/empty/index.tsx +++ b/web/app/components/plugins/marketplace/empty/index.tsx @@ -1,6 +1,6 @@ 'use client' +import { useTranslation } from '#i18n' import { Group } from '@/app/components/base/icons/src/vender/other' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import { cn } from '@/utils/classnames' import Line from './line' @@ -8,16 +8,14 @@ type Props = { text?: string lightCard?: boolean className?: string - locale?: string } const Empty = ({ text, lightCard, className, - locale, }: Props) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() return (
{ } } -/** - * ! Support zh-Hans, pt-BR, ja-JP and en-US for Marketplace page - * ! For other languages, use en-US as fallback - */ -export const useMixedTranslation = (localeFromOuter?: string) => { - let t = useTranslation().t - - if (localeFromOuter) - t = i18n.getFixedT(localeFromOuter) - - return { - t, - } -} - export const useMarketplaceContainerScroll = ( callback: () => void, scrollContainerId = 'marketplace-container', diff --git a/web/app/components/plugins/marketplace/index.spec.tsx b/web/app/components/plugins/marketplace/index.spec.tsx index 3073897ba1..b3b1d58dd4 100644 --- a/web/app/components/plugins/marketplace/index.spec.tsx +++ b/web/app/components/plugins/marketplace/index.spec.tsx @@ -11,7 +11,6 @@ import { PluginCategoryEnum } from '@/app/components/plugins/types' // Note: Import after mocks are set up import { DEFAULT_SORT, SCROLL_BOTTOM_THRESHOLD } from './constants' import { MarketplaceContext, MarketplaceContextProvider, useMarketplaceContext } from './context' -import { useMixedTranslation } from './hooks' import PluginTypeSwitch, { PLUGIN_TYPE_SEARCH_MAP } from './plugin-type-switch' import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper' import { @@ -602,48 +601,6 @@ describe('utils', () => { }) }) -// ================================ -// Hooks Tests -// ================================ -describe('hooks', () => { - describe('useMixedTranslation', () => { - it('should return translation function', () => { - const { result } = renderHook(() => useMixedTranslation()) - - expect(result.current.t).toBeDefined() - expect(typeof result.current.t).toBe('function') - }) - - it('should return translation key when no translation found', () => { - const { result } = renderHook(() => useMixedTranslation()) - - // The global mock returns key with namespace prefix - expect(result.current.t('category.all', { ns: 'plugin' })).toBe('plugin.category.all') - }) - - it('should use locale from outer when provided', () => { - const { result } = renderHook(() => useMixedTranslation('zh-Hans')) - - expect(result.current.t).toBeDefined() - }) - - it('should handle different locale values', () => { - const locales = ['en-US', 'zh-Hans', 'ja-JP', 'pt-BR'] - locales.forEach((locale) => { - const { result } = renderHook(() => useMixedTranslation(locale)) - expect(result.current.t).toBeDefined() - expect(typeof result.current.t).toBe('function') - }) - }) - - it('should use getFixedT when localeFromOuter is provided', () => { - const { result } = renderHook(() => useMixedTranslation('fr-FR')) - // The global mock returns key with namespace prefix - expect(result.current.t('search', { ns: 'plugin' })).toBe('plugin.search') - }) - }) -}) - // ================================ // useMarketplaceCollectionsAndPlugins Tests // ================================ @@ -2088,17 +2045,6 @@ describe('StickySearchAndSwitchWrapper', () => { }) describe('Props', () => { - it('should accept locale prop', () => { - render( - - - , - ) - - // Component should render without errors - expect(screen.getByTestId('portal-elem')).toBeInTheDocument() - }) - it('should accept showSearchParams prop', () => { render( diff --git a/web/app/components/plugins/marketplace/index.tsx b/web/app/components/plugins/marketplace/index.tsx index f9f7e86b9a..08d1bc833f 100644 --- a/web/app/components/plugins/marketplace/index.tsx +++ b/web/app/components/plugins/marketplace/index.tsx @@ -1,6 +1,5 @@ import type { MarketplaceCollection, SearchParams } from './types' import type { Plugin } from 
'@/app/components/plugins/types' -import type { Locale } from '@/i18n-config' import { TanstackQueryInitializer } from '@/context/query-client' import { MarketplaceContextProvider } from './context' import Description from './description' @@ -9,7 +8,6 @@ import StickySearchAndSwitchWrapper from './sticky-search-and-switch-wrapper' import { getMarketplaceCollectionsAndPlugins } from './utils' type MarketplaceProps = { - locale: Locale showInstallButton?: boolean shouldExclude?: boolean searchParams?: SearchParams @@ -18,7 +16,6 @@ type MarketplaceProps = { showSearchParams?: boolean } const Marketplace = async ({ - locale, showInstallButton = true, shouldExclude, searchParams, @@ -44,12 +41,10 @@ const Marketplace = async ({ > { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() const { theme } = useTheme() const [isShowInstallFromMarketplace, { setTrue: showInstallFromMarketplace, setFalse: hideInstallFromMarketplace, }] = useBoolean(false) - const localeFromLocale = useLocale() - const { getTagLabel } = useTags(t) + const locale = useLocale() + const { getTagLabel } = useTags() // Memoize marketplace link params to prevent unnecessary re-renders const marketplaceLinkParams = useMemo(() => ({ - language: localeFromLocale, + language: locale, theme, - }), [localeFromLocale, theme]) + }), [locale, theme]) // Memoize tag labels to prevent recreating array on every render const tagLabels = useMemo(() => @@ -52,7 +48,6 @@ const CardWrapperComponent = ({ ({ - useMixedTranslation: (_locale?: string) => ({ +// Mock i18n translation hook +vi.mock('#i18n', () => ({ + useTranslation: () => ({ t: (key: string, options?: { ns?: string, num?: number }) => { // Build full key with namespace prefix if provided const fullKey = options?.ns ? `${options.ns}.${key}` : key @@ -28,6 +27,7 @@ vi.mock('../hooks', () => ({ return translations[fullKey] || key }, }), + useLocale: () => 'en-US', })) // Mock useMarketplaceContext with controllable values @@ -148,15 +148,15 @@ vi.mock('@/app/components/plugins/install-plugin/install-from-marketplace', () = // Mock SortDropdown component vi.mock('../sort-dropdown', () => ({ - default: ({ locale }: { locale: Locale }) => ( -
Sort
+ default: () => ( +
Sort
), })) // Mock Empty component vi.mock('../empty', () => ({ - default: ({ className, locale }: { className?: string, locale?: string }) => ( -
+ default: ({ className }: { className?: string }) => ( +
No plugins found
), @@ -233,7 +233,6 @@ describe('List', () => { marketplaceCollectionPluginsMap: {} as Record, plugins: undefined, showInstallButton: false, - locale: 'en-US' as Locale, cardContainerClassName: '', cardRender: undefined, onMoreClick: undefined, @@ -351,18 +350,6 @@ describe('List', () => { expect(screen.getByTestId('empty-component')).toHaveClass('custom-empty-class') }) - it('should pass locale to Empty component', () => { - render( - , - ) - - expect(screen.getByTestId('empty-component')).toHaveAttribute('data-locale', 'zh-CN') - }) - it('should pass showInstallButton to CardWrapper', () => { const plugins = createMockPluginList(1) @@ -508,7 +495,6 @@ describe('ListWithCollection', () => { marketplaceCollections: [] as MarketplaceCollection[], marketplaceCollectionPluginsMap: {} as Record, showInstallButton: false, - locale: 'en-US' as Locale, cardContainerClassName: '', cardRender: undefined, onMoreClick: undefined, @@ -820,7 +806,6 @@ describe('ListWrapper', () => { marketplaceCollections: [] as MarketplaceCollection[], marketplaceCollectionPluginsMap: {} as Record, showInstallButton: false, - locale: 'en-US' as Locale, } beforeEach(() => { @@ -901,14 +886,6 @@ describe('ListWrapper', () => { expect(screen.queryByTestId('sort-dropdown')).not.toBeInTheDocument() }) - - it('should pass locale to SortDropdown', () => { - mockContextValues.plugins = createMockPluginList(1) - - render() - - expect(screen.getByTestId('sort-dropdown')).toHaveAttribute('data-locale', 'zh-CN') - }) }) // ================================ @@ -1169,7 +1146,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1188,7 +1164,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1209,7 +1184,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={plugins} - locale="en-US" />, ) @@ -1231,7 +1205,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1252,7 +1225,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1274,7 +1246,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1293,7 +1264,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1310,7 +1280,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1327,7 +1296,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={true} - locale="en-US" />, ) @@ -1354,7 +1322,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={false} - locale="en-US" />, ) @@ -1375,7 +1342,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollectionPluginsMap={{}} plugins={[plugin]} showInstallButton={false} - locale="en-US" />, ) @@ -1390,7 
+1356,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1414,7 +1379,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1432,7 +1396,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1450,7 +1413,6 @@ describe('CardWrapper (via List integration)', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={[plugin]} - locale="en-US" />, ) @@ -1482,7 +1444,6 @@ describe('Combined Workflows', () => { , ) @@ -1501,7 +1462,6 @@ describe('Combined Workflows', () => { , ) @@ -1521,7 +1481,6 @@ describe('Combined Workflows', () => { , ) @@ -1535,7 +1494,6 @@ describe('Combined Workflows', () => { , ) @@ -1551,7 +1509,6 @@ describe('Combined Workflows', () => { , ) @@ -1569,7 +1526,6 @@ describe('Combined Workflows', () => { , ) @@ -1601,7 +1557,6 @@ describe('Accessibility', () => { , ) @@ -1625,7 +1580,6 @@ describe('Accessibility', () => { marketplaceCollections={collections} marketplaceCollectionPluginsMap={pluginsMap} onMoreClick={onMoreClick} - locale="en-US" />, ) @@ -1642,7 +1596,6 @@ describe('Accessibility', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={plugins} - locale="en-US" />, ) @@ -1668,7 +1621,6 @@ describe('Performance', () => { marketplaceCollections={[]} marketplaceCollectionPluginsMap={{}} plugins={plugins} - locale="en-US" />, ) const endTime = performance.now() @@ -1689,7 +1641,6 @@ describe('Performance', () => { , ) const endTime = performance.now() diff --git a/web/app/components/plugins/marketplace/list/index.tsx b/web/app/components/plugins/marketplace/list/index.tsx index 54889b232f..80b33d0ffd 100644 --- a/web/app/components/plugins/marketplace/list/index.tsx +++ b/web/app/components/plugins/marketplace/list/index.tsx @@ -1,7 +1,6 @@ 'use client' import type { Plugin } from '../../types' import type { MarketplaceCollection } from '../types' -import type { Locale } from '@/i18n-config' import { cn } from '@/utils/classnames' import Empty from '../empty' import CardWrapper from './card-wrapper' @@ -12,7 +11,6 @@ type ListProps = { marketplaceCollectionPluginsMap: Record plugins?: Plugin[] showInstallButton?: boolean - locale: Locale cardContainerClassName?: string cardRender?: (plugin: Plugin) => React.JSX.Element | null onMoreClick?: () => void @@ -23,7 +21,6 @@ const List = ({ marketplaceCollectionPluginsMap, plugins, showInstallButton, - locale, cardContainerClassName, cardRender, onMoreClick, @@ -37,7 +34,6 @@ const List = ({ marketplaceCollections={marketplaceCollections} marketplaceCollectionPluginsMap={marketplaceCollectionPluginsMap} showInstallButton={showInstallButton} - locale={locale} cardContainerClassName={cardContainerClassName} cardRender={cardRender} onMoreClick={onMoreClick} @@ -61,7 +57,6 @@ const List = ({ key={`${plugin.org}/${plugin.name}`} plugin={plugin} showInstallButton={showInstallButton} - locale={locale} /> ) }) @@ -71,7 +66,7 @@ const List = ({ } { plugins && !plugins.length && ( - + ) } diff --git a/web/app/components/plugins/marketplace/list/list-with-collection.tsx b/web/app/components/plugins/marketplace/list/list-with-collection.tsx index 8830cc5ddf..c17715e71e 100644 --- 
a/web/app/components/plugins/marketplace/list/list-with-collection.tsx +++ b/web/app/components/plugins/marketplace/list/list-with-collection.tsx @@ -3,9 +3,8 @@ import type { MarketplaceCollection } from '../types' import type { SearchParamsFromCollection } from '@/app/components/plugins/marketplace/types' import type { Plugin } from '@/app/components/plugins/types' -import type { Locale } from '@/i18n-config' +import { useLocale, useTranslation } from '#i18n' import { RiArrowRightSLine } from '@remixicon/react' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import { getLanguage } from '@/i18n-config/language' import { cn } from '@/utils/classnames' import CardWrapper from './card-wrapper' @@ -14,7 +13,6 @@ type ListWithCollectionProps = { marketplaceCollections: MarketplaceCollection[] marketplaceCollectionPluginsMap: Record showInstallButton?: boolean - locale: Locale cardContainerClassName?: string cardRender?: (plugin: Plugin) => React.JSX.Element | null onMoreClick?: (searchParams?: SearchParamsFromCollection) => void @@ -23,12 +21,12 @@ const ListWithCollection = ({ marketplaceCollections, marketplaceCollectionPluginsMap, showInstallButton, - locale, cardContainerClassName, cardRender, onMoreClick, }: ListWithCollectionProps) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() + const locale = useLocale() return ( <> @@ -72,7 +70,6 @@ const ListWithCollection = ({ key={plugin.plugin_id} plugin={plugin} showInstallButton={showInstallButton} - locale={locale} /> ) }) diff --git a/web/app/components/plugins/marketplace/list/list-wrapper.tsx b/web/app/components/plugins/marketplace/list/list-wrapper.tsx index f8126eb34b..84fcf92daf 100644 --- a/web/app/components/plugins/marketplace/list/list-wrapper.tsx +++ b/web/app/components/plugins/marketplace/list/list-wrapper.tsx @@ -1,10 +1,9 @@ 'use client' import type { Plugin } from '../../types' import type { MarketplaceCollection } from '../types' -import type { Locale } from '@/i18n-config' +import { useTranslation } from '#i18n' import { useEffect } from 'react' import Loading from '@/app/components/base/loading' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import { useMarketplaceContext } from '../context' import SortDropdown from '../sort-dropdown' import List from './index' @@ -13,15 +12,13 @@ type ListWrapperProps = { marketplaceCollections: MarketplaceCollection[] marketplaceCollectionPluginsMap: Record showInstallButton?: boolean - locale: Locale } const ListWrapper = ({ marketplaceCollections, marketplaceCollectionPluginsMap, showInstallButton, - locale, }: ListWrapperProps) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() const plugins = useMarketplaceContext(v => v.plugins) const pluginsTotal = useMarketplaceContext(v => v.pluginsTotal) const marketplaceCollectionsFromClient = useMarketplaceContext(v => v.marketplaceCollectionsFromClient) @@ -55,7 +52,7 @@ const ListWrapper = ({
{t('marketplace.pluginsResult', { ns: 'plugin', num: pluginsTotal })}
- +
) } @@ -73,7 +70,6 @@ const ListWrapper = ({ marketplaceCollectionPluginsMap={marketplaceCollectionPluginsMapFromClient || marketplaceCollectionPluginsMap} plugins={plugins} showInstallButton={showInstallButton} - locale={locale} onMoreClick={handleMoreClick} /> ) diff --git a/web/app/components/plugins/marketplace/plugin-type-switch.tsx b/web/app/components/plugins/marketplace/plugin-type-switch.tsx index 2a89e6847e..b9572413ed 100644 --- a/web/app/components/plugins/marketplace/plugin-type-switch.tsx +++ b/web/app/components/plugins/marketplace/plugin-type-switch.tsx @@ -1,4 +1,5 @@ 'use client' +import { useTranslation } from '#i18n' import { RiArchive2Line, RiBrain2Line, @@ -12,7 +13,6 @@ import { Trigger as TriggerIcon } from '@/app/components/base/icons/src/vender/p import { cn } from '@/utils/classnames' import { PluginCategoryEnum } from '../types' import { useMarketplaceContext } from './context' -import { useMixedTranslation } from './hooks' export const PLUGIN_TYPE_SEARCH_MAP = { all: 'all', @@ -25,16 +25,14 @@ export const PLUGIN_TYPE_SEARCH_MAP = { bundle: 'bundle', } type PluginTypeSwitchProps = { - locale?: string className?: string showSearchParams?: boolean } const PluginTypeSwitch = ({ - locale, className, showSearchParams, }: PluginTypeSwitchProps) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() const activePluginType = useMarketplaceContext(s => s.activePluginType) const handleActivePluginTypeChange = useMarketplaceContext(s => s.handleActivePluginTypeChange) diff --git a/web/app/components/plugins/marketplace/search-box/index.spec.tsx b/web/app/components/plugins/marketplace/search-box/index.spec.tsx index 8c3131f6d1..3e9cc40be0 100644 --- a/web/app/components/plugins/marketplace/search-box/index.spec.tsx +++ b/web/app/components/plugins/marketplace/search-box/index.spec.tsx @@ -10,9 +10,9 @@ import ToolSelectorTrigger from './trigger/tool-selector' // Mock external dependencies only // ================================ -// Mock useMixedTranslation hook -vi.mock('../hooks', () => ({ - useMixedTranslation: (_locale?: string) => ({ +// Mock i18n translation hook +vi.mock('#i18n', () => ({ + useTranslation: () => ({ t: (key: string, options?: { ns?: string }) => { // Build full key with namespace prefix if provided const fullKey = options?.ns ? 
`${options.ns}.${key}` : key @@ -364,13 +364,6 @@ describe('SearchBox', () => { expect(container.querySelector('.custom-input-class')).toBeInTheDocument() }) - it('should pass locale to TagsFilter', () => { - render() - - // TagsFilter should be rendered with locale - expect(screen.getByTestId('portal-elem')).toBeInTheDocument() - }) - it('should handle empty placeholder', () => { render() @@ -449,12 +442,6 @@ describe('SearchBoxWrapper', () => { expect(screen.getByRole('textbox')).toBeInTheDocument() }) - it('should render with locale prop', () => { - render() - - expect(screen.getByRole('textbox')).toBeInTheDocument() - }) - it('should render in marketplace mode', () => { const { container } = render() @@ -500,13 +487,6 @@ describe('SearchBoxWrapper', () => { expect(screen.getByPlaceholderText('Search plugins')).toBeInTheDocument() }) - - it('should pass locale to useMixedTranslation', () => { - render() - - // Translation should still work - expect(screen.getByPlaceholderText('Search plugins')).toBeInTheDocument() - }) }) }) @@ -665,12 +645,6 @@ describe('MarketplaceTrigger', () => { }) describe('Props Variations', () => { - it('should handle locale prop', () => { - render() - - expect(screen.getByText('All Tags')).toBeInTheDocument() - }) - it('should handle empty tagsMap', () => { const { container } = render( , @@ -1251,7 +1225,6 @@ describe('Combined Workflows', () => { supportAddCustomTool onShowAddCustomCollectionModal={vi.fn()} placeholder="Search plugins" - locale="en-US" wrapperClassName="custom-wrapper" inputClassName="custom-input" autoFocus={false} diff --git a/web/app/components/plugins/marketplace/search-box/index.tsx b/web/app/components/plugins/marketplace/search-box/index.tsx index 05f98782b9..b6e1f8ee70 100644 --- a/web/app/components/plugins/marketplace/search-box/index.tsx +++ b/web/app/components/plugins/marketplace/search-box/index.tsx @@ -13,7 +13,6 @@ type SearchBoxProps = { tags: string[] onTagsChange: (tags: string[]) => void placeholder?: string - locale?: string supportAddCustomTool?: boolean usedInMarketplace?: boolean onShowAddCustomCollectionModal?: () => void @@ -28,7 +27,6 @@ const SearchBox = ({ tags, onTagsChange, placeholder = '', - locale, usedInMarketplace = false, supportAddCustomTool, onShowAddCustomCollectionModal, @@ -49,7 +47,6 @@ const SearchBox = ({ tags={tags} onTagsChange={onTagsChange} usedInMarketplace - locale={locale} />
@@ -109,7 +106,6 @@ const SearchBox = ({ ) diff --git a/web/app/components/plugins/marketplace/search-box/search-box-wrapper.tsx b/web/app/components/plugins/marketplace/search-box/search-box-wrapper.tsx index 1290c26210..d7fc004236 100644 --- a/web/app/components/plugins/marketplace/search-box/search-box-wrapper.tsx +++ b/web/app/components/plugins/marketplace/search-box/search-box-wrapper.tsx @@ -1,16 +1,11 @@ 'use client' +import { useTranslation } from '#i18n' import { useMarketplaceContext } from '../context' -import { useMixedTranslation } from '../hooks' import SearchBox from './index' -type SearchBoxWrapperProps = { - locale?: string -} -const SearchBoxWrapper = ({ - locale, -}: SearchBoxWrapperProps) => { - const { t } = useMixedTranslation(locale) +const SearchBoxWrapper = () => { + const { t } = useTranslation() const searchPluginText = useMarketplaceContext(v => v.searchPluginText) const handleSearchPluginTextChange = useMarketplaceContext(v => v.handleSearchPluginTextChange) const filterPluginTags = useMarketplaceContext(v => v.filterPluginTags) @@ -24,7 +19,6 @@ const SearchBoxWrapper = ({ onSearchChange={handleSearchPluginTextChange} tags={filterPluginTags} onTagsChange={handleFilterPluginTagsChange} - locale={locale} placeholder={t('searchPlugins', { ns: 'plugin' })} usedInMarketplace /> diff --git a/web/app/components/plugins/marketplace/search-box/tags-filter.tsx b/web/app/components/plugins/marketplace/search-box/tags-filter.tsx index df4d3eebab..9a8035e2e3 100644 --- a/web/app/components/plugins/marketplace/search-box/tags-filter.tsx +++ b/web/app/components/plugins/marketplace/search-box/tags-filter.tsx @@ -1,5 +1,6 @@ 'use client' +import { useTranslation } from '#i18n' import { useState } from 'react' import Checkbox from '@/app/components/base/checkbox' import Input from '@/app/components/base/input' @@ -9,7 +10,6 @@ import { PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' import { useTags } from '@/app/components/plugins/hooks' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import MarketplaceTrigger from './trigger/marketplace' import ToolSelectorTrigger from './trigger/tool-selector' @@ -17,18 +17,16 @@ type TagsFilterProps = { tags: string[] onTagsChange: (tags: string[]) => void usedInMarketplace?: boolean - locale?: string } const TagsFilter = ({ tags, onTagsChange, usedInMarketplace = false, - locale, }: TagsFilterProps) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() const [open, setOpen] = useState(false) const [searchText, setSearchText] = useState('') - const { tags: options, tagsMap } = useTags(t) + const { tags: options, tagsMap } = useTags() const filteredOptions = options.filter(option => option.label.toLowerCase().includes(searchText.toLowerCase())) const handleCheck = (id: string) => { if (tags.includes(id)) @@ -59,7 +57,6 @@ const TagsFilter = ({ open={open} tags={tags} tagsMap={tagsMap} - locale={locale} onTagsChange={onTagsChange} /> ) diff --git a/web/app/components/plugins/marketplace/search-box/trigger/marketplace.tsx b/web/app/components/plugins/marketplace/search-box/trigger/marketplace.tsx index 2ba03bd2f2..e387d52d0e 100644 --- a/web/app/components/plugins/marketplace/search-box/trigger/marketplace.tsx +++ b/web/app/components/plugins/marketplace/search-box/trigger/marketplace.tsx @@ -1,15 +1,14 @@ import type { Tag } from '../../../hooks' +import { useTranslation } from '#i18n' import { RiArrowDownSLine, RiCloseCircleFill, RiFilter3Line } 
from '@remixicon/react' import * as React from 'react' import { cn } from '@/utils/classnames' -import { useMixedTranslation } from '../../hooks' type MarketplaceTriggerProps = { selectedTagsLength: number open: boolean tags: string[] tagsMap: Record - locale?: string onTagsChange: (tags: string[]) => void } @@ -18,10 +17,9 @@ const MarketplaceTrigger = ({ open, tags, tagsMap, - locale, onTagsChange, }: MarketplaceTriggerProps) => { - const { t } = useMixedTranslation(locale) + const { t } = useTranslation() return (
{ // Build full key with namespace prefix if provided const fullKey = options?.ns ? `${options.ns}.${key}` : key @@ -22,8 +22,8 @@ const mockTranslation = vi.fn((key: string, options?: { ns?: string }) => { return translations[fullKey] || key }) -vi.mock('../hooks', () => ({ - useMixedTranslation: (_locale?: string) => ({ +vi.mock('#i18n', () => ({ + useTranslation: () => ({ t: mockTranslation, }), })) @@ -145,36 +145,6 @@ describe('SortDropdown', () => { }) }) - // ================================ - // Props Testing - // ================================ - describe('Props', () => { - it('should accept locale prop', () => { - render() - - expect(screen.getByTestId('portal-wrapper')).toBeInTheDocument() - }) - - it('should call useMixedTranslation with provided locale', () => { - render() - - // Translation function should be called for labels - expect(mockTranslation).toHaveBeenCalledWith('marketplace.sortBy', { ns: 'plugin' }) - }) - - it('should render without locale prop (undefined)', () => { - render() - - expect(screen.getByText('Sort by')).toBeInTheDocument() - }) - - it('should render with empty string locale', () => { - render() - - expect(screen.getByText('Sort by')).toBeInTheDocument() - }) - }) - // ================================ // State Management Tests // ================================ @@ -618,13 +588,6 @@ describe('SortDropdown', () => { expect(mockTranslation).toHaveBeenCalledWith('marketplace.sortOption.newlyReleased', { ns: 'plugin' }) expect(mockTranslation).toHaveBeenCalledWith('marketplace.sortOption.firstReleased', { ns: 'plugin' }) }) - - it('should pass locale to useMixedTranslation', () => { - render() - - // Verify component renders with locale - expect(screen.getByTestId('portal-wrapper')).toBeInTheDocument() - }) }) // ================================ diff --git a/web/app/components/plugins/marketplace/sort-dropdown/index.tsx b/web/app/components/plugins/marketplace/sort-dropdown/index.tsx index a1f6631735..984b114d03 100644 --- a/web/app/components/plugins/marketplace/sort-dropdown/index.tsx +++ b/web/app/components/plugins/marketplace/sort-dropdown/index.tsx @@ -1,4 +1,5 @@ 'use client' +import { useTranslation } from '#i18n' import { RiArrowDownSLine, RiCheckLine, @@ -9,16 +10,10 @@ import { PortalToFollowElemContent, PortalToFollowElemTrigger, } from '@/app/components/base/portal-to-follow-elem' -import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import { useMarketplaceContext } from '../context' -type SortDropdownProps = { - locale?: string -} -const SortDropdown = ({ - locale, -}: SortDropdownProps) => { - const { t } = useMixedTranslation(locale) +const SortDropdown = () => { + const { t } = useTranslation() const options = [ { value: 'install_count', diff --git a/web/app/components/plugins/marketplace/sticky-search-and-switch-wrapper.tsx b/web/app/components/plugins/marketplace/sticky-search-and-switch-wrapper.tsx index 602a1e9af2..3d3530c83e 100644 --- a/web/app/components/plugins/marketplace/sticky-search-and-switch-wrapper.tsx +++ b/web/app/components/plugins/marketplace/sticky-search-and-switch-wrapper.tsx @@ -5,13 +5,11 @@ import PluginTypeSwitch from './plugin-type-switch' import SearchBoxWrapper from './search-box/search-box-wrapper' type StickySearchAndSwitchWrapperProps = { - locale?: string pluginTypeSwitchClassName?: string showSearchParams?: boolean } const StickySearchAndSwitchWrapper = ({ - locale, pluginTypeSwitchClassName, showSearchParams, }: StickySearchAndSwitchWrapperProps) => { @@ -25,9 +23,8 @@ const 
StickySearchAndSwitchWrapper = ({ pluginTypeSwitchClassName, )} > - +
diff --git a/web/app/components/plugins/plugin-item/index.tsx b/web/app/components/plugins/plugin-item/index.tsx index d287bd9e9a..3f658c63a8 100644 --- a/web/app/components/plugins/plugin-item/index.tsx +++ b/web/app/components/plugins/plugin-item/index.tsx @@ -44,7 +44,7 @@ const PluginItem: FC = ({ }) => { const { t } = useTranslation() const { theme } = useTheme() - const { categoriesMap } = useCategories(t, true) + const { categoriesMap } = useCategories(true) const currentPluginID = usePluginPageContext(v => v.currentPluginID) const setCurrentPluginID = usePluginPageContext(v => v.setCurrentPluginID) const { refreshPluginList } = useRefreshPluginList() diff --git a/web/app/components/provider/i18n-server.tsx b/web/app/components/provider/i18n-server.tsx new file mode 100644 index 0000000000..23391cf428 --- /dev/null +++ b/web/app/components/provider/i18n-server.tsx @@ -0,0 +1,21 @@ +import { getLocaleOnServer, getResources } from '@/i18n-config/server' + +import { I18nClientProvider } from './i18n' + +export async function I18nServerProvider({ + children, +}: { + children: React.ReactNode +}) { + const locale = await getLocaleOnServer() + const resource = await getResources(locale) + + return ( + + {children} + + ) +} diff --git a/web/app/components/provider/i18n.tsx b/web/app/components/provider/i18n.tsx new file mode 100644 index 0000000000..6441a09dd3 --- /dev/null +++ b/web/app/components/provider/i18n.tsx @@ -0,0 +1,24 @@ +'use client' + +import type { Resource } from 'i18next' +import type { Locale } from '@/i18n-config' +import { I18nextProvider } from 'react-i18next' +import { createI18nextInstance } from '@/i18n-config/client' + +export function I18nClientProvider({ + locale, + resource, + children, +}: { + locale: Locale + resource: Resource + children: React.ReactNode +}) { + const i18n = createI18nextInstance(locale, resource) + + return ( + + {children} + + ) +} diff --git a/web/app/components/share/text-generation/index.tsx b/web/app/components/share/text-generation/index.tsx index b9bb59664a..b793a03ce7 100644 --- a/web/app/components/share/text-generation/index.tsx +++ b/web/app/components/share/text-generation/index.tsx @@ -32,7 +32,7 @@ import { useWebAppStore } from '@/context/web-app-context' import { useAppFavicon } from '@/hooks/use-app-favicon' import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints' import useDocumentTitle from '@/hooks/use-document-title' -import { changeLanguage } from '@/i18n-config/i18next-config' +import { changeLanguage } from '@/i18n-config/client' import { AccessMode } from '@/models/access-control' import { fetchSavedMessage as doFetchSavedMessage, removeMessage, saveMessage } from '@/service/share' import { Resolution, TransferMethod } from '@/types/app' diff --git a/web/app/components/tools/marketplace/index.spec.tsx b/web/app/components/tools/marketplace/index.spec.tsx index 354c717f2d..493d960e2a 100644 --- a/web/app/components/tools/marketplace/index.spec.tsx +++ b/web/app/components/tools/marketplace/index.spec.tsx @@ -19,7 +19,6 @@ vi.mock('@/app/components/plugins/marketplace/list', () => ({ marketplaceCollectionPluginsMap: Record plugins?: unknown[] showInstallButton?: boolean - locale: string }) => { listRenderSpy(props) return
@@ -42,10 +41,6 @@ vi.mock('@/utils/var', () => ({ getMarketplaceUrl: vi.fn(() => 'https://marketplace.test/market'), })) -vi.mock('@/i18n-config', () => ({ - getLocaleOnClient: () => 'en', -})) - vi.mock('next-themes', () => ({ useTheme: () => ({ theme: 'light' }), })) @@ -148,7 +143,6 @@ describe('Marketplace', () => { expect(screen.getByTestId('marketplace-list')).toBeInTheDocument() expect(listRenderSpy).toHaveBeenCalledWith(expect.objectContaining({ showInstallButton: true, - locale: 'en', })) }) }) diff --git a/web/app/components/tools/marketplace/index.tsx b/web/app/components/tools/marketplace/index.tsx index 476a47d8f6..3900a9e505 100644 --- a/web/app/components/tools/marketplace/index.tsx +++ b/web/app/components/tools/marketplace/index.tsx @@ -1,4 +1,5 @@ import type { useMarketplace } from './hooks' +import { useLocale } from '#i18n' import { RiArrowRightUpLine, RiArrowUpDoubleLine, @@ -7,7 +8,6 @@ import { useTheme } from 'next-themes' import { useTranslation } from 'react-i18next' import Loading from '@/app/components/base/loading' import List from '@/app/components/plugins/marketplace/list' -import { getLocaleOnClient } from '@/i18n-config' import { getMarketplaceUrl } from '@/utils/var' type MarketplaceProps = { @@ -24,7 +24,7 @@ const Marketplace = ({ showMarketplacePanel, marketplaceContext, }: MarketplaceProps) => { - const locale = getLocaleOnClient() + const locale = useLocale() const { t } = useTranslation() const { theme } = useTheme() const { @@ -104,7 +104,6 @@ const Marketplace = ({ marketplaceCollectionPluginsMap={marketplaceCollectionPluginsMap || {}} plugins={plugins} showInstallButton - locale={locale} /> ) } diff --git a/web/app/layout.tsx b/web/app/layout.tsx index 8fc5f8abcc..acd56e1da6 100644 --- a/web/app/layout.tsx +++ b/web/app/layout.tsx @@ -8,9 +8,10 @@ import { TanstackQueryInitializer } from '@/context/query-client' import { getLocaleOnServer } from '@/i18n-config/server' import { DatasetAttr } from '@/types/feature' import { cn } from '@/utils/classnames' +import { ToastProvider } from './components/base/toast' import BrowserInitializer from './components/browser-initializer' import { ReactScanLoader } from './components/devtools/react-scan/loader' -import I18nServer from './components/i18n-server' +import { I18nServerProvider } from './components/provider/i18n-server' import SentryInitializer from './components/sentry-initializer' import RoutePrefixHandle from './routePrefixHandle' import './styles/globals.css' @@ -104,11 +105,13 @@ const LocaleLayout = async ({ - - - {children} - - + + + + {children} + + + diff --git a/web/context/i18n.ts b/web/context/i18n.ts index e65049b506..d57fc5b984 100644 --- a/web/context/i18n.ts +++ b/web/context/i18n.ts @@ -1,10 +1,10 @@ import type { Locale } from '@/i18n-config/language' -import { atom, useAtomValue } from 'jotai' +import { useTranslation } from '#i18n' import { getDocLanguage, getLanguage, getPricingPageLanguage } from '@/i18n-config/language' -export const localeAtom = atom('en-US') export const useLocale = () => { - return useAtomValue(localeAtom) + const { i18n } = useTranslation() + return i18n.language as Locale } export const useGetLanguage = () => { diff --git a/web/i18n-config/client.ts b/web/i18n-config/client.ts new file mode 100644 index 0000000000..17d3dceae1 --- /dev/null +++ b/web/i18n-config/client.ts @@ -0,0 +1,35 @@ +'use client' +import type { Resource } from 'i18next' +import type { Locale } from '.' 
+import type { NamespaceCamelCase, NamespaceKebabCase } from './resources' +import { kebabCase } from 'es-toolkit/string' +import { createInstance } from 'i18next' +import resourcesToBackend from 'i18next-resources-to-backend' +import { getI18n, initReactI18next } from 'react-i18next' +import { getInitOptions } from './settings' + +export function createI18nextInstance(lng: Locale, resources: Resource) { + const instance = createInstance() + instance + .use(initReactI18next) + .use(resourcesToBackend(( + language: Locale, + namespace: NamespaceKebabCase | NamespaceCamelCase, + ) => { + const namespaceKebab = kebabCase(namespace) + return import(`../i18n/${language}/${namespaceKebab}.json`) + })) + .init({ + ...getInitOptions(), + lng, + resources, + }) + return instance +} + +export const changeLanguage = async (lng?: Locale) => { + if (!lng) + return + const i18n = getI18n() + await i18n.changeLanguage(lng) +} diff --git a/web/i18n-config/index.ts b/web/i18n-config/index.ts index bb73ef4b71..e24fd8533d 100644 --- a/web/i18n-config/index.ts +++ b/web/i18n-config/index.ts @@ -2,7 +2,7 @@ import type { Locale } from '@/i18n-config/language' import Cookies from 'js-cookie' import { LOCALE_COOKIE_NAME } from '@/config' -import { changeLanguage } from '@/i18n-config/i18next-config' +import { changeLanguage } from '@/i18n-config/client' import { LanguagesSupported } from '@/i18n-config/language' export const i18n = { @@ -19,10 +19,6 @@ export const setLocaleOnClient = async (locale: Locale, reloadPage = true) => { location.reload() } -export const getLocaleOnClient = (): Locale => { - return Cookies.get(LOCALE_COOKIE_NAME) as Locale || i18n.defaultLocale -} - export const renderI18nObject = (obj: Record, language: string) => { if (!obj) return '' diff --git a/web/i18n-config/lib.client.ts b/web/i18n-config/lib.client.ts index fc40384458..fffb4d95ae 100644 --- a/web/i18n-config/lib.client.ts +++ b/web/i18n-config/lib.client.ts @@ -1,6 +1,6 @@ 'use client' -import type { NamespaceCamelCase } from './i18next-config' +import type { NamespaceCamelCase } from './resources' import { useTranslation as useTranslationOriginal } from 'react-i18next' export function useTranslation(ns?: NamespaceCamelCase) { diff --git a/web/i18n-config/lib.server.ts b/web/i18n-config/lib.server.ts index 4969492cd4..4727ed482f 100644 --- a/web/i18n-config/lib.server.ts +++ b/web/i18n-config/lib.server.ts @@ -1,4 +1,4 @@ -import type { NamespaceCamelCase } from './i18next-config' +import type { NamespaceCamelCase } from './resources' import { use } from 'react' import { getLocaleOnServer, getTranslation } from './server' diff --git a/web/i18n-config/i18next-config.ts b/web/i18n-config/resources.ts similarity index 59% rename from web/i18n-config/i18next-config.ts rename to web/i18n-config/resources.ts index 0997485967..4bcfb98e14 100644 --- a/web/i18n-config/i18next-config.ts +++ b/web/i18n-config/resources.ts @@ -1,8 +1,4 @@ -'use client' -import type { Locale } from '.' 
-import { camelCase, kebabCase } from 'es-toolkit/string' -import i18n from 'i18next' -import { initReactI18next } from 'react-i18next' +import { kebabCase } from 'es-toolkit/string' import appAnnotation from '../i18n/en-US/app-annotation.json' import appApi from '../i18n/en-US/app-api.json' import appDebug from '../i18n/en-US/app-debug.json' @@ -35,7 +31,7 @@ import tools from '../i18n/en-US/tools.json' import workflow from '../i18n/en-US/workflow.json' // @keep-sorted -export const resources = { +const resources = { app, appAnnotation, appApi, @@ -82,60 +78,5 @@ export type Resources = typeof resources export type NamespaceCamelCase = keyof Resources export type NamespaceKebabCase = KebabCase -const requireSilent = async (lang: Locale, namespace: NamespaceKebabCase) => { - let res - try { - res = (await import(`../i18n/${lang}/${namespace}.json`)).default - } - catch { - res = (await import(`../i18n/en-US/${namespace}.json`)).default - } - - return res -} - -const NAMESPACES = Object.keys(resources).map(kebabCase) as NamespaceKebabCase[] - -// Load a single namespace for a language -export const loadNamespace = async (lang: Locale, ns: NamespaceKebabCase) => { - const camelNs = camelCase(ns) as NamespaceCamelCase - if (i18n.hasResourceBundle(lang, camelNs)) - return - - const resource = await requireSilent(lang, ns) - i18n.addResourceBundle(lang, camelNs, resource, true, true) -} - -// Load all namespaces for a language (used when switching language) -export const loadLangResources = async (lang: Locale) => { - await Promise.all( - NAMESPACES.map(ns => loadNamespace(lang, ns)), - ) -} - -// Initial resources: load en-US namespaces for fallback/default locale -const getInitialTranslations = () => { - return { - 'en-US': resources, - } -} - -if (!i18n.isInitialized) { - i18n.use(initReactI18next).init({ - lng: undefined, - fallbackLng: 'en-US', - resources: getInitialTranslations(), - defaultNS: 'common', - ns: Object.keys(resources), - keySeparator: false, - }) -} - -export const changeLanguage = async (lng?: Locale) => { - if (!lng) - return - await loadLangResources(lng) - await i18n.changeLanguage(lng) -} - -export default i18n +export const namespacesCamelCase = Object.keys(resources) as NamespaceCamelCase[] +export const namespacesKebabCase = namespacesCamelCase.map(ns => kebabCase(ns)) as NamespaceKebabCase[] diff --git a/web/i18n-config/server.ts b/web/i18n-config/server.ts index f5966149fd..403040c134 100644 --- a/web/i18n-config/server.ts +++ b/web/i18n-config/server.ts @@ -1,15 +1,19 @@ -import type { i18n as I18nInstance } from 'i18next' +import type { i18n as I18nInstance, Resource, ResourceLanguage } from 'i18next' import type { Locale } from '.' -import type { NamespaceCamelCase, NamespaceKebabCase } from './i18next-config' +import type { NamespaceCamelCase, NamespaceKebabCase } from './resources' import { match } from '@formatjs/intl-localematcher' import { kebabCase } from 'es-toolkit/compat' +import { camelCase } from 'es-toolkit/string' import { createInstance } from 'i18next' import resourcesToBackend from 'i18next-resources-to-backend' import Negotiator from 'negotiator' import { cookies, headers } from 'next/headers' +import { cache } from 'react' import { initReactI18next } from 'react-i18next/initReactI18next' import { serverOnlyContext } from '@/utils/server-only-context' import { i18n } from '.' 
+import { namespacesKebabCase } from './resources' +import { getInitOptions } from './settings' const [getLocaleCache, setLocaleCache] = serverOnlyContext(null) const [getI18nInstance, setI18nInstance] = serverOnlyContext(null) @@ -27,9 +31,8 @@ const getOrCreateI18next = async (lng: Locale) => { return import(`../i18n/${language}/${fileNamespace}.json`) })) .init({ + ...getInitOptions(), lng, - fallbackLng: 'en-US', - keySeparator: false, }) setI18nInstance(instance) return instance @@ -76,3 +79,16 @@ export const getLocaleOnServer = async (): Promise => { setLocaleCache(matchedLocale) return matchedLocale } + +export const getResources = cache(async (lng: Locale): Promise => { + const messages = {} as ResourceLanguage + + await Promise.all( + (namespacesKebabCase).map(async (ns) => { + const mod = await import(`../i18n/${lng}/${ns}.json`) + messages[camelCase(ns)] = mod.default + }), + ) + + return { [lng]: messages } +}) diff --git a/web/i18n-config/settings.ts b/web/i18n-config/settings.ts new file mode 100644 index 0000000000..1bf37ab21d --- /dev/null +++ b/web/i18n-config/settings.ts @@ -0,0 +1,13 @@ +import type { InitOptions } from 'i18next' +import { namespacesCamelCase } from './resources' + +export function getInitOptions(): InitOptions { + return { + // We do not have en for fallback + load: 'currentOnly', + fallbackLng: 'en-US', + partialBundledLanguages: true, + keySeparator: false, + ns: namespacesCamelCase, + } +} diff --git a/web/types/i18n.d.ts b/web/types/i18n.d.ts index 160d385730..9e20d5a55a 100644 --- a/web/types/i18n.d.ts +++ b/web/types/i18n.d.ts @@ -1,4 +1,4 @@ -import type { NamespaceCamelCase, Resources } from '../i18n-config/i18next-config' +import type { NamespaceCamelCase, Resources } from '../i18n-config/resources' import 'i18next' declare module 'i18next' { From 160b4d194bad9546cb3aff483b74dd0388bf3b1c Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Wed, 7 Jan 2026 14:20:38 +0800 Subject: [PATCH 13/15] fix: signin page stuck on loading when refresh token valid but access token expired (#30675) Co-authored-by: Claude Opus 4.5 --- web/app/signin/normal-form.tsx | 4 +++- web/service/use-common.ts | 9 ++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/web/app/signin/normal-form.tsx b/web/app/signin/normal-form.tsx index bf4d80326b..be0feea6c1 100644 --- a/web/app/signin/normal-form.tsx +++ b/web/app/signin/normal-form.tsx @@ -28,7 +28,8 @@ const NormalForm = () => { const message = decodeURIComponent(searchParams.get('message') || '') const invite_token = decodeURIComponent(searchParams.get('invite_token') || '') const [isInitCheckLoading, setInitCheckLoading] = useState(true) - const isLoading = isCheckLoading || loginData?.logged_in || isInitCheckLoading + const [isRedirecting, setIsRedirecting] = useState(false) + const isLoading = isCheckLoading || isInitCheckLoading || isRedirecting const { systemFeatures } = useGlobalPublicStore() const [authType, updateAuthType] = useState<'code' | 'password'>('password') const [showORLine, setShowORLine] = useState(false) @@ -40,6 +41,7 @@ const NormalForm = () => { const init = useCallback(async () => { try { if (isLoggedIn) { + setIsRedirecting(true) const redirectUrl = resolvePostLoginRedirect(searchParams) router.replace(redirectUrl || '/apps') return diff --git a/web/service/use-common.ts b/web/service/use-common.ts index a1edb041c0..ca0845d95a 100644 --- a/web/service/use-common.ts +++ b/web/service/use-common.ts @@ -221,13 +221,12 @@ export const useIsLogin 
= () => { await get('/account/profile', {}, { silent: true, }) - } - catch (e: any) { - if (e.status === 401) - return { logged_in: false } return { logged_in: true } } - return { logged_in: true } + catch { + // Any error (401, 500, network error, etc.) means not logged in + return { logged_in: false } + } }, }) } From 666640f7d59ef787a6916d6c7b2d48153105f3f0 Mon Sep 17 00:00:00 2001 From: Xiangxuan Qu Date: Wed, 7 Jan 2026 15:40:35 +0900 Subject: [PATCH 14/15] refactor: remove unnecessary type: ignore from rag_pipeline_fields.py (#30666) Co-authored-by: fghpdf --- api/fields/rag_pipeline_fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/fields/rag_pipeline_fields.py b/api/fields/rag_pipeline_fields.py index f9e858c68b..97c02e7085 100644 --- a/api/fields/rag_pipeline_fields.py +++ b/api/fields/rag_pipeline_fields.py @@ -1,4 +1,4 @@ -from flask_restx import fields # type: ignore +from flask_restx import fields from fields.workflow_fields import workflow_partial_fields from libs.helper import AppIconUrlField, TimestampField From 187bfafe8b001cf8afad95e257374f2b2e7e5983 Mon Sep 17 00:00:00 2001 From: wangxiaolei Date: Wed, 7 Jan 2026 14:54:11 +0800 Subject: [PATCH 15/15] fix: fix assign value stand as default (#30651) Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com> --- .../advanced_chat/generate_task_pipeline.py | 19 + ...test_generate_task_pipeline_answer_node.py | 390 ++++++++++++++++++ 2 files changed, 409 insertions(+) create mode 100644 api/tests/unit_tests/core/app/apps/advanced_chat/test_generate_task_pipeline_answer_node.py diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index da1e9f19b6..4dd95be52d 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -358,6 +358,25 @@ class AdvancedChatAppGenerateTaskPipeline(GraphRuntimeStateSupport): if node_finish_resp: yield node_finish_resp + # For ANSWER nodes, check if we need to send a message_replace event + # Only send if the final output differs from the accumulated task_state.answer + # This happens when variables were updated by variable_assigner during workflow execution + if event.node_type == NodeType.ANSWER and event.outputs: + final_answer = event.outputs.get("answer") + if final_answer is not None and final_answer != self._task_state.answer: + logger.info( + "ANSWER node final output '%s' differs from accumulated answer '%s', sending message_replace event", + final_answer, + self._task_state.answer, + ) + # Update the task state answer + self._task_state.answer = str(final_answer) + # Send message_replace event to update the UI + yield self._message_cycle_manager.message_replace_to_stream_response( + answer=str(final_answer), + reason="variable_update", + ) + def _handle_node_failed_events( self, event: Union[QueueNodeFailedEvent, QueueNodeExceptionEvent], diff --git a/api/tests/unit_tests/core/app/apps/advanced_chat/test_generate_task_pipeline_answer_node.py b/api/tests/unit_tests/core/app/apps/advanced_chat/test_generate_task_pipeline_answer_node.py new file mode 100644 index 0000000000..205b157542 --- /dev/null +++ b/api/tests/unit_tests/core/app/apps/advanced_chat/test_generate_task_pipeline_answer_node.py @@ -0,0 +1,390 @@ +""" +Tests for AdvancedChatAppGenerateTaskPipeline._handle_node_succeeded_event method, +specifically testing the ANSWER node message_replace logic. 
+""" + +from datetime import datetime +from types import SimpleNamespace +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity +from core.app.entities.queue_entities import QueueNodeSucceededEvent +from core.workflow.enums import NodeType +from models import EndUser +from models.model import AppMode + + +class TestAnswerNodeMessageReplace: + """Test cases for ANSWER node message_replace event logic.""" + + @pytest.fixture + def mock_application_generate_entity(self): + """Create a mock application generate entity.""" + entity = Mock(spec=AdvancedChatAppGenerateEntity) + entity.task_id = "test-task-id" + entity.app_id = "test-app-id" + entity.workflow_run_id = "test-workflow-run-id" + # minimal app_config used by pipeline internals + entity.app_config = SimpleNamespace( + tenant_id="test-tenant-id", + app_id="test-app-id", + app_mode=AppMode.ADVANCED_CHAT, + app_model_config_dict={}, + additional_features=None, + sensitive_word_avoidance=None, + ) + entity.query = "test query" + entity.files = [] + entity.extras = {} + entity.trace_manager = None + entity.inputs = {} + entity.invoke_from = "debugger" + return entity + + @pytest.fixture + def mock_workflow(self): + """Create a mock workflow.""" + workflow = Mock() + workflow.id = "test-workflow-id" + workflow.features_dict = {} + return workflow + + @pytest.fixture + def mock_queue_manager(self): + """Create a mock queue manager.""" + manager = Mock() + manager.listen.return_value = [] + manager.graph_runtime_state = None + return manager + + @pytest.fixture + def mock_conversation(self): + """Create a mock conversation.""" + conversation = Mock() + conversation.id = "test-conversation-id" + conversation.mode = "advanced_chat" + return conversation + + @pytest.fixture + def mock_message(self): + """Create a mock message.""" + message = Mock() + message.id = "test-message-id" + message.query = "test query" + message.created_at = Mock() + message.created_at.timestamp.return_value = 1234567890 + return message + + @pytest.fixture + def mock_user(self): + """Create a mock end user.""" + user = MagicMock(spec=EndUser) + user.id = "test-user-id" + user.session_id = "test-session-id" + return user + + @pytest.fixture + def mock_draft_var_saver_factory(self): + """Create a mock draft variable saver factory.""" + return Mock() + + @pytest.fixture + def pipeline( + self, + mock_application_generate_entity, + mock_workflow, + mock_queue_manager, + mock_conversation, + mock_message, + mock_user, + mock_draft_var_saver_factory, + ): + """Create an AdvancedChatAppGenerateTaskPipeline instance with mocked dependencies.""" + from core.app.apps.advanced_chat.generate_task_pipeline import AdvancedChatAppGenerateTaskPipeline + + with patch("core.app.apps.advanced_chat.generate_task_pipeline.db"): + pipeline = AdvancedChatAppGenerateTaskPipeline( + application_generate_entity=mock_application_generate_entity, + workflow=mock_workflow, + queue_manager=mock_queue_manager, + conversation=mock_conversation, + message=mock_message, + user=mock_user, + stream=True, + dialogue_count=1, + draft_var_saver_factory=mock_draft_var_saver_factory, + ) + # Initialize workflow run id to avoid validation errors + pipeline._workflow_run_id = "test-workflow-run-id" + # Mock the message cycle manager methods we need to track + pipeline._message_cycle_manager.message_replace_to_stream_response = Mock() + return pipeline + + def 
test_answer_node_with_different_output_sends_message_replace(self, pipeline, mock_application_generate_entity): + """ + Test that when an ANSWER node's final output differs from accumulated answer, + a message_replace event is sent. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "initial answer" + + # Create ANSWER node succeeded event with different final output + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={"answer": "updated final answer"}, + ) + + # Mock the workflow response converter to avoid extra processing + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + responses = list(pipeline._handle_node_succeeded_event(event)) + + # Assert + assert pipeline._task_state.answer == "updated final answer" + # Verify message_replace was called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_called_once_with( + answer="updated final answer", reason="variable_update" + ) + + def test_answer_node_with_same_output_does_not_send_message_replace(self, pipeline): + """ + Test that when an ANSWER node's final output is the same as accumulated answer, + no message_replace event is sent. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "same answer" + + # Create ANSWER node succeeded event with same output + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={"answer": "same answer"}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "same answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_answer_node_with_none_output_does_not_send_message_replace(self, pipeline): + """ + Test that when an ANSWER node's output is None or missing 'answer' key, + no message_replace event is sent. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "existing answer" + + # Create ANSWER node succeeded event with None output + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={"answer": None}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "existing answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_answer_node_with_empty_outputs_does_not_send_message_replace(self, pipeline): + """ + Test that when an ANSWER node has empty outputs dict, + no message_replace event is sent. 
+ """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "existing answer" + + # Create ANSWER node succeeded event with empty outputs + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "existing answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_answer_node_with_no_answer_key_in_outputs(self, pipeline): + """ + Test that when an ANSWER node's outputs don't contain 'answer' key, + no message_replace event is sent. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "existing answer" + + # Create ANSWER node succeeded event without 'answer' key in outputs + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={"other_key": "some value"}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "existing answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_non_answer_node_does_not_send_message_replace(self, pipeline): + """ + Test that non-ANSWER nodes (e.g., LLM, END) don't trigger message_replace events. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "existing answer" + + # Test with LLM node + llm_event = QueueNodeSucceededEvent( + node_execution_id="test-llm-execution-id", + node_id="test-llm-node", + node_type=NodeType.LLM, + start_at=datetime.now(), + outputs={"answer": "different answer"}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(llm_event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "existing answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_end_node_does_not_send_message_replace(self, pipeline): + """ + Test that END nodes don't trigger message_replace events even with 'answer' output. 
+ """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "existing answer" + + # Create END node succeeded event with answer output + event = QueueNodeSucceededEvent( + node_execution_id="test-end-execution-id", + node_id="test-end-node", + node_type=NodeType.END, + start_at=datetime.now(), + outputs={"answer": "different answer"}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should remain unchanged + assert pipeline._task_state.answer == "existing answer" + # Verify message_replace was NOT called + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_not_called() + + def test_answer_node_with_numeric_output_converts_to_string(self, pipeline): + """ + Test that when an ANSWER node's final output is numeric, + it gets converted to string properly. + """ + # Arrange: Set initial accumulated answer + pipeline._task_state.answer = "text answer" + + # Create ANSWER node succeeded event with numeric output + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={"answer": 12345}, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: answer should be converted to string + assert pipeline._task_state.answer == "12345" + # Verify message_replace was called with string + pipeline._message_cycle_manager.message_replace_to_stream_response.assert_called_once_with( + answer="12345", reason="variable_update" + ) + + def test_answer_node_files_are_recorded(self, pipeline): + """ + Test that ANSWER nodes properly record files from outputs. + """ + # Arrange + pipeline._task_state.answer = "existing answer" + + # Create ANSWER node succeeded event with files + event = QueueNodeSucceededEvent( + node_execution_id="test-node-execution-id", + node_id="test-answer-node", + node_type=NodeType.ANSWER, + start_at=datetime.now(), + outputs={ + "answer": "same answer", + "files": [ + {"type": "image", "transfer_method": "remote_url", "remote_url": "http://example.com/img.png"} + ], + }, + ) + + # Mock the workflow response converter + pipeline._workflow_response_converter.fetch_files_from_node_outputs = Mock(return_value=event.outputs["files"]) + pipeline._workflow_response_converter.workflow_node_finish_to_stream_response = Mock(return_value=None) + pipeline._save_output_for_event = Mock() + + # Act + list(pipeline._handle_node_succeeded_event(event)) + + # Assert: files should be recorded + assert len(pipeline._recorded_files) == 1 + assert pipeline._recorded_files[0] == event.outputs["files"][0]
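

# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the test suite above).
# A minimal, self-contained model of the decision the tests exercise: when an
# ANSWER node finishes, its final "answer" output replaces the accumulated
# answer only if it is present and differs from it. This is a hypothetical
# reconstruction for readers of the tests, not the actual
# AdvancedChatAppGenerateTaskPipeline implementation; the helper name and
# signature below are invented for illustration only.
def _expected_replacement_answer(
    node_type: NodeType,
    outputs: dict | None,
    accumulated_answer: str,
) -> str | None:
    """Return the text a message_replace event would carry, or None when no event is expected."""
    if node_type != NodeType.ANSWER:
        return None  # only ANSWER nodes are expected to trigger message_replace
    final_answer = (outputs or {}).get("answer")
    if final_answer is None:
        return None  # missing or None output leaves the accumulated answer untouched
    final_answer = str(final_answer)  # numeric outputs are compared and emitted as strings
    if final_answer == accumulated_answer:
        return None  # identical output: nothing to replace
    return final_answer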